import { AnimationAction } from 'three';
import { AnimationClip } from 'three';
import { AnimationMixer } from 'three';
import { Audio as Audio_2 } from 'three';
import { AudioListener as AudioListener_3 } from 'three';
import { BatchedMesh } from 'three';
import { BloomEffect as BloomEffect_2 } from 'postprocessing';
import { Box3 } from 'three';
import { BufferGeometry } from 'three';
import { Camera as Camera_2 } from 'three';
import { Collider as Collider_2 } from '@dimforge/rapier3d-compat';
import { ColliderDesc } from '@dimforge/rapier3d-compat';
import { Color } from 'three';
import { ColorRepresentation } from 'three';
import { Curve } from 'three';
import { default as default_2 } from 'peerjs';
import { default as default_3 } from 'three/src/materials/nodes/MeshPhysicalNodeMaterial.js';
import { DepthOfFieldEffect } from 'postprocessing';
import { DepthTexture } from 'three';
import { dimforgeRapier3dCompat } from '@dimforge/rapier3d-compat';
import { DocumentedOptions } from '../../../node_modules/three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js';
import { Effect } from 'postprocessing';
import { EffectComposer } from 'postprocessing';
import { EffectComposer as EffectComposer_2 } from '../../node_modules/@types/three/examples/jsm/postprocessing/EffectComposer.js';
import { EmitterShape } from 'three.quarks';
import { Euler } from 'three';
import { EventDispatcher } from 'three';
import { Face } from 'three';
import * as fflate from 'three/examples/jsm/libs/fflate.module.js';
import * as flatbuffers from 'flatbuffers';
import { Fog as Fog_2 } from 'three';
import { Font } from '../../node_modules/@types/three/examples/jsm/loaders/FontLoader.js';
import { Frustum } from 'three';
import { GLTF as GLTF_2 } from '../../node_modules/@types/three/examples/jsm/loaders/GLTFLoader.js';
import { GLTF as GLTF_3 } from '../../../node_modules/@types/three/examples/jsm/loaders/GLTFLoader.js';
import { GLTFExporter } from '../../../node_modules/@types/three/examples/jsm/exporters/GLTFExporter.js';
import { GLTFExporterOptions } from '../../../../node_modules/@types/three/examples/jsm/exporters/GLTFExporter.js';
import { GLTFLoader } from '../../../node_modules/@types/three/examples/jsm/loaders/GLTFLoader.js';
import { GLTFLoaderPlugin } from '../../../node_modules/@types/three/examples/jsm/loaders/GLTFLoader.js';
import { GLTFParser } from '../../../node_modules/@types/three/examples/jsm/loaders/GLTFLoader.js';
import { Group } from 'three';
import { InstancedMesh } from 'three';
import { Intersection } from 'three';
import { IParticleSystem as IParticleSystem_2 } from 'three.quarks';
import { KeyframeTrack } from 'three';
import { Layers } from 'three';
import { LightProbe } from 'three';
import { Line2 } from '../../../../node_modules/@types/three/examples/jsm/lines/Line2.js';
import { Loader } from 'three';
import { LoadingManager } from 'three';
import { LOD_Results } from '@needle-tools/gltf-progressive';
import { LODsManager as LODsManager_2 } from '@needle-tools/gltf-progressive';
import { Material } from 'three';
import { Matrix4 } from 'three';
import { MediaConnection } from 'peerjs';
import { Mesh } from 'three';
import { MeshBasicMaterial } from 'three';
import { MeshPhysicalMaterial } from 'three';
import { MeshStandardMaterial } from 'three';
import { N8AOPostPass } from 'n8ao';
import { NEEDLE_progressive } from '@needle-tools/gltf-progressive';
import { NEEDLE_progressive_plugin } from '@needle-tools/gltf-progressive';
import { needleToolsMaterialx } from '@needle-tools/materialx';
import { NormalBufferAttributes } from 'three';
import { Object3D } from 'three';
import { Object3DEventMap } from 'three';
import { Options } from '../../../node_modules/three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js';
import { OrbitControls as OrbitControls_2 } from '../../node_modules/@types/three/examples/jsm/controls/OrbitControls.js';
import { OrthographicCamera } from 'three';
import { ParticleSystem as ParticleSystem_2 } from 'three.quarks';
import { Pass } from 'postprocessing';
import { peerjs } from 'peerjs';
import { PeerJSOption } from 'peerjs';
import { PerspectiveCamera } from 'three';
import { PositionalAudio } from 'three';
import { postprocessing } from 'postprocessing';
import { Particle as QParticle } from 'three.quarks';
import { Behavior as QParticleBehaviour } from 'three.quarks';
import { TrailParticle as QTrailParticle } from 'three.quarks';
import { Quaternion } from 'three';
import { QueryFilterFlags } from '@dimforge/rapier3d-compat';
import { RawShaderMaterial } from 'three';
import { Ray } from 'three';
import { Raycaster } from 'three';
import { Scene } from 'three';
import { ShaderMaterial } from 'three';
import { ShapeJSON } from 'three.quarks';
import { SkinnedMesh } from 'three';
import { Sprite as Sprite_2 } from 'three';
import { SpriteMaterial } from 'three';
import { Texture } from 'three';
import * as ThreeMeshUI from 'three-mesh-ui';
import { TransformControls } from '../../node_modules/@types/three/examples/jsm/controls/TransformControls.js';
import { Vector2 } from 'three';
import { Vector2Like } from 'three';
import { Vector3 } from 'three';
import { Vector3 as Vector3_2 } from 'three.quarks';
import { Vector3Like } from 'three';
import { Vector4 } from 'three';
import { Vector4 as Vector4_2 } from 'three.quarks';
import { Vector4Like } from 'three';
import { VideoTexture } from 'three';
import { WebGLCubeRenderTarget } from 'three';
import { WebGLRenderer } from 'three';
import { WebGLRendererParameters } from 'three';
import { WebGLRenderTarget } from 'three';
import { WebXRArrayCamera } from 'three';
import { World } from '@dimforge/rapier3d-compat';
import { XRControllerModelFactory } from '../../../../node_modules/@types/three/examples/jsm/webxr/XRControllerModelFactory.js';
import { XRHandMeshModel } from '../../../../node_modules/@types/three/examples/jsm/webxr/XRHandMeshModel.js';
import { XRHandSpace } from 'three';

/** Unique symbol — presumably used internally as a hidden property key to associate a component name with an object; confirm against the implementation. */
export declare const $componentName: unique symbol;

/** Unique symbol — presumably used internally as a hidden property key for physics-related bookkeeping on objects; confirm against the implementation. */
export declare const $physicsKey: unique symbol;

/** Empty exported class with no members. NOTE(review): presumably a type-level marker used by the engine's serialization/codegen — confirm its role in the implementation. */
export declare class __Ignore {
}

/**
 * Internal engine hook that notifies the engine that the given object has been destroyed.
 * @param obj - The Object3D that was destroyed.
 */
export declare function __internalNotifyObjectDestroyed(obj: Object3D): void;

/** Data describing the accessible semantics for a 3D object or component.
 * @see AccessibilityManager.updateElement for where this data is applied to the accessibility overlay.
 */
declare type AccessibilityData = {
    /** ARIA role (e.g. `"button"`, `"img"`, `"region"`). */
    role: string;
    /** Human-readable label announced by screen readers. */
    label: string;
    /** When `true`, the element is hidden from the accessibility tree. */
    hidden?: boolean;
    /** When `true`, indicates the element's content is being updated. */
    busy?: boolean;
};

/**
 * Manages an accessible, screen-reader-friendly overlay for a Needle Engine {@link Context}.
 *
 * The manager maintains a visually-hidden DOM tree that mirrors relevant 3D scene objects
 * with appropriate ARIA roles and labels. It also provides a live region so that hover
 * events in the 3D scene can be announced to assistive technology without stealing focus.
 *
 * ## Automatic integration
 * Several built-in components register accessible elements automatically:
 * - {@link DragControls} — announces draggable objects and drag state
 * - {@link Button} — exposes UI buttons to the accessibility tree
 * - {@link Text} — exposes UI text content to screen readers
 * - {@link ChangeTransformOnClick} — announces clickable transform actions
 * - {@link ChangeMaterialOnClick} — announces clickable material changes
 * - {@link EmphasizeOnClick} — announces clickable emphasis effects
 * - {@link PlayAudioOnClick} — announces clickable audio playback
 * - {@link PlayAnimationOnClick} — announces clickable animation triggers
 *
 * ## What this unlocks
 * - Hovering over buttons and interactive objects with the cursor announces them to screen readers via an ARIA live region — no focus steal required
 * - Screen readers can discover and navigate interactive 3D objects in the scene
 * - Drag operations update the accessibility state (busy, label changes) in real time
 * - Custom components can participate by calling {@link updateElement}, {@link focus}, and {@link hover}
 *
 * Access the manager via `this.context.accessibility` from any component.
 */
declare class AccessibilityManager {
    /** The engine context this manager is attached to. */
    private readonly context;
    /** Tracks created manager instances so that {@link get} can resolve the manager for a context. */
    private static readonly _managers;
    /** Returns the {@link AccessibilityManager} associated with the given context or component. */
    static get(obj: Context | IComponent): AccessibilityManager | undefined;
    constructor(context: Context);
    /** Backing field for {@link enabled}. */
    private _enabled;
    /** Enables or disables the accessibility overlay. When disabled, the overlay DOM is removed. */
    set enabled(value: boolean);
    /** Removes all tracked accessibility elements, keeping only the live region. */
    clear(): void;
    /** Removes the overlay from the DOM and unregisters this manager from the context. */
    dispose(): void;
    /** Root element of the visually-hidden overlay DOM tree. */
    private readonly root;
    /** ARIA live region element used by {@link hover} to announce text without stealing focus. */
    private readonly liveRegion;
    /** Lookup of the accessible DOM elements created for tracked objects/components. */
    private readonly treeElements;
    /**
     * Creates or updates the accessible DOM element for a 3D object or component.
     * @param obj - The scene object or component to represent.
     * @param data - Partial accessibility data (role, label, hidden, busy) to apply.
     */
    updateElement<T extends Object3D | IComponent>(obj: T, data: Partial<AccessibilityData>): void;
    /** Moves keyboard focus to the accessible element representing the given object. */
    focus<T extends Object3D | IComponent>(obj: T): void;
    /** Removes keyboard focus from the accessible element representing the given object. */
    unfocus<T extends Object3D | IComponent>(obj: T): void;
    /**
     * Announces a hover event to screen readers via the ARIA live region.
     * @param obj - The hovered object (used to look up its label if `text` is not provided).
     * @param text - Optional text to announce. Falls back to the element's `aria-label`.
     */
    hover<T extends Object3D | IComponent>(obj: T, text?: string): void;
    /** Removes the accessible DOM element for the given object and stops tracking it. */
    removeElement(obj: Object3D | IComponent): void;
    /** Sets the mode of the ARIA live region — NOTE(review): presumably the politeness setting (`polite`/`assertive`); confirm accepted values in the implementation. */
    private set liveRegionMode(value);
}

/**
 * Static factory methods for composing {@link ActionModel} behavior elements.
 * The resulting models can be written into a USD document — see {@link ActionModel.writeTo}.
 */
export declare class ActionBuilder {
    /** Creates a group element that performs the given elements one after another. */
    static sequence(...params: IBehaviorElement[]): GroupActionModel;
    /** Creates a group element that performs the given elements at the same time. */
    static parallel(...params: IBehaviorElement[]): GroupActionModel;
    /** Creates an action that fades the target object in (`show === true`) or out over `duration` seconds. */
    static fadeAction(targetObject: Target, duration: number, show: boolean): ActionModel;
    /**
     * creates an action that plays an animation
     * (the registered animation info carries the start offset in seconds and the duration in seconds; a duration of 0 means play to end)
     * @param targetObject the object the animation plays on
     * @param anim the registered animation info describing the clip to play
     * @param reversed when `true`, the animation plays backwards
     * @param pingPong when `true`, the animation plays forwards then backwards — TODO confirm exact semantics
     */
    static startAnimationAction(targetObject: Target, anim: RegisteredAnimationInfo, reversed?: boolean, pingPong?: boolean): IBehaviorElement;
    /** Creates an action that waits for `duration` seconds. */
    static waitAction(duration: number): ActionModel;
    /** Creates an action that rotates the targets to look at the camera. Optional `front`/`up` vectors define the facing orientation. */
    static lookAtCameraAction(targets: Target, duration?: number, front?: Vec3_2, up?: Vec3_2): ActionModel;
    /** Creates an action that emphasizes the targets over `duration` seconds with the given motion type, movement distance and motion style. */
    static emphasize(targets: Target, duration: number, motionType?: EmphasizeActionMotionType, moveDistance?: number, style?: MotionStyle): ActionModel;
    /** Creates an action that transforms the targets towards `transformTarget` over `duration` seconds in the given {@link Space}, with optional easing. */
    static transformAction(targets: Target, transformTarget: Target, duration: number, transformType: Space, easeType?: EaseType): ActionModel;
    /** Creates an action that plays the audio resource `audio` on the targets with the given play action type, gain and aural mode. */
    static playAudioAction(targets: Target, audio: string, type?: PlayAction, gain?: number, auralMode?: AuralMode): ActionModel;
    /** Creates an action that applies the given velocity impulse to the targets. */
    static impulseAction(targets: Target, velocity: Vec3_2): ActionModel;
}

/** Holds a set of {@link DocumentAction}s and resolves which of them affect a given object. */
export declare class ActionCollection {
    /** The raw actions managed by this collection. */
    private actions;
    /** Lazily-built, organized view of {@link actions} (produced by {@link organize}). */
    private sortedActions?;
    constructor(actions: DocumentAction[]);
    /** Builds {@link sortedActions} from the raw action list. */
    private organize;
    /** returns all document actions affecting the object passed in */
    getActions(obj: Object3D): DocumentAction[] | null;
}

/** Serializable description of a single behavior action (visibility change, animation, transform, audio, impulse, …). Built via {@link ActionBuilder} and written to USD via {@link writeTo}. */
export declare class ActionModel implements IBehaviorElement {
    /** Running counter — presumably used to generate unique {@link id} values; confirm in implementation. */
    private static global_id;
    /** Unique identifier of this action. */
    id: string;
    /** Discriminator describing which kind of action this model represents. */
    tokenId?: "ChangeScene" | "Visibility" | "StartAnimation" | "Wait" | "LookAtCamera" | "Emphasize" | "Transform" | "Audio" | "Impulse";
    /** The object(s) this action affects. */
    affectedObjects?: string | Target;
    /** Easing applied to the action's interpolation. */
    easeType?: EaseType;
    /** Motion type for emphasize or visibility actions. */
    motionType: EmphasizeActionMotionType | VisibilityActionMotionType | undefined;
    /** Duration in seconds (see the {@link ActionBuilder} factory methods). */
    duration?: number;
    /** Movement distance for emphasize actions. */
    moveDistance?: number;
    /** Motion style for emphasize actions. */
    style?: MotionStyle;
    /** Action-specific type: transform space, audio play action, or visibility mode, depending on {@link tokenId}. */
    type?: Space | PlayAction | VisibilityMode;
    /** Front vector for look-at-camera actions. */
    front?: Vec3_2;
    /** Up vector for look-at-camera actions. */
    up?: Vec3_2;
    /** Animation start offset in seconds. */
    start?: number;
    /** Playback speed multiplier for animation actions. */
    animationSpeed?: number;
    /** When `true`, the animation plays backwards. */
    reversed?: boolean;
    /** When `true`, the animation plays forwards then backwards. */
    pingPong?: boolean;
    /** Target for transform actions (object reference or id string). */
    xFormTarget?: Target | string;
    /** Audio resource for audio actions. */
    audio?: string;
    /** Gain for audio actions. */
    gain?: number;
    /** Aural mode for audio actions. */
    auralMode?: AuralMode;
    /** How the action behaves when performed multiple times — confirm semantics against {@link MultiplePerformOperation}. */
    multiplePerformOperation?: MultiplePerformOperation;
    /** Velocity for impulse actions. */
    velocity?: Vec3_2;
    /** Free-form comment attached to this action. */
    comment?: string;
    /** Name of the animation to play, for animation actions. */
    animationName?: string;
    /** Returns a copy of this action model. */
    clone(): ActionModel;
    constructor(affectedObjects?: string | Target, id?: string);
    /** Serializes this action into the given USD document using the writer. */
    writeTo(document: USDDocument, writer: USDWriter): void;
}

/** Property name used for an object's "is active in hierarchy" state — presumably patched onto objects by the engine; confirm usage in the implementation. */
export declare const activeInHierarchyFieldName = "needle_isActiveInHierarchy";

/**
 * Register a callback when an {@link HTMLElement} attribute changes.
 * This is used, for example, by the Skybox component to watch for changes to the environment-* and skybox-* attributes.
 * @param domElement The element whose attribute should be observed
 * @param name The name of the attribute to watch
 * @param callback Invoked when the attribute changes
 * @returns A function that can be used to unregister the callback
 */
export declare function addAttributeChangeCallback(domElement: HTMLElement, name: string, callback: AttributeChangeCallback): () => void;

/**
 * Adds a component to the given object.
 * @param obj The object to add the component to
 * @param componentInstance Either an existing component instance or a concrete component constructor to instantiate
 * @param init Optional initialization data applied to the component
 * @param opts `callAwake` controls whether the component's awake lifecycle method is invoked — TODO confirm the default behavior in the implementation
 * @returns The added component instance
 */
export declare function addComponent<T extends IComponent>(obj: Object3D, componentInstance: T | ConstructorConcrete<T>, init?: ComponentInit<T>, opts?: {
    callAwake: boolean;
}): T;

/** Register callbacks for registering custom gltf importer or exporter plugins
 * @param ext The extension plugin to register
 */
export declare function addCustomExtensionPlugin(ext: INeedleGLTFExtensionPlugin): void;

/**
 * Adds an already-constructed component instance to the given object.
 * @param obj The object to add the component to
 * @param componentInstance The component instance to add
 * @param callAwake Whether to invoke the component's awake lifecycle method — TODO confirm the default in the implementation
 * @returns The added component instance
 */
export declare function addNewComponent<T extends IComponent>(obj: Object3D, componentInstance: T, callAwake?: boolean): T;

/**
 * Use patcher for patching properties instead of calling Object.defineProperty individually
 * since this will cause conflicts if multiple patches need to be applied to the same property
 * @param prototype The prototype whose property should be patched
 * @param fieldName The name of the property to patch
 * @param beforeCallback Presumably invoked before the patched property access — confirm in implementation
 * @param afterCallback Presumably invoked after the patched property access — confirm in implementation
 */
export declare function addPatch<T extends object>(prototype: T, fieldName: string, beforeCallback?: Prefix | null, afterCallback?: Postfix | null): void;

/**
 * The Addressables class is used to register and manage {@link AssetReference} types
 * It can be accessed from components via {@link Context.Current} or {@link Context.addressables} (e.g. `this.context.addressables`)
 */
export declare class Addressables {
    /** The context this instance belongs to. */
    private _context;
    /** The registered asset references, looked up by {@link findAssetReference}. */
    private _assetReferences;
    /* Excluded from this release type: __constructor */
    /* Excluded from this release type: dispose */
    /** Internal per-frame hook — confirm its exact role in the implementation. */
    private preUpdate;
    /**
     * Find a registered AssetReference by its URL
     * @param url The URL the asset reference was registered with
     * @returns The AssetReference, or `null` when none is registered for this URL
     */
    findAssetReference(url: string): AssetReference | null;
    /* Excluded from this release type: registerAssetReference */
    /* Excluded from this release type: unregisterAssetReference */
}

/** Plane-anchoring alignment option for AR anchoring in USDZ export.
 * [documentation](https://developer.apple.com/documentation/arkit/usdz_schemas_for_ar/preliminary_anchoringapi/preliminary_planeanchoring_alignment) */
declare type Alignment = "horizontal" | "vertical" | "any";

/**
 * The [AlignmentConstraint](https://engine.needle.tools/docs/api/AlignmentConstraint) positions and scales this GameObject to span between two target objects.
 * The object is rotated to face `to` and scaled along Z to match the distance.
 *
 * **Use cases:**
 * - Dynamic beams or laser effects between objects
 * - Stretchy connectors or ropes
 * - Visual links between UI elements
 * - Debug lines between transforms
 *
 * **How it works:**
 * - Position: Centered between `from` and `to` (or at `from` if not centered)
 * - Rotation: Looks at `to` from `from`
 * - Scale: Z-axis scales to match distance, X/Y use `width`
 *
 * @example Create a beam between two objects
 * ```ts
 * const beam = beamMesh.addComponent(AlignmentConstraint);
 * // Set targets via serialized properties in editor
 * // or via code if properties are exposed
 * ```
 *
 * @summary Aligns and scales object between two targets
 * @category Constraints
 * @group Components
 * @see {@link SmoothFollow} for following with smoothing
 **/
export declare class AlignmentConstraint extends Component {
    /** Start target the constraint spans from (serialized — set via the editor, see class docs). */
    private from;
    /** End target the constraint spans to (serialized — set via the editor, see class docs). */
    private to;
    /** X/Y scale applied to the object (see class docs: "X/Y use `width`"). */
    private width;
    /** When `true`, the object is positioned centered between `from` and `to`; otherwise at `from` (see class docs). */
    private centered;
    /** Scratch vector for the computed center position — presumably reused to avoid per-frame allocations. */
    private _centerPos;
    /** Lifecycle: called once when the component initializes. */
    awake(): void;
    /** Lifecycle: called every frame to re-apply position, rotation and scale. */
    update(): void;
}

/** A single alpha keyframe pairing a time with an alpha value. */
declare type AlphaKey = {
    /** Keyframe time — TODO confirm whether normalized or in seconds. */
    time: number;
    /** Alpha value at {@link time}. */
    alpha: number;
};

/* Excluded from this release type: AmbientMode */

/** AR anchoring type for USDZ export.
 * [documentation](https://developer.apple.com/documentation/arkit/usdz_schemas_for_ar/preliminary_anchoringapi/preliminary_anchoring_type) */
declare type Anchoring = "plane" | "image" | "face" | "none";

/**
 * Animation component to play animations on a GameObject.
 * For simpler animation needs compared to {@link Animator}, this component directly
 * plays AnimationClips without state machine logic.
 *
 * **Key features:**
 * - Play animations by index, name, or clip reference
 * - Cross-fade between animations with `fadeDuration`
 * - Loop or play once with optional clamping
 * - Random start time and speed variation
 * - Promise-based completion handling
 *
 *
 * ![](https://cloud.needle.tools/-/media/zXQhLgtxr5ZaxLDTDb3MXA.gif)
 *
 * @example Play animation by name
 * ```ts
 * const anim = this.gameObject.getComponent(Animation);
 * await anim?.play("Walk", { loop: true, fadeDuration: 0.3 });
 * ```
 *
 * @example Play with options
 * ```ts
 * anim?.play(0, {
 *   loop: false,
 *   clampWhenFinished: true,
 *   speed: 2
 * });
 * ```
 *
 * @summary Plays animations from AnimationClips
 * @category Animation and Sequencing
 * @group Components
 * @see {@link Animator} for state machine-based animation
 * @see {@link PlayOptions} for all playback options
 * @link https://engine.needle.tools/samples/?overlay=samples&tag=animation
 * @link https://engine.needle.tools/samples/imunogard/
 *
 * @link https://engine.needle.tools/docs/blender/animation.html
 *
 * ![](https://cloud.needle.tools/-/media/vAYv-kU-eMpICqQZHJktCA.gif)
 *
 */
declare class Animation_2 extends Component implements IAnimationComponent {
    /** Identifies this component as an animation component in the engine. */
    get isAnimationComponent(): boolean;
    /** Adds an animation clip to this component. */
    addClip(clip: AnimationClip): void;
    /**
     * If true, the animation will start playing when the component is enabled
     */
    playAutomatically: boolean;
    /**
     * If true, the animation will start at a random time. This is used when the animation component is enabled
     * @default false
     */
    randomStartTime: boolean;
    /**
     * The animation min-max speed range
     * @default undefined
     */
    minMaxSpeed?: Vec2_2;
    /**
     * The normalized offset to start the animation at. This will override startTime
     * @default undefined
     */
    minMaxOffsetNormalized?: Vec2_2;
    /**
     * Set to true to loop the animation
     * @default true
     */
    loop: boolean;
    /**
     * If true, the animation will clamp when finished (mirrors three.js `AnimationAction.clampWhenFinished`)
     */
    clampWhenFinished: boolean;
    /**
     * The time in seconds of the first running animation action
     * @default 0
     */
    get time(): number;
    set time(val: number);
    /** The duration in seconds — TODO confirm whether this reports the clip or the running action. */
    get duration(): number;
    /** Holds a clip assigned via {@link clip} before the gameObject existed — applied later (per the field name; confirm in implementation). */
    private _tempAnimationClipBeforeGameObjectExisted;
    /**
     * Get the first animation clip in the animations array
     */
    get clip(): AnimationClip | null;
    /**
     * Set the first animation clip in the animations array
     */
    set clip(val: AnimationClip | null);
    /** Sets the animation clips of this component. */
    set clips(animations: AnimationClip[]);
    /** Holds clips assigned before the component was fully initialized (per the field name; confirm in implementation). */
    private _tempAnimationsArray;
    /** Replaces all animation clips of this component. */
    set animations(animations: AnimationClip[]);
    /** The animation clips of this component. */
    get animations(): AnimationClip[];
    /** The three.js AnimationMixer driving playback. */
    private mixer;
    /**
     * The animation actions
     */
    get actions(): Array<AnimationAction>;
    set actions(val: Array<AnimationAction>);
    /** Backing field for {@link actions}. */
    private _actions;
    /** Internal playback handles — confirm their exact role in the implementation. */
    private _handles;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: update */
    /* Excluded from this release type: onDisable */
    /* Excluded from this release type: onDestroy */
    /** Get an animation action by the animation clip name */
    getAction(name: string): AnimationAction | null;
    /** Is any animation playing? */
    get isPlaying(): boolean;
    /** Stops all currently playing animations */
    stopAll(opts?: Pick<PlayOptions, "fadeDuration">): void;
    /**
     * Stops a specific animation clip or index. If clip is undefined then all animations will be stopped
     */
    stop(clip?: AnimationIdentifier, opts?: Pick<PlayOptions, "fadeDuration">): void;
    /**
     * Pause all animations or a specific animation clip or index
     * @param clip optional animation clip, index or name, if undefined all animations will be paused
     * @param unpause if true, the animation will be resumed
     */
    pause(clip?: AnimationIdentifier, unpause?: boolean): void;
    /**
     * Resume all paused animations.
     * Note that this will not fade animations in or out and just unpause previous animations. If an animation was faded out which means it's not running anymore, it will not be resumed.
     */
    resume(): void;
    /**
     * Play an animation clip or an clip at the specified index.
     * @param clipOrNumber the animation clip, index or name to play. If undefined, the first animation in the animations array will be played
     * @param options the play options. Use to set the fade duration, loop, speed, start time, end time, clampWhenFinished
     * @returns a promise that resolves when the animation is finished (note that it will not resolve if the animation is looping)
     */
    play(clipOrNumber?: AnimationIdentifier, options?: PlayOptions): Promise<AnimationAction> | void;
    /** Internal playback entry point used by {@link play} — confirm details in the implementation. */
    private internalOnPlay;
    /** Looks up an existing playback handle — confirm details in the implementation. */
    private tryFindHandle;
    /** Ensures the {@link mixer} exists, creating it if necessary. */
    private ensureMixer;
}
export { Animation_2 as Animation }

/**
 * Model describing how a single animation clip is used on a timeline track.
 * @category Animation and Sequencing
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */
export declare type AnimationClipModel = {
    /** The clip to use: a clip name, an index, or the {@link AnimationClip} itself. */
    clip: string | number | AnimationClip;
    /** Whether the clip loops. */
    loop: boolean;
    /** Duration of the clip — TODO confirm unit (seconds vs. frames). */
    duration: number;
    /** When `true`, the clip's start offset is removed — confirm exact semantics in the implementation. */
    removeStartOffset: boolean;
    /** Optional position — presumably an offset applied to the track; confirm. */
    position?: Vec3_3 | Vector3;
    /** Optional rotation — presumably an offset applied to the track; confirm. */
    rotation?: Quat | Quaternion;
};

/**
 * AnimationCurve is a representation of a curve that can be used to animate values over time.
 *
 * @category Animation
 * @group Utilities
 */
export declare class AnimationCurve {
    /**
     * Creates an animation curve that goes from the `from` value to the `to` value over the given `duration`.
     */
    static linearFromTo(from: number, to: number, duration: number): AnimationCurve;
    /** Creates an animation curve with just one keyframe */
    static constant(value: number): AnimationCurve;
    /**
     * The keyframes that define the curve.
     */
    keys: Array<Keyframe_2>;
    /**
     * Clones this AnimationCurve and returns a new instance with the same keyframes (the keyframes are also cloned).
     */
    clone(): AnimationCurve;
    /** The duration of the curve, which is the time of the last keyframe. */
    get duration(): number;
    /** Evaluates the curve at the given time and returns the value of the curve at that time.
     * @param time The time at which to evaluate the curve.
     * @returns The value of the curve at the given time.
     */
    evaluate(time: number): number;
    /** Interpolates between two keyframes and returns the value at `time`.
     * @param time The time to interpolate at.
     * @param keyframe1 The earlier keyframe.
     * @param keyframe2 The later keyframe.
     * @returns The interpolated value.
     */
    static interpolateValue(time: number, keyframe1: Keyframe_2, keyframe2: Keyframe_2): number;
}

/** USD exporter extension that collects per-object transform animation samples and writes them into the exported document. */
export declare class AnimationExtension implements IUSDExporterExtension {
    /** Name of this exporter extension. */
    get extensionName(): string;
    /** Per-object transform samples collected for export (see {@link dict}). */
    get animationData(): Map<Object3D<Object3DEventMap>, TransformData[]>;
    /** All animation clips registered via {@link registerAnimation}. */
    get registeredClips(): MapIterator<AnimationClip>;
    /** All roots that have animations registered for them. */
    get animatedRoots(): MapIterator<Object3D<Object3DEventMap>>;
    /** Maps each clip to its hold clip (see {@link clipToHoldClip}). */
    get holdClipMap(): Map<AnimationClip, AnimationClip>;
    /** For each animated object, contains time/pos/rot/scale samples in the format that USD needs,
     *  ready to be written to the .usda file.
     */
    private dict;
    /** Map of all roots (Animation/Animator or scene) and all targets that they animate.
     *  We need that info so that we can ensure that each target has the same number of TransformData entries
     *  so that switching between animations doesn't result in data "leaking" to another clip.
     */
    private rootTargetMap;
    /** Maps (root, clip) pairs to their registered animation info — confirm exact keying in the implementation. */
    private rootAndClipToRegisteredAnimationMap;
    /** Clips registered for each root */
    private rootToRegisteredClip;
    /** End time of the most recently registered clip — presumably; confirm in implementation. */
    private lastClipEndTime;
    /** Start time recorded per clip (see {@link getStartTimeByClip}). */
    private clipToStartTime;
    /** Hold clip generated per clip — confirm its purpose in the implementation. */
    private clipToHoldClip;
    /** Serializers used while writing the animation data — confirm details in the implementation. */
    private serializers;
    /** Determines if we inject a rest pose clip for each root - only makes sense for QuickLook */
    injectRestPoses: boolean;
    /** Determines if we inject a PlayAnimationOnClick component with "scenestart" trigger - only makes sense for QuickLook */
    injectImplicitBehaviours: boolean;
    /** @param quickLookCompatible When `true`, enables QuickLook-specific behavior (see {@link injectRestPoses} and {@link injectImplicitBehaviours}). */
    constructor(quickLookCompatible: boolean);
    /** Returns the start time code for the exported animations. */
    getStartTimeCode(): number;
    /** Returns the end time code, based on 60 frames per second, for all registered animations.
     * This matches the highest time value in the USDZ file. */
    getEndTimeCode(): number;
    /** Returns how many clips are registered for the given root. */
    getClipCount(root: Object3D): number;
    /** Returns the recorded start time for the given clip. */
    getStartTimeByClip(clip: AnimationClip | null): number;
    /** Register an AnimationClip for a specific root object.
     * @param root The root object that the animation clip is targeting.
     * @param clip The animation clip to register. If null, a rest pose is registered.
     * @returns The registered animation info, which contains the start time and duration of the clip.
     */
    registerAnimation(root: Object3D, clip: AnimationClip | null): RegisteredAnimationInfo | null;
    /** Exporter hook: called after the object hierarchy has been processed. */
    onAfterHierarchy(_context: any): void;
    /** Exporter hook: called after the USD document has been built. */
    onAfterBuildDocument(_context: any): void;
    /** Exporter hook: called for each exported object. */
    onExportObject(object: any, model: USDObject, _context: any): void;
}

/** Identifies an animation by clip reference, index, or name; `undefined` means "the first/default animation" (see {@link Animation.play}). */
declare type AnimationIdentifier = AnimationClip | number | string | undefined;

/**
 * Registry for animation related data. Use {@link registerAnimationMixer} to register an animation mixer instance.
 * Can be accessed from {@link Context.animations} and is used internally e.g. when exporting GLTF files.
 * @category Animation
 */
declare class AnimationsRegistry {
    /** The context this registry belongs to. */
    readonly context: Context;
    /** The currently registered animation mixers. */
    readonly mixers: AnimationMixer[];
    constructor(context: Context);
    /* Excluded from this release type: onDestroy */
    /**
     * Register an animation mixer instance.
     */
    registerAnimationMixer(mixer: AnimationMixer): void;
    /**
     * Unregister an animation mixer instance.
     */
    unregisterAnimationMixer(mixer: AnimationMixer | null | undefined): void;
}

/** Timeline track handler that evaluates animation clips on a target object (see {@link AnimationClipModel}). */
export declare class AnimationTrackHandler extends TrackHandler {
    /* Excluded from this release type: models */
    /* Excluded from this release type: trackOffset */
    /** The object that is being animated. */
    target?: Object3D;
    /** The AnimationMixer, should be shared with the animator if an animator is bound */
    mixer?: AnimationMixer;
    /** The animation clips evaluated by this track. */
    clips: Array<AnimationClip>;
    /** The animation actions created for {@link clips}. */
    actions: Array<AnimationAction>;
    /**
     * You can use the weight to blend the timeline animation tracks with multiple animation tracks on the same object.
     * @default 1
     */
    weight: number;
    /** holds data/info about clips differences */
    private _actionOffsets;
    /** Whether {@link bind} has already run. */
    private _didBind;
    /** The bound animator, if any. */
    private _animator;
    /** Lifecycle: called when the track is disabled. */
    onDisable(): void;
    /** Lifecycle: called when the track is destroyed. */
    onDestroy(): void;
    /** Called when the playable state changes. */
    onStateChanged(): void;
    /** Creates evaluation hooks for the given clip model — confirm details in the implementation. */
    createHooks(clipModel: Models.AnimationClipModel, clip: any): void;
    /** Binds this handler to its target and mixer. */
    bind(): void;
    /** Computes per-clip track offsets (see {@link _actionOffsets}). */
    private ensureTrackOffsets;
    private _useclipOffsets;
    // NOTE(review): the fields below look like reusable scratch vectors/quaternions for evaluate() — confirm against the implementation.
    private _totalOffsetPosition;
    private _totalOffsetRotation;
    private _totalOffsetPosition2;
    private _totalOffsetRotation2;
    private _summedPos;
    private _tempPos;
    private _summedRot;
    private _tempRot;
    private _clipRotQuat;
    /** Evaluates the track at the given time — TODO confirm unit (seconds). */
    evaluate(time: number): void;
    /** Builds the rotation interpolant used during evaluation. */
    private createRotationInterpolant;
    /** Builds the position interpolant used during evaluation. */
    private createPositionInterpolant;
}

/** Names of animator triggers per interaction state — presumably used by button-like components to drive an {@link Animator}; confirm in the implementation. */
declare class AnimationTriggers {
    /** Trigger name fired for the disabled state. */
    disabledTrigger: string;
    /** Trigger name fired for the highlighted state. */
    highlightedTrigger: string;
    /** Trigger name fired for the normal state. */
    normalTrigger: string;
    /** Trigger name fired for the pressed state. */
    pressedTrigger: string;
    /** Trigger name fired for the selected state. */
    selectedTrigger: string;
}

/**
 * Utility class for working with animations.
 */
export declare class AnimationUtils {
    /**
     * Tests if the root object of an AnimationAction can be animated. Objects where matrixAutoUpdate or matrixWorldAutoUpdate is set to false may not animate correctly.
     * @param action The AnimationAction to test
     * @param allowLog Whether to allow logging warnings. Default is false, which only allows logging in development environments.
     * @returns True if the root object can be animated, false otherwise
     */
    static testIfRootCanAnimate(action: AnimationAction, allowLog?: boolean): boolean;
    /**
     * Tries to get the animation actions from an animation mixer.
     * @param mixer The animation mixer to get the actions from
     * @returns The actions or null if the mixer is invalid
     */
    static tryGetActionsFromMixer(mixer: AnimationMixer): Array<AnimationAction> | null;
    /** Collects animation clips from the object hierarchy.
     * @param obj The root object to search
     * @param target Optional array to append the found clips to
     * @returns The found animation clips
     */
    static tryGetAnimationClipsFromObjectHierarchy(obj: Object3D, target?: Array<AnimationClip>): Array<AnimationClip>;
    /**
     * Assigns animations from a GLTF file to the objects in the scene.
     * This method will look for objects in the scene that have animations and assign them to the correct objects.
     * @param file The GLTF file to assign the animations from
     * @returns The animation components that were started, or null — confirm exact conditions in the implementation
     */
    static autoplayAnimations(file: Object3D | Pick<Model, "animations" | "scene">): Array<IAnimationComponent> | null;
    /** Creates an empty {@link AnimationClip}. */
    static emptyClip(): AnimationClip;
    /** Creates a clip that animates an object's scale — see {@link ScaleClipOptions} for configuration. */
    static createScaleClip(options?: ScaleClipOptions): AnimationClip;
}

/**
 * Animator plays and manages state-machine based animations on a GameObject.
 * Uses an {@link AnimatorController} for state transitions, blending, and parameters.
 *
 * **State machine animations:**
 * Define animation states and transitions in Unity's Animator window or in [Blender's Animator Controller editor](https://engine.needle.tools/docs/blender/animation.html)
 * Control transitions via parameters (bool, int, float, trigger).
 *
 * ![](https://cloud.needle.tools/-/media/zXQhLgtxr5ZaxLDTDb3MXA.gif)
 *
 * **Creating at runtime:**
 * Use `AnimatorController.createFromClips()` to create controllers from code.
 *
 * **Parameters:**
 * - `setTrigger(name)` - Trigger a one-shot transition
 * - `setBool(name, value)` - Set boolean parameter
 * - `setFloat(name, value)` - Set float parameter
 * - `setInteger(name, value)` - Set integer parameter
 *
 * @example Trigger animation state
 * ```ts
 * const animator = myCharacter.getComponent(Animator);
 * animator.setTrigger("Jump");
 * animator.setFloat("Speed", 5);
 * animator.setBool("IsRunning", true);
 * ```
 *
 * @example Listen to animation events
 * ```ts
 * animator.onLoop(evt => console.log("Animation looped"));
 * animator.onFinished(evt => console.log("Animation finished"));
 * ```
 *
 * @summary Plays and manages animations on a GameObject based on an AnimatorController
 * @category Animation and Sequencing
 * @group Components
 * @see {@link AnimatorController} for state machine configuration
 * @see {@link Animation} for simple clip playback
 * @see {@link PlayableDirector} for timeline-based animation
 *
 * @link https://engine.needle.tools/docs/blender/animation.html
 */
export declare class Animator extends Component implements IAnimationComponent {
    /**
     * Identifies this component as an animation component in the engine
     */
    get isAnimationComponent(): boolean;
    /**
     * When enabled, animation will affect the root transform position and rotation
     */
    applyRootMotion: boolean;
    /**
     * Indicates whether this animator contains root motion data
     */
    hasRootMotion: boolean;
    /**
     * When enabled, the animator will maintain its state when the component is disabled
     */
    keepAnimatorControllerStateOnDisable: boolean;
    /**
     * Sets or replaces the animator controller for this component.
     * Handles binding the controller to this animator instance and ensures
     * proper initialization when the controller changes.
     * @param val The animator controller model or instance to use
     */
    set runtimeAnimatorController(val: AnimatorControllerModel | AnimatorController | undefined | null);
    /**
     * Gets the current animator controller instance
     * @returns The current animator controller or null if none is assigned
     */
    get runtimeAnimatorController(): AnimatorController | undefined | null;
    /**
     * Retrieves information about the current animation state
     * @returns The current state information, or undefined if no state is playing
     */
    getCurrentStateInfo(): AnimatorStateInfo | null | undefined;
    /**
     * The currently playing animation action that can be used to modify animation properties
     * @returns The current animation action, or null if no animation is playing
     */
    get currentAction(): AnimationAction | null;
    /**
     * Indicates whether animation parameters have been modified since the last update
     * @returns True if parameters have been changed
     */
    get parametersAreDirty(): boolean;
    /** Backing field for {@link parametersAreDirty} */
    private _parametersAreDirty;
    /**
     * Indicates whether the animator state has changed since the last update
     * @returns True if the animator has been changed
     */
    get isDirty(): boolean;
    /** Backing field for {@link isDirty} */
    private _isDirty;
    /**@deprecated use play() */
    Play(name: string | number, layer?: number, normalizedTime?: number, transitionDurationInSec?: number): void;
    /**
     * Plays an animation on the animator
     * @param name The name or hash of the animation to play
     * @param layer The layer to play the animation on (-1 for default layer)
     * @param normalizedTime The time position to start playing (0-1 range, NEGATIVE_INFINITY for current position)
     * @param transitionDurationInSec The duration of the blend transition in seconds
     */
    play(name: string | number, layer?: number, normalizedTime?: number, transitionDurationInSec?: number): void;
    /**@deprecated use reset */
    Reset(): void;
    /**
     * Resets the animator controller to its initial state
     */
    reset(): void;
    /**@deprecated use setBool */
    SetBool(name: string | number, val: boolean): void;
    /**
     * Sets a boolean parameter in the animator
     * @param name The name or hash of the parameter
     * @param value The boolean value to set
     */
    setBool(name: string | number, value: boolean): void;
    /**@deprecated use getBool */
    GetBool(name: string | number): boolean;
    /**
     * Gets a boolean parameter from the animator
     * @param name The name or hash of the parameter
     * @returns The value of the boolean parameter, or false if not found
     */
    getBool(name: string | number): boolean;
    /**
     * Toggles a boolean parameter between true and false
     * @param name The name or hash of the parameter
     */
    toggleBool(name: string | number): void;
    /**@deprecated use setFloat */
    SetFloat(name: string | number, val: number): void;
    /**
     * Sets a float parameter in the animator
     * @param name The name or hash of the parameter
     * @param val The float value to set
     */
    setFloat(name: string | number, val: number): void;
    /**@deprecated use getFloat */
    GetFloat(name: string | number): number;
    /**
     * Gets a float parameter from the animator
     * @param name The name or hash of the parameter
     * @returns The value of the float parameter, or -1 if not found
     */
    getFloat(name: string | number): number;
    /**@deprecated use setInteger */
    SetInteger(name: string | number, val: number): void;
    /**
     * Sets an integer parameter in the animator
     * @param name The name or hash of the parameter
     * @param val The integer value to set
     */
    setInteger(name: string | number, val: number): void;
    /**@deprecated use getInteger */
    GetInteger(name: string | number): number;
    /**
     * Gets an integer parameter from the animator
     * @param name The name or hash of the parameter
     * @returns The value of the integer parameter, or -1 if not found
     */
    getInteger(name: string | number): number;
    /**@deprecated use setTrigger */
    SetTrigger(name: string | number): void;
    /**
     * Activates a trigger parameter in the animator
     * @param name The name or hash of the trigger parameter
     */
    setTrigger(name: string | number): void;
    /**@deprecated use resetTrigger */
    ResetTrigger(name: string | number): void;
    /**
     * Resets a trigger parameter in the animator
     * @param name The name or hash of the trigger parameter
     */
    resetTrigger(name: string | number): void;
    /**@deprecated use getTrigger */
    GetTrigger(name: string | number): void;
    /**
     * Gets the state of a trigger parameter from the animator
     * @param name The name or hash of the trigger parameter
     * @returns The state of the trigger parameter
     */
    getTrigger(name: string | number): boolean | undefined;
    /**@deprecated use isInTransition */
    IsInTransition(): boolean;
    /**
     * Checks if the animator is currently in a transition between states
     * @returns True if the animator is currently blending between animations
     */
    isInTransition(): boolean;
    /**@deprecated use setSpeed */
    SetSpeed(speed: number): void;
    /**
     * Sets the playback speed of the animator
     * @param speed The new playback speed multiplier
     */
    setSpeed(speed: number): void;
    /**
     * Sets a random playback speed between the min and max values
     * @param minMax Object with x (minimum) and y (maximum) speed values
     */
    set minMaxSpeed(minMax: {
        x: number;
        y: number;
    });
    /**
     * Sets a random normalized time offset for animations between min (x) and max (y) values
     * @param minMax Object with x (min) and y (max) values for the offset range
     */
    set minMaxOffsetNormalized(minMax: {
        x: number;
        y: number;
    });
    /** Current playback speed multiplier, set via {@link setSpeed} or {@link minMaxSpeed} */
    private _speed;
    /** Normalized start offset, set via {@link minMaxOffsetNormalized} */
    private _normalizedStartOffset;
    /** Backing field for {@link runtimeAnimatorController} */
    private _animatorController?;
    /** Component lifecycle hook invoked once when the component is created */
    awake(): void;
    /** Deferred initialization callback used when a controller is assigned before the component is ready — TODO confirm exact timing */
    private _initializeWithRuntimeAnimatorController?;
    /**
     * (Re-)initializes the assigned runtime animator controller.
     * @param force When true, initialization runs even if the controller was already initialized — TODO confirm
     */
    initializeRuntimeAnimatorController(force?: boolean): void;
    /** Component lifecycle hook; see {@link keepAnimatorControllerStateOnDisable} for how state is handled while disabled */
    onDisable(): void;
    /** Component lifecycle hook called every frame before rendering; advances the animator */
    onBeforeRender(): void;
}

/**
 * Comparison modes used by animator transition conditions to test parameter values.
 * The numeric values mirror Unity's `AnimatorConditionMode` enum (note: 5 is intentionally unused).
 * @category Animation and Sequencing
 */
export declare enum AnimatorConditionMode {
    /** The (bool/trigger) parameter must be true */
    If = 1,
    /** The (bool) parameter must be false */
    IfNot = 2,
    /** The (numeric) parameter must be greater than the condition's threshold */
    Greater = 3,
    /** The (numeric) parameter must be less than the condition's threshold */
    Less = 4,
    /** The (numeric) parameter must equal the condition's threshold */
    Equals = 6,
    /** The (numeric) parameter must not equal the condition's threshold */
    NotEqual = 7
}

/**
 * Controls the playback of animations using a state machine architecture.
 *
 * The AnimatorController manages animation states, transitions between states,
 * and parameters that affect those transitions. It is used by the {@link Animator}
 * component to control animation behavior on 3D models.
 *
 * Use the static method {@link AnimatorController.createFromClips} to create
 * an animator controller from a set of animation clips.
 *
 * @category Animation and Sequencing
 * @group Utilities
 */
export declare class AnimatorController {
    /**
     * Creates an AnimatorController from a set of animation clips.
     * Each clip becomes a state in the controller's state machine.
     *
     * @param clips - The animation clips to use for creating states
     * @param options - Configuration options for the controller including looping behavior and transitions
     * @returns A new AnimatorController instance
     */
    static createFromClips(clips: AnimationClip[], options?: CreateAnimatorControllerOptions): AnimatorController;
    /**
     * Plays an animation state by name or hash.
     *
     * @param name - The name or hash identifier of the state to play
     * @param layerIndex - The layer index (defaults to 0)
     * @param normalizedTime - The normalized time to start the animation from (0-1)
     * @param durationInSec - Transition duration in seconds
     */
    play(name: string | number, layerIndex?: number, normalizedTime?: number, durationInSec?: number): void;
    /**
     * Resets the controller to its initial state.
     */
    reset(): void;
    /**
     * Sets a boolean parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @param value - The boolean value to set
     */
    setBool(name: string | number, value: boolean): void;
    /**
     * Gets a boolean parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @returns The boolean value of the parameter, or false if not found
     */
    getBool(name: string | number): boolean;
    /**
     * Sets a float parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @param val - The float value to set
     * @returns True if the parameter was found and set, false otherwise
     */
    setFloat(name: string | number, val: number): boolean;
    /**
     * Gets a float parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @returns The float value of the parameter, or 0 if not found
     */
    getFloat(name: string | number): number;
    /**
     * Sets an integer parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @param val - The integer value to set
     */
    setInteger(name: string | number, val: number): void;
    /**
     * Gets an integer parameter value by name or hash.
     *
     * @param name - The name or hash identifier of the parameter
     * @returns The integer value of the parameter, or 0 if not found
     */
    getInteger(name: string | number): number;
    /**
     * Sets a trigger parameter to active (true).
     * Trigger parameters are automatically reset after they are consumed by a transition.
     *
     * @param name - The name or hash identifier of the trigger parameter
     */
    setTrigger(name: string | number): void;
    /**
     * Resets a trigger parameter to inactive (false).
     *
     * @param name - The name or hash identifier of the trigger parameter
     */
    resetTrigger(name: string | number): void;
    /**
     * Gets the current state of a trigger parameter.
     *
     * @param name - The name or hash identifier of the trigger parameter
     * @returns The boolean state of the trigger, or false if not found
     */
    getTrigger(name: string | number): boolean;
    /**
     * Checks if the controller is currently in a transition between states.
     *
     * @returns True if a transition is in progress, false otherwise
     */
    isInTransition(): boolean;
    /** Set the speed of the animator controller. Larger values will make the animation play faster. */
    setSpeed(speed: number): void;
    /** Current playback speed multiplier, set via {@link setSpeed} */
    private _speed;
    /**
     * Finds an animation state by name or hash.
     * @deprecated Use findState instead
     *
     * @param name - The name or hash identifier of the state to find
     * @returns The found state or null if not found
     */
    FindState(name: string | number | undefined | null): State | null;
    /**
     * Finds an animation state by name or hash.
     *
     * @param name - The name or hash identifier of the state to find
     * @returns The found state or null if not found
     */
    findState(name: string | number | undefined | null): State | null;
    /**
     * Gets information about the current playing animation state.
     *
     * @returns An AnimatorStateInfo object with data about the current state, or null if no state is active
     */
    getCurrentStateInfo(): AnimatorStateInfo | null;
    /**
     * Gets the animation action currently playing.
     *
     * @returns The current animation action, or null if no action is playing
     */
    get currentAction(): AnimationAction | null;
    /**
     * The normalized time (0-1) to start playing the first state at.
     * This affects the initial state when the animator is first enabled.
     */
    normalizedStartOffset: number;
    /**
     * The Animator component this controller is bound to.
     */
    animator?: Animator;
    /**
     * The data model describing the animation states and transitions.
     */
    model: AnimatorControllerModel;
    /**
     * Gets the engine context from the bound animator.
     */
    get context(): Context | undefined | null;
    /**
     * Gets the animation mixer used by this controller.
     */
    get mixer(): AnimationMixer;
    /**
     * Cleans up resources used by this controller.
     * Stops all animations and unregisters the mixer from the animation system.
     */
    dispose(): void;
    /**
     * Binds this controller to an animator component.
     * Creates a new animation mixer and sets up animation actions.
     *
     * @param animator - The animator to bind this controller to
     */
    bind(animator: Animator): void;
    /**
     * Creates a deep copy of this controller.
     * Clones the model data but does not copy runtime state.
     *
     * @returns A new AnimatorController instance with the same configuration
     */
    clone(): AnimatorController | null;
    /**
     * Updates the controller's state machine and animations.
     * Called each frame by the animator component.
     *
     * @param weight - The weight to apply to the animations (for blending)
     */
    update(weight: number): void;
    /** Backing field for {@link mixer} */
    private _mixer;
    /** Backing field for {@link activeState} */
    private _activeState?;
    /**
     * Gets the currently active animation state.
     *
     * @returns The active state or undefined if no state is active
     */
    get activeState(): State | undefined;
    constructor(model: AnimatorControllerModel);
    /** States currently contributing to the output — presumably more than one during cross-fades; confirm */
    private _activeStates;
    /** Refreshes the set of active states after transitions */
    private updateActiveStates;
    /** Applies the initial transition when the controller starts — TODO confirm exact semantics */
    private setStartTransition;
    /** Evaluates transition conditions against the current parameter values */
    private evaluateTransitions;
    /** Applies the speed multiplier to the running animation(s) */
    private setTimescale;
    /** Looks up a state in the model by identifier */
    private getState;
    /**
     * These actions have been active previously but not faded out because we entered a state that has no real animation - no duration. In which case we hold the previously active actions until they are faded out.
     */
    private readonly _heldActions;
    /** Fades out and releases actions collected in {@link _heldActions} */
    private releaseHeldActions;
    /** Performs a transition from the active state to a target state */
    private transitionTo;
    /** Creates an AnimationAction for a state's clip on the mixer */
    private createAction;
    /** Evaluates a single transition condition (see {@link AnimatorConditionMode}) */
    private evaluateCondition;
    /** Creates actions for all states when binding to an animator */
    private createActions;
    /**
     * Yields all animation actions managed by this controller.
     * Iterates through all states in all layers and returns their actions.
     */
    enumerateActions(): Generator<AnimationAction, void, unknown>;
    /** Handles root motion when the bound animator has applyRootMotion enabled — TODO confirm */
    private rootMotionHandler?;
}

/**
 * Serialized data model describing an animator controller: its parameters and layered state machine.
 * Assign to {@link Animator.runtimeAnimatorController} or pass to the {@link AnimatorController} constructor.
 * @category Animation and Sequencing
 */
export declare type AnimatorControllerModel = {
    /** Display name of the controller */
    name: string;
    /** Unique identifier of the controller asset */
    guid: string;
    /** Parameters (bool, int, float, trigger) driving transitions */
    parameters: Parameter[];
    /** Animation layers, each containing a state machine */
    layers: Layer[];
};

/**
 * The data type of an animator controller parameter.
 * The numeric values mirror Unity's `AnimatorControllerParameterType` enum.
 * @category Animation and Sequencing
 */
export declare enum AnimatorControllerParameterType {
    /** Floating point parameter (see {@link Animator.setFloat}) */
    Float = 1,
    /** Integer parameter (see {@link Animator.setInteger}) */
    Int = 3,
    /** Boolean parameter (see {@link Animator.setBool}) */
    Bool = 4,
    /** Trigger parameter, auto-reset after being consumed (see {@link Animator.setTrigger}) */
    Trigger = 9
}

/**
 * Read-only snapshot describing a currently playing animator state.
 * Returned by {@link Animator.getCurrentStateInfo} and {@link AnimatorController.getCurrentStateInfo}.
 * @category Animation and Sequencing
 */
export declare class AnimatorStateInfo {
    /** The name of the animation */
    readonly name: string;
    /** The hash of the name */
    readonly nameHash: number;
    /** The normalized time of the animation */
    readonly normalizedTime: number;
    /** The length of the animation */
    readonly length: number;
    /** The current speed of the animation */
    readonly speed: number;
    /** The current action playing. It can be used to modify the action */
    readonly action: AnimationAction | null;
    /**
     * If the state has any transitions
     */
    readonly hasTransitions: boolean;
    /**
     * @param state The animator state this snapshot describes
     * @param normalizedTime Current playback position in the 0-1 range
     * @param length Clip length (in seconds — TODO confirm unit)
     * @param speed Current playback speed multiplier
     */
    constructor(state: State, normalizedTime: number, length: number, speed: number);
}

/**
 * [Antialiasing](https://engine.needle.tools/docs/api/Antialiasing) provides SMAA (Subpixel Morphological Antialiasing) post-processing effect to smooth edges in the rendered scene.
 * @category Effects
 * @group Components
 */
export declare class Antialiasing extends PostProcessingEffect {
    /** Type name identifying this effect */
    get typeName(): string;
    /** Quality preset for the SMAA effect — presumably maps to postprocessing's SMAAPreset; confirm */
    readonly preset: VolumeParameter;
    /** Creates the underlying postprocessing effect instance */
    onCreateEffect(): EffectProviderResult;
}

/**
 * Branded `string` type used in unions with string literals (e.g. `"a" | "b" | AnyString`).
 * The optional never-typed `_brand` keeps TypeScript from collapsing the union to plain
 * `string`, preserving literal autocomplete while still accepting any string value.
 */
declare type AnyString = string & {
    _brand?: never;
};

/**
 * The Application class can be used to mute audio globally, and to check if the application (canvas) is currently visible (it's tab is active and not minimized).
 */
export declare class Application extends EventTarget {
    /** True once the user has interacted with the page (click, touch, keypress, etc.) */
    static get userInteractionRegistered(): boolean;
    /**  @deprecated use Application.registerWaitForInteraction instead */
    static readonly registerWaitForAllowAudio: typeof Application.registerWaitForInteraction;
    /**
     * Register a callback that will be called when the user interacts with the page (click, touch, keypress, etc).
     * If the user has already interacted with the page, the callback will be called immediately.
     * This can be used to wait for user interaction before playing audio, for example.
     */
    static registerWaitForInteraction(cb: Function): void;
    /**
     * Unregister a callback that was previously registered with registerWaitForInteraction.
     */
    static unregisterWaitForInteraction(cb: Function): void;
    /** Backing field for {@link muted} */
    private _mute;
    /** audio muted? */
    get muted(): boolean;
    /** set global audio mute */
    set muted(value: boolean);
    /** The engine context this Application instance belongs to */
    private readonly context;
    /** @returns true if the document is focused */
    get hasFocus(): boolean;
    /**
     * @returns true if the application is currently visible (it's tab is active and not minimized)
     */
    get isVisible(): boolean;
    /** Backing field for {@link isVisible} */
    private _isVisible;
    /* Excluded from this release type: __constructor */
    /** Handler for document visibility changes (note: name preserves a historical misspelling of "visibility") */
    private onVisiblityChanged;
}

/* Excluded from this release type: apply */

/**
 * Applies hot-module-replacement (HMR) changes from a newly loaded module to the running engine.
 * @param newModule The replacement module delivered by the dev server's HMR runtime
 * @returns presumably true when the changes were applied successfully — TODO confirm exact semantics
 */
export declare function applyHMRChanges(newModule: any): boolean;

/* Excluded from this release type: applyPrototypeExtensions */

/**
 * How a size should be adjusted to preserve aspect ratio:
 * not at all, by changing the height, or by changing the width.
 */
declare enum AspectMode {
    /** Do not adjust for aspect ratio */
    None = 0,
    /** Keep the width and adjust the height */
    AdjustHeight = 1,
    /** Keep the height and adjust the width */
    AdjustWidth = 2
}

/**
 * Asset database placeholder — no members beyond the constructor are exposed
 * in this build of the type declarations.
 */
export declare class AssetDatabase {
    constructor();
}

/** ### AssetReferences can be used to load glTF or GLB assets
 * Use {@link AssetReference.getOrCreateFromUrl} to get an AssetReference for a URL to be easily loaded. When using the same URL multiple times the same AssetReference will be returned, this avoids loading or creating the same asset multiple times.
 *
 * **Important methods:**
 * - {@link preload} to load the asset binary without creating an instance yet.
 * - {@link loadAssetAsync} to load the asset and create an instance.
 * - {@link instantiate} to load the asset and create another instance.
 * - {@link unload} to dispose allocated memory and destroy the asset instance.
 *
 * @example Loading an asset from a URL
 * ```ts
 * import { AssetReference } from '@needle-tools/engine';
 * const assetRef = AssetReference.getOrCreateFromUrl("https://example.com/myModel.glb");
 * const instance = await assetRef.loadAssetAsync();
 * scene.add(instance);
 * ```
 *
 * @example Referencing an asset in a component and loading it on start
 * ```ts
 * import { Behaviour, serializable, AssetReference } from '@needle-tools/engine';
 *
 * export class MyComponent extends Behaviour {
 *
 *   @serializable(AssetReference)
 *   myModel?: AssetReference;
 *
 *   // Load the model on start. Start is called after awake and onEnable
 *   start() {
 *     if (this.myModel) {
 *       this.myModel.loadAssetAsync().then(instance => {
 *         if (instance) {
 *           // add the loaded model to this component's game object
 *           this.gameObject.add(instance);
 *         }
 *       });
 *     }
 *   }
 * }
 * ```
 *
 * ### Related:
 * - {@link ImageReference} to load external image URLs
 * - {@link FileReference} to load external file URLs
 * - {@link loadAsset} to load assets directly without using AssetReferences
 */
export declare class AssetReference {
    /**
     * Get an AssetReference for a URL to be easily loaded.
     * AssetReferences are cached so calling this method multiple times with the same arguments will always return the same AssetReference.
     * @param url The URL of the asset to load. The url can be relative or absolute.
     * @param context The context to use for loading the asset
     * @returns the AssetReference for the URL
     */
    static getOrCreateFromUrl(url: string, context?: Context): AssetReference;
    /**
     * Get an AssetReference for a URL to be easily loaded.
     * AssetReferences are cached so calling this method multiple times with the same arguments will always return the same AssetReference.
     */
    static getOrCreate(sourceId: SourceIdentifier | IComponent, url: string, context?: Context): AssetReference;
    /** Type-guard flag: always true for AssetReference instances */
    readonly isAssetReference = true;
    /**
     * This is the loaded asset root object. If the asset is a glb/gltf file this will be the {@link three#Scene} object.
     */
    get rawAsset(): any;
    /** The loaded asset root
     */
    get asset(): Object3D | null;
    protected set asset(val: Object3D | null);
    /** The url of the loaded asset (or the asset to be loaded)
     * @deprecated use url */
    get uri(): string;
    /** The url of the loaded asset (or the asset to be loaded) */
    get url(): string;
    /** The name of the assigned url. This name is deduced from the url and might not reflect the actual name of the asset */
    get urlName(): string;
    /**
     * @returns true if the uri is a valid URL (http, https, blob)
     */
    get hasUrl(): boolean;
    /** Backing field for {@link rawAsset} */
    private _rawAsset;
    /** Scene root of the loaded glb/gltf, when applicable — TODO confirm */
    private _glbRoot?;
    /** Backing field for {@link url} */
    private _url;
    /** Backing field for {@link urlName} */
    private _urlName;
    /** Progress callbacks registered via {@link beginListenDownload} */
    private _progressListeners;
    /** True while {@link preload} is downloading the raw binary */
    private _isLoadingRawBinary;
    /** The preloaded raw binary data, if {@link preload} has completed */
    private _rawBinary?;
    /* Excluded from this release type: __constructor */
    /** Prefer {@link getOrCreateFromUrl} over direct construction so instances are cached per URL */
    constructor(uri: string, _hash?: string, asset?: any);
    /** Callback used to resolve prefab references during loading — TODO confirm */
    private onResolvePrefab;
    /** True when the asset still needs to be loaded */
    private get mustLoad();
    /** In-flight loading promise, shared so concurrent load calls don't duplicate work */
    private _loadingPromise;
    /**
     * @returns `true` if the asset has been loaded (via preload) or if it exists already (assigned to `asset`) */
    isLoaded(): boolean | ArrayBufferLike;
    /** frees previously allocated memory and destroys the current `asset` instance (if any) */
    unload(): void;
    /** loads the asset binary without creating an instance */
    preload(): Promise<ArrayBufferLike | null>;
    /** Loads the asset and returns a single shared instance (assigned to {@link asset}).
     * Calling this multiple times will **not** create additional instances — it returns the same `Object3D`.
     * To create a new independent clone, use {@link instantiate} instead.
     * @param prog Optional progress callback invoked during download.
     * @returns The loaded root `Object3D`, or `null` if loading fails.
     */
    loadAssetAsync(prog?: ProgressCallback | null): Promise<Object3D | null>;
    /** loads and returns a new instance of `asset` */
    instantiate(parent?: Object3D | IInstantiateOptions | null): Promise<Object3D<Object3DEventMap> | null>;
    /** loads and returns a new instance of `asset` - this call is networked so an instance will be created on all connected users */
    instantiateSynced(parent?: Object3D | SyncInstantiateOptions, saveOnServer?: boolean): Promise<Object3D<Object3DEventMap> | null>;
    /** Registers a progress callback invoked while the asset downloads */
    beginListenDownload(evt: ProgressCallback): void;
    /** Unregisters a progress callback previously added with {@link beginListenDownload} */
    endListenDownload(evt: ProgressCallback): void;
    /** Invokes the registered progress callbacks */
    private raiseProgressEvent;
    /** Tracks AssetReferences that are currently instantiating — presumably to guard against re-entrancy; confirm */
    private static readonly currentlyInstantiating;
    /** Internal instantiation implementation backing {@link instantiate} */
    private onInstantiate;
    /**
     * try to ignore the intermediate created object
     * because it causes trouble if we instantiate an assetreference per player
     * and call destroy on the player marker root
     * @returns the scene root object if the asset was a glb/gltf
     */
    private tryGetActualGameObjectRoot;
}

/**
 * Used to attract Rigidbodies towards the position of this component.
 * Add Rigidbodies to the `targets` array to have them be attracted.
 * You can use negative strength values to create a repulsion effect.
 *
 * @example Attractor component attracting a Rigidbody
 * ```ts
 * const attractor = object.addComponent(Attractor);
 * attractor.strength = 5; // positive value to attract
 * attractor.radius = 10; // only attract within 10 units
 * attractor.targets.push(rigidbody); // add the Rigidbody to be attracted
 * ```
 * @summary Attract Rigidbodies towards the position of this component
 * @category Physics
 * @group Components
 */
export declare class Attractor extends Component {
    /** Attraction strength; negative values repel instead of attract */
    strength: number;
    /** Only targets within this distance from the attractor are affected */
    radius: number;
    /** The rigidbodies affected by this attractor */
    targets: Rigidbody[];
    /** Per-frame lifecycle hook applying the attraction to all {@link targets} */
    update(): void;
}

/**
 * Callback invoked when a watched DOM attribute changes.
 * Receives the new attribute value — presumably `null` when the attribute was removed; confirm against caller.
 */
declare type AttributeChangeCallback = (value: string | null) => void;

/**
 * Serialized model describing an audio clip used by timeline playback.
 * @category Animation and Sequencing
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */
export declare type AudioClipModel = {
    /** URL or identifier of the audio clip */
    clip: string;
    /** Whether playback should loop */
    loop: boolean;
    /** Playback volume — presumably in the 0-1 range; confirm */
    volume: number;
};

/**
 * Internal clip model extended with a runtime flag — presumably tracking
 * whether playback has already been triggered for this clip; confirm.
 */
declare type AudioClipModel_2 = Models.ClipModel & {
    _didTriggerPlay: boolean;
};

/**
 * USDZ exporter extension that includes audio in the exported file.
 */
export declare class AudioExtension implements IUSDExporterExtension {
    /** Derives the exported name for an audio clip — TODO confirm exact semantics */
    static getName(clip: string): string;
    /** Name identifying this exporter extension */
    get extensionName(): string;
    /** Audio files collected during export — presumably keyed by name; confirm */
    private files;
    /** Exporter callback invoked for each exported object */
    onExportObject?(object: Object3D, model: USDObject, _context: USDZExporterContext): void;
    /** Exporter callback invoked after serialization to finalize audio data */
    onAfterSerialize(context: USDZExporterContext): Promise<void>;
}

/**
 * The [AudioListener](https://engine.needle.tools/docs/api/AudioListener) represents a listener that can hear audio sources in the scene.
 * This component creates and manages a Three.js {@link three#AudioListener}, automatically connecting it
 * to the main camera or a Camera in the parent hierarchy.
 *
 * @summary Receives audio in the scene and outputs it to speakers
 * @category Multimedia
 * @group Components
 */
declare class AudioListener_2 extends Component {
    /**
     * Gets the existing Three.js {@link three#AudioListener} instance or creates a new one if it doesn't exist.
     * This listener is responsible for capturing audio in the 3D scene.
     * @returns The {@link three#AudioListener} instance
     */
    get listener(): AudioListener_3;
    /** Backing field for {@link listener} */
    private _listener;
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /** Handler for the first user interaction — presumably used to unlock browser audio; confirm */
    private onInteraction;
    /** Attaches the listener to the main camera or a Camera in the parent hierarchy, if one exists */
    private addListenerIfItExists;
    /** Detaches the listener from its camera, if attached */
    private removeListenerIfItExists;
}
export { AudioListener_2 as AudioListener }

/**
 * Defines how audio volume attenuates over distance from the listener.
 * @see {@link AudioSource} for the component that uses this setting
 */
declare enum AudioRolloffMode {
    /**
     * Logarithmic rolloff provides a natural, real-world attenuation where volume decreases
     * exponentially with distance.
     */
    Logarithmic = 0,
    /**
     * Linear rolloff provides a straightforward volume reduction that decreases at a constant
     * rate with distance.
     */
    Linear = 1,
    /**
     * Custom rolloff allows for defining specialized distance-based attenuation curves.
     * Note: Custom rolloff is not fully implemented in this version.
     */
    Custom = 2
}

/**
 * Plays audio clips in the scene with support for spatial (3D) positioning.
 *
 * **Browser autoplay policies:**
 * Web browsers require user interaction before playing audio. Use
 * `AudioSource.userInteractionRegistered` to check if playback is allowed,
 * or `registerWaitForAllowAudio()` to queue playback until interaction occurs.
 *
 * **Spatial audio:**
 * Set `spatialBlend` to 1 for full 3D positioning, or 0 for 2D (non-spatial).
 * Requires an {@link AudioListener} in the scene (typically on the camera).
 *
 * **Visibility handling:**
 * Audio automatically pauses when the tab is hidden unless `playInBackground = true`.
 * On mobile, audio always pauses in background regardless of this setting.
 *
 * @example Play audio on button click
 * ```ts
 * onClick() {
 *   const audio = this.getComponent(AudioSource);
 *   audio.play();
 * }
 * ```
 *
 * @example Wait for user interaction
 * ```ts
 * AudioSource.registerWaitForAllowAudio(() => {
 *   this.getComponent(AudioSource)?.play();
 * });
 * ```
 *
 * @summary Plays audio clips from files or media streams
 * @category Multimedia
 * @group Components
 * @see {@link AudioListener} for the audio receiver component
 * @see {@link AudioRolloffMode} for distance attenuation options
 * @see {@link Voip} for voice communication
 * @see {@link PlayableDirector} for timeline-based audio
 * @link https://engine.needle.tools/samples/?overlay=samples&tag=audio
 * @link https://spatial-audio-zubckswmztj.needle.run/
 */
export declare class AudioSource extends Component {
    /**
     * Checks if the user has interacted with the page to allow audio playback.
     * Audio playback often requires a user gesture first due to browser autoplay policies.
     * This is the same as calling {@link Application.userInteractionRegistered}.
     *
     * @returns Whether user interaction has been registered to allow audio playback
     */
    static get userInteractionRegistered(): boolean;
    /**
     * Registers a callback that will be executed once the user has interacted with the page,
     * allowing audio playback to begin.
     * This is the same as calling {@link Application.registerWaitForInteraction}.
     *
     * @param cb - The callback function to execute when user interaction is registered
     */
    static registerWaitForAllowAudio(cb: Function): void;
    /**
     * The audio clip to play. Can be a URL string pointing to an audio file or a {@link MediaStream} object.
     */
    clip: string | MediaStream;
    /**
     * When true, the audio will automatically start playing when the component is enabled.
     * When false, you must call play() manually to start audio playback.
     * @default false
     */
    playOnAwake: boolean;
    /**
     * When true, the audio clip will be loaded during initialization rather than when play() is called.
     * This can reduce playback delay but increases initial loading time.
     * @default true
     */
    preload: boolean;
    /**
     * When true, audio will continue playing when the browser tab loses focus.
     * When false, audio will pause when the tab is minimized or not active.
     * @default true
     */
    playInBackground: boolean;
    /**
     * Indicates whether the audio is currently playing.
     *
     * @returns True if the audio is playing, false otherwise
     */
    get isPlaying(): boolean;
    /**
     * The total duration of the current audio clip in seconds.
     *
     * @returns Duration in seconds or undefined if no clip is loaded
     */
    get duration(): number | undefined;
    /**
     * The current playback position as a normalized value between 0 and 1.
     * Can be set to seek to a specific position in the audio.
     */
    get time01(): number;
    set time01(val: number);
    /**
     * The current playback position in seconds.
     * Can be set to seek to a specific time in the audio.
     */
    get time(): number;
    set time(val: number);
    /**
     * When true, the audio will repeat after reaching the end.
     * When false, audio will play once and stop.
     * @default false
     */
    get loop(): boolean;
    set loop(val: boolean);
    /**
     * Controls how the audio is positioned in space.
     * Values range from 0 (2D, non-positional) to 1 (fully 3D positioned).
     * Note: 2D playback is not fully supported in the current implementation.
     */
    get spatialBlend(): number;
    set spatialBlend(val: number);
    /**
     * The minimum distance from the audio source at which the volume starts to attenuate.
     * Within this radius, the audio plays at full volume regardless of distance.
     */
    get minDistance(): number;
    set minDistance(val: number);
    /**
     * The maximum distance from the audio source beyond which the volume no longer decreases.
     * This defines the outer limit of the attenuation curve.
     */
    get maxDistance(): number;
    set maxDistance(val: number);
    /** Backing field for {@link spatialBlend} */
    private _spatialBlend;
    /** Backing field for {@link minDistance} */
    private _minDistance;
    /** Backing field for {@link maxDistance} */
    private _maxDistance;
    /**
     * Controls the overall volume/loudness of the audio.
     * Values range from 0 (silent) to 1 (full volume).
     * @default 1
     */
    get volume(): number;
    set volume(val: number);
    /** Backing field for {@link volume} */
    private _volume;
    /**
     * Controls the playback rate (speed) of the audio.
     * Values greater than 1 increase speed, values less than 1 decrease it.
     * This affects both speed and pitch of the audio.
     * @default 1
     */
    set pitch(val: number);
    get pitch(): number;
    /**
     * Determines how audio volume decreases with distance from the listener.
     * @default AudioRolloffMode.Logarithmic
     * @see {@link AudioRolloffMode}
     */
    rollOffMode: AudioRolloffMode;
    /** Backing field for {@link loop} */
    private _loop;
    /** The underlying positional audio object (lazily created, see {@link Sound}) */
    private sound;
    private helper;
    /** Tracks whether audio was playing before the tab lost visibility (see {@link playInBackground}) */
    private wasPlaying;
    private audioLoader;
    /** Backing field for {@link ShouldPlay} */
    private shouldPlay;
    private _lastClipStartedLoading;
    private _audioElement;
    /**
     * Returns the underlying {@link PositionalAudio} object, creating it if necessary.
     * The audio source needs a user interaction to be initialized due to browser autoplay policies.
     *
     * @returns The three.js PositionalAudio object or null if unavailable
     */
    get Sound(): PositionalAudio | null;
    /**
     * Indicates whether the audio source is queued to play when possible.
     * This may be true before user interaction has been registered.
     *
     * @returns Whether the audio source intends to play
     */
    get ShouldPlay(): boolean;
    /**
     * Returns the Web Audio API context associated with this audio source.
     *
     * @returns The {@link AudioContext} or null if not available
     */
    get audioContext(): AudioContext | undefined;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /** Pauses/resumes playback when the browser tab visibility changes (see {@link playInBackground}) */
    private onVisibilityChanged;
    /** Reacts to changes of the application's global mute state (name-based; implementation not visible here) */
    private onApplicationMuteChanged;
    /** Creates the underlying audio object — presumably backing {@link Sound}; verify in implementation */
    private createAudio;
    /** Callback registered via {@link registerWaitForAllowAudio} to start playback once audio is allowed */
    private __onAllowAudioCallback;
    /** Applies {@link minDistance}, {@link maxDistance} and {@link rollOffMode} to the underlying audio node */
    private applySpatialDistanceSettings;
    /** Invoked when a new {@link clip} is assigned (name-based; implementation not visible here) */
    private onNewClip;
    /**
     * Plays the audio clip or media stream.
     * If no argument is provided, plays the currently assigned clip.
     *
     * @param clip - Optional audio clip or {@link MediaStream} to play
     */
    play(clip?: string | MediaStream | undefined): void;
    /**
     * Pauses audio playback while maintaining the current position.
     * Use play() to resume from the paused position.
     */
    pause(): void;
    /**
     * Stops audio playback completely and resets the playback position to the beginning.
     * Unlike pause(), calling play() after stop() will start from the beginning.
     */
    stop(): void;
    private _lastContextTime;
    /** Whether the current clip has finished playing (non-looping case) — TODO confirm against implementation */
    private _hasEnded;
    /** Set when spatial settings changed and {@link applySpatialDistanceSettings} must run again */
    private _needUpdateSpatialDistanceSettings;
    /* Excluded from this release type: update */
}

/**
 * Track handler that plays audio clips as part of a timeline (see {@link PlayableDirector}).
 * Manages loading, scheduling, muting and disposal of the audio clips referenced by its models.
 */
export declare class AudioTrackHandler extends TrackHandler {
    /** The audio clip models belonging to this track */
    models: Array<AudioClipModel_2>;
    /** The listener through which this track's audio is played */
    listener: AudioListener_3;
    /** Audio playback objects for this track's clips — presumably one per model; verify in implementation */
    audio: Array<Audio_2>;
    /** Offsets into the audio context time, used for playback scheduling */
    audioContextTimeOffset: Array<number>;
    /** The timeline time of the previous evaluation */
    lastTime: number;
    /** Optional AudioSource associated with this track */
    audioSource?: AudioSource;
    private _audioLoader;
    private getAudioFilePath;
    /** Invoked when audio playback becomes allowed/disallowed (browser autoplay policy) */
    onAllowAudioChanged(allow: boolean): void;
    /** Adds a clip model to this track */
    addModel(model: Models.ClipModel): void;
    onDisable(): void;
    onDestroy(): void;
    /** Invoked when the application mute state changes */
    onMuteChanged(): void;
    /** Stops playback of this track's audio */
    stop(): void;
    private _playableDirectorResumed;
    /** Invoked when the owning director's pause state changes */
    onPauseChanged(): void;
    /** Evaluates the track at the given timeline time (in seconds) */
    evaluate(time: number): void;
    /** Call to load audio buffer for a specific time in the track. Can be used to preload the timeline audio */
    loadAudio(time: number, lookAhead?: number, lookBehind?: number): Promise<(AudioBuffer | null)[]> | null;
    private isInTimeRange;
    /** Static cache of decoded audio buffers, cleared via {@link dispose} */
    private static _audioBuffers;
    /** Clears the static audio buffer cache */
    static dispose(): void;
    private handleAudioLoading;
}

/* Excluded from this release type: AuralMode */

/**
 * Avatar component to setup a WebXR avatar with head and hand objects.
 *
 * The avatar will automatically synchronize the head and hand objects with the XR rig when entering an XR session.
 *
 * @summary WebXR Avatar component for head and hands synchronization
 * @category XR
 * @category Networking
 * @group Components
 */
export declare class Avatar extends Component {
    /** Asset used for the avatar's head */
    head?: AssetReference;
    /** Asset used for the avatar's left hand */
    leftHand?: AssetReference;
    /** Asset used for the avatar's right hand */
    rightHand?: AssetReference;
    private _leftHandMeshes?;
    private _rightHandMeshes?;
    private _syncTransforms?;
    /** Invoked when an XR session starts; prepares and loads the avatar objects */
    onEnterXR(_args: NeedleXREventArgs): Promise<void>;
    /** Invoked when the XR session ends */
    onLeaveXR(_args: NeedleXREventArgs): void;
    /** Invoked every XR frame to synchronize head and hand objects with the XR rig */
    onUpdateXR(args: NeedleXREventArgs): void;
    onBeforeRender(): void;
    private updateHandVisibility;
    private updateRemoteAvatarVisibility;
    private tryFindAvatarObjectsIfMissing;
    private prepareAvatar;
    private loadAvatarObjects;
}

/* Excluded from this release type: Avatar_Brain_LookAt */

/* Excluded from this release type: Avatar_MouthShapes */

/* Excluded from this release type: Avatar_MustacheShake */

/* Excluded from this release type: Avatar_POI */

/* Excluded from this release type: AvatarBlink_Simple */

/* Excluded from this release type: AvatarEyeLook_Rotation */

/**
 * Handles loading and instantiating avatar models from various sources.
 * Provides functionality to find and extract important parts of an avatar (head, hands).
 *
 * Debug mode can be enabled with the URL parameter `?debugavatar`,
 * which will log detailed information about avatar loading and configuration.
 */
export declare class AvatarLoader {
    /** URL of the registry that avatars are loaded from by ID (see {@link loadAvatar}) */
    private readonly avatarRegistryUrl;
    /**
     * Retrieves or creates a new avatar instance from an ID or existing Object3D.
     * @param context The application context
     * @param avatarId Either a string ID to load an avatar or an existing Object3D to use as avatar
     * @returns Promise resolving to an AvatarModel if successful, or null if failed
     */
    getOrCreateNewAvatarInstance(context: Context, avatarId: string | Object3D): Promise<AvatarModel | null>;
    /**
     * Loads an avatar model from a file or registry using the provided ID.
     * @param context The engine context
     * @param avatarId The ID of the avatar to load
     * @returns Promise resolving to the loaded avatar's Object3D, or null if failed
     */
    private loadAvatar;
    /**
     * Caches an avatar model for reuse.
     * @param _id The ID to associate with the model
     * @param _model The avatar model to cache
     */
    private cacheModel;
    /**
     * Analyzes an Object3D to find avatar parts (head, hands) based on naming conventions.
     * @param obj The Object3D to search for avatar parts
     * @returns A structured AvatarModel with references to found parts
     */
    private findAvatar;
    /**
     * Recursively searches for an avatar part by name within an Object3D hierarchy.
     * @param obj The Object3D to search within
     * @param searchString Array of strings that should all be present in the object name
     * @returns The found Object3D part or null if not found
     */
    private findAvatarPart;
    /**
     * Handles HTTP response errors from avatar loading operations.
     * @param response The fetch API response to check
     * @returns The response if it was ok
     * @throws Error with status text if response was not ok
     */
    private handleCustomAvatarErrors;
}

/**
 * Marks a GameObject as being controlled or owned by a player in networked XR sessions.
 * This is used internally by the networking system to identify player-controlled objects.
 *
 * **Note:** This is an internal marker class. For most use cases, use the {@link Avatar} component instead.
 *
 * @summary Internal marker for player-controlled objects in networked sessions
 * @category XR
 * @category Networking
 * @group Components
 * @see {@link Avatar} for avatar setup and configuration
 */
export declare class AvatarMarker extends Component {
    /**
     * Get an avatar marker by index from the global list of avatar markers.
     * @param index The index in the instances array
     * @returns The AvatarMarker at the specified index, or null if index is out of bounds
     */
    static getAvatar(index: number): AvatarMarker | null;
    /** Global list of all active AvatarMarker instances */
    static instances: AvatarMarker[];
    /**
     * Subscribe to avatar marker creation events.
     * @param cb Callback function called when a new avatar marker is created
     * @returns The callback function (for removal)
     */
    static onAvatarMarkerCreated(cb: (args: AvatarMarkerEventArgs) => void): Function;
    /**
     * Subscribe to avatar marker destruction events.
     * @param cb Callback function called when an avatar marker is destroyed
     * @returns The callback function (for removal)
     */
    static onAvatarMarkerDestroyed(cb: (args: AvatarMarkerEventArgs) => void): Function;
    /** Internal event dispatch target for {@link onAvatarMarkerCreated} subscriptions */
    private static _onNewAvatarMarkerAdded;
    /** Internal event dispatch target for {@link onAvatarMarkerDestroyed} subscriptions */
    private static _onAvatarMarkerDestroyed;
    /** The network connection ID of the player who owns this avatar */
    connectionId: string;
    /** Reference to the avatar GameObject with optional XR flags */
    avatar?: Object3D & {
        flags?: XRFlag[];
    };
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onDestroy */
    /**
     * Check if this avatar marker represents the local player.
     * @returns True if this avatar belongs to the local player, false otherwise
     */
    isLocalAvatar(): boolean;
}

/**
 * Event arguments for avatar marker creation and destruction events.
 * Passed to callbacks registered via {@link AvatarMarker.onAvatarMarkerCreated}
 * and {@link AvatarMarker.onAvatarMarkerDestroyed}.
 */
declare type AvatarMarkerEventArgs = {
    /** The AvatarMarker component instance */
    avatarMarker: AvatarMarker;
    /** The GameObject that contains the avatar marker */
    gameObject: Object3D;
};

/**
 * Represents an avatar model with head and hands references.
 * Used for representing characters in 3D space.
 */
export declare class AvatarModel {
    /** The root object of the avatar model */
    root: Object3D;
    /** The head object of the avatar model */
    head: Object3D;
    /** The left hand object of the avatar model, if available */
    leftHand: Object3D | null;
    /**
     * The right hand object of the avatar model, if available.
     * NOTE(review): the name is misspelled ("rigthHand" instead of "rightHand") but is
     * part of the public API and of the constructor signature below, so it cannot be
     * renamed without a breaking change.
     */
    rigthHand: Object3D | null;
    /**
     * Checks if the avatar model has a valid configuration.
     * An avatar is considered valid if it has a head.
     * @returns Whether the avatar has a valid setup
     */
    get isValid(): boolean;
    /**
     * Creates a new avatar model.
     * @param root The root object of the avatar
     * @param head The head object of the avatar
     * @param leftHand The left hand object of the avatar
     * @param rigthHand The right hand object of the avatar (parameter name intentionally matches the misspelled property)
     */
    constructor(root: Object3D, head: Object3D, leftHand: Object3D | null, rigthHand: Object3D | null);
}

/**
 * Bit-flag selection of coordinate axes. Values can be combined with bitwise OR.
 * NOTE(review): the flags start at bit 1 (X = 2); bit 0 appears unused — presumably
 * reserved; confirm against the implementation before relying on it.
 */
export declare enum Axes {
    /** No axis selected */
    None = 0,
    /** The X axis (bit flag) */
    X = 2,
    /** The Y axis (bit flag) */
    Y = 4,
    /** The Z axis (bit flag) */
    Z = 8,
    /** All axes (all bits set) */
    All = -1
}

/**
 * The [AxesHelper](https://engine.needle.tools/docs/api/AxesHelper) visualizes the local coordinate axes of an object.
 * Displays colored lines for X (red), Y (green), and Z (blue) axes.
 *
 * **Use cases:**
 * - Debugging object orientation and rotation
 * - Visualizing pivot points
 * - Understanding coordinate systems
 *
 * **Properties:**
 * - `length` - Length of axis lines in world units
 * - `depthTest` - Whether axes are occluded by scene objects
 * - `isGizmo` - Only show when `?gizmos` URL parameter is set
 *
 * @example Add axes visualization
 * ```ts
 * const axes = myObject.addComponent(AxesHelper);
 * axes.length = 2;
 * axes.depthTest = false; // Always visible on top
 * ```
 *
 * @summary Visualizes object axes (X=red, Y=green, Z=blue)
 * @category Helpers
 * @group Components
 * @see {@link GridHelper} for grid visualization
 * @see {@link Gizmos} for debug drawing utilities
 */
export declare class AxesHelper extends Component {
    /**
     * The length of each axis line in scene units.
     */
    length: number;
    /**
     * Whether the axes should be occluded by objects in the scene.
     * When set to false, axes will always appear on top regardless of their depth.
     */
    depthTest: boolean;
    /**
     * When true, this helper will only be visible if the debug flag `?gizmos` is enabled.
     */
    isGizmo: boolean;
    /** The underlying axes visualization object, created in {@link onEnable} */
    private _axes;
    /**
     * Creates and adds the axes visualization to the scene when the component is enabled.
     * If marked as a gizmo, it will only be shown when gizmos are enabled in the global parameters.
     */
    onEnable(): void;
    /**
     * Removes the axes visualization from the scene when the component is disabled.
     */
    onDisable(): void;
}

/** A 2D axis, whose string value maps to the corresponding vector component ("x" or "y"). */
declare enum Axis {
    /** Horizontal axis — maps to the "x" component */
    Horizontal = "x",
    /** Vertical axis — maps to the "y" component */
    Vertical = "y"
}

/** Options for displaying an on-screen balloon message (see {@link LogType} for severity). */
declare type BalloonOptions = {
    /** Severity/type of the message */
    type: LogType;
    /**
     * A key can be used to update an existing message instead of showing a new one.
     */
    key?: string;
    /**
     * If true, only show this message once. If the same message is logged again, it will be ignored. Note that messages are considered the same if their text is identical, so using this with dynamic values might lead to unexpected results.
     */
    once?: boolean;
    /**
     * Duration in seconds for how long the message should be shown. By default, messages will be shown for 10 seconds.
     */
    duration?: number;
};

/**
 * Derive from this class if you want to implement your own UI components.
 * It provides utility methods and simplifies managing the underlying three-mesh-ui hierarchy.
 * @category User Interface
 * @group Components
 */
export declare class BaseUIComponent extends Component {
    /** Is this object on the root of the UI hierarchy ? */
    isRoot(): boolean;
    /** Access the parent canvas component */
    get canvas(): ICanvas | null;
    /** @deprecated use `canvas` */
    protected get Canvas(): ICanvas | null;
    /** Mark the UI dirty which will trigger an THREE-Mesh-UI update */
    markDirty(): void;
    /** the underlying three-mesh-ui */
    get shadowComponent(): Object3D | null;
    private set shadowComponent(value);
    /** Backing field for {@link shadowComponent} */
    private _shadowComponent;
    /** Backing field for {@link controlsChildLayout} */
    private _controlsChildLayout;
    /** Whether this component controls the layout of its children — verify semantics in implementation */
    get controlsChildLayout(): boolean;
    set controlsChildLayout(val: boolean);
    /** Cached reference backing {@link Root} */
    private _root?;
    /** The root UI component of the hierarchy this component belongs to */
    protected get Root(): UIRootComponent | null;
    /** The parent UI component in the hierarchy, if any */
    protected _parentComponent?: BaseUIComponent | null;
    __internalNewInstanceCreated(args: ComponentInit<this>): this;
    onEnable(): void;
    /** Add a three-mesh-ui object to the UI hierarchy
     * @param container the three-mesh-ui object to add
     * @param parent the parent component to add the object to
     */
    protected addShadowComponent(container: any, parent?: BaseUIComponent): void;
    /** Assigns ownership of the given three-mesh-ui element/object to this component */
    protected setShadowComponentOwner(current: ThreeMeshUI.MeshUIBaseElement | Object3D | null | undefined): void;
    private traverseOwnedShadowComponents;
    /** Remove the underlying UI object from the hierarchy */
    protected removeShadowComponent(): void;
    protected onAfterAddedToScene(): void;
    /** Enables or disables interaction with this UI component */
    setInteractable(value: boolean): void;
}

/**
 * The [BasicIKConstraint](https://engine.needle.tools/docs/api/BasicIKConstraint) provides simple two-bone inverse kinematics.
 * Positions this GameObject as a "joint" between `from` and `to` targets,
 * using a `hint` object to determine the bend direction.
 *
 * **Use cases:**
 * - Simple arm/leg IK (elbow/knee positioning)
 * - Mechanical linkages
 * - Procedural animation joints
 *
 * **How it works:**
 * - Calculates joint position based on `desiredDistance` (bone length)
 * - Uses `hint` to determine which way the joint bends
 * - Automatically handles stretching when targets are too far apart
 *
 * @example Setup basic limb IK
 * ```ts
 * // Attach to the elbow/knee joint object
 * const ik = elbowJoint.addComponent(BasicIKConstraint);
 * // Configure via serialized properties in editor:
 * // - from: shoulder/hip
 * // - to: wrist/ankle
 * // - hint: control point for bend direction
 * ```
 *
 * @summary Two-bone inverse kinematics constraint
 * @category Animation and Sequencing
 * @group Components
 * @see {@link AlignmentConstraint} for simpler alignment
 */
export declare class BasicIKConstraint extends Component {
    /** Start target of the IK chain (e.g. shoulder/hip); set via serialized properties */
    private from;
    /** End target of the IK chain (e.g. wrist/ankle); set via serialized properties */
    private to;
    /** Control point that determines the bend direction of the joint */
    private hint;
    /** Desired bone length used to place the joint between `from` and `to` */
    private desiredDistance;
    onEnable(): void;
    /** Recomputes the joint position every frame */
    update(): void;
}

/** Arguments passed to callbacks before a texture or mesh LOD is exported by the {@link USDZExporter}. */
declare type BeforeLODExportArguments = {
    readonly exporter: USDZExporter;
    /** The type of LOD being exported, either a texture or a mesh */
    readonly type: "texture" | "mesh";
    /** The Renderer gameobject */
    readonly object: Object3D;
    /** The mesh being exported, if applicable */
    readonly mesh?: Mesh;
    /** The material being exported, if applicable */
    readonly material?: Material;
    /** By default LOD 0 will be exported (highest quality).
     * Set to a different LOD level if needed.
     * Set to -1 to disable LOD export and simply export the current version.
     */
    overrideLevel?: number;
};

/** Starts listening for object destroy events on the given context (name-based; implementation not visible here). */
export declare function beginListenDestroy(context: Context): void;

/** Starts listening for instantiate events on the given context. Returns a function — presumably to stop listening; verify in implementation. */
export declare function beginListenInstantiate(context: Context): () => void;

/** internal USDZ behaviours extension */
export declare class BehaviorExtension implements IUSDExporterExtension {
    /** Name under which this extension is registered with the exporter */
    get extensionName(): string;
    /** Behavior models collected for the current export */
    private behaviours;
    /** Adds a behavior model to be written during export */
    addBehavior(beh: BehaviorModel): void;
    /** Register audio clip for USDZ export. The clip will be embedded in the resulting file. */
    addAudioClip(clipUrl: string): string;
    /** Components participating in USDZ behaviour export */
    behaviourComponents: Array<UsdzBehaviour>;
    private behaviourComponentsCopy;
    private audioClips;
    private audioClipsCopy;
    private targetUuids;
    /** Returns the uuids of all objects targeted by the collected behaviours */
    getAllTargetUuids(): Set<string>;
    /** Exporter hook: invoked before the USD document is built */
    onBeforeBuildDocument(context: USDZExporterContext): Promise<void> | Promise<any[]>;
    /** Exporter hook: invoked for each exported object */
    onExportObject(_object: any, model: USDObject, context: any): void;
    /** Exporter hook: invoked after the USD document is built */
    onAfterBuildDocument(context: USDZExporterContext): void;
    /** Exporter hook: invoked after the object hierarchy has been written */
    onAfterHierarchy(context: USDZExporterContext, writer: USDWriter): void;
    /** Exporter hook: invoked after serialization completes */
    onAfterSerialize(context: USDZExporterContext): Promise<void>;
}

/** A USDZ interaction behavior: one or more triggers paired with an action (see {@link BehaviorExtension}). */
export declare class BehaviorModel {
    /** Global counter — presumably used to generate unique behavior ids; verify in implementation */
    static global_id: number;
    /** Unique identifier of this behavior */
    id: string;
    /** The trigger(s) that start this behavior */
    trigger: IBehaviorElement | IBehaviorElement[];
    /** The action executed when triggered */
    action: IBehaviorElement;
    /** Whether this behavior is exclusive */
    exclusive: boolean;
    /** Sets the exclusive flag and returns a BehaviorModel for chaining */
    makeExclusive(exclusive: boolean): BehaviorModel;
    constructor(id: string, trigger: IBehaviorElement | IBehaviorElement[], action: IBehaviorElement);
    /** Writes this behavior into the USD document */
    writeTo(_ext: BehaviorExtension, document: USDDocument, writer: USDWriter): void;
}

/**
 * Callback invoked with received binary data.
 * NOTE(review): the union `any | flatbuffers.ByteBuffer` collapses to `any`, so
 * implementers get no type checking on `data`. `unknown | flatbuffers.ByteBuffer`
 * would be stricter, but changing it would alter the public contract, so it is
 * only flagged here.
 */
export declare type BinaryCallback = {
    (data: any | flatbuffers.ByteBuffer): void;
};

/** Maps binary message identifiers to functions that cast a flatbuffers ByteBuffer to the corresponding typed object. */
export declare const binaryIdentifierCasts: {
    [key: string]: (bin: flatbuffers.ByteBuffer) => object;
};

export declare namespace BlobStorage {
    /** The base url for the blob storage.
     * The expected endpoints are:
     * - POST `/api/needle/blob` - to request a new upload url
     */
    const baseUrl: string | undefined;
    /**
     * Generates an md5 hash from a given buffer
     * @param buffer The buffer to hash
     * @returns The md5 hash
     */
    export function hashMD5(buffer: ArrayBuffer): string;
    /** Generates an md5 hash of the buffer, encoded as Base64 */
    export function hashMD5_Base64(buffer: ArrayBuffer): string;
    /** Generates a SHA-256 hash of the buffer */
    export function hashSha256(buffer: ArrayBuffer): Promise<string>;
    /** Result of an {@link upload} call */
    export type Upload_Result = {
        /** Storage key of the uploaded blob, or null on failure */
        readonly key: string | null;
        /** Whether the upload succeeded */
        readonly success: boolean;
        /** URL the uploaded blob can be downloaded from, or null on failure */
        readonly download_url: string | null;
    };
    /**
     * Checks if the current user can upload a file of the given size
     * @param info The file info
     */
    export function canUpload(info: {
        filesize: number;
    }): boolean;
    /** A file described by name and raw data, for uploads without a DOM File object */
    export type CustomFile = {
        name: string;
        data: ArrayBuffer;
        /** MIME type of the data, if known */
        type?: string;
    };
    export type UploadOptions = {
        /** Allows to abort the upload. See AbortController */
        abort?: AbortSignal;
        /** When set to `true` no balloon messages will be displayed on screen */
        silent?: boolean;
        /** Called when the upload starts and is finished */
        onProgress?: (progress: {
            progress01: number;
            state: "inprogress" | "finished";
        }) => void;
    };
    /** Uploads a file to blob storage. @returns the upload result, or null on failure */
    export function upload(file: CustomFile, opts?: UploadOptions): Promise<Upload_Result | null>;
    export function upload(file: File, opts?: UploadOptions): Promise<Upload_Result | null>;
    /** Returns the download url for a blob storage key */
    export function getBlobUrlForKey(key: string): string;
    /** Downloads the blob at the given url. @param progressCallback optional download progress callback */
    export function download(url: string, progressCallback?: (prog: ProgressEvent) => void): Promise<Uint8Array | null>;
    export {};
}

/**
 * [BloomEffect](https://engine.needle.tools/docs/api/BloomEffect) can be used to make bright areas in the scene glow.
 * @link Sample https://engine.needle.tools/samples/postprocessing
 * @example
 * ```typescript
 * const bloom = new Bloom();
 * bloom.intensity.value = 1.5;
 * bloom.threshold.value = 0.5;
 * bloom.scatter.value = 0.5;
 * volume.add(bloom);
 * ```
 *
 * @summary Bloom Post-Processing Effect
 * @category Effects
 * @group Components
 */
export declare class BloomEffect extends PostProcessingEffect {
    /** Whether to use selective bloom by default */
    static useSelectiveBloom: boolean;
    get typeName(): string;
    /**
     * The bloom threshold controls at what brightness level the bloom effect will be applied.
     * A higher value means the bloom will be applied to brighter areas or lights only
     * @default 0.9
     */
    readonly threshold: VolumeParameter;
    /**
     * Intensity of the bloom effect. A higher value will increase the intensity of the bloom effect.
     * @default 1
     */
    readonly intensity: VolumeParameter;
    /**
     * Scatter value. The higher the value, the more the bloom will scatter.
     * @default 0.7
     */
    readonly scatter: VolumeParameter;
    /**
     * Set to true to use selective bloom when the effect gets created.
     * @default false
     */
    selectiveBloom?: boolean;
    /** Initializes the effect's parameters */
    init(): void;
    /** Creates the underlying postprocessing bloom effect instance */
    onCreateEffect(): BloomEffect_2;
}

/**
 * BoxCollider represents a box-shaped (cuboid) collision volume.
 * Most common collider type, efficient for walls, floors, crates, and rectangular objects.
 *
 * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
 *
 * @example Create a floor collider
 * ```ts
 * const box = floor.addComponent(BoxCollider);
 * box.size = new Vector3(10, 0.1, 10);
 * box.center = new Vector3(0, -0.05, 0);
 * ```
 *
 * @example Auto-fit to mesh geometry
 * ```ts
 * const collider = BoxCollider.add(myMesh, { rigidbody: true });
 * // Collider size is automatically set from mesh bounds
 * ```
 *
 * - Example: https://samples.needle.tools/physics-basic
 *
 * @summary Box-shaped physics collider
 * @category Physics
 * @group Components
 * @see {@link Collider} for base collider functionality
 * @see {@link SphereCollider} for sphere shapes
 */
export declare class BoxCollider extends Collider implements IBoxCollider {
    /**
     * Creates and adds a BoxCollider to the given object.
     * @param obj The object to add the collider to
     * @param opts Configuration options for the collider and optional rigidbody
     * @returns The newly created BoxCollider
     */
    static add(obj: Mesh | Object3D, opts?: {
        /** When true, a rigidbody is set up along with the collider */
        rigidbody: boolean;
        /** When true, visualizes the collider for debugging */
        debug?: boolean;
    }): BoxCollider;
    /**
     * The size of the box collider along each axis.
     */
    size: Vector3;
    /**
     * The center position of the box collider relative to the transform's position.
     */
    center: Vector3;
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /* Excluded from this release type: onValidate */
    /**
     * Automatically fits the collider to the geometry of the object.
     * Sets the size and center based on the object's bounding box.
     * @param opts Options object with a debug flag to visualize the bounding box
     */
    autoFit(opts?: {
        debug?: boolean;
    }): void;
}

/**
 * BoxGizmo is a component that displays a box around the object in the scene. It can optionally expand to the object's bounds.
 *
 * @summary Display a box around the object
 * @category Helpers
 * @group Components
 */
export declare class BoxGizmo extends Component {
    /** When true, the box expands to the object's bounds */
    objectBounds: boolean;
    /** Optional color of the box lines */
    color?: Color;
    /** When true, this helper behaves as a gizmo — presumably only visible with the `?gizmos` flag, like {@link AxesHelper.isGizmo}; verify */
    isGizmo: boolean;
    private _gizmoObject;
    private _boxHelper;
    onEnable(): void;
    onDisable(): void;
    /** Updates the box to match the object's bounds (see {@link objectBounds}) */
    private syncObjectBounds;
}

/**
 * A component that creates a bounding box around an object and provides intersection testing functionality.
 *
 * Debug mode can be enabled with the URL parameter `?debugboxhelper`, which will visualize intersection tests.
 * Helper visualization can be enabled with the URL parameter `?gizmos`.
 *
 * @summary Bounding box helper with intersection tests
 * @category Helpers
 * @group Components
 */
export declare class BoxHelperComponent extends Component {
    /** The bounding box for this component */
    private box;
    /** Shared scratch box used by intersection tests to avoid allocations */
    private static testBox;
    /** Frame number of the last box update, used to skip redundant updates (see {@link updateBox}) */
    private _lastMatrixUpdateFrame;
    private static _position;
    private static _size;
    private static _emptyObjectSize;
    /**
     * Tests if an object intersects with this helper's bounding box
     * @param obj The object to test for intersection
     * @returns True if objects intersect, false if not, undefined if the provided object is invalid
     */
    isInBox(obj: Object3D): boolean | undefined;
    /**
     * Tests if this helper's bounding box intersects with another box
     * @param box The {@link Box3} to test for intersection
     * @returns True if boxes intersect, false otherwise
     */
    intersects(box: Box3): boolean;
    /**
     * Updates the helper's bounding box based on the gameObject's position and scale
     * @param force Whether to force an update regardless of frame count
     * @returns The updated {@link Box3}
     */
    updateBox(force?: boolean): Box3;
    /** The wireframe visualization object created by {@link showHelper} */
    private _helper;
    /** Color used for the wireframe helper */
    private _color;
    awake(): void;
    /**
     * Creates and displays a visual wireframe representation of this box helper
     * @param col Optional color for the wireframe. If not provided, uses default color
     * @param force If true, shows the helper even if gizmos are disabled
     */
    showHelper(col?: ColorRepresentation | null, force?: boolean): void;
}

/** Registry of scene build functions keyed by name; each receives the engine {@link Context} and returns a promise. */
export declare const build_scene_functions: {
    [name: string]: (context: Context) => Promise<void>;
};

/** The build time of the project — presumably injected during bundling; format not specified here */
export declare const BUILD_TIME: string;

/** Key under which built-in components are stored (value: `"builtin_components"`) */
export declare const builtinComponentKeyName = "builtin_components";

/**
 * [Button](https://engine.needle.tools/docs/api/Button) is a UI component that can be clicked to trigger actions.
 * Supports visual states (normal, highlighted, pressed, disabled) with
 * color tints or animation transitions.
 *
 * **Visual transitions:**
 * - `ColorTint` - Tint the button image with state colors
 * - `Animation` - Trigger animator states for each button state
 * - `SpriteSwap` - Swap sprites for each state (not fully supported)
 *
 * **Requirements:**
 * - Typically paired with an {@link Image} component for visuals or any 3D object
 *
 * @example Listen to button clicks
 * ```ts
 * const button = myButton.getComponent(Button);
 * button.onClick.addEventListener(() => {
 *   console.log("Button clicked!");
 * });
 * ```
 *
 * @example Programmatically click a button
 * ```ts
 * button.click(); // Triggers onClick event
 * ```
 *
 * @summary UI Button that can be clicked to perform actions
 * @category User Interface
 * @group Components
 * @see {@link EventList} for onClick callback handling
 * @see {@link Image} for button visuals
 * @see {@link GraphicRaycaster} for UI interaction
 * @see {@link Transition} for visual state options
 */
export declare class Button extends Component implements IPointerEventHandler {
    /**
     * Invokes the onClick event
     */
    click(): void;
    /** Event invoked when the button is clicked (see {@link EventList}) */
    onClick: EventList<void>;
    /** Whether a pointer is currently hovering the button */
    private _isHovered;
    /** Pointer handler: pointer entered the button */
    onPointerEnter(evt: PointerEventData): void;
    /** Pointer handler: pointer left the button */
    onPointerExit(): void;
    /** Pointer handler: pointer pressed on the button */
    onPointerDown(_: any): void;
    /** Pointer handler: pointer released on the button */
    onPointerUp(_: any): void;
    /** Pointer handler: click completed; invokes {@link onClick} */
    onPointerClick(args: PointerEventData): void;
    /** State colors used for the ColorTint transition (see {@link ButtonColors}) */
    colors?: ButtonColors;
    /** Which visual transition type this button uses */
    transition?: Transition_2;
    /** Animator trigger names used for the Animation transition */
    animationTriggers?: AnimationTriggers;
    /** Animator used for the Animation transition */
    animator?: Animator;
    /** Whether the button reacts to pointer input */
    set interactable(value: boolean);
    get interactable(): boolean;
    /** Backing field for {@link interactable} */
    private _interactable;
    private set_interactable;
    awake(): void;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    onDestroy(): void;
    private _requestedAnimatorTrigger?;
    private setAnimatorTriggerAtEndOfFrame;
    private _isInit;
    /** Cached {@link Image} used for color tinting */
    private _image?;
    private init;
    /** Applies the visual state (color/animation) for the current interaction state */
    private stateSetup;
    private getFinalColor;
}

/**
 * Color settings used by the {@link Button} `ColorTint` transition.
 * NOTE(review): `colorMultiplier` is declared with the literal type `1` —
 * this looks like a generated-declaration artifact of a field initialized
 * to 1 in the source; confirm against the implementation.
 */
declare class ButtonColors {
    /** Multiplier applied to the state colors (declared as the literal type 1 — see class note) */
    colorMultiplier: 1;
    /** Tint used while the button is not interactable */
    disabledColor: RGBAColor;
    /** Duration of the color fade between states */
    fadeDuration: number;
    /** Tint used while the button is hovered/highlighted */
    highlightedColor: RGBAColor;
    /** Tint used in the default state */
    normalColor: RGBAColor;
    /** Tint used while the button is pressed */
    pressedColor: RGBAColor;
    /** Tint used while the button is selected */
    selectedColor: RGBAColor;
}

/**
 * Used by the NeedleMenuElement to create a button at {@link NeedleMenuElement#appendChild}
 */
declare type ButtonInfo = {
    /** Invoked when the button is clicked */
    onClick: (evt: Event) => void;
    /** Visible button text */
    label: string;
    /** Material icon name: https://fonts.google.com/icons */
    icon?: string;
    /** "left" or "right" to place the icon on the left or right side of the button. Default is "left" */
    iconSide?: "left" | "right";
    /**
     * Priority controls the order of buttons in the menu.
     * If not enough space is available to show all buttons - the highest priority elements will always be visible
     *
     * **Sorting**
     * Low-priority buttons are placed on the left,
     * high-priority buttons on the right.
     * NOTE(review): the original wording here was garbled — confirm the placement semantics.
     * @default undefined
     */
    priority?: number;
    /** Experimental. Allows to put two buttons in one row for the compact layout */
    class?: "row2";
    /** Tooltip text. NOTE(review): presumably mapped to the HTML `title` attribute — confirm. */
    title?: string;
};

/**
 * All known button names for the various devices and use cases, combined.
 * Prefer the device-specific union when the device is known — e.g. if you
 * only deal with a mouse, use {@link MouseButtonName}.
 */
export declare type ButtonName = "unknown" | MouseButtonName | GamepadButtonName | XRControllerButtonName | XRGestureName;

/**
 * Use the ButtonsFactory to create buttons with icons and functionality
 * Get access to the default buttons by using `ButtonsFactory.instance`
 * The factory will create the buttons if they don't exist yet, and return the existing ones if they do (this allows you to reparent or modify created buttons)
 *
 * @category HTML
 */
export declare class ButtonsFactory {
    /** Lazily created shared factory (see {@link instance}) */
    private static _instance?;
    /**
     * Get access to the default HTML button factory.
     * Use this to get or create default Needle Engine buttons that can be added to your HTML UI
     * If you want to create a new factory and create new button instances instead of shared buttons, use `ButtonsFactory.create()` instead
     */
    static get instance(): ButtonsFactory;
    /**
     * Get access to the default HTML button factory.
     * Use this to get or create default Needle Engine buttons that can be added to your HTML UI
     * If you want to create a new factory and create new button instances instead of shared buttons, use `ButtonsFactory.create()` instead
     */
    static getOrCreate(): ButtonsFactory;
    /** create a new buttons factory */
    static create(): ButtonsFactory;
    /** Cached fullscreen button instance (see {@link fullscreenButton}) */
    private _fullscreenButton?;
    /**
     * Get the fullscreen button (or undefined if it doesn't exist yet). Call {@link ButtonsFactory.createFullscreenButton} to get or create it
     */
    get fullscreenButton(): HTMLButtonElement | undefined;
    /** Create a fullscreen button (or return the existing one if it already exists) */
    createFullscreenButton(ctx: IContext): HTMLButtonElement | null;
    /** Cached mute button instance (see {@link muteButton}) */
    private _muteButton?;
    /** Get the mute button (or undefined if it doesn't exist yet). Call {@link ButtonsFactory.createMuteButton} to get or create it */
    get muteButton(): HTMLButtonElement | undefined;
    /** Create a mute button (or return the existing one if it already exists) */
    createMuteButton(ctx: IContext): HTMLButtonElement;
    /** Cached QR code button instance (see {@link qrButton}) */
    private _qrButton?;
    /**
     * Get the QR code button (or undefined if it doesn't exist yet). Call {@link ButtonsFactory.createQRCode} to get or create it
     */
    get qrButton(): HTMLButtonElement | undefined;
    /** Backing field for {@link qrButtonUrl} */
    private _customQRButtonUrl;
    /** Get or set the QR code button URL - this URL will open when scanning the QR code */
    set qrButtonUrl(url: string);
    get qrButtonUrl(): string;
    /** Create a QR code button (or return the existing one if it already exists)
     * The QR code button will show a QR code that can be scanned to open the current page on a phone
     * The QR code will be generated with the current URL when the button is clicked
     * @param opts.anchorElement Optional element to anchor the QR code display to — TODO confirm exact behavior
     * @returns the QR code button element
     */
    createQRCode(opts?: {
        anchorElement?: HTMLElement;
    }): HTMLButtonElement;
    /** Hides the given element while an XR session is active. NOTE(review): inferred from name — confirm. */
    private hideElementDuringXRSession;
}

/* Excluded from this release type: calculateProgress01 */

/** Direction of a call relative to the local peer (see {@link CallHandle}) */
export declare enum CallDirection {
    /** The call was received from a remote peer */
    Incoming = "incoming",
    /** The call was initiated locally */
    Outgoing = "outgoing"
}

/**
 * Handle for a single peer-to-peer media call.
 * Wraps a peerjs {@link MediaConnection} together with its {@link CallDirection},
 * the associated user id and the (possibly not yet available) media stream.
 */
declare class CallHandle extends EventDispatcher<any> {
    /** Peer id of this call. NOTE(review): not passed via the constructor — presumably derived from the MediaConnection; confirm. */
    readonly peerId: string;
    /** Id of the user this call belongs to */
    readonly userId: string;
    /** Whether this call is incoming or outgoing */
    readonly direction: CallDirection;
    /** The underlying peerjs media connection */
    readonly call: MediaConnection;
    /** The call's media stream, or null while none is available */
    get stream(): MediaStream | null;
    /** Backing field for {@link stream} */
    private _stream;
    /** Set once {@link close} has disposed this handle */
    private _isDisposed;
    /** Closes the call */
    close(): void;
    /** True while the call is established and open */
    get isOpen(): boolean;
    /** True while the call is still being established */
    get isOpening(): boolean;
    /** True once the call has been closed */
    get isClosed(): boolean;
    constructor(userId: string, call: MediaConnection, direction: CallDirection, stream?: MediaStream | null);
}

/**
 * CallInfo represents a single callback method that can be invoked by the {@link EventList}.
 */
export declare class CallInfo {
    /**
     * When the CallInfo is enabled it will be invoked when the EventList is invoked
     */
    enabled: boolean;
    /**
     * The target object to invoke the method on OR the function to invoke
     */
    target: Object | Function;
    /** Name of the method to invoke on {@link target}; null when {@link target} is itself the function */
    methodName: string | null;
    /**
     * The arguments to invoke this method with
     */
    arguments?: Array<any>;
    /** Whether this CallInfo can be cloned */
    get canClone(): boolean;
    /** Create a CallInfo that directly wraps a function */
    constructor(target: Function);
    /** Create a CallInfo that invokes `methodName` on `target`, optionally with predefined args */
    constructor(target: Object, methodName: string | null, args?: Array<any>, enabled?: boolean);
    /** Invoke the wrapped function/method with the given args.
     * NOTE(review): how these args combine with {@link arguments} is not visible here — confirm. */
    invoke(...args: any): void;
}

/**
 * [Camera](https://engine.needle.tools/docs/api/Camera) handles rendering from a specific viewpoint in the scene.
 * Supports both perspective and orthographic cameras with various rendering options.
 * Internally uses three.js {@link PerspectiveCamera} or {@link OrthographicCamera}.
 *
 * ![](https://cloud.needle.tools/-/media/UU96_SJNXdVjaAvPNW3kZA.webp)
 *
 * **Background clearing:**
 * Control how the camera clears the background using `clearFlags`:
 * - `Skybox` - Use scene skybox/environment
 * - `SolidColor` - Clear with `backgroundColor`
 * - `None` - Don't clear (for layered rendering)
 *
 * **Render targets:**
 * Set `targetTexture` to a {@link RenderTexture} to render to a texture
 * instead of the screen (useful for mirrors, portals, minimaps).
 *
 * [![](https://cloud.needle.tools/-/media/W4tYZuJVVJFVp7NTaHPOnA.gif)](https://engine.needle.tools/samples/movie-set)
 *
 * @example Configure camera settings
 * ```ts
 * const cam = this.context.mainCameraComponent;
 * cam.fieldOfView = 60;
 * cam.nearClipPlane = 0.1;
 * cam.farClipPlane = 1000;
 * cam.clearFlags = ClearFlags.SolidColor;
 * cam.backgroundColor = new RGBAColor(0.1, 0.1, 0.2, 1);
 * ```
 *
 * - Example: https://engine.needle.tools/samples/multiple-cameras
 *
 * @summary Rendering scenes from a specific viewpoint
 * @category Camera and Controls
 * @group Components
 * @see {@link OrbitControls} for camera interaction
 * @see {@link RenderTexture} for off-screen rendering
 * @see {@link ClearFlags} for background clearing options
 * @link https://engine.needle.tools/samples/movie-set/
 */
export declare class Camera extends Component implements ICamera {
    /**
     * Returns whether this component is a camera
     * @returns {boolean} Always returns true
     */
    get isCamera(): boolean;
    /**
     * Gets or sets the camera's aspect ratio (width divided by height).
     * For perspective cameras, this directly affects the camera's projection matrix.
     * When set, automatically updates the projection matrix.
     */
    get aspect(): number;
    set aspect(value: number);
    /**
     * Gets or sets the camera's field of view in degrees for perspective cameras.
     * When set, automatically updates the projection matrix.
     */
    get fieldOfView(): number | undefined;
    set fieldOfView(val: number | undefined);
    /**
     * Gets or sets the camera's near clipping plane distance.
     * Objects closer than this distance won't be rendered.
     * When set, automatically updates the projection matrix.
     */
    get nearClipPlane(): number;
    set nearClipPlane(val: number);
    /** Backing field for {@link nearClipPlane} */
    private _nearClipPlane;
    /**
     * Gets or sets the camera's far clipping plane distance.
     * Objects farther than this distance won't be rendered.
     * When set, automatically updates the projection matrix.
     */
    get farClipPlane(): number;
    set farClipPlane(val: number);
    /** Backing field for {@link farClipPlane} */
    private _farClipPlane;
    /**
     * Applies both the camera's near and far clipping planes and updates the projection matrix.
     * This ensures rendering occurs only within the specified distance range.
     */
    applyClippingPlane(): void;
    /**
     * Gets or sets the camera's clear flags that determine how the background is rendered.
     * Options include skybox, solid color, or transparent background.
     */
    get clearFlags(): ClearFlags;
    set clearFlags(val: ClearFlags | "skybox" | "solidcolor");
    /**
     * Determines if the camera should use orthographic projection instead of perspective.
     */
    orthographic: boolean;
    /**
     * The size of the orthographic camera's view volume when in orthographic mode.
     * Larger values show more of the scene.
     */
    orthographicSize: number;
    /**
     * Controls the transparency level of the camera background in AR mode on supported devices.
     * Value from 0 (fully transparent) to 1 (fully opaque).
     */
    ARBackgroundAlpha: number;
    /**
     * Gets or sets the layers mask that determines which objects this camera will render.
     * Uses the {@link https://threejs.org/docs/#api/en/core/Layers.mask|three.js layers mask} convention.
     */
    set cullingMask(val: number);
    get cullingMask(): number;
    /** Backing field for {@link cullingMask} */
    private _cullingMask;
    /**
     * Sets only a specific layer to be active for rendering by this camera.
     * This is equivalent to calling `layers.set(val)` on the three.js camera object.
     * @param val The layer index to set active
     */
    set cullingLayer(val: number);
    /**
     * Gets or sets the blurriness of the skybox background.
     * Values range from 0 (sharp) to 1 (maximum blur).
     */
    set backgroundBlurriness(val: number | undefined);
    get backgroundBlurriness(): number | undefined;
    /** Backing field for {@link backgroundBlurriness} */
    private _backgroundBlurriness?;
    /**
     * Gets or sets the intensity of the skybox background.
     * Values range from 0 (dark) to 10 (very bright).
     */
    set backgroundIntensity(val: number | undefined);
    get backgroundIntensity(): number | undefined;
    /** Backing field for {@link backgroundIntensity} */
    private _backgroundIntensity?;
    /**
     * Gets or sets the rotation of the skybox background.
     * Controls the orientation of the environment map.
     */
    set backgroundRotation(val: Euler | undefined);
    get backgroundRotation(): Euler | undefined;
    /** Backing field for {@link backgroundRotation} */
    private _backgroundRotation?;
    /**
     * Gets or sets the intensity of the environment lighting.
     * Controls how strongly the environment map affects scene lighting.
     */
    set environmentIntensity(val: number | undefined);
    get environmentIntensity(): number | undefined;
    /** Backing field for {@link environmentIntensity} */
    private _environmentIntensity?;
    /**
     * Gets or sets the background color of the camera when {@link ClearFlags} is set to {@link ClearFlags.SolidColor}.
     * The alpha component controls transparency.
     */
    get backgroundColor(): RGBAColor | null;
    set backgroundColor(val: RGBAColor | Color | null);
    /**
     * Gets or sets the texture that the camera should render to instead of the screen.
     * Useful for creating effects like mirrors, portals or custom post processing.
     */
    set targetTexture(rt: RenderTexture | null);
    get targetTexture(): RenderTexture | null;
    /** Backing field for {@link targetTexture} */
    private _targetTexture;
    /** Backing field for {@link backgroundColor} */
    private _backgroundColor?;
    /** Backing field for {@link fieldOfView} */
    private _fov?;
    /** The underlying three.js camera object (see {@link threeCamera}) */
    private _cam;
    /** Backing field for {@link clearFlags} */
    private _clearFlags;
    /** Cached skybox reference. NOTE(review): presumably used by {@link applySceneSkybox} — confirm. */
    private _skybox?;
    /**
     * Gets the three.js camera object. Creates one if it doesn't exist yet.
     * @returns {PerspectiveCamera | OrthographicCamera} The three.js camera object
     * @deprecated Use {@link threeCamera} instead
     */
    get cam(): PerspectiveCamera | OrthographicCamera;
    /**
     * Gets the three.js camera object. Creates one if it doesn't exist yet.
     * @returns {PerspectiveCamera | OrthographicCamera} The three.js camera object
     */
    get threeCamera(): PerspectiveCamera | OrthographicCamera;
    /** Scratch vectors — presumably reused by {@link screenPointToRay} to avoid per-call allocations; confirm. */
    private static _origin;
    private static _direction;
    /**
     * Converts screen coordinates to a ray in world space.
     * Useful for implementing picking or raycasting from screen to world.
     *
     * @param x The x screen coordinate
     * @param y The y screen coordinate
     * @param ray Optional ray object to reuse instead of creating a new one
     * @returns {Ray} A ray originating from the camera position pointing through the screen point
     */
    screenPointToRay(x: number, y: number, ray?: Ray): Ray;
    /** Backing field for {@link getFrustum} */
    private _frustum?;
    /**
     * Gets the camera's view frustum for culling and visibility checks.
     * Creates the frustum if it doesn't exist and returns it.
     *
     * @returns {Frustum} The camera's view frustum
     */
    getFrustum(): Frustum;
    /**
     * Forces an update of the camera's frustum.
     * This is automatically called every frame in onBeforeRender.
     */
    updateFrustum(): void;
    /**
     * Gets this camera's projection-screen matrix.
     *
     * @param target Matrix4 object to store the result in
     * @param forceUpdate Whether to force recalculation of the matrix
     * @returns {Matrix4} The requested projection screen matrix
     */
    getProjectionScreenMatrix(target: Matrix4, forceUpdate?: boolean): Matrix4;
    /** Cached matrix used by {@link getProjectionScreenMatrix} */
    private readonly _projScreenMatrix;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /** XR lifecycle callback invoked when leaving an XR session */
    onLeaveXR(_args: NeedleXREventArgs): void;
    /* Excluded from this release type: onBeforeRender */
    /**
     * Creates a three.js camera object if it doesn't exist yet and sets its properties.
     * This is called internally when accessing the {@link threeCamera} property.
     */
    buildCamera(): void;
    /**
     * Applies clear flags if this is the active main camera.
     * @param opts Options for applying clear flags
     */
    applyClearFlagsIfIsActiveCamera(opts?: {
        applySkybox: boolean;
    }): void;
    /**
     * Applies this camera's clear flags and related settings to the renderer.
     * This controls how the background is rendered (skybox, solid color, transparent).
     * @param opts Options for applying clear flags
     */
    applyClearFlags(opts?: {
        applySkybox: boolean;
    }): void;
    /**
     * Applies the skybox texture to the scene background.
     */
    applySceneSkybox(): void;
    /**
     * Determines if the background should be transparent when in passthrough AR mode.
     *
     * @param context The current rendering context
     * @returns {boolean} True when in XR on a pass through device where the background should be invisible
     */
    static backgroundShouldBeTransparent(context: Context): boolean;
}

/**
 * Event dispatched when {@link OrbitControls} has reached a target.
 * The `type` detail distinguishes a camera-position target ("camera")
 * from a look-at target ("lookat").
 * NOTE(review): exact dispatch timing is defined by OrbitControls — confirm there.
 */
export declare class CameraTargetReachedEvent extends CustomEvent<{
    controls: OrbitControls;
    type: "camera" | "lookat";
}> {
    constructor(ctrls: OrbitControls, type: "camera" | "lookat");
}

/**
 * [Canvas](https://engine.needle.tools/docs/api/Canvas) is the root component for all UI elements in a scene.
 * Defines the rendering area and manages layout for child UI elements.
 *
 * **Render modes:**
 * - `WorldSpace` - UI exists in 3D space, can be viewed from any angle
 * - `ScreenSpaceOverlay` - UI rendered on top of everything (HUD)
 * - `ScreenSpaceCamera` - UI rendered at a distance from a specific camera
 *
 * **Usage:**
 * All UI components ({@link Button}, {@link Text}, {@link Image}) must be
 * children of a Canvas to render correctly. Multiple canvases can exist
 * in a scene with different settings.
 *
 * **Rendering options:**
 * - `renderOnTop` - Always render above other objects
 * - `depthWrite` - Write to depth buffer (affects occlusion)
 * - `doubleSided` - Render both sides of UI elements
 *
 * @example Create a world-space UI panel
 * ```ts
 * const canvas = panel.getComponent(Canvas);
 * canvas.renderMode = RenderMode.WorldSpace;
 * canvas.doubleSided = true;
 * ```
 *
 * @summary Root component for UI elements, managing layout and rendering settings
 * @category User Interface
 * @group Components
 * @see {@link RenderMode} for rendering options
 * @see {@link RectTransform} for UI layout
 * @see {@link Button} for clickable UI elements
 * @see {@link Text} for UI text rendering
 */
export declare class Canvas extends UIRootComponent implements ICanvas {
    /** Identifies this component as a canvas (see {@link ICanvas}) */
    get isCanvas(): boolean;
    /** Screen-space state of this canvas. NOTE(review): declared as `any` — confirm actual type and meaning. */
    get screenspace(): any;
    /** When enabled, the canvas UI is always rendered above other objects */
    set renderOnTop(val: boolean);
    get renderOnTop(): boolean;
    /** Backing field for {@link renderOnTop} */
    private _renderOnTop;
    /** Whether UI elements write to the depth buffer (affects occlusion) */
    set depthWrite(val: boolean);
    get depthWrite(): boolean;
    /** Backing field for {@link depthWrite} */
    private _depthWrite;
    /** Whether both sides of UI elements are rendered */
    set doubleSided(val: boolean);
    get doubleSided(): boolean;
    /** Backing field for {@link doubleSided} */
    private _doubleSided;
    /** Whether UI elements cast shadows */
    set castShadows(val: boolean);
    get castShadows(): boolean;
    /** Backing field for {@link castShadows} */
    private _castShadows;
    /** Whether UI elements receive shadows */
    set receiveShadows(val: boolean);
    get receiveShadows(): boolean;
    /** Backing field for {@link receiveShadows} */
    private _receiveShadows;
    /** How this canvas is rendered (see {@link RenderMode}) */
    get renderMode(): RenderMode;
    set renderMode(val: RenderMode);
    /** Backing field for {@link renderMode} */
    private _renderMode;
    /** Backing field for {@link rootCanvas} */
    private _rootCanvas;
    /** The root canvas of the hierarchy this canvas belongs to */
    set rootCanvas(val: Canvas);
    get rootCanvas(): Canvas;
    /** Backing field for {@link scaleFactor} */
    private _scaleFactor;
    /** Scale factor applied to this canvas */
    get scaleFactor(): number;
    private set scaleFactor(value);
    /** Camera used in `ScreenSpaceCamera` render mode */
    worldCamera?: Camera;
    /** Distance from the camera at which the canvas is placed in `ScreenSpaceCamera` render mode */
    planeDistance: number;
    awake(): void;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    /** Bound handler for render-settings change events */
    private _boundRenderSettingsChanged;
    private previousParent;
    private _lastMatrixWorld;
    /** Rect transforms registered with this canvas (see {@link registerTransform}) */
    private _rectTransforms;
    /** Register a rect transform with this canvas */
    registerTransform(rt: IRectTransform): void;
    /** Unregister a previously registered rect transform */
    unregisterTransform(rt: IRectTransform): void;
    /** Layout groups registered with this canvas (see {@link registerLayoutGroup}) */
    private _layoutGroups;
    /** Register a layout group for layout updates */
    registerLayoutGroup(group: ILayoutGroup): void;
    /** Unregister a previously registered layout group */
    unregisterLayoutGroup(group: ILayoutGroup): void;
    /** Event receivers registered with this canvas (see {@link registerEventReceiver}) */
    private _receivers;
    /** Register a receiver to be notified of canvas events */
    registerEventReceiver(receiver: ICanvasEventReceiver): void;
    /** Unregister a previously registered event receiver */
    unregisterEventReceiver(receiver: ICanvasEventReceiver): void;
    onEnterXR(args: NeedleXREventArgs): Promise<void>;
    onLeaveXR(args: NeedleXREventArgs): void;
    /** Invoked before this canvas is rendered */
    onBeforeRenderRoutine: () => void;
    /** Invoked after this canvas was rendered */
    onAfterRenderRoutine: () => void;
    private invokeBeforeRenderEvents;
    private handleLayoutUpdates;
    /** Re-applies the render settings (renderOnTop, depthWrite, shadows, ...) to the UI */
    applyRenderSettings(): void;
    private _updateRenderSettingsRoutine?;
    private onRenderSettingsChanged;
    private _updateRenderSettingsDelayed;
    /** The render mode currently in effect (may lag behind {@link renderMode}) — TODO confirm */
    private _activeRenderMode;
    private _lastWidth;
    private _lastHeight;
    private onUpdateRenderMode;
}

/**
 * [CanvasGroup](https://engine.needle.tools/docs/api/CanvasGroup) is a UI component that allows you to control the transparency and interactivity of a group of UI elements.
 * By adjusting the alpha property, you can fade in or out all child UI elements simultaneously.
 * The interactable and blocksRaycasts properties let you enable or disable user interaction for the entire group.
 * @summary Group UI elements to control transparency and interactivity
 * @category User Interface
 * @group Components
 */
export declare class CanvasGroup extends Component implements ICanvasGroup {
    /** Transparency applied to all child UI elements */
    get alpha(): number;
    set alpha(val: number);
    /** Identifies this component as a canvas group (see {@link ICanvasGroup}) */
    get isCanvasGroup(): boolean;
    /** Backing field for {@link alpha} */
    private _alpha;
    /** When false, user interaction is disabled for the whole group */
    interactable: boolean;
    /** When false, UI raycasts pass through this group */
    blocksRaycasts: boolean;
    /** Set when changes still need to be applied to the children */
    private _isDirty;
    private markDirty;
    private applyChangesDelayed;
    private _buffer;
    private applyChangesNow;
}

/**
 * CapsuleCollider represents a capsule-shaped collision volume (cylinder with hemispherical ends).
 * Ideal for character controllers and objects that need a rounded collision shape.
 *
 * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
 *
 * - Example: https://samples.needle.tools/physics-basic
 * - Example: https://samples.needle.tools/physics-playground
 * - Example: https://samples.needle.tools/physics-&-animation
 *
 * @category Physics
 * @group Components
 */
export declare class CapsuleCollider extends Collider {
    /**
     * The center position of the capsule collider relative to the transform's position (local space).
     */
    center: Vector3;
    /**
     * The radius of the capsule's cylindrical body and hemispherical ends.
     */
    radius: number;
    /**
     * The total height of the capsule including both hemispherical ends.
     */
    height: number;
    /**
     * Registers the capsule collider with the physics engine.
     * Called by the engine when the component becomes enabled.
     */
    onEnable(): void;
}

/**
 * Switches the material of objects in the scene when clicked.
 * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
 *
 * Finds all objects in the scene that use `materialToSwitch` and replaces it with `variantMaterial`.
 * Multiple `ChangeMaterialOnClick` components using the same `materialToSwitch` can be combined to create a material selection UI.
 *
 * @see {@link SetActiveOnClick} to toggle visibility of objects when clicked
 * @see {@link PlayAnimationOnClick} to play animations when clicked
 * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
 * @summary Changes the material of objects when clicked
 * @category Everywhere Actions
 * @group Components
 */
export declare class ChangeMaterialOnClick extends Component implements IPointerClickHandler, UsdzBehaviour {
    /**
     * The material that will be switched to the variant material
     */
    materialToSwitch?: Material;
    /**
     * The material that will be switched to
     */
    variantMaterial?: Material;
    /**
     * The duration of the fade effect in seconds (USDZ/Quicklook only)
     * @default 0
     */
    fadeDuration: number;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    onDestroy(): void;
    /** Pointer-event callback: pointer entered one of the affected objects */
    onPointerEnter(_args: PointerEventData): void;
    /** Pointer-event callback: pointer left the affected objects */
    onPointerExit(_: PointerEventData): void;
    /** Pointer-event callback: performs the material switch */
    onPointerClick(args: PointerEventData): void;
    /** Cache for {@link objectsWithThisMaterial} */
    private _objectsWithThisMaterial;
    /** Get all objects in the scene that have the assigned materialToSwitch */
    private get objectsWithThisMaterial();
    private selfModel;
    private targetModels;
    private static _materialTriggersPerId;
    private static _startHiddenBehaviour;
    private static _parallelStartHiddenActions;
    /** USDZ export hook invoked before the USD document is created (see {@link UsdzBehaviour}) */
    beforeCreateDocument(_ext: BehaviorExtension, _context: any): Promise<void>;
    /** USDZ export hook: creates the click/switch behaviours for the given model */
    createBehaviours(_ext: BehaviorExtension, model: USDObject, _context: any): void;
    /** USDZ export hook invoked after the USD document was created */
    afterCreateDocument(ext: BehaviorExtension, _context: any): void;
    private createAndAttachBehaviors;
    private static getMaterialName;
    /** Counter used for variant switching during USDZ export. NOTE(review): inferred from name — confirm. */
    static variantSwitchIndex: number;
    private createVariants;
}

/**
 * Moves an object to the target object's transform when clicked.
 * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
 *
 * @see {@link SetActiveOnClick} to toggle visibility of objects when clicked
 * @see {@link PlayAnimationOnClick} to play animations when clicked
 * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
 * @summary Moves an object to a target transform upon click
 * @category Everywhere Actions
 * @group Components
 */
export declare class ChangeTransformOnClick extends Component implements IPointerClickHandler, UsdzBehaviour {
    /** The object to move. */
    object?: Object3D;
    /** The target object whose transform to move to. */
    target?: Object3D;
    /** The duration of the movement animation in seconds. */
    duration: number;
    /** If true, the motion is relative to the object's current transform instead of moving to the target's absolute position. */
    relativeMotion: boolean;
    /** Handle to the running movement coroutine, if any */
    private coroutine;
    /** Cached target position/rotation/scale for the movement animation */
    private targetPos;
    private targetRot;
    private targetScale;
    onEnable(): void;
    onDisable(): void;
    onDestroy(): void;
    onPointerEnter(): void;
    onPointerExit(): void;
    /** Pointer-event callback: starts moving {@link object} towards {@link target} */
    onPointerClick(args: PointerEventData): void;
    /** Moves towards the target's absolute transform */
    private moveToTarget;
    /** Applies the movement relative to the object's current transform (see {@link relativeMotion}) */
    private moveRelative;
    /** USDZ export hook (see {@link UsdzBehaviour}) */
    beforeCreateDocument(ext: any): void;
}

/**
 * The [CharacterController](https://engine.needle.tools/docs/api/CharacterController) adds a capsule collider and rigidbody to the object, constrains rotation, and provides movement and grounded state.
 * It is designed for typical character movement in 3D environments.
 *
 * The controller automatically:
 * - Creates a {@link CapsuleCollider} if one doesn't exist
 * - Creates a {@link Rigidbody} if one doesn't exist
 * - Locks rotation on all axes to prevent tipping over
 * - Tracks ground contact for jump detection
 *
 * @example Basic character movement
 * ```ts
 * export class MyCharacter extends Behaviour {
 *   @serializable(CharacterController)
 *   controller?: CharacterController;
 *
 *   update() {
 *     const input = this.context.input;
 *     const move = new Vector3();
 *     if (input.isKeyPressed("KeyW")) move.z = 0.1;
 *     if (input.isKeyPressed("KeyS")) move.z = -0.1;
 *     this.controller?.move(move);
 *   }
 * }
 * ```
 *
 * @summary Character Movement Controller
 * @category Character
 * @group Components
 * @see {@link CharacterControllerInput} for ready-to-use input handling
 * @see {@link Rigidbody} for physics configuration
 * @see {@link CapsuleCollider} for collision shape
 */
export declare class CharacterController extends Component {
    /** Center offset of the capsule collider in local space */
    center: Vector3;
    /** Radius of the capsule collider */
    radius: number;
    /** Height of the capsule collider */
    height: number;
    /** Backing field for {@link rigidbody} */
    private _rigidbody;
    /** The Rigidbody used to move the character (created automatically if missing — see class description) */
    get rigidbody(): Rigidbody;
    /** Ground contacts the character currently has — drives {@link isGrounded} */
    private _activeGroundCollisions;
    awake(): void;
    onEnable(): void;
    /**
     * Moves the character by adding the given vector to its position.
     * Movement is applied directly without physics simulation.
     * @param vec The movement vector to apply
     */
    move(vec: Vector3): void;
    /** Physics callback: records a new collision contact */
    onCollisionEnter(col: Collision): void;
    /** Physics callback: removes an ended collision contact */
    onCollisionExit(col: Collision): void;
    /** Returns true if the character is currently touching the ground */
    get isGrounded(): boolean;
    /** Backing field for {@link contactVelocity} */
    private _contactVelocity;
    /**
     * Returns the combined velocity of all objects the character is standing on.
     * Useful for moving platforms - add this to your movement for proper platform riding.
     */
    get contactVelocity(): Vector3;
}

/**
 * CharacterControllerInput handles user input to control a {@link CharacterController}.
 * It supports movement, looking around, jumping, and double jumping.
 *
 * Default controls:
 * - **W/S**: Move forward/backward
 * - **A/D**: Rotate left/right
 * - **Space**: Jump (supports double jump)
 *
 * The component automatically sets animator parameters:
 * - `running` (bool): True when moving
 * - `jumping` (bool): True when starting a jump
 * - `doubleJump` (bool): True during double jump
 * - `falling` (bool): True when falling from height
 *
 * @example Custom input handling
 * ```ts
 * const input = this.gameObject.getComponent(CharacterControllerInput);
 * input?.move(new Vector2(0, 1)); // Move forward
 * input?.jump(); // Trigger jump
 * ```
 *
 * @summary User Input for Character Controller
 * @category Character
 * @group Components
 * @see {@link CharacterController} for the movement controller
 * @see {@link Animator} for animation integration
 */
export declare class CharacterControllerInput extends Component {
    /** The CharacterController to drive with input */
    controller?: CharacterController;
    /** Movement speed multiplier */
    movementSpeed: number;
    /** Rotation speed multiplier */
    rotationSpeed: number;
    /** Impulse force applied when jumping from ground */
    jumpForce: number;
    /** Impulse force applied for the second jump (set to 0 to disable double jump) */
    doubleJumpForce: number;
    /** Optional Animator for character animations */
    animator?: Animator;
    /** NOTE(review): presumably rotates the character to face the movement direction — confirm against implementation */
    lookForward: boolean;
    awake(): void;
    update(): void;
    /** Queue a movement input (y = forward/backward per the class example; x presumably rotation/strafe — confirm) */
    move(move: Vector2): void;
    /** Queue a look/rotation input */
    look(look: Vector2): void;
    /** Trigger a jump (a second call while airborne performs the double jump) */
    jump(): void;
    /** Inputs accumulated for the current frame, consumed by {@link handleInput} */
    private lookInput;
    private moveInput;
    private jumpInput;
    onBeforeRender(): void;
    private _currentSpeed;
    private _currentAngularSpeed;
    /** Scratch object reused across frames */
    private _temp;
    /** Number of jumps since last grounded (for double jump) */
    private _jumpCount;
    private _currentRotation;
    /** Applies the given movement/look/jump input to the {@link controller} */
    handleInput(move: Vector2, look: Vector2, jump: boolean): void;
    private _raycastOptions;
}

/**
 * [ChromaticAberration](https://engine.needle.tools/docs/api/ChromaticAberration) simulates the color fringing effect seen in real-world cameras.
 * It offsets the red, green, and blue color channels to create a distorted, colorful edge around objects.
 * This effect can enhance the visual appeal of scenes by adding a subtle or pronounced chromatic distortion.
 * @summary Chromatic Aberration Post-Processing Effect
 * @category Effects
 * @group Components
 */
export declare class ChromaticAberration extends PostProcessingEffect {
    /** Type name identifier of this effect */
    get typeName(): string;
    /** Strength of the chromatic aberration */
    readonly intensity: VolumeParameter;
    /** Creates the underlying postprocessing effect instance */
    onCreateEffect(): EffectProviderResult;
}

/**
 * The circular buffer class can be used to cache objects that don't need to be created every frame.
 * This structure is used for e.g. Vector3 or Quaternion objects in the engine when calling `getTempVector3` or `getTempQuaternion`.
 *
 * @example Create a circular buffer that caches Vector3 objects. Max size is 10.
 * ```typescript
 * const buffer = new CircularBuffer(() => new Vector3(), 10);
 * const vec = buffer.get();
 * ```
 *
 * @example Create a circular buffer that caches Quaternion objects. Max size is 1000.
 * ```typescript
 * const buffer = new CircularBuffer(() => new Quaternion(), 1000);
 * const quat = buffer.get();
 * ```
 */
export declare class CircularBuffer<T> {
    /** Factory used to create new instances for the cache */
    private _factory;
    /** The cached instances */
    private _cache;
    /** Maximum number of instances kept in the cache */
    private _maxSize;
    /** Index of the next instance to hand out (wraps around) */
    private _index;
    constructor(factory: () => T, maxSize: number);
    /** Returns the next cached instance, creating one via the factory when needed */
    get(): T;
}

/**
 * The ClearFlags enum is used to determine how the camera clears the background
 */
export declare enum ClearFlags {
    /** Don't clear the background */
    None = 0,
    /** Clear the background with a skybox */
    Skybox = 1,
    /** Clear the background with a solid color. The alpha channel of the color determines the transparency */
    SolidColor = 2,
    /** Clear the background with a transparent color. NOTE(review): the member name `Uninitialized` suggests "no explicit clear setting was provided" — confirm which semantics are intended */
    Uninitialized = 4
}

/**
 * Clear all overlay messages from the screen
 */
declare function clearMessages(): void;
// Both public names below are aliases for the same clearMessages function
export { clearMessages as clearBalloonMessages }
export { clearMessages as clearOverlayMessages }

/**
 * [ClickThrough](https://engine.needle.tools/docs/api/ClickThrough) enables pointer events to pass through the 3D canvas to HTML elements positioned behind it.
 * This component dynamically toggles `pointer-events: none` on the canvas when no 3D objects are hit by raycasts, allowing interaction with underlying HTML content.
 *
 * ![](https://cloud.needle.tools/-/media/VeahihyjzpBWf4jHHVnrqw.gif)
 *
 * **How It Works:**
 * The component listens to pointer events and performs raycasts to detect if any 3D objects are under the cursor:
 * - **When 3D objects are hit**: Canvas has `pointer-events: all` (normal 3D interaction)
 * - **When nothing is hit**: Canvas has `pointer-events: none` (clicks pass through to HTML)
 *
 * This creates a seamless experience where users can interact with both 3D objects and underlying HTML elements
 * through the same canvas area, depending on what's under the cursor.
 *
 * **Key Features:**
 * - Automatic pointer event routing based on 3D hit detection
 * - Works with both mouse and touch input
 * - Supports transparent or semi-transparent canvases
 * - Can be enabled via component or HTML attribute
 * - No performance impact when disabled
 * - Handles multi-touch scenarios correctly
 *
 * **Common Use Cases:**
 * - Overlaying 3D elements on top of HTML content (headers, hero sections)
 * - Creating "floating" 3D objects that don't block underlying UI
 * - Mixed 2D/3D interfaces where both need to be interactive
 * - Transparent 3D overlays on websites
 * - Product showcases with clickable text/buttons beneath the 3D view
 * - Interactive storytelling with mixed HTML and 3D content
 *
 * **Setup Options:**
 *
 * **Option 1: Component-based** (programmatic setup)
 * ```ts
 * // Add to any GameObject in your scene
 * scene.addComponent(ClickThrough);
 * ```
 *
 * **Option 2: HTML attribute** (declarative setup, recommended)
 * ```html
 * <!-- Enable clickthrough via HTML attribute -->
 * <needle-engine clickthrough></needle-engine>
 *
 * <!-- Dynamically toggle clickthrough -->
 * <needle-engine id="engine" clickthrough="true"></needle-engine>
 * <script>
 *   // Disable clickthrough
 *   document.getElementById('engine').setAttribute('clickthrough', 'false');
 * </script>
 * ```
 *
 * @example Basic transparent canvas over HTML
 * ```html
 * <style>
 *   .container { position: relative; }
 *   needle-engine { position: absolute; top: 0; left: 0; }
 *   .html-content { position: absolute; top: 0; left: 0; }
 * </style>
 *
 * <div class="container">
 *   <div class="html-content">
 *     <h1>Click me!</h1>
 *     <button>I'm clickable through the 3D canvas</button>
 *   </div>
 *   <needle-engine clickthrough src="scene.glb"></needle-engine>
 * </div>
 * ```
 *
 * @example Programmatic setup with toggle
 * ```ts
 * const clickthrough = scene.addComponent(ClickThrough);
 *
 * // Toggle clickthrough based on some condition
 * function setInteractiveMode(mode: 'html' | '3d' | 'mixed') {
 *   switch(mode) {
 *     case 'html':
 *       clickthrough.enabled = false; // NOTE: identical to the '3d' case — with the component disabled the canvas keeps capturing all pointer events
 *       break;
 *     case '3d':
 *       clickthrough.enabled = false; // 3D only
 *       break;
 *     case 'mixed':
 *       clickthrough.enabled = true;  // Smart switching
 *       break;
 *   }
 * }
 * ```
 *
 * @example 3D header with clickable logo beneath
 * ```html
 * <!-- 3D animated object over a clickable logo -->
 * <div class="header">
 *   <a href="/" class="logo">My Brand</a>
 *   <needle-engine clickthrough src="header-animation.glb"></needle-engine>
 * </div>
 * ```
 *
 * **Technical Notes:**
 * - The component uses `pointer-events` CSS property for passthrough
 * - Touch events are handled separately with a special timing mechanism
 * - Only pointer ID 0 is tracked to avoid multi-touch issues
 * - The component stores the previous `pointer-events` value and restores it on disable
 * - Raycasts are performed on both `pointerdown` and `pointermove` events
 *
 * **Troubleshooting:**
 * - Ensure your canvas has a transparent background if you want to see HTML beneath
 * - Make sure 3D objects have colliders or are raycastable
 * - If clicks aren't passing through, check that no invisible objects are blocking raycasts
 * - HTML elements must be properly positioned (z-index) behind the canvas
 *
 * **Live Example:**
 * - [3D Over HTML Sample on Stackblitz](https://stackblitz.com/~/github.com/needle-engine/sample-3d-over-html)
 *
 * @see {@link Context.input} - The input system used for pointer event detection
 * @see {@link Context.physics.raycast} - Used to detect 3D object hits
 * @see {@link ObjectRaycaster} - Controls which objects are raycastable
 * @see {@link PointerEvents} - For more complex pointer interaction handling
 * @see {@link NEPointerEvent} - The pointer event type used internally
 *
 * @summary Enables pointer events to pass through canvas to HTML elements behind it
 * @category Web
 * @group Components
 * @component
 */
export declare class ClickThrough extends Component {
    /** Stores the canvas' previous `pointer-events` CSS value so it can be restored in onDisable */
    private _previousPointerEvents;
    onEnable(): void;
    onDisable(): void;
    onPointerEnter(): void;
    /** Raycast-based pointer handler that toggles `pointer-events` on the canvas depending on whether a 3D object was hit */
    private onPointerEvent;
    /** Tracks whether the current touch hit any 3D object (touch input uses a special timing mechanism, see class docs) */
    private _touchDidHitAnything;
    private onTouchStart;
    private onTouchEnd;
}

/**
 * @category Animation and Sequencing
 * @see {@link TimelineAssetModel} for the data structure of a timeline asset, which can be played using the PlayableDirector component.
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */
export declare enum ClipExtrapolation {
    /** No extrapolation outside the clip's time range */
    None = 0,
    /** Hold the boundary value outside the clip's range (presumably first/last frame — confirm) */
    Hold = 1,
    /** Loop the clip outside its range */
    Loop = 2,
    /** Alternate forward/backward playback outside the clip's range */
    PingPong = 3,
    /** Continue evaluating past the clip boundary — TODO confirm exact semantics */
    Continue = 4
}

/** Associates a resolved {@link AnimationClip} with the {@link Object3D} it animates */
export declare type ClipMapping = {
    /** the object this clip is for */
    node: Object3D;
    /** the animationclip we resolve from a json ptr */
    clip: AnimationClip;
};

/**
 * @category Animation and Sequencing
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */
export declare type ClipModel = {
    /** Start time of the clip on the timeline (presumably seconds — confirm against PlayableDirector) */
    start: number;
    /** End time of the clip on the timeline */
    end: number;
    /** Length of the clip */
    duration: number;
    /** Playback speed multiplier for the clip */
    timeScale: number;
    /** The clip's payload; concrete shape depends on the track type */
    asset: any | AudioClipModel | ControlClipModel | AnimationClipModel;
    /** Offset into the source asset at which playback starts (presumably — confirm) */
    clipIn: number;
    /** Blend-in duration at the start of the clip */
    easeInDuration: number;
    /** Blend-out duration at the end of the clip */
    easeOutDuration: number;
    /** Extrapolation applied before the clip's start */
    preExtrapolationMode: ClipExtrapolation;
    /** Extrapolation applied after the clip's end */
    postExtrapolationMode: ClipExtrapolation;
    /** When true the clip is played in reverse (presumably — confirm) */
    reversed?: boolean;
};

/**
 * Collider is the base class for all physics collision shapes.
 * Colliders define the physical boundary of objects for collision detection.
 *
 * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
 *
 * **Usage with Rigidbody:**
 * - Add a collider to define collision shape
 * - Add a {@link Rigidbody} to the same or parent object for physics simulation
 * - Without Rigidbody, collider acts as static geometry
 *
 * **Trigger mode:**
 * Set `isTrigger = true` for detection without physical collision.
 * Triggers fire `onTriggerEnter`, `onTriggerStay`, `onTriggerExit` events.
 *
 * **Collision filtering:**
 * Use `membership` and `filter` arrays to control which objects collide.
 *
 * @example Add a box collider to an object
 * ```ts
 * const collider = myObject.addComponent(BoxCollider);
 * collider.size = new Vector3(1, 2, 1);
 * collider.center = new Vector3(0, 1, 0);
 * ```
 *
 * - Example: https://samples.needle.tools/physics-basic
 * - Example: https://samples.needle.tools/physics-playground
 *
 * @summary Physics collider base class
 * @category Physics
 * @group Components
 * @see {@link BoxCollider} for box-shaped colliders
 * @see {@link SphereCollider} for sphere-shaped colliders
 * @see {@link CapsuleCollider} for capsule-shaped colliders
 * @see {@link MeshCollider} for mesh-based colliders
 * @see {@link Rigidbody} for physics simulation
 * @link https://engine.needle.tools/samples/?room=needle272&overlay=samples&tag=physics
 * @link https://engine.needle.tools/samples-uploads/basic-physics/?showcolliders
 */
export declare abstract class Collider extends Component implements ICollider {
    /* Excluded from this release type: isCollider */
    /**
     * The {@link Rigidbody} that this collider is attached to. This handles the physics simulation for this collider.
     */
    attachedRigidbody: Rigidbody | null;
    /**
     * When `true` the collider will not be used for collision detection but will still trigger events.
     * Trigger colliders can trigger events when other colliders enter their space, without creating a physical response/collision.
     */
    isTrigger: boolean;
    /**
     * The physics material that defines physical properties of the collider such as friction and bounciness.
     */
    sharedMaterial?: PhysicsMaterial;
    /**
     * The layers that this collider belongs to. Used for filtering collision detection.
     * @default [0]
     */
    membership: number[];
    /**
     * The layers that this collider will interact with. Used for filtering collision detection.
     */
    filter?: number[];
    /* Excluded from this release type: awake */
    /* Excluded from this release type: start */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /* Excluded from this release type: onDestroy */
    /**
     * Returns the underlying physics body from the physics engine.
     * Only available if the component is enabled and active in the scene.
     */
    get body(): any;
    /**
     * Updates the collider's properties in the physics engine.
     * Use this when you've changed collider properties and need to sync with the physics engine.
     * Note: declared as a function-valued property (bound function), not a prototype method.
     */
    updateProperties: () => void;
    /**
     * Updates the physics material in the physics engine.
     * Call this after changing the sharedMaterial property.
     */
    updatePhysicsMaterial(): void;
}

/**
 * Holds information about a collision event. Includes a list of contact points and the colliders involved
 */
export declare class Collision {
    /** The contact points of this collision. Contains information about positions, normals, distance, friction, impulse... */
    readonly contacts: ContactPoint[];
    /* Excluded from this release type: __constructor */
    /** the gameobject this collision event belongs to (e.g. if onCollisionEnter is called then `me` is the same as `this.gameObject`) */
    readonly me: IGameObject;
    /** Backing field for {@link collider} */
    private _collider;
    /** the other collider the collision happened with */
    get collider(): ICollider;
    /** Backing field for {@link gameObject} */
    private _gameObject;
    /** the other object the collision happened with */
    get gameObject(): IGameObject;
    /** the other rigidbody we hit, null if none attached */
    get rigidBody(): IRigidbody | null;
}

/** How collisions are detected for a rigidbody (names follow common physics-engine conventions) */
export declare enum CollisionDetectionMode {
    /** Discrete detection — checked once per step; very fast objects may pass through thin colliders */
    Discrete = 0,
    /** Continuous detection — presumably enables CCD to prevent fast objects from tunneling (confirm against the physics backend) */
    Continuous = 1
}

/** A color with an alpha channel (value range not specified here — confirm 0..1 vs 0..255 at the usage site) */
declare type Color4 = {
    /** red channel */
    r: number;
    /** green channel */
    g: number;
    /** blue channel */
    b: number;
    /** alpha channel */
    a: number;
};

/**
 * [ColorAdjustments](https://engine.needle.tools/docs/api/ColorAdjustments) allows you to modify the overall color properties of the rendered scene, including post-exposure, contrast, hue shift, and saturation.
 * These adjustments can be used to enhance the visual aesthetics of the scene or to achieve specific artistic effects.
 * @summary Color Adjustments Post-Processing Effect
 * @category Effects
 * @group Components
 */
export declare class ColorAdjustments extends PostProcessingEffect {
    /** The registered type name of this effect */
    get typeName(): string;
    /**
     * Whether values for contrast, hueshift or saturation are remapped to a different range.
     */
    remap: boolean;
    /** Exposure adjustment applied to the rendered image */
    readonly postExposure: VolumeParameter;
    /**
     * Range -1 to 1, where 0 is the default value, -1 is the lowest contrast and 1 is the highest contrast.
     * @default 0
     */
    readonly contrast: VolumeParameter;
    /** Hue shift adjustment (range presumably mirrors contrast when {@link remap} is enabled — confirm) */
    readonly hueShift: VolumeParameter;
    /** Saturation adjustment */
    readonly saturation: VolumeParameter;
    init(): void;
    /** Creates the underlying postprocessing effect instance */
    onCreateEffect(): EffectProviderResult;
}

/** Particle system module that colors particles based on their speed */
export declare class ColorBySpeedModule {
    /** When false this module has no effect */
    enabled: boolean;
    /** Speed range over which {@link color} is sampled (presumably x = min, y = max — confirm) */
    range: Vector2;
    /** Gradient sampled by particle speed */
    color: MinMaxGradient;
    /**
     * Evaluates the gradient for the given velocity.
     * @param vel Particle velocity
     * @param lerpFactor Interpolation factor for min/max gradient evaluation (presumably — confirm)
     * @param color Receives the evaluated color (written in place; the method returns void)
     */
    evaluate<T extends Vector3 | Vector3_2>(vel: T, lerpFactor: number, color: Vector4 | Vector4_2): void;
}

/** A single key in a color gradient */
declare type ColorKey = {
    /** Time of this key (presumably normalized 0..1 — confirm) */
    time: number;
    /** Color value at this key */
    color: Color4;
};

/** Particle system module that colors particles over their lifetime */
export declare class ColorOverLifetimeModule {
    /** When false this module has no effect */
    enabled: boolean;
    /** Gradient sampled over the particle's lifetime */
    color: MinMaxGradient;
}

/** TypeSerializer implementation for color values */
declare class ColorSerializer extends TypeSerializer {
    constructor();
    /** Deserializes raw data into a Color or RGBAColor; the declared return type allows yielding nothing when the data is not handled */
    onDeserialize(data: any): Color | RGBAColor | void;
    /** Serializes a color value; the declared return type allows yielding nothing when the data is not handled */
    onSerialize(data: any): any | void;
}

/** Shared {@link ColorSerializer} instance exported by the engine */
export declare const colorSerializer: ColorSerializer;

/**
 * Utility method to check if two materials were created from the same glTF material
 * @param obj1 First material to compare
 * @param obj2 Second material to compare
 * @returns True when both materials originate from the same glTF source material
 */
export declare function compareAssociation<T extends Material>(obj1: T, obj2: T): boolean;

/**
 * Needle Engine components are the main building blocks of the Needle Engine.
 * Derive from {@link Behaviour} to implement your own using the provided lifecycle methods.
 * Components can be added to any {@link Object3D} using {@link addComponent} or {@link GameObject.addComponent}.
 *
 * **Component lifecycle event methods:**
 * {@link awake}, {@link start}, {@link onEnable}, {@link onDisable}, {@link onDestroy}, {@link earlyUpdate}, {@link update}, {@link lateUpdate}, {@link onBeforeRender}, {@link onAfterRender}.
 *
 * **XR event methods:**
 * {@link onEnterXR}, {@link onLeaveXR}, {@link onUpdateXR}, {@link onXRControllerAdded} and {@link onXRControllerRemoved}.
 *
 * **Input event methods:**
 * {@link onPointerDown}, {@link onPointerUp}, {@link onPointerEnter}, {@link onPointerExit} and {@link onPointerMove}.
 *
 * @example
 * ```typescript
 * import { Behaviour } from "@needle-tools/engine";
 * export class MyComponent extends Behaviour {
 *  start() {
 *     console.log("Hello World", this.gameObject.name);
 *  }
 *  update() {
 *    console.log("Frame", this.context.time.frame);
 *  }
 * }
 * ```
 *
 * @group Components
 */
declare abstract class Component implements IComponent, EventTarget, Partial<INeedleXRSessionEventReceiver>, Partial<IPointerEventHandler> {
    /* Excluded from this release type: isComponent */
    /**
     * Get the original component type name before minification (available if the component is registered in the TypeStore)
     */
    get [$componentName](): string | undefined;
    /** Backing field for {@link context} */
    private __context;
    /**
     * The context this component belongs to, providing access to the runtime environment
     * including physics, timing utilities, camera, and scene
     */
    get context(): Context;
    set context(context: Context);
    /**
     * Shorthand accessor for the current scene from the context
     * @returns The scene this component belongs to
     */
    get scene(): Scene;
    /**
     * The layer value of the GameObject this component is attached to
     * Used for visibility and physics filtering
     */
    get layer(): number;
    /**
     * The name of the GameObject this component is attached to
     * Used for debugging and finding objects
     */
    get name(): string;
    /** Backing field for {@link name} */
    private __name?;
    set name(str: string);
    /**
     * The tag of the GameObject this component is attached to
     * Used for categorizing objects and efficient lookup
     */
    get tag(): string;
    set tag(str: string);
    /**
     * Indicates whether the GameObject is marked as static
     * Static objects typically don't move and can be optimized by the engine
     */
    get static(): boolean;
    set static(value: boolean);
    /**
     * Checks if this component is currently active (enabled and part of an active GameObject hierarchy)
     * Components that are inactive won't receive lifecycle method calls
     * @returns True if the component is enabled and all parent GameObjects are active
     */
    get activeAndEnabled(): boolean;
    /** Internal: whether this component itself is active */
    private get __isActive();
    /** Internal: whether this component's GameObject hierarchy is active */
    private get __isActiveInHierarchy();
    private set __isActiveInHierarchy(value);
    /**
     * Reference to the GameObject this component is attached to
     * This is a three.js Object3D with additional GameObject functionality
     */
    gameObject: GameObject;
    /**
     * Unique identifier for this component instance,
     * used for finding and tracking components
     */
    guid: string;
    /**
     * Identifier for the source asset that created this component.
     * For example, URL to the glTF file this component was loaded from
     */
    sourceId?: SourceIdentifier;
    /**
     * Called when this component needs to remap guids after an instantiate operation.
     * @param guidsMap Mapping from old guids to newly generated guids
     */
    resolveGuids?(guidsMap: GuidsMap): void;
    /**
     * Called once when the component becomes active for the first time.
     * This is the first lifecycle callback to be invoked
     */
    awake(): void;
    /**
     * Called every time the component becomes enabled or active in the hierarchy.
     * Invoked after {@link awake} and before {@link start}.
     */
    onEnable(): void;
    /**
     * Called every time the component becomes disabled or inactive in the hierarchy.
     * Invoked when the component or any parent GameObject becomes invisible
     */
    onDisable(): void;
    /**
     * Called when the component is destroyed.
     * Use for cleanup operations like removing event listeners
     */
    onDestroy(): void;
    /**
     * Called when a field decorated with @validate() is modified.
     * @param prop The name of the field that was changed
     */
    onValidate?(prop?: string): void;
    /**
     * Called when the context's pause state changes.
     * @param isPaused Whether the context is currently paused
     * @param wasPaused The previous pause state
     */
    onPausedChanged?(isPaused: boolean, wasPaused: boolean): void;
    /**
     * Called once at the beginning of the first frame after the component is enabled.
     * Use for initialization that requires other components to be awake.
     */
    start?(): void;
    /**
     * Called at the beginning of each frame before regular updates.
     * Use for logic that needs to run before standard update callbacks.
     */
    earlyUpdate?(): void;
    /**
     * Called once per frame during the main update loop.
     * The primary location for frame-based game logic.
     */
    update?(): void;
    /**
     * Called after all update functions have been called.
     * Use for calculations that depend on other components being updated first.
     */
    lateUpdate?(): void;
    /**
     * Called immediately before the scene is rendered.
     * @param frame Current XRFrame if in an XR session, null otherwise
     */
    onBeforeRender?(frame: XRFrame | null): void;
    /**
     * Called after the scene has been rendered.
     * Use for post-processing or UI updates that should happen after rendering
     */
    onAfterRender?(): void;
    /**
     * Called when this component's collider begins colliding with another collider.
     * @param col Information about the collision that occurred
     */
    onCollisionEnter?(col: Collision): any;
    /**
     * Called when this component's collider stops colliding with another collider.
     * @param col Information about the collision that ended
     */
    onCollisionExit?(col: Collision): any;
    /**
     * Called each frame while this component's collider is colliding with another collider
     * @param col Information about the ongoing collision
     */
    onCollisionStay?(col: Collision): any;
    /**
     * Called when this component's trigger collider is entered by another collider
     * @param col The collider that entered this trigger
     */
    onTriggerEnter?(col: ICollider): any;
    /**
     * Called each frame while another collider is inside this component's trigger collider
     * @param col The collider that is inside this trigger
     */
    onTriggerStay?(col: ICollider): any;
    /**
     * Called when another collider exits this component's trigger collider
     * @param col The collider that exited this trigger
     */
    onTriggerExit?(col: ICollider): any;
    /**
     * Determines if this component supports a specific XR mode
     * @param mode The XR session mode to check support for
     * @returns True if the component supports the specified mode
     */
    supportsXR?(mode: XRSessionMode): boolean;
    /**
     * Called before an XR session is requested
     * Use to modify session initialization parameters
     * @param mode The XR session mode being requested
     * @param args The session initialization parameters that can be modified
     */
    onBeforeXR?(mode: XRSessionMode, args: XRSessionInit): void;
    /**
     * Called when this component joins an XR session or becomes active in a running session
     * @param args Event data for the XR session
     */
    onEnterXR?(args: NeedleXREventArgs): void;
    /**
     * Called each frame while this component is active in an XR session
     * @param args Event data for the current XR frame
     */
    onUpdateXR?(args: NeedleXREventArgs): void;
    /**
     * Called when this component exits an XR session or becomes inactive during a session
     * @param args Event data for the XR session
     */
    onLeaveXR?(args: NeedleXREventArgs): void;
    /**
     * Called when an XR controller is connected or when this component becomes active
     * in a session with existing controllers
     * @param args Event data for the controller that was added
     */
    onXRControllerAdded?(args: NeedleXRControllerEventArgs): void;
    /**
     * Called when an XR controller is disconnected or when this component becomes inactive
     * during a session with controllers
     * @param args Event data for the controller that was removed
     */
    onXRControllerRemoved?(args: NeedleXRControllerEventArgs): void;
    /**
     * Called when a pointer enters this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerEnter?(args: PointerEventData): any;
    /**
     * Called when a pointer moves while over this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerMove?(args: PointerEventData): any;
    /**
     * Called when a pointer exits this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerExit?(args: PointerEventData): any;
    /**
     * Called when a pointer button is pressed while over this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerDown?(args: PointerEventData): any;
    /**
     * Called when a pointer button is released while over this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerUp?(args: PointerEventData): any;
    /**
     * Called when a pointer completes a click interaction with this component's GameObject
     * @param args Data about the pointer event
     */
    onPointerClick?(args: PointerEventData): any;
    /**
     * Starts a coroutine that can yield to wait for events.
     * Coroutines allow for time-based sequencing of operations without blocking.
     * Coroutines are based on generator functions, a JavaScript language feature.
     *
     * @param routine Generator function to start
     * @param evt Event to register the coroutine for (default: FrameEvent.Update)
     * @returns The generator function that can be used to stop the coroutine
     * @example
     * Time-based sequencing of operations
     * ```ts
     * *myCoroutine() {
     *   yield WaitForSeconds(1); // wait for 1 second
     *   yield WaitForFrames(10); // wait for 10 frames
     *   yield new Promise(resolve => setTimeout(resolve, 1000)); // wait for a promise to resolve
     * }
     * ```
     * @example
     * Coroutine that logs a message every 5 frames
     * ```ts
     * onEnable() {
     *   this.startCoroutine(this.myCoroutine());
     * }
     * private *myCoroutine() {
     *   while(this.activeAndEnabled) {
     *     console.log("Hello World", this.context.time.frame);
     *     // wait for 5 frames
     *     for(let i = 0; i < 5; i++) yield;
     *   }
     * }
     * ```
     */
    startCoroutine(routine: Generator, evt?: FrameEvent): Generator;
    /**
     * Stops a coroutine that was previously started with startCoroutine
     * @param routine The routine to be stopped
     * @param evt The frame event the routine was registered with
     */
    stopCoroutine(routine: Generator, evt?: FrameEvent): void;
    /**
     * Checks if this component has been destroyed
     * @returns True if the component or its GameObject has been destroyed
     */
    get destroyed(): boolean;
    /**
     * Destroys this component and removes it from its GameObject
     * After destruction, the component will no longer receive lifecycle callbacks
     */
    destroy(): void;
    /* Excluded from this release type: __didAwake */
    /* Excluded from this release type: __didStart */
    /* Excluded from this release type: __didEnable */
    /* Excluded from this release type: __isEnabled */
    /* Excluded from this release type: __destroyed */
    /* Excluded from this release type: __internalDidAwakeAndStart */
    /* Excluded from this release type: __constructor */
    /* Excluded from this release type: __internalNewInstanceCreated */
    /* Excluded from this release type: _internalInit */
    /* Excluded from this release type: __internalAwake */
    /* Excluded from this release type: __internalStart */
    /* Excluded from this release type: __internalEnable */
    /* Excluded from this release type: __internalDisable */
    /* Excluded from this release type: __internalDestroy */
    /**
     * Controls whether this component is enabled
     * Disabled components don't receive lifecycle callbacks
     */
    get enabled(): boolean;
    set enabled(val: boolean);
    /**
     * Gets the position of this component's GameObject in world space.
     * Note: This is equivalent to calling `this.gameObject.worldPosition`
     */
    get worldPosition(): Vector3;
    /**
     * Sets the position of this component's GameObject in world space
     * @param val The world position vector to set
     */
    set worldPosition(val: Vector3);
    /**
     * Sets the position of this component's GameObject in world space using individual coordinates
     * @param x X-coordinate in world space
     * @param y Y-coordinate in world space
     * @param z Z-coordinate in world space
     */
    setWorldPosition(x: number, y: number, z: number): void;
    /**
     * Gets the rotation of this component's GameObject in world space as a quaternion
     * Note: This is equivalent to calling `this.gameObject.worldQuaternion`
     */
    get worldQuaternion(): Quaternion;
    /**
     * Sets the rotation of this component's GameObject in world space using a quaternion
     * @param val The world rotation quaternion to set
     */
    set worldQuaternion(val: Quaternion);
    /**
     * Sets the rotation of this component's GameObject in world space using quaternion components
     * @param x X component of the quaternion
     * @param y Y component of the quaternion
     * @param z Z component of the quaternion
     * @param w W component of the quaternion
     */
    setWorldQuaternion(x: number, y: number, z: number, w: number): void;
    /**
     * Gets the rotation of this component's GameObject in world space as Euler angles (in radians)
     */
    get worldEuler(): Euler;
    /**
     * Sets the rotation of this component's GameObject in world space using Euler angles (in radians)
     * @param val The world rotation Euler angles to set
     */
    set worldEuler(val: Euler);
    /**
     * Gets the rotation of this component's GameObject in world space as Euler angles (in degrees)
     * Note: This is equivalent to calling `this.gameObject.worldRotation`
     */
    get worldRotation(): Vector3;
    /**
     * Sets the rotation of this component's GameObject in world space using Euler angles (in degrees)
     * @param val The world rotation vector to set (in degrees)
     */
    set worldRotation(val: Vector3);
    /**
     * Sets the rotation of this component's GameObject in world space using individual Euler angles
     * @param x X-axis rotation
     * @param y Y-axis rotation
     * @param z Z-axis rotation
     * @param degrees Whether the values are in degrees (true) or radians (false)
     */
    setWorldRotation(x: number, y: number, z: number, degrees?: boolean): void;
    /** Static scratch vector shared by the {@link forward} getter (presumably reused to avoid allocations — all instances share it) */
    private static _forward;
    /**
     * Gets the forward direction vector (0,0,-1) of this component's GameObject in world space
     */
    get forward(): Vector3;
    /** Static scratch vector shared by the {@link right} getter */
    private static _right;
    /**
     * Gets the right direction vector (1,0,0) of this component's GameObject in world space
     */
    get right(): Vector3;
    /** Static scratch vector shared by the {@link up} getter */
    private static _up;
    /**
     * Gets the up direction vector (0,1,0) of this component's GameObject in world space
     */
    get up(): Vector3;
    /**
     * Storage for event listeners registered to this component
     * @private
     */
    private _eventListeners;
    /**
     * Registers an event listener for the specified event type
     * @param type The event type to listen for
     * @param listener The callback function to execute when the event occurs
     */
    addEventListener<T extends Event>(type: string, listener: (evt: T) => any): void;
    /**
     * Removes a previously registered event listener
     * @param type The event type the listener was registered for
     * @param listener The callback function to remove
     */
    removeEventListener<T extends Event>(type: string, listener: (arg: T) => any): void;
    /**
     * Dispatches an event to all registered listeners
     * @param evt The event object to dispatch
     * @returns Always returns false (standard implementation of EventTarget)
     */
    dispatchEvent(evt: Event): boolean;
}
// Both exports refer to the same class: `Behaviour` is the public name users derive from
export { Component as Behaviour }
export { Component }

/** Init object for creating a component: a partial of T excluding undefined members, functions, and engine-internal state (see the wrapped helper types) */
export declare type ComponentInit<T> = Partial<NoInternalNeedleEngineState<NoInternals<NoUndefinedNoFunctions<T>>>>;

/** Event names dispatched when a component is added to or about to be removed from an object */
declare type ComponentLifecycleEvent = "component-added" | "removing-component";

/** Static registry for global {@link ComponentLifecycleEvent} callbacks.
 * NOTE(review): the "Lifecylce" spelling in the method names is a typo that is part of the public API and must be kept for backwards compatibility.
 */
export declare class ComponentLifecycleEvents {
    private static eventListeners;
    /** Register a callback for the given lifecycle event */
    static addComponentLifecylceEventListener(evt: ComponentLifecycleEvent | (string & {}), cb: (data: IComponent) => void): void;
    /** Remove a previously registered lifecycle callback */
    static removeComponentLifecylceEventListener(evt: ComponentLifecycleEvent | (string & {}), cb: (data: IComponent) => void): void;
    /** Dispatch the given lifecycle event to all registered callbacks with the affected component */
    static dispatchComponentLifecycleEvent(evt: ComponentLifecycleEvent, data: IComponent): void;
}

/** Layout description of a single XR controller input component:
 * button/axis gamepad index mapping and visual response states.
 * NOTE(review): shape appears to match the webxr-input-profiles component format — confirm
 */
declare type ComponentMap = {
    type: ComponentType;
    rootNodeName?: string;
    gamepadIndices?: {
        [key in GamepadKey]?: number;
    };
    visualResponses?: {
        [key: string]: {
            states: Array<string>;
        };
    };
};

/** Serializes component references as `{ guid }` objects and resolves guids back to instances on deserialization (see findObjectForGuid). */
declare class ComponentSerializer extends TypeSerializer {
    constructor();
    /** @returns a `{ guid }` reference for the given component data, or undefined if it can not be serialized */
    onSerialize(data: any, _context: SerializationContext): {
        guid: any;
    } | undefined;
    /** Resolves serialized data back to a component/object instance within the given context */
    onDeserialize(data: any, context: SerializationContext): any;
    /** Searches the hierarchy below `root` for the object with the given guid */
    findObjectForGuid(guid: string, root: Object3D): any;
}

/** Shared {@link ComponentSerializer} instance. */
export declare const componentSerializer: ComponentSerializer;

/** The kind of input component on a XR controller layout (see {@link ComponentMap}) */
declare type ComponentType = "button" | "thumbstick" | "squeeze" | "touchpad";

/** A single condition evaluated by animator transitions. */
export declare type Condition = {
    /** name of the animator parameter that is evaluated */
    parameter: string;
    /** how the parameter is compared against the threshold (see {@link AnimatorConditionMode}) */
    mode: AnimatorConditionMode;
    /** value the parameter is compared against */
    threshold: number;
};

/** Events regarding the websocket connection (e.g. when the connection opens) */
export declare enum ConnectionEvents {
    /** Dispatched with information about the connection when it starts */
    ConnectionInfo = "connection-start-info"
}

/** A class constructor (possibly abstract) producing instances of `T`. */
export declare type Constructor<T> = abstract new (...args: any[]) => T;

/** A concrete (non-abstract, newable) class constructor producing instances of `T`. */
export declare type ConstructorConcrete<T> = new (...args: any[]) => T;

/**
 * Holds information about physics contacts
 */
export declare class ContactPoint {
    private readonly _point;
    private readonly _normal;
    private readonly _tangentVelocity;
    /** the distance of the collision point */
    readonly distance: number;
    /** the impulse velocity */
    readonly impulse: number;
    /** the friction value of the contact as reported by the physics engine */
    readonly friction: number;
    /** worldspace point */
    get point(): Vector3;
    /** worldspace normal */
    get normal(): Vector3;
    /** worldspace tangent */
    get tangentVelocity(): Vector3;
    /* Excluded from this release type: __constructor */
}

/**
 * [ContactShadows](https://engine.needle.tools/docs/api/ContactShadows) renders proximity-based soft shadows on flat surfaces.
 * Ideal for products or objects that need visual grounding without real-time shadows.
 * Produces soft, blurred shadows that hug the ground, giving a sense of contact and depth.
 *
 * ![](https://cloud.needle.tools/-/media/87bPTNXHcsbV-An-oSEvHQ.gif)
 *
 * **Setup options:**
 * 1. `ContactShadows.auto(context)` - Auto-create and fit to scene
 * 2. Add component manually to control position and scale
 * 3. HTML attribute: `<needle-engine contactshadows="0.7">`
 *
 * **Properties:**
 * - `opacity` / `darkness` - Shadow intensity
 * - `blur` - Softness of shadow edges
 * - Object scale defines shadow area size
 *
 * **Debug:** Use `?debugcontactshadows` URL parameter.
 *
 *
 * @example Auto-create contact shadows
 * ```ts
 * const shadows = ContactShadows.auto(this.context);
 * shadows.opacity = 0.5;
 * shadows.darkness = 0.8;
 * ```
 *
 * @summary Display contact shadows on the ground
 * @category Rendering
 * @group Components
 * @see {@link ShadowCatcher} for real-time shadows from lights (more accurate, higher performance cost)
 * @see {@link Light} for real-time shadow casting
 * @see {@link Renderer} for material/rendering control
 * @link https://engine.needle.tools/samples/contact-shadows for a demo of contact shadows
 */
export declare class ContactShadows extends Component {
    private static readonly _instances;
    /**
     * Create contact shadows for the scene. Automatically fits the shadows to the scene.
     * The instance of contact shadows will be created only once.
     * @param context The context to create the contact shadows in.
     * @param params Optional parameters forwarded to the initial fit (see {@link fitShadows}).
     * @returns The instance of the contact shadows.
     */
    static auto(context?: Context, params?: FitParameters): ContactShadows;
    /**
     * When enabled the contact shadows component will be created to fit the whole scene.
     * @default false
     */
    autoFit: boolean;
    /**
     * Darkness of the shadows.
     * @default 0.5
     */
    darkness: number;
    /**
     * Opacity of the shadows.
     * @default 0.5
     */
    opacity: number;
    /**
     * Blur of the shadows.
     * @default 4.0
     */
    blur: number;
    /**
     * When enabled objects will not be visible below the shadow plane
     * @default false
     */
    occludeBelowGround: boolean;
    /**
     * When enabled the backfaces of objects will cast shadows as well.
     * @default true
     */
    backfaceShadows: boolean;
    /**
     * The minimum size of the shadows box
     * @default undefined
     */
    minSize?: Partial<Vec3>;
    /**
     * When enabled the shadows will not be updated automatically. Use `needsUpdate()` to update the shadows manually.
     * This is useful when you want to update the shadows only when the scene changes.
     * @default false
     */
    manualUpdate: boolean;
    /**
     * Call this method to update the shadows manually. The update will be done in the next frame.
     */
    set needsUpdate(val: boolean);
    get needsUpdate(): boolean;
    private _needsUpdate;
    /** All shadow objects are parented to this object.
     * The gameObject itself should not be transformed because we want the ContactShadows object e.g. also have a GroundProjectedEnv component
     * in which case ContactShadows scale would affect the projection
     **/
    private readonly shadowsRoot;
    /* Internal rendering resources: camera, render targets, planes and materials used to render and blur the shadow texture */
    private shadowCamera?;
    private readonly shadowGroup;
    private renderTarget?;
    private renderTargetBlur?;
    private plane?;
    private occluderMesh?;
    private blurPlane?;
    private planeMaterial?;
    private depthMaterial?;
    private horizontalBlurMaterial?;
    private verticalBlurMaterial?;
    private textureSize;
    /**
     * Call to fit the shadows to the scene.
     * @param params Optional parameters controlling how the shadow bounds are fitted.
     */
    fitShadows(params?: FitParameters): void;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: start */
    onEnable(): void;
    /* Excluded from this release type: onDestroy */
    /* Excluded from this release type: onBeforeRender */
    /** Blurs the rendered shadow texture (uses the horizontal and vertical blur materials; see the `blur` property) */
    private blurShadow;
    /** Enforces the configured `minSize` on the shadow bounds */
    private applyMinSize;
}

/**
 * The Needle Engine context is the main access point that holds all the data and state of a Needle Engine application.
 * It can be used to access the {@link Context.scene}, {@link Context.renderer}, {@link Context.mainCamera}, {@link Context.input}, {@link Context.physics}, {@link Context.time}, {@link Context.connection} (networking), and more.
 *
 * The context is automatically created when using the `<needle-engine>` web component.
 *
 * @example Accessing the context from a [component](https://engine.needle.tools/docs/api/Behaviour):
 * ```typescript
 * import { Behaviour } from "@needle-tools/engine";
 * import { Mesh, BoxGeometry, MeshBasicMaterial } from "three";
 * export class MyScript extends Behaviour {
 *   start() {
 *     console.log("Hello from MyScript");
 *     this.context.scene.add(new Mesh(new BoxGeometry(), new MeshBasicMaterial()));
 *   }
 * }
 * ```
 *
 * @example Accessing the context from a [hook](https://engine.needle.tools/docs/scripting.html#hooks) without a component e.g. from a javascript module or svelte or react component.
 *
 * ```typescript
 * import { onStart } from "@needle-tools/engine";
 *
 * onStart((context) => {
 *   console.log("Hello from onStart hook");
 *   context.scene.add(new Mesh(new BoxGeometry(), new MeshBasicMaterial()));
 * });
 * ```
 *
 */
export declare class Context implements IContext {
    private static _defaultTargetFramerate;
    /** When a new context is created this is the framerate that will be used by default */
    static get DefaultTargetFrameRate(): number | undefined;
    /** When a new context is created this is the framerate that will be used by default */
    static set DefaultTargetFrameRate(val: number | undefined);
    private static _defaultWebglRendererParameters;
    /** The default parameters that will be used when creating a new WebGLRenderer.
     * Modify in global context to change the default parameters for all new contexts.
     * @example
     * ```typescript
     * import { Context } from "@needle-tools/engine";
     * Context.DefaultWebGLRendererParameters.antialias = false;
     * ```
     */
    static get DefaultWebGLRendererParameters(): WebGLRendererParameters;
    /** The needle engine version */
    get version(): string;
    /** The currently active context. Only set during the update loops */
    static get Current(): Context;
    /** @internal this property should not be set by user code */
    static set Current(context: Context);
    static get All(): Context[];
    /** The name of the context */
    name: string;
    /** An alias for the context */
    alias: string | undefined | null;
    /** When the renderer or camera are managed by an external process (e.g. when running in r3f context).
     * When this is false you are responsible to call update(timestamp, frame) yourself.
     * It is also currently assumed that rendering is performed by an external process
     * */
    isManagedExternally: boolean;
    /** set to true to pause the update loop. You can receive an event for it in your components.
     * Note that script updates will not be called when paused */
    isPaused: boolean;
    /** When enabled the application will run while not visible on the page */
    runInBackground: boolean;
    /**
     * Set to the target framerate you want your application to run in (you can use ?stats to check the fps)
     * Set to undefined if you want to run at the maximum framerate
     */
    targetFrameRate?: number | {
        value?: number;
    };
    /** Use a higher number for more accurate physics simulation.
     * When undefined physics steps will be 1 for mobile devices and 5 for desktop devices
     * Set to 0 to disable physics updates
     * TODO: changing physics steps is currently not supported because then forces that we get from the character controller and rigidbody et al are not correct anymore - this needs to be properly tested before making this configurable
     */
    private physicsSteps?;
    /** used to append to loaded assets */
    hash?: string;
    /** The `<needle-engine>` web component */
    domElement: NeedleEngineWebComponent | HTMLElement;
    appendHTMLElement(element: HTMLElement): HTMLElement;
    get resolutionScaleFactor(): number;
    /** use to scale the resolution up or down of the renderer. default is 1 */
    set resolutionScaleFactor(val: number);
    private _resolutionScaleFactor;
    private _boundingClientRectFrame;
    private _boundingClientRect;
    private _domX;
    private _domY;
    /** update bounding rects + domX, domY */
    private calculateBoundingClientRect;
    /** The width of the `<needle-engine>` element on the website */
    get domWidth(): number;
    /** The height of the `<needle-engine>` element on the website */
    get domHeight(): number;
    /** the X position of the `<needle-engine>` element on the website */
    get domX(): number;
    /** the Y position of the `<needle-engine>` element on the website */
    get domY(): number;
    /**
     * Is a XR session currently active and presenting?
     * @returns true if the xr renderer is currently presenting
     */
    get isInXR(): boolean;
    /** shorthand for `NeedleXRSession.active`
     * Automatically set by NeedleXRSession when a XR session is active
     * @returns the active XR session or null if no session is active
     * */
    xr: NeedleXRSession | null;
    /**
     * Shorthand for `this.xr?.mode`. AR or VR
     * @returns the current XR session mode (immersive-vr or immersive-ar)
     */
    get xrSessionMode(): XRSessionMode | undefined;
    /** Shorthand for `this.xrSessionMode === "immersive-vr"`
     * @returns true if a webxr VR session is currently active.
     */
    get isInVR(): boolean;
    /**
     * Shorthand for `this.xrSessionMode === "immersive-ar"`
     * @returns true if a webxr AR session is currently active.
     */
    get isInAR(): boolean;
    /** If a XR session is active and in pass through mode (immersive-ar on e.g. Quest)
     * @returns true if the XR session is in pass through mode
     */
    get isInPassThrough(): boolean;
    /** access the raw `XRSession` object (shorthand for `context.renderer.xr.getSession()`). For more control use `NeedleXRSession.active` */
    get xrSession(): XRSession | null;
    /** @returns the latest XRFrame (if a XRSession is currently active)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRFrame
     */
    get xrFrame(): XRFrame | null;
    /** @returns the current WebXR camera while the WebXRManager is active (shorthand for `context.renderer.xr.getCamera()`) */
    get xrCamera(): WebXRArrayCamera | undefined;
    private _xrFrame;
    /**
     * The AR overlay element is used to display 2D HTML elements while a AR session is active.
     */
    get arOverlayElement(): HTMLElement;
    /**
     * Current event of the update cycle (e.g. `FrameEvent.EarlyUpdate` or `FrameEvent.OnBeforeRender`)
     */
    get currentFrameEvent(): FrameEvent;
    private _currentFrameEvent;
    /**
     * The scene contains all objects in the hierarchy and is automatically rendered by the context every frame.
     */
    scene: Scene;
    /**
     * The renderer is used to render the scene. It is automatically created when the context is created.
     */
    renderer: WebGLRenderer;
    /**
     * The effect composer can be used to render postprocessing effects. If assigned then it will automatically render the scene every frame.
     */
    composer: EffectComposer | EffectComposer_2 | null;
    /* Excluded from this release type: scripts */
    /* Excluded from this release type: scripts_pausedChanged */
    /* Excluded from this release type: scripts_earlyUpdate */
    /* Excluded from this release type: scripts_update */
    /* Excluded from this release type: scripts_lateUpdate */
    /* Excluded from this release type: scripts_onBeforeRender */
    /* Excluded from this release type: scripts_onAfterRender */
    /* Excluded from this release type: scripts_WithCorroutines */
    /* Excluded from this release type: scripts_immersive_vr */
    /* Excluded from this release type: scripts_immersive_ar */
    /* Excluded from this release type: coroutines */
    /** callbacks called once after the context has been created */
    readonly post_setup_callbacks: Function[];
    /** called every frame at the beginning of the frame (after component start events and before earlyUpdate) */
    readonly pre_update_callbacks: Function[];
    /** called every frame before rendering (after all component events) */
    readonly pre_render_callbacks: Array<(frame: XRFrame | null) => void>;
    /** called every frame after rendering (after all component events) */
    readonly post_render_callbacks: Function[];
    /** called every frame before update (this list is emptied every frame) */
    readonly pre_update_oneshot_callbacks: Function[];
    /* Excluded from this release type: new_scripts */
    /* Excluded from this release type: new_script_start */
    /* Excluded from this release type: new_scripts_pre_setup_callbacks */
    /* Excluded from this release type: new_scripts_post_setup_callbacks */
    /* Excluded from this release type: new_scripts_xr */
    /**
     * The **main camera component** of the scene - this camera is used for rendering.
     * Use `setCurrentCamera` for updating the main camera.
     */
    mainCameraComponent: ICamera | undefined;
    /**
     * The main camera of the scene - this camera is used for rendering
     * Use `setCurrentCamera` for updating the main camera.
     */
    get mainCamera(): Camera_2;
    /** Set the main camera of the scene. If set to null the camera of the {@link mainCameraComponent} will be used - this camera is used for rendering */
    set mainCamera(cam: Camera_2 | null);
    private _mainCamera;
    private _fallbackCamera;
    /** access application state (e.g. if all audio should be muted) */
    application: Application;
    /** access animation mixer used by components in the scene */
    animations: AnimationsRegistry;
    /** access timings (current frame number, deltaTime, timeScale, ...) */
    time: Time;
    /** access input data (e.g. click or touch events) */
    input: Input;
    /** access physics related methods (e.g. raycasting). To access the physics engine use `context.physics.engine` */
    physics: Physics;
    /** access networking methods (use it to send or listen to messages or join a networking backend) */
    connection: NetworkConnection;
    /**  @deprecated AssetDatabase is deprecated */
    assets: AssetDatabase;
    /** The main light in the scene */
    mainLight: ILight | null;
    /** @deprecated Use sceneLighting */
    get rendererData(): RendererData;
    /** Access the scene lighting manager to control lighting settings in the context */
    sceneLighting: RendererData;
    addressables: Addressables;
    lightmaps: ILightDataRegistry;
    players: PlayerViewManager;
    /** Access the LODs manager to control LOD behavior in the context */
    readonly lodsManager: LODsManager;
    /** Access the needle menu to add or remove buttons to the menu element */
    readonly menu: NeedleMenu_2;
    readonly accessibility: AccessibilityManager;
    /**
     * Checks if the context is fully created and ready
     * @returns true if the context is fully created and ready
     */
    get isCreated(): boolean;
    /**
     * The source identifier(s) of the root scene(s) loaded into this context.
     * When using `<needle-engine>` web component this will be the `src` attribute(s).
     * @returns The source identifier of the root scene
     */
    get rootSourceId(): SourceIdentifier | undefined;
    private _needsUpdateSize;
    private _isCreated;
    private _isCreating;
    private _isVisible;
    private _stats;
    constructor(args?: ContextArgs);
    /**
     * Calling this function will dispose the current renderer and create a new one which will then be assigned to the context. It can be used to create a new renderer with custom WebGLRendererParameters.
     * **Note**: Instead you can also modify the static `Context.DefaultWebGLRendererParameters` before the context is created.
     * **Note**: This method is recommended because it re-uses a potentially already existing canvas element. This is necessary to keep input event handlers working (e.g. components like OrbitControls subscribe to input events on the canvas)
     * @returns {WebGLRenderer} the newly created renderer
     */
    createNewRenderer(params?: WebGLRendererParameters): WebGLRenderer;
    private _intersectionObserver;
    private internalOnUpdateVisible;
    private _disposeCallbacks;
    /** will request a renderer size update the next render call (will call updateSize the next update) */
    requestSizeUpdate(): void;
    /** Clamps the renderer max resolution. If undefined the max resolution is not clamped. Default is undefined */
    maxRenderResolution?: Vec2;
    /** Control the renderer devicePixelRatio.
     * **Options**
     * - `auto` - Needle Engine automatically sets the pixel ratio to the current window.devicePixelRatio.
     * - `manual` - Needle Engine will not change the renderer pixel ratio. You can set it manually.
     * - `number` - Needle Engine will set the pixel ratio to the given number. The change will be applied to the renderer and the composer (if used) at the end of the current frame.
     */
    get devicePixelRatio(): "auto" | "manual" | number;
    set devicePixelRatio(val: "auto" | "manual" | number);
    private _devicePixelRatio;
    /**
     * Update the renderer and canvas size. This is also automatically called when a DOM size change is detected.
     */
    updateSize(force?: boolean): void;
    /**
     * Update the camera aspect ratio or orthographic camera size. This is automatically called when a DOM size change is detected.
     */
    updateAspect(camera: PerspectiveCamera | OrthographicCamera, width?: number, height?: number): void;
    /** This will recreate the whole needle engine context and dispose the whole scene content
     * All content will be reloaded (loading times might be faster due to browser caches)
     * All scripts will be recreated */
    recreate(): void;
    private _originalCreationArgs?;
    /** @deprecated use create. This method will be removed in a future version */
    onCreate(opts?: ContextCreateArgs): Promise<boolean>;
    /* Excluded from this release type: create */
    private onUnhandledRejection;
    /** Dispatches an error */
    private onError;
    /**
     * Clears the context and destroys all scenes and objects in the scene.
     * The ContextCleared event is called at the end.
     * This is automatically called when e.g. the `src` attribute changes on `<needle-engine>`
     * or when the web component is removed from the DOM
     */
    clear(): void;
    /**
     * Dispose all allocated resources and clears the scene. This is automatically called e.g. when the `<needle-engine>` component is removed from the DOM.
     */
    dispose(): void;
    /**@deprecated use dispose()  */
    onDestroy(): void;
    private internalOnDestroy;
    /* Excluded from this release type: registerCoroutineUpdate */
    /* Excluded from this release type: unregisterCoroutineUpdate */
    /* Excluded from this release type: stopAllCoroutinesFrom */
    private _cameraStack;
    /** Change the main camera */
    setCurrentCamera(cam: ICamera): void;
    /**
     * Remove the camera from the mainCamera stack (if it has been set before with `setCurrentCamera`)
     */
    removeCamera(cam?: ICamera | null): void;
    private readonly _onBeforeRenderListeners;
    private readonly _onAfterRenderListeners;
    /** Use to subscribe to onBeforeRender events on threejs objects.
     * @link https://threejs.org/docs/#api/en/core/Object3D.onBeforeRender
     */
    addBeforeRenderListener(target: Object3D, callback: OnRenderCallback): void;
    /** Remove callback from three `onBeforeRender` event (if it has been added with `addBeforeRenderListener(...)`)
     * @link https://threejs.org/docs/#api/en/core/Object3D.onBeforeRender
     */
    removeBeforeRenderListener(target: Object3D, callback: OnRenderCallback): void;
    /**
     * Subscribe to onAfterRender events on threejs objects
     * @link https://threejs.org/docs/#api/en/core/Object3D.onAfterRender
     */
    addAfterRenderListener(target: Object3D, callback: OnRenderCallback): void;
    /**
     * Remove from onAfterRender events on threejs objects
     * @link https://threejs.org/docs/#api/en/core/Object3D.onAfterRender
     */
    removeAfterRenderListener(target: Object3D, callback: OnRenderCallback): void;
    private _createRenderCallbackWrapper;
    private _requireDepthTexture;
    private _requireColorTexture;
    private _renderTarget?;
    private _isRendering;
    /** @returns true while the WebGL renderer is rendering (between onBeforeRender and onAfterRender events) */
    get isRendering(): boolean;
    setRequireDepth(val: boolean): void;
    setRequireColor(val: boolean): void;
    get depthTexture(): DepthTexture | null;
    get opaqueColorTexture(): Texture | null;
    /** @returns true if the `<needle-engine>` DOM element is visible on screen (`context.domElement`) */
    get isVisibleToUser(): boolean;
    private _needsVisibleUpdate;
    private _lastStyleComputedResult;
    private _createId;
    private internalOnCreate;
    private readonly rootSceneSourceIdentifiers;
    private internalLoadInitialContent;
    /** Sets the animation loop.
     * Can not be done while creating the context or when disposed
     **/
    restartRenderLoop(): boolean;
    private _renderlooperrors;
    /** Performs a full update step including script callbacks, rendering (unless isManagedExternally is set to false) and post render callbacks */
    update(timestamp: DOMHighResTimeStamp, frame?: XRFrame | null): void;
    /** Call to **manually** perform physics steps.
     * By default the context uses the `physicsSteps` property to perform steps during the update loop
     * If you just want to increase the accuracy of physics you can instead set the `physicsSteps` property to a higher value
     * */
    updatePhysics(steps: number): void;
    /**
     * Set a rect or dom element. The camera center will be moved to the center of the rect.
     * This is useful if you have Needle Engine embedded in a HTML layout and while you want the webgl background to fill e.g. the whole screen you want to move the camera center to free space.
     * For that you can simply pass in the rect or HTML div that you want the camera to center on.
     * @param rect The focus rect or null to disable
     * @param settings Optional settings for the focus rect. These will override the `focusRectSettings` property
     */
    setCameraFocusRect(rect: FocusRect | null, settings?: Partial<FocusRectSettings>): void;
    get focusRect(): FocusRect | null;
    get focusRectSize(): null | {
        x: number;
        y: number;
        width: number;
        height: number;
    };
    /** Settings when a focus rect is set. Use `setCameraFocusRect(...)` to do so.
     * This can be used to offset the renderer center e.g. to a specific DOM element.
     */
    readonly focusRectSettings: FocusRectSettings;
    private _focusRect;
    private _lastTimestamp;
    private _accumulatedTime;
    private _dispatchReadyAfterFrame;
    private internalStep;
    private internalOnBeforeRender;
    private internalUpdatePhysics;
    private internalOnRender;
    private internalOnAfterRender;
    private readonly _tempClearColor;
    private readonly _tempClearColor2;
    renderNow(camera?: Camera_2): boolean;
    private _contextRestoreTries;
    private handleRendererContextLost;
    /** returns true if we should return out of the frame loop */
    private _wasPaused;
    private onHandlePaused;
    private evaluatePaused;
    private renderRequiredTextures;
    private executeCoroutines;
}

/** Arguments for constructing a {@link Context}. All fields are optional. */
export declare class ContextArgs {
    name?: string;
    /** for debugging only */
    alias?: string;
    /** the hash is used as a seed when initially loading the scene files */
    hash?: string;
    /** when true the context will not check if it's visible in the viewport and always update and render */
    runInBackground?: boolean;
    /** the DOM element the context belongs to or is inside of (this does not have to be the canvas. use renderer.domElement if you want to access the dom canvas) */
    domElement?: HTMLElement | null;
    /** externally owned renderer */
    renderer?: WebGLRenderer;
    /** externally owned camera */
    camera?: Camera_2;
    /** externally owned scene */
    scene?: Scene;
}

/** Callback signature for {@link ContextEvent} registrations (see {@link ContextRegistry.registerCallback}) */
export declare type ContextCallback = (evt: ContextEventArgs) => void | Promise<any> | IComponent;

/** Arguments for creating the content of a {@link Context} (the files to load and loading callbacks). */
export declare class ContextCreateArgs {
    /** list of glTF or GLB files to load */
    files: Array<string>;
    abortSignal?: AbortSignal;
    /** called when loading a provided glTF file started */
    onLoadingStart?: (index: number, file: string) => void;
    /** called on update for each loaded glTF file */
    onLoadingProgress?: (args: LoadingProgressArgs) => void;
    /** called after a glTF file has finished loading */
    onLoadingFinished?: (index: number, file: string, glTF: Model | null) => void;
}

/** The various events that can be dispatched by a Needle Engine {@link IContext} instance
 */
export declare enum ContextEvent {
    /** called once when the context is registered to the registry, the context is not fully initialized at this point */
    ContextRegistered = "ContextRegistered",
    /** called before the first glb is loaded, can be used to initialize physics engine, is awaited */
    ContextCreationStart = "ContextCreationStart",
    /** Called when the context content has been created, before the first frame. This callback may be called multiple times during the lifetime of the context, e.g. every time when the context is created and cleared */
    ContextCreated = "ContextCreated",
    /** Called after the first frame has been rendered after creation */
    ContextFirstFrameRendered = "ContextFirstFrameRendered",
    /** Called before the context gets destroyed */
    ContextDestroying = "ContextDestroying",
    /** Called when the context has been destroyed */
    ContextDestroyed = "ContextDestroyed",
    /** Called when the context could not find a camera during creation */
    MissingCamera = "MissingCamera",
    /** Called before the context is being cleared (all objects in the scene are being destroyed and state is reset) */
    ContextClearing = "ContextClearing",
    /** Called after the context has been cleared (all objects in the scene have been destroyed and state has been reset) */
    ContextCleared = "ContextCleared"
}

/** Arguments passed to {@link ContextCallback} handlers. */
export declare type ContextEventArgs = {
    /** the event that occurred */
    event: ContextEvent;
    /** the context that raised the event */
    context: IContext;
    /** the loaded models, when applicable for the event */
    files?: LoadedModel[];
};

/** Use to register to various Needle Engine context events and to get access to all current instances
 * e.g. when being created in the DOM
 * @example
 * ```typescript
 * import { NeedleEngine } from "./engine/engine_context_registry.js";
 * NeedleEngine.addContextCreatedCallback((evt) => {
 *    console.log("Context created", evt.context);
 * });
 * ```
 * */
declare class ContextRegistry {
    /** The currently active (rendering) Needle Engine context */
    static get Current(): IContext;
    /** @internal */
    static set Current(ctx: IContext);
    /** Returns the array of all registered Needle Engine contexts. Do not modify */
    static get All(): Context[];
    /** All currently registered Needle Engine contexts. Do not modify */
    static Registered: IContext[];
    /* Excluded from this release type: register */
    /* Excluded from this release type: unregister */
    private static _callbacks;
    /**
     * Register a callback to be called when the given event occurs
     */
    static registerCallback(evt: ContextEvent, callback: ContextCallback): void;
    /** Unregister a callback */
    static unregisterCallback(evt: ContextEvent, callback: ContextCallback): void;
    /* Excluded from this release type: dispatchCallback */
    /**
     * Register a callback to be called when a context is created
     */
    static addContextCreatedCallback(callback: ContextCallback): void;
    /**
     * Register a callback to be called when a context is destroyed
     */
    static addContextDestroyedCallback(callback: ContextCallback): void;
}
export { ContextRegistry }
export { ContextRegistry as NeedleEngine }

/**
 * Model data of a timeline control clip.
 * @category Animation and Sequencing
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */
export declare type ControlClipModel = {
    /** the controlled object, either resolved or referenced by name/guid string — confirm which */
    sourceObject: string | Object3D;
    /** whether the clip controls activation of the source object — presumably its enabled/visible state; confirm */
    controlActivation: boolean;
    /** whether the clip updates a nested PlayableDirector on the source object — confirm */
    updateDirector: boolean;
};

/** Axis-capable input components of a WebXR controller, named after the
 * component identifiers of the WebXR Gamepads Module "xr-standard" mapping.
 * (NOTE(review): the previous comment here described select events and did not match this type.)
 */
declare type ControllerAxes = "xr-standard-thumbstick" | "xr-standard-touchpad";

/** Callback signature for a controller-changed event (controller added or removed).
 * Access the controller via `args.controller`; the `args.change` property indicates whether the controller was added or removed.
 */
export declare type ControllerChangedEvt = (args: NeedleXRControllerEventArgs) => void;

/** Timeline track handler for control clips — resolves clip source objects and
 * evaluates which clip model is active at a given time.
 * @see {@link ControlClipModel}
 */
export declare class ControlTrackHandler extends TrackHandler {
    /** The control clip models belonging to this track */
    models: Array<Models.ClipModel>;
    /** Nested PlayableDirectors resolved from the clip models (null entries when unresolved) */
    timelines: Array<PlayableDirector | null>;
    /** Resolves the sourceObject references of all clip models against the given context's scene */
    resolveSourceObjects(_context: Context): void;
    // remembers the previously active clip model, presumably to detect activation changes — TODO confirm
    private _previousActiveModel;
    /** Evaluates the track at the given time (presumably in seconds — TODO confirm unit) */
    evaluate(time: number): void;
}

/** @deprecated Use `Graphics.copyTexture` instead */
export declare function copyTexture(texture: Texture): Texture;

/** Bookkeeping data for a coroutine started on a component. */
export declare type CoroutineData = {
    /** The component that owns the coroutine */
    comp: IComponent;
    /** The main generator being stepped */
    main: Generator;
    /** Additional generators chained to the main one — TODO confirm how they are scheduled */
    chained?: Array<Generator>;
};

/**
 * Configuration options for creating an AnimatorController
 */
declare type CreateAnimatorControllerOptions = {
    /** Should each animation state loop */
    looping?: boolean;
    /** Set to false to disable generating transitions between animation clips */
    autoTransition?: boolean;
    /** Duration in seconds for transitions between states (used when {@link autoTransition} is enabled) */
    transitionDuration?: number;
};

export declare function createMotion(name: string, id?: InstantiateIdProvider): Motion;

/* Excluded from this release type: CreateTrackFunction */

/**
 * [CursorFollow](https://engine.needle.tools/docs/api/CursorFollow) makes an object smoothly follow the cursor or touch position in 3D space.
 * The component tracks pointer movement and updates the object's position to follow it, with optional damping for smooth motion.
 *
 * ![](https://cloud.needle.tools/-/media/GDspQGC_kB85Bc9IyEtr9Q.gif)
 *
 * **How It Works:**
 * The component creates a ray from the camera through the cursor position and places the object along that ray.
 * By default, it maintains the object's initial distance from the camera, creating a natural cursor-following effect
 * that works consistently regardless of camera movement.
 *
 * **Key Features:**
 * - Smooth cursor following with configurable damping
 * - Works with both mouse and touch input
 * - Can follow cursor across the entire page or just within the canvas
 * - Maintains consistent distance from camera by default
 * - Optional surface snapping using raycasts
 * - Responds to camera movement automatically
 *
 * **Common Use Cases:**
 * - Interactive 3D cursors or pointers
 * - Look-at effects combined with {@link LookAtConstraint}
 * - Floating UI elements that track cursor
 * - Interactive product showcases
 * - 3D header effects and hero sections
 * - Virtual laser pointers in XR experiences
 *
 * @example Basic cursor follow with smooth damping
 * ```ts
 * const follower = new Object3D();
 * follower.position.set(0, 0, -5); // Initial position 5 units from camera
 * follower.addComponent(CursorFollow, {
 *   damping: 0.2,        // Smooth following with 200ms damping
 *   keepDistance: true,  // Maintain initial distance
 *   useFullPage: true    // Track cursor across entire page
 * });
 * scene.add(follower);
 * ```
 *
 * @example Surface-snapping cursor with raycast
 * ```ts
 * const cursor = new Object3D();
 * cursor.addComponent(CursorFollow, {
 *   snapToSurface: true,  // Snap to surfaces in the scene
 *   keepDistance: false,  // Don't maintain distance when snapping
 *   damping: 0.1          // Quick, responsive movement
 * });
 * scene.add(cursor);
 * ```
 *
 * @example Instant cursor following (no damping)
 * ```ts
 * gameObject.addComponent(CursorFollow, {
 *   damping: 0,           // Instant movement
 *   useFullPage: false    // Only track within canvas
 * });
 * ```
 *
 * @example Interactive 3D header that looks at cursor
 * ```ts
 * const character = loadModel("character.glb");
 * const lookTarget = new Object3D();
 * lookTarget.addComponent(CursorFollow, { damping: 0.3 });
 * character.addComponent(LookAtConstraint, { target: lookTarget });
 * scene.add(lookTarget, character);
 * ```
 *
 * - Example: [Look At Cursor sample](https://engine.needle.tools/samples/look-at-cursor-interactive-3d-header/) - Combines CursorFollow with LookAt for an interactive 3D header
 *
 * @see {@link PointerEvents} - For more complex pointer interaction handling
 * @see {@link DragControls} - For dragging objects in 3D space
 * @see {@link OrbitControls} - For camera controls that work alongside CursorFollow
 * @see {@link Context.input} - The input system that provides cursor position
 * @see {@link Context.physics.raycastFromRay} - Used when snapToSurface is enabled
 *
 * @summary Makes objects follow the cursor/touch position in 3D space
 * @category Interactivity
 * @category Web
 * @group Components
 * @component
 */
export declare class CursorFollow extends Component {
    /** The component's name ("CursorFollow") */
    static readonly NAME = "CursorFollow";
    /**
     * Damping factor controlling how smoothly the object follows the cursor (in seconds).
     *
     * This value determines the "lag" or smoothness of the following motion:
     * - `0`: Instant movement, no damping (object snaps directly to cursor position)
     * - `0.1-0.2`: Quick, responsive following with slight smoothing
     * - `0.3-0.5`: Noticeable smooth trailing effect
     * - `1.0+`: Slow, heavily damped movement
     *
     * The damping uses delta time, so the movement speed is framerate-independent and
     * provides consistent behavior across different devices.
     *
     * **Tip:** For look-at effects, values between 0.2-0.4 typically feel most natural.
     * For cursor indicators, 0.1 or less provides better responsiveness.
     *
     * @default 0
     */
    damping: number;
    /**
     * Whether the object should track the cursor across the entire webpage or only within the canvas.
     *
     * **When `true` (default):**
     * - The object follows the cursor anywhere on the page, even outside the canvas bounds
     * - Perfect for look-at effects where you want continuous tracking
     * - Great for embedded 3D elements that should feel aware of the whole page
     * - Example: A 3D character in a hero section that watches the cursor as you scroll
     *
     * **When `false`:**
     * - The object only follows the cursor when it's inside the Needle Engine canvas
     * - Useful for contained experiences where the 3D element shouldn't react to external cursor movement
     * - Better for multi-canvas scenarios or when you want isolated 3D interactions
     *
     * **Note:** When enabled, the component listens to `window.pointermove` events to track the
     * full-page cursor position. When disabled, it uses the context's input system which is
     * canvas-relative.
     *
     * @see {@link Context.input.mousePositionRC} for canvas-relative cursor position
     * @default true
     */
    useFullPage: boolean;
    /**
     * Whether to maintain the object's initial distance from the camera while following the cursor.
     *
     * **When `true` (default):**
     * - The object stays at a constant distance from the camera, moving in a spherical arc around it
     * - Creates a natural "floating at cursor position" effect
     * - The object's depth remains consistent as you move the cursor around
     * - Perfect for cursors, pointers, or look-at targets
     *
     * **When `false`:**
     * - The object's distance can change based on where the cursor projects in 3D space
     * - More useful when combined with {@link snapToSurface} to follow surface geometry
     * - Can create unusual depth behavior if not carefully configured
     *
     * **How it works:**
     * On the first update, the component measures the distance from the object to the camera.
     * This initial distance is then maintained throughout the object's lifetime (unless {@link updateDistance} is called).
     * The object moves along a ray from the camera through the cursor, staying at this fixed distance.
     *
     * @see {@link updateDistance} to manually recalculate the distance
     * @default true
     */
    keepDistance: boolean;
    /**
     * When enabled, the object snaps to the surfaces of other objects in the scene using raycasting.
     *
     * **How it works:**
     * After positioning the object at the cursor location, a raycast is performed backwards toward the camera.
     * If the ray hits any surface, the object is moved to that hit point, effectively "snapping" to the surface.
     *
     * **Use cases:**
     * - 3D paint or decal placement tools
     * - Surface markers or waypoints
     * - Interactive object placement in AR/VR
     * - Cursor that follows terrain or mesh surfaces
     *
     * **Important notes:**
     * - Requires objects in the scene to have colliders for raycasting to work
     * - Works best with {@link keepDistance} set to `false` to allow depth changes
     * - Can be combined with {@link damping} for smooth surface following
     * - The raycast uses the physics system's raycast functionality
     *
     * **Debug mode:**
     * Add `?debugcursor` to your URL to visualize the raycast hits with green debug lines.
     *
     * @see {@link Context.physics.raycastFromRay} for the underlying raycast implementation
     * @see {@link keepDistance} should typically be false when using surface snapping
     * @default false
     */
    snapToSurface: boolean;
    // cached camera-to-object distance maintained while keepDistance is enabled; recalculated by updateDistance()
    private _distance;
    /**
     * Manually recalculates the distance between the object and the camera.
     *
     * By default, the distance is calculated once when the component starts and then maintained
     * when {@link keepDistance} is enabled. Use this method to update the reference distance
     * if the camera or object has moved significantly.
     *
     * **Use cases:**
     * - After teleporting the camera or object
     * - When switching between different camera positions
     * - After zoom operations that change the desired following distance
     * - Dynamically adjusting the cursor's depth in response to user input
     *
     * @param force - If `true`, forces a recalculation even if {@link keepDistance} is enabled and distance was already set
     *
     * @example Recalculate distance after camera movement
     * ```ts
     * const cursorFollow = gameObject.getComponent(CursorFollow);
     * camera.position.set(0, 0, 10); // Move camera
     * cursorFollow?.updateDistance(true); // Update the reference distance
     * ```
     */
    updateDistance(force?: boolean): void;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    // last known cursor position — presumably in normalized device coordinates; confirm
    private _ndc_x;
    private _ndc_y;
    // window "pointermove" handler used when useFullPage is enabled (see useFullPage docs)
    private _onPointerMove;
    /* Excluded from this release type: lateUpdate */
}

/**
 * Available CSS cursor types that can be applied to the canvas.
 * @link https://developer.mozilla.org/en-US/docs/Web/CSS/cursor
 */
export declare type CursorTypeName = "auto" | "default" | "none" | "context-menu" | "help" | "pointer" | "progress" | "wait" | "cell" | "crosshair" | "text" | "vertical-text" | "alias" | "copy" | "move" | "no-drop" | "not-allowed" | "grab" | "grabbing" | "all-scroll" | "col-resize" | "row-resize" | "n-resize" | "e-resize" | "s-resize" | "w-resize" | "nw-resize" | "se-resize" | "sw-resize" | "ew-resize" | "ns-resize" | "nesw-resize" | "nwse-resize" | "zoom-in" | "zoom-out";

/**
 * Custom branding for the QuickLook overlay, used by {@link USDZExporter}.
 */
export declare class CustomBranding {
    /** The call to action button text. If not set, the button will close the QuickLook overlay. */
    callToAction?: string;
    /** The title of the overlay. */
    checkoutTitle?: string;
    /** The subtitle of the overlay. */
    checkoutSubtitle?: string;
    /** If assigned, the call to action button in QuickLook will open the URL. Otherwise it will just close QuickLook. */
    callToActionURL?: string;
}

/* Excluded from this release type: CustomLoader */

/** Callback that can provide a custom loader for a resource.
 * Return a `CustomLoader` (or a promise for one) to take over loading of the given url/mimetype;
 * returning null/undefined/void presumably falls back to the default loading — TODO confirm.
 */
declare type CustomLoaderCallback = (args: {
    context: Context;
    url: string;
    mimetype: NeedleMimetype;
}) => CustomLoader | Promise<CustomLoader> | null | undefined | void;

/** Result of loading a custom (non-glTF) model — shape mirrors the three.js GLTF result. */
export declare type CustomModel = {
    /** Animation clips contained in the model */
    animations: AnimationClip[];
    /** The main scene of the model */
    scene: Object3D;
    /** All scenes contained in the model */
    scenes: Object3D[];
};

/** Raw shader material with engine-managed uniforms (view/projection matrices, camera position,
 * main light data, spherical harmonics) that are updated before rendering — TODO confirm full uniform set.
 */
export declare class CustomShader extends RawShaderMaterial {
    // identifies the source asset this shader originated from
    private identifier;
    private onBeforeRenderSceneCallback;
    clone(): this;
    constructor(identifier: SourceIdentifier, ...args: any[]);
    dispose(): void;
    // per-instance uniform names resolved for this shader:
    private _sphericalHarmonicsName;
    private _objToWorldName;
    private _worldToObjectName;
    // shared (static) uniform values and their per-instance uniform names:
    private static viewProjection;
    private static _viewProjectionValues;
    private _viewProjectionName;
    private static viewMatrix;
    private static _viewMatrixValues;
    private _viewMatrixName;
    private static _worldSpaceCameraPosName;
    private static _worldSpaceCameraPos;
    private static _mainLightColor;
    private static _mainLightPosition;
    private static _lightData;
    private _rendererData;
    private get depthTextureUniform();
    private get opaqueTextureUniform();
    private onBeforeRenderScene;
    onBeforeRender(_renderer: any, _scene: any, camera: any, _geometry: any, obj: any, _group: any): void;
    /** Updates the engine-managed uniforms for the given camera and object */
    onUpdateUniforms(camera?: Camera_2, obj?: any): void;
}

/** Debug flag for networking — presumably enabled via a url parameter; confirm */
export declare const debugNet: boolean;

/** Debug flag for networked ownership — presumably enabled via a url parameter; confirm */
export declare const debugOwner: boolean;

/** Reads back a texture from the GPU (can be compressed, a render texture, or anything), optionally applies RGBA colorScale to it, and returns CPU data for further usage.
 * Note that there are WebGL / WebGPU rules preventing some use of data between WebGL contexts.
 * @param texture The texture to read back
 * @param maxTextureSize Optional maximum texture size for the readback — presumably larger textures are downscaled; confirm
 * @param renderer Optional renderer to perform the readback with
 * @param colorScale Optional RGBA scale applied to the read-back data
 * @returns The CPU-side image readback result
 */
export declare function decompressGpuTexture(texture: any, maxTextureSize?: number, renderer?: WebGLRenderer | null, colorScale?: Vector4 | undefined): Promise<ImageReadbackResult>;

/** Deep clones an object
 * @param obj The object to clone
 * @param predicate A function that can be used to skip certain properties from being cloned
 * @returns The cloned object
 * @example
 * const clone = deepClone(obj, (owner, propertyName, current) => {
 *     if (propertyName === "dontCloneMe") return false;
 *     return true;
 * });
 * */
export declare function deepClone(obj: any, predicate?: deepClonePredicate): any;

/** Predicate deciding whether a property should be cloned — return false to skip it.
 * @param owner The object owning the property
 * @param propertyName The name of the property being considered
 * @param current The current value of the property
 */
declare type deepClonePredicate = (owner: any, propertyName: string, current: any) => boolean;

/* Excluded from this release type: DefaultReflectionMode */

/** Wait for a specific amount of milliseconds to pass
 * @param milliseconds The amount of milliseconds to wait
 * @returns a promise that resolves after a certain amount of milliseconds
 * @example
 * ```typescript
 * await delay(1000);
 * ```
 */
export declare function delay(milliseconds: number): Promise<void>;

/** Will wait for a specific amount of rendered frames to pass
 * @param frameCount The amount of frames to wait for
 * @param context The context to use, if not provided the current context will be used
 * @returns a promise that resolves after a certain amount of frames
 * @example
 * ```typescript
 * await delayForFrames(10);
 * ```
 */
export declare function delayForFrames(frameCount: number, context?: Context): Promise<void>;

/**
 * Marks a GameObject as deletable by {@link DeleteBox} zones.
 * Objects with this component will be destroyed (and synced across network)
 * when they enter a DeleteBox area.
 *
 * **Note:** Objects currently being used (with {@link UsageMarker}) are protected from deletion.
 *
 * @example Make an object deletable
 * ```ts
 * const deletable = spawnedObject.addComponent(Deletable);
 * // Object can now be destroyed by entering a DeleteBox
 * ```
 *
 * @summary Marks object as destroyable by DeleteBox
 * @category Interactivity
 * @group Components
 * @see {@link DeleteBox} for the deletion trigger
 * @see {@link UsageMarker} for protecting objects in use
 */
export declare class Deletable extends Component {
    /** Per-frame check — presumably tests against active DeleteBox areas; confirm */
    update(): void;
}

/**
 * The [DeleteBox](https://engine.needle.tools/docs/api/DeleteBox) component creates an invisible deletion zone that destroys objects entering it.
 * Works with objects that have a {@link Deletable} component attached.
 *
 * ![](https://cloud.needle.tools/-/media/J-Gmdhl214kfdjkfYViG8g.gif)
 *
 * **Use cases:**
 * - Trash bins in sandbox builders
 * - Kill zones in physics simulations
 * - Cleanup areas for multiplayer scenes
 *
 * **Setup:**
 * 1. Add DeleteBox to a GameObject with a BoxCollider-like shape
 * 2. Add {@link Deletable} component to objects that should be destroyable
 * 3. Objects entering the box will be destroyed (synced across network)
 *
 * **Debug:** Use `?debugdeletable` URL parameter to visualize deletion areas.
 *
 * - Example: https://engine.needle.tools/samples/collaborative-sandbox
 *
 * @example Create a deletion zone
 * ```ts
 * const trashBin = trashBinModel.addComponent(DeleteBox);
 * // Objects with Deletable component will be destroyed when entering
 * ```
 *
 * @summary Box area that deletes objects entering it
 * @category Interactivity
 * @group Components
 * @see {@link Deletable} - Add to objects that can be destroyed
 * @see {@link Duplicatable} for spawning objects
 * @see {@link DragControls} for moving objects
 */
export declare class DeleteBox extends BoxHelperComponent {
    /** Registry of active DeleteBox instances — presumably maintained by onEnable/onDisable; confirm */
    static _instances: DeleteBox[];
    onEnable(): void;
    onDisable(): void;
}

/**
 * [DepthOfField](https://engine.needle.tools/docs/api/DepthOfField) simulates the focusing behavior of real-world cameras by blurring objects that are outside the focal plane.
 * This effect enhances the sense of depth in a scene by mimicking how cameras focus on subjects at varying distances, creating a more immersive visual experience.
 * It can be adjusted to achieve different artistic effects, from subtle background blurring to pronounced bokeh effects.
 * @summary Depth of Field Post-Processing Effect
 * @category Effects
 * @group Components
 */
export declare class DepthOfField extends PostProcessingEffect {
    get typeName(): string;
    /** The active mode: off, gaussian blur, or bokeh. @see DepthOfFieldMode */
    mode: DepthOfFieldMode;
    /** Distance to the focal plane */
    readonly focusDistance: VolumeParameter;
    /** Focal length of the simulated camera lens */
    readonly focalLength: VolumeParameter;
    /** Aperture of the simulated camera lens */
    readonly aperture: VolumeParameter;
    /** Maximum blur radius used in Gaussian mode — TODO confirm */
    readonly gaussianMaxRadius: VolumeParameter;
    /** Resolution scale for the effect's internal render targets */
    readonly resolutionScale: VolumeParameter;
    /** Scale of the bokeh blur in Bokeh mode */
    readonly bokehScale: VolumeParameter;
    init(): void;
    onCreateEffect(): DepthOfFieldEffect[] | undefined;
    unapply(): void;
}

/** Rendering mode of the {@link DepthOfField} effect. */
declare enum DepthOfFieldMode {
    /** Effect disabled */
    Off = 0,
    /** Gaussian blur based depth of field */
    Gaussian = 1,
    /** Bokeh style depth of field */
    Bokeh = 2
}

export declare function deserializeObject(obj: ISerializable, serializedData: object, context: SerializationContext): boolean;

/**
 * Destroys a GameObject or Component, removing it from the scene and cleaning up resources.
 * Calls `onDisable()` and `onDestroy()` lifecycle methods on all affected components.
 *
 * @param instance The Object3D or Component to destroy
 * @param recursive If true (default), also destroys all children recursively
 * @param dispose If true, also disposes GPU resources (geometries, materials, textures)
 *
 * @example Destroy an object
 * ```ts
 * import { destroy } from "@needle-tools/engine";
 * destroy(this.gameObject);
 * ```
 *
 * @example Destroy with resource disposal
 * ```ts
 * destroy(myObject, true, true); // recursive + dispose GPU resources
 * ```
 *
 * @see {@link GameObject.destroy} for the static method equivalent
 * @see {@link setDontDestroy} to mark objects as non-destroyable
 */
export declare function destroy(instance: Object3D | IComponent, recursive?: boolean, dispose?: boolean): void;

/** Destroys a single component instance — presumably removes it from its GameObject and the engine's update loops; confirm */
export declare function destroyComponentInstance(componentInstance: IComponent): void;

/** Determines the mimetype from a file name's extension.
 * @param name The file name (or url) to inspect
 * @returns the detected mimetype, or null when the extension is not recognized
 */
export declare function determineMimeTypeFromExtension(name: string): NeedleMimetype | null;

/**
 * DeviceFlag shows or hides GameObjects based on device type.
 * Use for responsive 3D content - show different UI, models, or interactions
 * depending on mobile vs desktop.
 *
 * **Device types:**
 * - `Desktop` - Traditional computers with mouse/keyboard
 * - `Mobile` - Phones and tablets with touch input
 * - Combine with bitwise OR for multiple: `Desktop | Mobile`
 *
 * @example Show only on desktop
 * ```ts
 * const flag = myObject.addComponent(DeviceFlag);
 * flag.visibleOn = DeviceType.Desktop;
 * ```
 *
 * @example Show on both mobile and desktop
 * ```ts
 * flag.visibleOn = DeviceType.Desktop | DeviceType.Mobile;
 * ```
 *
 * @summary Show or hide GameObject based on device type
 * @category Utilities
 * @group Components
 * @see {@link DeviceType} for device options
 * @see {@link XRFlag} for XR-based visibility
 */
export declare class DeviceFlag extends Component {
    /** Device types on which the object is visible; combine flags with bitwise OR */
    visibleOn: DeviceType;
    onEnable(): void;
    /** Applies the visibility rule to the GameObject */
    apply(): void;
    // tests whether the current device matches visibleOn — TODO confirm
    private test;
}

/** Bit flags describing device categories; combinable with bitwise OR (see {@link DeviceFlag}). */
declare enum DeviceType {
    /** Never visible on any device */
    Never = 0,
    /** Traditional computers with mouse/keyboard */
    Desktop = 1,
    /** Phones and tablets with touch input */
    Mobile = 2
}

/**
 * Utility functions to detect certain device types (mobile, desktop), browsers, or capabilities.
 * @category Utilities
 */
export declare namespace DeviceUtilities {
    /** @returns `true` for MacOS or Windows devices. `false` for Hololens and other headsets. */
    export function isDesktop(): boolean;
    /** @returns `true` if it's a phone or tablet */
    export function isMobileDevice(): boolean;
    /** @deprecated use {@link isiPad} instead */
    export function isIPad(): boolean;
    /** @returns `true` if we're currently on an iPad */
    export function isiPad(): boolean;
    /** @returns `true` if we're currently on an Android device */
    export function isAndroidDevice(): boolean;
    /** @returns `true` if we're currently using the Mozilla XR Browser (only available for iOS) */
    export function isMozillaXR(): boolean;
    /** @returns `true` if we're currently in the Needle App Clip */
    export function isNeedleAppClip(): boolean;
    /** @returns `true` for MacOS devices */
    export function isMacOS(): boolean;
    /** @returns `true` for VisionOS devices */
    export function isVisionOS(): boolean;
    /** @returns `true` for mobile Apple devices like iPad, iPhone, iPod, Vision Pro, ... */
    export function isiOS(): boolean;
    /** @returns `true` if we're currently on safari */
    export function isSafari(): boolean;
    /** @returns `true` for Meta Quest devices and browser. */
    export function isQuest(): boolean;
    /** @returns `true` if the browser has `<a rel="ar">` support, which indicates USDZ QuickLook support. */
    export function supportsQuickLookAR(): boolean;
    /** @returns `true` if the user allowed to use the microphone */
    export function microphonePermissionsGranted(): Promise<boolean>;
    /** @returns the iOS version string, or null when not on iOS or not detectable */
    export function getiOSVersion(): string | null;
    /** @returns the Chrome version string, or null when not running in Chrome */
    export function getChromeVersion(): string | null;
    /** @returns the Safari version string, or null when not running in Safari */
    export function getSafariVersion(): string | null;
}

/**
 * Controls how the {@link PlayableDirector} behaves when playback reaches the end of the timeline.
 * @see {@link PlayableDirector.extrapolationMode}
 */
declare enum DirectorWrapMode {
    /** Hold the last frame when playback reaches the end of the timeline. */
    Hold = 0,
    /** Loop back to the start and continue playing indefinitely. */
    Loop = 1,
    /** Stop playback when the end is reached. The timeline will not loop. */
    None = 2
}

/** Recursively disposes all resources referenced by this object. Does not traverse children. */
export declare function disposeObjectResources(obj: object | null | undefined): void;

/** Disposes a MediaStream — presumably stops all of its tracks; confirm. Safe to call with null/undefined. */
export declare function disposeStream(str: MediaStream | null | undefined): void;

/** Base class for actions applied to a USD document during export.
 * Subclasses implement {@link onApply} to modify the document for the given object.
 */
declare abstract class DocumentAction {
    /** Unique id of this action */
    get id(): string;
    /** The scene object this action operates on */
    protected object: Object3D;
    /** The USD model associated with the object, if any */
    protected model?: USDObject;
    constructor(obj: Object3D);
    /** Applies this action to the document by invoking {@link onApply} */
    apply(document: USDDocument): void;
    /** Subclass hook performing the actual document modification */
    protected abstract onApply(document: USDDocument): any;
}

/** USD exporter extension invoked after the USD document has been built. */
export declare class DocumentExtension implements IUSDExporterExtension {
    /** Name identifying this extension to the exporter */
    get extensionName(): string;
    /** Called by the exporter after the USD document has been built */
    onAfterBuildDocument(_context: any): void;
}

/**
 * [DragControls](https://engine.needle.tools/docs/api/DragControls) enables interactive dragging of objects in 2D (screen space) or 3D (world space).
 *
 * ![](https://cloud.needle.tools/-/media/HyrtRDLjdmndr23_SR4mYw.gif)
 *
 * **Drag modes:**
 * - `XZPlane` - Drag on horizontal plane (good for floor objects)
 * - `Attached` - Follow pointer directly (screen plane in 2D, controller in XR)
 * - `HitNormal` - Drag along the surface normal where clicked
 * - `DynamicViewAngle` - Auto-switch between XZ and screen based on view angle
 * - `SnapToSurfaces` - Snap to scene geometry while dragging
 *
 * **Features:**
 * - Works across desktop, mobile, VR, and AR
 * - Optional grid snapping (`snapGridResolution`)
 * - Rotation preservation (`keepRotation`)
 * - Automatic networking with {@link SyncedTransform}
 *
 *
 * **Debug:** Use `?debugdrag` URL parameter for visual helpers.
 *
 * @example Basic draggable object
 * ```ts
 * const drag = myObject.addComponent(DragControls);
 * drag.dragMode = DragMode.XZPlane;
 * drag.snapGridResolution = 0.5; // Snap to 0.5 unit grid
 * ```
 *
 * - Example: https://engine.needle.tools/samples/collaborative-sandbox
 *
 * @summary Enables dragging of objects in 2D or 3D space
 * @category Interactivity
 * @group Components
 * @see {@link DragMode} for available drag behaviors
 * @see {@link Duplicatable} for drag-to-duplicate functionality
 * @see {@link SyncedTransform} for networked dragging
 * @see {@link ObjectRaycaster} for pointer detection
 */
export declare class DragControls extends Component implements IPointerEventHandler {
    /**
     * Checks if any DragControls component is currently active with selected objects
     * @returns True if any DragControls component is currently active
     */
    static get HasAnySelected(): boolean;
    // instances currently dragging — presumably backs HasAnySelected/CurrentlySelected; confirm
    private static _active;
    /**
     * Retrieves a list of all DragControl components that are currently dragging objects.
     * @returns Array of currently active DragControls components
     */
    static get CurrentlySelected(): DragControls[];
    /** Registry of currently active and enabled DragControls components */
    private static _instances;
    /**
     * Determines how and where the object is dragged along. Different modes include
     * dragging along a plane, attached to the pointer, or following surface normals.
     */
    dragMode: DragMode;
    /**
     * Snaps dragged objects to a 3D grid with the specified resolution.
     * Set to 0 to disable snapping.
     */
    snapGridResolution: number;
    /**
     * When true, maintains the original rotation of the dragged object while moving it.
     * When false, allows the object to rotate freely during dragging.
     */
    keepRotation: boolean;
    /**
     * Determines how and where the object is dragged along while dragging in XR.
     * Uses a separate setting from regular drag mode for better XR interaction.
     */
    xrDragMode: DragMode;
    /**
     * When true, maintains the original rotation of the dragged object during XR dragging.
     * When false, allows the object to rotate freely during XR dragging.
     */
    xrKeepRotation: boolean;
    /**
     * Multiplier that affects how quickly objects move closer or further away when dragging in XR.
     * Higher values make distance changes more pronounced.
     * This is similar to mouse acceleration on a screen.
     */
    xrDistanceDragFactor: number;
    /**
     * When enabled, draws a visual line from the dragged object downwards to the next raycast hit,
     * providing visual feedback about the object's position relative to surfaces below it.
     */
    showGizmo: boolean;
    /**
     * Returns the object currently being dragged by this DragControls component, if any.
     * @returns The object being dragged or null if no object is currently dragged
     */
    get draggedObject(): Object3D<Object3DEventMap> | null;
    /**
     * Updates the object that is being dragged by the DragControls.
     * This can be used to change the target during a drag operation.
     * @param obj The new object to drag, or null to stop dragging
     */
    setTargetObject(obj: Object3D | null): void;
    // rigidbody of the target object, if it has one — TODO confirm
    private _rigidbody;
    /** The object to be dragged – we pass this to handlers when they are created */
    private _targetObject;
    // helper that implements the drag positioning logic — TODO confirm
    private _dragHelper;
    // most recently hovered object across all instances — TODO confirm usage
    private static lastHovered;
    // rigidbodies affected by the current drag, kept awake while dragging (see onAnyDragUpdate)
    private _draggingRigidbodies;
    // pointer-down event cached until the pointer actually starts dragging — TODO confirm
    private _potentialDragStartEvt;
    // per-pointer drag handlers created from _targetObject
    private _dragHandlers;
    // accumulated pointer movement during the current interaction — TODO confirm purpose
    private _totalMovement;
    /** A marker is attached to components that are currently interacted with, to e.g. prevent them from being deleted. */
    private _marker;
    private _isDragging;
    // whether any actual drag movement occurred during the interaction — TODO confirm
    private _didDrag;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: start */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    onDestroy(): void;
    /**
     * Checks if editing is allowed for the current networking connection.
     * @param _obj Optional object to check edit permissions for
     * @returns True if editing is allowed
     */
    private allowEdit;
    /* Excluded from this release type: onPointerEnter */
    /* Excluded from this release type: onPointerMove */
    /* Excluded from this release type: onPointerExit */
    /* Excluded from this release type: onPointerDown */
    /* Excluded from this release type: onPointerUp */
    /* Excluded from this release type: update */
    /**
     * Called when the first pointer starts dragging on this object.
     * Sets up network synchronization and marks rigidbodies for dragging.
     * Not called for subsequent pointers on the same object.
     * @param evt Pointer event data that initiated the drag
     */
    private onFirstDragStart;
    /**
     * Called each frame as long as any pointer is dragging this object.
     * Updates visuals and keeps rigidbodies awake during the drag.
     */
    private onAnyDragUpdate;
    /**
     * Called when the last pointer has been removed from this object.
     * Cleans up drag state and applies final velocities to rigidbodies.
     * @param evt Pointer event data for the last pointer that was lifted
     */
    private onLastDragEnd;
}

/**
 * The DragMode determines how an object is dragged around in the scene.
 * @see {@link DragControls.dragMode}
 */
export declare enum DragMode {
    /** Object stays at the same horizontal plane as it started. Commonly used for objects on the floor */
    XZPlane = 0,
    /** Object is dragged as if it was attached to the pointer. In 2D, that means it's dragged along the camera screen plane. In XR, it's dragged by the controller/hand. */
    Attached = 1,
    /** Object is dragged along the initial raycast hit normal. */
    HitNormal = 2,
    /** Combination of XZPlane and screen-plane ({@link Attached}) dragging based on the viewing angle. Low angles result in screen-plane dragging and higher angles in XZ dragging. */
    DynamicViewAngle = 3,
    /** The drag plane is snapped to surfaces in the scene while dragging. */
    SnapToSurfaces = 4,
    /** Don't allow dragging the object */
    None = 5
}

/**
 * DropListener enables drag-and-drop loading of 3D files directly into your scene.
 * Users can drop glTF/GLB files onto the canvas to dynamically add new objects at runtime.
 *
 * [![](https://cloud.needle.tools/-/media/p5LNPTQ0u4mRXA6WiSmzIQ.gif)](https://engine.needle.tools/samples/droplistener)
 *
 * **Supported formats:** glTF, GLB, FBX, OBJ, USDZ, VRM
 *
 * **Key features:**
 * - Drop files directly onto canvas or onto a specific {@link dropArea}
 * - Paste URLs from clipboard (Ctrl/Cmd+V)
 * - Auto-fit objects to a specific size with {@link fitIntoVolume}
 * - Network sync to share dropped objects with other users
 * - Special handling for GitHub and Polyhaven URLs
 *
 * **Events:**
 * - `file-dropped` - Fired for each dropped file
 * - `object-added` - Fired when object is loaded and added to scene
 *
 * **Debug:** Use `?debugdroplistener` URL parameter
 *
 * @example Listen for dropped objects
 * ```ts
 * const dropListener = myObject.addComponent(DropListener);
 * dropListener.useNetworking = true;
 * dropListener.fitIntoVolume = true;
 *
 * dropListener.on(DropListenerEvents.ObjectAdded, (evt) => {
 *   const { object, model } = evt.detail;
 *   console.log("Added:", object.name);
 * });
 * ```
 *
 * @example Load from URL programmatically
 * ```ts
 * const obj = await dropListener.loadFromURL("https://example.com/model.glb");
 * ```
 * Hint: We recommend to use {@link AssetReference} for preloading and referencing assets in code if you simply want to load a model.
 *
 * @summary Drag-and-drop file loading for 3D assets
 * @category Asset Management
 * @group Components
 * @see {@link SceneSwitcher} for loading entire scenes
 * @see {@link AssetReference} for preloading assets
 * @see {@link SyncedTransform} for networking support
 * @link https://engine.needle.tools/samples/droplistener for a live demo
 */
export declare class DropListener extends Component {
    /**
     * When assigned, the DropListener will only accept files that are dropped on this specific object.
     * This allows creating designated drop zones in your scene.
     */
    dropArea?: Object3D;
    /**
     * When enabled, dropped objects will be automatically scaled to fit within the volume defined by fitVolumeSize.
     * Useful for ensuring dropped models appear at an appropriate scale.
     *
     * **Tip**: Use the handy `fitObjectIntoVolume` function (`import { fitObjectIntoVolume } from "@needle-tools/engine"`) for custom fitting needs.
     *
     * @default false
     */
    fitIntoVolume: boolean;
    /**
     * Defines the dimensions of the volume that dropped objects will be scaled to fit within.
     * Only used when fitIntoVolume is enabled.
     */
    fitVolumeSize: Vector3;
    /**
     * When enabled, dropped objects will be positioned at the point where the cursor hit the scene.
     * When disabled, objects will be placed at the origin of the DropListener.
     * @default true
     */
    placeAtHitPosition: boolean;
    /**
     * When enabled, the DropListener will automatically synchronize dropped files to other connected clients.
     * When a file is dropped locally, it will be uploaded to blob storage and the URL will be shared with other clients.
     * @default false
     */
    useNetworking: boolean;
    /**
     * Event list that gets invoked after a file has been successfully added to the scene.
     * Receives {@link DropListenerOnDropArguments} containing the added object and related information.
     * @event object-added
     * @example
     * ```typescript
     * dropListener.onDropped.addEventListener((evt) => {
     *  console.log("Object added", evt.model);
     * });
     * ```
     */
    onDropped: EventList<DropListenerOnDropArguments>;
    /**
     * Loads a file from the given URL and adds it to the scene.
     * @param url The URL to load the model from
     * @param data Optional placement hints: worldspace point and bounding box size
     * @returns A promise that resolves to the loaded object or null if loading failed.
     */
    loadFromURL(url: string, data?: {
        point?: Vec3;
        size?: Vec3;
    }): Promise<Object3D | null>;
    /**
     * Forgets all previously added objects.
     * The DropListener will then not be able to remove previously added objects.
     */
    forgetObjects(): void;
    awake(): void;
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /**
     * Handles network events received from other clients containing information about dropped objects
     * @param evt Network event data containing object information, position, and content URL
     */
    private onNetworkEvent;
    /**
     * Handles clipboard paste events and processes them as potential URL drops
     * Only URLs are processed by this handler, and only when editing is allowed
     * @param evt The paste event
     */
    private handlePaste;
    /**
     * Handles drag events over the renderer's canvas
     * Prevents default behavior to enable drop events
     * @param evt The drag event
     */
    private onDrag;
    /**
     * Processes drop events to add files to the scene
     * Handles both file drops and text/URL drops
     * @param evt The drop event
     */
    private onDrop;
    /**
     * Processes a dropped or pasted URL and tries to load it as a 3D model
     * Handles special cases like GitHub URLs and Polyhaven asset URLs
     * @param url The URL to process
     * @param ctx Context information about where the drop occurred
     * @param isRemote Whether this URL was shared from a remote client
     * @returns The added object or null if loading failed
     */
    private addFromUrl;
    /** NOTE(review): presumably used to cancel in-flight loads/uploads when new files are dropped (see {@link addFromFiles}) — confirm */
    private _abort;
    /**
     * Processes dropped files and loads them as 3D models.
     * When enabled, it also handles network drops (sending files between clients).
     * Automatically handles cancelling previous uploads if new files are dropped.
     * @param fileList Array of dropped files
     * @param ctx Context information about where on the screen or in 3D space the drop occurred
     */
    private addFromFiles;
    /** Previously added objects */
    private readonly _addedObjects;
    /** NOTE(review): presumably the models associated with {@link _addedObjects} — confirm */
    private readonly _addedModels;
    /**
     * Removes all previously added objects from the scene
     * @param doDestroy When true, destroys the objects; when false, just clears the references
     */
    private removePreviouslyAddedObjects;
    /**
     * Adds a loaded model to the scene with proper positioning and scaling.
     * Handles placement based on component settings and raycasting.
     * If {@link fitIntoVolume} is enabled, the object will be scaled to fit within the volume defined by {@link fitVolumeSize}.
     * @param data The loaded model data and content hash
     * @param ctx Context information about where the drop occurred
     * @param isRemote Whether this object was shared from a remote client
     * @returns The added object or null if adding failed
     */
    private onObjectLoaded;
    /**
     * Sends a network event to other clients about a dropped object
     * Only triggered when networking is enabled and the connection is established
     * @param url The URL to the content that was dropped
     * @param obj The object that was added to the scene
     * @param contentmd5 The content hash for verification
     */
    private sendDropEvent;
    /**
     * Deletes remote state for this DropListener's objects
     * Called when new files are dropped to clean up previous state
     */
    private deleteDropEvent;
    /**
     * Tests if a drop event occurred within the designated drop area if one is specified
     * @param ctx The drop context containing screen position information
     * @returns True if the drop is valid (either no drop area is set or the drop occurred inside it)
     */
    private testIfIsInDropArea;
}

/**
 * Network event arguments passed between clients when using the {@link DropListener} with networking enabled.
 */
export declare type DropListenerNetworkEventArguments = {
    /** Unique identifier of the sender */
    guid: string;
    /** Name of the dropped object */
    name: string;
    /** URL or array of URLs to the dropped content */
    url: string | string[];
    /** Worldspace point where the object was placed in the scene */
    point: Vec3;
    /** Bounding box size */
    size: Vec3;
    /** MD5 hash of the content for verification */
    contentMD5: string;
};

/**
 * Arguments provided to handlers when an object is dropped or added to the scene.
 * Dispatched via {@link DropListener.onDropped}.
 */
export declare type DropListenerOnDropArguments = {
    /** The DropListener component that processed the drop event */
    sender: DropListener;
    /** The root object added to the scene */
    object: Object3D;
    /** The complete model with all associated data */
    model: Model;
    /** MD5 hash of the content for verification */
    contentMD5: string;
    /** The original dropped URL or File object */
    dropped: URL | File | undefined;
};

/**
 * The [Duplicatable](https://engine.needle.tools/docs/api/Duplicatable) component creates clones of a GameObject when clicked/tapped/dragged.
 * Perfect for spawning objects, creating drag-and-drop inventories, or multiplayer object creation.
 *
 * ![](https://cloud.needle.tools/-/media/J_ij9vxhh1zhS8h2ftGBXQ.gif)
 *
 * **How it works:**
 * - When the user clicks on this object, it creates a clone of the assigned `object`
 * - The clone is automatically set up with {@link DragControls} so users can drag it
 * - If networking is enabled, clones are synced via {@link SyncedTransform}
 * - Rate limiting prevents spam (controlled by `limitCount`)
 *
 * **Setup tips:**
 * - Assign `object` to a template object (it will be hidden and used as source)
 * - If `object` is not assigned, the component's own GameObject is used as template
 * - Add an {@link ObjectRaycaster} to enable pointer detection (added automatically if missing)
 *
 * @example Basic duplicatable button
 * ```ts
 * const duplicatable = spawnButton.addComponent(Duplicatable);
 * duplicatable.object = templateObject; // Object to clone
 * duplicatable.parent = spawnContainer;  // Where to place clones
 * duplicatable.limitCount = 10;          // Max 10 per second
 * ```
 *
 * @summary Duplicates a GameObject on pointer events
 * @category Interactivity
 * @group Components
 * @see {@link DragControls} for dragging the duplicated objects
 * @see {@link SyncedTransform} for networking support
 * @see {@link GameObject.instantiateSynced} for the underlying instantiation
 * @link https://engine.needle.tools/samples/collaborative-sandbox/
 */
export declare class Duplicatable extends Component implements IPointerEventHandler {
    /**
     * Parent object for spawned duplicates.
     * If not set, duplicates are parented to this GameObject's parent.
     */
    parent: GameObject | null;
    /**
     * Template object to duplicate. This object will be hidden and used as the source for clones.
     * If not assigned, this GameObject itself is used as the template.
     */
    object: GameObject | null;
    /**
     * Maximum duplications allowed per second to prevent spam.
     * The counter decreases by 1 each second.
     * @default 60
     */
    limitCount: number;
    /** NOTE(review): presumably the running duplication count compared against {@link limitCount} — confirm */
    private _currentCount;
    /** NOTE(review): presumably the template's initial position, captured for placing clones — confirm */
    private _startPosition;
    /** NOTE(review): presumably the template's initial rotation, captured for placing clones — confirm */
    private _startQuaternion;
    start(): void;
    onEnable(): void;
    /** Forwards pointer events (e.g. from the template object) to this component's handlers */
    private _forwardPointerEvents;
    onPointerEnter(args: PointerEventData): void;
    onPointerExit(args: PointerEventData): void;
    /* Excluded from this release type: onPointerDown */
    /* Excluded from this release type: onPointerUp */
    /** Interval callback implementing the per-second rate limit (see {@link limitCount}) */
    private cloneLimitIntervalFn;
    /** Performs the actual clone/instantiation when a pointer event triggers a duplication */
    private handleDuplication;
}

/* Excluded from this release type: EaseType */

export declare const editorGuidKeyName = "needle_editor_guid";

/* Excluded from this release type: EditorModification */

export declare type EffectProviderResult = Effect | Pass | Array<Effect | Pass>;

/**
 * [EffectWrapper](https://engine.needle.tools/docs/api/EffectWrapper) wraps a custom postprocessing effect to integrate it with the Needle Engine post-processing pipeline.
 *
 * @category Effects
 * @group Components
 */
export declare class EffectWrapper extends PostProcessingEffect {
    /** The wrapped postprocessing effect instance */
    readonly effect: Effect;
    /** @param effect The postprocessing effect to wrap */
    constructor(effect: Effect);
    get typeName(): string;
    /** Returns the wrapped effect so it can be added to the pipeline */
    onCreateEffect(): EffectProviderResult | undefined;
}

/**
 * Particle system emission settings: emission rates over time/distance and burst configuration.
 * NOTE(review): appears to mirror Unity's ParticleSystem EmissionModule — confirm.
 */
export declare class EmissionModule {
    enabled: boolean;
    /** Number of configured bursts */
    get burstCount(): number;
    bursts: ParticleBurst[];
    rateOverTime: MinMaxCurve;
    rateOverTimeMultiplier: number;
    rateOverDistance: MinMaxCurve;
    rateOverDistanceMultiplier: number;
    /** set from system */
    system: IParticleSystem;
    reset(): void;
    getBurst(): number;
}

/* Excluded from this release type: EmphasizeActionMotionType */

/**
 * Applies an emphasis animation to a target object when this object is clicked.
 * Works in USDZ/QuickLook (Everywhere Actions).
 *
 * The emphasis effect can be a bounce, jiggle, or other motion type defined by `motionType`.
 *
 * @see {@link PlayAnimationOnClick} to play animations when clicked
 * @see {@link SetActiveOnClick} to toggle visibility when clicked
 * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
 * @summary Emphasizes the target object when clicked
 * @category Everywhere Actions
 * @group Components
 */
export declare class EmphasizeOnClick extends Component implements UsdzBehaviour {
    /** The target object to emphasize. */
    target?: Object3D;
    /** The duration of the emphasis animation in seconds. */
    duration: number;
    /** The type of motion to use for the emphasis effect (e.g. `"bounce"`, `"jiggle"`). */
    motionType: EmphasizeActionMotionType;
    onEnable(): void;
    onDisable(): void;
    onDestroy(): void;
    /** USDZ export hook: called before the USD document is created */
    beforeCreateDocument(): void;
    /** USDZ export hook: creates the USDZ behaviours for this action */
    createBehaviours(ext: any, model: any, _context: any): void;
    /** USDZ export hook: called after the USD document has been created */
    afterCreateDocument(_ext: any, _context: any): void;
}

/** Enable a spatial debug console that follows the camera
 * @param active True to enable the spatial console, false to disable it
 */
export declare function enableSpatialConsole(active: boolean): void;

/* Excluded from this release type: EngineLoadingView */

export declare type EnumToPrimitiveUnion<T> = `${T & string}` | ParseNumber<`${T & number}`>;

/** A built-in environment scene, constructed from one of the predefined presets (`'legacy'` or `'neutral'`). */
export declare class EnvironmentScene extends Scene {
    constructor(name: 'legacy' | 'neutral');
    /** Creates a basic material with the given intensity for area-light geometry in the environment */
    createAreaLightMaterial(intensity: number): MeshBasicMaterial;
}

export declare const euler: EulerSerializer;

/** Serializes/deserializes three.js {@link Euler} values to and from plain `{x, y, z, order}` objects */
declare class EulerSerializer extends TypeSerializer {
    constructor();
    onDeserialize(data: any, _context: SerializationContext): Euler | undefined;
    onSerialize(data: any, _context: SerializationContext): {
        x: any;
        y: any;
        z: any;
        order: any;
    };
}

/**
 * EventList manages a list of callbacks that can be invoked together.
 * Used for Unity-style events that can be configured in the editor (Unity or Blender).
 *
 * **Serialization:**
 * EventLists are serializable - callbacks configured in Unity/Blender will work at runtime.
 * Mark fields with `@serializable(EventList)` for editor support.
 *
 * **Usage patterns:**
 * - Button click handlers
 * - Animation events
 * - Custom component callbacks
 * - Scene loading events
 *
 * ![](https://cloud.needle.tools/-/media/P7bEKQvfgRUMTb2Wi1hWXg.png)
 * *Screenshot of a Unity component with an EventList field*
 *
 * ![](https://cloud.needle.tools/-/media/i2hi2OHfbaDyHyBL6Gt58A.png)
 * *Screenshot of a Blender component with an EventList field*
 *
 * @example Create and use an EventList
 * ```ts
 * // Define in your component
 * @serializable(EventList)
 * onClick: EventList = new EventList();
 *
 * // Add listeners
 * this.onClick.addEventListener(() => console.log("Clicked!"));
 *
 * // Invoke all listeners
 * this.onClick.invoke();
 * ```
 *
 * @example Listen with arguments
 * ```ts
 * const onScore = new EventList<{ points: number }>();
 * onScore.addEventListener(data => console.log("Scored:", data.points));
 * onScore.invoke({ points: 100 });
 * ```
 *
 * @category Events
 * @group Utilities
 * @see {@link CallInfo} for individual callback configuration
 * @see {@link Button} for UI button events
 */
export declare class EventList<TArgs extends any = any> implements IEventList {
    /** checked during instantiate to create a new instance */
    readonly isEventList = true;
    /* Excluded from this release type: __internalOnInstantiate */
    /** Optional EventTarget set via {@link setEventTarget} */
    private target?;
    /** Event name used when dispatching on {@link target} */
    private key?;
    /** set an event target to try invoke the EventTarget dispatchEvent when this EventList is invoked */
    setEventTarget(key: string, target: object): void;
    /** How many callback methods are subscribed to this event */
    get listenerCount(): number;
    /** If the event is currently being invoked */
    get isInvoking(): boolean;
    /** NOTE(review): presumably the backing field for {@link isInvoking}, used to detect circular invocation — confirm */
    private _isInvoking;
    /** The subscribed callback methods */
    private readonly methods;
    /** NOTE(review): presumably a snapshot of {@link methods} taken during invoke so listeners can be added/removed while invoking — confirm */
    private readonly _methodsCopy;
    /**
     * Create a new EventList with the given callback methods. You can pass either CallInfo instances or functions directly.
     * @returns a new EventList instance with the given callback methods
     * @example
     * ```ts
     * const onClick = EventList.from(
     *   () => console.log("Clicked!"),
     *   new CallInfo(someObject, "someMethod", [arg1, arg2])
     * );
     * onClick.invoke();
     * ```
     */
    static from(...evts: Array<Function>): EventList<any>;
    /**
     * Create a new EventList with the given callback methods. You can pass either CallInfo instances or functions directly.
     * @returns a new EventList instance with the given callback methods
     */
    constructor(evts?: Array<CallInfo | Function> | Function);
    /** Invoke all the methods that are subscribed to this event
     * @param args optional arguments to pass to the event listeners. These will be passed before any custom arguments defined in the CallInfo instances. So if you have a CallInfo with arguments and you also pass arguments to invoke, the arguments passed to invoke will take precedence over the CallInfo arguments.
     * @returns true if the event was successfully invoked, false if there are no listeners or if a circular invocation was detected
     */
    invoke(...args: Array<TArgs>): boolean;
    /** Add a new event listener to this event
     * @returns a function to remove the event listener
     */
    addEventListener(callback: (args: TArgs) => void): Function;
    /**
     * Remove an event listener from this event.
     * @returns true if the event listener was found and removed, false otherwise
     */
    removeEventListener(fn: Function | null | undefined): boolean;
    /**
     * Remove all event listeners from this event. Use with caution! This will remove all listeners!
     */
    removeAllEventListeners(): void;
}

/** Serialized form of a single call in an {@link EventList} (see {@link EventListData}) */
declare type EventListCall = {
    /** Name of the method to invoke */
    method: string;
    /** Identifier of the target object the method is invoked on */
    target: string;
    /** Single argument to pass to the method */
    argument?: any;
    /** Multiple arguments to pass to the method */
    arguments?: Array<any>;
    /** Whether this call is enabled */
    enabled?: boolean;
};

/** Serialized representation of an {@link EventList}: a type tag plus the list of calls */
declare class EventListData {
    type: string;
    calls: Array<EventListCall>;
}

/** Options for adding or removing input event listeners */
declare type EventListenerOptions_2 = {
    /** For addEventListener: The queue to add the listener to. Listeners in the same queue are called in the order they were added. Default is 0.
     * For removeEventListener: The queue to remove the listener from. If no queue is specified the listener will be removed from all queues
     */
    queue?: InputEventQueue | number;
    /** If true, the listener will be removed after it is invoked once. */
    once?: boolean;
    /** The listener will be removed when the given AbortSignal object's `abort()` method is called. If not specified, no AbortSignal is associated with the listener. */
    signal?: AbortSignal;
};

/** DOM {@link Event} subclass carrying the arguments an {@link EventList} was invoked with */
export declare class EventListEvent<TArgs extends any> extends Event {
    /** The arguments passed to the EventList invocation, if any */
    args?: TArgs;
}

/** Serializes/deserializes {@link EventList} instances to and from {@link EventListData} */
declare class EventListSerializer extends TypeSerializer {
    constructor();
    onSerialize(_data: EventList<any>, _context: SerializationContext): EventListData | undefined;
    onDeserialize(data: EventListData, context: SerializationContext): EventList<any> | undefined | null;
}

export declare const eventListSerializer: EventListSerializer;

/**
 * [EventSystem](https://engine.needle.tools/docs/api/EventSystem) is responsible for managing and dispatching input events to UI components within the scene.
 * @summary Manages and dispatches input events to UI components
 * @category User Interface
 * @group Components
 */
export declare class EventSystem extends Component {
    static ensureUpdateMeshUI(instance: any, context: Context, force?: boolean): void;
    static markUIDirty(_context: Context): void;
    /** Creates an EventSystem in the given context if none exists yet */
    static createIfNoneExists(context: Context): void;
    /** Get the event system for the given context, or null if none exists */
    static get(ctx: Context): EventSystem | null;
    /** Get the currently active event system */
    static get instance(): EventSystem | null;
    private readonly raycaster;
    /** Registers a raycaster to be used for hit testing */
    register(rc: Raycaster_2): void;
    /** Unregisters a previously registered raycaster */
    unregister(rc: Raycaster_2): void;
    get hasActiveUI(): boolean;
    get isHoveringObjects(): boolean;
    awake(): void;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    /**
     * all pointers that have pressed something
     *
     * key: pointerId
     * value: object that was pressed, data of the pointer event, handlers that are relevant to the event
     */
    private pressedByID;
    /**
     * all hovered objects
     *
     * key: pointerId
     * value: object that is hovered, data of the pointer event
     */
    private hoveredByID;
    onBeforeRender(): void;
    /**
     * Handle a pointer event from the input system
     */
    private onPointerEvent;
    private readonly _sortedHits;
    /**
     * cache for objects that we want to raycast against. It's cleared before each call to performRaycast invoking raycasters
     */
    private readonly _testObjectsCache;
    /** that's the raycaster that is CURRENTLY being used for raycasting (the shouldRaycastObject method uses this) */
    private _currentlyActiveRaycaster;
    /** Name of the pointer event currently being processed */
    private _currentPointerEventName;
    /**
     * Checks if an object that we encounter has an event component and if it does, we add it to our objects cache
     * If it doesn't we tell our raycasting system to ignore it and continue in the child hierarchy
     * We do this to avoid raycasts against objects that are not going to be used by the event system
     * Because there's no component callback to be invoked anyway.
     * This is especially important to avoid expensive raycasts against SkinnedMeshes
     *
     * Further optimizations would be to check what type of event we're dealing with
     * For example if an event component has only an onPointerClick method we don't need to raycast during movement events
     * */
    private shouldRaycastObject;
    private shouldRaycastObject_AddToYesCache;
    /** the raycast filter is always overridden */
    private performRaycast;
    private assignHitInformation;
    private handleIntersections;
    private _sortingBuffer;
    private _noDepthTestingResults;
    /** Sorts raycast hit candidates before they are processed */
    private sortCandidates;
    private out;
    /**
     * Handle hit result by preparing all needed information before propagation.
     * Then calling propagate.
     */
    private handleEventOnObject;
    /**
     * Propagate up in hierarchy and call the callback for each component that is possibly a handler
     */
    private propagate;
    /**
     * Propagate up in hierarchy and call handlers based on the pointer event data
     */
    private handleMainInteraction;
    /** Propagate up in hierarchy and call onPointerExit */
    private propagatePointerExit;
    /** handles onPointerUp - this will also release the pointerCapture */
    private invokeOnPointerUp;
    /** Responsible for invoking onPointerEnter (and updating onPointerExit). We invoke onPointerEnter once per active pointerId */
    private handlePointerEnter;
    /** Responsible for invoking onPointerExit (and updating onPointerEnter). We invoke onPointerExit once per active pointerId */
    private handlePointerExit;
    /** updates the pointer state list for a component
     * @param comp the component to update
     * @param pointerId the pointerId to update
     * @param symbol the symbol to use for the state
     * @param add if true, the pointerId is added to the state list, if false the pointerId will be removed
     */
    private updatePointerState;
    /** the list of component handlers that requested pointerCapture for a specific pointerId */
    private readonly _capturedPointer;
    /** check if the event was marked to be captured: if yes add the current component to the captured list */
    private handlePointerCapture;
    /** removes the component from the pointer capture list */
    releasePointerCapture(evt: PointerEventData, component: IPointerEventHandler): void;
    /** invoke the pointerMove event on all captured handlers */
    private invokePointerCapture;
    private readonly pointerEnterSymbol;
    private readonly pointerExitSymbol;
    /** Checks whether one object is a child of another */
    private isChild;
    private handleMeshUiObjectWithoutShadowDom;
    private currentActiveMeshUIComponents;
    private handleMeshUIIntersection;
    /** Resets hover/press states on MeshUI components */
    private resetMeshUIStates;
    private testIsVisible;
}

/**
 * The [EventTrigger](https://engine.needle.tools/docs/api/EventTrigger) component is used to trigger events when certain pointer events occur on the GameObject.
 * It implements the {@link IPointerEventHandler} interface and can be used to expose events to the user in the editor without writing code.
 *
 * @summary Triggers events on pointer interactions
 * @category Interactivity
 * @group Components
 */
export declare class EventTrigger extends Component implements IPointerEventHandler {
    /** A list of events that should be triggered when a pointer event occurs on the GameObject. */
    triggers?: Array<TriggerEvent>;
    /* Excluded from this release type: invoke */
    /** Checks whether a trigger is configured for a given event type */
    private hasTrigger;
    /** Determines whether the cursor should change when hovering this object */
    private shouldChangeCursor;
    /* Excluded from this release type: onPointerClick */
    /* Excluded from this release type: onPointerEnter */
    /* Excluded from this release type: onPointerExit */
    /* Excluded from this release type: onPointerDown */
    /* Excluded from this release type: onPointerUp */
}

/* Excluded from this release type: EventType */

export declare function exportAsGLTF(_opts: ExportOptions): Promise<ArrayBuffer | Record<string, any>>;

/** Associates an exported node with its index and its glTF node definition during export */
declare class ExportData {
    /** The scene object being exported */
    node: Object3D;
    /** Index of the node in the glTF nodes array */
    nodeIndex: number;
    /** The glTF node definition object */
    nodeDef: any;
    constructor(node: Object3D, nodeIndex: number, nodeDef: any);
}

/** Options for {@link exportAsGLTF} */
declare type ExportOptions = {
    /** The engine context to export from */
    context: Context;
    /** The object(s) to export; when omitted the behavior depends on the implementation — TODO confirm default */
    scene?: Object3D | Array<Object3D>;
    /** Export as binary GLB instead of glTF JSON */
    binary?: boolean;
    /** Include animations in the export */
    animations?: boolean;
    /** When set, the exported file is downloaded with this filename */
    downloadAs?: string;
};

/** three.js GLTFExporter options extended with Needle-specific settings */
declare type ExportOptions_2 = GLTFExporterOptions & {
    /** Pivot point to apply during export */
    pivot?: Vector3;
    /** Whether to include Needle component data in the export */
    needleComponents?: boolean;
};

/** Result of loading an FBX file: animation clips plus the loaded scene graph(s) */
export declare type FBX = {
    animations: AnimationClip[];
    scene: Object3D;
    scenes: Object3D[];
};

declare type FieldChangedCallbackFn = (newValue: any, previousValue: any) => void | boolean | any;

/** Describes a field on an asset together with its default value — NOTE(review): exact usage not visible here; confirm against serialization code */
export declare class FieldWithDefault {
    /** Path to the field, or null */
    path: string | null;
    /** The asset the field belongs to, or null */
    asset: object | null;
    /** The field's default value */
    default: any;
}

/** Event names related to file handling */
export declare enum File_Event {
    /** Dispatched when a file has been spawned into the scene */
    File_Spawned = "file-spawned"
}

/**
 * Use this if a file is a external file URL. The file can be any arbitrary binary data like a videofile or a text asset.
 *
 * ### Related:
 * - {@link AssetReference} to load glTF or GLB assets
 * - {@link ImageReference} to load external image URLs
 */
export declare class FileReference {
    /** Cache of FileReference instances keyed by URL (see {@link getOrCreate}) */
    private static cache;
    /** Returns a cached FileReference for the URL or creates a new one */
    static getOrCreate(url: string): FileReference;
    /** Load the file binary data
     * @returns a promise that resolves to the binary data of the file. Make sure to await this request or use `.then(res => {...})` to get the result.
     */
    loadRaw(): Promise<Blob>;
    /** Load the file as text (if the referenced file is a text file like a .txt or .json file)
     * @returns a promise that resolves to the text data of the file. Make sure to await this request or use `.then(res => {...})` to get the result. If the format is json you can use `JSON.parse(result)` to convert it to a json object
     */
    loadText(): Promise<string>;
    /** The resolved url to the file */
    readonly url: string;
    /** NOTE(review): presumably the cached fetch response/result — confirm */
    private res?;
    constructor(url: string);
}

/* Excluded from this release type: FileReferenceSerializer */

/** Networking model describing a file spawned into the scene, shared between connected clients */
export declare class FileSpawnModel implements IModel {
    /** Unique identifier of the spawned file instance */
    guid: string;
    /** Name of the file */
    file_name: string;
    /** Hash of the file content */
    file_hash: string;
    /** Size of the file in bytes */
    file_size: number;
    /** Worldspace position the file was spawned at */
    position: Vector3 | null;
    /** Scale applied to the spawned object */
    scale: Vector3 | null;
    /** Random seed associated with the spawn */
    seed: number;
    /** Connection id of the sending client */
    sender: string;
    /** the url to download the file */
    downloadUrl: string;
    /** Guid of the parent object, if any */
    parentGuid?: string;
    /** Bounding box size of the spawned object, if known */
    boundsSize?: Vector3;
    constructor(connectionId: string, seed: number, guid: string, name: string, hash: string, size: number, position: Vector3, scale: Vector3, downloadUrl: string);
}

/** Signature of the WebXR `XRFrame.fillPoses` method: writes joint poses into the given target array.
 * @link https://developer.mozilla.org/en-US/docs/Web/API/XRFrame/fillPoses */
declare type FillPosesFunction = (spaces: IterableIterator<XRJointSpace>, referenceSpace: XRSpace, targetArray: Float32Array) => void;

/** Removes all string keys of T that start with Prefix (non-string keys are dropped as well).
 * NOTE(review): despite the name, this FILTERS OUT keys starting with the prefix rather than keeping them — the mapped type maps matching keys to `never`. */
declare type FilterStartingWith<T, Prefix extends string> = {
    [K in keyof T as K extends string ? (K extends `${Prefix}${infer _}` ? never : K) : never]: T[K];
};

/** Removes all properties of T whose value type is assignable to TypesToFilter */
declare type FilterTypes<T, TypesToFilter> = {
    [P in keyof T as T[P] extends TypesToFilter ? never : P]: T[P];
};

export declare function findByGuid(guid: string, hierarchy: Object3D): IGameObject | IComponent | null | undefined;

/**
 * Searches the scene for a component of the given type.
 * If the contextOrScene is not provided, the current context is used.
 * @param type The type of the component to search for.
 * @param contextOrScene The context or scene to search in. If not provided, the current context is used.
 * @param includeInactive If true, also inactive components are returned. Default is true.
 * @returns The first component of the given type found in the scene or null if none was found.
 * @example
 * ```typescript
 * const myComponent = findObjectOfType(MyComponent);
 * ```
 */
export declare function findObjectOfType<T extends IComponent>(type: Constructor<T>, contextOrScene?: undefined | Object3D | {
    scene: Scene;
}, includeInactive?: boolean): T | null;

/**
 * Searches the scene for all components of the given type.
 * If the contextOrScene is not provided, the current context is used.
 * @param type The type of the component to search for.
 * @param array Optional array to append the found components to. A new array is used when omitted — TODO confirm against implementation.
 * @param contextOrScene The context or scene to search in. If not provided, the current context is used.
 * @returns The array of found components.
 * @example
 * ```typescript
 * const myComponents = findObjectsOfType(MyComponent);
 * ```
 */
export declare function findObjectsOfType<T extends IComponent>(type: Constructor<T>, array?: T[], contextOrScene?: undefined | Object3D | {
    scene: Scene;
}): T[];

/**
 * Find all users of an object (i.e. objects that reference it)
 * @param object Object to find users of
 * @param recursive If true, also finds users of the found users (transitive)
 * @param predicate Optional filter deciding which users are included
 * @param set Set to add users to; a new one will be created if none is provided
 * @returns A set containing the users that were found
 */
export declare function findResourceUsers(object: object, recursive: boolean, predicate?: UserFilter | null | undefined, set?: Set<object>): Set<object>;

/**
 * Fit the camera to the specified objects or the whole scene.
 * Adjusts the camera position and optionally the FOV to ensure all objects are visible.
 *
 * @example Fit the main camera to the entire scene:
 * ```ts
 * import { fitCamera } from '@needle-tools/engine';
 *
 * // Fit the main camera to the entire scene
 * fitCamera();
 * ```
 * @example Fit a specific camera to specific objects with custom options:
 * ```ts
 * import { fitCamera } from '@needle-tools/engine';
 *
 * // Fit a specific camera to specific objects with custom options
 * const myCamera = ...; // your camera
 * const objectsToFit = [...]; // array of objects to fit
 * fitCamera({
 *    camera: myCamera,
 *    objects: objectsToFit,
 *    fitOffset: 1,
 *    fov: 20,
 * });
 * ```
 *
 * @param options Options for fitting the camera
 * @returns The computed {@link FitCameraReturnType} (camera, position, lookAt target and fov), or null when fitting could not be performed (e.g. no camera or nothing to fit — TODO confirm exact null conditions)
 */
export declare function fitCamera(options?: FitCameraOptions): null | FitCameraReturnType;

/**
 * Options for fitting a camera to the scene or specific objects.
 *
 * Used by {@link OrbitControls.fitCamera} and the {@link fitCamera}.
 *
 */
export declare type FitCameraOptions = {
    /** When enabled debug rendering will be shown */
    debug?: boolean;
    /**
     * If true the camera position and target will be applied immediately
     * @default true
     */
    autoApply?: boolean;
    /**
     * The context to use. If not provided the current context will be used
     */
    context?: Context;
    /**
     * The camera to fit. If not provided the current camera will be used
     */
    camera?: Camera_2;
    /**
     * The current zoom level of the camera (used to avoid clipping when fitting)
     */
    currentZoom?: number;
    /**
     * Minimum zoom level for the camera (e.g. if zoom is constrained by OrbitControls)
     */
    minZoom?: number;
    /**
     * Maximum zoom level for the camera (e.g. if zoom is constrained by OrbitControls)
     */
    maxZoom?: number;
    /**
     * The objects to fit the camera to. If not provided the scene children will be used
     */
    objects?: Object3D[] | Object3D;
    /**
     * A factor to control padding around the fitted objects.
     *
     * Values &gt; 1 will add more space around the fitted objects, values &lt; 1 will zoom in closer.
     *
     * @default 1.1
     */
    fitOffset?: number;
    /** The direction from which the camera should be fitted in worldspace. If not defined the current camera's position will be used */
    fitDirection?: Vector3Like;
    /** If set to "y" the camera will be centered in the y axis */
    centerCamera?: "none" | "y";
    /** Set to 'auto' to update the camera near or far plane based on the fitted-objects bounds */
    cameraNearFar?: "keep" | "auto";
    /**
     * Offset the camera position in world space
     */
    cameraOffset?: Partial<Vector3Like>;
    /**
     * Offset the camera position relative to the size of the objects being focused on (e.g. x: 0.5).
     * Value range: -1 to 1
     */
    relativeCameraOffset?: Partial<Vector3Like>;
    /**
     * Offset the camera target position in world space
     */
    targetOffset?: Partial<Vector3Like>;
    /**
     * Offset the camera target position relative to the size of the objects being focused on.
     * Value range: -1 to 1
     */
    relativeTargetOffset?: Partial<Vector3Like>;
    /**
     * Target field of view (FOV) for the camera
     */
    fov?: number;
};

/** Result of a {@link fitCamera} call */
export declare type FitCameraReturnType = {
    /** The camera that was fitted */
    camera: Camera_2;
    /** The computed camera position */
    position: Vector3;
    /** The computed camera target (look-at) position */
    lookAt: Vector3;
    /** The computed field of view, or undefined when the FOV was not changed */
    fov: number | undefined;
};

/**
 * Fits an object into a bounding volume. The volume is defined by a Box3 in world space.
 * @param obj the object to fit
 * @param volume the volume to fit the object into
 * @param opts options for fitting
 * @returns The object's bounding box before fitting and the scale that was applied
 */
export declare function fitObjectIntoVolume(obj: Object3D, volume: Box3, opts?: {
    /** Objects to ignore when calculating the obj's bounding box */
    ignore?: Object3D[];
    /** when `true` aligns the objects position to the volume ground
     * @default true
     */
    position?: boolean;
    /** when `true` scales the object to fit the volume
     * @default true
     */
    scale?: boolean;
}): {
    /** The object's bounding box before fitting */
    boundsBefore: Box3;
    /** The scale that was applied to the object */
    scale: Vector3;
};

/** Parameters describing what to fit and how to offset the result (used internally for fit operations — TODO confirm consumer) */
declare type FitParameters = {
    /** The object or objects to fit */
    object?: Object3D | Object3D[];
    /** Offset applied to the resulting position */
    positionOffset?: Partial<Vector3Like>;
    /** Factor applied to the resulting scale */
    scaleFactor?: Partial<Vector3Like>;
};

/**
 * FixedJoint locks two {@link Rigidbody} components together, making them move as one rigid unit.
 * The bodies maintain their relative position and rotation at the time the joint is created.
 *
 * Use this for:
 * - Attaching objects together permanently
 * - Creating compound rigid bodies
 * - Welding broken pieces back together
 *
 * @example Attach a weapon to a character
 * ```ts
 * const joint = weapon.addComponent(FixedJoint);
 * joint.connectedBody = characterRigidbody;
 * ```
 *
 * @summary Lock two Rigidbodies together rigidly
 * @category Physics
 * @group Components
 * @see {@link Joint} base class
 * @see {@link HingeJoint} for rotating connections
 */
export declare class FixedJoint extends Joint {
    /** Creates the underlying physics joint between the two bodies (called by the {@link Joint} base class) */
    protected createJoint(self: Rigidbody, other: Rigidbody): void;
}

/** A rectangle to focus on: a DOMRect, an Element (presumably its bounding client rect is used — TODO confirm), or a plain rect object */
declare type FocusRect = DOMRect | Element | {
    x: number;
    y: number;
    width: number;
    height: number;
};

/** Settings controlling how the camera aligns to a {@link FocusRect} */
declare type FocusRectSettings = {
    /** Lower values will result in faster alignment with the rect (value ~= seconds to reach target)
     * Minimum value is 0.
     */
    damping: number;
    /** X offset in camera coordinates. Used by ViewBox component */
    offsetX: number;
    /** Y offset in camera coordinates. Used by ViewBox component */
    offsetY: number;
    /** Zoom factor. Used by ViewBox component */
    zoom: number;
};

/**
 * Adds distance-based fog effect to the scene.
 * When enabled, objects will fade into the fog color based on their distance from the camera.
 *
 * This component is automatically added to the scene when fog is enabled in the editor.
 * For setting fog from code you can simply use `scene.fog = new Fog3(color, near, far)` without adding this component.
 *
 * @summary Adds fog effect to the scene
 * @category Rendering
 * @group Components
 * @link https://threejs.org/docs/#Fog
 */
export declare class Fog extends Component {
    /**
     * The underlying Three.js Fog object. You can modify its properties directly for more advanced control.
     * @remarks The Fog component provides convenient access to common fog properties like `near`, `far`, and `color`. Modifying those will update the underlying `fog` object accordingly. However, you can also access and modify the `fog` object directly for more advanced use cases, such as changing the fog mode or using a custom shader.
     * @link https://threejs.org/docs/#Fog for available properties and methods on the Fog object.
     */
    get fog(): Fog_2;
    /** The active fog mode (see {@link FogMode}) */
    get mode(): FogMode;
    /** Distance at which the fog begins (see three.js `Fog.near`) */
    set near(value: number);
    get near(): number;
    /** Distance at which the fog is fully opaque (see three.js `Fog.far`) */
    set far(value: number);
    get far(): number;
    /** The fog color */
    set color(value: Color);
    get color(): Color;
    // Backing three.js fog instance — presumably created on enable; TODO confirm
    private _fog?;
    onEnable(): void;
    onDisable(): void;
}

/**
 * Fog rendering mode
 */
declare enum FogMode {
    /** Linear fog increases linearly between the near and far distances */
    Linear = 1,
    /** Exponential fog increases exponentially with distance */
    Exponential = 2,
    /** Exponential squared fog for denser falloff */
    ExponentialSquared = 3
}

/** Text font style flags (combined bold/italic expressed as distinct values) */
declare enum FontStyle {
    /** Regular text */
    Normal = 0,
    /** Bold text */
    Bold = 1,
    /** Italic text */
    Italic = 2,
    /** Bold and italic text */
    BoldAndItalic = 3
}

/**
 * Iterates over all components on an Object3D and optionally its children.
 * The callback can return a value to stop iteration early.
 *
 * @param instance The Object3D to iterate components on
 * @param cb Callback function called for each component. Return a value to stop iteration.
 * @param recursive If true (default), also iterates components on all children
 * @returns The first non-undefined value returned by the callback, or undefined if the callback never returned a value
 *
 * @example Find first Rigidbody in hierarchy
 * ```ts
 * const rb = foreachComponent(myObject, comp => {
 *   if (comp instanceof Rigidbody) return comp;
 * });
 * ```
 */
export declare function foreachComponent(instance: Object3D, cb: ForEachComponentCallback, recursive?: boolean): any;

declare type ForEachComponentCallback = (comp: IComponent) => any;

export declare function foreachComponentEnumerator<T extends IComponent>(instance: Object3D, type?: Constructor<T>, includeChildren?: boolean, maxLevel?: number, _currentLevel?: number): Generator<T>;

export declare function forward(obj: Object3D): Vector3;

/**
 * Represents the different phases of the update cycle in Needle Engine.
 * Components can register for specific frame events to perform actions at precise moments.
 * The order of execution is: Start → EarlyUpdate → Update → LateUpdate → OnBeforeRender → OnAfterRender
 *
 * Note: {@link Start} and {@link Undefined} share the numeric value -1, so they cannot be
 * distinguished by value comparison or via the enum's reverse mapping.
 *
 * @see {@link Component.startCoroutine} for using FrameEvent with coroutines
 */
export declare enum FrameEvent {
    /** Called once when a component starts for the first time */
    Start = -1,
    /** Called at the beginning of each frame, before the main update */
    EarlyUpdate = 0,
    /** The main update phase, called once per frame */
    Update = 1,
    /** Called after all Update callbacks have finished */
    LateUpdate = 2,
    /** Called immediately before the scene is rendered */
    OnBeforeRender = 3,
    /** Called after the scene has been rendered */
    OnAfterRender = 4,
    /** Called before each physics simulation step */
    PrePhysicsStep = 9,
    /** Called after each physics simulation step */
    PostPhysicsStep = 10,
    /** Default value when no specific frame event is set (same numeric value as {@link Start}) */
    Undefined = -1
}

/** Networking payload broadcast when ownership of an object changes (presumably: `guid` identifies the object, `owner` the new owner's connection id — TODO confirm against networking layer) */
declare type GainedOwnershipBroadcastResponse = {
    guid: string;
    owner: string;
};

/**
 * Base class for objects in Needle Engine. Extends {@link Object3D} from three.js.
 * GameObjects can have components attached to them, which can be used to add functionality to the object.
 * They manage their components and provide methods to add, remove and get components.
 *
 * All {@link Object3D} types loaded in Needle Engine have methods like {@link addComponent}.
 * These methods are available directly on the GameObject instance:
 * ```typescript
 * target.addComponent(MyComponent);
 * ```
 *
 * And can be called statically on the GameObject class as well:
 * ```typescript
 * GameObject.setActive(target, true);
 * ```
 */
export declare abstract class GameObject extends Object3D implements Object3D, IGameObject {
    /**
     * Indicates if the GameObject is currently active. Inactive GameObjects will not be rendered or updated.
     * When the activeSelf state changes, components will receive {@link Component.onEnable} or {@link Component.onDisable} callbacks.
     */
    abstract activeSelf: boolean;
    /** @deprecated Use {@link addComponent} instead */
    abstract addNewComponent<T extends IComponent>(type: ConstructorConcrete<T>, init?: ComponentInit<T>): T;
    /**
     * Creates a new component on this gameObject or adds an existing component instance
     * @param comp Component type constructor or existing component instance
     * @param init Optional initialization values for the component
     * @returns The newly created or added component
     */
    abstract addComponent<T extends IComponent>(comp: T | ConstructorConcrete<T>, init?: ComponentInit<T>): T;
    /**
     * Removes a component from this GameObject
     * @param comp Component instance to remove
     * @returns The removed component
     */
    abstract removeComponent<T extends IComponent>(comp: T): T;
    /**
     * Gets an existing component of the specified type or adds a new one if it doesn't exist
     * @param typeName Constructor of the component type to get or add
     * @returns The existing or newly added component
     */
    abstract getOrAddComponent<T>(typeName: ConstructorConcrete<T> | null): T;
    /**
     * Gets a component of the specified type attached to this GameObject
     * @param type Constructor of the component type to get
     * @returns The component if found, otherwise null
     */
    abstract getComponent<T>(type: Constructor<T>): T | null;
    /**
     * Gets all components of the specified type attached to this GameObject
     * @param type Constructor of the component type to get
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    abstract getComponents<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    /**
     * Gets a component of the specified type in this GameObject's children hierarchy
     * @param type Constructor of the component type to get
     * @returns The first matching component if found, otherwise null
     */
    abstract getComponentInChildren<T>(type: Constructor<T>): T | null;
    /**
     * Gets all components of the specified type in this GameObject's children hierarchy
     * @param type Constructor of the component type to get
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    abstract getComponentsInChildren<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    /**
     * Gets a component of the specified type in this GameObject's parent hierarchy
     * @param type Constructor of the component type to get
     * @returns The first matching component if found, otherwise null
     */
    abstract getComponentInParent<T>(type: Constructor<T>): T | null;
    /**
     * Gets all components of the specified type in this GameObject's parent hierarchy
     * @param type Constructor of the component type to get
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    abstract getComponentsInParent<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    /**
     * The position of this GameObject in world space
     */
    abstract get worldPosition(): Vector3;
    abstract set worldPosition(val: Vector3);
    /**
     * The rotation of this GameObject in world space as a quaternion
     */
    abstract set worldQuaternion(val: Quaternion);
    abstract get worldQuaternion(): Quaternion;
    /**
     * The rotation of this GameObject in world space in euler angles (degrees)
     */
    abstract set worldRotation(val: Vector3);
    abstract get worldRotation(): Vector3;
    /**
     * The scale of this GameObject in world space
     */
    abstract set worldScale(val: Vector3);
    abstract get worldScale(): Vector3;
    /**
     * The forward direction vector of this GameObject in world space
     */
    abstract get worldForward(): Vector3;
    abstract set worldForward(val: Vector3);
    /**
     * The right direction vector of this GameObject in world space
     */
    abstract get worldRight(): Vector3;
    /**
     * The up direction vector of this GameObject in world space
     */
    abstract get worldUp(): Vector3;
    /**
     * Unique identifier for this GameObject
     */
    guid: string | undefined;
    /**
     * Destroys this GameObject and all its components.
     * Internally, this is added to the three.js {@link Object3D} prototype.
     */
    abstract destroy(): any;
    /**
     * Checks if a GameObject has been destroyed
     * @param go The GameObject to check
     * @returns True if the GameObject has been destroyed
     */
    static isDestroyed(go: Object3D): boolean;
    /**
     * Sets the active state of a GameObject
     * @param go The GameObject to modify
     * @param active Whether the GameObject should be active
     * @param processStart Whether to process the start callbacks if being activated
     */
    static setActive(go: Object3D, active: boolean, processStart?: boolean): void;
    /**
     * Checks if the GameObject itself is active (same as go.visible)
     * @param go The GameObject to check
     * @returns True if the GameObject is active
     */
    static isActiveSelf(go: Object3D): boolean;
    /**
     * Checks if the GameObject is active in the hierarchy (e.g. if any parent is invisible or not in the scene it will be false)
     * @param go The GameObject to check
     * @returns True if the GameObject is active in the hierarchy
     */
    static isActiveInHierarchy(go: Object3D): boolean;
    /**
     * Marks a GameObject to be rendered using instancing
     * @param go The GameObject to mark
     * @param instanced Whether the GameObject should use instanced rendering
     */
    static markAsInstancedRendered(go: Object3D, instanced: boolean): void;
    /**
     * Checks if a GameObject is using instanced rendering
     * @param instance The GameObject to check
     * @returns True if the GameObject is using instanced rendering
     */
    static isUsingInstancing(instance: Object3D): boolean;
    /**
     * Executes a callback for all components of the provided type on the provided object and its children
     * @param instance Object to run the method on
     * @param cb Callback to run on each component, "return undefined;" to continue and "return <anything>;" to break the loop
     * @param recursive If true, the method will be run on all children as well
     * @returns The last return value of the callback
     */
    static foreachComponent(instance: Object3D, cb: (comp: Component) => any, recursive?: boolean): any;
    /**
     * Creates a new instance of the provided object that will be replicated to all connected clients
     * @param instance Object to instantiate
     * @param opts Options for the instantiation
     * @returns The newly created instance or null if creation failed
     */
    static instantiateSynced(instance: GameObject | Object3D | null, opts: SyncInstantiateOptions): GameObject | null;
    /**
     * Creates a new instance of the provided object (like cloning it including all components and children)
     * @param instance Object to instantiate
     * @param opts Options for the instantiation (e.g. with what parent, position, etc.)
     * @returns The newly created instance
     */
    static instantiate(instance: AssetReference, opts?: IInstantiateOptions | null | undefined): Promise<Object3D | null>;
    static instantiate(instance: GameObject | Object3D, opts?: IInstantiateOptions | null | undefined): GameObject;
    /**
     * Destroys an object on all connected clients (if in a networked session)
     * @param instance Object to destroy
     * @param context Optional context to use
     * @param recursive If true, all children will be destroyed as well
     */
    static destroySynced(instance: Object3D | Component, context?: Context, recursive?: boolean): void;
    /**
     * Destroys an object
     * @param instance Object to destroy
     * @param recursive If true, all children will be destroyed as well. Default: true
     */
    static destroy(instance: Object3D | Component, recursive?: boolean): void;
    /**
     * Adds an object to parent and ensures all components are properly registered
     * @param instance Object to add
     * @param parent Parent to add the object to
     * @param context Optional context to use
     */
    static add(instance: Object3D | null | undefined, parent: Object3D, context?: Context): void;
    /**
     * Removes the object from its parent and deactivates all of its components
     * @param instance Object to remove
     */
    static remove(instance: Object3D | null | undefined): void;
    /**
     * Invokes a method on all components including children (if a method with that name exists)
     * @param go GameObject to invoke the method on
     * @param functionName Name of the method to invoke
     * @param args Arguments to pass to the method
     */
    static invokeOnChildren(go: Object3D | null | undefined, functionName: string, ...args: any): void;
    /**
     * Invokes a method on all components that have a method matching the provided name
     * @param go GameObject to invoke the method on
     * @param functionName Name of the method to invoke
     * @param children Whether to invoke on children as well
     * @param args Arguments to pass to the method
     */
    static invoke(go: Object3D | null | undefined, functionName: string, children?: boolean, ...args: any): void;
    /** @deprecated Use {@link addComponent} instead */
    static addNewComponent<T extends IComponent>(go: IGameObject | Object3D, type: T | ConstructorConcrete<T>, init?: ComponentInit<T>, callAwake?: boolean): T;
    /**
     * Adds a new component (or moves an existing component) to the provided object
     * @param go Object to add the component to
     * @param instanceOrType If an instance is provided it will be moved to the new object, if a type is provided a new instance will be created
     * @param init Optional init object to initialize the component with
     * @param opts Optional options for adding the component
     * @returns The added or moved component
     */
    static addComponent<T extends IComponent>(go: IGameObject | Object3D, instanceOrType: T | ConstructorConcrete<T>, init?: ComponentInit<T>, opts?: {
        callAwake: boolean;
    }): T;
    /**
     * Moves a component to a new object
     * @param go GameObject to move the component to
     * @param instance Component to move
     * @returns The moved component
     */
    static moveComponent<T extends IComponent>(go: IGameObject | Object3D, instance: T | ConstructorConcrete<T>): T;
    /**
     * Removes a component from its object
     * @param instance Component to remove
     * @returns The removed component
     */
    static removeComponent<T extends IComponent>(instance: T): T;
    /**
     * Gets or adds a component of the specified type
     * @param go GameObject to get or add the component to
     * @param typeName Constructor of the component type
     * @returns The existing or newly added component
     */
    static getOrAddComponent<T extends IComponent>(go: IGameObject | Object3D, typeName: ConstructorConcrete<T>): T;
    /**
     * Gets a component on the provided object
     * @param go GameObject to get the component from
     * @param typeName Constructor of the component type
     * @returns The component if found, otherwise null
     */
    static getComponent<T extends IComponent>(go: IGameObject | Object3D | null, typeName: Constructor<T> | null): T | null;
    /**
     * Gets all components of the specified type on the provided object
     * @param go GameObject to get the components from
     * @param typeName Constructor of the component type
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    static getComponents<T extends IComponent>(go: IGameObject | Object3D | null, typeName: Constructor<T>, arr?: T[] | null): T[];
    /**
     * Finds an object or component by its unique identifier
     * @param guid Unique identifier to search for
     * @param hierarchy Root object to search in
     * @returns The found GameObject or Component, or null/undefined if not found
     */
    static findByGuid(guid: string, hierarchy: Object3D): GameObject | Component | null | undefined;
    /**
     * Finds the first object of the specified component type in the scene
     * @param typeName Constructor of the component type
     * @param context Context or root object to search in
     * @param includeInactive Whether to include inactive objects in the search
     * @returns The first matching component if found, otherwise null
     */
    static findObjectOfType<T extends IComponent>(typeName: Constructor<T>, context?: Context | Object3D, includeInactive?: boolean): T | null;
    /**
     * Finds all objects of the specified component type in the scene
     * @param typeName Constructor of the component type
     * @param context Context or root object to search in
     * @returns Array of matching components
     */
    static findObjectsOfType<T extends IComponent>(typeName: Constructor<T>, context?: Context | Object3D): Array<T>;
    /**
     * Gets a component of the specified type in the gameObject's children hierarchy
     * @param go GameObject to search in
     * @param typeName Constructor of the component type
     * @param includeInactive Whether to include inactive objects in the search
     * @returns The first matching component if found, otherwise null
     */
    static getComponentInChildren<T extends IComponent>(go: IGameObject | Object3D, typeName: Constructor<T>, includeInactive?: boolean): T | null;
    /**
     * Gets all components of the specified type in the gameObject's children hierarchy
     * @param go GameObject to search in
     * @param typeName Constructor of the component type
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    static getComponentsInChildren<T extends IComponent>(go: IGameObject | Object3D, typeName: Constructor<T>, arr?: T[] | null): Array<T>;
    /**
     * Gets a component of the specified type in the gameObject's parent hierarchy
     * @param go GameObject to search in
     * @param typeName Constructor of the component type
     * @returns The first matching component if found, otherwise null
     */
    static getComponentInParent<T extends IComponent>(go: IGameObject | Object3D, typeName: Constructor<T>): T | null;
    /**
     * Gets all components of the specified type in the gameObject's parent hierarchy
     * @param go GameObject to search in
     * @param typeName Constructor of the component type
     * @param arr Optional array to populate with the components
     * @returns Array of components
     */
    static getComponentsInParent<T extends IComponent>(go: IGameObject | Object3D, typeName: Constructor<T>, arr?: Array<T> | null): Array<T>;
    /**
     * Gets all components on the gameObject
     * @param go GameObject to get components from
     * @returns Array of all components
     */
    static getAllComponents(go: IGameObject | Object3D): Component[];
    /**
     * Iterates through all components on the gameObject
     * @param go GameObject to iterate components on
     * @returns Generator yielding each component
     */
    static iterateComponents(go: IGameObject | Object3D): Generator<any, void, unknown>;
}

/** Button names on typical controllers (there is no universally agreed naming)
 * https://w3c.github.io/gamepad/#remapping
 */
export declare type GamepadButtonName = "a-button" | "b-button" | "x-button" | "y-button";

declare type GamepadKey = "button" | "xAxis" | "yAxis";

/** Generates a QR code HTML image using https://github.com/davidshimjs/qrcodejs
 * @param args.text The text to encode
 * @param args.width The width of the QR code
 * @param args.height The height of the QR code
 * @param args.colorDark The color of the dark squares
 * @param args.colorLight The color of the light squares
 * @param args.correctLevel The error correction level to use
 * @param args.showLogo If true, the logo will be shown in the center of the QR code. By default the Needle Logo will be used. You can override which logo is being used by setting the `needle-engine` web component's `qr-logo-src` attribute. The logo can also be disabled by setting that attribute to a falsy value (e.g. "0" or "false")
 * @param args.showUrl If true, the URL will be shown below the QR code
 * @param args.domElement The dom element to append the QR code to. If not provided a new div will be created and returned
 * @returns The dom element containing the QR code
 */
export declare function generateQRCode(args: {
    domElement?: HTMLElement;
    text: string;
    width?: number;
    height?: number;
    colorDark?: string;
    colorLight?: string;
    correctLevel?: any;
    showLogo?: boolean;
    showUrl?: boolean;
}): Promise<HTMLElement>;

export declare function generateSeed(): number;

/** Name of the generator used to export the scene / build the web project */
export declare const GENERATOR: string;

/**
 * Get the axis-aligned bounding box of a list of objects.
 * @param objects The objects to get the bounding box from.
 * @param ignore Objects to ignore when calculating the bounding box, either as an array or as a callback (the callback's boolean return presumably marks an object as ignored — TODO confirm). Objects that are invisible (gizmos, helpers, etc.) are excluded by default.
 * @param layers The layers to include. Typically the main camera's layers.
 * @param result The result box to store the bounding box in. Returns a new box if not passed in.
 * @returns The computed bounding box (either `result` or a newly created Box3)
 */
export declare function getBoundingBox(objects: Object3D | Object3D[], ignore?: ((obj: Object3D) => void | boolean) | Array<Object3D | null | undefined> | undefined, layers?: Layers | undefined | null, result?: Box3 | undefined): Box3;

/** Get the camera controller for the given camera (if any)
 * @param cam The camera to get the controller for
 * @returns The camera controller, or null if the camera has none
 */
export declare function getCameraController(cam: Camera_2): ICameraController | null;

/**
 * Searches for a given component type in the given object.
 * @param obj The object to search in.
 * @param componentType The type of the component to search for.
 * @returns The first component of the given type found in the given object, or null if none was found.
 * @example
 * ```typescript
 * const myComponent = getComponent(myObject, MyComponent);
 * ```
 */
export declare function getComponent<T extends IComponent>(obj: Object3D, componentType: Constructor<T>): T | null;

/**
 * Searches for a given component type in the children of the given object.
 * @param obj The object to start the search from - this object is also included in the search.
 * @param componentType The type of the component to search for.
 * @param includeInactive If true, also inactive components are returned. Default is true.
 * @returns The first component of the given type found in the children of the given object, or null if none was found.
 * @example
 * ```typescript
 * const myComponent = getComponentInChildren(myObject, MyComponent);
 * ```
 */
export declare function getComponentInChildren<T extends IComponent>(obj: Object3D, componentType: Constructor<T>, includeInactive?: boolean): T | null;

/**
 * Searches for a given component type in the parent hierarchy of the given object.
 * @param obj The object to start the search from - this object is also included in the search.
 * @param componentType The type of the component to search for.
 * @param includeInactive If true, also inactive components are returned. Default is false.
 * @returns The first component of the given type found in the parent hierarchy of the given object, or null if none was found.
 * @example
 * ```typescript
 * const myComponent = getComponentInParent(myObject, MyComponent);
 * ```
 */
export declare function getComponentInParent<T extends IComponent>(obj: Object3D, componentType: Constructor<T>, includeInactive?: boolean): T | null;

/**
 * Searches for all components of a given type on the given object.
 * NOTE(review): the previous summary said "in the children of the given object", but {@link getComponentsInChildren} exists as a separate function — presumably this one searches only `obj` itself; confirm against the implementation.
 * @param obj The object to search in.
 * @param componentType The type of the component to search for.
 * @param arr An optional array to store the found components in. If not provided, a new array is created.
 * @param clearArray If true, the array is cleared before storing the found components. Default is true.
 * @returns An array of components of the given type found on the given object.
 * @example
 * ```typescript
 * const myComponents = getComponents(myObject, MyComponent);
 * ```
 */
export declare function getComponents<T extends IComponent>(obj: Object3D, componentType: Constructor<T>, arr?: T[] | null, clearArray?: boolean): T[];

/**
 * Searches for a given component type in the children of the given object.
 * @param obj The object to start the search from - this object is also included in the search.
 * @param componentType The type of the component to search for.
 * @param arr An optional array to store the found components in. If not provided, a new array is created.
 * @param clearArray If true, the array is cleared before storing the found components. Default is true.
 * @returns An array of components of the given type found in the children of the given object (empty when none are found).
 * @example
 * ```typescript
 * const myComponents = getComponentsInChildren(myObject, MyComponent);
 * ```
 */
export declare function getComponentsInChildren<T extends IComponent>(obj: Object3D, componentType: Constructor<T>, arr?: T[], clearArray?: boolean): T[];

/**
 * Searches for a given component type in the parent hierarchy of the given object.
 * @param obj The object to start the search from - this object is also included in the search.
 * @param componentType The type of the component to search for.
 * @param arr An optional array to store the found components in. If not provided, a new array is created.
 * @param clearArray If true, the array is cleared before storing the found components. Default is true.
 * @returns An array of components of the given type found in the parent hierarchy of the given object (empty when none are found).
 * @example
 * ```typescript
 * const myComponents = getComponentsInParent(myObject, MyComponent);
 * ```
 */
export declare function getComponentsInParent<T extends IComponent>(obj: Object3D, componentType: Constructor<T>, arr?: T[] | null, clearArray?: boolean): T[];

/** Gets the date formatted like `20240220-161923` (presumably YYYYMMDD-HHMMSS — the previous example "161993" contained an invalid seconds value; confirm the exact format against the implementation). When no Date is passed in, the current local date is used. */
export declare function getFormattedDate(date?: Date): string;

/** Returns a HTML element containing an icon. Using https://fonts.google.com/icons
 * As a string you should pass in the name of the icon, e.g. "add" or "delete"
 * @returns HTMLElement containing the icon
 */
export declare function getIconElement(str: string): HTMLElement;

/** Loads an icon as a texture.
 * @param str the icon name — presumably the same Google Fonts icon names as {@link getIconElement}; verify
 * @returns a promise resolving to the texture, or `null` if it could not be created
 */
export declare function getIconTexture(str: string): Promise<Texture | null>;

/** Returns the currently registered glTF loader implementation. */
export declare function getLoader(): INeedleGltfLoader;

/** Gets an existing component of the given type on the object, or adds one if none exists.
 * @param go the object to search on / add to
 * @param typeName the concrete component type to get or add
 * @param init optional initializer for the component — presumably applied only when a new instance is created; verify
 * @returns the existing or newly added component
 */
export declare function getOrAddComponent<T extends IComponent>(go: Object3D, typeName: ConstructorConcrete<T>, init?: ComponentInit<T>): T;

/**
 * Checks if a URL parameter exists and returns its value.
 * Useful for debugging, feature flags, and configuration.
 *
 * @param paramName The URL parameter name to check
 * @returns
 * - `true` if the parameter exists without a value (e.g. `?debug`)
 * - `false` if the parameter doesn't exist or is set to `0`
 * - The numeric value if it's a number (e.g. `?level=5` returns `5`)
 * - The string value otherwise (e.g. `?name=test` returns `"test"`)
 *
 * @example Check debug mode
 * ```ts
 * if (getParam("debug")) {
 *   console.log("Debug mode enabled");
 * }
 * ```
 * @example Get a numeric value
 * ```ts
 * const level = getParam("level"); // Returns number if ?level=5
 * ```
 * @see {@link getUrlParams} for raw access to the page's URLSearchParams
 */
export declare function getParam<T extends string>(paramName: T): Param<T>;

/** Returns a readable path describing the object's parent hierarchy — presumably the names from the root down to the object; confirm the separator/order against the implementation. */
export declare function getParentHierarchyPath(obj: Object3D): string;

/** @deprecated use resolveUrl instead */
export declare function getPath(source: SourceIdentifier | undefined, uri: string): string;

/** Gets (or lazily creates) the PeerJS instance used for peer-to-peer networking.
 * @param id optional peer id to connect with
 * @param opts optional PeerJS options
 * @returns a promise resolving to the PeerJS peer instance
 */
export declare function getPeerjsInstance(id?: string, opts?: PeerJSOption): Promise<default_2>;

/** Returns the globally configured PeerJS options, or `undefined` when none are set. */
export declare function getPeerOptions(): PeerJSOption | undefined;

/** Returns the number of current users of the given resource, or `undefined` when the object is not tracked. */
export declare function getResourceUserCount(object: object): number | undefined;

/** Gets a temporary color. If a color is passed in it is presumably copied into the temporary color (mirroring {@link getTempQuaternion} / {@link getTempVector} — verify).
 * Temporary values are cached and reused internally. Don't store them!
 */
export declare function getTempColor(color?: Color): Color;

/**
 * Gets a temporary quaternion. If a quaternion is passed in it will be copied to the temporary quaternion
 * Temporary quaternions are cached and reused internally. Don't store them!
 * @param value the quaternion to copy
 * @returns a temporary quaternion
 */
export declare function getTempQuaternion(value?: Quaternion | DOMPointReadOnly | {
    x: number;
    y: number;
    z: number;
    w: number;
}): Quaternion;

/**
 * Gets a temporary quaternion initialized from the given components.
 * Temporary quaternions are cached and reused internally. Don't store them!
 * @param x the x component
 * @param y the y component
 * @param z the z component
 * @param w the w component
 * @returns a temporary quaternion
 */
export declare function getTempQuaternion(x: number, y: number, z: number, w: number): Quaternion;

/** Gets a temporary vector. If a vector is passed in it will be copied to the temporary vector
 * Temporary vectors are cached and reused internally. Don't store them!
 * @param vec3 the vector to copy or the x value
 * @param y the y value
 * @param z the z value
 * @returns a temporary vector
 *
 * @example
 * ``` javascript
 * const vec = getTempVector(1, 2, 3);
 * const vec2 = getTempVector(vec);
 * const vec3 = getTempVector(new Vector3(1, 2, 3));
 * const vec4 = getTempVector(new DOMPointReadOnly(1, 2, 3));
 * const vec5 = getTempVector();
 * ```
 */
export declare function getTempVector(): Vector3;

/** Gets a temporary vector initialized from another Vector3. Don't store the result! */
export declare function getTempVector(vec3: Vector3): Vector3;

/** Gets a temporary vector initialized from an `[x, y, z]` tuple. Don't store the result! */
export declare function getTempVector(vec3: [number, number, number]): Vector3;

/** Gets a temporary vector initialized from a Vec3-like object. Don't store the result! */
export declare function getTempVector(vec3: Vec3): Vector3;

/** Gets a temporary vector initialized from a DOMPointReadOnly. Don't store the result! */
export declare function getTempVector(dom: DOMPointReadOnly): Vector3;

/** Gets a temporary vector from a single x value (y/z presumably default to 0 — verify). Don't store the result! */
export declare function getTempVector(x: number): Vector3;

/** Gets a temporary vector initialized from x, y and z components. Don't store the result! */
export declare function getTempVector(x: number, y: number, z: number): Vector3;

/** A parameterless callback returning any value (generic getter). */
declare type getter = () => any;

/** Returns the current page's URL search parameters. */
export declare function getUrlParams(): URLSearchParams;

/** Returns whether the given object participates in the custom shadow rendering pass — the exact flag/condition is not visible from this declaration; see the implementation. */
export declare function getVisibleInCustomShadowRendering(obj: Object3D): boolean;

/** Get the world direction. Returns world forward if nothing is passed in.
 * Pass in a relative direction to get it converted to world space (e.g. dir = new Vector3(0, 1, 1))
 * The returned vector will not be normalized
 */
export declare function getWorldDirection(obj: Object3D, dir?: Vector3): Vector3;

/** Get the world rotation of an object as euler angles.
 * NOTE(review): presumably returns a cached/temporary Euler like the other `getWorld*` helpers — don't store it; verify.
 */
export declare function getWorldEuler(obj: Object3D): Euler;

/**
 * Get the world position of an object
 * @param obj the object to get the world position from
 * @param vec a vector to store the result in. If not passed in a temporary vector will be used
 * @param updateParents if true the parents will be updated before getting the world position
 * @returns the world position
 */
export declare function getWorldPosition(obj: Object3D, vec?: Vector3 | null, updateParents?: boolean): Vector3;

/** Get the world rotation of an object as a quaternion.
 * @param obj the object to read from
 * @param target an optional quaternion to store the result in — if omitted, presumably a temporary quaternion is used; don't store it
 * @returns the world rotation
 */
export declare function getWorldQuaternion(obj: Object3D, target?: Quaternion | null): Quaternion;

/** Get the world rotation of an object as a Vector3 — presumably euler angles; confirm units (degrees vs radians) against the implementation. */
export declare function getWorldRotation(obj: Object3D): Vector3;

/** Get the world scale of an object.
 * @param obj the object to read from
 * @param vec an optional vector to store the result in — if omitted, presumably a temporary vector is used; don't store it
 * @returns the world scale
 */
export declare function getWorldScale(obj: Object3D, vec?: Vector3 | null): Vector3;

/** Color accepted by gizmo draw calls: a three.js ColorRepresentation, a Color extended with an alpha channel `a`, or an RGBAColor. */
declare type GizmoColor = ColorRepresentation | (Color & {
    a: number;
}) | RGBAColor;

/** Gizmos are temporary objects that are drawn in the scene for debugging or visualization purposes
 * They are automatically removed after a given duration and cached internally to reduce overhead.
 * Use the static methods of this class to draw gizmos in the scene.
 */
export declare class Gizmos {
    private constructor();
    /**
     * Allow creating gizmos
     * If disabled then no gizmos will be added to the scene anymore
     */
    static enabled: boolean;
    /**
     * Returns true if a given object is a gizmo
     */
    static isGizmo(obj: Object3D): boolean;
    /** Set visibility of all currently rendered gizmos */
    static setVisible(visible: boolean): void;
    /**
     * Draw a label in the scene or attached to an object (if a parent is provided)
     * @param position the position of the label in world space
     * @param text the text of the label
     * @param size the size of the label in world space
     * @param duration the duration in seconds the label will be rendered. If 0 it will be rendered for one frame
     * @param color the color of the label
     * @param backgroundColor the background color of the label
     * @param parent the parent object to attach the label to. If no parent is provided the label will be attached to the scene
     * @returns a handle to the label that can be used to update the text
     */
    static DrawLabel(position: Vec3, text: string, size?: number, duration?: number, color?: ColorRepresentation, backgroundColor?: ColorRepresentation | GizmoColor, parent?: Object3D): LabelHandle | null;
    /**
     * Draw a ray gizmo in the scene
     * @param origin the origin of the ray in world space
     * @param dir the direction of the ray in world space
     * @param color the color of the ray
     * @param duration the duration in seconds the ray will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the ray will be rendered with depth test
     */
    static DrawRay(origin: Vec3, dir: Vec3, color?: GizmoColor, duration?: number, depthTest?: boolean): void;
    /**
     * Draw a direction gizmo in the scene
     * @param pt the start point of the direction in world space
     * @param direction the direction vector in world space
     * @param color the color of the line
     * @param duration the duration in seconds the line will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the line will be rendered with depth test
     * @param lengthFactor scales the rendered length of the direction. Default is 1
     */
    static DrawDirection(pt: Vec3, direction: Vec3 | Vec4, color?: GizmoColor, duration?: number, depthTest?: boolean, lengthFactor?: number): void;
    /**
     * Draw a line gizmo in the scene
     * @param pt0 the start point of the line in world space
     * @param pt1 the end point of the line in world space
     * @param color the color of the line
     * @param duration the duration in seconds the line will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the line will be rendered with depth test
     */
    static DrawLine(pt0: Vec3, pt1: Vec3, color?: GizmoColor, duration?: number, depthTest?: boolean): void;
    /**
     * Draw a 2D circle gizmo in the scene
     * @param pt0 the center of the circle in world space
     * @param normal the normal of the circle in world space
     * @param radius the radius of the circle in world space
     * @param color the color of the circle
     * @param duration the duration in seconds the circle will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the circle will be rendered with depth test
     */
    static DrawCircle(pt0: Vec3, normal: Vec3, radius: number, color?: ColorRepresentation, duration?: number, depthTest?: boolean): void;
    /**
     * Draw a 3D wiremesh sphere gizmo in the scene
     * @param center the center of the sphere in world space
     * @param radius the radius of the sphere in world space
     * @param color the color of the sphere
     * @param duration the duration in seconds the sphere will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the sphere will be rendered with depth test
     */
    static DrawWireSphere(center: Vec3, radius: number, color?: GizmoColor, duration?: number, depthTest?: boolean): void;
    /**
     * Draw a 3D sphere gizmo in the scene
     * @param center the center of the sphere in world space
     * @param radius the radius of the sphere in world space
     * @param color the color of the sphere
     * @param duration the duration in seconds the sphere will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the sphere will be rendered with depth test
     */
    static DrawSphere(center: Vec3, radius: number, color?: GizmoColor, duration?: number, depthTest?: boolean): void;
    /**
     * Draw a 3D wiremesh box gizmo in the scene
     * @param center the center of the box in world space
     * @param size the size of the box in world space
     * @param color the color of the box
     * @param duration the duration in seconds the box will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the box will be rendered with depth test
     * @param rotation the rotation of the box in world space (presumably identity when omitted — verify)
     */
    static DrawWireBox(center: Vec3, size: Vec3, color?: GizmoColor, duration?: number, depthTest?: boolean, rotation?: Quaternion | undefined): void;
    /**
     * Draw a 3D wiremesh box gizmo in the scene
     * @param box the box in world space
     * @param color the color of the box
     * @param duration the duration in seconds the box will be rendered. If 0 it will be rendered for one frame. Default: 0
     * @param depthTest if true the box will be rendered with depth test. Default: true
     */
    static DrawWireBox3(box: Box3, color?: GizmoColor, duration?: number, depthTest?: boolean): void;
    /** Cached up-vector — presumably used internally to orient gizmo geometry; see implementation */
    private static _up;
    /**
     * Draw an arrow gizmo in the scene
     * @param pt0 the start point of the arrow in world space
     * @param pt1 the end point of the arrow in world space
     * @param color the color of the arrow
     * @param duration the duration in seconds the arrow will be rendered. If 0 it will be rendered for one frame
     * @param depthTest if true the arrow will be rendered with depth test
     * @param wireframe if true the arrow will be rendered as wireframe
     */
    static DrawArrow(pt0: Vec3, pt1: Vec3, color?: ColorRepresentation, duration?: number, depthTest?: boolean, wireframe?: boolean): void;
    /**
     * Render a wireframe mesh in the scene. The mesh will be removed after the given duration (if duration is 0 it will be rendered for one frame).
     * If a mesh object is provided then the mesh's matrixWorld and geometry will be used. Otherwise, the provided matrix and geometry will be used.
     * @param options the options for the wire mesh
     * @param options.duration the duration in seconds the mesh will be rendered. If 0 it will be rendered for one frame
     * @param options.color the color of the wire mesh
     * @param options.depthTest if true the wire mesh will be rendered with depth test
     * @param options.mesh the mesh object to render (if it is provided the matrix and geometry will be used)
     * @param options.matrix the matrix of the mesh to render
     * @param options.geometry the geometry of the mesh to render
     * @example
     * ```typescript
     * Gizmos.DrawWireMesh({ duration: 1, color: 0xff0000, mesh: myMesh });
     * ```
     */
    static DrawWireMesh(options: {
        duration?: number;
        color?: ColorRepresentation;
        depthTest?: boolean;
    } & ({
        mesh: Mesh;
    } | {
        matrix: Matrix4;
        geometry: BufferGeometry;
    })): void;
}

/** GLTF, GLB or VRM — alias of the three.js GLTFLoader result type */
export declare type GLTF = GLTF_2;

/**
 * GltfExport is a component that enables exporting selected 3D objects from the scene to the glTF format.
 * You can specify whether to export in binary format (.glb) or JSON format (.gltf), and select specific objects to include in the export.
 * The exported glTF file can be used in various 3D applications and engines that support the glTF standard.
 *
 * @summary Export selected 3D objects to glTF format
 * @category Asset Management
 * @group Components
 */
export declare class GltfExport extends Component {
    /** If true the export produces binary glTF (.glb); otherwise JSON (.gltf) */
    binary: boolean;
    /** The objects to include in the export — presumably used by {@link exportNow}; verify */
    objects: Object3D[];
    private ext?;
    /** Runs the export with the given file name.
     * @param name the file name for the exported asset
     * @param opts optional export options
     * @returns a promise resolving to true when the export succeeded
     */
    exportNow(name: string, opts?: ExportOptions_2): Promise<boolean>;
    /** Exports the given objects to glTF.
     * @param objectsToExport the objects to export
     * @param opts optional export options
     * @returns a promise resolving to the export result (exact shape not visible from this declaration)
     */
    export(objectsToExport: Object3D[], opts?: ExportOptions_2): Promise<any>;
    private static saveArrayBuffer;
    private static saveJson;
    private static save;
    private static collectAnimations;
    private static calculateCenter;
    private static filterTopmostParent;
}

/** Box helper used in the glTF export workflow (extends BoxHelperComponent) — exact behavior is defined in the implementation. */
export declare class GltfExportBox extends BoxHelperComponent {
    /** Optional root object the box relates to — verify usage in the implementation */
    sceneRoot?: Object3D;
}

/** A gradient composed of separate alpha and color keys (particle-system style). */
export declare class Gradient {
    /** Keys controlling alpha over time */
    alphaKeys: Array<AlphaKey>;
    /** Keys controlling color over time */
    colorKeys: Array<ColorKey>;
    /** Duration covered by the gradient — presumably derived from the largest key time; verify */
    get duration(): number;
    /** Samples the gradient at the given time and writes the result into `target`.
     * @param time the time to sample at
     * @param target the color the sampled value is written to
     * @returns the target color
     */
    evaluate(time: number, target: RGBAColor): RGBAColor;
}

/**
 * [Graphic](https://engine.needle.tools/docs/api/Graphic) provides basic rendering for UI elements with color, opacity, and texture support.
 * @category User Interface
 * @group Components
 */
export declare class Graphic extends BaseUIComponent implements IGraphic, IRectTransformChangedReceiver {
    /** Type marker for graphic components (IGraphic) */
    get isGraphic(): boolean;
    /** The color (including alpha) of this graphic */
    get color(): RGBAColor;
    set color(col: RGBAColor);
    private _alphaFactor;
    /** Sets an additional alpha multiplier applied on top of the color's own alpha */
    setAlphaFactor(factor: number): void;
    /** The current additional alpha multiplier */
    get alphaFactor(): number;
    private sRGBColor;
    /** Hook invoked when the color changes — presumably re-applies the color to the underlying UI element; verify */
    protected onColorChanged(): void;
    private get m_Color();
    /** Whether this graphic is considered by UI raycasting — presumably mirrors Unity's `Graphic.raycastTarget`; verify */
    raycastTarget: boolean;
    /** The underlying three-mesh-ui block backing this graphic (null until created) */
    protected uiObject: ThreeMeshUI.Block | null;
    private _color;
    private _rect;
    private _stateManager;
    /** The RectTransform this graphic is laid out by */
    protected get rectTransform(): RectTransform;
    /** Invoked when a parent RectTransform changed (IRectTransformChangedReceiver) */
    onParentRectTransformChanged(): void;
    /** Internal engine callback — do not call manually */
    __internalNewInstanceCreated(init: ComponentInit<this>): this;
    setState(state: string): void;
    setupState(state: object): void;
    setOptions(opts: Options): void;
    awake(): void;
    onEnable(): void;
    onDisable(): void;
    private _currentlyCreatingPanel;
    /** Creates the underlying UI panel for this graphic */
    protected makePanel(): void;
    /** Hook called before the UI element is created */
    protected onBeforeCreate(_opts: any): void;
    /** Hook called while the UI element is created */
    protected onCreate(opts: any): void;
    /** Hook called after the UI element was created */
    protected onAfterCreated(): void;
    private applyEffects;
    /** used internally to ensure textures assigned to UI use linear encoding */
    static textureCache: Map<Texture, Texture>;
    /** Assigns a texture to this graphic — presumably converted/cached via {@link textureCache}; verify */
    protected setTexture(tex: Texture | null | undefined): Promise<void>;
    protected onAfterAddedToScene(): void;
}

/**
 * GraphicRaycaster enables pointer interactions with UI elements.
 * Add this to a {@link Canvas} or UI hierarchy to enable button clicks,
 * hover effects, and other UI interactions.
 *
 * **Requirements:**
 * - Must be on the same object as a Canvas or on a parent
 * - UI elements need proper RectTransform setup
 *
 * @example Enable UI interaction
 * ```ts
 * // Add to Canvas object
 * canvas.addComponent(GraphicRaycaster);
 * // Now buttons and other UI elements will respond to clicks
 * ```
 *
 * @summary Raycaster for UI elements
 * @category User Interface
 * @group Components
 * @see {@link Canvas} for UI root
 * @see {@link Button} for clickable UI
 * @see {@link EventSystem} for event handling
 */
export declare class GraphicRaycaster extends ObjectRaycaster {
    /** Creates a new GraphicRaycaster */
    constructor();
}

/**
 * Utility class to perform various graphics operations like copying textures to canvas
 */
export declare class Graphics {
    private static readonly planeGeometry;
    private static readonly renderer;
    private static readonly perspectiveCam;
    private static readonly orthographicCam;
    private static readonly scene;
    private static readonly blitMaterial;
    private static readonly mesh;
    /**
     * Copy a texture to a new texture
     * @param texture the texture to copy
     * @param blitMaterial the material to use for copying (optional)
     * @returns the newly created, copied texture
     */
    static copyTexture(texture: Texture, blitMaterial?: ShaderMaterial): Texture;
    /**
     * Render (blit) a source texture into a render target.
     * @param src the texture to read from
     * @param target the render target to write into
     * @param options optional settings: a custom renderer, a custom blit material, vertical flip, and depth texture / depth test / depth write configuration
     */
    static blit(src: Texture, target: WebGLRenderTarget, options?: {
        renderer?: WebGLRenderer;
        blitMaterial?: ShaderMaterial;
        flipY?: boolean;
        depthTexture?: DepthTexture | null;
        depthTest?: boolean;
        depthWrite?: boolean;
    }): void;
    /**
     * Copy a texture to a HTMLCanvasElement
     * @param texture the texture to convert
     * @param force if true the texture will be copied to a new texture before converting
     * @returns the HTMLCanvasElement with the texture or null if the texture could not be copied
     */
    static textureToCanvas(texture: Texture, force?: boolean): HTMLCanvasElement | null;
}

/**
 * The [GridHelper](https://engine.needle.tools/docs/api/GridHelper) displays a flat grid in the scene for visual reference.
 * Useful for debugging, level design, or providing spatial context.
 *
 * ![](https://cloud.needle.tools/-/media/prWArU8xTbgBKWQOvhTOag.gif)
 *
 * **Properties:**
 * - `color0` / `color1` - Alternating grid line colors
 * - `isGizmo` - When true, only shows when gizmos are enabled
 *
 * @example Add a grid to the scene
 * ```ts
 * const grid = myObject.addComponent(GridHelper);
 * grid.color0 = new Color(0.3, 0.3, 0.3);
 * grid.color1 = new Color(0.5, 0.5, 0.5);
 * ```
 *
 * @category Helpers
 * @group Components
 * @see {@link Gizmos} for debug visualization
 *
 * ![](https://cloud.needle.tools/-/media/i5KGKBUQ3iAX9h6o_9EY2w.jpg)
 */
export declare class GridHelper extends Component {
    /** When true the grid is only shown while gizmos are enabled */
    isGizmo: boolean;
    /** First alternating grid line color */
    color0: Color | ColorRepresentation;
    /** Second alternating grid line color */
    color1: Color | ColorRepresentation;
    private gridHelper;
    private size;
    private divisions;
    private offset;
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
}

/**
 * [GridLayoutGroup](https://engine.needle.tools/docs/api/GridLayoutGroup) arranges child UI elements in a grid pattern.
 * @category User Interface
 * @group Components
 */
export declare class GridLayoutGroup extends LayoutGroup {
    /** Computes and applies the grid layout for the child elements (LayoutGroup hook) */
    protected onCalculateLayout(): void;
}

/**
 * The [GroundProjectedEnv](https://engine.needle.tools/docs/api/GroundProjectedEnv) projects the environment map onto a virtual ground plane.
 * Creates a realistic floor from 360° panoramas/HDRIs by deforming the skybox
 * into a hemisphere with a beveled floor.
 *
 *
 * [![](https://cloud.needle.tools/-/media/8LDMd4TnGxVIj1XOfxIUIA.gif)](https://engine.needle.tools/samples/ground-projection)
 *
 * **Key properties:**
 * - `radius` - Size of the projection sphere (keep camera inside)
 * - `height` - How high the original photo was taken (affects floor magnification)
 * - `autoFit` - Automatically center and position at ground level
 * - `arBlending` - Blend with real-world in AR (0=hidden, 1=visible)
 *
 * **Debug:** Use `?debuggroundprojection` URL parameter.
 *
 * @example Apply ground projection
 * ```ts
 * const ground = myObject.getComponent(GroundProjectedEnv);
 * ground.radius = 100;
 * ground.height = 2;
 * ground.apply();
 * ```
 *
 * @summary Projects the environment map onto the ground
 * @category Rendering
 * @group Components
 * @see {@link Camera} for environment/skybox settings
 * @see {@link ContactShadows} for ground shadows
 * @link https://engine.needle.tools/samples/ground-projection for a demo of ground projection
 */
export declare class GroundProjectedEnv extends Component {
    /**
     * If true the projection will be created on awake and onEnable
     * @default false
     */
    applyOnAwake: boolean;
    /**
     * When enabled the position of the projected environment will be adjusted to be centered in the scene (and ground level).
     * @default true
     */
    autoFit: boolean;
    /**
     * Radius of the projection sphere. Set it large enough so the camera stays inside (make sure the far plane is also large enough)
     * @default 50
     */
    set radius(val: number);
    get radius(): number;
    private _radius;
    /**
     * How far the camera that took the photo was above the ground. A larger value will magnify the downward part of the image.
     * @default 3
     */
    set height(val: number);
    get height(): number;
    private _height;
    /**
     * Blending factor for the AR projection being blended with the scene background.
     * 0 = not visible in AR - 1 = blended with real world background.
     * Values between 0 and 1 control the smoothness of the blend while lower values result in smoother blending.
     * @default 0
     */
    set arBlending(val: number);
    get arBlending(): number;
    private _arblending;
    /** Cached values used to detect changes that require updating the projection (presumably — verify in implementation) */
    private _lastBackground?;
    private _lastRadius?;
    private _lastHeight?;
    /** The projected environment object instance, created on demand */
    private _projection?;
    private _watcher?;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    /* Excluded from this release type: onEnterXR */
    /* Excluded from this release type: onLeaveXR */
    /* Excluded from this release type: onBeforeRender */
    private updateAndCreate;
    private _needsTextureUpdate;
    /**
     * Updates the ground projection. This is called automatically when the environment or settings change.
     */
    updateProjection(): void;
    private _blurrynessShader;
    private _lastBlurriness;
    private updateBlurriness;
}

/** A group of behavior actions that can be performed in sequence or in parallel.
 * Built fluently via {@link addAction}, {@link makeParallel}, {@link makeSequence},
 * {@link makeLooping} and {@link makeRepeat}, then serialized with {@link writeTo}
 * (part of the USD/USDZ behavior authoring layer — see implementation for details).
 */
export declare class GroupActionModel implements IBehaviorElement {
    /** Counter used to generate unique ids */
    static global_id: number;
    /** Returns a new unique id */
    static getId(): number;
    id: string;
    /** The child actions contained in this group */
    actions: IBehaviorElement[];
    /** Number of loops — exact semantics (0/-1 for infinite?) defined by the implementation */
    loops: number;
    performCount: number;
    type: string;
    multiplePerformOperation: MultiplePerformOperation | undefined;
    constructor(id: string, actions: IBehaviorElement[]);
    /** Adds an action to this group (chainable) */
    addAction(el: IBehaviorElement): GroupActionModel;
    /** Configures the group to perform its actions in parallel (chainable) */
    makeParallel(): GroupActionModel;
    /** Configures the group to perform its actions sequentially (chainable) */
    makeSequence(): GroupActionModel;
    /** Configures the group to loop — presumably indefinitely; confirm against the implementation (chainable) */
    makeLooping(): this;
    /** Sets how many times the group repeats (chainable) */
    makeRepeat(count: number): this;
    /** Serializes this group into the given USD document */
    writeTo(document: USDDocument, writer: USDWriter): void;
}

/** Maps guid strings to guid strings — presumably source-to-target ids when remapping object references; verify usage. */
export declare type GuidsMap = {
    [key: string]: string;
};

/* Excluded from this release type: hasCommercialLicense */

/* Excluded from this release type: hasIndieLicense */

/* Excluded from this release type: hasPointerEventComponent */

/* Excluded from this release type: hasProLicense */

/** Hides the in-page debug console overlay (if currently shown). */
export declare function hideDebugConsole(): void;

/** Flags controlling engine-internal handling of an object. */
export declare enum HideFlags {
    /** No special handling */
    None = 0,
    /** When enabled the glTF exporter will omit this object and all children from being exported */
    DontExport = 1
}

/**
 * Hides the object when the scene starts.
 * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
 *
 * Useful for setting up objects that should initially be hidden and shown later via a {@link SetActiveOnClick} component.
 *
 * @see {@link SetActiveOnClick} to show or hide objects on click
 * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
 * @summary Hides the object on scene start
 * @category Everywhere Actions
 * @group Components
 */
export declare class HideOnStart extends Component implements UsdzBehaviour {
    private static _fadeBehaviour?;
    private static _fadeObjects;
    /** Registers the hide-on-start behaviour for the given target with the USDZ behavior extension (presumably — verify) */
    static add(target: Target, ext: BehaviorExtension): void;
    /** Component lifecycle: hides the object when the scene starts */
    start(): void;
    /** UsdzBehaviour hook: creates the corresponding USDZ behaviours on export */
    createBehaviours(ext: any, model: any, _context: any): void;
    private wasVisible;
    /** UsdzBehaviour hook invoked before the USD document is created */
    beforeCreateDocument(): void;
}

/**
 * HingeJoint connects two {@link Rigidbody} components with a rotating constraint,
 * like a door hinge or wheel axle. Bodies can only rotate around the specified axis.
 *
 * Use this for:
 * - Doors and gates
 * - Wheels and axles
 * - Pendulums
 * - Any rotating mechanical connection
 *
 * @example Create a door hinge
 * ```ts
 * const hinge = door.addComponent(HingeJoint);
 * hinge.connectedBody = doorFrameRigidbody;
 * hinge.anchor = new Vector3(0, 0, 0); // Hinge position
 * hinge.axis = new Vector3(0, 1, 0);   // Rotate around Y axis
 * ```
 *
 * @summary Connect two Rigidbodies with a rotating hinge
 * @category Physics
 * @group Components
 * @see {@link Joint} base class
 * @see {@link FixedJoint} for rigid connections
 */
export declare class HingeJoint extends Joint {
    /** Local position of the hinge pivot point */
    anchor?: Vector3;
    /** Axis of rotation for the hinge (e.g., Vector3(0,1,0) for vertical axis) */
    axis?: Vector3;
    /** Creates the underlying physics hinge between the two bodies — presumably invoked by the {@link Joint} base class; verify */
    protected createJoint(self: Rigidbody, other: Rigidbody): void;
}

/** An Object3D whose material exposes an `opacity` value — presumably used for fading hit-point/cursor visuals; verify usage. */
declare type HitPointObject = Object3D & {
    material: Material & {
        opacity: number;
    };
};

/** Horizontal text alignment values (three-mesh-ui style string values). */
declare enum HorizontalAlignment {
    left = "left",
    center = "center",
    right = "right",
    justified = "justified"
}

/**
 * [HorizontalLayoutGroup](https://engine.needle.tools/docs/api/HorizontalLayoutGroup) arranges child UI elements horizontally with spacing, padding, and alignment options.
 * @category User Interface
 * @group Components
 */
export declare class HorizontalLayoutGroup extends HorizontalOrVerticalLayoutGroup {
    /** The horizontal axis — children are arranged along it */
    protected get primaryAxis(): Axis;
}

/** Shared base class for Horizontal- and VerticalLayoutGroup holding the child sizing/expansion options.
 * Option semantics presumably mirror Unity's HorizontalOrVerticalLayoutGroup — verify against the implementation.
 */
declare abstract class HorizontalOrVerticalLayoutGroup extends LayoutGroup {
    /** If true the group controls the height of its child elements */
    childControlHeight: boolean;
    /** If true the group controls the width of its child elements */
    childControlWidth: boolean;
    /** If true children are forced to expand to fill available height */
    childForceExpandHeight: boolean;
    /** If true children are forced to expand to fill available width */
    childForceExpandWidth: boolean;
    /** If true the child's scale is considered for height calculations */
    childScaleHeight: boolean;
    /** If true the child's scale is considered for width calculations */
    childScaleWidth: boolean;
    /** The axis along which children are laid out (implemented by subclasses) */
    protected abstract get primaryAxis(): Axis;
    /** Computes and applies the layout for the given rect (LayoutGroup hook) */
    protected onCalculateLayout(rect: RectTransform): void;
}

/** How text behaves when it exceeds its horizontal bounds. */
declare enum HorizontalWrapMode {
    /** Wrap text onto additional lines */
    Wrap = 0,
    /** Let text overflow the bounds */
    Overflow = 1
}

/* Excluded from this release type: HostData */

/**
 * [HoverAnimation](https://engine.needle.tools/docs/api/HoverAnimation) plays animations in response to pointer hover events on the object this component is attached to.
 * The component automatically detects when the mouse pointer or touch enters/exits the object or any of its children, triggering the corresponding animations.
 *
 * **How It Works:**
 * The component listens to pointer enter and exit events and switches between two animation states:
 * - **Hover state**: Plays when the pointer enters the object (default: scale up to 110%)
 * - **Idle state**: Plays when the pointer exits the object (default: returns to original scale)
 *
 * **Default Behavior:**
 * If no custom animation clips are provided, the component automatically creates a smooth scale-up animation using the
 * {@link type}, {@link duration}, and {@link scaleFactor} properties. This provides instant hover feedback without
 * requiring any animation setup.
 *
 * **Custom Animations:**
 * You can provide your own animation clips for complete control over the hover effect. This allows you to create
 * complex animations involving position, rotation, color changes, or any other animated property.
 *
 * **Common Use Cases:**
 * - Interactive buttons with scale feedback
 * - Product showcases with highlight animations
 * - Menu items with hover effects
 * - Interactive 3D objects in AR/VR experiences
 * - Call-to-action elements with attention-grabbing animations
 *
 * @example Basic usage with default scale animation
 * ```ts
 * const button = new Object3D();
 * button.addComponent(HoverAnimation, {
 *   scaleFactor: 1.2,    // Scale to 120% on hover
 *   duration: 0.2,       // 200ms animation
 *   type: "ease-in-out"  // Smooth easing
 * });
 * scene.add(button);
 * ```
 *
 * @example Custom hover animations
 * ```ts
 * const obj = new Object3D();
 * const hoverAnim = loadAnimationClip("hover-glow.anim");
 * const idleAnim = loadAnimationClip("idle-pulse.anim");
 *
 * obj.addComponent(HoverAnimation, {
 *   hovered: hoverAnim,  // Custom hover animation
 *   idle: idleAnim       // Custom idle animation
 * });
 * scene.add(obj);
 * ```
 *
 * @example Quick scale animation with custom settings
 * ```ts
 * gameObject.addComponent(HoverAnimation, {
 *   scaleFactor: 1.15,
 *   duration: 0.15,
 *   type: "ease-out"
 * });
 * ```
 *
 * @see {@link Animation} - The underlying animation component used to play clips
 * @see {@link AnimationClip} - For creating custom animation clips
 * @see {@link AnimationUtils} - Utility functions for creating animations programmatically
 * @see {@link ScaleClipType} - Available easing types for the default scale animation
 * @see {@link ObjectRaycaster} - Controls which objects receive pointer events
 * @see {@link PointerEvents} - For more complex pointer interaction handling
 *
 * @summary Plays animations on pointer hover enter/exit events
 * @category Interactivity
 * @group Components
 * @component
 */
export declare class HoverAnimation extends Component {
    /**
     * The easing type for the default scale animation.
     *
     * This property controls how the scale animation interpolates from the start to end value.
     * Different easing types create different "feels" for the hover effect.
     *
     * **Available types:**
     * - `"linear"`: Constant speed throughout the animation
     * - `"ease-in"`: Starts slow, ends fast
     * - `"ease-out"`: Starts fast, ends slow (good for responsive feel)
     * - `"ease-in-out"`: Starts slow, fast in middle, ends slow (smooth and natural)
     *
     * **Note:** This is only used when no custom {@link hovered} animation clip is provided.
     * If you provide a custom animation clip, this property is ignored.
     *
     * @see {@link ScaleClipType} for all available easing types
     * @default "linear"
     */
    type: ScaleClipType;
    /**
     * Duration of the default hover animation in seconds.
     *
     * This controls how long it takes for the object to scale up when hovered.
     * Shorter durations feel more responsive, while longer durations feel smoother.
     *
     * **Recommendations:**
     * - `0.1-0.15s`: Snappy, responsive feel (good for buttons)
     * - `0.2-0.3s`: Smooth, noticeable animation
     * - `0.4s+`: Slow, emphasized effect
     *
     * **Note:** This is only used when no custom {@link hovered} animation clip is provided.
     * If you provide a custom animation clip, this property is ignored.
     *
     * @default 0.1
     */
    duration: number;
    /**
     * The scale multiplier to apply when the object is hovered.
     *
     * This value is multiplied with the object's original scale to determine the hover size.
     * A value of `1.0` means no change, values greater than `1.0` scale up, and values less than `1.0` scale down.
     *
     * **Examples:**
     * - `1.0`: No scale change
     * - `1.1`: Scale to 110% (subtle effect, default)
     * - `1.2`: Scale to 120% (noticeable effect)
     * - `1.5`: Scale to 150% (dramatic effect)
     * - `0.9`: Scale to 90% (shrink on hover)
     *
     * **Note:** This is only used when no custom {@link hovered} animation clip is provided.
     * If you provide a custom animation clip, this property is ignored.
     *
     * @default 1.1
     */
    scaleFactor: number;
    /**
     * Custom animation clip to play when the pointer hovers over the object.
     *
     * If `null`, the component automatically generates a scale-up animation based on the
     * {@link type}, {@link duration}, and {@link scaleFactor} properties.
     *
     * Provide a custom animation clip if you want more complex hover effects such as:
     * - Color changes or material property animations
     * - Position or rotation changes
     * - Multi-property animations
     * - Animations affecting child objects
     *
     * **Tip:** The animation plays with a 0.1s fade duration for smooth transitions.
     *
     * @see {@link AnimationClip} for creating custom animation clips
     * @see {@link AnimationUtils.createScaleClip} for programmatically creating scale animations
     * @default null (generates default scale animation)
     */
    hovered: AnimationClip | null;
    /**
     * Custom animation clip to play when the pointer is not hovering over the object (idle state).
     *
     * If `null`, an empty animation clip is used, which returns the object to its original state
     * when the hover animation ends.
     *
     * You can provide a custom idle animation for effects such as:
     * - Subtle breathing or floating motion when not hovered
     * - Pulsing or glowing effects in idle state
     * - Return-to-normal animations with custom easing
     * - Looping ambient animations
     *
     * **Tip:** The idle animation is played with `loop: true`, so it will repeat continuously
     * until the object is hovered again.
     *
     * @see {@link AnimationClip} for creating custom animation clips
     * @see {@link AnimationUtils.emptyClip} to see how the default empty clip is created
     * @default null (uses empty clip that returns to original state)
     */
    idle: AnimationClip | null;
    /** The {@link Animation} component used internally to play the hover/idle clips (see class docs). */
    private animation;
    /** Lifecycle: called once before the first frame this component is active. */
    start(): void;
    /** Lifecycle: called whenever this component becomes enabled. */
    onEnable(): void;
    /** Lifecycle: called whenever this component becomes disabled. */
    onDisable(): void;
    /** Pointer event: invoked when the pointer enters this object — plays the hover animation (see {@link hovered}). */
    onPointerEnter(): void;
    /** Pointer event: invoked when the pointer leaves this object — returns to the idle animation (see {@link idle}). */
    onPointerExit(): void;
    /** Plays the idle clip ({@link idle}, or the default empty clip when none is set). */
    private playIdle;
    /** Plays the hover clip ({@link hovered}, or the generated default scale animation when none is set). */
    private playHover;
}

/** Minimal contract for components that can receive {@link AnimationClip}s (e.g. an animation player component). */
export declare type IAnimationComponent = Pick<IComponent, "gameObject"> & {
    isAnimationComponent: boolean;
    addClip?(clip: AnimationClip): any;
};

/* Excluded from this release type: IApplyPrototypeExtension */

/** Implement on a component to be notified (with the affected network ids) before networked objects are destroyed. */
export declare interface IBeforeNetworkedDestroy {
    onBeforeNetworkedDestroy(networkIds: string[]): void;
}

/** Internal: an element that can write itself into a USD document during export. */
declare interface IBehaviorElement {
    id: string;
    writeTo(document: USDDocument, writer: USDWriter): any;
}

/** A box-shaped collider with an editable size. */
export declare interface IBoxCollider extends ICollider {
    size: Vec3;
}

export declare type ICamera = Camera;

/** Interface for a camera controller component that can be attached to a camera to control it */
export declare interface ICameraController {
    get isCameraController(): boolean;
}

/** Internal: contract of a UI canvas root. Tracks registered rect transforms and canvas event receivers. */
declare interface ICanvas extends IComponent {
    get isCanvas(): boolean;
    get screenspace(): boolean;
    registerTransform(rt: IRectTransform): any;
    unregisterTransform(rt: IRectTransform): any;
    registerEventReceiver(receiver: ICanvasEventReceiver): any;
    unregisterEventReceiver(receiver: ICanvasEventReceiver): any;
}

/** Internal: receives canvas render callbacks after registering via {@link ICanvas.registerEventReceiver}. */
declare interface ICanvasEventReceiver {
    /** Called before the canvas is rendering */
    onBeforeCanvasRender?(canvas: ICanvas): any;
}

/** Internal: canvas group state controlling raycast blocking and interactability. */
declare interface ICanvasGroup {
    get isCanvasGroup(): boolean;
    blocksRaycasts: boolean;
    interactable: boolean;
}

/** Contract implemented by all collider components. */
export declare interface ICollider extends IComponent {
    get isCollider(): any;
    attachedRigidbody: IRigidbody | null;
    /**
     * Note: Make sure to call updatePhysicsMaterial after having changed this property
     */
    isTrigger: boolean;
    /**
     * The physics material determines how the collider interacts with other colliders (e.g. bounciness)
     * Note: Make sure to call updatePhysicsMaterial after having changed this property
     */
    sharedMaterial?: PhysicsMaterial;
    center?: Vec3 & {
        multiply(vec: Vec3): any;
    };
    updateProperties(): void;
    updatePhysicsMaterial(): void;
    /** The collider membership indicates what groups the collider is part of (e.g. group 2 and 3)
     * An `undefined` array indicates that the collider is part of all groups
     * Note: Make sure to call updateProperties after having changed this property
     * Default: [0]
     */
    membership?: number[];
    /** The collider filter indicates what groups the collider can interact with (e.g. group 3 and 4)
     * An `undefined` array indicates that the collider can interact with all groups
     * Note: Make sure to call updateProperties after having changed this property
     * Default: undefined
     */
    filter?: number[];
}

/** Provides lookup from a three.js Object3D to its collider. */
export declare type ICollisionContext = {
    getCollider(obj: Object3D): ICollider;
};

/** Base contract for every component that can be attached to a {@link IGameObject}. */
export declare interface IComponent extends IHasGuid {
    get isComponent(): boolean;
    get [$componentName](): string | undefined;
    /** the object this component is attached to */
    gameObject: IGameObject;
    enabled: boolean;
    sourceId?: SourceIdentifier;
    get name(): string;
    get layer(): number;
    get destroyed(): boolean;
    get tag(): string;
    context: any;
    get activeAndEnabled(): boolean;
    /* Excluded from this release type: __internalNewInstanceCreated */
    /* Excluded from this release type: _internalInit */
    /* Excluded from this release type: __internalAwake */
    /* Excluded from this release type: __internalStart */
    /* Excluded from this release type: __internalEnable */
    /* Excluded from this release type: __internalDisable */
    /* Excluded from this release type: __internalDestroy */
    /* Excluded from this release type: resolveGuids */
    /** experimental, called when the script is registered for the first time, this is called even if the component is not enabled. */
    registering?(): any;
    awake(): any;
    onEnable(): any;
    onDisable(): any;
    onDestroy(): any;
    destroy(): any;
    /** called for properties decorated with the @validate decorator */
    onValidate?(property?: string): any;
    /** called when this.context.isPaused changes or when rendering loop changes due to changing DOM element visibility
     * e.g. when the DOM element becomes hidden or out of view
     */
    onPausedChanged?(isPaused: boolean, wasPaused: boolean): any;
    start?(): void;
    earlyUpdate?(): void;
    update?(): void;
    lateUpdate?(): void;
    onBeforeRender?(frame: XRFrame | null): void;
    onAfterRender?(): void;
    onCollisionEnter?(col: Collision): any;
    onCollisionExit?(col: Collision): any;
    onCollisionStay?(col: Collision): any;
    onTriggerEnter?(col: ICollider): any;
    onTriggerStay?(col: ICollider): any;
    onTriggerExit?(col: ICollider): any;
    get forward(): Vector3;
    get worldPosition(): Vector3;
    get worldQuaternion(): Quaternion;
}

/** Data identifying a networking connection. */
export declare interface IConnectionData {
    id: string;
}

export declare type IContext = Context;

/** Implement to receive callbacks from the `@needle-tools/editor-sync` package */
declare interface IEditorModification {
    /**
     * Called when a modification is made through the external editor (called from @needle-tools/editor-sync)
     * @param modification The modification that was made in the external editor
     * @returns false if you want the editor package to apply the modification. Otherwise it's expected that your code handles applying the change
     */
    onEditorModification(modification: EditorModification): void | undefined | boolean;
    /** Called immediately after the modification was made through the external editor and applied by the editor-sync package */
    onAfterEditorModification?(mod: EditorModification): void;
}

/** A provider that can apply — and later undo — a postprocessing effect. */
export declare interface IEffectProvider {
    apply(context: PostProcessingEffectContext): void | undefined | EffectProviderResult;
    unapply(): void;
}

/** Marker contract for event-list objects that support instance remapping on instantiation. */
export declare interface IEventList {
    readonly isEventList: true;
    __internalOnInstantiate(map: InstantiateContext): IEventList;
}

export declare interface IGameObject extends Object3D {
    /** the object's unique identifier */
    guid: string | undefined;
    /** if the object is enabled in the hierarchy (usually equivalent to `visible`) */
    activeSelf: boolean;
    /** call to destroy this object including all components that are attached to it. Will destroy all children recursively */
    destroy(): void;
    /** Add a new component to this object. Expects a component type (e.g. `addNewComponent(Animator)`) */
    addNewComponent<T extends IComponent>(type: Constructor<T>, init?: ComponentInit<T>): T;
    /** Add a component to this object. Accepts either an existing component instance or a component type to instantiate */
    addComponent<T extends IComponent>(comp: T | ConstructorConcrete<T>, init?: ComponentInit<T>): T;
    /** Remove a component from this object. Expects a component instance
     * @returns the removed component (equal to the passed in component)
     */
    removeComponent<T extends IComponent>(comp: T): T;
    /** Searches for a component type on this object. If no component of the searched type exists yet a new instance will be created and returned */
    getOrAddComponent<T>(typeName: Constructor<T> | null): T;
    /** Tries to find a component of a type on this object.
     * @returns the first instance of a component on this object that matches the passed in type or null if no component of this type (or a subtype) exists */
    getComponent<T>(type: Constructor<T>): T | null;
    /** @returns all components of a certain type on this object */
    getComponents<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    /** Finds a component of a certain type on this object OR a child object if any exists */
    getComponentInChildren<T>(type: Constructor<T>): T | null;
    /** Finds all components of a certain type on this object AND all children (recursively) */
    getComponentsInChildren<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    /** Finds a component of a certain type on this object OR a parent object if any exists */
    getComponentInParent<T>(type: Constructor<T>): T | null;
    /** Finds all components of a certain type on this object AND all parents (recursively) */
    getComponentsInParent<T>(type: Constructor<T>, arr?: T[]): Array<T>;
    get worldPosition(): Vector3;
    set worldPosition(val: Vector3);
    get worldQuaternion(): Quaternion;
    set worldQuaternion(val: Quaternion);
    get worldRotation(): Vector3;
    set worldRotation(val: Vector3);
    get worldScale(): Vector3;
    set worldScale(val: Vector3);
    get worldForward(): Vector3;
    get worldRight(): Vector3;
    get worldUp(): Vector3;
}

/** Internal: a UI graphic element that can be a raycast target and faded via an alpha factor. */
declare interface IGraphic extends IComponent, IHasAlphaFactor {
    get isGraphic(): boolean;
    raycastTarget: boolean;
}

/** Implemented by objects whose transparency can be scaled by a factor. */
declare interface IHasAlphaFactor {
    setAlphaFactor(val: number): any;
}

/** Implemented by objects that carry a unique guid. */
export declare interface IHasGuid {
    guid: string;
}

/** Minimal input abstraction — see {@link Input} for the full implementation. */
export declare interface IInput {
    convertScreenspaceToRaycastSpace(vec: Vec2): void;
    getPointerPosition(i: number): Vec2 | null;
}

/** Input event arguments that can be marked as used to stop further processing. */
export declare interface IInputEventArgs {
    get used(): boolean;
    use(): void;
    stopImmediatePropagation?(): void;
}

/** Options accepted when instantiating (cloning) an object. */
export declare type IInstantiateOptions = {
    idProvider?: UIDProvider;
    parent?: string | Object3D;
    /** position in local space. Set `keepWorldPosition` to true if this is world space */
    position?: Vector3 | [number, number, number];
    /** for duplicatable parenting */
    keepWorldPosition?: boolean;
    /** rotation in local space. Set `keepWorldPosition` to true if this is world space */
    rotation?: Quaternion | Euler | [number, number, number];
    scale?: Vector3 | [number, number, number];
    /** if the instantiated object should be visible */
    visible?: boolean;
    context?: Context;
    /** If true the components will be cloned as well
     * @default true
     */
    components?: boolean;
};

/** Internal: a layout group that can re-layout its children when dirty. */
declare interface ILayoutGroup extends IComponent {
    get isLayoutGroup(): boolean;
    get isDirty(): boolean;
    updateLayout(): any;
}

/** Common contract for light components. */
export declare interface ILight extends IComponent {
    intensity: number;
    color: Color;
}

/** Internal: registry for lightmap, skybox and reflection textures, keyed by source id, {@link LightmapType} and index. */
declare interface ILightDataRegistry {
    clear(): any;
    registerTexture(sourceId: SourceIdentifier, type: LightmapType, texture: Texture, index?: number): any;
    tryGet(sourceId: SourceIdentifier | undefined, type: LightmapType, index: number): Texture | null;
    tryGetLightmap(sourceId: SourceIdentifier | null | undefined, index: number): Texture | null;
    tryGetSkybox(sourceId?: SourceIdentifier | null): Texture | null;
    tryGetReflection(sourceId?: SourceIdentifier | null): Texture | null;
}

/* Excluded from this release type: ILoadingViewHandler */

/**
 * [Image](https://engine.needle.tools/docs/api/Image) displays a sprite (2D texture) in the UI. Can be used for icons,
 * backgrounds, or any visual element that needs a texture.
 *
 * **Properties:**
 * - `image` - Direct texture assignment (convenience property)
 * - `sprite` - Sprite object containing texture and rect info
 * - `color` - Tint color applied to the image (inherited from Graphic)
 *
 * **Usage with Button:**
 * Image is commonly paired with {@link Button} to create clickable
 * UI elements with visual feedback via color tinting.
 *
 * @example Set an image texture
 * ```ts
 * const img = myIcon.getComponent(Image);
 * img.image = myTexture;
 * img.color = new RGBAColor(1, 0.5, 0.5, 1); // Red tint
 * ```
 *
 * @summary Display a 2D image in the UI
 * @category User Interface
 * @group Components
 * @see {@link Canvas} for the UI root
 * @see {@link Button} for clickable images
 * @see {@link RawImage} for non-UI image display
 */
declare class Image_2 extends MaskableGraphic {
    /** Convenience accessor: directly assigns/reads the texture displayed by this image. */
    set image(img: Texture | null);
    get image(): Texture | null;
    /** The sprite (texture + rect info) rendered by this image. */
    get sprite(): Sprite_3 | undefined;
    set sprite(sprite: Sprite_3 | undefined);
    private _sprite?;
    private pixelsPerUnitMultiplier;
    private isBuiltinSprite;
    protected onBeforeCreate(opts: any): void;
    protected onAfterCreated(): void;
}
export { Image_2 as Image }

/** Result of reading back image pixels: raw ImageData plus an optional ImageBitmap. */
declare type ImageReadbackResult = {
    imageData: ImageData;
    imageBitmap?: ImageBitmap;
};

/**
 * Load images or textures from external URLs.
 *
 * **Important methods:**
 * - {@link createHTMLImage} to create an HTMLImageElement from the URL
 * - {@link createTexture} to create a Three.js Texture from the URL
 *
 * @example
 * ```ts
 * import { ImageReference, serializable } from '@needle-tools/engine';
 *
 * export class MyComponent extends Behaviour {
 *   @serializable(ImageReference)
 *   myImage?:ImageReference;
 *   async start() {
 *     if(this.myImage) {
 *       const texture = await this.myImage.createTexture();
 *       if(texture) {
 *         // use the texture
 *       }
 *     }
 *   }
 * }
 * ```
 *
 * ### Related:
 * - {@link AssetReference} to load glTF or GLB assets
 * - {@link FileReference} to load external file URLs
 */
export declare class ImageReference {
    private static imageReferences;
    static getOrCreate(url: string): ImageReference;
    constructor(url: string);
    readonly url: string;
    private _bitmap?;
    private _bitmapObject?;
    /** Releases resources held by this reference — presumably the cached bitmap data; verify implementation. */
    dispose(): void;
    createHTMLImage(): HTMLImageElement;
    private loader;
    createTexture(): Promise<Texture | null>;
    /** Loads the bitmap data of the image */
    getBitmap(): Promise<ImageBitmap | null>;
}

/* Excluded from this release type: ImageReferenceSerializer */

/** This method uses a '2d' canvas context for pixel manipulation, and can apply a color scale or Y flip to the given image.
 * Unfortunately, canvas always uses premultiplied data, and thus images with low alpha values (or multiplying by a=0) will result in black pixels.
 * @param image The source image to draw into a canvas
 * @param color Optional color scale applied to the pixels
 * @param flipY If true the image is flipped vertically
 * @param maxTextureSize Maximum texture size — presumably the result is constrained to this size; verify implementation
 * @returns A promise resolving to the resulting offscreen canvas
 */
export declare function imageToCanvas(image: HTMLImageElement | HTMLCanvasElement | OffscreenCanvas | ImageBitmap, color?: Vector4 | undefined, flipY?: boolean, maxTextureSize?: number): Promise<OffscreenCanvasExt>;

/** Base contract for models that are synchronized via the networking connection. */
export declare interface IModel {
    guid: string;
    /**
     * If set to true the model will not be saved in the server room state
     */
    dontSave?: boolean;
    /**
     * If set to true the model will be deleted when the user disconnects
     */
    deleteOnDisconnect?: boolean;
}

/** holds information if a field was undefined before serialization. This gives us info if we might want to warn the user about missing attributes */
declare class ImplementationInformation {
    private isDevMode;
    private cache;
    /** only call when assigning values for the very first time */
    registerDefinedKeys(typeName: string, type: object): void;
    /** @returns whether the given key was recorded as defined for the type (see {@link registerDefinedKeys}) */
    getDefinedKey(typeName: string, key: string): boolean;
}

/** Contract of the engine's host HTML element: provides the AR DOM overlay container and AR session enter/exit callbacks. */
export declare interface INeedleEngineComponent extends HTMLElement {
    getAROverlayContainer(): HTMLElement;
    onEnterAR(session: XRSession, overlayContainer: HTMLElement): any;
    onExitAR(session: XRSession): any;
}

/**
 * Interface for registering custom glTF extensions to the Needle Engine GLTFLoaders.
 * Register your plugin using the {@link addCustomExtensionPlugin} method
 */
export declare interface INeedleGLTFExtensionPlugin {
    /** The Name of your plugin */
    name: string;
    /** Called before starting to load a glTF file. This callback can be used to add custom extensions to the [GLTFLoader](https://threejs.org/docs/#GLTFLoader.register)
     *
     * @example Add a custom extension to the GLTFloader
     * ```ts
     * onImport: (loader, url, context) => {
     *    loader.register((parser) => new MyCustomExtension(parser));
     * }
     * ```
     */
    onImport?: OnImportCallback;
    /** Called after a glTF file has been loaded */
    onLoaded?: (url: string, gltf: GLTF, context: Context) => void;
    /** Called before starting to export a glTF file. This callback can be used to add custom extensions to the [GLTFExporter](https://threejs.org/docs/#examples/en/exporters/GLTFExporter.register)
     *
     * @example Add a custom extension to the GLTFExporter
     * ```ts
     * onExport: (exporter, context) => {
     *    exporter.register((writer) => new MyCustomExportExtension(writer));
     * }
     * ```
     */
    onExport?: OnExportCallback;
}

/** Contract of the glTF loader used by the engine: parses/loads glTF data and creates built-in components from it. */
export declare interface INeedleGltfLoader {
    createBuiltinComponents(context: Context, gltfId: SourceIdentifier, gltf: GLTF, seed: number | null | UIDProvider, extension?: NEEDLE_components): Promise<void>;
    writeBuiltinComponentData(comp: object, context: SerializationContext): any;
    parseSync(context: Context, data: string | ArrayBuffer, path: string, seed: number | UIDProvider | null): Promise<Model | undefined>;
    loadSync(context: Context, url: string, sourceId: string, seed: number | UIDProvider | null, prog?: (prog: ProgressEvent) => void): Promise<Model | undefined>;
}

export declare type INeedleXRSession = NeedleXRSession;

/** Implement on a component to receive XR session lifecycle and controller add/remove events. */
export declare interface INeedleXRSessionEventReceiver extends Pick<IComponent, "destroyed"> {
    get activeAndEnabled(): boolean;
    supportsXR?(mode: XRSessionMode): boolean;
    /** Called before requesting a XR session */
    onBeforeXR?(mode: XRSessionMode, args: XRSessionInit): void;
    onEnterXR?(args: NeedleXREventArgs): void;
    onUpdateXR?(args: NeedleXREventArgs): void;
    onLeaveXR?(args: NeedleXREventArgs): void;
    onXRControllerAdded?(args: NeedleXRControllerEventArgs): void;
    onXRControllerRemoved?(args: NeedleXRControllerEventArgs): void;
}

/** Minimal networking connection contract: connection/room state and message sending. */
export declare interface INetworkConnection {
    get isConnected(): boolean;
    get isInRoom(): boolean;
    send(key: string, data: IModel | object | boolean | null | string | number, queue: SendQueue): unknown;
}

/** Implement to provide a custom websocket URL for the networking connection. */
export declare interface INetworkingWebsocketUrlProvider {
    getWebsocketUrl(): string | null;
}

/** Particle system module controlling how particles inherit velocity from the emitter (via {@link mode}, {@link curve} and {@link curveMultiplier}). */
export declare class InheritVelocityModule {
    enabled: boolean;
    curve: MinMaxCurve;
    curveMultiplier: number;
    mode: ParticleSystemInheritVelocityMode;
    clone(): InheritVelocityModule;
    system: IParticleSystem;
    private get _lastWorldPosition();
    private get _velocity();
    private readonly _temp;
    private _firstUpdate;
    awake(system: IParticleSystem): void;
    reset(): void;
    update(_context: Context): void;
    applyInitial(vel: Vector3 | Vector3_2): void;
    private _frames;
    applyCurrent(vel: Vector3 | Vector3_2, t01: number, lerpFactor: number): void;
}

/**
 * Handles all input events including mouse, touch, keyboard, and XR controllers.
 * Access via `this.context.input` from any component.
 *
 * @example Checking mouse/pointer state
 * ```ts
 * update() {
 *   if (this.context.input.mouseDown) {
 *     console.log("Mouse button pressed");
 *   }
 *   if (this.context.input.mouseClick) {
 *     console.log("Click detected");
 *   }
 *   const pos = this.context.input.mousePosition;
 *   console.log(`Mouse at: ${pos.x}, ${pos.y}`);
 * }
 * ```
 * @example Keyboard input
 * ```ts
 * update() {
 *   if (this.context.input.isKeyDown("Space")) {
 *     console.log("Space pressed this frame");
 *   }
 *   if (this.context.input.isKeyPressed("w")) {
 *     console.log("W key is held down");
 *   }
 * }
 * ```
 * @example Event-based input
 * ```ts
 * onEnable() {
 *   this.context.input.addEventListener("pointerdown", this.onPointerDown);
 * }
 * onDisable() {
 *   this.context.input.removeEventListener("pointerdown", this.onPointerDown);
 * }
 * onPointerDown = (evt: NEPointerEvent) => {
 *   console.log("Pointer down:", evt.pointerId);
 * }
 * ```
 *
 * @see {@link NEPointerEvent} for pointer event data
 * @see {@link InputEvents} for available event types
 * @see {@link PointerType} for pointer device types
 * @link https://engine.needle.tools/docs/scripting.html
 */
export declare class Input implements IInput {
    /** This is a list of event listeners per event type (e.g. pointerdown, pointerup, keydown...). Each entry contains a priority and list of listeners.
     * That way users can control if they want to receive events before or after other listeners (e.g subscribe to pointer events before the EventSystem receives them) - this allows certain listeners to be always invoked first (or last) and stop propagation
     * Listeners per event are sorted
     */
    private readonly _eventListeners;
    /** Adds an event listener for the specified event type. The callback will be called when the event is triggered.
     * @param type The event type to listen for
     * @param callback The callback to call when the event is triggered
     * @param options The options for adding the event listener.
     * @example Basic usage
     * ```ts
     * input.addEventListener("pointerdown", (evt) => {
     *   console.log("Pointer down", evt.pointerId, evt.pointerType);
     * });
     * ```
     * @example Adding a listener that is called after all other listeners
     * By using a higher value for the queue the listener will be called after other listeners (default queue is 0).
     * ```ts
     * input.addEventListener("pointerdown", (evt) => {
     *  console.log("Pointer down", evt.pointerId, evt.pointerType);
     * }, { queue: 10 });
     * ```
     * @example Adding a listener that is only called once
     * ```ts
     * input.addEventListener("pointerdown", (evt) => {
     *   console.log("Pointer down", evt.pointerId, evt.pointerType);
     * }, { once: true });
     * ```
     */
    addEventListener(type: PointerEventNames, callback: PointerEventListener, options?: EventListenerOptions_2): any;
    addEventListener(type: KeyboardEventNames, callback: KeyboardEventListener, options?: EventListenerOptions_2): any;
    /** Removes the event listener from the specified event type. If no queue is specified the listener will be removed from all queues.
     * @param type The event type to remove the listener from
     * @param callback The callback to remove
     * @param options The options for removing the event listener
     */
    removeEventListener(type: PointerEventNames, callback: PointerEventListener, options?: EventListenerOptions_2): any;
    removeEventListener(type: KeyboardEventNames, callback: KeyboardEventListener, options?: EventListenerOptions_2): any;
    private dispatchEvent;
    _doubleClickTimeThreshold: number;
    _longPressTimeThreshold: number;
    /** The primary pointer position in pixels (see {@link getPointerPosition}) */
    get mousePosition(): Vector2;
    /** The primary pointer position in raycast/screenspace coordinates (see {@link getPointerPositionRC}) */
    get mousePositionRC(): Vector2;
    get mouseDown(): boolean;
    get mouseUp(): boolean;
    /** Is the primary pointer clicked (usually the left button). This is equivalent to `input.click` */
    get mouseClick(): boolean;
    /** Was a double click detected for the primary pointer? This is equivalent to `input.doubleClick` */
    get mouseDoubleClick(): boolean;
    get mousePressed(): boolean;
    get mouseWheelChanged(): boolean;
    /** Is the primary pointer clicked (usually the left button). This is equivalent to `input.mouseClick` */
    get click(): boolean;
    /** Was a double click detected for the primary pointer? */
    get doubleClick(): boolean;
    /**
     * Get a connected Gamepad
     * Note: For a gamepad to be available to the browser it must have received input before while the page was focused.
     * @link https://developer.mozilla.org/en-US/docs/Web/API/Gamepad_API/Using_the_Gamepad_API
     * @returns The gamepad or null if no gamepad is connected
     */
    getGamepad(index?: number): Gamepad | null;
    private readonly _setCursorTypes;
    /** @deprecated use setCursor("pointer") */
    setCursorPointer(): void;
    /** @deprecated use unsetCursor() */
    setCursorNormal(): void;
    /**
     * Set a custom cursor. This will set the cursor type until unsetCursor is called
     */
    setCursor(type: CursorTypeName): void;
    /**
     * Unset a custom cursor. This will set the cursor type to the previous type or default
     */
    unsetCursor(type: CursorTypeName): void;
    private updateCursor;
    /**
     * Check if a pointer id is currently used.
     */
    getIsPointerIdInUse(pointerId: number): boolean;
    /** how many pointers are currently pressed */
    getPointerPressedCount(): number;
    /**
     * Gets the position of the given pointer index in pixel
     * @param i The pointer index
     * @returns The position of the pointer in pixel
     */
    getPointerPosition(i: number): Vector2 | null;
    getPointerPositionLastFrame(i: number): Vector2 | null;
    getPointerPositionDelta(i: number): Vector2 | null;
    /**
     * The pointer position in screenspace coordinates (-1 to 1) where 0 is the center of the screen.
     * This can be useful for e.g. raycasting (see https://threejs.org/docs/#api/en/core/Raycaster.setFromCamera)
     */
    getPointerPositionRC(i: number): Vector2 | null;
    getPointerDown(i: number): boolean;
    getPointerUp(i: number): boolean;
    getPointerPressed(i: number): boolean;
    getPointerClicked(i: number): boolean;
    getPointerDoubleClicked(i: number): boolean;
    getPointerDownTime(i: number): number;
    getPointerUpTime(i: number): number;
    getPointerLongPress(i: number): boolean;
    getIsMouse(i: number): boolean;
    getIsTouch(i: number): boolean;
    getTouchesPressedCount(): number;
    getMouseWheelChanged(i?: number): boolean;
    getMouseWheelDeltaY(i?: number): number;
    getPointerEvent(i: number): Event | undefined;
    foreachPointerId(pointerType?: string | PointerType | string[] | PointerType[]): Generator<number>;
    foreachTouchId(): Generator<number>;
    private _pointerIsActive;
    private context;
    private _pointerDown;
    private _pointerUp;
    private _pointerClick;
    private _pointerDoubleClick;
    private _pointerPressed;
    private _pointerPositions;
    private _pointerPositionsLastFrame;
    private _pointerPositionsDelta;
    private _pointerPositionsRC;
    private _pointerPositionDown;
    private _pointerDownTime;
    private _pointerUpTime;
    private _pointerUpTimestamp;
    private _pointerIds;
    private _pointerTypes;
    private _mouseWheelChanged;
    private _mouseWheelDeltaY;
    private _pointerEvent;
    /** current pressed pointer events. Used to check if any of those events was used  */
    private _pointerEventsPressed;
    /** This is added/updated for pointers. screenspace pointers set this to the camera near plane  */
    private _pointerSpace;
    private readonly _pressedStack;
    private onDownButton;
    private onReleaseButton;
    /** the first button that was down and is currently pressed */
    getFirstPressedButtonForPointer(pointerId: number): number | undefined;
    /** the last (most recent) button that was down and is currently pressed */
    getLatestPressedButtonForPointer(pointerId: number): number | undefined;
    /** Get a key (if any) that was just pressed this frame (this is only true for the frame it was pressed down) */
    getKeyDown(): string | null;
    /** Get true or false if the given key was pressed this frame */
    getKeyDown(key: KeyCode | ({} & string)): boolean;
    /** Get a key (if any) that is currently being pressed (held down) */
    getKeyPressed(): string | null;
    /** Get true or false if the given key is pressed */
    getKeyPressed(key: KeyCode | ({} & string)): boolean;
    /** Get a key (if any) that was released in this frame */
    getKeyUp(): string | null;
    /** Get true or false if the given key was released this frame */
    getKeyUp(key: KeyCode | ({} & string)): boolean;
    isKeyDown(keyCode: KeyCode | ({} & string)): boolean;
    isKeyUp(keyCode: KeyCode | ({} & string)): boolean;
    isKeyPressed(keyCode: KeyCode | ({} & string)): boolean;
    private getCodeForCommonKeyName;
    createInputEvent(args: NEPointerEvent): void;
    convertScreenspaceToRaycastSpace<T extends Vec2 | Vector2>(vec2: T): T;
    /* Excluded from this release type: __constructor */
    /** this is the html element we subscribed to for events */
    private _htmlEventSource;
    bindEvents(): void;
    unbindEvents(): void;
    dispose(): void;
    private onLostFocus;
    private readonly _receivedPointerMoveEventsThisFrame;
    private onEndOfFrame;
    private canReceiveInput;
    private onContextMenu;
    private keysPressed;
    private onKeyDown;
    private onKeyPressed;
    private onKeyUp;
    private onWheelWindow;
    private onMouseWheel;
    private onPointerDown;
    private onPointerMove;
    private onPointerCancel;
    private onPointerUp;
    private getPointerId;
    private getButtonName;
    private onTouchStart;
    private onTouchMove;
    private onTouchEnd;
    private readonly tempNearPlaneVector;
    private readonly tempFarPlaneVector;
    private readonly tempLookMatrix;
    private getAndUpdateSpatialObjectForScreenPosition;
    private isInRect;
    private onDown;
    private onMove;
    private onUp;
    private updatePointerPosition;
    /** get the next free id */
    private getPointerIndex;
    private setPointerState;
    private setPointerStateT;
    private onDispatchEvent;
}

declare type InputDeviceLayout = {
    selectComponentId: string;
    components: {
        [key: string]: ComponentMap;
    };
    mapping: Mapping;
    gamepad: Array<XRControllerButtonName>;
    axes: Array<{
        componentId: ControllerAxes;
        axis: "x-axis" | "y-axis";
    }>;
};

/** Union of all input event names (pointer and keyboard) */
export declare type InputEventNames = PointerEventNames | KeyboardEventNames;

/**
 * Priority queues for input event listeners — lower values are invoked earlier.
 */
export declare enum InputEventQueue {
    /** Invoked before listeners in the default queue */
    Early = -100,
    Default = 0,
    /** Invoked after listeners in the default queue */
    Late = 100
}

/**
 * Event types that can be listened to via {@link Input.addEventListener}.
 * @see {@link NEPointerEvent} for pointer event data
 * @see {@link NEKeyboardEvent} for keyboard event data
 */
export declare const enum InputEvents {
    /** Fired when a pointer button is pressed */
    PointerDown = "pointerdown",
    /** Fired when a pointer button is released */
    PointerUp = "pointerup",
    /** Fired when a pointer moves */
    PointerMove = "pointermove",
    /** Fired when a key is pressed down */
    KeyDown = "keydown",
    /** Fired when a key is released */
    KeyUp = "keyup",
    /** Fired when a key produces a character value */
    KeyPressed = "keypress"
}

/**
 * [InputField](https://engine.needle.tools/docs/api/InputField) is a UI component that allows users to enter and edit text.
 * It provides a text input area where users can type, delete, and modify text.
 * The InputField supports placeholder text, events for value changes, and end edit actions.
 * @summary Text field for user input
 * @category User Interface
 * @group Components
 */
export declare class InputField extends Component implements IPointerEventHandler {
    /** The current text content of the field */
    get text(): string;
    set text(value: string);
    /** True while this field is the active input target */
    get isFocused(): boolean;
    private textComponent?;
    private placeholder?;
    /** Invoked whenever the text value changes */
    onValueChanged?: EventList<any>;
    /** Invoked when editing ends */
    onEndEdit?: EventList<any>;
    private static active;
    private static activeTime;
    // presumably a hidden HTML input used to capture keyboard/IME input — TODO confirm
    private static htmlField;
    private static htmlFieldFocused;
    private inputEventFn;
    private _iosEventFn;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    /** Clear the input field if it's currently active */
    clear(): void;
    /** Select the input field, set it active to receive keyboard input */
    select(): void;
    /** Deselect the input field, stop receiving keyboard input */
    deselect(): void;
    onPointerEnter(_args: PointerEventData): void;
    onPointerExit(_args: PointerEventData): void;
    onPointerClick(_args: any): void;
    private activeLoop;
    private onSelected;
    private onDeselected;
    update(): void;
    private onInput;
    private setTextFromInputField;
    private selectInputField;
    private processInputOniOS;
}

/**
 * Internal renderer that draws many meshes sharing a material through a single
 * three.js BatchedMesh, growing its buffers on demand (see {@link InstanceHandle}).
 */
declare class InstancedMeshRenderer {
    /** The three instanced mesh
     * @link https://threejs.org/docs/#api/en/objects/InstancedMesh
     */
    get batchedMesh(): BatchedMesh;
    get visible(): boolean;
    set visible(val: boolean);
    get castShadow(): boolean;
    set castShadow(val: boolean);
    set receiveShadow(val: boolean);
    /** If true, the instancer is allowed to grow when the max instance count is reached */
    allowResize: boolean;
    /** The name of the instancer */
    name: string;
    /** The added geometry */
    readonly geometry: BufferGeometry;
    /** The material used for the instanced mesh */
    readonly material: Material;
    /** The current number of instances */
    get count(): number;
    /** Update the bounding box and sphere of the instanced mesh
     * @param box If true, update the bounding box
     * @param sphere If true, update the bounding sphere
     */
    updateBounds(box?: boolean, sphere?: boolean): void;
    private _context;
    private _batchedMesh;
    private _handles;
    private _geometryIds;
    private _maxInstanceCount;
    private _currentInstanceCount;
    private _currentVertexCount;
    private _currentIndexCount;
    private _maxVertexCount;
    private _maxIndexCount;
    private static nullMatrix;
    /** Check if the geometry can be added to this instancer
     * @param geometry The geometry to check
     * @param material The material of the geometry
     * @returns true if the geometry can be added
     */
    canAdd(geometry: BufferGeometry, material: Material): boolean;
    private _needUpdateBounds;
    private _debugMaterial;
    private getBatchedMeshName;
    constructor(name: string, geo: BufferGeometry, material: Material, initialMaxCount: number, context: Context);
    /** Disposes this instancer and its resources */
    dispose(): void;
    /** Creates a handle for the given mesh and adds it to this instancer
     * @returns the new handle, or null if the mesh could not be added
     */
    addInstance(obj: Mesh): InstanceHandle | null;
    /** Adds a previously created handle to this instancer
     * @returns true if the handle was added
     */
    add(handle: InstanceHandle): boolean;
    /** Removes a handle from this instancer
     * @param delete_ If true, the instance handle will also be removed from the global list
     */
    remove(handle: InstanceHandle, delete_: boolean): void;
    /** Writes the given matrix to the instance at the given index */
    updateInstance(mat: Matrix4, index: number): void;
    /** Updates the geometry stored at the given geometry index
     * @returns true on success
     */
    updateGeometry(geo: BufferGeometry, geometryIndex: number): boolean;
    private onBeforeRender;
    private onAfterRender;
    private validateGeometry;
    private markNeedsUpdate;
    /**
     * @param geo The geometry to add (if none is provided it means the geometry is already added and just updated)
     */
    private mustGrow;
    private _growId;
    private grow;
    private tryEstimateVertexCountSize;
    private addGeometry;
    private removeGeometry;
}

/**
 * The instance handle is used to interface with the mesh that is rendered using instancing.
 */
export declare class InstanceHandle {
    /** Global list of all instance handles */
    static readonly all: InstanceHandle[];
    /** The name of the object */
    get name(): string;
    /** True while this instance is active in the renderer — TODO confirm exact semantics */
    get isActive(): boolean;
    get vertexCount(): number;
    get maxVertexCount(): number;
    get reservedVertexCount(): number;
    get indexCount(): number;
    get maxIndexCount(): number;
    get reservedIndexCount(): number;
    /** The object that is being instanced */
    readonly object: Mesh;
    /** The instancer/BatchedMesh that is rendering this object*/
    readonly renderer: InstancedMeshRenderer;
    /* Excluded from this release type: __instanceIndex */
    /* Excluded from this release type: __reservedVertexRange */
    /* Excluded from this release type: __reservedIndexRange */
    __geometryIndex: number;
    /** The mesh information of the object - this tries to also calculate the LOD info */
    readonly meshInformation: MeshInformation;
    constructor(originalObject: Mesh, instancer: InstancedMeshRenderer);
    /** Calculates the mesh information again
     * @returns true if the vertex count or index count has changed
     */
    updateMeshInformation(): boolean;
    /** Updates the matrix from the rendered object. Will also call updateWorldMatrix internally */
    updateInstanceMatrix(updateChildren?: boolean, updateMatrix?: boolean): void;
    /** Updates the matrix of the instance */
    setMatrix(matrix: Matrix4): void;
    /** Can be used to change the geometry of this instance */
    setGeometry(geo: BufferGeometry): boolean;
    /** Adds this object to the instancing renderer (effectively activating instancing) */
    add(): void;
    /** Removes this object from the instancing renderer
     * @param delete_ If true, the instance handle will be removed from the global list
     */
    remove(delete_: boolean): void;
}

/**
 * Handles instancing for Needle Engine.
 */
export declare class InstancingHandler {
    /** Global singleton instance */
    static readonly instance: InstancingHandler;
    /** This is the initial instance count when creating a new instancing structure.
     * Override this to return the number of max instances that you expect for a given object.
     * The larger the value the more objects can be added without having to resize but it will also consume more memory.
     * (The instancing mesh renderer will grow x2 if the max instance count is reached)
     * @default 4
     * @returns The initial instance count
     */
    static getStartInstanceCount: (obj: Object3D) => number;
    /** All instanced mesh renderers currently managed by this handler */
    objs: InstancedMeshRenderer[];
    /** Sets up instancing for the given object — presumably recursing into children up to `level`; TODO confirm
     * @returns the created instance handles, or null if instancing was not applied
     */
    setup(renderer: Renderer, obj: Object3D, context: Context, handlesArray: InstanceHandle[] | null, args: InstancingSetupArgs, level?: number): InstanceHandle[] | null;
    private tryCreateOrAddInstance;
    private autoUpdateInstanceMatrix;
}

/** Internal state passed through {@link InstancingHandler.setup} */
declare class InstancingSetupArgs {
    /** The renderer component instancing is being set up for */
    rend: Renderer;
    /** Number of meshes found so far during setup */
    foundMeshes: number;
    useMatrixWorldAutoUpdate: boolean;
}

/**
 * Utility class for accessing instancing related properties
 */
export declare class InstancingUtil {
    /** Is this object rendered using an InstancedMesh */
    static isUsingInstancing(instance: Object3D): boolean;
    /** Returns the instanced mesh IF the object is rendered by an instanced mesh
     * @link https://threejs.org/docs/#api/en/objects/InstancedMesh
     */
    static getRenderer(instance: Object3D): InstancedMesh | null;
    // NOTE(review): unlike its siblings this member is NOT static — confirm whether that is intentional
    setAutoUpdateBounds(instance: Object3D, value: boolean): void;
    /** Mark an instanced object dirty so the instance matrix will be updated */
    static markDirty(go: Object3D | null, recursive?: boolean): void;
}

/**
 * Creates a copy (clone) of a GameObject or loads and instantiates an AssetReference.
 * All components on the original object are cloned and `awake()` is called on them.
 *
 * @param instance The Object3D to clone, or an AssetReference to load and instantiate
 * @param opts Optional instantiation settings (position, rotation, scale, parent)
 * @returns The cloned GameObject, or a Promise<Object3D> if instantiating from AssetReference
 *
 * @example Clone an object
 * ```ts
 * import { instantiate } from "@needle-tools/engine";
 * const clone = instantiate(original);
 * clone.position.set(1, 0, 0);
 * this.context.scene.add(clone);
 * ```
 *
 * @example Instantiate with options
 * ```ts
 * const clone = instantiate(original, {
 *   parent: parentObject,
 *   position: new Vector3(0, 1, 0),
 *   rotation: new Quaternion()
 * });
 * ```
 *
 * @example Instantiate from AssetReference
 * ```ts
 * const instance = await instantiate(myAssetRef);
 * if (instance) this.context.scene.add(instance);
 * ```
 *
 * @see {@link GameObject.instantiate} for the static method equivalent
 * @see {@link destroy} to remove instantiated objects
 */
export declare function instantiate(instance: AssetReference, opts?: IInstantiateOptions | null): Promise<Object3D | null>;

/** Overload: synchronously clones an existing Object3D/GameObject and returns the clone */
export declare function instantiate(instance: IGameObject | Object3D, opts?: IInstantiateOptions | null): IGameObject;

/**
 * Provides access to the instantiated object and its clone
 */
export declare type InstantiateContext = Readonly<InstantiateReferenceMap>;

/** Events raised when instances are created or destroyed */
export declare enum InstantiateEvent {
    NewInstanceCreated = "new-instance-created",
    InstanceDestroyed = "instance-destroyed"
}

/**
 * {@link UIDProvider} that generates UUIDs deterministically from a numeric or string seed.
 */
export declare class InstantiateIdProvider implements UIDProvider {
    /** The current seed value */
    get seed(): number;
    set seed(val: number);
    private _originalSeed;
    private _seed;
    constructor(seed: string | number);
    /** Resets the internal seed back to the original seed */
    reset(): void;
    /** Generates the next UUID (optionally mixing in the given string — TODO confirm) */
    generateUUID(str?: string): string;
    /** Re-initializes the provider with a new seed */
    initialize(strOrNumber: string | number): void;
    /** Creates a provider seeded from the given string */
    static createFromString(str: string): InstantiateIdProvider;
    private static hash;
}

/**
 * Instantiation options for {@link syncInstantiate}
 */
export declare class InstantiateOptions implements IInstantiateOptions {
    /** Provider used to generate ids for the new instance */
    idProvider?: UIDProvider | undefined;
    /** Parent to attach the new instance to (an Object3D or its id) */
    parent?: string | undefined | Object3D;
    /** If true, the instance keeps its world position when parented — TODO confirm */
    keepWorldPosition?: boolean;
    position?: Vector3 | [number, number, number] | undefined;
    rotation?: Quaternion | Euler | [number, number, number] | undefined;
    scale?: Vector3 | [number, number, number] | undefined;
    visible?: boolean | undefined;
    context?: Context | undefined;
    components?: boolean | undefined;
    /** Creates a copy of these options */
    clone(): InstantiateOptions;
    /** Copy fields from another object, clone field references */
    cloneAssign(other: InstantiateOptions | IInstantiateOptions): void;
}

/** Maps ids to clone references produced during instantiation — TODO confirm key semantics */
declare type InstantiateReferenceMap = Record<string, ObjectCloneReference>;

/**
 * An empty component that can be used to mark an object as interactable.
 * @group Components
 * @deprecated
 */
export declare class Interactable extends Component {
}

export declare namespace InternalScreenshotUtils {
    /**
     * Screenshot rendering for AR
     * @param args
     * @returns The canvas with the screenshot
     */
    export function compositeWithCameraImage(args: {
        scene: Scene;
        camera: Camera_2;
        renderer: WebGLRenderer;
        width: number;
        height: number;
    }): HTMLCanvasElement;
    /** A fullscreen plane mesh whose texture can be swapped via `setTexture` */
    export type FullscreenPlane = Mesh & {
        setTexture: (texture: Texture) => void;
    };
    /** Creates a fullscreen plane, optionally with a custom material and shader defines */
    export function makeFullscreenPlane(options?: {
        material?: ShaderMaterial;
        defines?: {
            [key: string]: boolean | number;
        };
    }): FullscreenPlane;
    export {};
}

/* Excluded from this release type: invokeLoadedImportPluginHooks */

/* Excluded from this release type: invokeXRSessionEnd */

/* Excluded from this release type: invokeXRSessionStart */

/** Read-only view of a running particle system's state and transform */
declare interface IParticleSystem {
    /** Number of currently alive particles */
    get currentParticles(): number;
    get maxParticles(): number;
    get time(): number;
    get deltaTime(): number;
    get duration(): number;
    readonly main: MainModule;
    get container(): Object3D;
    /** True if the system simulates in world space */
    get worldspace(): boolean;
    get worldPos(): Vector3;
    get worldQuaternion(): Quaternion;
    get worldQuaternionInverted(): Quaternion;
    get worldScale(): Vector3;
    get matrixWorld(): Matrix4;
}

/** Host object that may own a physics engine instance */
export declare interface IPhysics {
    engine?: IPhysicsEngine;
}

/**
 * Abstraction over the physics backend (rapier): lifecycle, raycasts,
 * overlap queries, collider/rigidbody management and joints.
 */
export declare interface IPhysicsEngine {
    /** Initializes the physics engine */
    initialize(): Promise<boolean>;
    /** Indicates whether the physics engine has been initialized */
    get isInitialized(): boolean;
    /** Advances physics simulation by the given time step */
    step(dt: number): void;
    /** Runs work after the physics step — TODO confirm exact responsibilities */
    postStep(): any;
    /** Indicates whether the physics engine is currently updating */
    get isUpdating(): boolean;
    /** Clears all cached data (e.g., mesh data when creating scaled mesh colliders) */
    clearCaches(): any;
    /** Enables or disables the physics engine */
    enabled: boolean;
    /** Returns the underlying physics world object */
    get world(): World | undefined;
    /** Sets the gravity vector for the physics simulation */
    set gravity(vec3: Vec3);
    /** Gets the current gravity vector */
    get gravity(): Vec3;
    /**
     * Gets the rapier physics body for a Needle component
     * @param obj The collider or rigidbody component
     * @returns The underlying physics body or null if not found
     */
    getBody(obj: ICollider | IRigidbody): null | any;
    /**
     * Gets the Needle Engine component for a rapier physics object
     * @param rapierObject The rapier physics object
     * @returns The associated component or null if not found
     */
    getComponent(rapierObject: object): IComponent | null;
    /**
     * Performs a fast raycast against physics colliders
     * @param origin Ray origin in screen or worldspace
     * @param direction Ray direction in worldspace
     * @param options Additional raycast configuration options
     * @returns Raycast result containing hit point and collider, or null if no hit
     */
    raycast(origin?: Vec2 | Vec3, direction?: Vec3, options?: {
        maxDistance?: number;
        /** True if you want to also hit objects when the raycast starts from inside a collider */
        solid?: boolean;
        queryFilterFlags?: QueryFilterFlags;
        /**
         * Raycast filter groups. Groups are used to apply the collision group rules for the scene query.
         * The scene query will only consider hits with colliders with collision groups compatible with
         * this collision group (using the bitwise test described in the collision groups section).
         * For example membership 0x0001 and filter 0x0002 should be 0x00010002
         * @see https://rapier.rs/docs/user_guides/javascript/colliders#collision-groups-and-solver-groups
         */
        filterGroups?: number;
        /**
         * Predicate to filter colliders in raycast results
         * @param collider The collider being tested
         * @returns False to ignore this collider, true to include it
         */
        filterPredicate?: (collider: ICollider) => boolean;
    }): RaycastResult;
    /**
     * Performs a raycast that also returns the normal vector at the hit point
     * @param origin Ray origin in screen or worldspace
     * @param direction Ray direction in worldspace
     * @param options Additional raycast configuration options
     * @returns Raycast result containing hit point, normal, and collider, or null if no hit
     */
    raycastAndGetNormal(origin?: Vec2 | Vec3, direction?: Vec3, options?: {
        maxDistance?: number;
        /** True if you want to also hit objects when the raycast starts from inside a collider */
        solid?: boolean;
        queryFilterFlags?: QueryFilterFlags;
        /**
         * Raycast filter groups. Groups are used to apply the collision group rules for the scene query.
         * The scene query will only consider hits with colliders with collision groups compatible with
         * this collision group (using the bitwise test described in the collision groups section).
         * For example membership 0x0001 and filter 0x0002 should be 0x00010002
         * @see https://rapier.rs/docs/user_guides/javascript/colliders#collision-groups-and-solver-groups
         */
        filterGroups?: number;
        /**
         * Predicate to filter colliders in raycast results
         * @param collider The collider being tested
         * @returns False to ignore this collider, true to include it
         */
        filterPredicate?: (collider: ICollider) => boolean;
    }): RaycastResult;
    /**
     * Finds all colliders within a sphere
     * @param point The center point of the sphere
     * @param radius The radius of the sphere
     * @returns Array of objects that overlap with the sphere
     */
    sphereOverlap(point: Vector3, radius: number): Array<ShapeOverlapResult>;
    /** box overlap detection using rapier against colliders
     * @param point center of the box in worldspace
     * @param size size of the box
     * @param rotation quaternion representation of the rotation in world space
     * @returns array of colliders that overlap with the box. Note: they currently only contain the collider and the gameobject
     */
    boxOverlap(point: Vector3, size: Vector3, rotation: Vector4Like | null): Array<ShapeOverlapResult>;
    /**
     * Creates a collider in the physics world.
     *
     * @param collider - The collider component.
     * @param desc - The collider description.
     * @returns The created collider.
     *
     * @throws Will throw an error if the physics world is not initialized. Make sure to call `initialize()` before creating colliders.
     *
     * @example
     * ```typescript
     * const boxColliderDesc = NEEDLE_ENGINE_MODULES.RAPIER_PHYSICS.MODULE.ColliderDesc.cuboid(1, 1, 1);
     * const collider = physicsEngine.createCollider(myBoxColliderComponent, boxColliderDesc);
     * ```
     */
    createCollider(collider: ICollider, desc: any): any;
    /**
     * Adds a sphere collider to the physics world
     * @param collider The collider component to add
     */
    addSphereCollider(collider: ICollider): any;
    /**
     * Adds a box collider to the physics world
     * @param collider The collider component to add
     * @param size The size of the box
     */
    addBoxCollider(collider: ICollider, size: Vector3): any;
    /**
     * Adds a capsule collider to the physics world
     * @param collider The collider component to add
     * @param radius The radius of the capsule
     * @param height The height of the capsule
     */
    addCapsuleCollider(collider: ICollider, radius: number, height: number): any;
    /**
     * Adds a mesh collider to the physics world
     * @param collider The collider component to add
     * @param mesh The mesh to use for collision
     * @param convex Whether the collision mesh should be treated as convex
     * @param scale Optional scale to apply to the mesh
     */
    addMeshCollider(collider: ICollider, mesh: Mesh, convex: boolean, scale?: Vector3 | undefined): any;
    /**
     * Updates the physics material properties of a collider
     * @param collider The collider to update
     */
    updatePhysicsMaterial(collider: ICollider): any;
    /**
     * Wakes up a sleeping rigidbody
     * @param rb The rigidbody to wake up
     */
    wakeup(rb: IRigidbody): any;
    /**
     * Checks if a rigidbody is currently sleeping
     * @param rb The rigidbody to check
     * @returns Whether the rigidbody is sleeping or undefined if cannot be determined
     */
    isSleeping(rb: IRigidbody): boolean | undefined;
    /**
     * Updates the physical properties of a rigidbody or collider
     * @param rb The rigidbody or collider to update
     */
    updateProperties(rb: IRigidbody | ICollider): any;
    /**
     * Resets all forces acting on a rigidbody
     * @param rb The rigidbody to reset forces on
     * @param wakeup Whether to wake up the rigidbody
     */
    resetForces(rb: IRigidbody, wakeup: boolean): any;
    /**
     * Resets all torques acting on a rigidbody
     * @param rb The rigidbody to reset torques on
     * @param wakeup Whether to wake up the rigidbody
     */
    resetTorques(rb: IRigidbody, wakeup: boolean): any;
    /**
     * Adds a continuous force to a rigidbody
     * @param rb The rigidbody to add force to
     * @param vec The force vector to add
     * @param wakeup Whether to wake up the rigidbody
     */
    addForce(rb: IRigidbody, vec: Vec3, wakeup: boolean): any;
    /**
     * Applies an instantaneous impulse to a rigidbody
     * @param rb The rigidbody to apply impulse to
     * @param vec The impulse vector to apply
     * @param wakeup Whether to wake up the rigidbody
     */
    applyImpulse(rb: IRigidbody, vec: Vec3, wakeup: boolean): any;
    /**
     * Gets the linear velocity of a rigidbody or the rigidbody attached to a collider
     * @param rb The rigidbody or collider to get velocity from
     * @returns The linear velocity vector or null if not available
     */
    getLinearVelocity(rb: IRigidbody | ICollider): Vec3 | null;
    /**
     * Gets the angular velocity of a rigidbody
     * @param rb The rigidbody to get angular velocity from
     * @returns The angular velocity vector or null if not available
     */
    getAngularVelocity(rb: IRigidbody): Vec3 | null;
    /**
     * Sets the angular velocity of a rigidbody
     * @param rb The rigidbody to set angular velocity for
     * @param vec The angular velocity vector to set
     * @param wakeup Whether to wake up the rigidbody
     */
    setAngularVelocity(rb: IRigidbody, vec: Vec3, wakeup: boolean): any;
    /**
     * Sets the linear velocity of a rigidbody
     * @param rb The rigidbody to set linear velocity for
     * @param vec The linear velocity vector to set
     * @param wakeup Whether to wake up the rigidbody
     */
    setLinearVelocity(rb: IRigidbody, vec: Vec3, wakeup: boolean): any;
    /**
     * Updates the position and/or rotation of a physics body
     * @param comp The collider or rigidbody component to update
     * @param translation Whether to update the position
     * @param rotation Whether to update the rotation
     */
    updateBody(comp: ICollider | IRigidbody, translation: boolean, rotation: boolean): any;
    /**
     * Removes a physics body from the simulation
     * @param body The component whose physics body should be removed
     */
    removeBody(body: IComponent): any;
    /**
     * Enables or disables a collider in the physics world without destroying it.
     * Uses Rapier's `setEnabled()` for efficient toggling instead of removing and recreating the collider.
     * @param collider The collider component to enable or disable
     * @param enabled Whether the collider should be enabled
     * @returns True if the collider was found and its state was changed, false otherwise
     */
    setColliderEnabled(collider: ICollider, enabled: boolean): boolean;
    /**
     * Gets the physics body for a component
     * @param obj The collider or rigidbody component
     * @returns The underlying physics body or null if not found
     */
    getBody(obj: ICollider | IRigidbody): null | any;
    /** Creates a fixed joint rigidly attaching body1 to body2 */
    addFixedJoint(body1: IRigidbody, body2: IRigidbody): any;
    /** Creates a hinge joint between body1 and body2 around the given anchor and axis */
    addHingeJoint(body1: IRigidbody, body2: IRigidbody, anchor: Vec3, axis: Vec3): any;
    /** Enable to render collider shapes */
    debugRenderColliders: boolean;
    /** Enable to visualize raycasts in the scene with gizmos */
    debugRenderRaycasts: boolean;
}

export declare interface IPointerClickHandler {
    /** Called when an object (or any child object) is clicked (needs an EventSystem in the scene) */
    onPointerClick?(args: PointerEventData): any;
}

export declare interface IPointerDownHandler {
    /** Called when a button is started to being pressed on an object (or a child object) */
    onPointerDown?(args: PointerEventData): any;
}

export declare interface IPointerEnterHandler {
    /** Called when a pointer (mouse, touch, xr controller) starts pointing on/hovering an object (or a child object) */
    onPointerEnter?(args: PointerEventData): any;
}

/** Implement on your component to receive input events via the `EventSystem` component */
export declare interface IPointerEventHandler extends IPointerDownHandler, IPointerUpHandler, IPointerEnterHandler, IPointerMoveHandler, IPointerExitHandler, IPointerClickHandler {
}

export declare interface IPointerExitHandler {
    /** Called when a pointer (mouse, touch, xr controller) exits an object (it was hovering the object before but now it's not anymore) */
    onPointerExit?(args: PointerEventData): any;
}

/** Receives raw pointer hit events — TODO confirm how this relates to the handler interfaces above */
export declare interface IPointerHitEventReceiver {
    onPointerHits: OnPointerHitsEvent;
}

export declare interface IPointerMoveHandler {
    /** Called when a pointer (mouse, touch, xr controller) is moving over an object (or a child object) */
    onPointerMove?(args: PointerEventData): any;
}

export declare interface IPointerUpHandler {
    /** Called when a button is released (which was previously pressed in `onPointerDown`) */
    onPointerUp?(args: PointerEventData): any;
}

/** Component contract for the manager that owns post-processing effects */
declare type IPostProcessingManager = IComponent & {
    get isPostProcessingManager(): boolean;
    /** True when the effect stack changed and needs to be re-applied — TODO confirm */
    get dirty(): boolean;
    set dirty(value: boolean);
    addEffect(effect: PostProcessingEffect): void;
    removeEffect(effect: PostProcessingEffect): void;
};

export declare interface IRaycastOptions {
    /** Optionally a custom raycaster can be provided. Other properties will then be set on this raycaster */
    raycaster?: Raycaster;
    /** Optional ray that can be used for raycasting
     *  @link https://threejs.org/docs/#api/en/math/Ray
     * */
    ray?: Ray;
    /** The camera to use for the raycaster */
    cam?: Camera_2 | null;
    /** Point on screen in raycast space / normalized device coordinates (-1 to 1).
     * @link https://threejs.org/docs/#api/en/core/Raycaster.setFromCamera */
    screenPoint?: Vector2;
    /** Raycast results array. You can provide an array here to avoid creating a new one (note that if your array already contains items they will be removed) */
    results?: Array<Intersection>;
    /** Objects to raycast against. If no target array is provided the whole scene will be raycasted */
    targets?: Array<Object3D>;
    /**
     * If true, the raycaster will traverse the scene recursively.
     * @default true
     */
    recursive?: boolean;
    /**
     * If true, the raycaster will use a more precise method to test for intersections. This is slower but more accurate.
     * @default true
     */
    precise?: boolean;
    /** Set the raycaster near distance:
     * The near factor of the raycaster. This value indicates which objects can be discarded based on the distance. This value shouldn't be negative and should be smaller than the far property.
     * @link https://threejs.org/docs/#api/en/core/Raycaster.near
     */
    minDistance?: number;
    /** Set the raycaster far distance:
     * The far factor of the raycaster. This value indicates which objects can be discarded based on the distance. This value shouldn't be negative and should be larger than the near property.
     * @link https://threejs.org/docs/#api/en/core/Raycaster.far
     */
    maxDistance?: number;
    /** @link https://threejs.org/docs/#api/en/core/Raycaster.params */
    lineThreshold?: number;
    /** raw layer mask, use setLayer to set an individual layer active */
    layerMask?: Layers | number;
    /** Objects to exclude from the raycast */
    ignore?: Object3D[];
    /** Optional callback function to be called per object before it is tested for intersections.
     * This can be used to filter objects.
     * Return `false` to ignore the object completely or `"continue in children"` to skip the object but continue to traverse its children (if you do raycast with `recursive` enabled)
     * */
    testObject?: RaycastTestObjectCallback;
    /**
     * Use MeshBVH for raycasting. This is faster than the default threejs raycaster but uses more memory.
     * @default true
     */
    useAcceleratedRaycast?: boolean;
    /**
     * When enabled raycasting will use the 'slower' traditional three.js raycasting method while the MeshBVH is being generated in the background. When disabled objects that don't have a BVH available *Yet* because it's still being generated will be ignored and not generate any hits. This is useful to improve performance for cases where raycasting happens frequently and it won't matter if raycasts don't produce hits for a few frames.
     * @default true
     */
    allowSlowRaycastFallback?: boolean;
}

/** Rect transform contract used by the UI layout system */
declare interface IRectTransform extends IComponent {
    /** True if the transform changed and needs an update */
    get isDirty(): boolean;
    markDirty(): any;
    updateTransform(): any;
}

/** Implement to be notified when a parent rect transform changes */
declare interface IRectTransformChangedReceiver {
    onParentRectTransformChanged(comp: IRectTransform): void;
}

/** Renderer component contract exposing its material(s) */
export declare interface IRenderer extends IComponent {
    sharedMaterial: Material;
    get sharedMaterials(): ISharedMaterials;
}

/** Rigidbody component contract for the physics engine */
export declare interface IRigidbody extends IComponent {
    get isRigidbody(): boolean;
    constraints: RigidbodyConstraints;
    isKinematic: boolean;
    /** When true the mass will be automatically calculated from the attached colliders */
    autoMass: boolean;
    mass: number;
    drag: number;
    angularDrag: number;
    useGravity: boolean;
    centerOfMass: Vec3;
    gravityScale: number;
    dominanceGroup: number;
    collisionDetectionMode: CollisionDetectionMode;
    lockPositionX: boolean;
    lockPositionY: boolean;
    lockPositionZ: boolean;
    lockRotationX: boolean;
    lockRotationY: boolean;
    lockRotationZ: boolean;
}

/** Returns true if the object is active — presumably including all ancestors; TODO confirm */
export declare function isActiveInHierarchy(go: Object3D): boolean;

/** Returns true if the object itself is marked active — presumably ignoring ancestors; TODO confirm */
export declare function isActiveSelf(go: Object3D): boolean;

/** @deprecated use {@link DeviceUtilities.isAndroidDevice} instead */
export declare function isAndroidDevice(): boolean;

/** Returns true if the given object is an AnimationAction. NOTE(review): consider typing as a predicate (`obj is AnimationAction`) to enable narrowing */
export declare function isAnimationAction(obj: object): boolean;

/**
 * The ISceneEventListener is called by the {@link SceneSwitcher} when a scene is loaded or unloaded.
 * It must be added to the root object of your scene (that is being loaded) or on the same object as the SceneSwitcher
 * It can be used to e.g. smooth the transition between scenes or to load additional content when a scene is loaded.
 * @example
 * ```ts
 * import { ISceneEventListener } from "@needle-tools/engine";
 *
 * // Add this component to the root object of a scene loaded by a SceneSwitcher or to the same object as the SceneSwitcher
 * export class MySceneListener implements ISceneEventListener {
 *   async sceneOpened(sceneSwitcher: SceneSwitcher) {
 *    console.log("Scene opened", sceneSwitcher.currentlyLoadedScene?.url);
 *  }
 * }
 * ```
 *
 **/
export declare interface ISceneEventListener {
    /** Called when the scene is loaded and added */
    sceneOpened(sceneSwitcher: SceneSwitcher): Promise<void>;
    /** Called before the scene is being removed (due to another scene being loaded) */
    sceneClosing(): Promise<void>;
}

/** Type guard: returns true if the given object is a Needle Engine component */
export declare function isComponent(obj: any): obj is IComponent;

/* Excluded from this release type: isDebugMode */

/**
 * @deprecated use {@link DeviceUtilities.isDesktop} instead
 */
export declare function isDesktop(): boolean;

export declare function isDestroyed(go: Object3D): boolean;

/** True when the application runs on a local url */
export declare function isDevEnvironment(): boolean;

export declare function isDisposed(obj: object): boolean;

export declare interface ISerializable {
    $serializedTypes?: {
        [key: string]: ConstructorConcrete<any> | ITypeInformation | null;
    };
    onBeforeDeserialize?(data: any, context: SerializationContext): void | undefined | boolean;
    onBeforeDeserializeMember?(key: string, data: any, context: SerializationContext): void | undefined | boolean;
    onAfterDeserializeMember?(key: string, data: any, context: SerializationContext): void;
    onAfterDeserialize?(data: any, context: SerializationContext): void;
}

/**
 * Returns whether an export process is currently running.
 * @returns True if an export process is currently running, false otherwise.
 */
export declare function isExporting(): boolean;

export declare function isGLTFModel(model: Model): model is GLTF;

export declare interface ISharedMaterials {
    [num: number]: Material;
    get length(): number;
}

export declare function isHostedOnGlitch(): boolean;

export declare function isHotReloadEnabled(): boolean;

/* Excluded from this release type: isHotReloading */

/** @returns true if the element is a Needle Engine icon element */
export declare function isIconElement(element: Node): boolean;

/** @deprecated use {@link DeviceUtilities.isiOS} instead */
export declare function isiOS(): boolean;

/** @deprecated use {@link DeviceUtilities.isiPad} instead */
export declare function isIPad(): boolean;

/** @deprecated use {@link DeviceUtilities.isiPad} instead */
export declare function isiPad(): boolean;

export declare function isLocalNetwork(hostname?: string): boolean;

/** @deprecated use {@link DeviceUtilities.isMacOS} instead */
export declare function isMacOS(): boolean;

/**
 * @deprecated use {@link DeviceUtilities.isMobileDevice} instead
 */
export declare function isMobileDevice(): boolean;

/** @deprecated use {@link DeviceUtilities.isMozillaXR} instead */
export declare function isMozillaXR(): boolean;

export declare interface ISphereCollider extends ICollider {
    radius: number;
}

/** @deprecated use {@link DeviceUtilities.isQuest} instead */
export declare function isQuest(): boolean;

export declare function isResourceTrackingEnabled(): boolean;

/** @deprecated use {@link DeviceUtilities.isSafari} instead */
export declare function isSafari(): boolean;

export declare function isUsingInstancing(instance: Object3D): boolean;

export declare interface ITime {
    get time(): number;
    get deltaTime(): number;
}

/**
 * Interface for receiving callbacks during timeline animation evaluation.
 * Allows modification of position/rotation values before they are applied.
 *
 * **Registration:**
 * ```ts
 * director.registerAnimationCallback(this);
 * // Later: director.unregisterAnimationCallback(this);
 * ```
 *
 * @experimental This interface may change in future versions
 * @see {@link PlayableDirector.registerAnimationCallback}
 */
export declare interface ITimelineAnimationOverride {
    /**
     * @param director The director that is playing the timeline
     * @param target The target object that is being animated
     * @param time The current time of the timeline
     * @param rotation The evaluated rotation of the target object at the current time
     */
    onTimelineRotation?(director: PlayableDirector, target: Object3D, time: number, rotation: Quaternion): any;
    /**
     * @param director The director that is playing the timeline
     * @param target The target object that is being animated
     * @param time The current time of the timeline
     * @param position The evaluated position of the target object at the current time
     */
    onTimelinePosition?(director: PlayableDirector, target: Object3D, time: number, position: Vector3): any;
}

declare interface ITypeInformation {
    type?: ConstructorConcrete<any>;
}

declare interface ITypeSerializer {
    readonly name?: string;
    onSerialize(data: any, context: SerializationContext): any;
    onDeserialize(data: any, context: SerializationContext): any;
}

/**
 * Interface for USDZ Exporter Extensions used by {@link USDZExporter}
 */
declare interface IUSDExporterExtension {
    /**
     * The name of the extension
     */
    get extensionName(): string;
    /**
     * Called before the document is built
     */
    onBeforeBuildDocument?(context: USDZExporterContext): any;
    /**
     * Called after the document is built
     */
    onAfterBuildDocument?(context: USDZExporterContext): any;
    onExportObject?(object: Object3D, model: USDObject, context: USDZExporterContext): any;
    onAfterSerialize?(context: USDZExporterContext): any;
    onAfterHierarchy?(context: USDZExporterContext, writer: USDWriter): void | Promise<void>;
}

export declare interface IWatch {
    subscribeWrite(callback: WriteCallback): any;
    unsubscribeWrite(callback: WriteCallback): any;
    apply(): any;
    revoke(): any;
    dispose(): any;
}

export declare interface IXRRig extends Pick<IComponent, "gameObject"> {
    isXRRig(): boolean;
    get isActive(): boolean;
    /** The rig with the highest priority will be chosen */
    priority?: number;
}

/** Received when listening to `RoomEvents.JoinedRoom` event */
export declare class JoinedRoomResponse {
    room: string;
    viewId: string;
    allowEditing: boolean;
    inRoom: string[];
}

/**
 * Base class for physics joints that connect two {@link Rigidbody} components.
 * Joints constrain how two bodies can move relative to each other.
 *
 * The joint is created between:
 * - The {@link Rigidbody} on this GameObject (automatically found)
 * - The {@link connectedBody} Rigidbody you specify
 *
 * @summary Connect two Rigidbodies with physics constraints
 * @category Physics
 * @group Components
 * @see {@link FixedJoint} for rigid connections
 * @see {@link HingeJoint} for rotating connections
 * @see {@link Rigidbody} for physics bodies
 */
declare abstract class Joint extends Component {
    /** The other Rigidbody to connect to */
    connectedBody?: Rigidbody;
    get rigidBody(): Rigidbody | null;
    private _rigidBody;
    onEnable(): void;
    private create;
    protected abstract createJoint(self: Rigidbody, other: Rigidbody): any;
}

declare const enum KeyboardEnumType {
    KeyDown = "keydown",
    KeyUp = "keyup",
    KeyPressed = "keypress"
}

declare type KeyboardEventListener = (evt: NEKeyboardEvent) => void;

declare type KeyboardEventNames = EnumToPrimitiveUnion<KeyboardEnumType>;

export declare type KeyCode = "Tab" | "Enter" | "ShiftLeft" | "ShiftRight" | "ControlLeft" | "ControlRight" | "AltLeft" | "AltRight" | "Pause" | "CapsLock" | "Escape" | "Space" | "PageUp" | "PageDown" | "End" | "Home" | "ArrowLeft" | "ArrowUp" | "ArrowRight" | "ArrowDown" | "Insert" | "Delete" | "Digit0" | "Digit1" | "Digit2" | "Digit3" | "Digit4" | "Digit5" | "Digit6" | "Digit7" | "Digit8" | "Digit9" | "KeyA" | "KeyB" | "KeyC" | "KeyD" | "KeyE" | "KeyF" | "KeyG" | "KeyH" | "KeyI" | "KeyJ" | "KeyK" | "KeyL" | "KeyM" | "KeyN" | "KeyO" | "KeyP" | "KeyQ" | "KeyR" | "KeyS" | "KeyT" | "KeyU" | "KeyV" | "KeyW" | "KeyX" | "KeyY" | "KeyZ" | "Select" | "Numpad0" | "Numpad1" | "Numpad2" | "Numpad3" | "Numpad4" | "Numpad5" | "Numpad6" | "Numpad7" | "Numpad8" | "Numpad9" | "Multiply" | "Add" | "Subtract" | "Decimal" | "Divide" | "F1" | "F2" | "F3" | "F4" | "F5" | "F6" | "F7" | "F8" | "F9" | "F10" | "F11" | "F12";

export declare class KeyEventArgs {
    key: string;
    keyType: string;
    source?: Event;
    constructor(evt: KeyboardEvent);
}

/**
 * Keyframe is a representation of a keyframe in an AnimationCurve.
 */
declare class Keyframe_2 {
    time: number;
    value: number;
    inTangent: number;
    inWeight?: number;
    outTangent: number;
    outWeight?: number;
    weightedMode?: number;
    constructor(time?: number, value?: number);
}
export { Keyframe_2 as Keyframe }

declare type LabelHandle = {
    setText(str: string): any;
};

export declare type Layer = {
    name: string;
    stateMachine: StateMachine;
};

declare abstract class LayoutGroup extends Component implements ILayoutGroup {
    private _rectTransform;
    private get rectTransform();
    onParentRectTransformChanged(_comp: IRectTransform): void;
    private _needsUpdate;
    get isDirty(): boolean;
    get isLayoutGroup(): boolean;
    updateLayout(): void;
    childAlignment: TextAnchor;
    reverseArrangement: boolean;
    spacing: number;
    padding: Padding;
    minWidth: number;
    minHeight: number;
    flexibleHeight: number;
    flexibleWidth: number;
    preferredHeight: number;
    preferredWidth: number;
    start(): void;
    onEnable(): void;
    onDisable(): void;
    protected abstract onCalculateLayout(rt: RectTransform): any;
    private set m_Spacing(value);
    get m_Spacing(): number;
}

export declare class LeftRoomResponse {
    room: string;
}

declare type LifecycleHookContext = {
    context: Context;
};

/**
 * A function that can be called during the Needle Engine frame event at a specific point
 * @link https://engine.needle.tools/docs/scripting.html#special-lifecycle-hooks
 */
declare type LifecycleMethod = (this: LifecycleHookContext, ctx: Context) => void;

/**
 * Options for `onStart(()=>{})` etc event hooks
 * @link https://engine.needle.tools/docs/scripting.html#special-lifecycle-hooks
 */
declare type LifecycleMethodOptions = {
    /**
     * If true, the callback will only be called once
     */
    once?: boolean;
};

/**
 * [Light](https://engine.needle.tools/docs/api/Light) creates a light source in the scene for illuminating 3D objects.
 *
 * **Light types:**
 * - `Directional` - Sun-like parallel rays (best for outdoor scenes)
 * - `Point` - Omnidirectional from a point (bulbs, candles)
 * - `Spot` - Cone-shaped (flashlights, stage lights)
 *
 * **Shadows:**
 * Enable shadows via `shadows` property. Configure quality with shadow resolution
 * settings. Directional lights support adaptive shadow cascades.
 *
 * **Performance tips:**
 * - Use baked lighting (`lightmapBakeType = Baked`) when possible
 * - Limit shadow-casting lights (1-2 recommended)
 * - Reduce shadow resolution for mobile
 *
 * **Debug:** Use `?debuglights` URL parameter for visual helpers.
 *
 * @example Configure a directional light
 * ```ts
 * const light = myLight.getComponent(Light);
 * light.intensity = 1.5;
 * light.color = new Color(1, 0.95, 0.9); // Warm white
 * light.shadows = LightShadows.Soft;
 * ```
 *
 * @summary Light component for various light types and shadow settings
 * @category Rendering
 * @group Components
 * @see {@link LightType} for available light types
 * @see {@link ReflectionProbe} for environment reflections
 * @see {@link Camera} for rendering configuration
 */
export declare class Light extends Component implements ILight {
    /**
     * The type of light (spot, directional, point, etc.)
     * Can not be changed at runtime.
     */
    private type;
    /**
     * The maximum distance the light affects.
     * Only applicable for spot and point lights.
     */
    get range(): number;
    set range(value: number);
    private _range;
    /**
     * The full outer angle of the spotlight cone in degrees.
     * Only applicable for spot lights.
     */
    get spotAngle(): number;
    set spotAngle(value: number);
    private _spotAngle;
    /**
     * The angle of the inner cone in degrees for soft-edge spotlights.
     * Must be less than or equal to the outer spot angle.
     * Only applicable for spot lights.
     */
    get innerSpotAngle(): number;
    set innerSpotAngle(value: number);
    private _innerSpotAngle;
    /**
     * The color of the light
     */
    set color(val: Color);
    get color(): Color;
    _color: Color;
    /**
     * The near plane distance for shadow projection
     */
    set shadowNearPlane(val: number);
    get shadowNearPlane(): number;
    private _shadowNearPlane;
    /**
     * Shadow bias value to reduce shadow acne and peter-panning
     */
    set shadowBias(val: number);
    get shadowBias(): number;
    private _shadowBias;
    /**
     * Shadow normal bias to reduce shadow acne on sloped surfaces
     */
    set shadowNormalBias(val: number);
    get shadowNormalBias(): number;
    private _shadowNormalBias;
    /** when enabled this will remove the multiplication when setting the shadow bias settings initially */
    private _overrideShadowBiasSettings;
    /**
     * Shadow casting mode (None, Hard, or Soft)
     */
    set shadows(val: LightShadows);
    get shadows(): LightShadows;
    private _shadows;
    /**
     * Determines if the light contributes to realtime lighting, baked lighting, or a mix
     */
    private lightmapBakeType;
    /**
     * Brightness of the light. In WebXR experiences, the intensity is automatically
     * adjusted based on the AR session scale to maintain consistent lighting.
     */
    set intensity(val: number);
    get intensity(): number;
    private _intensity;
    /**
     * Maximum distance the shadow is projected
     */
    get shadowDistance(): number;
    set shadowDistance(val: number);
    private _shadowDistance?;
    private shadowWidth?;
    private shadowHeight?;
    /**
     * Resolution of the shadow map in pixels (width and height)
     */
    get shadowResolution(): number;
    set shadowResolution(val: number);
    private _shadowResolution?;
    /**
     * Whether this light's illumination is entirely baked into lightmaps
     */
    get isBaked(): boolean;
    /**
     * Checks if the GameObject itself is a {@link ThreeLight} object
     */
    private get selfIsLight();
    /**
     * The underlying three.js {@link ThreeLight} instance
     */
    private light;
    /**
     * Gets the world position of the light
     * @param vec Vector3 to store the result
     * @returns The world position as a Vector3
     */
    getWorldPosition(vec: Vector3): Vector3;
    awake(): void;
    onEnable(): void;
    onDisable(): void;
    /**
     * Creates the appropriate three.js light based on the configured light type
     * and applies all settings like shadows, intensity, and color.
     */
    createLight(): void;
    /**
     * Coroutine that updates the main light reference in the context
     * if this directional light should be the main light
     */
    updateMainLightRoutine(): Generator<undefined, void, unknown>;
    /**
     * Controls whether the renderer's shadow map type can be changed when soft shadows are used
     */
    static allowChangingRendererShadowMapType: boolean;
    /**
     * Updates shadow settings based on whether the shadows are set to hard or soft
     */
    private updateShadowSoftHard;
    onEnterXR(_args: NeedleXREventArgs): void;
    onUpdateXR(args: NeedleXREventArgs): void;
    onLeaveXR(_args: NeedleXREventArgs): void;
    /** Adjusts light intensity and distance to compensate for XR rig scale.
     * When the rig is scaled, world-space distances change proportionally
     * causing lights to appear brighter or dimmer due to distance falloff. */
    private applyXRScale;
    /**
     * Configures a directional light by adding and positioning its target
     * @param dirLight The directional light to set up
     */
    private setDirectionalLight;
}

export declare class LightData {
    get Source(): Texture;
    private _source;
    constructor(_context: Context, tex: Texture, _ambientScale?: number);
}

declare enum LightmapType {
    Lightmap = 0,
    Skybox = 1,
    Reflection = 2
}

/**
 * Defines the shadow casting options for a Light.
 * @enum {number}
 * @see {@link Light} for configuring shadow settings
 */
declare enum LightShadows {
    /** No shadows are cast */
    None = 0,
    /** Hard-edged shadows without filtering */
    Hard = 1,
    /** Soft shadows with PCF filtering */
    Soft = 2
}

export declare class LimitVelocityOverLifetimeModule {
    enabled: boolean;
    dampen: number;
    drag: MinMaxCurve;
    dragMultiplier: number;
    limit: MinMaxCurve;
    limitMultiplier: number;
    separateAxes: boolean;
    limitX: MinMaxCurve;
    limitXMultiplier: number;
    limitY: MinMaxCurve;
    limitYMultiplier: number;
    limitZ: MinMaxCurve;
    limitZMultiplier: number;
    multiplyDragByParticleSize: boolean;
    multiplyDragByParticleVelocity: boolean;
    space: ParticleSystemSimulationSpace;
    private _temp;
    private _temp2;
    apply(_position: Vec3, baseVelocity: Vector3, currentVelocity: Vector3 | Vector3_2, _size: Vector3_2, t01: number, _dt: number, _scale: number): void;
}

/**
 * Load a 3D model file from a URL (glTF, glb, FBX, OBJ, or any format with a registered loader).
 * @param url URL to the model file.
 * @param options Optional loading configuration.
 * @param options.context The Needle Engine context to load into. Defaults to `Context.Current`.
 * @param options.seed Seed for generating unique component IDs.
 * @param options.onprogress Callback invoked with download progress events.
 * @returns A promise that resolves to the loaded {@link Model} (`GLTF | FBX | OBJ | CustomModel`), or `undefined` if loading fails.
 */
export declare function loadAsset(url: string, options?: {
    context?: Context;
    path?: string;
    seed?: number;
    onprogress?: (evt: ProgressEvent) => void;
}): Promise<Model | undefined>;

/** A loaded model */
export declare type LoadedModel = {
    src: string;
    file: Model;
};

/* Excluded from this release type: LoadingElementOptions */

export declare class LoadingProgressArgs {
    /** the name or URL of the loaded file */
    name: string;
    /** the loading progress event from the loader */
    progress: ProgressEvent;
    /** the index of the loaded file */
    index: number;
    /** the total number of files to load */
    count: number;
}

/**
 * Loads a PMREM texture from the given URL. This also supports the ultra-fast preprocessed environment maps (PMREM) format.
 * @param url The URL of the PMREM texture to load.
 * @param renderer The WebGLRenderer to use for loading the texture.
 * @returns A promise that resolves to the loaded texture or null if loading failed.
 */
export declare function loadPMREM(url: string, renderer: WebGLRenderer): Promise<Texture | null>;

/**
 * {@link SceneSwitcher} event argument data
 */
declare type LoadSceneEvent = {
    /**
     * The {@link SceneSwitcher} that is loading the scene
     */
    switcher: SceneSwitcher;
    /**
     * The scene that is being loaded
     */
    scene: AssetReference;
    /**
     * The index of the scene that is being loaded
     */
    index: number;
};

/**
 * Load a gltf file from a url. This is the core method used by Needle Engine to load gltf files. All known extensions are registered here.
 * @param context The current context
 * @param url The url to the gltf file
 * @param sourceId The source id of the gltf file - this is usually the url
 * @param seed The seed for generating unique ids
 * @param prog A progress callback invoked with download {@link ProgressEvent}s
 * @returns The loaded gltf object, or `undefined` if loading fails
 */
export declare function loadSync(context: Context, url: string, sourceId: string, seed: number | UIDProvider | null, prog?: (evt: ProgressEvent) => void): Promise<Model | undefined>;

/**
 * LODGroup manages multiple levels of detail for optimized rendering.
 * Objects switch between different detail levels based on distance from camera.
 *
 * LOD levels are defined in {@link LODModel} objects, each specifying:
 * - The distance at which that level becomes active
 * - The {@link Renderer} components to show at that level
 *
 * This is useful for performance optimization - showing high-detail models up close
 * and lower-detail versions at distance where the difference isn't visible.
 *
 * **Progressive Loading:**
 * For automatic texture/mesh LOD streaming, see the `@needle-tools/gltf-progressive` package
 * which provides progressive loading capabilities independent of this component.
 *
 * **Debug options:**
 * - `?debuglods` - Log LOD switching information
 * - `?nolods` - Disable LOD system entirely
 *
 * @summary Level of Detail Group for optimizing rendering
 * @category Rendering
 * @group Components
 * @see {@link LODModel} for configuring individual LOD levels
 * @see {@link Renderer} for the renderers controlled by LOD
 * @see {@link LODsManager} for programmatic control of progressive LODs
 * @link https://npmjs.com/package/@needle-tools/gltf-progressive
 */
export declare class LODGroup extends Component {
    /** Array of LOD level configurations */
    readonly lodModels: LODModel[];
    private _lods;
    private _settings;
    private _lodsHandler?;
    start(): void;
    onAfterRender(): void;
    private onAddLodLevel;
    private _distanceFactor;
    /**
     * Adjusts all LOD transition distances by a multiplier.
     * Values > 1 push LOD transitions further away (higher quality at distance).
     * Values < 1 bring transitions closer (better performance).
     * @param factor Multiplier to apply to all LOD distances
     */
    distanceFactor(factor: number): void;
}

/**
 * Defines a single LOD level with its transition distance and associated renderers.
 * Used by {@link LODGroup} to configure level of detail switching.
 */
export declare class LODModel {
    /** Screen height ratio (0-1) at which this LOD becomes active */
    screenRelativeTransitionHeight: number;
    /** Distance from camera at which this LOD becomes active */
    distance: number;
    /** Renderers to show at this LOD level */
    renderers: Renderer[];
}

/**
 * Needle Engine LODs manager. Wrapper around the internal LODs manager.
 * It uses the [@needle-tools/gltf-progressive](https://npmjs.com/package/@needle-tools/gltf-progressive) package to manage LODs.
 *
 * For lower-level control (e.g. configuring max concurrent loading tasks, queue settings, or other progressive loading specifics), use {@link NEEDLE_progressive} directly.
 * @link https://npmjs.com/package/@needle-tools/gltf-progressive
 */
declare class LODsManager implements NEEDLE_progressive_plugin {
    /** The type of the @needle-tools/gltf-progressive LODsManager - can be used to set static settings */
    static readonly GLTF_PROGRESSIVE_LODSMANAGER_TYPE: typeof LODsManager_2;
    readonly context: Context;
    private _lodsManager?;
    private _settings;
    /**
     * The internal LODs manager. See @needle-tools/gltf-progressive for more information.
     * @link https://npmjs.com/package/@needle-tools/gltf-progressive
     */
    get manager(): LODsManager_2 | undefined;
    /**
     * The interval (in seconds) at which the bounding volumes of skinned meshes are automatically updated.
     * If set to 0, automatic updates are disabled and bounding volumes will only be updated when the mesh is loaded or when the `updateSkinnedMeshBounds` method is called manually.
     * @default 0
     */
    get skinnedMeshAutoUpdateBoundsInterval(): number;
    set skinnedMeshAutoUpdateBoundsInterval(value: number);
    /**
     * The target triangle density is the desired max amount of triangles on screen when the mesh is filling the screen.
     * @default 200_000
     */
    get targetTriangleDensity(): number;
    set targetTriangleDensity(value: number);
    /* Excluded from this release type: __constructor */
    private applySettings;
    /* Excluded from this release type: setRenderer */
    disable(): void;
    /* Excluded from this release type: onAfterUpdatedLOD */
    private onRenderDebug;
}

export declare function logHierarchy(root: Object3D | null | undefined, collapsible?: boolean): void;

/* Excluded from this release type: LogStats */

export declare enum LogType {
    Log = 0,
    Warn = 1,
    Error = 2
}

/**
 * The [LookAt](https://engine.needle.tools/docs/api/LookAt) behaviour makes the object look at a target object or the camera.
 * It can also invert the forward direction and keep the up direction.
 *
 * @summary Makes the object look at a target object or the camera
 * @category Everywhere Actions
 * @category Interactivity
 * @group Components
 */
export declare class LookAt extends Component implements UsdzBehaviour {
    /**
     * The target object to look at. If not set, the main camera will be used.
     */
    target?: Object3D;
    /**
     * Inverts the forward direction.
     */
    invertForward: boolean;
    /**
     * Keep the up direction.
     */
    keepUpDirection: boolean;
    /**
     * Copy the target rotation.
     */
    copyTargetRotation: boolean;
    private static flipYQuat;
    /* Excluded from this release type: onBeforeRender */
    /* Excluded from this release type: createBehaviours */
}

/**
 * @deprecated LookAtConstraint will be removed in future versions. Please either use a direct object reference instead (e.g. assigning an Object3D as a look target in scripts that rely on the LookAtConstraint) or implement the LookAtConstraint signature in your web project:
 * ```ts
 * export class LookAtConstraint extends Behaviour {
 *   constraintActive: boolean = true;
 *   locked: boolean = false;
 *   sources: Object3D[] = [];
 *   setConstraintPosition(worldPosition: Vector3) {
 *     const source = this.sources[0];
 *     if (source) source.worldPosition = worldPosition;
 *   }
 * }
 * ```
 */
export declare class LookAtConstraint extends Component {
    /**
     * When true, the constraint is active and affects the target.
     * Set to false to temporarily disable without removing sources.
     */
    constraintActive: boolean;
    /**
     * When true, the look-at position is locked and won't update
     * even if source objects move.
     */
    locked: boolean;
    /**
     * Objects to look at. The first object in the array is used
     * as the primary look-at target.
     */
    sources: Object3D[];
    /**
     * Sets the world position that the constraint should look at.
     * Updates the first source object's position.
     * @param worldPosition The world-space position to look at
     */
    setConstraintPosition(worldPosition: Vector3): void;
}

export declare function lookAtInverse(obj: Object3D, target: Vector3): void;

/** Better lookAt
 * @param object the object that the lookAt should be applied to
 * @param target the target to look at
 * @param keepUpDirection if true the up direction will be kept
 * @param copyTargetRotation if true the target rotation will be copied so the rotation is not skewed
 */
export declare function lookAtObject(object: Object3D, target: Object3D, keepUpDirection?: boolean, copyTargetRotation?: boolean): void;

/**
 * Look at a 2D point in screen space
 * @param object the object to look at the point
 * @param target the target point in 2D screen space XY e.g. from a mouse event
 * @param camera the camera to use for the lookAt
 * @param factor the factor to multiply the distance from the camera to the object. Default is 1
 * @returns the target point in world space
 *
 * @example Needle Engine Component
 * ```ts
 * export class MyLookAtComponent extends Behaviour {
 *   update() {
 *     lookAtScreenPoint(this.gameObject, this.context.input.mousePosition, this.context.mainCamera);
 *   }
 * }
 * ```
 *
 * @example Look at from browser mouse move event
 * ```ts
 * window.addEventListener("mousemove", (e) => {
 *   lookAtScreenPoint(object, new Vector2(e.clientX, e.clientY), camera);
 *  });
 * ```
 */
export declare function lookAtScreenPoint(object: Object3D, target: Vector2Like, camera: Camera_2, factor?: number): Vector3 | null;

declare type LostOwnershipBroadcastResponse = {
    guid: string;
    owner: string;
};

declare class LowPassFilter {
    y: number | null;
    s: number | null;
    alpha: number;
    constructor(alpha: number);
    setAlpha(alpha: number): void;
    filter(value: number, alpha: number): number;
    lastValue(): number | null;
    reset(value: number): void;
}

declare type MagicSkyboxName = "studio" | "blurred-skybox" | "quicklook-ar" | "quicklook";

export declare class MainModule {
    cullingMode: number;
    duration: number;
    emitterVelocityMode: number;
    flipRotation: number;
    gravityModifier: MinMaxCurve;
    gravityModifierMultiplier: number;
    loop: boolean;
    maxParticles: number;
    playOnAwake: boolean;
    prewarm: boolean;
    ringBufferLoopRange: {
        x: number;
        y: number;
    };
    ringBufferMode: boolean;
    scalingMode: ParticleSystemScalingMode;
    simulationSpace: ParticleSystemSimulationSpace;
    simulationSpeed: number;
    startColor: MinMaxGradient;
    startDelay: MinMaxCurve;
    startDelayMultiplier: number;
    startLifetime: MinMaxCurve;
    startLifetimeMultiplier: number;
    startRotation: MinMaxCurve;
    startRotationMultiplier: number;
    startRotation3D: boolean;
    startRotationX: MinMaxCurve;
    startRotationXMultiplier: number;
    startRotationY: MinMaxCurve;
    startRotationYMultiplier: number;
    startRotationZ: MinMaxCurve;
    startRotationZMultiplier: number;
    startSize: MinMaxCurve;
    startSize3D: boolean;
    startSizeMultiplier: number;
    startSizeX: MinMaxCurve;
    startSizeXMultiplier: number;
    startSizeY: MinMaxCurve;
    startSizeYMultiplier: number;
    startSizeZ: MinMaxCurve;
    startSizeZMultiplier: number;
    startSpeed: MinMaxCurve;
    startSpeedMultiplier: number;
    stopAction: number;
    useUnscaledTime: boolean;
}

/**
 * Generates a random id string of the given length
 * @param length the number of characters in the generated id — NOTE(review): declared as `any`; presumably a `number`, confirm and narrow
 */
export declare function makeId(length: any): string;

/** Generates a random id string from a list of adjectives and nouns */
export declare function makeIdFromRandomWords(): string;

/**
 * Makes a name safe for use in a USD document.
 * NOTE(review): parameter and return are declared as `any` — presumably both are strings; confirm against the implementation and narrow.
 */
export declare function makeNameSafeForUSD(str: any): any;

declare type Mapping = "xr-standard";

export declare function markAsInstancedRendered(go: Object3D, instanced: boolean): void;

/**
 * @category Animation and Sequencing
 * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
 */ export declare class MarkerModel {
    type: MarkerType;
    time: number;
}

export declare class MarkerTrackHandler extends TrackHandler {
    models: Array<Models.MarkerModel & Record<string, any>>;
    needsSorting: boolean;
    foreachMarker<T>(type?: string | null): Generator<T, void, unknown>;
    onEnable(): void;
    evaluate(_time: number): void;
    private sort;
}

export declare enum MarkerType {
    Signal = "SignalEmitter"
}

/**
 * A {@link Graphic} that participates in UI masking.
 * @category User Interface
 * @group Components
 */
export declare class MaskableGraphic extends Graphic {
    // NOTE(review): internal "flipped" object used by the masking logic — confirm exact role against implementation
    private _flippedObject;
    /** Invoked after the underlying UI element has been created */
    protected onAfterCreated(): void;
}

/**
 * MaterialPropertyBlock allows per-object material property overrides without creating new material instances.
 * This is useful for rendering multiple objects with the same base material but different properties
 * (e.g., different colors, textures, or shader parameters).
 *
 * ## How Property Blocks Work
 *
 * **Important**: Overrides are registered on the **Object3D**, not on the material.
 * This means:
 * - If you change the object's material, the overrides will still be applied to the new material
 * - Multiple objects can share the same material but have different property overrides
 * - If you don't want overrides applied after changing a material, you must remove them using {@link removeOveride}, {@link clearAllOverrides}, or {@link dispose}
 *
 * The property block system works by:
 * - Temporarily applying overrides in onBeforeRender
 * - Restoring original values in onAfterRender
 * - Managing shader defines and program cache keys for correct shader compilation
 * - Supporting texture coordinate transforms per object
 *
 * ## Common Use Cases
 *
 * - **Lightmaps**: Apply unique lightmap textures to individual objects sharing the same material
 * - **Reflection Probes**: Apply different environment maps per object for localized reflections
 * - **See-through effects**: Temporarily override transparency/transmission properties for X-ray effects
 *
 * ## Getting a MaterialPropertyBlock
 *
 * **Important**: Do not use the constructor directly. Instead, use the static {@link MaterialPropertyBlock.get} method:
 *
 * ```typescript
 * const block = MaterialPropertyBlock.get(myMesh);
 * ```
 *
 * This method will either return an existing property block or create a new one if it doesn't exist.
 * It automatically:
 * - Creates the property block instance
 * - Registers it in the internal registry
 * - Attaches the necessary render callbacks to the object
 * - Handles Groups by applying overrides to all child meshes
 *
 * @example Basic usage
 * ```typescript
 * // Get or create a property block for an object
 * const block = MaterialPropertyBlock.get(myMesh);
 *
 * // Override the color property
 * block.setOverride("color", new Color(1, 0, 0));
 *
 * // Override a texture with custom UV transform (useful for lightmaps)
 * block.setOverride("lightMap", myLightmapTexture, {
 *   offset: new Vector2(0.5, 0.5),
 *   repeat: new Vector2(2, 2)
 * });
 *
 * // Set a shader define
 * block.setDefine("USE_CUSTOM_FEATURE", 1);
 * ```
 *
 * @example Material swapping behavior
 * ```typescript
 * const mesh = new Mesh(geometry, materialA);
 * const block = MaterialPropertyBlock.get(mesh);
 * block.setOverride("color", new Color(1, 0, 0));
 *
 * // The color override is red for materialA
 *
 * // Swap the material - overrides persist and apply to the new material!
 * mesh.material = materialB;
 * // The color override is now red for materialB too
 *
 * // If you don't want overrides on the new material, remove them:
 * block.clearAllOverrides(); // Remove all overrides
 * // or
 * block.removeOveride("color"); // Remove specific override
 * // or
 * block.dispose(); // Remove the entire property block
 * ```
 *
 * @example Lightmap usage
 * ```typescript
 * const block = MaterialPropertyBlock.get(mesh);
 * block.setOverride("lightMap", lightmapTexture);
 * block.setOverride("lightMapIntensity", 1.5);
 * ```
 *
 * @example See-through effect
 * ```typescript
 * const block = MaterialPropertyBlock.get(mesh);
 * block.setOverride("transparent", true);
 * block.setOverride("opacity", 0.3);
 * ```
 *
 * @template T The material type this property block is associated with
 */
export declare class MaterialPropertyBlock<T extends Material = Material> {
    /** Property overrides registered on this block (temporarily applied to the material during rendering) */
    private _overrides;
    /** Shader defines registered on this block (included in the program cache key) */
    private _defines;
    /** The Object3D these overrides are registered on, if any */
    private _object;
    /** The object this property block is attached to */
    get object(): Object3D | null;
    /**
     * Creates a new MaterialPropertyBlock
     * @param object The object this property block is for (optional)
     */
    protected constructor(object?: Object3D | null);
    /**
     * Gets or creates a MaterialPropertyBlock for the given object.
     * This is the recommended way to obtain a property block instance.
     *
     * @template T The material type
     * @param object The object to get/create a property block for
     * @returns The MaterialPropertyBlock associated with this object
     *
     * @example
     * ```typescript
     * const block = MaterialPropertyBlock.get(myMesh);
     * block.setOverride("roughness", 0.5);
     * ```
     */
    static get<T extends Material = Material>(object: Object3D): MaterialPropertyBlock<T>;
    /**
     * Checks if an object has any property overrides
     * @param object The object to check
     * @returns True if the object has a property block with overrides
     */
    static hasOverrides(object: Object3D): boolean;
    /**
     * Disposes this property block and cleans up associated resources.
     * After calling dispose, this property block should not be used.
     */
    dispose(): void;
    /**
     * Sets or updates a material property override.
     * The override will be applied to the material during rendering.
     *
     * @param name The name of the material property to override (e.g., "color", "map", "roughness")
     * @param value The value to set
     * @param textureTransform Optional UV transform (only used when value is a Texture)
     *
     * @example
     * ```typescript
     * // Override a simple property
     * block.setOverride("roughness", 0.8);
     *
     * // Override a color
     * block.setOverride("color", new Color(0xff0000));
     *
     * // Override a texture with UV transform
     * block.setOverride("map", texture, {
     *   offset: new Vector2(0, 0),
     *   repeat: new Vector2(2, 2)
     * });
     * ```
     */
    setOverride<K extends NonFunctionPropertyNames<T>>(name: K, value: T[K], textureTransform?: TextureTransform): void;
    setOverride(name: string, value: MaterialPropertyType, textureTransform?: TextureTransform): void;
    /**
     * Gets the override for a specific property with type-safe value inference
     * @param name The property name to get
     * @returns The PropertyBlockOverride with correctly typed value if it exists, undefined otherwise
     *
     * @example
     * ```typescript
     * const block = MaterialPropertyBlock.get<MeshStandardMaterial>(mesh);
     *
     * // Value is inferred as number | undefined
     * const roughness = block.getOverride("roughness")?.value;
     *
     * // Value is inferred as Color | undefined
     * const color = block.getOverride("color")?.value;
     *
     * // Value is inferred as Texture | null | undefined
     * const map = block.getOverride("map")?.value;
     *
     * // Explicitly specify the type for properties not on the base material type
     * const transmission = block.getOverride<number>("transmission")?.value;
     *
     * // Or use a more specific material type
     * const physicalBlock = block as MaterialPropertyBlock<MeshPhysicalMaterial>;
     * const transmissionTyped = physicalBlock.getOverride("transmission")?.value; // number
     * ```
     */
    getOverride<K extends NonFunctionPropertyNames<T>>(name: K): PropertyBlockOverride<T[K] & MaterialPropertyType> | undefined;
    getOverride<V extends MaterialPropertyType = MaterialPropertyType>(name: string): PropertyBlockOverride<V> | undefined;
    /**
     * Removes a specific property override.
     * After removal, the material will use its original property value for this property.
     *
     * NOTE(review): the method name is spelled "removeOveride" (missing an "r"); kept as-is for API compatibility.
     *
     * @param name The property name to remove the override for
     *
     * @example
     * ```typescript
     * const block = MaterialPropertyBlock.get(mesh);
     *
     * // Set some overrides
     * block.setOverride("color", new Color(1, 0, 0));
     * block.setOverride("roughness", 0.5);
     * block.setOverride("lightMap", lightmapTexture);
     *
     * // Remove a specific override - the material will now use its original color
     * block.removeOveride("color");
     *
     * // Other overrides (roughness, lightMap) remain active
     * ```
     */
    removeOveride<K extends NonFunctionPropertyNames<T>>(name: K | ({} & string)): void;
    /**
     * Removes all property overrides from this block.
     * After calling this, the material will use its original values for all properties.
     *
     * **Note**: This does NOT remove shader defines. Use {@link clearDefine} or {@link dispose} for that.
     *
     * @example Remove all overrides but keep the property block
     * ```typescript
     * const block = MaterialPropertyBlock.get(mesh);
     *
     * // Set multiple overrides
     * block.setOverride("color", new Color(1, 0, 0));
     * block.setOverride("roughness", 0.5);
     * block.setOverride("lightMap", lightmapTexture);
     *
     * // Later, remove all overrides at once
     * block.clearAllOverrides();
     *
     * // The material now uses its original values
     * // The property block still exists and can be reused with new overrides
     * ```
     *
     * @example Temporarily disable all overrides
     * ```typescript
     * const block = MaterialPropertyBlock.get(mesh);
     *
     * // Save current overrides if you want to restore them later
     * const savedOverrides = [...block.overrides];
     *
     * // Clear all overrides temporarily
     * block.clearAllOverrides();
     *
     * // Do some rendering without overrides...
     *
     * // Restore overrides
     * savedOverrides.forEach(override => {
     *   block.setOverride(override.name, override.value, override.textureTransform);
     * });
     * ```
     *
     * @see {@link removeOveride} - To remove a single override
     * @see {@link dispose} - To completely remove the property block and clean up resources
     */
    clearAllOverrides(): void;
    /**
     * Gets all property overrides as a readonly array
     * @returns Array of all property overrides
     */
    get overrides(): readonly PropertyBlockOverride[];
    /**
     * Checks if this property block has any overrides
     * @returns True if there are any overrides set
     */
    hasOverrides(): boolean;
    /**
     * Set a shader define that will be included in the program cache key.
     * This allows different objects sharing the same material to have different shader programs.
     *
     * Defines affect shader compilation and are useful for enabling/disabling features per-object.
     *
     * @param name The define name (e.g., "USE_LIGHTMAP", "ENABLE_REFLECTIONS")
     * @param value The define value (typically a boolean, number, or string)
     *
     * @example
     * ```typescript
     * // Enable a feature for this specific object
     * block.setDefine("USE_CUSTOM_SHADER", true);
     * block.setDefine("QUALITY_LEVEL", 2);
     * ```
     */
    setDefine(name: string, value: string | number | boolean): void;
    /**
     * Remove a shader define
     * @param name The define name to remove
     */
    clearDefine(name: string): void;
    /**
     * Get all defines set on this property block
     * @returns A readonly record of all defines
     */
    getDefines(): Readonly<Record<string, string | number | boolean>>;
    /* Excluded from this release type: getCacheKey */
}

/**
 * Valid types that can be used as material property overrides:
 * scalars, numeric arrays, colors, textures, vectors, and Euler rotations.
 * `null` is allowed (presumably to override a texture slot to "no texture" — TODO confirm semantics).
 */
declare type MaterialPropertyType = number | number[] | Color | Texture | Vector2 | Vector3 | Vector4 | null | Euler;

export declare namespace MaterialX {
    /**
     * Utility function to load a MaterialX material from a URL. This can be used in your own code to load MaterialX materials outside of the glTF loading process. The URL should point to a MaterialX XML file.
     * @param urlOrXML The URL of the MaterialX file, or presumably the raw XML content itself (TODO confirm — parameter name suggests both are accepted)
     * @param opts Optional loading options
     * @returns The loaded material, or null if loading failed
     */
    export function loadFromUrl(urlOrXML: string, opts?: {
        /** Base URL used to resolve relative resource paths — TODO confirm against implementation */
        url?: string;
        /** three.js LoadingManager to route resource loading through */
        loadingManager?: LoadingManager;
        /** Which material to pick when the file defines multiple materials (by index or by name) */
        materialNameOrIndex?: number | string;
    }): Promise<Material | null>;
}

/** Shared math utility instance — see {@link MathHelper} for the available methods */
export declare const Mathf: MathHelper;

/**
 * Math utility class providing common mathematical operations.
 * Access via the exported `Mathf` constant.
 *
 * @example
 * ```ts
 * import { Mathf } from "@needle-tools/engine";
 *
 * // Random number between 0 and 10
 * const rand = Mathf.random(0, 10);
 *
 * // Clamp a value
 * const clamped = Mathf.clamp(value, 0, 100);
 *
 * // Smooth interpolation
 * const smoothed = Mathf.lerp(start, end, t);
 * ```
 */
declare class MathHelper {
    /**
     * Returns a random number or element.
     * @param arr Array to pick a random element from
     * @returns Random element from array, or null if array is empty
     * @example `Mathf.random([1, 2, 3])` - returns random element
     */
    random<T>(arr: Array<T>): T | null;
    /**
     * Returns a random number between min and max (inclusive).
     * @param min Minimum value (inclusive)
     * @param max Maximum value (inclusive)
     * @returns Random number in range, or 0-1 if no args provided
     * @example `Mathf.random(0, 10)` - returns 0 to 10
     */
    random(min?: number, max?: number): number;
    /**
     * Fills a Vector3 with random values.
     * @param target Vector3 to fill with random values
     * @param min Minimum value for each component
     * @param max Maximum value for each component
     */
    randomVector3(target: Vector3, min?: number, max?: number): void;
    /**
     * Clamps a value between min and max.
     * @param value Value to clamp
     * @param min Minimum bound
     * @param max Maximum bound
     * @returns Clamped value
     */
    clamp(value: number, min: number, max: number): number;
    /**
     * Clamps a value between 0 and 1.
     * @param value Value to clamp
     * @returns Value clamped to [0, 1]
     */
    clamp01(value: number): number;
    /**
     * Linearly interpolates between two values.
     * @param value1 Start value (returned when t=0)
     * @param value2 End value (returned when t=1)
     * @param t Interpolation factor, clamped to [0, 1]
     * @returns Interpolated value
     */
    lerp(value1: number, value2: number, t: number): number;
    /**
     * Calculates the linear interpolation parameter that produces the given value.
     * Inverse of lerp: if `lerp(a, b, t) = v`, then `inverseLerp(a, b, v) = t`
     * @param value1 Start value
     * @param value2 End value
     * @param t The value to find the parameter for
     * @returns The interpolation parameter (may be outside [0,1] if t is outside [value1, value2])
     */
    inverseLerp(value1: number, value2: number, t: number): number;
    /**
     * Remaps a value from one range to another.
     * @param value The value to remap.
     * @param min1 The minimum value of the current range.
     * @param max1 The maximum value of the current range.
     * @param min2 The minimum value of the target range.
     * @param max2 The maximum value of the target range.
     */
    remap(value: number, min1: number, max1: number, min2: number, max2: number): number;
    /**
     * Moves a value towards a target by a maximum step amount.
     * Useful for smooth following or gradual value changes.
     * @param value1 Current value
     * @param value2 Target value
     * @param amount Maximum step to move (positive moves toward target)
     * @returns New value moved toward target, never overshooting
     */
    moveTowards(value1: number, value2: number, amount: number): number;
    /** Conversion factor from radians to degrees */
    readonly Rad2Deg: number;
    /** Conversion factor from degrees to radians */
    readonly Deg2Rad: number;
    /** Small tolerance constant (1e-5), useful for approximate comparisons */
    readonly Epsilon = 0.00001;
    /**
     * Converts radians to degrees
     */
    toDegrees(radians: number): number;
    /**
     * Converts degrees to radians
     */
    toRadians(degrees: number): number;
    /** Returns the tangent of the given angle (in radians) */
    tan(radians: number): number;
    /** Converts a single color channel value from gamma space to linear space */
    gammaToLinear(gamma: number): number;
    /** Converts a single color channel value from linear space to gamma space */
    linearToGamma(linear: number): number;
    /**
     * Checks if two vectors are approximately equal within epsilon tolerance.
     * Works with Vector2, Vector3, Vector4, and Quaternion.
     * @param v1 First vector
     * @param v2 Second vector
     * @param epsilon Tolerance for comparison (default: Number.EPSILON)
     * @returns True if all components are within epsilon of each other
     */
    approximately(v1: Vector, v2: Vector, epsilon?: number): boolean;
    /**
     * Easing function: slow start, fast middle, slow end (cubic).
     * @param x Input value from 0 to 1
     * @returns Eased value from 0 to 1
     */
    easeInOutCubic(x: number): number;
}

/**
 * MeshCollider creates a collision shape from a mesh geometry.
 * Allows for complex collision shapes that match the exact geometry of an object.
 *
 * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
 *
 * - Example: https://samples.needle.tools/physics-basic
 * - Example: https://samples.needle.tools/physics-playground
 * - Example: https://samples.needle.tools/physics-&-animation
 *
 * @category Physics
 * @group Components
 */
export declare class MeshCollider extends Collider {
    /**
     * The mesh that is used to create the collision shape.
     * If not set, the collider will try to use the mesh of the object it's attached to.
     */
    sharedMesh?: Mesh;
    /**
     * When `true` the collider is treated as a solid object without holes
     * (a convex hull, which by definition cannot represent concave features).
     * Set to `false` if you want this mesh collider to be able to contain other objects.
     */
    convex: boolean;
    /**
     * Creates and registers the mesh collider with the physics engine.
     * Handles both individual meshes and mesh groups.
     */
    onEnable(): void;
}

/** Aggregated geometry statistics for a mesh */
declare type MeshInformation = {
    /** Total number of vertices */
    vertexCount: number;
    /** Total number of indices */
    indexCount: number;
};

/** Alias for the three.js node-based MeshPhysicalNodeMaterial */
declare type MeshPhysicalNodeMaterial = default_3;

/**
 * Component for rendering mesh geometry — see {@link Renderer} for the shared rendering functionality.
 * @group Components
 */
export declare class MeshRenderer extends Renderer {
}

/** @deprecated use {@link DeviceUtilities.microphonePermissionsGranted} instead */
export declare function microphonePermissionsGranted(): Promise<boolean>;

/**
 * Callback used to determine the mimetype of a file from its URL, response headers,
 * and first few bytes. Return the detected mimetype, or null to decline handling the file.
 */
declare type MimetypeCallback = (args: {
    /** The URL of the file to load */
    url: string;
    /** The response of the range request with the first few bytes of the file (bytes are available in the 'args.bytes' property of this callback) */
    response: Response;
    /** The mimetype of the file as provided by the request header */
    contentType: string | null;
    /** The first few bytes of the file as a Uint8Array */
    bytes: Uint8Array;
}) => NeedleMimetype | null;

/**
 * A scalar particle-system parameter that can be a constant, a random range
 * between two constants, or driven by one or two animation curves
 * (mode is controlled via {@link ParticleSystemCurveMode}).
 */
export declare class MinMaxCurve {
    /** Creates a curve that always evaluates to the given constant */
    static constant(val: number): MinMaxCurve;
    /** Creates a curve evaluated from the range between two constants */
    static betweenTwoConstants(min: number, max: number): MinMaxCurve;
    /** Creates a curve driven by an animation curve, optionally scaled by a multiplier */
    static curve(curve: AnimationCurve, multiplier?: number): MinMaxCurve;
    /** Switches this instance to constant mode with the given value */
    setConstant(val: number): void;
    /** Switches this instance to two-constants mode with the given range */
    setMinMaxConstant(min: number, max: number): void;
    /** Switches this instance to curve mode */
    setCurve(curve: AnimationCurve, multiplier?: number): void;
    /** The evaluation mode (constant, two constants, curve, or two curves) */
    mode: ParticleSystemCurveMode | ParticleSystemCurveModeKeys;
    /** Value used in constant mode */
    constant: number;
    /** Lower bound used in two-constants mode */
    constantMin: number;
    /** Upper bound used in two-constants mode */
    constantMax: number;
    /** Curve used in curve mode */
    curve?: AnimationCurve;
    /** Lower curve used in two-curves mode */
    curveMin?: AnimationCurve;
    /** Upper curve used in two-curves mode */
    curveMax?: AnimationCurve;
    /** Scale factor applied to curve results */
    curveMultiplier?: number;
    /** Returns a copy of this curve */
    clone(): MinMaxCurve;
    /**
     * Evaluates the curve.
     * @param t01 Normalized time in [0, 1]
     * @param lerpFactor Blend factor between the min and max variants — TODO confirm behavior when omitted (presumably randomized)
     */
    evaluate(t01: number, lerpFactor?: number): number;
    /** Returns the maximum value this curve can produce */
    getMax(): number;
    private getMaxFromCurve;
}

/**
 * A color particle-system parameter that can be a constant color, a blend between
 * two colors, a gradient, or a blend between two gradients
 * (mode is controlled via {@link ParticleSystemGradientMode}).
 */
export declare class MinMaxGradient {
    /** Creates a gradient that always evaluates to the given color */
    static constant(color: RGBAColor | Color): MinMaxGradient;
    /** Creates a gradient evaluated between two colors */
    static betweenTwoColors(color1: RGBAColor | Color, color2: RGBAColor | Color): MinMaxGradient;
    /** Switches this instance to constant-color mode and returns it for chaining */
    constant(color: RGBAColor | Color): this;
    /** Switches this instance to two-colors mode and returns it for chaining */
    betweenTwoColors(color1: RGBAColor | Color, color2: RGBAColor | Color): this;
    /**
     * The mode of the gradient, which can be Color, Gradient, TwoColors or TwoGradients.
     */
    mode: ParticleSystemGradientMode | ParticleSystemGradientModeKeys;
    /** Color used in constant-color mode */
    color: RGBAColor | Color;
    /** First color used in two-colors mode */
    colorMin: RGBAColor | Color;
    /** Second color used in two-colors mode */
    colorMax: RGBAColor | Color;
    /** Gradient used in gradient mode */
    gradient: Gradient;
    /** First gradient used in two-gradients mode */
    gradientMin: Gradient;
    /** Second gradient used in two-gradients mode */
    gradientMax: Gradient;
    private static _temp;
    private static _temp2;
    /**
     * Evaluates the gradient.
     * @param t01 Normalized time in [0, 1]
     * @param lerpFactor Blend factor between the min and max variants — TODO confirm behavior when omitted
     */
    evaluate(t01: number, lerpFactor?: number): RGBAColor | Color;
}

/** All possible model types that Needle Engine can load */
export declare type Model = (GLTF | FBX | OBJ | CustomModel);

/** Serialized timeline data models, re-exported under the `Models` namespace */
declare namespace Models {
    export {
        TimelineAssetModel,
        TrackType,
        ClipExtrapolation,
        TrackModel,
        TrackOffset,
        ClipModel,
        AnimationClipModel,
        AudioClipModel,
        ControlClipModel,
        MarkerType,
        MarkerModel,
        SignalMarkerModel,
        ScrollMarkerModel
    }
}

/** Runtime data for a single animation motion/state as used by the animation system */
export declare type Motion = {
    /** Name of the motion */
    name: string;
    /** Whether the clip loops */
    isLooping: boolean;
    /** Optional unique identifier for this motion */
    guid?: string;
    /** clip index in gltf animations array */
    index?: number;
    /** the resolved clip */
    clip?: AnimationClip;
    /** the clip mapping -> which object has which animationclip */
    clips?: ClipMapping[];
    /** The mixer action created for this motion */
    action?: AnimationAction;
    /** used when a transition points to the same state we need another action to blend */
    action_loopback?: AnimationAction;
};

/* Excluded from this release type: MotionStyle */

/** Typical mouse button names for most devices */
export declare type MouseButtonName = "left" | "right" | "middle";

/* Excluded from this release type: MultiplePerformOperation */

/* Excluded from this release type: nameof */

/* Excluded from this release type: nameofFactory */

/**
 * GLTFLoader/GLTFExporter plugin that serializes and deserializes Needle component data
 * stored on glTF nodes. Used internally by the engine's import/export pipeline — TODO confirm scope.
 */
declare class NEEDLE_components implements GLTFLoaderPlugin {
    /** Plugin name as reported to the glTF loader/exporter */
    get name(): string;
    /** Per-node export data, keyed by glTF node index */
    exportContext: {
        [nodeIndex: number]: ExportData;
    };
    /** Maps scene objects to their glTF node indices during export */
    objectToNodeMap: ObjectToNodeMap;
    /** Serialization context shared across the export */
    context: SerializationContext;
    writer?: any;
    /** Registers this plugin with a GLTFExporter */
    registerExport(exp: GLTFExporter): void;
    beforeParse(): void;
    serializeUserData(node: Object3D, _nodeDef: any): boolean;
    afterSerializeUserData(node: Object3D, _nodeDef: any): void;
    writeNode(node: Object3D, nodeDef: any): void;
    afterParse(input: any): void;
    /** The glTF parser instance during import */
    parser?: GLTFParser;
    /** Maps glTF node indices back to scene objects during import */
    nodeToObjectMap: NodeToObjectMap;
    /** The loaded gltf */
    gltf: GLTF_3 | null;
    beforeRoot(): null;
    afterRoot(result: GLTF_3): Promise<void>;
    /** Instantiates Needle components from the serialized node data after loading */
    private createComponents;
}

/** Engine-wide feature flags toggling experimental behavior */
export declare const NEEDLE_ENGINE_FEATURE_FLAGS: {
    /** Enables the experimental smart hierarchy update path — TODO confirm exact semantics */
    experimentalSmartHierarchyUpdate: boolean;
};

/**
 * External dependencies that are loaded on demand either by the engine automatically when needed or they can be loaded manually by calling the `load` function.
 *
 * Use the `ready` function to wait for the module to be loaded if you do not want to trigger a load.
 *
 * If a module is already loaded it's also available in the `MODULE` variable.
 */
export declare const NEEDLE_ENGINE_MODULES: {
    /** MaterialX material support */
    MaterialX: {
        MODULE: needleToolsMaterialx;
        MAYBEMODULE: needleToolsMaterialx | null;
        /** Wait for the module to be loaded (doesn't trigger a load) */
        ready(): Promise<needleToolsMaterialx>;
        /** Load the module */
        load(): Promise<needleToolsMaterialx>;
    };
    /** Rapier physics engine (`@dimforge/rapier3d-compat`) */
    RAPIER_PHYSICS: {
        MODULE: dimforgeRapier3dCompat;
        MAYBEMODULE: dimforgeRapier3dCompat | null;
        /** Wait for the module to be loaded (doesn't trigger a load) */
        ready(): Promise<dimforgeRapier3dCompat>;
        /** Load the module */
        load(): Promise<dimforgeRapier3dCompat>;
    };
    /** The `postprocessing` effects package */
    POSTPROCESSING: {
        MODULE: postprocessing;
        MAYBEMODULE: postprocessing | null;
        /** Wait for the module to be loaded (doesn't trigger a load) */
        ready(): Promise<postprocessing>;
        /** Load the module */
        load(): Promise<postprocessing>;
    };
    /** Ambient occlusion postprocessing module — TODO confirm which package this resolves to */
    POSTPROCESSING_AO: {
        MODULE: any;
        MAYBEMODULE: any;
        /** Wait for the module to be loaded (doesn't trigger a load) */
        ready(): Promise<any>;
        /** Load the module */
        load(): Promise<any>;
    };
    /** The `peerjs` networking package */
    PEERJS: {
        MODULE: peerjs;
        MAYBEMODULE: peerjs | null;
        /** Wait for the module to be loaded (doesn't trigger a load) */
        ready(): Promise<peerjs>;
        /** Load the module */
        load(): Promise<peerjs>;
    };
};

export { NEEDLE_progressive }

/**
 * [&lt;needle-button&gt;](https://engine.needle.tools/docs/api/NeedleButtonElement) is a web component for easily adding AR, VR, Quicklook, or QR code buttons to your website without writing JavaScript code.
 *
 * The button automatically handles session management and displays appropriate UI based on device capabilities.
 * It comes with default styling (glassmorphism design) but can be fully customized with CSS.
 *
 * **Supported button types:**
 * - `ar` - WebXR AR session button
 * - `vr` - WebXR VR session button
 * - `quicklook` - Apple AR Quick Look button (iOS only)
 * - `qrcode` - QR code sharing button
 *
 * @example Basic AR/VR buttons
 * ```html
 * <needle-engine src="scene.glb"></needle-engine>
 * <needle-button ar></needle-button>
 * <needle-button vr></needle-button>
 * <needle-button quicklook></needle-button>
 * ```
 *
 * @example Custom button labels
 * ```html
 * <needle-button ar>Start AR Experience</needle-button>
 * <needle-button vr>Enter VR Mode</needle-button>
 * <needle-button quicklook>View in AR</needle-button>
 * ```
 *
 * @example Custom styling
 * ```html
 * <style>
 *   needle-button {
 *     background-color: #ff6b6b;
 *     color: white;
 *     border-radius: 8px;
 *     padding: 1rem 2rem;
 *   }
 *   needle-button:hover {
 *     background-color: #ff5252;
 *   }
 * </style>
 * <needle-button ar>Start AR</needle-button>
 * ```
 *
 * @example Unstyled button (for complete custom styling)
 * ```html
 * <needle-button ar unstyled>
 *   <span class="my-icon">🥽</span>
 *   Launch AR
 * </needle-button>
 * ```
 *
 * @see {@link NeedleEngineWebComponent} for the main &lt;needle-engine&gt; element
 * @see {@link NeedleMenu} for the built-in menu component that can display similar buttons
 */
export declare class NeedleButtonElement extends HTMLElement {
    #private;
    /** Attributes observed by this custom element (changes trigger {@link attributeChangedCallback}) */
    static observedAttributes: string[];
    constructor();
    /** Standard custom-element lifecycle callback invoked when an observed attribute changes */
    attributeChangedCallback(_name: string, _oldValue: string, _newValue: string): void;
}

/**
 * Supported attributes for the `<needle-engine>` web component.
 * These are set as regular HTML attributes, e.g. `<needle-engine src="scene.glb">`.
 */
export declare interface NeedleEngineAttributes {
    /** Change which model gets loaded. */
    'src': string;
    /** String attached to the context for caching/identification. */
    'hash': string;
    /** Set to automatically add OrbitControls to the loaded scene. */
    'camera-controls': string;
    /** Override the default draco decoder path location. */
    'dracoDecoderPath': string;
    /** Override the default draco library type. */
    'dracoDecoderType': 'wasm' | 'js';
    /** Override the default KTX2 transcoder/decoder path. */
    'ktx2DecoderPath': string;
    /** Prevent context from being disposed when element is removed from DOM. */
    'keep-alive': 'true' | 'false';
    /** Loading overlay style. */
    'loading-style': 'dark' | 'light' | 'auto';
    /** URL to .exr, .hdr, .png, .jpg to be used as skybox. */
    'background-image': string;
    /** Rotation of the background image in degrees. */
    'background-rotation': string | number;
    /** URL to .exr, .hdr, .png, .jpg to be used for lighting. */
    'environment-image': string;
    /** Intensity multiplier for environment lighting. */
    'environment-intensity': string;
    /** Blurs the background image. 0 (sharp) to 1 (fully blurred). */
    'background-blurriness': string;
    /** Intensity multiplier for the background image. */
    'background-intensity': string;
    /** CSS background color if no skybox/background image is provided. */
    'background-color': string;
    /** Enable/disable renderer canvas transparency. */
    'transparent': 'true' | 'false';
    /** Enable/disable contact shadows. */
    'contact-shadows': 'true' | 'false';
    /** Tonemapping mode. */
    'tone-mapping': TonemappingAttributeOptions;
    /** Exposure multiplier for tonemapping. */
    'tone-mapping-exposure': string;
    /** CSS selector or HTMLElement for camera focus. */
    'focus-rect': string | HTMLElement;
    /** Allow pointer events to pass through transparent parts. */
    'clickthrough': 'true' | 'false';
    /** Automatically fit model into camera view on load. */
    'auto-fit': 'true' | 'false';
    /** Auto-rotate model until user interacts. */
    'auto-rotate': 'true' | 'false';
    /** Play animations automatically on scene load. */
    'autoplay': 'true' | 'false';
}

/**
 * NeedleEngineModelLoader is a namespace that provides functions to register custom model loaders and mimetype callbacks.
 * It allows you to create custom loaders for specific file types and determine the mimetype of files based on their content.
 * @example
 * ```ts
 * import { NeedleEngineModelLoader } from "@needle-tools/engine";
 * import { STLLoader } from "three/examples/jsm/loaders/STLLoader";
 *
 * NeedleEngineModelLoader.onCreateCustomModelLoader(args => {
 *    if (args.mimetype === "model/stl") {
 *       return new STLLoader();
 *  }
 * });
 *
 * NeedleEngineModelLoader.onDetermineModelMimetype((args) => {
 *   // detect stl mimetype
 *   const bytes = args.bytes;
 *   if (bytes[0] === 0x73 && bytes[1] === 0x74 && bytes[2] === 0x6c) {
 *      return "model/stl";
 *   }
 *   return null;
 * });
 * ```
 */
export declare namespace NeedleEngineModelLoader {
    export type CustomLoaderOptions = {
        /** The name of the loader. This is used for debugging purposes. */
        name?: string;
        /**
         * The priority of the loader. Higher priority loaders will be called first.
         * @default 0
         */
        priority?: number;
    };
    /**
     * Register a custom loader callback. For every file that is requested this callback is called with the url and mimetype. It should return a custom loader or null if it does not want to handle the file.
     * @param callback The callback to register
     * @param opts Optional options for the loader (e.g. name, priority)
     * @returns A function to unregister the callback
     * @example
     * ```ts
     * import { NeedleEngineModelLoader } from "@needle-tools/engine";
     * const unregister = NeedleEngineModelLoader.onCreateCustomModelLoader(args => {
     *     if (args.mimetype === "application/vnd.usdz+zip") {
     *         return new CustomLoader();
     *     }
     *     return null;
     * });
     * ```
     */
    export function onCreateCustomModelLoader(callback: CustomLoaderCallback, opts?: CustomLoaderOptions): () => void;
    /**
     * Register a callback to determine the mimetype of a file. This is to support custom loaders. The callback will provide the URL of the file to load + a range request response with the first few bytes of the file. The callback should return a mimetype or null if it does not want to handle the file.
     * @param callback The callback to register
     * @returns A function to unregister the callback
     *
     */
    export function onDetermineModelMimetype(callback: MimetypeCallback): (() => void);
    export {};
}

/**
 * The `<needle-engine>` web component. See {@link NeedleEngineAttributes} attributes for supported attributes
 * The web component creates and manages a Needle Engine context, which is responsible for rendering a 3D scene using threejs.
 * The context is created when the `src` attribute is set, and disposed when the element is removed from the DOM. You can prevent cleanup by setting the `keep-alive` attribute to `true`.
 * The context is accessible from the `<needle-engine>` element: `document.querySelector("needle-engine").context`.
 * See {@link https://engine.needle.tools/docs/reference/needle-engine-attributes}
 *
 * @example Basic usage
 * ```html
 * <needle-engine src="https://example.com/scene.glb"></needle-engine>
 * ```
 *
 * @example With camera controls disabled
 * ```html
 * <needle-engine src="https://example.com/scene.glb" camera-controls="false"></needle-engine>
 * ```
 *
 * @see {@link NeedleButtonElement} for adding AR/VR/Quicklook buttons via &lt;needle-button&gt;
 * @see {@link NeedleMenu} for the built-in menu configuration component
 */
export declare class NeedleEngineWebComponent extends HTMLElement implements INeedleEngineComponent {
    static get observedAttributes(): string[];
    /** Current loading progress in the range [0, 1] */
    get loadingProgress01(): number;
    /** True when loading has finished */
    get loadingFinished(): boolean;
    /**
     * If set to false the camera controls are disabled. Default is true.
     * @memberof NeedleEngineAttributes
     * @example
     * <needle-engine camera-controls="false"></needle-engine>
     * @example
     * <needle-engine camera-controls="true"></needle-engine>
     * @example
     * <needle-engine camera-controls></needle-engine>
     * @example
     * <needle-engine></needle-engine>
     * @returns null if the attribute is not set
     */
    get cameraControls(): boolean | null;
    set cameraControls(value: boolean | null);
    /**
     * Get the current context for this web component instance. The context is created when the src attribute is set and the loading has finished.
     * The context is disposed when the needle engine is removed from the document (you can prevent this by setting the keep-alive attribute to true).
     * @returns a promise that resolves to the context when the loading has finished
     */
    getContext(): Promise<Context>;
    /**
     * Get the context that is created when the src attribute is set and the loading has finished.
     */
    get context(): Context | undefined;
    private _context?;
    /** NOTE(review): AR overlay element — inferred from the name, confirm against implementation */
    private _overlay_ar;
    private _loadingProgress01;
    private _loadingView?;
    /** Previously loaded src — presumably consulted by checkIfSourceHasChanged; confirm */
    private _previousSrc;
    /** @private set to true after <needle-engine> did load completely at least once. Set to false when <needle-engine> is removed from the document */
    private _didFullyLoad;
    private _didInitialize;
    constructor();
    private ensureInitialized;
    private initializeDom;
    /* Excluded from this release type: connectedCallback */
    /* Excluded from this release type: disconnectedCallback */
    connectedMoveCallback(): void;
    /* Excluded from this release type: attributeChangedCallback */
    /** The tonemapping setting configured as an attribute on the <needle-engine> component */
    get toneMapping(): TonemappingAttributeOptions | null | undefined;
    private _loadId;
    private _abortController;
    private _lastSourceFiles;
    private _createContextPromise;
    /**
     * Check if we have a context. If not a new one is created.
     */
    private getOrCreateContext;
    private onLoad;
    private applyAttributes;
    private onXRSessionStarted;
    /** called by the context when the first frame has been rendered */
    private onReady;
    private onError;
    private getSourceFiles;
    private checkIfSourceHasChanged;
    private _previouslyRegisteredMap;
    private ensureLoadStartIsRegistered;
    private registerEventFromAttribute;
    private setPublicKey;
    private setVersion;
    /* Excluded from this release type: getAROverlayContainer */
    /* Excluded from this release type: getVROverlayContainer */
    /* Excluded from this release type: onEnterAR */
    /* Excluded from this release type: onExitAR */
    /* Excluded from this release type: onEnterVR */
    /* Excluded from this release type: onExitVR */
    private onSetupAR;
    private onSetupVR;
    private onSetupDesktop;
    private setupElementsForMode;
    private foreachHtmlElement;
    private onBeforeBeginLoading;
    /** Set a known Needle Engine attribute. See {@link NeedleEngineAttributes} for available attributes. */
    setAttribute<K extends keyof NeedleEngineAttributes>(name: K, value: NeedleEngineAttributes[K]): void;
    setAttribute(qualifiedName: string, value: string): void;
    /** Get a known Needle Engine attribute. See {@link NeedleEngineAttributes} for available attributes. */
    getAttribute<K extends keyof NeedleEngineAttributes>(qualifiedName: K): string | null;
    getAttribute(qualifiedName: string): string | null;
    /**
     * Emitted when loading begins for the scene. The event is cancelable — calling `preventDefault()`
     * will stop the default loading UI behavior, so apps can implement custom loading flows.
     */
    addEventListener(type: 'loadstart', listener: (ev: CustomEvent<{
        context: Context;
        alias: string | null;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted repeatedly while loading resources. Use the event detail to show progress. */
    addEventListener(type: 'progress', listener: (ev: CustomEvent<{
        context: Context;
        name: string;
        progress: ProgressEvent<EventTarget>;
        index: number;
        count: number;
        totalProgress01: number;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when scene loading has finished. */
    addEventListener(type: 'loadfinished', listener: (ev: CustomEvent<{
        context: Context;
        src: string | null;
        loadedFiles: LoadedModel[];
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when an XR session ends. */
    addEventListener(type: 'xr-session-ended', listener: (ev: CustomEvent<{
        session: XRSession | null;
        context: Context;
        sessionMode: XRSessionMode | undefined;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when entering an AR session. */
    addEventListener(type: 'enter-ar', listener: (ev: CustomEvent<{
        session: XRSession;
        context: Context;
        htmlContainer: HTMLElement | null;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when exiting an AR session. */
    addEventListener(type: 'exit-ar', listener: (ev: CustomEvent<{
        session: XRSession;
        context: Context;
        htmlContainer: HTMLElement | null;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when entering a VR session. */
    addEventListener(type: 'enter-vr', listener: (ev: CustomEvent<{
        session: XRSession;
        context: Context;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when exiting a VR session. */
    addEventListener(type: 'exit-vr', listener: (ev: CustomEvent<{
        session: XRSession;
        context: Context;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when the engine has rendered its first frame and is ready. */
    addEventListener(type: 'ready', listener: (ev: Event) => void, options?: boolean | AddEventListenerOptions): void;
    /** Emitted when an XR session is started. You can do additional setup here. */
    addEventListener(type: 'xr-session-started', listener: (ev: CustomEvent<{
        session: XRSession;
        context: Context;
    }>) => void, options?: boolean | AddEventListenerOptions): void;
    // NOTE(review): the `({} & K)` parameter below appears intentional — it lowers this overload's
    // priority so the specific, typed event overloads above win during inference. The next signature
    // looks like a duplicate differing only in the listener return type; confirm before cleaning up.
    addEventListener<K extends keyof HTMLElementEventMap>(type: ({} & K), listener: (this: HTMLElement, ev: HTMLElementEventMap[K]) => unknown, options?: boolean | AddEventListenerOptions): void;
    addEventListener<K extends keyof HTMLElementEventMap>(type: K, listener: (this: HTMLElement, ev: HTMLElementEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
}

/** Enhanced GamepadButton with `isDown` and `isUp` information */
declare class NeedleGamepadButton {
    /** The index of the button in the input gamepad */
    readonly index: number | undefined;
    /** The button name (e.g. "x-button"), assigned at construction */
    readonly name: string;
    /** Mirrors GamepadButton.touched */
    touched: boolean;
    /** Mirrors GamepadButton.pressed */
    pressed: boolean;
    /** Mirrors GamepadButton.value (0..1 per the Gamepad API) */
    value: number;
    /** was the button just pressed down the last update */
    isDown: boolean;
    /** was the button just released the last update */
    isUp: boolean;
    constructor(index: number | undefined, name: string);
}

/**
 * [NeedleMenu](https://engine.needle.tools/docs/api/NeedleMenu) provides configuration for the built-in UI menu.
 * The menu renders as HTML overlay in browser mode and automatically
 * switches to a 3D spatial menu in VR/AR.
 *
 * ![](https://cloud.needle.tools/-/media/YKleg1oPy_I8Hv8sg_k40Q.png)
 *
 * **Features:**
 * - Fullscreen toggle button
 * - Audio mute/unmute button
 * - QR code sharing (desktop only)
 * - Spatial menu in XR (appears when looking up)
 * - Custom positioning (top/bottom)
 *
 * **Programmatic access:**
 * Access the menu API via `this.context.menu` to add custom buttons,
 * show/hide elements, or modify behavior at runtime.
 *
 * @example Configure menu from code
 * ```ts
 * // Access the menu API
 * this.context.menu.appendChild(myCustomButton);
 * this.context.menu.setPosition("top");
 * this.context.menu.showFullscreenOption(true);
 * ```
 *
 * @summary Configuration component for the Needle Menu overlay
 * @category User Interface
 * @group Components
 * @see {@link Context.menu} for programmatic menu control
 * @see {@link NeedleButtonElement} for standalone &lt;needle-button&gt; web component
 * @see {@link NeedleEngineWebComponent} for the main &lt;needle-engine&gt; element
 * @see {@link Voip} adds a microphone button to the menu
 * @see {@link ScreenCapture} adds a screen sharing button
 **/
export declare class NeedleMenu extends Component {
    /**
     * Determines the vertical positioning of the menu on the screen ("top" or "bottom")
     */
    position: "top" | "bottom";
    /**
     * Controls the visibility of the Needle logo in the menu (hiding the logo requires a PRO license)
     */
    showNeedleLogo: boolean;
    /**
     * When enabled, displays the menu in VR/AR mode when the user looks up.
     * When undefined the engine default is used.
     * @default undefined
     */
    showSpatialMenu?: boolean;
    /**
     * When enabled, adds a fullscreen toggle button to the menu.
     * When undefined the engine default is used.
     * @default undefined
     */
    createFullscreenButton?: boolean;
    /**
     * When enabled, adds an audio mute/unmute button to the menu.
     * When undefined the engine default is used.
     * @default undefined
     */
    createMuteButton?: boolean;
    /**
     * When enabled, adds a button to display a QR code for sharing the application.
     * The QR code is only displayed on desktop devices.
     * When undefined the engine default is used.
     * @default undefined
     */
    createQRCodeButton?: boolean;
    /**
     * Applies the configured menu options when the component is enabled
     * @hidden
     */
    onEnable(): void;
    /**
     * Applies all configured options to the active {@link Context.menu}.
     */
    applyOptions(): void;
}

/**
 * The NeedleMenu is a menu that can be displayed in the needle engine webcomponent or in VR/AR sessions.
 *
 * The menu can be used to add buttons to the needle engine that can be used to interact with the application.
 *
 * The menu can be positioned at the top or the bottom of the <needle-engine> webcomponent.
 *
 * @example Add a new button using the NeedleMenu
 * ```typescript
 * onStart(ctx => {
 *   ctx.menu.appendChild({
 *    label: "Open Google",
 *    icon: "google",
 *    onClick: () => { window.open("https://www.google.com", "_blank") }
 *   });
 * })
 * ```
 *
 * Buttons can be added to the menu using the {@link NeedleMenu#appendChild} method or by sending a postMessage event to the needle engine with the type "needle:menu". Use the {@link NeedleMenuPostMessageModel} model to create buttons with postMessage.
 * @example Create a button using a postmessage
 * ```javascript
 * window.postMessage({
 *    type: "needle:menu",
 *    button: {
 *      label: "Open Google",
 *      icon: "google",
 *      onclick: "https://www.google.com",
 *      target: "_blank",
 *    }
 * }, "*");
 * ```
 *
 * @example Access the menu from a component
 * ```typescript
 * import { Behaviour, OnStart } from '@needle-tools/engine';
 *
 * export class MyComponent extends Behaviour {
 *
 *   start() {
 *    this.context.menu.appendChild({ ... });
 *   }
 * }
 * ```
 *
 * @category HTML
 */
declare class NeedleMenu_2 {
    /** Set the ordering priority of a menu element (see {@link appendChild}) */
    static setElementPriority(button: HTMLElement, priority: number): void;
    /** Get the ordering priority previously set on a menu element, if any */
    static getElementPriority(button: HTMLElement): number | undefined;
    private readonly _context;
    private readonly _menu;
    private readonly _spatialMenu;
    constructor(context: Context);
    /** @ignore internal method */
    onDestroy(): void;
    private onPostMessage;
    private onStartXR;
    private onExitXR;
    /** Experimental: Change the menu position to be at the top or the bottom of the needle engine webcomponent
     * @param position "top" or "bottom"
     */
    setPosition(position: "top" | "bottom"): void;
    /**
     * Call to show or hide the menu.
     * NOTE: Hiding the menu is a PRO feature and requires a needle engine license. Hiding the menu will not work in production without a license.
     */
    setVisible(visible: boolean): void;
    /** When set to false, the Needle Engine logo will be hidden. Hiding the logo requires a needle engine license */
    showNeedleLogo(visible: boolean): void;
    /** @returns true if the logo is visible */
    get logoIsVisible(): boolean;
    /** When enabled=true the menu will be visible in VR/AR sessions */
    showSpatialMenu(enabled: boolean): void;
    /** Show or hide the spatial (VR/AR) menu immediately */
    setSpatialMenuVisible(display: boolean): void;
    /** Whether the spatial menu is currently visible.
     * NOTE(review): typed `any` — likely should be `boolean`; confirm against the implementation before narrowing. */
    get spatialMenuIsVisible(): any;
    /**
     * Call to add or remove a button to the menu to show a QR code for the current page
     * If enabled=true then a button will be added to the menu that will show a QR code for the current page when clicked.
     */
    showQRCodeButton(enabled: boolean | "desktop-only"): HTMLButtonElement | null;
    /** Call to add or remove a button to the menu to mute or unmute the application
     * Clicking the button will mute or unmute the application
     */
    showAudioPlaybackOption(visible: boolean): void;
    private _muteButton?;
    /** Call to add or remove a fullscreen toggle button in the menu */
    showFullscreenOption(visible: boolean): void;
    private _fullscreenButton?;
    /** Add an element or a {@link ButtonInfo} descriptor to the menu
     * @returns the HTML element that was added to the menu */
    appendChild(child: HTMLElement | ButtonInfo): HTMLElement;
}

/** The model for the postMessage event that the needle engine handles to create menu items (send it with `window.postMessage`) */
export declare type NeedleMenuPostMessageModel = {
    type: "needle:menu";
    button?: {
        /** Button label text */
        label?: string;
        /** Google icon name */
        icon?: string;
        /** Click action — currently only URLs are supported */
        onclick?: string;
        /** Link target used when opening the URL */
        target?: "_blank" | "_self" | "_parent" | "_top";
        /** Ordering priority: low priority places the button on the left, high priority on the right. Default is 0 */
        priority?: number;
    };
};

/**
 * The supported file types that can be determined by the engine. Used in {@link tryDetermineMimetypeFromURL} and {@link tryDetermineMimetypeFromBinary}.
 * The trailing `(string & {})` member keeps the type open to arbitrary mimetype strings while preserving editor autocompletion for the known literals.
 */
export declare type NeedleMimetype = "unknown" | "unsupported" | "model/gltf+json" | "model/gltf-binary" | "model/vrm" | "model/vnd.usdz+zip" | "model/vnd.usd" | "model/vnd.usda" | "model/vnd.usdc" | "model/fbx" | "model/vnd.autodesk.fbx" | "model/obj" | "application/materialx+xml" | (string & {});

/** Key under which Needle patches are registered — NOTE(review): semantics inferred from the constant value; confirm against usage */
export declare const NeedlePatchesKey = "Needle:Patches";

export declare class NeedleUSDZExporter {
    /** Enable verbose/debug behavior during export */
    debug: boolean;
    /** When enabled, unused nodes are removed from the export — NOTE(review): inferred from the name; confirm */
    pruneUnusedNodes: boolean;
    /** Options controlling how the exported scene is anchored (see {@link USDZExporterOptions}) */
    sceneAnchoringOptions: USDZExporterOptions;
    /** Exporter extensions that hook into the export process */
    extensions: Array<IUSDExporterExtension>;
    /** Optional per-object filter — presumably return false to exclude an object from the export; confirm */
    keepObject?: (object: Object3D) => boolean;
    /** Optional hook invoked before the USD document is written */
    beforeWritingDocument?: () => void;
    constructor();
    /**
     * Export the given scene to USDZ.
     * @param scene The object hierarchy to export (the signature tolerates null/undefined)
     * @param options Per-call export options — NOTE(review): precedence over instance configuration not visible here; confirm
     * @returns the USDZ file content as a byte array
     */
    parse(scene: Object3D | null | undefined, options?: USDZExporterOptions): Promise<Uint8Array<ArrayBufferLike>>;
}

/**
 * A NeedleXRController wraps a connected XRInputDevice that is either a physical controller or a hand
 * You can access specific buttons using `getButton` and `getStick`
 * To get spatial data in rig space (position, rotation) use the `gripPosition`, `gripQuaternion`, `rayPosition` and `rayQuaternion` properties
 * To get spatial data in world space use the `gripWorldPosition`, `gripWorldQuaternion`, `rayWorldPosition` and `rayWorldQuaternion` properties
 * Inputs will also be emitted as pointer events on `this.context.input` - so you can receive controller inputs on objects using the appropriate input events on your components (e.g. `onPointerDown`, `onPointerUp` etc) - use the `pointerType` property to check if the event is from a controller or not
 * @link https://developer.mozilla.org/en-US/docs/Web/API/XRInputSource
 * @category XR
 */
export declare class NeedleXRController implements IPointerHitEventReceiver {
    /** the Needle XR Session */
    readonly xr: NeedleXRSession;
    /** the engine context associated with this controller's session */
    get context(): Context;
    /**
     * https://developer.mozilla.org/en-US/docs/Web/API/XRInputSource
     */
    readonly inputSource: XRInputSource;
    /** the input source index */
    readonly index: number;
    /** When enabled the controller will create input events in the Needle Engine input system (e.g. when a button is pressed or the controller is moved)
     * You can disable this if you don't want inputs to go through the input system but be aware that this will result in `onPointerDown` component callbacks to not be invoked anymore for this XRController
     */
    emitEvents: boolean;
    /** Is the controller still connected? */
    get connected(): boolean;
    private _connected;
    /** Is the input source currently being tracked? */
    get isTracking(): boolean;
    private _isTracking;
    /** the input source gamepad giving raw access to the gamepad values
     * You should usually use the `getButton` and `getStick` methods instead to get access to named buttons and sticks
     */
    get gamepad(): Gamepad | undefined;
    private __gamepad?;
    /** @returns true if this is a hand (otherwise this is a controller) */
    get isHand(): boolean;
    /**
     * If this is a hand then this is the hand info (XRHand)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRHand
     */
    get hand(): XRHand | undefined;
    private __hand?;
    /** threejs XRHandSpace, shorthand for `context.renderer.xr.getHand(controllerIndex)`
     * @link https://threejs.org/docs/#api/en/renderers/webxr/WebXRManager.getHand
     */
    get handObject(): XRHandSpace;
    /** The input source profiles */
    get profiles(): string[];
    /** The device input layout */
    get layout(): InputDeviceLayout | undefined;
    /** shorthand for `inputSource.targetRayMode` */
    get targetRayMode(): (XRTargetRayMode | "transient-pointer");
    /** shorthand for `inputSource.targetRaySpace` */
    get targetRaySpace(): XRSpace;
    /** shorthand for `inputSource.gripSpace` */
    get gripSpace(): XRSpace | undefined;
    /**
     * If the controller is held in the left or right hand (or if it's a left or right hand)
     **/
    get side(): XRHandedness;
    private __side;
    /** is right side. shorthand for `side === 'right'` */
    get isRight(): boolean;
    /** is left side. shorthand for `side === 'left'` */
    get isLeft(): boolean;
    /** is XR stylus, e.g. Logitech MX Ink */
    get isStylus(): boolean;
    /** The XRTransientInputHitTestSource can be used to perform hit tests with the controller ray against the real world.
     * see https://developer.mozilla.org/en-US/docs/Web/API/XRSession/requestHitTestSourceForTransientInput for more information
     * Requires the hit-test feature to be enabled in the XRSession
     *
     * NOTE: The hit test source should be cancelled once it's not needed anymore. Call `cancelHitTestSource` to do this
     */
    getHitTestSource(): XRTransientInputHitTestSource | undefined;
    /** NOTE(review): despite the `has` prefix this returns the hit test source itself (truthy when present), not a boolean — confirm before narrowing the type */
    get hasHitTestSource(): XRTransientInputHitTestSource | undefined;
    /** Make sure to cancel the hittest source once it's not needed anymore */
    cancelHitTestSource(): void;
    private _hitTestSource;
    private _hasSelectEvent;
    /** True if this input source produced a select event */
    get hasSelectEvent(): boolean;
    private _isMxInk;
    private _isMetaQuestTouchController;
    /** Perform a hit test against the XR planes or meshes. shorthand for `xr.getHitTest(controller)`
     * @returns the hit test result (with position and rotation in worldspace) or null if no hit was found
     */
    getHitTest(): NeedleXRHitTestResult | null;
    /** This is cleared at the beginning of each frame */
    private readonly _handJointPoses;
    /** Get the hand joint pose from the current XRFrame. Results are cached for a frame to avoid calling getJointPose multiple times */
    getHandJointPose(joint: XRJointSpace, frame?: XRFrame): XRJointPose | null | undefined;
    /** Grip matrix in grip space */
    private readonly _gripMatrix;
    /** Grip position in grip space */
    private readonly _gripPosition;
    /** Grip rotation in grip space */
    private readonly _gripQuaternion;
    private readonly _linearVelocity;
    private readonly _rayPositionRaw;
    private readonly _rayRotationRaw;
    /** ray matrix in grip space */
    private readonly _rayMatrix;
    /** Ray position in rig space */
    private readonly _rayPosition;
    /** Ray rotation in rig space */
    private readonly _rayQuaternion;
    /** Grip position in rig space */
    get gripPosition(): Vector3;
    /** Grip rotation in rig space */
    get gripQuaternion(): Quaternion;
    /** Grip matrix in rig space */
    get gripMatrix(): Matrix4;
    /** Grip linear velocity in rig space
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRPose/linearVelocity
     */
    get gripLinearVelocity(): Vector3;
    /** Ray position in rig space */
    get rayPosition(): Vector3;
    /** Ray rotation in rig space */
    get rayQuaternion(): Quaternion;
    /** Controller grip position in worldspace */
    get gripWorldPosition(): Vector3;
    private readonly _gripWorldPosition;
    /** Controller grip rotation in worldspace */
    get gripWorldQuaternion(): Quaternion;
    private readonly _gripWorldQuaternion;
    /** Controller ray position in worldspace (this value is calculated once per frame by default - call `updateRayWorldPosition` to force an update) */
    get rayWorldPosition(): Vector3;
    private readonly _rayWorldPosition;
    /** Recalculates the ray world position */
    updateRayWorldPosition(): void;
    /** Controller ray rotation in worldspace (this value is calculated once per frame by default - call `updateRayWorldQuaternion` to force an update) */
    get rayWorldQuaternion(): Quaternion;
    private readonly _rayWorldQuaternion;
    /** The pinch position — NOTE(review): coordinate space (rig vs world) not documented here; confirm */
    get pinchPosition(): Vector3;
    private readonly _pinchPosition;
    /** Recalculates the ray world quaternion */
    updateRayWorldQuaternion(): void;
    /** The controller ray in worldspace */
    get ray(): Ray;
    private readonly _ray;
    /** Recalculated once per update */
    private _hand_wristDotUp;
    /**
     * The dot product of the hand palm with the up vector.
     * This is a number between -1 and 1, where 1 means the palm is directly up and -1 means the palm is directly down (upside down).
     * This value is undefined if there's no hand
     */
    get handWristDotUp(): number | undefined;
    /**
     * @returns true if the hand is upside down
     */
    get isHandUpsideDown(): boolean;
    /**
     * @returns true if the hand is upside down and we got a pinch down event this frame.
     */
    get isTeleportGesture(): boolean | undefined;
    /** The controller object space.
     * You can use it to attach objects to the controller.
     * Children will be automatically detached and put into the scene when the controller disconnects
     */
    get object(): IGameObject;
    private readonly _object;
    private readonly _gripSpaceObject?;
    private readonly _raySpaceObject?;
    /** The model assigned for rendering. This can be used as a hint for other components */
    model: Object3D | null;
    private readonly _debugAxesHelper;
    private readonly _debugGripAxesHelper;
    private readonly _debugRayAxesHelper;
    /** returns the URL of the default controller model */
    getModelUrl(): Promise<string | null>;
    constructor(session: NeedleXRSession, device: XRInputSource, index: number);
    private _hitTestSourcePromise;
    private _requestHitTestSource;
    onPointerHits: (_evt: any) => void;
    /** Called every frame with the current XRFrame while the session is running */
    onUpdate(frame: XRFrame): void;
    onRenderDebug(): void;
    private onUpdateFrame;
    /** Called when the input source disconnects */
    onDisconnected(): void;
    /**
     * Get a gamepad button
     * @link https://github.com/immersive-web/webxr-gamepads-module/blob/main/gamepads-module-explainer.md
     * @param key the controller button name e.g. x-button
     * @returns the gamepad button if it exists on the controller - otherwise undefined
     */
    getButton(key: NeedleXRControllerButtonName): NeedleGamepadButton | undefined | null;
    /** Get a gesture state */
    getGesture(key: XRGestureName): NeedleGamepadButton | null;
    /**
     * Get the pointer id for a specific button of this input device.
     * This is useful if you want to check if a button (e.g. trigger) is currently being in use which can be queried on the inputsystem.
     * @returns the pointer id for the button or undefined if the button is not supported
     * @example
     * ```ts
     * const pointerId = controller.getPointerId("primary");
     * if (pointerId !== undefined) {
     *     const isUsed = this.context.input.getPointerUsed(pointerId);
     *     console.log(controller.side, "used?", isUsed);
     * }
     * ```
     */
    getPointerId(button: number): number;
    getPointerId(button: NeedleXRControllerButtonName | XRGestureName): number | undefined;
    private readonly _needleGamepadButtons;
    /** combine the InputState information + the GamepadButton information (since GamepadButtons can not be extended) */
    private toNeedleGamepadButton;
    /**
     * Get the values of a controller joystick
     * @link https://github.com/immersive-web/webxr-gamepads-module/blob/main/gamepads-module-explainer.md
     * @returns the stick values where x is left/right, y is up/down and z is the button value
     */
    getStick(key: StickName | "primary"): Vec3;
    private readonly _buttonMap;
    private _motioncontroller?;
    private _layout;
    private getMotionController;
    private initialize;
    /**
     * When enabled the controller will automatically emit pointer down events to the Needle Engine Input System.
     * @default true
     */
    emitPointerDownEvent: boolean;
    /**
     * When enabled the controller will automatically emit pointer up events to the Needle Engine Input System.
     * @default true
     */
    emitPointerUpEvent: boolean;
    /**
     * When enabled the controller will automatically emit pointer move events to the Needle Engine Input System.
     * @default true
     */
    emitPointerMoveEvent: boolean;
    /**
     * The distance threshold for pointer move events. This value is in units in rig space
     * @default 0.03
     */
    pointerMoveDistanceThreshold: number;
    /**
     * The angle threshold for pointer move events. This value is in radians.
     * @default 0.05
     */
    pointerMoveAngleThreshold: number;
    private subscribeEvents;
    private unsubscribeEvents;
    private _selectButtonIndex;
    private _squeezeButtonIndex;
    private onSelectStart;
    private onSelectEnd;
    // NOTE(review): "Sequeeze" below is a typo for "Squeeze" — private members, consider renaming in the implementation
    private onSequeezeStart;
    private onSequeezeEnd;
    /** Index = button index */
    private readonly states;
    private updateInputEvents;
    private _didMoveLastFrame;
    private readonly _lastPointerMovePosition;
    private readonly _lastPointerMoveQuaternion;
    private onUpdateMove;
    /** cached spatial pointer init object, re-used to avoid allocating a new object per event */
    private readonly pointerInit;
    private emitPointerEvent;
}

/** Button names accepted by `NeedleXRController.getButton` — the layout button names plus the "primary" aliases */
declare type NeedleXRControllerButtonName = ButtonName | "primary-button" | "primary";

/** Contains a reference to the currently active webxr session and the controller that has changed */
export declare type NeedleXRControllerEventArgs = NeedleXREventArgs & {
    /** The controller (or hand) that was added or removed */
    controller: NeedleXRController;
    /** Whether the controller was just added or removed */
    change: "added" | "removed";
};

/** NeedleXRSession event argument.
 * Use `args.xr` to access the NeedleXRSession */
export declare type NeedleXREventArgs = {
    /** The active Needle XR session */
    readonly xr: NeedleXRSession;
};

/** XRFrame extended with the optional `fillPoses` batch-pose function (optional because not all browsers implement it) */
declare type NeedleXRFrame = XRFrame & {
    fillPoses?: FillPosesFunction;
};

/** Result of a XR hit-test */
export declare type NeedleXRHitTestResult = {
    /** The original XRHitTestResult */
    readonly hit: XRHitTestResult;
    /** The hit position in world space */
    readonly position: Vector3;
    /** The hit rotation in world space */
    readonly quaternion: Quaternion;
};

/**
 * This class manages an XRSession to provide helper methods and events. It provides easy access to the XRInputSources (controllers and hands)
 * - Start a XRSession with `NeedleXRSession.start(...)`
 * - Stop a XRSession with `NeedleXRSession.stop()`
 * - Access a running XRSession with `NeedleXRSession.active`
 *
 * If a XRSession is active you can use all XR-related event methods on your components to receive XR events e.g. `onEnterXR`, `onUpdateXR`, `onLeaveXR`
 * ```ts
 * export class MyComponent extends Behaviour {
 *    // callback invoked whenever the XRSession is started or your component is added to a scene with an active XRSession
 *    onEnterXR(args: NeedleXREventArgs) {
 *       console.log("Entered XR");
 *      // access the NeedleXRSession via args.xr
 *    }
 *    // callback invoked whenever a controller is added (or you switch from controller to hand tracking)
 *    onControllerAdded(args: NeedleXRControllerEventArgs) {  }
 * }
 * ```
 *
 * ### XRRig
 * The XRRig can be accessed via the `rig` property
 * Set a custom XRRig via `NeedleXRSession.addRig(...)` or `NeedleXRSession.removeRig(...)`
 * By default the active XRRig with the highest priority in the scene is used
 *
 * ### Screenshots in XR
 * Screenshots work automatically during XR sessions, including AR camera feed compositing. See {@link screenshot2} for more information.
 *
 * @category XR
 * @see {@link screenshot2} for taking screenshots in XR sessions
 */
export declare class NeedleXRSession implements INeedleXRSession {
    private static _sync;
    /** Get the {@link NeedleXRSync} instance for the given context (used for networked XR state) */
    static getXRSync(context: Context): NeedleXRSync;
    /** The session mode currently being requested (or null if no request is in progress) */
    static get currentSessionRequest(): XRSessionMode | null;
    private static _currentSessionRequestMode;
    /**
     * @returns the active {@link NeedleXRSession} (if any active) or null
     */
    static get active(): NeedleXRSession | null;
    /** The active xr session mode (if any xr session is active)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSessionMode
     */
    static get activeMode(): XRSessionMode | null;
    /** XRSystem via navigator.xr access
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSystem
     */
    static get xrSystem(): XRSystem | undefined;
    /**
     * @returns true if the browser supports WebXR (`immersive-vr` or `immersive-ar`)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSystem/isSessionSupported
     */
    static isXRSupported(): Promise<boolean>;
    /**
     * @returns true if the browser supports immersive-vr (WebXR)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSystem/isSessionSupported
     */
    static isVRSupported(): Promise<boolean>;
    /**
     * @returns true if the browser supports immersive-ar (WebXR)
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSystem/isSessionSupported
     */
    static isARSupported(): Promise<boolean>;
    /**
     * @param mode The XRSessionMode to check if it is supported
     * @returns true if the browser supports the given XRSessionMode
     */
    static isSessionSupported(mode: XRSessionMode): Promise<boolean>;
    private static _currentSessionRequest?;
    private static _activeSession;
    /** Register to listen to XRSession request start events. Unsubscribe with `offSessionRequestStart` */
    static onSessionRequestStart(evt: SessionRequestedEvent): void;
    /** Unsubscribe from request start evt. Register with `onSessionRequestStart` */
    static offSessionRequestStart(evt: SessionRequestedEvent): void;
    private static readonly _sessionRequestStartListeners;
    /** Called after the session request has finished */
    static onSessionRequestEnd(evt: SessionRequestedEndEvent): void;
    /** Unsubscribe from request end evt */
    static offSessionRequestEnd(evt: SessionRequestedEndEvent): void;
    private static readonly _sessionRequestEndListeners;
    /** Listen to XR session started. Unsubscribe with `offXRSessionStart` */
    static onXRSessionStart(evt: SessionChangedEvt): void;
    /** Unsubscribe from XRSession started events */
    static offXRSessionStart(evt: SessionChangedEvt): void;
    private static readonly _xrStartListeners;
    /** Listen to XR session ended. Unsubscribe with `offXRSessionEnd` */
    static onXRSessionEnd(evt: SessionChangedEvt): void;
    /** Unsubscribe from XRSession ended events */
    static offXRSessionEnd(evt: SessionChangedEvt): void;
    private static readonly _xrEndListeners;
    /** Listen to controller added events.
     * Events are cleared when starting a new session
     **/
    static onControllerAdded(evt: ControllerChangedEvt): void;
    /** Unsubscribe from controller added evts */
    static offControllerAdded(evt: ControllerChangedEvt): void;
    private static readonly _controllerAddedListeners;
    /** Listen to controller removed events
     * Events are cleared when starting a new session
     **/
    static onControllerRemoved(evt: ControllerChangedEvt): void;
    /** Unsubscribe from controller removed events */
    static offControllerRemoved(evt: ControllerChangedEvt): void;
    private static readonly _controllerRemovedListeners;
    /** If the browser supports offerSession - creating a VR or AR button in the browser navigation bar */
    static offerSession(mode: XRSessionMode, init: XRSessionInit | "default", context: Context): boolean;
    /** @returns a new XRSession init object with defaults */
    static getDefaultSessionInit(mode: Omit<XRSessionMode, "inline">): XRSessionInit;
    /** start a new webXR session (make sure to stop already running sessions before calling this method)
     * @param mode The XRSessionMode to start (e.g. `immersive-vr` or `immersive-ar`) or `ar` to start `immersive-ar` on supported devices OR on iOS devices it will export an interactive USDZ and open in Quicklook.
     * Get more information about WebXR modes: https://developer.mozilla.org/en-US/docs/Web/API/XRSessionMode
     * @param init The XRSessionInit to use (optional), docs: https://developer.mozilla.org/en-US/docs/Web/API/XRSessionInit
     * @param context The Needle Engine context to use
     */
    static start(mode: XRSessionMode | "ar" | "quicklook", init?: XRSessionInit, context?: Context): Promise<NeedleXRSession | null>;
    private static invokeSessionRequestStart;
    private static invokeSessionRequestEnd;
    /** Attach an already created XRSession to the engine (invoked by {@link start} after the session request succeeds) */
    static setSession(mode: XRSessionMode, session: XRSession, init: XRSessionInit, context: Context): NeedleXRSession;
    private static $_stop_request;
    /** stops the active XR session */
    static stop(): void;
    private static onEnd;
    /** The needle engine context this session was started from */
    readonly context: Context;
    /** Networked XR sync helper for this session's context (or null if unavailable) */
    get sync(): NeedleXRSync | null;
    /** Returns true if the xr session is still active */
    get running(): boolean;
    /**
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSession
     */
    readonly session: XRSession;
    /** XR Session Mode: AR or VR */
    readonly mode: XRSessionMode;
    /**
     * The XRSession interface's read-only interactionMode property describes the best space (according to the user agent) for the application to draw an interactive UI for the current session.
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSession/interactionMode
     */
    get interactionMode(): "screen-space" | "world-space";
    /**
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSession/visibilityState
     * @returns {XRVisibilityState} The visibility state of the XRSession
     */
    get visibilityState(): XRVisibilityState;
    /**
     * Check if the session is `visible-blurred` - this means e.g. the keyboard is shown
     */
    get isVisibleBlurred(): boolean;
    /**
     * Check if the session has system keyboard support
     */
    get isSystemKeyboardSupported(): boolean;
    /**
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRSession/environmentBlendMode
     */
    get environmentBlendMode(): XREnvironmentBlendMode;
    /**
     * The current XR frame
     * @link https://developer.mozilla.org/en-US/docs/Web/API/XRFrame
     */
    get frame(): NeedleXRFrame;
    /** The currently active/connected controllers */
    readonly controllers: NeedleXRController[];
    /** shorthand to query the left controller. Use `controllers` to get access to all connected controllers */
    get leftController(): NeedleXRController | undefined;
    /** shorthand to query the right controller. Use `controllers` to get access to all connected controllers */
    get rightController(): NeedleXRController | undefined;
    /** @returns the given controller if it is connected */
    getController(side: XRHandedness | number): NeedleXRController | null;
    /** Returns true if running in pass through mode in immersive AR (e.g. user is wearing a headset while in AR) */
    get isPassThrough(): boolean;
    /** True if the current session is an AR session */
    get isAR(): boolean;
    /** True if the current session is a VR session */
    get isVR(): boolean;
    /** If the AR mode is not immersive (meaning the user is e.g. holding a phone instead of wearing a AR passthrough headset) */
    get isScreenBasedAR(): boolean;
    /** The current viewer pose position (see {@link viewerPose}) */
    get posePosition(): Vector3;
    /** The current viewer pose orientation (see {@link viewerPose}) */
    get poseOrientation(): Quaternion;
    /** @returns the context.renderer.xr.getReferenceSpace() result */
    get referenceSpace(): XRSpace | null;
    /** @returns the XRFrame `XRViewerPose` using the xr `referenceSpace` */
    get viewerPose(): XRViewerPose | undefined;
    /** @returns `true` if any image is currently being tracked */
    get isTrackingImages(): boolean;
    /** The currently active XR rig */
    get rig(): IXRRig | null;
    private _rigScale;
    private _lastRigScaleUpdate;
    /** Get the XR Rig worldscale.
     *
     * **For AR**
     * If you want to modify the scale in AR at runtime get the WebARSessionRoot component via `findObjectOfType(WebARSessionRoot)` and then set the `arScale` value.
     * @returns the scale of the XR rig
     *
     */
    get rigScale(): number;
    /** add a rig to the available XR rigs - if its priority is higher than the currently active rig it will be enabled */
    addRig(rig: IXRRig): void;
    /** Remove a rig from the available XR Rigs */
    removeRig(rig: IXRRig): void;
    /** Sets a XRRig to be active which will parent the camera to this rig */
    setRigActive(rig: IXRRig): void;
    /**
     * @returns the user position in the rig space
     */
    getUserOffsetInRig(): Vector3;
    private updateActiveXRRig;
    private _rigs;
    private _viewerHitTestSource;
    /** Returns a XR hit test result (if hit-testing is available) in rig space
     * @param source If provided, the hit test will be performed for the given controller
     */
    getHitTest(source?: NeedleXRController): NeedleXRHitTestResult | null;
    private getControllerHitTest;
    private convertHitTestResult;
    /** convert a XRRigidTransform from XR session space to threejs / Needle Engine XR space */
    convertSpace(transform: XRRigidTransform): {
        position: Vector3;
        quaternion: Quaternion;
    };
    /** this is the implicitly created XR rig */
    private readonly _defaultRig;
    /** all scripts that receive some sort of XR update event */
    private readonly _xr_scripts;
    /** scripts that have onUpdateXR event methods */
    private readonly _xr_update_scripts;
    /** scripts that are in the scene but inactive (e.g. disabled parent gameObject) */
    private readonly _inactive_scripts;
    private readonly _controllerAdded;
    private readonly _controllerRemoved;
    private readonly _originalCameraWorldPosition?;
    private readonly _originalCameraWorldRotation?;
    private readonly _originalCameraWorldScale?;
    private readonly _originalCameraParent?;
    /** we store the main camera reference here each frame to make sure we have a rendering camera
     * this is e.g. the case when the XR rig with the camera gets disabled (and thus this.context.mainCamera is unassigned)
     */
    private _mainCamera;
    private constructor();
    /** called when renderer.setSession is fulfilled */
    private onRendererSessionSet;
    private onInputSourceAdded;
    /** Disconnects the controller, invokes events and notifies previous controller (if any) */
    private disconnectInputSource;
    /** End the XR Session */
    end(): void;
    private _ended;
    private readonly _newControllers;
    private onEnd;
    private _didStart;
    /** Called every frame by the engine */
    private onBefore;
    private onRenderDebug;
    private onBeforeRender;
    private onAfterRender;
    /** register a new XR script if it hasn't been added yet */
    private addScript;
    /** mark a script as inactive and invokes callbacks */
    private markInactive;
    private handleInactiveScripts;
    private readonly _script_to_remove;
    private removeScript;
    private invokeCallback_EnterXR;
    private invokeCallback_ControllerAdded;
    private invokeCallback_ControllerRemoved;
    private invokeCallback_LeaveXR;
    private syncCameraCullingMask;
    private invokeControllerEvent;
    private _camera;
    private readonly _cameraRenderParent;
    private _previousCameraParent;
    private readonly _customforward;
    private originalCameraNearPlane?;
    private requestedCameraNearPlane;
    /** This is used to have the XR system camera look into threejs Z forward direction (instead of -z) */
    private applyCustomForward;
    private revertCustomForward;
    private _viewerPose?;
    private readonly _transformOrientation;
    private readonly _transformPosition;
    private internalUpdateState;
    private _transition?;
    get transition(): SceneTransition;
    /** Call to fade rendering to black for a short moment (the returned promise will be resolved when fully black)
     * This can be used to mask scene transitions or teleportation
     * @returns a promise that is resolved when the screen is fully black
     * @example `fadeTransition().then(() => { <fully_black> })`
     */
    fadeTransition(): Promise<void>;
    /** e.g. FadeToBlack */
    private updateFade;
    private onUpdateFade_PostRender;
}

/**
 * Synchronizes XR input state (controllers / hand tracking) between networked users.
 * Obtain an instance via {@link NeedleXRSession.getXRSync}.
 */
export declare class NeedleXRSync {
    /** True if a cached XR state exists for the given user id */
    hasState(userId: string | null | undefined): boolean;
    /** Is the left controller or hand tracked */
    isTracking(userId: string | null | undefined, handedness: XRHandedness): boolean | undefined;
    /** Is it hand tracking or a controller */
    getDeviceType(userId: string, handedness: XRHandedness): XRControllerType | undefined | "unknown";
    private readonly context;
    constructor(context: Context);
    destroy(): void;
    private onJoinedRoom;
    private onLeftRoom;
    private onOtherUserJoinedRoom;
    private onOtherUserLeftRoom;
    private _states;
    onUpdate(session: NeedleXRSession): void;
    onExitXR(session: NeedleXRSession): void;
}

/** Static utility helpers for XR avatars */
export declare class NeedleXRUtils {
    /** Searches the hierarchy for objects following a specific naming scheme
     * and fills the `result` object with the found head / hand asset references */
    static tryFindAvatarObjects(obj: Object3D, sourceId: SourceIdentifier, result: {
        head?: AssetReference;
        leftHand?: AssetReference;
        rightHand?: AssetReference;
    }): void;
}

/**
 * KeyboardEvent subclass raised by the Needle Engine input system.
 * Carries the original browser event that triggered it (if any) in {@link source}.
 */
export declare class NEKeyboardEvent extends KeyboardEvent {
    /** the browser event that triggered this event (if any) */
    source?: Event;
    constructor(type: InputEvents, source: Event, init: KeyboardEventInit);
    stopImmediatePropagation(): void;
}

/**
 * Extended PointerEvent with Needle Engine-specific data.
 * Contains information about the input device, spatial data for XR, and world-space ray.
 *
 * @example Accessing event data in a component
 * ```ts
 * onPointerDown(args: PointerEventData) {
 *   const evt = args.event;
 *   console.log(`Pointer ${evt.pointerId} (${evt.pointerType})`);
 *   if (evt.isSpatial) {
 *     console.log("XR input, ray:", evt.ray);
 *   }
 * }
 * ```
 *
 * @see {@link Input} for the input management system
 * @see {@link PointerType} for available pointer types
 */
export declare class NEPointerEvent extends PointerEvent {
    /**
     * Spatial input data — optional depth coordinate complementing `clientX`/`clientY`
     * (presumably only set for spatial/XR inputs — TODO confirm)
     */
    clientZ?: number;
    /** the device index: mouse and touch are always 0, otherwise e.g. index of the connected Gamepad or XRController */
    readonly deviceIndex: number;
    /** The origin of the event contains a reference to the creator of this event.
     * This can be the Needle Engine input system or e.g. a XR controller.
     * Implement `onPointerHits` to receive the intersections of this event.
     */
    readonly origin: object & Partial<IPointerHitEventReceiver>;
    /** the browser event that triggered this event (if any) */
    readonly source: Event | null;
    /** Is the pointer event created via a touch on screen or a spatial device like a XR controller or hand tracking? */
    readonly mode: XRTargetRayMode | "transient-pointer";
    /** Returns true if the input was emitted in 3D space (and not by e.g. clicking on a 2D screen). You can use {@link mode} if you need more information about the input source */
    get isSpatial(): boolean;
    /** A ray in worldspace for the event.
     * If the ray is undefined you can also use `space.worldForward` and `space.worldPosition` */
    get ray(): Ray;
    private set ray(value);
    /** @returns true if this event has a ray. If you access the ray property a ray will automatically be created */
    get hasRay(): boolean;
    private _ray;
    /** The device space (this object is not necessarily rendered in the scene but you can access or copy the matrix)
     * E.g. you can access the input world space source position with `space.worldPosition` or world direction with `space.worldForward`
     */
    readonly space: IGameObject;
    /** true if this event is a click */
    isClick: boolean;
    /** true if this event is a double click */
    isDoubleClick: boolean;
    /** @returns `true` if the event is marked to be used (when `use()` has been called). Default: `false` */
    get used(): boolean;
    private _used;
    /** Call to mark an event to be used */
    use(): void;
    /** Identifier for this pointer event.
     * For mouse and touch this is always 0.
     * For XR input: a combination of the deviceIndex + button to uniquely identify the exact input (e.g. LeftController:Button0 = 0, RightController:Button1 = 11)
     */
    get pointerId(): number;
    private readonly _pointerid;
    /** What type of input created this event: touch, mouse, xr controller, xr hand tracking... */
    get pointerType(): PointerTypeNames;
    private readonly _pointerType;
    /**
     * The button name that raised this event (e.g. for mouse events "left", "right", "middle" or for XRTrigger "xr-standard-trigger" or "xr-standard-thumbstick")
     * Use {@link button} to get the numeric button index (e.g. 0, 1, 2...) on the controller or mouse.
     */
    readonly buttonName?: ButtonName | "none";
    /** The input that raised this event like `pointerdown` */
    get type(): InputEventNames;
    private readonly _type;
    /** metadata can be used to associate additional information with the event */
    readonly metadata: {};
    /** intersections that were generated from this event (or are associated with this event in any way) */
    readonly intersections: NEPointerEventIntersection[];
    constructor(type: InputEvents | InputEventNames, source: Event | null, init: NEPointerEventInit);
    private _immediatePropagationStopped;
    /** True after {@link stopImmediatePropagation} has been called */
    get immediatePropagationStopped(): boolean;
    private _propagationStopped;
    /** True after {@link stopPropagation} has been called */
    get propagationStopped(): boolean;
    stopImmediatePropagation(): void;
    stopPropagation(): void;
}

/** Init data used to construct a {@link NEPointerEvent} */
export declare type NEPointerEventInit = PointerEventInit & {
    /** optional depth coordinate complementing clientX/clientY (spatial input) */
    clientZ?: number;
    /** the creator of the event (e.g. the input system or a XR controller) */
    origin: object;
    pointerId: number;
    /** the index of the device */
    deviceIndex: number;
    pointerType: PointerTypeNames;
    mode: XRTargetRayMode;
    ray?: Ray;
    /** The control object for this input. In the case of spatial devices the controller,
     * otherwise a generated object in screen space. The object may not be in the scene. */
    device: IGameObject;
    buttonName: ButtonName | "none";
};

/** An intersection (raycast hit) that is potentially associated with a pointer event */
export declare type NEPointerEventIntersection = Intersection & {
    /** the pointer event this intersection belongs to (if any) */
    event?: NEPointerEvent;
};

/**
 * NestedGltf loads and instantiates a glTF file when the component starts.
 * NestedGltf components are created by the Unity exporter when nesting Objects with the GltfObject component (in Unity).
 * Use this for lazy-loading content, modular scene composition, or dynamic asset loading.
 *
 * ![](https://cloud.needle.tools/-/media/lJKrr_2tWlqRFdFc46U4bQ.gif)
 *
 * The loaded glTF is instantiated as a sibling (child of parent) by default,
 * inheriting the transform of the GameObject with this component.
 *
 * **Features:**
 * - Automatic loading on start
 * - Progress callbacks for loading UI
 * - Preloading support for faster display
 * - Event callback when loading completes
 *
 * @example Load a glTF when object becomes active
 * ```ts
 * const nested = myPlaceholder.addComponent(NestedGltf);
 * nested.filePath = new AssetReference("models/furniture.glb");
 * nested.loaded.addEventListener(({ instance }) => {
 *   console.log("Loaded:", instance.name);
 * });
 * ```
 *
 * @example Preload for instant display
 * ```ts
 * // Preload during loading screen
 * await nested.preload();
 * // Later, when object becomes active, it displays instantly
 * ```
 *
 * @summary Loads and instantiates a nested glTF file
 * @category Asset Management
 * @group Components
 * @see {@link AssetReference} for asset loading utilities
 * @see {@link SceneSwitcher} for scene-level loading
 * @link https://engine.needle.tools/samples/hotspots
 */
export declare class NestedGltf extends Component {
    /** Reference to the glTF file to load. Can be a URL or asset path. */
    filePath?: AssetReference;
    /**
     * Event fired when the glTF has been loaded and instantiated.
     * Provides the component, loaded instance, and asset reference.
     */
    loaded: EventList<{
        component: NestedGltf;
        instance: any;
        asset: AssetReference;
    }>;
    /**
     * EXPERIMENTAL for cloud asset loading
     */
    loadAssetInParent: boolean;
    /** true once loading has started or completed — presumably prevents duplicate loads (TODO confirm) */
    private _isLoadingOrDoneLoading;
    /** Register a callback that will be called when the progress of the loading changes */
    listenToProgress(evt: ProgressCallback): void;
    /** Begin loading the referenced gltf file in filePath */
    preload(): Promise<ArrayBufferLike | null> | null;
    /* Excluded from this release type: start */
    /* Excluded from this release type: onDestroy */
    private hash;
}

/** Available tone mapping modes */
declare enum NEToneMappingMode {
    None = 0,
    Neutral = 1,// Neutral tonemapper, close to Reinhard
    ACES = 2,// ACES Filmic reference tonemapper (custom approximation)
    AgX = 3,// AgX Filmic tonemapper
    KhronosNeutral = 4
}

/** Union of the {@link NEToneMappingMode} member names (e.g. "None" | "ACES" | ...) */
declare type NEToneMappingModeNames = keyof typeof NEToneMappingMode;

/**
 * Main class for multiuser networking. Access via `this.context.connection` from any component.
 *
 * **About GUIDs:**
 * In Needle Engine networking, GUIDs (Globally Unique Identifiers) are used to identify objects and components across the network.
 * Every GameObject and Component has a unique `guid` property that remains consistent across all clients.
 * GUIDs are automatically assigned (e.g. during export from Unity/Blender) and are essential for:
 * - Object ownership management (see {@link OwnershipModel})
 * - State synchronization (storing and retrieving object state)
 * - Identifying which object received a network message
 *
 * When working with networking, you'll typically use `this.guid` to identify your component or `this.gameObject.guid` for the GameObject.
 *
 * @example Joining a room
 * ```ts
 * this.context.connection.connect();
 * this.context.connection.joinRoom("my-room");
 * ```
 * @example Listening to events
 * ```ts
 * this.context.connection.beginListen("my-event", (data) => {
 *   console.log("Received:", data);
 * });
 * ```
 * @example Sending data
 * ```ts
 * this.context.connection.send("my-event", { message: "Hello" });
 * ```
 * @example Using GUIDs for object identification
 * ```ts
 * // Get state for a specific object by its GUID
 * const state = this.context.connection.tryGetState(this.guid);
 *
 * // Delete remote state for an object
 * this.context.connection.sendDeleteRemoteState(this.guid);
 * ```
 * @see {@link RoomEvents} for room lifecycle events
 * @see {@link OwnershipModel} for object ownership
 * @link https://engine.needle.tools/docs/how-to-guides/networking/
 * @category Networking
 */
export declare class NetworkConnection implements INetworkConnection {
    private context;
    private _peer;
    constructor(context: Context);
    /** Experimental: networking via peerjs */
    get peer(): PeerNetworking;
    /**
     * Returns the cached network state for a given GUID.
     * The state is stored locally whenever network updates are received for that object.
     * @param guid The unique identifier of the object whose state you want to retrieve
     * @returns The cached state object, or `null` if no state exists for this GUID
     * @example
     * ```ts
     * // Get the last known state for this component
     * const myState = this.context.connection.tryGetState(this.guid);
     * if (myState) {
     *   console.log("Found cached state:", myState);
     * }
     * ```
     */
    tryGetState(guid: string): IModel | null;
    /** The connection id of the local user - it is given by the networking backend and can not be changed */
    get connectionId(): string | null;
    /** Returns true if the networking backend is in debug mode.
     * To see all networking messages in the console use `?debugnet` in the url
     */
    get isDebugEnabled(): boolean;
    /**
     * Checks if Needle Engine networking is connected to a websocket. Note that this is **not equal** to being connected to a *room*. If you want to check if Needle Engine is connected to a networking room use the `{@link isInRoom}` property.
     * @returns true if connected to the websocket.
     */
    get isConnected(): boolean;
    /** The name of the room the user is currently connected to */
    get currentRoomName(): string | null;
    /** True when connected to a room via a regular url, otherwise (when using a view only url) false indicating that the user should not be able to modify the scene */
    get allowEditing(): boolean;
    /**
     * The view id of the room the user is currently connected to.
     */
    get currentRoomViewId(): string | null;
    /**
     * Returns a url that can be shared with others to view the current room in view only mode.
     * This is useful for sharing a room with others without allowing them to modify the scene.
     * Use `connection.allowEditing` to check if the current room is in view only mode.
     */
    getViewOnlyUrl(): string | null;
    /** True if connected to a networked room. Use the joinRoom function or a `SyncedRoom` component */
    get isInRoom(): boolean;
    /** Latency to currently connected backend server */
    get currentLatency(): number;
    /**
     * The current server url that the networking backend is connected to (e.g. the url of the websocket server)
     */
    get currentServerUrl(): string | null;
    /** A ping is sent to the server at a regular interval while the browser tab is active. This method can be used to send additional ping messages when needed so that the user doesn't get disconnected from the networking backend */
    sendPing(): void;
    /** Returns true if a user with the given connectionId is in the room */
    userIsInRoom(id: string): boolean;
    private _usersInRoomCopy;
    /** Returns a list of all user ids in the current room */
    usersInRoom(target?: string[] | null): string[];
    /** Joins a networked room. If you don't want to manage a connection yourself you can use a `{@link SyncedRoom}` component as well */
    joinRoom(room: string, viewOnly?: boolean): boolean;
    /** Use to leave a room that you are currently connected to (use `leaveRoom()` to disconnect from the currently active room but you can also specify a room name) */
    leaveRoom(room?: string | null): boolean;
    /** Send a message to the networking backend - it will be broadcasted to all connected users (except yourself) in the same room by default */
    send<K extends NetworkEventKey>(key: K, data?: (K extends keyof NetworkEventMap ? NetworkEventData<K> : WebsocketSendType) | null, queue?: SendQueue): void;
    /**
     * Deletes the network state for a specific object on the server.
     * This removes the object's state from the room, preventing it from being sent to newly joining users.
     * @param guid The unique identifier of the object whose state should be deleted
     * @example
     * ```ts
     * // When destroying a networked object, clean up its server state
     * onDestroy() {
     *   this.context.connection.sendDeleteRemoteState(this.guid);
     * }
     * ```
     */
    sendDeleteRemoteState(guid: string): void;
    /** Use to delete all state in the currently connected room on the server */
    sendDeleteRemoteStateAll(): void;
    /** Send a binary message to the server (broadcasted to all connected users) */
    sendBinary(bin: Uint8Array): void;
    private _defaultMessagesBuffer;
    private _defaultMessagesBufferArray;
    /** Immediately sends messages that were queued for buffered delivery (see the `queue` parameter of {@link send}) */
    sendBufferedMessagesNow(): void;
    /** Use to start listening to networking events.
     * To unsubscribe from events use the `{@link stopListen}` method.
     *
     * @example Custom event example
     * ```ts
     * // Listen to a custom event sent by the server
     * this.context.connection.beginListen<MyDataType>("my-custom-event", (data) => {
     *   console.log("Received custom event:", data);
     * });
     * ```
     *
     * @example Listening to room events
     * ```ts
     * // Make sure to unsubscribe from events when the component is disabled
     * export class MyComponent extends Behaviour {
     *   onEnable() {
     *     this.connection.beginListen("joined-room", this.onJoinedRoom)
     *   }
     *   onDisable() {
     *     this.connection.stopListen("joined-room", this.onJoinedRoom)
     *   }
     *   onJoinedRoom = () => {
     *      console.log("I joined a networked room")
     *   }
     * }
     * ```
     * @link https://engine.needle.tools/docs/networking.html
     *
     */
    beginListen<K extends NetworkEventKey>(key: K, callback: K extends keyof NetworkEventMap ? NetworkEventMap[K] : (...args: any[]) => void): K extends keyof NetworkEventMap ? NetworkEventMap[K] : (...args: any[]) => void;
    /**@deprecated please use stopListen instead (2.65.2-pre) */
    stopListening<K extends NetworkEventKey>(key: K, callback: (K extends keyof NetworkEventMap ? NetworkEventMap[K] : (...args: any[]) => void) | null): void;
    /** Use to stop listening to networking events
     * To subscribe to events use the `{@link beginListen}` method.
     * See the example below for typical usage:
     *
     * ### Component Example
     * ```ts
     * // Make sure to unsubscribe from events when the component is disabled
     * export class MyComponent extends Behaviour {
     *   onEnable() {
     *     this.connection.beginListen("joined-room", this.onJoinedRoom)
     *   }
     *   onDisable() {
     *     this.connection.stopListen("joined-room", this.onJoinedRoom)
     *   }
     *   onJoinedRoom = () => {
     *      console.log("I joined a networked room")
     *   }
     * }
     * ```
     */
    stopListen<K extends NetworkEventKey>(key: K, callback: (K extends keyof NetworkEventMap ? NetworkEventMap[K] : (...args: any[]) => void) | null): void;
    /** Use to start listening to networking binary events */
    beginListenBinary(identifier: string, callback: BinaryCallback): BinaryCallback;
    /** Use to stop listening to networking binary events */
    stopListenBinary(identifier: string, callback: any): void;
    private netWebSocketUrlProvider?;
    /** Use to override the networking server backend url.
     * This is what the `{@link Networking}` component uses to modify the backend url.
     **/
    registerProvider(prov: INetworkingWebsocketUrlProvider): void;
    /** Used to connect to the networking server
     * @param url Optional url to connect to. If not provided, it will use the url from the registered `INetworkingWebsocketUrlProvider` or the default backend networking url. If you want to change the url after connecting, you need to disconnect first and then connect again with the new url.
     */
    connect(url?: string): Promise<boolean>;
    /** Disconnect from the networking backend + reset internal state */
    disconnect(): void;
    private _listeners;
    private _listenersBinary;
    private connected;
    private channelId;
    private _connectionId;
    private _ws;
    private _waitingForSocket;
    private _isInRoom;
    private _currentRoomName;
    private _currentRoomViewId;
    private _currentRoomAllowEditing;
    private _currentInRoom;
    private _state;
    private _currentDelay;
    private _connectingToWebsocketPromise;
    private connectWebsocket;
    private onMessage;
    private handleIncomingBinaryMessage;
    private handleIncomingStringMessage;
    private toMessage;
    private sendWithWebsocket;
    private onSendQueued;
}

/** Events dispatched by {@link NetworkedStreams} while peers connect, stream media, and disconnect. */
export declare enum NetworkedStreamEvents {
    /** A remote peer user connected */
    Connected = "peer-user-connected",
    /** A media stream was received from a remote peer */
    StreamReceived = "receive-stream",
    /** An active call/stream has ended */
    StreamEnded = "call-ended",
    /** A remote peer user disconnected */
    Disconnected = "peer-user-disconnected",
    /** A user joined the networked room */
    UserJoined = "user-joined"
}

/**
 * This class is responsible for managing the sending and receiving of streams between peers.
 */
export declare class NetworkedStreams extends EventDispatcher<any> {
    /**
     * Create a new NetworkedStreams instance
     * @param comp The component that owns this streams instance
     * @param guid Optional guid used to identify the peer connection
     */
    static create(comp: IComponent, guid?: string): NetworkedStreams;
    private readonly context;
    private readonly peer;
    private _sendingStreams;
    /**
     * If true, will log debug information
     */
    debug: boolean;
    /** Create for a component context */
    constructor(context: IComponent);
    /** Create for a context with an explicit peer guid */
    constructor(context: Context, guid: string);
    /** Create for a context with an existing {@link PeerHandle} */
    constructor(context: Context, peer: PeerHandle);
    /** Start sending the given media stream to peers */
    startSendingStream(stream: MediaStream): void;
    /** Stop sending the given media stream
     * @param _stream The stream to stop sending (may be null/undefined)
     */
    stopSendingStream(_stream: MediaStream | undefined | null): void;
    private _enabled;
    /** Whether this instance is currently enabled (see {@link enable} / {@link disable}) */
    get enabled(): boolean;
    /** Enable sending/receiving of streams */
    enable(): void;
    /** Disable sending/receiving of streams */
    disable(): void;
    private _tickIntervalId?;
    private tick;
    private onJoinedRoom;
    /** This is when the local user leaves the room */
    private onLeftRoom;
    private onCallStreamReceived;
    private onCallEnded;
    private onUserConnected;
    private onUserLeft;
    private updateSendingCalls;
    private stopCallsToUsersThatAreNotInTheRoomAnymore;
    private debugLogCurrentState;
}

/** Extracts the first parameter type from a callback in {@link NetworkEventMap}.
 * E.g. `NetworkEventData<"joined-room">` resolves to {@link JoinedRoomResponse}. */
declare type NetworkEventData<K extends keyof NetworkEventMap> = Parameters<NetworkEventMap[K]>[0];

/** All known networking event keys. Includes string enum values from {@link ConnectionEvents}, {@link RoomEvents}, and {@link OwnershipEvent}.
 * The trailing `(string & {})` keeps editor autocompletion for the known keys while still allowing arbitrary custom event strings. */
export declare type NetworkEventKey = `${ConnectionEvents}` | `${RoomEvents}` | `${OwnershipEvent}` | (string & {});

/** Maps known networking event keys to their callback signatures.
 * Used by {@link NetworkConnection.beginListen} and {@link NetworkConnection.stopListen} for type-safe event handling.
 */
export declare interface NetworkEventMap {
    /* Connection lifecycle (see {@link ConnectionEvents}) */
    "connection-start-info": (data: IConnectionData) => void;
    /* Room lifecycle (see {@link RoomEvents}) */
    "join-room": (data: {
        room: string;
        viewOnly: boolean;
    }) => void;
    "leave-room": (data: {
        room: string;
    }) => void;
    "joined-room": (response: JoinedRoomResponse) => void;
    "left-room": (response: LeftRoomResponse) => void;
    "user-joined-room": (user: UserJoinedOrLeftRoomModel) => void;
    "user-left-room": (user: UserJoinedOrLeftRoomModel) => void;
    "room-state-sent": () => void;
    /* Ownership (see {@link OwnershipEvent}) */
    "request-has-owner": (data: {
        guid: string;
    }) => void;
    "response-has-owner": (response: OwnershipResponse) => void;
    "request-is-owner": (data: {
        guid: string;
    }) => void;
    "response-is-owner": (response: OwnershipResponse) => void;
    "request-ownership": (data: {
        guid: string;
    }) => void;
    "gained-ownership": (response: GainedOwnershipBroadcastResponse) => void;
    "remove-ownership": (data: {
        guid: string;
    }) => void;
    "lost-ownership": (guid: string) => void;
    "gained-ownership-broadcast": (response: GainedOwnershipBroadcastResponse) => void;
    "lost-ownership-broadcast": (response: LostOwnershipBroadcastResponse) => void;
}

/**
 * Provides websocket URL configuration for the {@link NetworkConnection | built-in networking system}.
 * Add this component to override the default networking backend URL used by {@link NetworkConnection} (`this.context.connection`).
 *
 * The component registers itself as a URL provider on `awake()`. When the networking system connects,
 * it queries this provider for the websocket URL to use instead of the default Needle networking backend.
 *
 * **URL resolution order:**
 * 1. If `urlParameterName` is set and the corresponding URL parameter exists in the browser URL, that value is used
 * 2. If running on a local network and `localhost` is set, the `localhost` URL is used
 * 3. Otherwise, the `url` field is used
 *
 * Without this component, the default backend URL `wss://networking-2.needle.tools/socket` is used.
 *
 * **Note:** This component only configures the websocket URL. To actually join a networked room,
 * use a `SyncedRoom` component or call `this.context.connection.joinRoom("room-name")` directly.
 *
 * @example Overriding the URL via browser parameter
 * ```ts
 * // With urlParameterName="server", visiting:
 * // https://myapp.com/?server=wss://my-server.com/socket
 * // will connect to that server instead
 * ```
 *
 * @see {@link NetworkConnection} for the main networking API (`this.context.connection`)
 * @see {@link SyncedRoom} for automatic room joining
 * @see {@link OwnershipModel} for networked object ownership
 * @see {@link RoomEvents} for room lifecycle events
 * @see {@link isLocalNetwork} for local network detection
 * @link https://engine.needle.tools/docs/how-to-guides/networking/
 * @summary Networking configuration
 * @category Networking
 * @group Components
 */
export declare class Networking extends Component implements INetworkingWebsocketUrlProvider {
    /**
     * The websocket URL to connect to for networking functionality.
     * Can be a complete URL or a relative path that will be resolved against the current origin.
     * @default null
     */
    url: string | null;
    /**
     * Name of the URL parameter that can override the websocket connection URL.
     * When set, the URL will be overridden by the parameter value from the browser URL.
     * For example, with `urlParameterName="ws"`, adding `?ws=ws://localhost:8080` to the browser URL will override the connection URL.
     * Takes precedence over both `url` and `localhost` (see the URL resolution order in the class description).
     */
    urlParameterName: string | null;
    /**
     * Alternative URL to use when running on a local network.
     * This is particularly useful for development, when the server is running on the same machine as the client.
     * Only used when a local network is detected (see {@link Networking.IsLocalNetwork}).
     */
    localhost: string | null;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: getWebsocketUrl */
    /**
     * Processes a URL string applying various transformations based on network environment.
     * Handles relative paths and localhost fallbacks for local network environments.
     * @param url The original URL to process
     * @param localhostFallback Alternative URL to use when on a local network
     * @returns The processed URL string or null/undefined if input was invalid
     */
    static GetUrl(url: string | null | undefined, localhostFallback?: string | null): string | null | undefined;
    /**
     * Determines if the current connection is on a local network.
     * Useful for applying different networking configurations in local development environments.
     * This is the same as calling {@link isLocalNetwork}.
     * @param hostname Optional hostname to check instead of the current window location
     * @returns True if the connection is on a local network, false otherwise
     */
    static IsLocalNetwork(hostname?: string): boolean;
}

/** Networking model describing a newly instantiated object (see {@link IModel}). */
export declare class NewInstanceModel implements IModel {
    /** Guid assigned to the new instance */
    guid: string;
    /** Guid of the source object this instance was created from */
    originalGuid: string;
    /** Optional seed — NOTE(review): presumably for deterministic instantiation, confirm against implementation */
    seed: number | undefined;
    /** Initial visibility of the new instance */
    visible: boolean | undefined;
    hostData: HostData | undefined;
    /** When true, this state is not persisted — NOTE(review): confirm against server handling */
    dontSave?: boolean | undefined;
    /** NOTE(review): presumably the guid of the parent object — confirm */
    parent: string | undefined;
    /** Initial local position */
    position: {
        x: number;
        y: number;
        z: number;
    } | undefined;
    /** Initial local rotation (quaternion) */
    rotation: {
        x: number;
        y: number;
        z: number;
        w: number;
    } | undefined;
    /** Initial local scale */
    scale: {
        x: number;
        y: number;
        z: number;
    } | undefined;
    /** Set to true to prevent this model from being instantiated */
    preventCreation?: boolean;
    /**
     * When set this will delete the server state when the user disconnects
     */
    deleteStateOnDisconnect?: boolean | undefined;
    /**
     * @param originalGuid Guid of the source object
     * @param newGuid Guid to assign to the new instance
     */
    constructor(originalGuid: string, newGuid: string);
}

/** Maps node ids to their corresponding {@link Object3D} instances */
declare type NodeToObjectMap = {
    [nodeId: string]: Object3D;
};

declare type NoInternalNeedleEngineState<T> = Omit<T, "destroyed" | "gameObject" | "activeAndEnabled" | "context" | "isComponent" | "scene" | "up" | "forward" | "right" | "worldRotation" | "worldEuler" | "worldPosition" | "worldQuaternion">;

/** Removes all properties whose names start with an underscore (see {@link FilterStartingWith}) */
declare type NoInternals<T> = FilterStartingWith<T, "_">;

/**
 * Noise module settings for the particle system (strength, remap, scrolling, per-axis control, quality).
 * `update` is called with the engine {@link Context} to advance the internal noise time;
 * `apply` is the per-particle callback (nebula-compatible, see the marker comment below).
 * NOTE(review): `apply` presumably mutates `pos`/`vel` in place — confirm against the implementation.
 */
export declare class NoiseModule {
    damping: boolean;
    enabled: boolean;
    frequency: number;
    octaveCount: number;
    octaveMultiplier: number;
    octaveScale: number;
    positionAmount: MinMaxCurve;
    quality: number;
    remap: MinMaxCurve;
    remapEnabled: boolean;
    remapMultiplier: number;
    remapX: MinMaxCurve;
    remapXMultiplier: number;
    remapY: MinMaxCurve;
    remapYMultiplier: number;
    remapZ: MinMaxCurve;
    remapZMultiplier: number;
    scrollSpeedMultiplier: number;
    separateAxes: boolean;
    strengthMultiplier: number;
    strengthX: MinMaxCurve;
    strengthXMultiplier: number;
    strengthY: MinMaxCurve;
    strengthYMultiplier: number;
    strengthZ: MinMaxCurve;
    strengthZMultiplier: number;
    private _noise?;
    private _time;
    /** Advances the internal noise time for the current frame */
    update(context: Context): void;
    /** nebula implementations: */
    private _temp;
    apply(_index: number, pos: Vec3, vel: Vec3, _deltaTime: number, age: number, life: number): void;
}

/**
 * Utility type that extracts only non-function property names from a type.
 * Resolves to a union of the keys of T whose value types are not functions.
 * @template T The type to extract property names from
 */
declare type NonFunctionPropertyNames<T> = {
    [K in keyof T]: T[K] extends Function ? never : K;
}[keyof T];

/** Removes all properties whose types are functions, `undefined`, or `null` (see {@link FilterTypes}) */
declare type NoUndefinedNoFunctions<T> = FilterTypes<T, Function | undefined | null>;

/** Result of loading an OBJ file: animation clips plus the loaded scene object(s) */
export declare type OBJ = {
    animations: AnimationClip[];
    scene: Object3D;
    scenes: Object3D[];
};

/** Pair associating an original object with its clone */
declare type ObjectCloneReference = {
    readonly original: object;
    readonly clone: object;
};

/** Serialized reference to an object: a node index and/or a guid (see {@link ObjectSerializer}) */
declare type ObjectData = {
    node?: number;
    guid?: string;
};

/**
 * Options to create an object. Used by {@link ObjectUtils.createPrimitive}
 */
export declare type ObjectOptions = {
    /**
     * The parent object to add the created object to
     */
    parent?: Object3D;
    /**
     * The name of the object
     */
    name?: string;
    /** The material to apply to the object */
    material?: Material;
    /** The color of the object. This color will only be used if no material is provided */
    color?: ColorRepresentation;
    /** The texture will be applied to the material's main texture slot e.g. `material.map` if any is passed in */
    texture?: Texture;
    /**
     * The position of the object in local space
     */
    position?: Partial<Vec3> | [number, number, number];
    /** The rotation of the object in local space */
    rotation?: Partial<Vec3> | [number, number, number];
    /**
     * The scale of the object in local space. A single number applies uniform scale.
     */
    scale?: Partial<Vec3> | number | [number, number, number];
    /**
     * If the object should receive shadows
     * @default true
     */
    receiveShadow?: boolean;
    /**
     * If the object should cast shadows
     * @default true
     */
    castShadow?: boolean;
};

/**
 * ObjectRaycaster enables pointer interactions with 3D objects.
 * Add this component to any object that needs click/hover detection.
 *
 * **Usage:**
 * Objects with ObjectRaycaster will receive pointer events when
 * they implement interfaces like {@link IPointerClickHandler}.
 *
 * **Note:**
 * In older Needle Engine versions the ObjectRaycaster was required to be added to the Scene.
 * This is no longer the case - the EventSystem will automatically handle raycasts.
 *
 *
 * @category Interactivity
 * @group Components
 * @see {@link IPointerClickHandler} for click events
 * @see {@link DragControls} for drag interactions
 */
export declare class ObjectRaycaster extends Raycaster_2 {
    private targets;
    private raycastHits;
    /** When enabled, skinned meshes are ignored during raycasting.
     * NOTE(review): presumably a performance/accuracy option — confirm the default in the implementation. */
    ignoreSkinnedMeshes: boolean;
    start(): void;
    /** Performs a raycast against this raycaster's targets
     * @param opts Optional raycast options
     * @returns The intersections, or null
     */
    performRaycast(opts?: IRaycastOptions | RaycastOptions | null): Intersection[] | null;
}

/** Serializes {@link Object3D}/{@link Component} references as `{ node }` data and resolves node/guid data back to instances */
declare class ObjectSerializer extends TypeSerializer {
    constructor();
    /** @returns a `{ node }` reference for the given data, or undefined if it can not be serialized */
    onSerialize(data: any, context: SerializationContext): {
        node: number;
    } | undefined;
    /** Resolves serialized object data (or a guid string) back to an object or component */
    onDeserialize(data: ObjectData | string | null, context: SerializationContext): Object3D<Object3DEventMap> | Component | null | undefined;
}

export declare const objectSerializer: ObjectSerializer;

/** Maps object uuids to their node indices */
declare type ObjectToNodeMap = {
    [uuid: string]: number;
};

/**
 * Utility class to create primitive objects
 * @example
 * ```typescript
 * const cube = ObjectUtils.createPrimitive("Cube", { name: "Cube", position: { x: 0, y: 0, z: 0 } });
 * ```
 */
export declare class ObjectUtils {
    #private;
    /**
     * Creates a 3D text object
     * @param text The text to display
     * @param opts Options to create the object
     * @returns The created text mesh
     */
    static createText(text: string, opts?: TextOptions): Mesh;
    /**
     * Creates an occluder object that only render depth but not color
     * @param type The type of primitive to create
     * @returns The created object
     */
    static createOccluder(type: PrimitiveTypeNames): Mesh;
    /** Creates a primitive object like a Cube or Sphere.
     * The `"ShaderBall"` type returns a {@link Group}; all other primitive types return a {@link Mesh}.
     * @param type The type of primitive to create
     * @param opts Options to create the object
     * @returns The created object
     */
    static createPrimitive(type: "ShaderBall", opts?: ObjectOptions): Group;
    static createPrimitive(type: PrimitiveType | PrimitiveTypeNames, opts?: ObjectOptions): Mesh;
    /**
     * Creates a Sprite object
     * @param opts Options to create the object (the `material` option is not applicable)
     * @returns The created object
     */
    static createSprite(opts?: Omit<ObjectOptions, "material">): Sprite_2;
    /** Applies shared {@link ObjectOptions} to a newly created object */
    private static applyDefaultObjectOptions;
}

/* Excluded from this release type: OffscreenCanvasExt */

/**
 * The [OffsetConstraint](https://engine.needle.tools/docs/api/OffsetConstraint) maintains a fixed positional and rotational offset relative to a target object.
 * Useful for attaching objects to moving targets while preserving a specific spatial relationship.
 *
 * **Use cases:**
 * - Camera following a player with offset
 * - UI elements attached to characters
 * - Weapons attached to hands
 * - Objects orbiting around a target
 *
 * **Options:**
 * - `affectPosition` - Apply position offset
 * - `affectRotation` - Apply rotation offset
 * - `alignLookDirection` - Make object face same direction as target
 * - `levelLookDirection` - Keep look direction horizontal (ignore pitch)
 * - `levelPosition` - Project position onto horizontal plane
 * - `referenceSpace` - Transform offset in this object's coordinate space
 *
 * @example Attach camera offset to player
 * ```ts
 * const constraint = camera.addComponent(OffsetConstraint);
 * // Configure via serialized properties in editor
 * ```
 *
 * @summary Maintains positional/rotational offset relative to target
 * @category Constraints
 * @group Components
 * @see {@link SmoothFollow} for smoothed following
 * @see {@link AlignmentConstraint} for alignment between two objects
 */
export declare class OffsetConstraint extends Component {
    /** Offsets are transformed in this object's coordinate space (see class docs) */
    private referenceSpace;
    /** NOTE(review): presumably the target object the offset is measured from — confirm */
    private from;
    /** Apply the position offset */
    private affectPosition;
    /** Apply the rotation offset */
    private affectRotation;
    /** Make the object face the same direction as the target */
    private alignLookDirection;
    /** Keep the look direction horizontal (ignore pitch) */
    private levelLookDirection;
    /** Project the position onto the horizontal plane */
    private levelPosition;
    private positionOffset;
    private rotationOffset;
    private offset;
    /** Applies the configured offset every frame */
    update(): void;
}

/**
 * Remove a listener for when an XR session ends
 * @param fn The function to remove from the listeners
 * @see {@link onXRSessionEnd} to add the listener
 * @example
 * ```js
 * const myFunction = (evt) => {
 *  console.log("XR session ended", evt);
 * };
 * onXRSessionEnd(myFunction);
 * offXRSessionEnd(myFunction);
 * ```
 */
export declare function offXRSessionEnd(fn: (evt: XRSessionEventArgs) => void): void;

/**
 * Remove a listener for when an XR session starts
 * @param fn The function to remove from the listeners
 * @see {@link onXRSessionStart} to add the listener
 * @example
 * ```js
 * const myFunction = (evt) => {
 *  console.log("XR session started", evt);
 * };
 * onXRSessionStart(myFunction);
 * offXRSessionStart(myFunction);
 * ```
 */
export declare function offXRSessionStart(fn: (evt: XRSessionEventArgs) => void): void;

/**
 * Register a callback in the engine onAfterRender event
 * This is called every frame after the main camera has rendered
 * @param cb The callback to be called
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 * @example
 * ```ts
 * const unsubscribe = onAfterRender((ctx : Context) => {
 *    // do something...
 *    console.log("After render");
 *    // if you want to unsubscribe after the first call:
 *    unsubscribe();
 * });
 * ```
 */
export declare function onAfterRender(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/** Register a callback in the engine onBeforeRender event
 * This is called every frame before the main camera renders
 * @param cb The callback to be called
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 * @example
 * ```ts
 * onBeforeRender((ctx : Context) => {
 *     // do something
 * });
 * ```
 * */
export declare function onBeforeRender(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/**
 * Register a callback before the engine context is cleared.
 * This happens if e.g. `<needle-engine src>` changes
 * @param cb The callback to be called
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 */
export declare function onClear(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/**
 * Register a callback in the engine before the context is destroyed
 * This happens once per context (before the context is destroyed)
 * @param cb The callback to be called
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 */
export declare function onDestroy(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/**
 * [OneEuroFilter](https://engine.needle.tools/docs/api/OneEuroFilter) is a low-pass filter designed to reduce jitter in noisy signals while maintaining low latency.
 * It's particularly useful for smoothing tracking data from XR controllers, hand tracking, or other input devices where the signal contains noise but responsiveness is important.
 *
 * The filter automatically adapts its smoothing strength based on the signal's velocity:
 * - When the signal moves slowly, it applies strong smoothing to reduce jitter
 * - When the signal moves quickly, it reduces smoothing to maintain responsiveness
 *
 * Based on the research paper: [1€ Filter: A Simple Speed-based Low-pass Filter for Noisy Input](http://cristal.univ-lille.fr/~casiez/1euro/)
 *
 * @example Basic usage with timestamp
 * ```ts
 * const filter = new OneEuroFilter(120, 1.0, 0.0);
 *
 * // In your update loop:
 * const smoothedValue = filter.filter(noisyValue, this.context.time.time);
 * ```
 *
 * @example Without timestamps (using frequency estimate)
 * ```ts
 * // Assuming 60 FPS update rate
 * const filter = new OneEuroFilter(60, 1.0, 0.5);
 *
 * // Call without timestamp - uses the frequency estimate
 * const smoothedValue = filter.filter(noisyValue);
 * ```
 *
 * @example Smoothing 3D positions
 * ```ts
 * const posFilter = new OneEuroFilterXYZ(90, 0.5, 0.0);
 *
 * posFilter.filter(trackedPosition, smoothedPosition, this.context.time.time);
 * ```
 *
 * @see {@link OneEuroFilterXYZ} for filtering 3D vectors
 */
export declare class OneEuroFilter {
    /**
     * An estimate of the frequency in Hz of the signal (> 0), if timestamps are not available.
     */
    freq: number;
    /**
     * Min cutoff frequency in Hz (> 0). Lower values allow to remove more jitter.
     */
    minCutOff: number;
    /**
     * Parameter to reduce latency (> 0). Higher values make the filter react faster to changes.
     */
    beta: number;
    /**
     * Used to filter the derivates. 1 Hz by default. Change this parameter if you know what you are doing.
     */
    dCutOff: number;
    /**
     * The low-pass filter for the signal.
     */
    x: LowPassFilter;
    /**
     * The low-pass filter for the derivates.
     */
    dx: LowPassFilter;
    /**
     * The last time the filter was called.
     */
    lasttime: number | null;
    /** Create a new OneEuroFilter
     * @param freq - An estimate of the frequency in Hz of the signal (> 0), if timestamps are not available.
     * @param minCutOff - Min cutoff frequency in Hz (> 0). Lower values allow to remove more jitter.
     * @param beta - Parameter to reduce latency (> 0). Higher values make the filter react faster to changes.
     * @param dCutOff - Used to filter the derivates. 1 Hz by default. Change this parameter if you know what you are doing.
     */
    constructor(freq: number, minCutOff?: number, beta?: number, dCutOff?: number);
    /** Computes the low-pass smoothing factor for the given cutoff frequency (see the 1€ filter paper linked in the class docs) */
    alpha(cutOff: number): number;
    /** Filter your value: call with your value and the current timestamp (e.g. from this.context.time.time)
     * @param x The noisy input value
     * @param time Timestamp; when omitted/null the `freq` estimate is used instead
     * @returns The filtered (smoothed) value
     */
    filter(x: number, time?: number | null): number;
    /** Resets the filter state, optionally re-initializing with the given value */
    reset(x?: number): void;
}

/**
 * Applies a {@link OneEuroFilter} independently to the x, y and z components of a 3D vector.
 * See {@link OneEuroFilter} for details on the filter parameters.
 */
export declare class OneEuroFilterXYZ {
    /** Filter for the x component */
    readonly x: OneEuroFilter;
    /** Filter for the y component */
    readonly y: OneEuroFilter;
    /** Filter for the z component */
    readonly z: OneEuroFilter;
    /** Create a new OneEuroFilter
     * @param freq - An estimate of the frequency in Hz of the signal (> 0), if timestamps are not available.
     * @param minCutOff - Min cutoff frequency in Hz (> 0). Lower values allow to remove more jitter.
     * @param beta - Parameter to reduce latency (> 0). Higher values make the filter react faster to changes.
     * @param dCutOff - Used to filter the derivates. 1 Hz by default. Change this parameter if you know what you are doing.
     */
    constructor(freq: number, minCutOff?: number, beta?: number, dCutOff?: number);
    /** Filters `value` and writes the smoothed result into `target`
     * @param value The noisy input vector
     * @param target Receives the filtered result
     * @param time Optional timestamp (e.g. from `this.context.time.time`)
     */
    filter(value: Vec3, target: Vec3, time?: number | null): void;
    /** Resets all three filters, optionally re-initializing with the given value */
    reset(value?: Vec3): void;
}

/**
 * Callback type for glTF export plugins. See {@link INeedleGLTFExtensionPlugin.onExport}
 * Receives the active {@link GLTFExporter} and the engine {@link Context}.
 */
export declare type OnExportCallback = (exporter: GLTFExporter, context: Context) => void;

/**
 * Callback type for glTF import plugins. See {@link INeedleGLTFExtensionPlugin.onImport}
 * Receives the active {@link GLTFLoader}, the url being loaded and the engine {@link Context}.
 */
export declare type OnImportCallback = (loader: GLTFLoader, url: string, context: Context) => void;

/**
 * Register a callback in the engine context created event.
 * This happens once per context (after the context has been created and the first content has been loaded)
 * @param cb The callback to be called
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 * @example
 * ```ts
 * onInitialized((ctx : Context) => {
 *     // do something
 * });
 * ```
 * */
export declare function onInitialized(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/** Event payload describing pointer raycast hits (see {@link OnPointerHitsEvent}) */
declare type OnPointerHitEvent = {
    /** The object that raised the event */
    sender: object;
    /** The pointer event that invoked the event */
    event: NEPointerEvent;
    /** The intersections that were generated from this event (or are associated with this event in any way) */
    hits: Intersection[];
};

declare type OnPointerHitsEvent = (args: OnPointerHitEvent) => void;

/** threejs callback event signature (matches the three.js `Object3D.onBeforeRender`/`onAfterRender` parameters) */
export declare type OnRenderCallback = (renderer: WebGLRenderer, scene: Scene, camera: Camera_2, geometry: BufferGeometry, material: Material, group: Group) => void;

/** Register a callback in the engine start event.
 * This happens once at the beginning of a frame
 * (e.g. once when the method is registered, after <needle-engine> is created or the src has changed)
 * @param cb The callback to be called. Optionally return a function that will be called when the onStart callback is removed again
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 * @example
 * ```ts
 * onStart((ctx : Context) => {
 *     // do something
 *     console.log("Needle Engine: onStart registered")
 *     // optional to cleanup:
 *     return () => { console.log("OnStart removed") }
 * });
 * ```
 * */
export declare function onStart(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/** Register a callback in the engine update event
 * This is called every frame
 * @param cb The callback to be called. Optionally return a function that will be called when the onUpdate callback is removed again
 * @param opts Optional options for the lifecycle callback registration
 * @returns A function that can be called to unregister the callback
 * @example
 * ```ts
 * onUpdate((ctx : Context) => {
 *     // do something
 *     console.log("Needle Engine: onUpdate registered")
 *     // optional to cleanup:
 *     return () => { console.log("onUpdate removed") }
 * });
 * ```
 * */
export declare function onUpdate(cb: LifecycleMethod, opts?: LifecycleMethodOptions): () => void;

/**
 * Add a listener for when an XR session ends
 * This event is triggered when the XR session is ended, either by the user or by the application before all other XR end events
 * @param fn The function to call when the XR session ends
 * @see {@link offXRSessionEnd} to remove the listener
 * @example
 * ```js
 * onXRSessionEnd((evt) => {
 *    console.log("XR session ended", evt);
 * });
 * ```
 */
export declare function onXRSessionEnd(fn: (evt: XRSessionEventArgs) => void): void;

/**
 * Add a listener for when an XR session starts
 * This event is triggered when the XR session is started, either by the user or by the application before all other XR start events
 * @param fn The function to call when the XR session starts
 * @see {@link offXRSessionStart} to remove the listener
 * @example
 * ```js
 * onXRSessionStart((evt) => {
 *   console.log("XR session started", evt);
 * });
 * ```
 */
export declare function onXRSessionStart(fn: (evt: XRSessionEventArgs) => void): void;

/**
 * OpenURL behaviour opens a URL in a new tab or window when the object (or any if it's children) is clicked.
 *
 * @category Interactivity
 * @category Web
 * @group Components
 */
export declare class OpenURL extends Component implements IPointerClickHandler {
    /**
     * The URL to open.
     */
    url?: string;
    /**
     * The mode in which the URL should be opened: NewTab, SameTab, NewWindow.
     * @see {@link OpenURLMode}
     */
    mode: OpenURLMode;
    /**
     * If true, the URL will be opened when the object with this component is clicked.
     */
    clickable: boolean;
    /**
     * Opens the URL in a new tab or window (depending on {@link mode}).
     */
    open(): Promise<void>;
    /* Excluded from this release type: start */
    /* Excluded from this release type: onPointerEnter */
    /* Excluded from this release type: onPointerExit */
    /* Excluded from this release type: onPointerClick */
    private _validateUrl;
}

/**
 * Defines how {@link OpenURL} should open a URL.
 */
declare enum OpenURLMode {
    /** Open the URL in a new browser tab */
    NewTab = 0,
    /** Open the URL in the current tab */
    SameTab = 1,
    /** Open the URL in a new browser window */
    NewWindow = 2
}

/**
 * [OrbitControls](https://engine.needle.tools/docs/api/OrbitControls) provides interactive camera control using three.js OrbitControls.
 * Users can rotate, pan, and zoom the camera to explore 3D scenes.
 *
 * **Features:**
 * - Rotation around a target point (orbit)
 * - Panning to move the view
 * - Zooming via scroll or pinch
 * - Auto-rotation for showcases
 * - Configurable angle and distance limits
 * - Smooth damping for natural feel
 *
 * ![](https://cloud.needle.tools/-/media/ylC34hrC3srwyzGNhFRbEQ.gif)
 *
 * **Access underlying controls:**
 * - `controls` - The three.js OrbitControls instance
 * - `controllerObject` - The object being controlled (usually the camera)
 *
 * **Debug options:**
 * - `?debugorbit` - Log orbit control events
 * - `?freecam` - Enable unrestricted camera movement
 *
 * @example Basic setup
 * ```ts
 * const orbitControls = camera.getComponent(OrbitControls);
 * orbitControls.autoRotate = true;
 * orbitControls.autoRotateSpeed = 2;
 * ```
 *
 * @example Set look-at target
 * ```ts
 * orbitControls.setLookTargetPosition(new Vector3(0, 1, 0), true);
 * // Or move both camera and target
 * orbitControls.setCameraTargetPosition(new Vector3(5, 2, 5), new Vector3(0, 0, 0));
 * ```
 *
 * @summary Camera controller using three.js OrbitControls
 * @category Camera and Controls
 * @group Components
 * @see {@link SmoothFollow} for smooth camera following
 * @see {@link Camera} for camera configuration
 * @link https://threejs.org/docs/#examples/en/controls/OrbitControls
 * @link https://engine.needle.tools/samples/panorama-controls alternative controls in samples
 */
export declare class OrbitControls extends Component implements ICameraController {
    /**
     * @inheritdoc
     */
    get isCameraController(): boolean;
    /** The underlying three.js OrbitControls.
     * See {@link https://threejs.org/docs/#examples/en/controls/OrbitControls}
     * @returns {@type ThreeOrbitControls | null}
     */
    get controls(): OrbitControls_2 | null;
    /** The object being controlled by the OrbitControls (usually the camera)
     * See {@link https://threejs.org/docs/#examples/en/controls/OrbitControls.object}
     * @returns {@type Object3D | null}
     */
    get controllerObject(): Object3D | null;
    /** Register callback when user starts interacting with the orbit controls */
    onStartInteraction(callback: Function): void;
    /** When enabled OrbitControls will automatically raycast to find a look-at target in start
     * @default true
     */
    autoTarget: boolean;
    /** When enabled the scene will be automatically fitted into the camera view in onEnable
     * @default false
     */
    autoFit: boolean;
    /** When enabled the camera can be rotated
     * @default true
     */
    enableRotate: boolean;
    /** When enabled the camera will rotate automatically
     * @default false
     */
    autoRotate: boolean;
    /** The speed at which the camera will rotate automatically. Will only be used when `autoRotate` is enabled
     * @default 1.0
     */
    autoRotateSpeed: number;
    /** The minimum azimuth angle in radians */
    minAzimuthAngle: number;
    /** The maximum azimuth angle in radians */
    maxAzimuthAngle: number;
    /** The minimum polar angle in radians
     * @default 0
     */
    minPolarAngle: number;
    /** The maximum polar angle in radians
     * @default Math.PI
     */
    maxPolarAngle: number;
    /** When enabled the camera can be moved using keyboard keys. The keys are defined in the `controls.keys` property
     * @default false
     */
    enableKeys: boolean;
    /** When enabled the camera movement will be damped
     * @default true
     */
    enableDamping: boolean;
    /** The damping factor for the camera movement. For more information see the [three.js documentation](https://threejs.org/docs/#examples/en/controls/OrbitControls.dampingFactor)
     * @default 0.1
     */
    dampingFactor: number;
    /** When enabled the camera can be zoomed
     * @default true
     */
    enableZoom: boolean;
    /** The minimum zoom level
     * @default 0
     */
    minZoom: number;
    /** The maximum zoom level
     * @default Infinity
     */
    maxZoom: number;
    /**
     * Sets the zoom speed of the OrbitControls
     * @default 1
     */
    zoomSpeed: number;
    /**
     * Set to true to enable zooming to the cursor position.
     * @default false
     */
    zoomToCursor: boolean;
    /** When enabled the camera can be panned
     * @default true
     */
    enablePan: boolean;
    /** Assigning an Object3D will make the camera look at this target's position.
     * The camera will orbit around this target.
     * @default null
     */
    lookAtTarget: Object3D | null;
    /** When enabled the camera will continuously follow the lookAtTarget's position every frame.
     * When disabled the target is only used for the initial look direction.
     * @default true
     */
    lockLookAtTarget: boolean;
    /** The weight for the lookAtTarget interpolation
     * @default 1
     */
    lookAtConstraint01: number;
    private lookAtConstraint?;
    /** If true user input interrupts the camera from animating to a target
     * @default true
     */
    allowInterrupt: boolean;
    /** If true the camera will focus on the target when the middle mouse button is clicked */
    middleClickToFocus: boolean;
    /** If true the camera will focus on the target when the left mouse button is double clicked
     * @default true
     */
    doubleClickToFocus: boolean;
    /**
     * When enabled the camera will fit the scene to the camera view when the background is clicked the specified number of times within a short time
     * @default 2
     */
    clickBackgroundToFitScene: number;
    /**
     * This is the DOM element that the OrbitControls will listen to for input events. By default this is the renderer's canvas element.
     * Set this to a different element to make the OrbitControls listen to that element instead.
     */
    get targetElement(): HTMLElement | null;
    set targetElement(value: HTMLElement | null);
    private _targetElement;
    /* Excluded from this release type: debugLog */
    /**
     * @deprecated use `targetLerpDuration` instead
     * ~~The speed at which the camera target and the camera will be lerping to their destinations (if set via script or user input)~~
     * */
    get targetLerpSpeed(): number;
    set targetLerpSpeed(v: number);
    /** The duration in seconds it takes for the camera look-at and position lerp to reach their destination (when set via `setCameraTargetPosition` and `setLookTargetPosition`)
     * @default 1
     */
    get targetLerpDuration(): number;
    set targetLerpDuration(v: number);
    private _lookTargetLerpDuration;
    /** NOTE(review): undocumented upstream — presumably constrains the orbit target to this object's bounds; confirm against implementation */
    targetBounds: Object3D | null;
    /**
     * Rotate the camera left (or right) by the specified angle in radians.
     * For positive angles the camera will rotate to the left, for negative angles it will rotate to the right.
     * Tip: Use Mathf to convert between degrees and radians.
     * @param angleInRadians The angle in radians to rotate the camera left
     * @example
     * ```typescript
     * // Rotate the camera left by 0.1 radians
     * orbitControls.rotateLeft(0.1);
     * ```
     */
    rotateLeft(angleInRadians: number): void;
    /**
     * Rotate the camera up (or down) by the specified angle in radians.
     * For positive angles the camera will rotate up, for negative angles it will rotate down.
     * Tip: Use Mathf to convert between degrees and radians.
     * @param angleInRadians The angle in radians to rotate the camera up
     * @example
     * ```typescript
     * // Rotate the camera up by 0.1 radians
     * orbitControls.rotateUp(0.1);
     * ```
     */
    rotateUp(angleInRadians: number): void;
    /**
     * Pan the camera by the specified amount in the x and y direction in pixels.
     * @param dx The amount to pan the camera in the x direction in pixels.
     * @param dy The amount to pan the camera in the y direction in pixels.
     */
    pan(dx: number, dy: number): void;
    /**
     * Zoom the camera in or out by the specified scale factor. The factor is applied to the current zoom radius / distance.
     * If the scale is greater than 0 then the camera will zoom in, if it is less than 0 then the camera will zoom out.
     * @param scale The scale factor to zoom the camera in or out. Expected range is between -1 and 1, where 0 means no zoom.
     * @example
     * ```typescript
     * // Zoom in by 0.1
     * orbitControls.zoomIn(0.1);
     * // Zoom out by 0.1
     * orbitControls.zoomIn(-0.1);
     * ```
     */
    zoomIn(scale: number): void;
    private _controls;
    private _cameraObject;
    private _lookTargetLerpActive;
    private _lookTargetStartPosition;
    private _lookTargetEndPosition;
    private _lookTargetLerp01;
    private _cameraLerpActive;
    private _cameraStartPosition;
    private _cameraEndPosition;
    private _cameraLerp01;
    private _cameraLerpDuration;
    private _fovLerpActive;
    private _fovLerpStartValue;
    private _fovLerpEndValue;
    private _fovLerp01;
    private _fovLerpDuration;
    private _inputs;
    private _enableTime;
    private _startedListeningToKeyEvents;
    private _eventSystem?;
    private _afterHandleInputFn?;
    private _camera;
    private _syncedTransform?;
    private _didSetTarget;
    private _didApplyLookAtTarget;
    /* Excluded from this release type: awake */
    /* Excluded from this release type: start */
    /* Excluded from this release type: onDestroy */
    /* Excluded from this release type: onEnable */
    /* Excluded from this release type: onDisable */
    private _activePointerEvents;
    private _lastTimeClickOnBackground;
    private _clickOnBackgroundCount;
    private _onPointerDown;
    private _onPointerDownLate;
    private _onPointerUp;
    private _onPointerUpLate;
    private updateTargetNow;
    private _orbitStartAngle;
    private _zoomStartDistance;
    private onControlsChangeStarted;
    private onControlsChangeEnded;
    private _shouldDisable;
    private afterHandleInput;
    onPausedChanged(isPaused: boolean): void;
    /* Excluded from this release type: onBeforeRender */
    private __onPreRender;
    /**
     * Sets camera target position and look direction using a raycast in forward direction of the object.
     *
     * @param source The object to raycast from. If a camera is passed in the camera position will be used as the source.
     * @param immediateOrDuration If true the camera target will move immediately to the new position, otherwise it will lerp. If a number is passed in it will be used as the duration of the lerp.
     *
     * This is useful for example if you want to align your camera with an object in your scene (or another camera). Simply pass in this other camera object
     * @returns true if the target was set successfully
     */
    setCameraAndLookTarget(source: Object3D | Camera, immediateOrDuration?: number | boolean): boolean;
    /** Moves the camera to position smoothly.
     * @param position The position in local space of the controllerObject to move the camera to. If null the camera will stop lerping to the target.
     * @param immediateOrDuration If true the camera will move immediately to the new position, otherwise it will lerp. If a number is passed in it will be used as the duration of the lerp.
     */
    setCameraTargetPosition(position?: Object3D | Vector3Like | null, immediateOrDuration?: boolean | number): void;
    /** True while the camera position is being lerped */
    get cameraLerpActive(): boolean;
    /** Call to stop camera position lerping */
    stopCameraLerp(): void;
    setFieldOfView(fov: number | undefined, immediateOrDuration?: boolean | number): void;
    /** Moves the camera look-at target to a position smoothly.
     * @param position The position in world space to move the camera target to. If null the camera will stop lerping to the target.
     * @param immediateOrDuration If true the camera target will move immediately to the new position, otherwise it will lerp. If a number is passed in it will be used as the duration of the lerp.
     */
    setLookTargetPosition(position?: Object3D | Vector3Like | null, immediateOrDuration?: boolean | number): void;
    /** True while the camera look target is being lerped */
    get lookTargetLerpActive(): boolean;
    /** Call to stop camera look target lerping */
    stopLookTargetLerp(): void;
    /** Sets the look at target from the assigned lookAtTarget Object3D
     * @param t The interpolation factor between the current look at target and the new target
     */
    private setLookTargetFromConstraint;
    private lerpLookTarget;
    private setTargetFromRaycast;
    /**
     * Fits the camera to show the objects provided (defaults to the scene if no objects are passed in)
     * @param options The options for fitting the camera. Use to provide objects to fit to, fit direction and size and other settings.
     */
    fitCamera(options?: OrbitFitCameraOptions): any;
    /** @deprecated Use fitCamera(options) */
    fitCamera(objects?: Object3D | Array<Object3D>, options?: Omit<OrbitFitCameraOptions, "objects">): any;
    private _haveAttachedKeyboardEvents;
}

     /** Options accepted by {@link OrbitControls.fitCamera}; extends FitCameraOptions with lerp control. */
     declare type OrbitFitCameraOptions = FitCameraOptions & {
         /** When true the camera moves immediately to the fitted position instead of lerping */
         immediate?: boolean;
     };

     /**
      * [Outline](https://engine.needle.tools/docs/api/Outline) is a UI component that adds an outline effect to UI elements.
      * You can customize the outline color and distance to create a visual border around the UI element.
      * @summary Add an outline effect to UI elements
      * @category User Interface
      * @group Components
      */
     export declare class Outline extends Component {
         /** The color of the outline effect */
         effectColor?: RGBAColor;
         /** The outline offset on x and y — presumably in UI units; confirm against implementation */
         effectDistance?: Vector2;
     }

     /** The Needle Engine networking server supports the concept of ownership that can be requested.
      * This enum contains possible outgoing (Request*) and incoming (Response*) events for communicating ownership.
      *
      * Typically, using the {@link OwnershipModel} class instead of dealing with those events directly is preferred. */
     export declare enum OwnershipEvent {
         /** Outgoing: ask the server whether the object currently has an owner */
         RequestHasOwner = "request-has-owner",
         /** Incoming: server answer to {@link RequestHasOwner} */
         ResponseHasOwner = "response-has-owner",
         /** Outgoing: ask the server whether the local client is the owner */
         RequestIsOwner = "request-is-owner",
         /** Incoming: server answer to {@link RequestIsOwner} */
         ResponseIsOwner = "response-is-owner",
         /** Outgoing: request ownership of the object */
         RequestOwnership = "request-ownership",
         /** Incoming: the local client was granted ownership */
         GainedOwnership = "gained-ownership",
         /** Outgoing: release ownership of the object */
         RemoveOwnership = "remove-ownership",
         /** Incoming: the local client lost ownership */
         LostOwnership = "lost-ownership",
         /** Incoming broadcast: some client gained ownership */
         GainedOwnershipBroadcast = "gained-ownership-broadcast",
         /** Incoming broadcast: some client lost ownership */
         LostOwnershipBroadcast = "lost-ownership-broadcast"
     }

     /**
      * Manages ownership of networked objects or components.
      *
      * In multiplayer scenarios, ownership determines which client has authority to modify an object.
      * The networking server rejects changes from clients that don't own an object. This prevents conflicts
      * when multiple users try to manipulate the same object simultaneously.
      *
      * **Ownership states:**
      * - `hasOwnership`: This client owns the object and can modify it
      * - `isOwned`: Some client (could be local or remote) owns the object
      * - `undefined`: Ownership state is unknown (not yet queried)
      *
      * **Typical workflow:**
      * 1. Request ownership before modifying an object
      * 2. Make your changes while you have ownership
      * 3. Free ownership when done (or keep it if still interacting)
      *
      * @example Basic usage
      * ```ts
      * export class MyComponent extends Behaviour {
      *   private ownership?: OwnershipModel;
      *
      *   awake() {
      *     this.ownership = new OwnershipModel(this.context.connection, this.guid);
      *   }
      *
      *   onClick() {
      *     // Request ownership before modifying the object
      *     this.ownership.requestOwnership();
      *   }
      *
      *   update() {
      *     if (this.ownership.hasOwnership) {
      *       // Safe to modify and sync the object
      *       this.gameObject.position.y += 0.01;
      *     }
      *   }
      *
      *   onDisable() {
      *     // Release ownership when done
      *     this.ownership.freeOwnership();
      *     this.ownership.destroy();
      *   }
      * }
      * ```
      *
      * @example Async ownership
      * ```ts
      * async modifyObject() {
      *   try {
      *     await this.ownership.requestOwnershipAsync();
      *     // Now guaranteed to have ownership
      *     this.transform.position.x = 5;
      *   } catch(e) {
      *     console.log("Failed to gain ownership");
      *   }
      * }
      * ```
      *
      * @see {@link SyncedTransform} for a complete example of ownership in action
      * @link https://engine.needle.tools/docs/networking.html
      */
     export declare class OwnershipModel {
         /** The unique identifier (GUID) of the object this ownership model manages */
         guid: string;
         /** The network connection used to exchange ownership events (set in the constructor) */
         private connection;
         /**
          * Checks if the local client has ownership of this object.
          * @returns `true` if this client owns the object and can modify it, `false` otherwise
          */
         get hasOwnership(): boolean;
         /**
          * Checks if anyone (local or remote client) has ownership of this object.
          * @returns `true` if someone owns the object, `false` if no one owns it, `undefined` if unknown
          */
         get isOwned(): boolean | undefined;
         /**
          * Checks if Needle Engine networking is connected to a websocket. Note that this is **not equal** to being connected to a *room*. If you want to check if Needle Engine is connected to a networking room use the `isInRoom` property.
          * @returns true if connected to the websocket.
          */
         get isConnected(): boolean;
         /** Backing field for {@link hasOwnership} */
         private _hasOwnership;
         /** Backing field for {@link isOwned} */
         private _isOwned;
         /** Subscription handle for gained-ownership events — presumably released in destroy(); confirm */
         private _gainSubscription;
         /** Subscription handle for lost-ownership events — presumably released in destroy(); confirm */
         private _lostSubscription;
         /** Callback handle for the has-owner server response */
         private _hasOwnerResponse;
         /**
          * @param connection The network connection used to send and receive ownership events
          * @param guid The GUID of the networked object (or component) to manage ownership for
          */
         constructor(connection: NetworkConnection, guid: string);
         private _isWaitingForOwnershipResponseCallback;
         /**
          * Queries the server to update the `isOwned` state.
          * Call this to check if anyone currently has ownership.
          */
         updateIsOwned(): void;
         /** Handles the server response to a has-owner query */
         private onHasOwnerResponse;
         /**
          * Requests ownership only if the object is not currently owned by anyone.
          * Internally checks ownership state first, then requests ownership if free.
          * @returns this OwnershipModel instance for method chaining
          */
         requestOwnershipIfNotOwned(): OwnershipModel;
         private waitForHasOwnershipRequestResponse;
         /**
          * Requests ownership and waits asynchronously until ownership is granted or timeout occurs.
          * @returns Promise that resolves with this OwnershipModel when ownership is gained
          * @throws Rejects with "Timeout" if ownership is not gained within ~1 second
          * @example
          * ```ts
          * try {
          *   await ownership.requestOwnershipAsync();
          *   // Ownership granted, safe to modify object
          * } catch(e) {
          *   console.warn("Could not gain ownership:", e);
          * }
          * ```
          */
         requestOwnershipAsync(): Promise<OwnershipModel>;
         /**
          * Requests ownership of this object from the networking server.
          * Ownership may not be granted immediately - check `hasOwnership` property or use `requestOwnershipAsync()`.
          * @returns this OwnershipModel instance for method chaining
          */
         requestOwnership(): OwnershipModel;
         /**
          * Releases ownership of this object, allowing others to take control.
          * Call this when you're done modifying an object to allow other users to interact with it.
          * @returns this OwnershipModel instance for method chaining
          */
         freeOwnership(): OwnershipModel;
         /**
          * Cleans up event listeners and resources.
          * Call this when the OwnershipModel is no longer needed (e.g., in `onDestroy()`).
          */
         destroy(): void;
         /** Event handler invoked when the local client gains ownership */
         private onGainedOwnership;
         /** Event handler invoked when the local client loses ownership */
         private onLostOwnership;
     }

     /** Server payload answering an ownership query ({@link OwnershipEvent}) for the object identified by guid. */
     declare type OwnershipResponse = {
         /** GUID of the object the response refers to */
         guid: string;
         /** The boolean answer to the query (e.g. whether the object has an owner) */
         value: boolean;
     };

     /** Describes padding on all four sides of a rectangle. */
     export declare class Padding {
         left: number;
         right: number;
         top: number;
         bottom: number;
         /** Combined vertical padding — presumably top + bottom; confirm against implementation */
         get vertical(): number;
         /** Combined horizontal padding — presumably left + right; confirm against implementation */
         get horizontal(): number;
     }

     /** A primitive parameter value. NOTE(review): because `T extends string`, the union collapses to `string | boolean | number` — `T` serves only as documentation of the intended literal type. */
     declare type Param<T extends string> = string | boolean | number | T;

     /** An animator controller parameter: its name, precomputed name hash, type and current value. */
     export declare type Parameter = {
         /** The parameter name as authored in the animator controller */
         name: string;
         /** the animator string to hash result, test against this if a number is used to get a param value */
         hash: number;
         type: AnimatorControllerParameterType;
         /** The current value; the runtime representation depends on {@link type} */
         value: number | boolean | string;
     };

     /** Converts a numeric string literal type (e.g. `"42"`) to its number literal type (`42`); resolves to `never` for non-numeric strings. */
     declare type ParseNumber<T> = T extends `${infer U extends number}` ? U : never;

     /** Parse already-loaded glTF data. This is the core method used by Needle Engine to load gltf files. All known extensions are registered here.
      * NOTE(review): despite the `Sync` suffix this returns a Promise and must be awaited.
      * @param context The current context
      * @param data The gltf data as string or ArrayBuffer
      * @param path The path to the gltf file
      * @param seed The seed for generating unique ids
      * @returns The loaded gltf object
      */
     export declare function parseSync(context: Context, data: string | ArrayBuffer, path: string, seed: number | UIDProvider | null): Promise<Model | undefined>;

     /** Describes a particle burst: a number of particles emitted at a specific time, optionally repeating. */
     export declare class ParticleBurst {
         /** How many times the burst repeats */
         cycleCount: number;
         maxCount: number;
         minCount: number;
         /** Chance (0..1) that the burst fires — inferred from name; confirm against implementation */
         probability: number;
         /** Interval in seconds between repeated cycles — inferred from name; confirm */
         repeatInterval: number;
         /** The time at which the burst fires */
         time: number;
         /** The particle count configuration; which fields apply depends on {@link ParticleSystemCurveMode} */
         count: {
             constant: number;
             constantMax: number;
             constantMin: number;
             curve?: AnimationCurve;
             curveMax?: AnimationCurve;
             curveMin?: AnimationCurve;
             curveMultiplier?: number;
             mode: ParticleSystemCurveMode;
         };
         /** Internal flag tracking whether the burst already fired */
         private _performed;
         /** Resets the internal performed state so the burst can fire again */
         reset(): void;
         /** Evaluates the burst at the given time and returns a particle count — inferred from signature; confirm */
         run(time: number): number;
     }

     /** Quarks particle behaviour that drives a sub particle system from particles of a parent system (e.g. spawn on birth/death). */
     export declare class ParticleSubEmitter implements QParticleBehaviour {
         private system;
         private particleSystem;
         private subSystem;
         subParticleSystem?: ParticleSystem_2 | undefined;
         /** Behaviour type identifier used by three.quarks */
         type: string;
         /** When the sub emitter triggers (birth/collision/death etc.) — see SubEmitterType */
         emitterType?: SubEmitterType;
         /** Chance that the sub emitter triggers — presumably 0..1; confirm against implementation */
         emitterProbability?: number;
         // Scratch objects reused across updates to avoid per-particle allocations (inferred from naming)
         private q_;
         private v_;
         private v2_;
         private _emitterMatrix;
         private _circularBuffer;
         constructor(system: IParticleSystem, particleSystem: ParticleSystem_2, subSystem: IParticleSystem, subParticleSystem?: ParticleSystem_2 | undefined);
         clone(): QParticleBehaviour;
         /** Called once when a particle is spawned */
         initialize(particle: QParticle): void;
         /** Called every frame for each active particle */
         update(particle: QParticle, _delta: number): void;
         /** Called once per frame before per-particle updates */
         frameUpdate(_delta: number): void;
         toJSON(): any;
         reset(): void;
         private run;
     }

     /**
      * ParticleSystem efficiently handles the motion and rendering of many individual particles.
      * Use it for visual effects like fire, smoke, sparks, rain, magic spells, and more.
      *
      * ![](https://cloud.needle.tools/-/media/qz5nO-raa7dNb_XCBNxHmA.gif)
      * ![](https://cloud.needle.tools/-/media/IKOrLhesy1dKTfQQxx_pLA.gif)
      *
      * **Modules:**
      * Configure particle behavior through modules like {@link EmissionModule}, {@link ShapeModule},
      * {@link ColorOverLifetimeModule}, {@link SizeOverLifetimeModule}, {@link VelocityOverLifetimeModule},
      * {@link NoiseModule}, and {@link TrailModule}.
      *
      * **Custom behaviors:**
      * Add custom particle behaviors by extending {@link ParticleSystemBaseBehaviour} and
      * calling `addBehaviour()`. This gives you full control over particle initialization and updates.
      *
      * **Performance:**
      * Particles are batched together for fast, performant rendering even on low-end devices.
      * Needle Engine uses [three.quarks](https://github.com/Alchemist0823/three.quarks) internally.
      *
      * @example Basic playback control
      * ```ts
      * const ps = myObject.getComponent(ParticleSystem);
      * ps.play();
      * ps.emit(10); // Emit 10 particles immediately
      * ps.pause();
      * ps.stop(true, true); // Stop and clear all particles
      * ```
      *
      * @example Custom particle behavior
      * ```ts
      * class GravityBehaviour extends ParticleSystemBaseBehaviour {
      *   update(particle: Particle, delta: number) {
      *     particle.velocity.y -= 9.8 * delta;
      *   }
      * }
      * particleSystem.addBehaviour(new GravityBehaviour());
      * ```
      *
      * - Example: https://engine.needle.tools/samples/particles
      * - Example: https://engine.needle.tools/samples/particle-bursts
      * - Example: https://engine.needle.tools/samples/particles-on-collision
      *
      * @summary Handles the motion and rendering of many individual particles
      * @category Rendering
      * @group Components
      * @see {@link ParticleSystemBaseBehaviour} for custom particle behaviors
      * @see {@link EmissionModule} for emission configuration
      * @see {@link ShapeModule} for emission shape control
      * @see {@link TrailModule} for particle trails
      * @link https://engine.needle.tools/docs/features/particles.html
      */
     export declare class ParticleSystem extends Component implements IParticleSystem {
         /** Start playing the particle system. @param includeChildren presumably also plays child systems; confirm */
         play(includeChildren?: boolean): void;
         /** Pause the particle system without clearing particles */
         pause(includeChildren?: boolean): void;
         /** clear=true removes all emitted particles */
         stop(includeChildren?: boolean, clear?: boolean): void;
         /** remove emitted particles and reset time */
         reset(): void;
         private _state?;
         /** Emit the given number of particles immediately */
         emit(count: number): void;
         /** When true the system starts playing automatically on awake */
         get playOnAwake(): boolean;
         set playOnAwake(val: boolean);
         // Configuration modules (see the class documentation for links)
         readonly colorOverLifetime: ColorOverLifetimeModule;
         readonly main: MainModule;
         readonly emission: EmissionModule;
         readonly sizeOverLifetime: SizeOverLifetimeModule;
         readonly shape: ShapeModule;
         readonly noise: NoiseModule;
         readonly trails: TrailModule;
         readonly velocityOverLifetime: VelocityOverLifetimeModule;
         readonly limitVelocityOverLifetime: LimitVelocityOverLifetimeModule;
         inheritVelocity: InheritVelocityModule;
         readonly colorBySpeed: ColorBySpeedModule;
         readonly textureSheetAnimation: TextureSheetAnimationModule;
         readonly rotationOverLifetime: RotationOverLifetimeModule;
         readonly rotationBySpeed: RotationBySpeedModule;
         readonly sizeBySpeed: SizeBySpeedModule;
         /** The renderer responsible for drawing this system's particles */
         get renderer(): ParticleSystemRenderer;
         /** True while the system is playing */
         get isPlaying(): boolean;
         /** Number of currently alive particles */
         get currentParticles(): number;
         /** The maximum number of particles this system may have alive at once */
         get maxParticles(): number;
         /** Current playback time of the system */
         get time(): number;
         /** Duration of one emission cycle */
         get duration(): number;
         get deltaTime(): number;
         get scale(): number;
         get cameraScale(): number;
         private _cameraScale;
         /** The object that particle meshes are parented to */
         get container(): Object3D;
         /** True when particles are simulated in world space */
         get worldspace(): boolean;
         /** True when particles are simulated in local space */
         get localspace(): boolean;
         // Cached world-transform components (backing fields for the getters below)
         private __worldQuaternion;
         get worldQuaternion(): Quaternion;
         private _worldQuaternionInverted;
         get worldQuaternionInverted(): Quaternion;
         private _worldScale;
         get worldScale(): Vector3;
         private _worldPositionFrame;
         private _worldPos;
         get worldPos(): Vector3;
         get matrixWorld(): Matrix4;
         /** True when this system is driven as a sub emitter of another system */
         get isSubsystem(): boolean;
         /** Add a custom quarks behaviour to the particle system.
          * You can add a quarks.Behaviour type or derive from {@link ParticleSystemBaseBehaviour}
          * @link https://github.com/Alchemist0823/three.quarks
          * @example
          * ```typescript
          * class MyBehaviour extends ParticleSystemBaseBehaviour {
          *    initialize(particle: Particle) {
          *       // initialize the particle
          *   }
          *    update(particle: Particle, delta: number) {
          *        // do something with the particle
          *   }
          * }
          *
          * const system = gameObject.getComponent(ParticleSystem);
          * system.addBehaviour(new MyBehaviour());
          * ```
          */
         addBehaviour(particleSystemBehaviour: QParticleBehaviour | ParticleSystemBaseBehaviour): boolean;
         /** Remove a custom quarks behaviour from the particle system. **/
         removeBehaviour(particleSystemBehaviour: QParticleBehaviour | ParticleSystemBaseBehaviour): boolean;
         /** Removes all behaviours from the particle system
          * **Note:** this will also remove the default behaviours like SizeBehaviour, ColorBehaviour etc.
          */
         removeAllBehaviours(): boolean;
         /** Get the underlying three.quarks particle system behaviours. This can be used to fully customize the behaviour of the particles. */
         get behaviours(): QParticleBehaviour[] | null;
         /** Get access to the underlying quarks particle system if you need more control
          * @link https://github.com/Alchemist0823/three.quarks
          */
         get particleSystem(): ParticleSystem_2 | null;
         private _renderer;
         private _batchSystem?;
         private _particleSystem?;
         private _interface;
         private _container;
         private _time;
         private _isPlaying;
         private _isUsedAsSubsystem;
         private _didPreWarm;
         /** called from deserialization */
         private set bursts(value);
         private _bursts?;
         /** called from deserialization */
         private set subEmitterSystems(value);
         private _subEmitterSystems?;
         /* Excluded from this release type: onAfterDeserialize */
         /* Excluded from this release type: awake */
         /* Excluded from this release type: start */
         /* Excluded from this release type: onDestroy */
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onBeforeRender */
         onDisable(): void;
         private preWarm;
         private _lastBatchesCount;
         private onSimulate;
         private updateLayers;
         private onUpdate;
         private addSubParticleSystems;
     }

     /** How the texture-sheet animation row is selected for a particle (used by the texture sheet animation module). */
     declare enum ParticleSystemAnimationRowMode {
         Custom = 0,
         Random = 1,
         MeshIndex = 2
     }

     /** How texture-sheet animation progress is driven: by particle lifetime, by speed, or at a fixed FPS. */
     declare enum ParticleSystemAnimationTimeMode {
         Lifetime = 0,
         Speed = 1,
         FPS = 2
     }

     /** Whether texture-sheet animation uses the whole sprite sheet or a single row. */
     declare enum ParticleSystemAnimationType {
         WholeSheet = 0,
         SingleRow = 1
     }

     /**
      * Base class for custom particle behaviors. Extend this to create custom particle logic.
      *
      * Override `initialize()` to set up per-particle state when particles spawn.
      * Override `update()` to modify particles each frame (position, velocity, color, size, etc.).
      * Override `frameUpdate()` for logic that runs once per frame (not per particle).
      *
      * @example Custom wind effect
      * ```ts
      * class WindBehaviour extends ParticleSystemBaseBehaviour {
      *   windStrength = 2;
      *   windDirection = new Vector3(1, 0, 0);
      *
      *   update(particle: Particle, delta: number) {
      *     particle.velocity.addScaledVector(this.windDirection, this.windStrength * delta);
      *   }
      * }
      * ```
      *
      * @see {@link ParticleSystem.addBehaviour} to register your custom behavior
      * @link https://github.com/Alchemist0823/three.quarks
      */
     export declare abstract class ParticleSystemBaseBehaviour implements QParticleBehaviour {
         /** Reference to the particle system this behavior belongs to */
         system: ParticleSystem;
         /** Access to the engine context for timing, input, etc. */
         get context(): Context;
         constructor(ps?: ParticleSystem);
         /** Behavior type identifier used by three.quarks */
         type: string;
         /** Called once when a particle is spawned. Use to initialize per-particle state. */
         initialize(_particle: QParticle): void;
         /** Called every frame for each active particle. Use to update particle properties. */
         update(_particle: QParticle, _delta: number): void;
         /** Called once per frame before particle updates. Use for shared calculations. */
         frameUpdate(_delta: number): void;
         /** Serialization hook required by the quarks behaviour interface. NOTE(review): declared `void` here while e.g. ParticleSubEmitter.toJSON returns `any` — confirm */
         toJSON(): void;
         /** Creates a copy of this behaviour (required by the quarks behaviour interface) */
         clone(): QParticleBehaviour;
         /** Called when the particle system is reset. */
         reset(): void;
     }

     /** Value mode for particle-system parameters: a constant, a curve, or a pair of curves/constants (internal, not exported). */
     declare enum ParticleSystemCurveMode {
         Constant = 0,
         Curve = 1,
         TwoCurves = 2,
         TwoConstants = 3
     }

     /** String-literal union of the {@link ParticleSystemCurveMode} member names (e.g. "Constant" | "Curve" | ...). */
     declare type ParticleSystemCurveModeKeys = keyof typeof ParticleSystemCurveMode;

     /** Color mode for particle-system colors: single color, gradient, two-color/two-gradient blends, or a random color (internal, not exported). */
     declare enum ParticleSystemGradientMode {
         Color = 0,
         Gradient = 1,
         TwoColors = 2,
         TwoGradients = 3,
         RandomColor = 4
     }

     /** String-literal union of the {@link ParticleSystemGradientMode} member names. */
     declare type ParticleSystemGradientModeKeys = keyof typeof ParticleSystemGradientMode;

     /** Mode for the inherit-velocity module: inherit the emitter's initial or current velocity (internal, not exported). */
     declare enum ParticleSystemInheritVelocityMode {
         Initial = 0,
         Current = 1
     }

     /** Part of a mesh used for particle emission: vertices, edges, or triangles (internal, not exported). */
     declare enum ParticleSystemMeshShapeType {
         Vertex = 0,
         Edge = 1,
         Triangle = 2
     }

     /* Excluded from this release type: ParticleSystemRenderer */

     /** How particles are rendered: billboard variants, stretched, or as meshes (internal, not exported). */
     declare enum ParticleSystemRenderMode {
         Billboard = 0,
         Stretch = 1,
         HorizontalBillboard = 2,
         VerticalBillboard = 3,
         Mesh = 4
     }

     /** Scaling mode applied to the particle system: Hierarchy, Local, or Shape (internal, not exported). */
     declare enum ParticleSystemScalingMode {
         Hierarchy = 0,
         Local = 1,
         Shape = 2
     }

     /** Emission sequencing over an emitter shape: Random, Loop, PingPong, or BurstSpread (internal, not exported). */
     declare enum ParticleSystemShapeMultiModeValue {
         Random = 0,
         Loop = 1,
         PingPong = 2,
         BurstSpread = 3
     }

     /**
      * Emitter shape of a particle system's shape module.
      * Numeric values are explicit and should be treated as stable (they identify the shape in serialized data).
      */
     export declare enum ParticleSystemShapeType {
         Sphere = 0,
         SphereShell = 1,
         Hemisphere = 2,
         HemisphereShell = 3,
         Cone = 4,
         Box = 5,
         Mesh = 6,
         ConeShell = 7,
         ConeVolume = 8,
         ConeVolumeShell = 9,
         Circle = 10,
         CircleEdge = 11,
         SingleSidedEdge = 12,
         MeshRenderer = 13,
         SkinnedMeshRenderer = 14,
         BoxShell = 15,
         BoxEdge = 16,
         Donut = 17,
         Rectangle = 18,
         Sprite = 19,
         SpriteRenderer = 20
     }

     /** Space in which particles are simulated: local, world, or a custom space (internal, not exported). */
     declare enum ParticleSystemSimulationSpace {
         Local = 0,
         World = 1,
         Custom = 2
     }

     /** Trail mode: one trail per particle, or a ribbon (internal, not exported). */
     declare enum ParticleSystemTrailMode {
         PerParticle = 0,
         Ribbon = 1
     }

     /** How the trail texture is applied along the trail: stretched, tiled, or distributed/repeated per segment (internal, not exported). */
     declare enum ParticleSystemTrailTextureMode {
         Stretch = 0,
         Tile = 1,
         DistributePerSegment = 2,
         RepeatPerSegment = 3
     }

     /**
      * Wraps a PeerJS peer for media calls between users in a networked room.
      * Handles are obtained via {@link PeerHandle.getOrCreate} — the constructor is private.
      */
     export declare class PeerHandle extends EventDispatcher<any> {
         /** Existing handles, reused by {@link getOrCreate} */
         private static readonly instances;
         /** Returns the handle for the given context and guid, creating one if necessary */
         static getOrCreate(context: Context, guid: string): PeerHandle;
         /** The local peer's brokering id, if a peer has been created */
         getMyPeerId(): string | undefined;
         /** Maps a user connection id to the corresponding peer id */
         getPeerIdFromUserId(userConnectionId: string): string;
         /** Maps a peer id back to the corresponding user connection id */
         getUserIdFromPeerId(peerId: string): string;
         /** Starts an outgoing media call to `peerId` with the given stream; returns undefined when no call was created */
         makeCall(peerId: string, stream: MediaStream): CallHandle | undefined;
         /** Closes all calls managed by this handle */
         closeAll(): void;
         updateCalls: () => void;
         /** The underlying PeerJS peer instance, if created */
         get peer(): default_2 | undefined;
         /** Calls received from other peers */
         get incomingCalls(): CallHandle[];
         readonly id: string;
         readonly context: Context;
         private readonly _incomingCalls;
         private readonly _outgoingCalls;
         private _peer;
         private constructor();
         private _enabled;
         private _enabledPeer;
         private onConnectRoomFn;
         enable(): void;
         disable(): void;
         private onConnectRoom;
         private setupPeer;
         private subscribePeerEvents;
         private unsubscribePeerEvents;
         /**
          * Emitted when a connection to the PeerServer is established. You may use the peer before this is emitted, but messages to the server will be queued. id is the brokering ID of the peer (which was either provided in the constructor or assigned by the server).
          * @param id ID of the peer
          */
         private onPeerConnect;
         /** Emitted when the peer is destroyed and can no longer accept or create any new connections. At this time, the peer's connections will all be closed. */
         private onPeerClose;
         /** Emitted when the peer is disconnected from the signalling server, either manually or because the connection to the signalling server was lost. */
         private onPeerDisconnected;
         /**
          * Errors on the peer are almost always fatal and will destroy the peer. Errors from the underlying socket and PeerConnections are forwarded here.
          */
         private onPeerError;
         private onPeerReceivingCall;
         private registerCall;
     }

     /**
      * Peer-to-peer networking setup. Acts either as host or client
      * (NOTE(review): role split inferred from `trySetupHost`/`trySetupClient` — confirm against the implementation).
      */
     export declare class PeerNetworking {
         /** True when this instance acts as the host */
         get isHost(): boolean;
         private _host?;
         private _client;
         private _clientData?;
         constructor();
         onEnable(): void;
         private trySetupHost;
         private trySetupClient;
     }

     /**
      * Provides physics utilities including raycasting and overlap detection.
      * Access via `this.context.physics` from any component.
      *
      * For physics engine features (rigidbodies, colliders, forces, etc.), use `this.context.physics.engine`.
      * The physics engine is {@link RapierPhysics}, which uses the Rapier physics library for realistic simulation.
      *
      * **Performance - Automatic MeshBVH:**
      * Needle Engine automatically uses [three-mesh-bvh](https://github.com/gkjohnson/three-mesh-bvh) to accelerate raycasting.
      * MeshBVH structures are generated automatically on web workers in the background, making raycasts significantly faster
      * without blocking the main thread. This happens transparently - you don't need to do anything to enable it.
      * While the BVH is being generated, raycasts fall back to standard three.js raycasting (configurable via `allowSlowRaycastFallback`).
      *
      * @example Raycasting from mouse position
      * ```ts
      * const hits = this.context.physics.raycast();
      * if (hits.length > 0) {
      *   console.log("Hit:", hits[0].object.name);
      * }
      * ```
      * @example Raycasting with inline options
      * ```ts
      * const hits = this.context.physics.raycast({
      *   maxDistance: 100, // Only hit objects within 100 units
      *   layerMask: 1, // Only layer 0
      *   ignore: [this.gameObject] // Ignore self
      * });
      * ```
      * @example Physics engine raycast (against colliders only)
      * ```ts
      * const hit = this.context.physics.engine?.raycast(origin, direction);
      * ```
      * @see {@link Rigidbody} for physics simulation component
      * @see {@link Collider} for collision detection component
      * @see {@link RapierPhysics} for physics engine implementation details
      */
     export declare class Physics {
         private static _raycasting;
         /**
          * Returns true if raycasting is currently in progress
          */
         static get raycasting(): boolean;
         /**@deprecated use `this.context.physics.engine.raycast` {@link IPhysicsEngine.raycast} */
         raycastPhysicsFast(origin: Vec2 | Vec3, direction?: Vec3 | undefined, maxDistance?: number, solid?: boolean): {
             point: Vector3;
             collider: ICollider;
             normal?: Vector3;
         } | null;
         /**@deprecated use `this.context.physics.engine.raycastAndGetNormal` {@link IPhysicsEngine.raycastAndGetNormal} */
         raycastPhysicsFastAndGetNormal(origin: Vec2 | Vec3, direction?: Vec3 | undefined, maxDistance?: number, solid?: boolean): {
             point: Vector3;
             collider: ICollider;
             normal?: Vector3;
         } | null;
         /**@deprecated use this.context.physics.engine.sphereOverlap */
         sphereOverlapPhysics(point: Vector3, radius: number): ShapeOverlapResult[] | null;
         private readonly context;
         /** The underlying physics engine ({@link RapierPhysics}), if loaded */
         engine?: IPhysicsEngine;
         constructor(context: Context);
         private readonly raycaster;
         private readonly defaultRaycastOptions;
         private readonly targetBuffer;
         private readonly defaultThresholds;
         private sphereResults;
         private sphereMask;
         private readonly sphere;
         /** Test overlapping of a sphere with the threejs geometry. This does not use colliders. This does not return an exact intersection point (intersections returned contain the object and the world position of the object that is being hit)
          * For a more accurate test use the physics engine's collider overlap test (see sphereOverlapPhysics)
          * @param spherePos the center of the sphere in world space
          * @param radius the radius of the sphere
          * @param traverseChildsAfterHit if false it will stop after the first hit. If true it will continue to traverse and add all hits to the result array
          * @param bvh use MeshBVH for raycasting. This is faster than the default threejs raycaster but uses more memory.
          * @param shouldRaycast optional callback to filter objects. Return `false` to ignore the object completely or `"continue in children"` to skip the object but continue to traverse its children (if you do raycast with `recursive` enabled)
          */
         sphereOverlap(spherePos: Vector3, radius: number, traverseChildsAfterHit?: boolean, bvh?: boolean, shouldRaycast?: RaycastTestObjectCallback | null): Array<Intersection>;
         /** Raycast against rendered three.js objects using an explicit ray (instead of deriving one from the pointer position).
          * @param ray the ray to cast, in world space
          * @param options raycast options. If null, default options will be used.
          */
         raycastFromRay(ray: Ray, options?: IRaycastOptions | null): Array<Intersection>;
         /** raycast against rendered three objects. This might be very slow depending on your scene complexity.
          * We recommend setting objects to IgnoreRaycast layer (2) when you don't need them to be raycasted.
          * Raycasting SkinnedMeshes is especially expensive.
          * Use the physics engine raycast for raycasting against physics colliders only. Depending on your scenario this might be faster.
          * @param options raycast options. If null, default options will be used.
          */
         raycast(options?: IRaycastOptions | null): Array<Intersection>;
         private intersect;
         private tempBoundingBox;
         private intersectSphere;
     }

     /** USD exporter extension hook for physics data (see {@link IUSDExporterExtension}). */
     export declare class PhysicsExtension implements IUSDExporterExtension {
         /** Name identifying this extension to the exporter */
         get extensionName(): string;
         /** Called for each exported object with its USD model and the export context */
         onExportObject?(object: Object3D, model: USDObject, _context: USDZExporterContext): void;
     }

     /**
      * Properties for physics simulation, like friction or bounciness.
      */
     export declare type PhysicsMaterial = {
         /** How bounciness is combined when two colliders interact */
         bounceCombine?: PhysicsMaterialCombine;
         /** Bounciness (restitution) of the material */
         bounciness?: number;
         /** How friction is combined when two colliders interact */
         frictionCombine?: PhysicsMaterialCombine;
         /** Friction coefficient of the material */
         dynamicFriction?: number;
         /**@deprecated not used */
         staticFriction?: number;
     };

     /** How material values (friction/bounciness) of two touching colliders are combined: average, multiply, minimum, or maximum. */
     export declare enum PhysicsMaterialCombine {
         Average = 0,
         Multiply = 1,
         Minimum = 2,
         Maximum = 3
     }

     /**
      * [PixelationEffect](https://engine.needle.tools/docs/api/PixelationEffect) Pixelation effect simulates a pixelated look by enlarging pixels in the rendered scene.
      * This effect can be used to achieve a retro or stylized visual aesthetic, reminiscent of early video games or low-resolution graphics.
      * @summary Pixelation Post-Processing Effect
      * @category Effects
      * @group Components
      */
     export declare class PixelationEffect extends PostProcessingEffect {
         /** Type name used to identify this effect */
         get typeName(): string;
         /** Pixel granularity — larger values enlarge the pixels (see the postprocessing library's PixelationEffect) */
         readonly granularity: VolumeParameter;
         /** Creates the underlying postprocessing effect instance */
         onCreateEffect(): EffectProviderResult;
     }

     /**
      * Place an object on a surface. This will calculate the object bounds which might be an expensive operation for complex objects.
      * The object will be visually placed on the surface (the object's pivot will be ignored).
      * @param obj the object to place on the surface
      * @param point the point to place the object on
      * @returns a {@link PlaceOnSurfaceResult} with the offset from the object bounds to the pivot and the computed bounding box
      */
     export declare function placeOnSurface(obj: Object3D, point: Vector3): PlaceOnSurfaceResult;

     /** Result of {@link placeOnSurface}. */
     declare type PlaceOnSurfaceResult = {
         /** The offset from the object bounds to the pivot */
         offset: Vector3;
         /** The object's bounding box */
         bounds: Box3;
     };

     /**
      * PlayableDirector is the main component for controlling timelines in Needle Engine.
      * It orchestrates playback of TimelineAssets containing animation, audio, signal,
      * control, and activation tracks.
      *
      * ![](https://cloud.needle.tools/-/media/CkJal5dIBwFe6erA-MmiGw.webp)
      * *Screenshot: Timeline in Unity*
      *
      * **Supported track types:**
      * - Animation tracks - animate objects using AnimationClips
      * - Audio tracks - play synchronized audio
      * - Activation tracks - show/hide objects at specific times
      * - Signal tracks - trigger events at specific points
      * - Control tracks - control nested timelines or prefab instances
      * - Marker tracks - add metadata and navigation points
      *
      * [![](https://cloud.needle.tools/-/media/HFudFwl8J8D-Kt_VPu7pRw.gif)](https://engine.needle.tools/samples/bike-scrollytelling-responsive-3d)
      *
      * [![](https://cloud.needle.tools/-/media/xJ1rI3STbZRnOoWJrSzqlQ.gif)](https://app.songsofcultures.com/?scene=little-brother)
      *
      * **Playback control:**
      * Use `play()`, `pause()`, `stop()` for basic control.
      * Set `time` directly and call `evaluate()` for scrubbing.
      * Adjust `speed` for playback rate and `weight` for blending.
      *
      * @example Basic timeline playback
      * ```ts
      * const director = myObject.getComponent(PlayableDirector);
      * director.play();
      * // Jump to specific time
      * director.time = 2.5;
      * director.evaluate();
      * ```
      *
      * @example Control playback speed
      * ```ts
      * director.speed = 0.5; // Half speed
      * director.speed = -1;  // Reverse playback
      * ```
      *
      * - Example: https://engine.needle.tools/samples-uploads/product-flyover/
      *
      * @summary Controls and plays TimelineAssets
      * @category Animation and Sequencing
      * @group Components
      * @see {@link Animator} for playing individual AnimationClips
      * @see {@link AudioSource} for standalone audio playback
      * @see {@link SignalReceiver} for handling timeline signals
      * @link https://engine.needle.tools/samples/?overlay=samples&tag=animation
      * @link https://app.songsofcultures.com/
      * @link https://engine.needle.tools/docs/blender/animation.html Blender timeline and animation export
      */
     export declare class PlayableDirector extends Component {
         /** Custom track factories registered via {@link registerCreateTrack} */
         private static createTrackFunctions;
         /**
          * Register a function to create a track handler for a custom track type.
          * This allows you to extend the timeline system with your own track types and handlers.
          * @param type the track type identifier the factory handles
          * @param fn factory invoked to create the handler for tracks of this type
          */
         static registerCreateTrack(type: string, fn: CreateTrackFunction): void;
         /**
          * The timeline asset containing tracks, clips, and markers that this director will play.
          * Assign a timeline asset exported from Unity or Blender to enable playback.
          */
         playableAsset?: Models.TimelineAssetModel;
         /**
          * When true, the timeline starts playing automatically when the component awakens.
          * Set to false to control playback manually via `play()`.
          * @default false
          */
         playOnAwake?: boolean;
         /**
          * Determines how the timeline behaves when it reaches the end of its duration.
          * @default DirectorWrapMode.Loop
          */
         extrapolationMode: DirectorWrapMode;
         /** Returns true if the timeline is currently playing (not paused or stopped). */
         get isPlaying(): boolean;
         /** Returns true if the timeline is currently paused. */
         get isPaused(): boolean;
         /**
          * The current playback time in seconds. Set this and call `evaluate()` to scrub.
          * @example Scrub to a specific time
          * ```ts
          * director.time = 5.0;
          * director.evaluate();
          * ```
          */
         get time(): number;
         set time(value: number);
         /** The total duration of the timeline in seconds (read from the longest track/clip). */
         get duration(): number;
         set duration(value: number);
         /**
          * The blend weight of the timeline (0-1). Use values below 1 to blend
          * timeline animations with other animations like those from an Animator.
          */
         get weight(): number;
         set weight(value: number);
         /**
          * The playback speed multiplier. Set to negative values for reverse playback.
          * @example Reverse playback at double speed
          * ```ts
          * director.speed = -2;
          * ```
          */
         get speed(): number;
         set speed(value: number);
         /**
          * When true, `play()` will wait for audio tracks to load and for user interaction
          * before starting playback. Web browsers require user interaction (click/tap) before
          * allowing audio to play - this ensures audio is synchronized with the timeline.
          * Set to false if you need immediate visual playback and can tolerate audio delay.
          * @default true
          */
         waitForAudio: boolean;
         private _visibilityChangeEvt?;
         private _clonedPlayableAsset;
         // backing field for `speed`
         private _speed;
         /* Excluded from this release type: awake */
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         /* Excluded from this release type: onDestroy */
         /* Excluded from this release type: rebuildGraph */
         /**
          * Play the timeline from the current time.
          * If the timeline is already playing this method does nothing.
          */
         play(): Promise<void>;
         /**
          * Pause the timeline.
          */
         pause(): void;
         /**
          * Stop the timeline.
          */
         stop(): void;
         /**
          * Evaluate the timeline at the current time. This is useful when you want to manually update the timeline e.g. when the timeline is paused and you set `time` to a new value.
          */
         evaluate(): void;
         /**
          * @returns true if the timeline is valid and has tracks
          */
         isValid(): boolean | undefined;
         /** Iterates over all tracks of the timeline
          * @returns all tracks of the timeline
          */
         forEachTrack(): Generator<Tracks.TrackHandler, void, unknown>;
         /**
          * @returns all animation tracks of the timeline
          */
         get animationTracks(): Tracks.AnimationTrackHandler[];
         /**
          * @returns all audio tracks of the timeline
          */
         get audioTracks(): Tracks.AudioTrackHandler[];
         /**
          * @returns all signal tracks of the timeline
          */
         get signalTracks(): Tracks.SignalTrackHandler[];
         /**
          * @returns all marker tracks of the timeline
          */
         get markerTracks(): Tracks.MarkerTrackHandler[];
         /**
          * Iterates over all markers of the timeline, optionally filtering by type
          *
          * @example
          * ```ts
          * // Iterate over all ScrollMarkers in the timeline
          * for (const marker of director.foreachMarker<{selector:string}>("ScrollMarker")) {
          *   console.log(marker.time, marker.selector);
          * }
          * ```
          *
          * @param type if set, only markers with a matching type are yielded; omit or pass null to iterate all markers
          */
         foreachMarker<T extends Record<string, any>>(type?: string | null): Generator<(T & Models.MarkerModel)>;
         private _guidsMap?;
         /* Excluded from this release type: resolveGuids */
         // backing fields for isPlaying/isPaused and internal playback state
         private _isPlaying;
         private _internalUpdateRoutine;
         private _isPaused;
         /** internal, true during the time stop() is being processed */
         private _isStopping;
         // backing fields for time/duration/weight
         private _time;
         private _duration;
         private _weight;
         // track handlers grouped by kind (see the track getters above)
         private readonly _animationTracks;
         private readonly _audioTracks;
         private readonly _signalTracks;
         private readonly _markerTracks;
         private readonly _controlTracks;
         private readonly _customTracks;
         private readonly _tracksArray;
         private get _allTracks();
         /** should be called after evaluate if the director was playing */
         private invokePauseChangedMethodsOnTracks;
         private invokeStateChangedMethodsOnTracks;
         private internalUpdate;
         /**
          * PlayableDirector lifecycle should always call this instead of "evaluate"
          * @param called_by_user If true the evaluation is called by the user (e.g. via evaluate())
          */
         private internalEvaluate;
         private resolveBindings;
         private findRoot;
         private updateTimelineDuration;
         private setupAndCreateTrackHandlers;
         private setAudioTracksAllowPlaying;
         /** Experimental support for overriding timeline animation data (position or rotation) */
         readonly animationCallbackReceivers: ITimelineAnimationOverride[];
         /** Experimental: Receive callbacks for timeline animation. Allows modification of final value */
         registerAnimationCallback(receiver: ITimelineAnimationOverride): void;
         /** Experimental: Unregister callbacks for timeline animation. Allows modification of final value */
         unregisterAnimationCallback(receiver: ITimelineAnimationOverride): void;
     }

     /* Excluded from this release type: PlayAction */

     /**
      * Plays an animation state when this object is clicked.
      * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
      *
      * Assign an {@link Animator} and a `stateName` to play a specific animation state,
      * or assign an {@link Animation} component to play a legacy animation clip.
      *
      * For USDZ export, the component follows animator state transitions automatically, including looping states.
      *
      * @see {@link Animator} for playing animator state machine animations
      * @see {@link Animation} for playing legacy animation clips
      * @see {@link PlayAudioOnClick} to play audio when clicked
      * @see {@link SetActiveOnClick} to toggle visibility when clicked
      * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
      * @summary Plays an animation when clicked
      * @category Everywhere Actions
      * @group Components
      */
     export declare class PlayAnimationOnClick extends Component implements IPointerClickHandler, UsdzBehaviour, UsdzAnimation {
         /** The {@link Animator} component whose state to play when clicked. */
         animator?: Animator;
         /** The name of the animation state to play. Required when using an {@link Animator}. */
         stateName?: string;
         /** Trigger for the action: "tap" (pointer click) or "start" (presumably at experience start — verify). */
         trigger: "tap" | "start";
         /** Legacy {@link Animation} component whose clip to play (alternative to `animator` + `stateName`). */
         animation?: Animation_2;
         private get target();
         onEnable(): void;
         onDisable(): void;
         onDestroy(): void;
         onPointerEnter(): void;
         onPointerExit(): void;
         /** Plays the configured animation when the object is clicked. */
         onPointerClick(args: PointerEventData): void;
         // USDZ export state (Everywhere Actions)
         private selfModel;
         private stateAnimationModel;
         private animationSequence?;
         private animationLoopAfterSequence?;
         private randomOffsetNormalized;
         /** USDZ export: creates the Everywhere-Actions behaviours for this component. */
         createBehaviours(_ext: BehaviorExtension, model: USDObject, _context: USDZExporterContext): void;
         private static animationActions;
         private static rootsWithExclusivePlayback;
         afterSerialize(): void;
         afterCreateDocument(ext: BehaviorExtension, context: USDZExporterContext): void;
         static getActionForSequences(_document: USDDocument, model: Target, animationSequence?: Array<RegisteredAnimationInfo>, animationLoopAfterSequence?: Array<RegisteredAnimationInfo>, randomOffsetNormalized?: number): GroupActionModel;
         static getAndRegisterAnimationSequences(ext: AnimationExtension, target: GameObject, stateName?: string): {
             animationSequence: Array<RegisteredAnimationInfo>;
             animationLoopAfterSequence: Array<RegisteredAnimationInfo>;
             randomTimeOffset: number;
         } | undefined;
         createAnimation(ext: AnimationExtension, model: USDObject, _context: USDZExporterContext): void;
     }

     /**
      * Plays an audio clip when this object is clicked.
      * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
      *
      * Assign a `target` {@link AudioSource} to use its spatial audio settings, or assign a `clip` URL directly.
      * If no `target` is assigned, an {@link AudioSource} will be created automatically on this object.
      *
      * @see {@link AudioSource} for spatial audio settings
      * @see {@link PlayAnimationOnClick} to play animations when clicked
      * @see {@link SetActiveOnClick} to toggle visibility when clicked
      * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
      * @summary Plays an audio clip when clicked
      * @category Everywhere Actions
      * @group Components
      */
     export declare class PlayAudioOnClick extends Component implements IPointerClickHandler, UsdzBehaviour {
         /** The {@link AudioSource} to use for playback. If not set, one will be created automatically on this object. */
         target?: AudioSource;
         /** URL of the audio clip to play. If not set, the clip assigned to `target` is used. */
         clip: string;
         /** If true, clicking again while the audio is playing will stop it. */
         toggleOnClick: boolean;
         /** Trigger for the action: "tap" (pointer click) or "start" (presumably at experience start — verify). */
         trigger: "tap" | "start";
         /** Makes sure an {@link AudioSource} exists, creating one on this object when `target` is unset. */
         ensureAudioSource(): void;
         onEnable(): void;
         onDisable(): void;
         onDestroy(): void;
         onPointerEnter(): void;
         onPointerExit(): void;
         /** Plays (or toggles) the audio clip when the object is clicked. */
         onPointerClick(args: PointerEventData): void;
         /** USDZ export: creates the Everywhere-Actions behaviours for this component. */
         createBehaviours(ext: BehaviorExtension, model: USDObject, _context: USDZExporterContext): void;
     }

     /**
      * [PlayerColor](https://engine.needle.tools/docs/api/PlayerColor) Assigns a unique color for each user in the room to the object it is attached to.
      * The color is generated based on the user's ID.
      *
      * @summary Assigns a unique color to the player object
      * @category Networking
      * @group Components
      */
     export declare class PlayerColor extends Component {
         // whether a player color has already been assigned
         private _didAssignPlayerColor;
         onEnable(): void;
         onDisable(): void;
         private waitForConnection;
         private tryAssignColor;
         /** Assigns the color derived from the given user id to this object. */
         assignUserColor(id: string): void;
         private assignColor;
         /** Deterministic hash code for a string (used to derive the per-user color). */
         static hashCode(str: string): number;
         /** Maps a hash code to a {@link Color}. */
         static colorFromHashCode(hash: number): Color;
     }

     /**
      * Represents a player instance in the networked environment.
      * Handles ownership, synchronization, and lifecycle management of player objects.
      */
     export declare class PlayerState extends Component {
         private static _all;
         /** All PlayerState instances for all players in the scene */
         static get all(): PlayerState[];
         private static _local;
         /** All PlayerState instances that belong to the local player */
         static get local(): PlayerState[];
         /**
          * Gets the PlayerState component for a given object or component
          * @param obj Object3D or Component to find the PlayerState for
          * @returns The PlayerState component if found, otherwise null or undefined
          */
         static getFor(obj: Object3D | Component): PlayerState | null | undefined;
         /**
          * Checks if a given object or component belongs to the local player
          * @param obj Object3D or Component to check
          * @returns True if the object belongs to the local player, false otherwise
          */
         static isLocalPlayer(obj: Object3D | Component): boolean;
         private static _callbacks;
         /**
          * Registers a callback for a specific PlayerState event
          * @param event The event to listen for
          * @param cb Callback function that will be invoked when the event occurs
          * @returns The provided callback function for chaining
          */
         static addEventListener(event: PlayerStateEvent, cb: PlayerStateEventCallback): PlayerStateEventCallback;
         /**
          * Removes a previously registered event callback
          * @param event The event type to remove the callback from
          * @param cb The callback function to remove
          */
         static removeEventListener(event: PlayerStateEvent, cb: PlayerStateEventCallback): void;
         /** Invokes the callbacks registered for an event */
         private static dispatchEvent;
         /** Event triggered when the owner of this PlayerState changes */
         onOwnerChangeEvent: EventList<any>;
         /** Event triggered the first time an owner is assigned to this PlayerState */
         onFirstOwnerChangeEvent: EventList<any>;
         /** Indicates if this PlayerState has an owner assigned */
         hasOwner: boolean;
         /**
          * The connection ID of the player who owns this PlayerState instance
          * @syncField Synchronized across the network
          */
         owner?: string;
         /**
          * When enabled, PlayerState will not destroy itself when the owner is not connected anymore
          */
         dontDestroy: boolean;
         /**
          * Indicates if this PlayerState belongs to the local player
          */
         get isLocalPlayer(): boolean;
         /**
          * Handles owner change events and updates relevant state
          * @param newOwner The new owner's connection ID
          * @param oldOwner The previous owner's connection ID
          */
         private onOwnerChange;
         /* Excluded from this release type: awake */
         /* Excluded from this release type: start */
         /** Tells the server that this client has been destroyed, and the networking message for the instantiate will be removed */
         doDestroy(): void;
         /* Excluded from this release type: onDestroy */
         /**
          * Handler for when a user leaves the networked room
          * @param model Object containing the ID of the user who left
          */
         private onUserLeftRoom;
     }

     /**
      * Enum defining events that can be triggered by PlayerState
      * @see PlayerState.addEventListener
      */
     export declare enum PlayerStateEvent {
         /** Triggered when a PlayerState's owner property changes */
         OwnerChanged = "ownerChanged"
     }

     /** Callback type for {@link PlayerStateEvent} events. The CustomEvent's `detail` carries the {@link PlayerStateOwnerChangedArgs}. */
     export declare type PlayerStateEventCallback = (args: CustomEvent<PlayerStateOwnerChangedArgs>) => void;

     /** Arguments passed when a PlayerState's owner changes (see {@link PlayerStateEvent.OwnerChanged}) */
     export declare interface PlayerStateOwnerChangedArgs {
         /** The PlayerState instance that changed */
         playerState: PlayerState;
         /** Previous owner's connection ID */
         oldValue: string;
         /** New owner's connection ID */
         newValue: string;
     }

     /**
      * This component instantiates an asset for each player that joins a networked room. The asset will be destroyed when the player leaves the room.
      * The asset should have a PlayerState component, and can have other components like SyncedTransform, custom components, etc.
      * @summary Instantiates and manages player assets in a networked room
      * @category Networking
      * @group Components
      */
     export declare class PlayerSync extends Component {
         /**
          * This API is experimental and may change or be removed in the future.
          * Creates a PlayerSync instance at runtime from a given URL and sets it up for networking
          * @param url Path to the asset that should be instantiated for each player
          * @param init Optional initialization parameters for the PlayerSync component (everything except `asset`, which is derived from `url`)
          * @returns Promise resolving to a PlayerSync instance with a guaranteed asset property
          * @example
          * ```typescript
          * const res = await PlayerSync.setupFrom("/assets/demo.glb");
          * addComponent(res.asset?.asset, DragControls);
          * addComponent(res.asset?.asset, SyncedTransform);
          * scene.add(res.gameObject);
          * ```
          */
         static setupFrom(url: string, init?: Omit<ComponentInit<PlayerSync>, "asset">): Promise<PlayerSyncWithAsset>;
         /**
          * When enabled, PlayerSync will automatically load and instantiate the assigned asset when joining a networked room
          */
         autoSync: boolean;
         /**
          * The asset that will be loaded and instantiated when PlayerSync becomes active and joins a networked room
          */
         asset?: AssetReference;
         /**
          * Event invoked when a player instance is spawned with the spawned {@link Object3D} as parameter
          * @serializable
          */
         onPlayerSpawned?: EventList<Object3D>;
         // Holds the instance created for the local player (see getInstance / destroyInstance)
         private _localInstance?;
         /** Component lifecycle hook */
         awake(): void;
         /** Component lifecycle hook — invoked when this component becomes enabled */
         onEnable(): void;
         /** Component lifecycle hook — invoked when this component is disabled */
         onDisable(): void;
         // Invoked when the local client joins a networked room.
         // NOTE(review): presumably kicks off instantiation when `autoSync` is enabled — confirm against implementation.
         private onJoinedRoom;
         /**
          * Gets or creates an instance of the assigned asset for the local player
          * @returns Promise resolving to the instantiated player object or null if creation failed
          */
         getInstance(): Promise<IGameObject | null | undefined>;
         /**
          * Destroys the current player instance and cleans up networking state
          */
         destroyInstance: () => void;
         /**
          * Sets up visibility change listeners to handle player cleanup when browser tab visibility changes
          */
         private watchTabVisible;
     }

     /** Type definition for a PlayerSync instance with a guaranteed (non-optional) `asset` property, as returned by {@link PlayerSync.setupFrom} */
     declare type PlayerSyncWithAsset = PlayerSync & Required<Pick<PlayerSync, "asset">>;

     /**
      * Represents one player's view in a session, associating a user ID with
      * an optional scene {@link Object3D} and the {@link ViewDevice} they use.
      * Managed by {@link PlayerViewManager}.
      */
     export declare class PlayerView {
         /** The connection/user ID this view belongs to */
         readonly userId: string;
         /** The engine context this view lives in */
         readonly context: Context;
         /** The device this player views the scene with */
         viewDevice: ViewDevice;
         /** The scene object currently representing this player's view (if any) */
         get currentObject(): Object3D | undefined | null;
         set currentObject(obj: Object3D | undefined | null);
         /** Whether the player behind this view is currently connected */
         get isConnected(): boolean;
         /** Set when this view has been removed (see {@link PlayerViewManager.removePlayerView}) */
         removed: boolean;
         // Backing field for currentObject
         private _object;
         constructor(userId: string, context: Context);
     }

     /**
      * Keeps track of {@link PlayerView} instances per user ID for a given {@link Context}.
      */
     export declare class PlayerViewManager {
         private readonly context;
         // Map/registry of known player views, keyed by user id
         private readonly playerViews;
         constructor(context: Context);
         /** Registers or updates the view object and device for the given user ID */
         setPlayerView(id: string, obj: Object3D | undefined | null, device: ViewDevice): void;
         /** Returns the view for the given user ID, or undefined when unknown (or when `id` is null/undefined) */
         getPlayerView(id: string | null | undefined): PlayerView | undefined;
         /** Removes the view registered for the given user ID and device */
         removePlayerView(id: string, device: ViewDevice): void;
     }

     /**
      * Options for controlling animation playback via {@link Animation.play}.
      * All options are optional - defaults are sensible for most use cases.
      *
      * @see {@link Animation} for the component that uses these options
      */
     declare type PlayOptions = {
         /**
          * The fade duration in seconds for the action to fade in and other actions to fade out (if exclusive is enabled)
          */
         fadeDuration?: number;
         /**
          * If true, the animation will loop
          */
         loop?: boolean;
         /**
          * If true, will stop all other animations before playing this one
          * @default true
          */
         exclusive?: boolean;
         /**
          * The animation start time in seconds
          */
         startTime?: number;
         /**
          * The animation end time in seconds
          */
         endTime?: number;
         /**
          * If true, the animation will clamp (hold its last frame) when finished instead of resetting
          */
         clampWhenFinished?: boolean;
         /**
          * Animation playback speed. This is a multiplier to the animation speed
          * @default 1
          */
         speed?: number;
         /**
          * Animation playback speed range (min/max). This will override speed
          * @default undefined
          */
         minMaxSpeed?: Vec2_2;
         /**
          * The normalized offset (min/max) to start the animation at. This will override startTime
          * @default undefined
          */
         minMaxOffsetNormalized?: Vec2_2;
     };

     /** DOM-style pointer event names (`pointerdown`, `pointerup`, `pointermove`) used by the input system */
     declare const enum PointerEnumType {
         PointerDown = "pointerdown",
         PointerUp = "pointerup",
         PointerMove = "pointermove"
     }

     /** [PointerEventData](https://engine.needle.tools/docs/api/PointerEventData) This pointer event data object is passed to all event receivers that are currently active.
      * It contains hit information if an object was hovered or clicked.
      * If the event is received in onPointerDown or onPointerMove, you can call `setPointerCapture` to receive onPointerMove events even when the pointer has left the object until you call `releasePointerCapture` or when the pointerUp event happens.
      * You can get additional information about the event or event source via the `event` property (of type `NEPointerEvent`).
      */
     export declare class PointerEventData implements IInputEventArgs {
         /** the original event */
         readonly event: NEPointerEvent;
         /** the index of the used device
          * mouse and touch are always 0, controller is the gamepad index or XRController index
          */
         get deviceIndex(): number;
         /** a combination of the pointerId + button to uniquely identify the exact input (e.g. LeftController:Button0 = 0, RightController:Button1 = 101) */
         get pointerId(): number;
         /**
          * mouse button 0 === LEFT, 1 === MIDDLE, 2 === RIGHT
          * */
         readonly button: number;
         /** Name of the pressed button, if known (mouse or gamepad button name) */
         readonly buttonName: MouseButtonName | GamepadButtonName | undefined;
         /** Pressure reported for this pointer (e.g. pen/touch pressure) */
         get pressure(): number;
         /** @returns `true` when `use()` has been called. Default: false */
         get used(): boolean;
         private _used;
         /** mark this event to be used */
         use(): void;
         private _propagationStopped;
         /** `true` after stopPropagation/stopImmediatePropagation has been called */
         get propagationStopped(): boolean;
         /** Call this method to stop immediate propagation on the `event` object.
          * WARNING: this is currently equivalent to stopImmediatePropagation
          */
         stopPropagation(): void;
         /** Call this method to stop immediate propagation on the `event` object.
          */
         stopImmediatePropagation(): void;
         /**@ignore internal flag, pointer captured (we don't want to see it in intellisense) */
         z__pointer_ctured: boolean;
         /** Call this method in `onPointerDown` or `onPointerMove` to receive onPointerMove events for this pointerId even when the pointer has left the object until you call `releasePointerCapture` or when the pointerUp event happens
          */
         setPointerCapture(): void;
         /**@ignore internal flag, pointer capture released */
         z__pointer_cture_rleased: boolean;
         /** call this method in `onPointerDown` or `onPointerMove` to stop receiving onPointerMove events */
         releasePointerCapture(): void;
         /** Who initiated this event */
         inputSource: Input | any;
         /** Returns the input target ray mode e.g. "screen" for 2D mouse and touch events */
         get mode(): XRTargetRayMode | "transient-pointer";
         /** The object this event hit or interacted with */
         object: Object3D;
         /** The world position of this event */
         point?: Vector3;
         /** The object-space normal of this event */
         normal?: Vector3;
         /** The three.js face that was hit (if any) */
         face?: Face | null;
         /** The distance of the hit point from the origin */
         distance?: number;
         /** The instance ID of an object hit by a raycast (if an instanced object was hit) */
         instanceId?: number;
         /** The three intersection */
         intersection?: Intersection;
         // Event-phase flags. They may be `undefined` when not applicable to this event.
         // NOTE(review): exact semantics are assigned by the Input system — confirm against implementation.
         isDown: boolean | undefined;
         isUp: boolean | undefined;
         isPressed: boolean | undefined;
         isClick: boolean | undefined;
         isDoubleClick: boolean | undefined;
         private input;
         constructor(input: Input, event: NEPointerEvent);
         /** Creates a copy of this event data */
         clone(): PointerEventData;
         /**@deprecated use use() */
         Use(): void;
         /**@deprecated use stopPropagation() */
         StopPropagation(): void;
     }

     /** Listener signature for raw {@link NEPointerEvent}s */
     declare type PointerEventListener = (evt: NEPointerEvent) => void;

     /** String-literal union of {@link PointerEnumType} values, e.g. `pointerdown` */
     declare type PointerEventNames = EnumToPrimitiveUnion<PointerEnumType>;

     /**
      * Types of pointer input devices supported by Needle Engine.
      */
     export declare const enum PointerType {
         /** Mouse or trackpad input */
         Mouse = "mouse",
         /** Touch screen input */
         Touch = "touch",
         /** XR controller input (e.g., VR controllers) */
         Controller = "controller",
         /** XR hand tracking input */
         Hand = "hand"
     }

     /** String-literal union of {@link PointerType} values, e.g. `"mouse" | "touch" | ...` */
     export declare type PointerTypeNames = EnumToPrimitiveUnion<PointerType>;

     /** Generic function signature for postfix hooks. NOTE(review): presumably the counterpart to {@link Prefix} (run after the original method) — confirm against implementation. */
     export declare type Postfix = (...args: any[]) => any;

     /**
      * Postprocesses the material of an object loaded by {@link FBXLoader}.
      * It will apply necessary color conversions, remap shininess to roughness, and turn everything into {@link MeshStandardMaterial} on the object.
      * This ensures consistent lighting and shading, including environment effects.
      * @param obj The mesh whose material(s) should be converted
      * @param material The material or array of materials to process
      * @param index Optional index of `material` within `array` (when iterating multiple materials)
      * @param array Optional array the material belongs to
      * @returns boolean — NOTE(review): meaning of the return value (e.g. "was converted") is not visible here; confirm against implementation
      */
     export declare function postprocessFBXMaterials(obj: Mesh, material: Material | Material[], index?: number, array?: Material[]): boolean;

     /**
      * PostProcessingEffect is a base class for post processing effects that can be applied to the scene.
      * To create a custom post processing effect, extend this class and override the `onCreateEffect` method and call `registerCustomEffectType` to make it available in the editor.
      * @example
      * ```typescript
      * import { EdgeDetectionMode, SMAAEffect, SMAAPreset } from "postprocessing";
      * export class Antialiasing extends PostProcessingEffect {
      *     get typeName(): string {
      *         return "Antialiasing";
      *     }
      *     @serializable(VolumeParameter)
      *     preset: VolumeParameter = new VolumeParameter();
      *     onCreateEffect(): EffectProviderResult {
      *         const effect = new SMAAEffect({
      *             preset: SMAAPreset.HIGH,
      *             edgeDetectionMode: EdgeDetectionMode.DEPTH
      *         });
      *         this.preset.onValueChanged = (newValue) => {
      *             effect.applyPreset(newValue);
      *         };
      *         return effect;
      *     }
      * }
      * registerCustomEffectType("Antialiasing", Antialiasing)
      * ```
      *
      * @category Effects
      * @group Components
      */
     export declare abstract class PostProcessingEffect extends Component implements IEffectProvider, IEditorModification {
         /** Always identifies this component as a post processing effect */
         get isPostProcessingEffect(): boolean;
         /**
          * The order of this effect. The higher the order the later the effect will be applied in the post processing stack.
          * This can be used to control the order of effects when multiple effects are applied.
          * It is recommended to use the PostProcessingEffectOrder constant to order your custom effects before or after built-in effects.
          * @default `undefined` (no specific order set, will be applied in the order of registration)
          *
          * @example
          * ```typescript
          * import { PostProcessingEffectOrder } from "@needle-tools/engine"
          *
          * export class MyCustomEffect extends PostProcessingEffect {
          *    order = PostProcessingEffectOrder.Bloom + 1; // render after bloom
          *    // This will ensure that the effect is applied after the bloom effect in the post processing stack.
          *    // ... the rest of your effect code
          * }
          * ```
          */
         order: number | undefined;
         constructor(params?: any);
         /** Unique type name of this effect (used for registration/serialization) */
         abstract get typeName(): string;
         /**
          * Whether the effect is active or not. Prefer using `enabled` instead.
          * @deprecated
          */
         active: boolean;
         // Manager this effect is registered with while enabled
         private _manager;
         onEnable(): void;
         onDisable(): void;
         /** Invoked when the effect gets enabled; receives the post processing manager (if any) */
         protected onEffectEnabled(manager?: IPostProcessingManager): void;
         /** override to initialize bindings on parameters */
         init(): void;
         /** previously created effect (if any) */
         private _result;
         private _postprocessingContext;
         /** The context this effect was last applied with (null before the first apply) */
         protected get postprocessingContext(): PostProcessingEffectContext | null;
         /** Apply post settings. Make sure to call super.apply() if you also create an effect */
         apply(ctx: PostProcessingEffectContext): void | undefined | EffectProviderResult;
         /** Reset previously set values (e.g. when adjusting settings on the renderer like Tonemapping) */
         unapply(): void;
         /** implement to create a effect once to be cached in the base class. Make sure super.apply() is called if you also override apply */
         onCreateEffect?(): EffectProviderResult | undefined;
         /** Dispose resources created by this effect */
         dispose(): void;
         private initParameters;
         /** Invoked for editor-driven modifications (live editing support) */
         onEditorModification(modification: EditorModification): void | boolean | undefined;
     }

     /** Context passed to {@link PostProcessingEffect.apply}: the owning handler and all effect components being applied */
     export declare type PostProcessingEffectContext = {
         handler: PostProcessingHandler;
         components: PostProcessingEffect[];
     };

     /**
      * Default order for post-processing effects. This can be used to sort effects by their rendering order when creating custom effects.
      * E.g. in your custom effect, you can set `order: PostProcessingEffectOrder.Bloom + 1;` to ensure it gets rendered after the bloom effect.
      * OR `order: PostProcessingEffectOrder.Bloom - 1;` to ensure it gets rendered before the bloom effect.
      * @example
      * ```typescript
      * import { PostProcessingEffectOrder } from "@needle-tools/engine"
      *
      * export class MyCustomEffect extends PostProcessingEffect {
      *    order = PostProcessingEffectOrder.Bloom + 1; // render after bloom
      *
      *    // ... your effect code
      * }
      * ```
      */
     export declare const PostProcessingEffectOrder: {
         /** Used to render effects at the start of the post-processing chain */
         AT_START: number;
         NormalPass: number;
         DepthDownsamplingPass: number;
         SSAO: number;
         SMAA: number;
         TiltShift: number;
         DepthOfField: number;
         ChromaticAberration: number;
         Bloom: number;
         Vignette: number;
         Pixelation: number;
         ToneMapping: number;
         HueSaturation: number;
         BrightnessContrast: number;
         Sharpening: number;
         /** Used to render effects at the end of the post-processing chain, e.g. for final adjustments or overlays. */
         AT_END: number;
     };

     /**
      * [PostProcessingHandler](https://engine.needle.tools/docs/api/PostProcessingHandler) Is responsible for applying post processing effects to the scene. It is internally used by the {@link Volume} component
      */
     export declare class PostProcessingHandler {
         private _composer;
         private _lastVolumeComponents?;
         private readonly _effects;
         /**
          * Returns true if a specific effect is currently active in the post processing stack.
          */
         getEffectIsActive(effect: Effect): boolean;
         /** Whether this handler currently has effects applied */
         get isActive(): boolean;
         /** The underlying postprocessing {@link EffectComposer}, if any */
         get composer(): EffectComposer | null;
         private _isActive;
         private readonly context;
         constructor(context: Context);
         /** Builds and applies the post processing stack from the given effect components */
         apply(components: PostProcessingEffect[]): Promise<void>;
         /** Removes the applied effects; pass `dispose` to also release created resources */
         unapply(dispose?: boolean): void;
         dispose(): void;
         private onApply;
         private _anyPassHasDepth;
         private _anyPassHasNormal;
         private _hasSmaaEffect;
         /** Whether any active pass uses depth information */
         get anyPassHasDepth(): boolean;
         /** Whether any active pass uses normal information */
         get anyPassHasNormal(): boolean;
         /** Whether an SMAA effect is part of the active stack */
         get hasSmaaEffect(): boolean;
         private _customInputBuffer;
         private _customInputBufferId;
         private _multisampling;
         /** MSAA sample count used by the composer */
         set multisampling(value: number);
         get multisampling(): number;
         private static readonly _dprStep;
         private static readonly _dprMinDelay;
         private _adaptiveDPR_Enabled;
         private _adaptiveDPR_LastChangeTime;
         private _adaptiveDPR_LastDecreaseTime;
         private _adaptiveDPR_OverrideActive;
         /** Enable or disable adaptive device pixel ratio scaling.
          * When enabled, the renderer pixel ratio will be gradually reduced when FPS is low
          * and restored when FPS is good. This is useful for postprocessing-heavy scenes
          * where full retina resolution is too expensive for integrated GPUs.
          */
         set adaptivePixelRatio(value: boolean);
         get adaptivePixelRatio(): boolean;
         /** Called from Volume.onBeforeRender to adapt pixel ratio based on frame rate */
         updateAdaptivePixelRatio(): void;
         /** Build composer passes */
         private applyEffects;
         /** Should be called before `composer.addPass()` to create an effect pass with all previously collected effects that can be merged up to that point */
         private createPassForMergeableEffects;
         private handleDevicePixelRatio;
         private _menuEntry;
         private _passIndices;
         private _onCreateEffectsDebug;
     }

     /** Resolves a prefab {@link Object3D} for the given guid; resolves to `null` when it cannot be provided */
     export declare type PrefabProviderCallback = (guid: string) => Promise<Object3D | null>;

     /** Generic function signature for prefix hooks (see the {@link prefix} attribute) */
     export declare type Prefix = (...args: any[]) => any;

     /** Experimental attribute.
      * Use to hook into another type's methods and run before the other methods run (similar to Harmony prefixes).
      * Return false from the decorated method to prevent the original method from running.
      */
     export declare const prefix: <T>(type: Constructor<T>) => (target: IComponent | any, _propertyKey: string | {
         name: string;
     }, _PropertyDescriptor: PropertyDescriptor) => void;

     /** Base component for preliminary actions, referenced by {@link PreliminaryTrigger}. NOTE(review): wider purpose not visible here — confirm against implementation. */
     export declare class PreliminaryAction extends Component {
         /** Identifier of this action's type (if any) */
         getType(): string | void;
         /** Optional target object this action operates on */
         target?: Object3D;
         /** Duration of this action in seconds (if any) */
         getDuration(): number | void;
     }

     /** Component that references a {@link PreliminaryAction} to trigger. NOTE(review): trigger conditions not visible here — confirm against implementation. */
     export declare class PreliminaryTrigger extends Component {
         /** The action to trigger */
         target?: PreliminaryAction;
     }

     /** Helpers for showing placeholder previews for content identified by a guid */
     export declare namespace PreviewHelper {
         /** Optional placement/size information for a preview */
         export type PreviewInfo = {
             position?: Vector3Like | Vec3;
             size?: Vector3Like | Vec3;
         };
         /**
          * Adds a preview object under `parent` for the content identified by `guid`.
          * @returns The created preview object and an `onProgress` callback to feed download progress into
          */
         export function addPreview(params: {
             parent: Object3D;
             guid: string;
         } & PreviewInfo): {
             object: Object3D;
             onProgress: (downloadProgress: number) => void;
         };
         /** Removes the preview previously added for `guid` */
         export function removePreview(guid: string): void;
     }

     /** Built-in primitive geometry types */
     export declare enum PrimitiveType {
         /**
          * A quad with a width and height of 1 facing the positive Z axis
          */
         Quad = 0,
         /**
          * A cube with a width, height, and depth of 1
          */
         Cube = 1,
         /** A sphere primitive */
         Sphere = 2,
         /** A cylinder primitive */
         Cylinder = 3,
         /** A cube primitive with rounded edges */
         RoundedCube = 10
     }

     /** Key names of {@link PrimitiveType}, e.g. `"Quad" | "Cube" | ...` */
     export declare type PrimitiveTypeNames = keyof typeof PrimitiveType;

     /** Progress reporting utility.
      * See `Progress.start` for usage examples.
      */
     export declare class Progress {
         /** Start a new progress reporting scope. Make sure to close it with Progress.end.
          * @param scope The scope to start progress reporting for.
          * @param options Parent scope, onProgress callback and logging. If only a string is provided, it's used as parentScope.
          * @example
          * // Manual usage:
          * Progress.start("export-usdz", { onProgress: (progress) => console.log("Progress: " + progress) });
          * Progress.report("export-usdz", { message: "Exporting object 1", currentStep: 1, totalSteps: 3 });
          * Progress.report("export-usdz", { message: "Exporting object 2", currentStep: 2, totalSteps: 3 });
          * Progress.report("export-usdz", { message: "Exporting object 3", currentStep: 3, totalSteps: 3 });
          * Progress.end("export-usdz");
          *
          * // Auto step usage:
          * Progress.start("export-usdz", { onProgress: (progress) => console.log("Progress: " + progress) });
          * Progress.report("export-usdz", { message: "Exporting objects", autoStep: true, totalSteps: 3 });
          * Progress.report("export-usdz", "Exporting object 1");
          * Progress.report("export-usdz", "Exporting object 2");
          * Progress.report("export-usdz", "Exporting object 3");
          * Progress.end("export-usdz");
          *
          * // Auto step with weights:
          * Progress.start("export-usdz", { onProgress: (progress) => console.log("Progress: " + progress) });
          * Progress.report("export-usdz", { message: "Exporting objects", autoStep: true, totalSteps: 10 });
          * Progress.report("export-usdz", { message: "Exporting object 1", autoStep: 8 }); // will advance to 80% progress
          * Progress.report("export-usdz", "Exporting object 2"); // 90%
          * Progress.report("export-usdz", "Exporting object 3"); // 100%
          *
          * // Child scopes:
          * Progress.start("export-usdz", { onProgress: (progress) => console.log("Progress: " + progress) });
          * Progress.report("export-usdz", { message: "Overall export", autoStep: true, totalSteps: 2 });
          * Progress.start("export-usdz-objects", "export-usdz");
          * Progress.report("export-usdz-objects", { message: "Exporting objects", autoStep: true, totalSteps: 3 });
          * Progress.report("export-usdz-objects", "Exporting object 1");
          * Progress.report("export-usdz-objects", "Exporting object 2");
          * Progress.report("export-usdz-objects", "Exporting object 3");
          * Progress.end("export-usdz-objects");
          * Progress.report("export-usdz", "Exporting materials");
          * Progress.end("export-usdz");
          *
          * // Enable console logging:
          * Progress.start("export-usdz", { logTimings: true });
          */
         static start(scope: string, options?: ProgressStartOptions | string): void;
         /** Report progress for a formerly started scope.
          * @param scope The scope to report progress for.
          * @param options Options for the progress report. If a string is passed, it will be used as the message.
          * @example
          * // auto step and show a message
          * Progress.report("export-usdz", "Exporting object 1");
          * // same as above
          * Progress.report("export-usdz", { message: "Exporting object 1", autoStep: true });
          * // show the current step and total steps and implicitly calculate progress as 10%
          * Progress.report("export-usdz", { currentStep: 1, totalSteps: 10 });
          * // enable auto step mode, following calls that have autoStep true will increase currentStep automatically.
          * Progress.report("export-usdz", { totalSteps: 20, autoStep: true });
          * // show the progress as 50%
          * Progress.report("export-usdz", { progress: 0.5 });
          * // give this step a weight of 20, which changes how progress is calculated. Useful for steps that take longer and/or have child scopes.
          * Progress.report("export-usdz", { message: "Long process", autoStep: 20 });
          * // show the current step and total steps and implicitly calculate progress as 10%
          * Progress.report("export-usdz", { currentStep: 1, totalSteps: 10 });
          */
         static report(scope: string, options?: ProgressOptions | string): void;
         /** End a formerly started scope. This will also report the progress as 100%.
          * @remarks Will warn if any child scope is still running (progress < 1).
          */
         static end(scope: string): void;
     }

     /** Callback for {@link AssetReference} download progress, receiving the asset and the raw {@link ProgressEvent} */
     export declare type ProgressCallback = (asset: AssetReference, prog: ProgressEvent) => void;

     /** Options for a single {@link Progress.report} call */
     declare type ProgressOptions = {
         /** Message to display for this step */
         message?: string;
         /** Explicit progress in 0..1 range (overrides step-based calculation) */
         progress?: number;
         /** `true` to auto-advance the current step; a number to advance with that weight */
         autoStep?: boolean | number;
         /** The current step index */
         currentStep?: number;
         /** Total number of steps in this scope */
         totalSteps?: number;
     };

     /** Options for {@link Progress.start} */
     declare type ProgressStartOptions = {
         /** This progress scope will be nested below parentScope */
         parentScope?: string;
         /** Callback with progress in 0..1 range. */
         onProgress?: (progress: number) => void;
         /** Log timings using console.time() and console.timeLog(). */
         logTimings?: boolean;
     };

     /** Can be used to simplify Promise error handling and if errors are acceptable.
      * Promise.all will just fail if any of the provided promises fails and not return or cancel pending promises or partial results
      * Using Promise.allSettled (or this method) instead will return a result for each promise and not automatically fail if any of the promises fails.
      * Instead it will return a promise containing information if any of the promises failed
      * and the actual results will be available as `results` array
      * @param promise The array of promises to settle (note: parameter name is singular but an array is expected)
      * @returns Object with `anyFailed` and a `results` array containing either the resolved value or a {@link PromiseErrorResult} per input promise
      **/
     export declare function PromiseAllWithErrors<T>(promise: Promise<T>[]): Promise<{
         anyFailed: boolean;
         results: Array<T | PromiseErrorResult>;
     }>;

     /** Used by `PromiseAllWithErrors` to represent a rejected promise in the results array */
     export declare class PromiseErrorResult {
         /** The rejection reason */
         readonly reason: string;
         constructor(reason: string);
     }

     /**
      * Represents a single material property override with optional texture transformation
      * @template T The type of the property value
      */
     declare interface PropertyBlockOverride<T extends MaterialPropertyType = MaterialPropertyType> {
         /** The name of the material property to override (e.g., "color", "map", "roughness") */
         name: string;
         /** The value to set for this property */
         value: T;
         /** Optional texture coordinate transformation (only used when value is a Texture) */
         textureTransform?: TextureTransform;
     }

     /** Public key constant exposed by the engine. NOTE(review): purpose and value are not visible here — confirm against implementation. */
     export declare const PUBLIC_KEY: string;

     /** Adds an entry to the browser history. Internally uses `window.history.pushState`
      * @param title Title for the history entry
      * @param urlParams Query parameters for the new URL
      * @param state Optional state object stored with the history entry
      */
     export declare function pushState(title: string, urlParams: URLSearchParams, state?: any): void;

     export { QParticle }

     export { QParticleBehaviour }

     export { QTrailParticle }

     /** Plain quaternion with x, y, z, w components */
     declare type Quat = {
         x: number;
         y: number;
         z: number;
         w: number;
     };

     /** Generates a random number between min and max
      * @param min Lower bound
      * @param max Upper bound
      * @deprecated use Mathf.random(min, max)
      */
     export declare function randomNumber(min: number, max: number): number;

     /**
      * Rapier physics engine implementation for Needle Engine.
      *
      * Rapier is a fast, cross-platform physics engine that provides realistic physics simulation
      * for rigidbodies, colliders, joints, and collision detection. It runs entirely in WebAssembly
      * for high performance.
      *
      * **Features:**
      * - Rigidbody simulation (dynamic, kinematic, static)
      * - Multiple collider shapes (box, sphere, capsule, mesh, convex hull)
      * - Raycasting and shape overlap queries against physics colliders
      * - Collision and trigger events
      * - Joints (fixed, hinge, etc.)
      * - Continuous collision detection (CCD)
      * - Physics materials (friction, bounciness)
      *
      * **Access:**
      * The Rapier physics engine is accessible via `this.context.physics.engine` from any component.
      * Rapier is automatically initialized when physics components are used.
      *
      * **Using the Rapier Module Directly:**
      * Rapier is lazy-loaded for performance. You can access the raw Rapier module via {@link MODULES.RAPIER_PHYSICS}.
      * Use `MODULES.RAPIER_PHYSICS.load()` to load the module, or `MODULES.RAPIER_PHYSICS.ready()` to wait for it without triggering a load.
      * Once loaded, the module is available at `MODULES.RAPIER_PHYSICS.MODULE`.
      *
      * **Note:**
      * This is the low-level physics engine implementation. For general raycasting (against all scene objects),
      * use {@link Physics.raycast} instead. Use this class for physics-specific operations like applying forces,
      * raycasting against colliders only, or accessing the Rapier world directly.
      *
      * @example Applying forces to a rigidbody
      * ```ts
      * const rb = this.gameObject.getComponent(Rigidbody);
      * if (rb) {
      *   this.context.physics.engine?.addForce(rb, { x: 0, y: 10, z: 0 }, true);
      * }
      * ```
      * @example Raycasting against physics colliders only
      * ```ts
      * const origin = { x: 0, y: 5, z: 0 };
      * const direction = { x: 0, y: -1, z: 0 };
      * const hit = this.context.physics.engine?.raycast(origin, direction);
      * if (hit) {
      *   console.log("Hit collider:", hit.collider.name);
      * }
      * ```
      * @example Accessing Rapier world directly
      * ```ts
      * const rapierWorld = this.context.physics.engine?.world;
      * if (rapierWorld) {
      *   // Direct access to Rapier API
      *   console.log("Gravity:", rapierWorld.gravity);
      * }
      * ```
      * @example Using the Rapier module directly
      * ```ts
      * import { MODULES } from "@needle-tools/engine";
      *
      * // Load the Rapier module
      * const RAPIER = await MODULES.RAPIER_PHYSICS.load();
      *
      * // Now you can use Rapier types and create custom physics objects
      * const rigidBodyDesc = RAPIER.RigidBodyDesc.dynamic()
      *   .setTranslation(0, 10, 0);
      *
      * // Or access the already-loaded module
      * if (MODULES.RAPIER_PHYSICS.MODULE) {
      *   const colliderDesc = MODULES.RAPIER_PHYSICS.MODULE.ColliderDesc.ball(1.0);
      * }
      * ```
      * @see {@link Rigidbody} for physics simulation component
      * @see {@link Collider} for collision detection component
      * @see {@link Physics} for general raycasting and physics utilities
      * @see {@link MODULES.RAPIER_PHYSICS} for direct access to the Rapier module
      * @link https://rapier.rs/docs/ for Rapier documentation
      * @link https://engine.needle.tools/docs/reference/components.html#physics
      * @link https://engine.needle.tools/docs/how-to-guides/scripting/use-physics.html
      */
      export declare class RapierPhysics implements IPhysicsEngine {
          /** When enabled, collider shapes are visualized for debugging. */
          debugRenderColliders: boolean;
          /** When enabled, raycasts are visualized for debugging. */
          debugRenderRaycasts: boolean;
          /** Removes the physics body associated with the given component from the physics world. */
          removeBody(obj: IComponent): void;
          /** Enables or disables a collider in the physics world. */
          setColliderEnabled(collider: ICollider, enabled: boolean): boolean;
          /** Syncs the physics body's translation and/or rotation from the component. */
          updateBody(comp: ICollider | IRigidbody, translation: boolean, rotation: boolean): void;
          /** Re-applies the component's physics properties to the underlying physics body. */
          updateProperties(obj: IRigidbody | ICollider): void;
          /** Applies a continuous force to the rigidbody. Pass `wakeup: true` to wake a sleeping body. */
          addForce(rigidbody: IRigidbody, force: Vec3, wakeup: boolean): void;
          /** Applies an instantaneous impulse to the rigidbody. Pass `wakeup: true` to wake a sleeping body. */
          addImpulse(rigidbody: IRigidbody, force: Vec3, wakeup: boolean): void;
          getLinearVelocity(comp: IRigidbody | ICollider): Vec3 | null;
          getAngularVelocity(rb: IRigidbody): Vec3 | null;
          resetForces(rb: IRigidbody, wakeup: boolean): void;
          resetTorques(rb: IRigidbody, wakeup: boolean): void;
          applyImpulse(rb: IRigidbody, vec: Vec3, wakeup: boolean): void;
          /** Wakes up a sleeping rigidbody. */
          wakeup(rb: IRigidbody): void;
          /** @returns whether the rigidbody is currently sleeping (undefined if unknown). */
          isSleeping(rb: IRigidbody): boolean | undefined;
          setAngularVelocity(rb: IRigidbody, vec: Vec3, wakeup: boolean): void;
          setLinearVelocity(rb: IRigidbody, vec: Vec3, wakeup: boolean): void;
          private readonly context?;
          private _initializePromise?;
          private _isInitialized;
          constructor(ctx: IContext);
          /** True once the physics engine has been initialized. */
          get isInitialized(): boolean;
          /** Initializes the physics engine. @returns a promise resolving to true on success. */
          initialize(): Promise<boolean>;
          private internalInitialization;
          /** Checks if the physics engine has been initialized and the call can be made */
          private validate;
          private rapierRay;
          private raycastVectorsBuffer;
          raycast(origin?: Vec2 | Vec3, direction?: Vec3, options?: {
              maxDistance?: number;
              /** True if you want to also hit objects when the raycast starts from inside a collider */
              solid?: boolean;
              queryFilterFlags?: QueryFilterFlags;
              filterGroups?: number;
              /** Return false to ignore this collider */
              filterPredicate?: (c: ICollider) => boolean;
              /** When enabled the hit object's layer will be tested. If layer 2 is enabled the object will be ignored (Layer 2 == IgnoreRaycast)
               * If not set the raycast will ignore objects in the IgnoreRaycast layer (default: true)
               * @default undefined
               */
              useIgnoreRaycastLayer?: boolean;
          }): null | {
              point: Vector3;
              collider: ICollider;
          };
          /** Same as {@link raycast} but the result additionally contains the surface normal at the hit point. */
          raycastAndGetNormal(origin?: Vec2 | Vec3, direction?: Vec3, options?: {
              maxDistance?: number;
              /** True if you want to also hit objects when the raycast starts from inside a collider */
              solid?: boolean;
              queryFilterFlags?: QueryFilterFlags;
              filterGroups?: number;
              /** Return false to ignore this collider */
              filterPredicate?: (c: ICollider) => boolean;
              /** When enabled the hit object's layer will be tested. If layer 2 is enabled the object will be ignored (Layer 2 == IgnoreRaycast)
               * If not set the raycast will ignore objects in the IgnoreRaycast layer (default: true)
               * @default undefined
               */
              useIgnoreRaycastLayer?: boolean;
          }): null | {
              point: Vector3;
              normal: Vector3;
              collider: ICollider;
          };
          private getPhysicsRay;
          private rapierSphere;
          private rapierBox;
          private readonly rapierColliderArray;
          private readonly rapierIdentityRotation;
          private readonly rapierForwardVector;
          /** Precise sphere overlap detection using rapier against colliders
           * @param point center of the sphere in worldspace
           * @param radius radius of the sphere
           * @returns array of colliders that overlap with the sphere. Note: they currently only contain the collider and the gameobject
           */
          sphereOverlap(point: Vector3, radius: number): Array<ShapeOverlapResult>;
          /** box overlap detection using rapier against colliders
           * @param point center of the box in worldspace
           * @param size size of the box
           * @param rotation quaternion representation of the rotation in world space
           * @returns array of colliders that overlap with the box. Note: they currently only contain the collider and the gameobject
           */
          boxOverlap(point: Vector3, size: Vector3, rotation?: Vector4Like | null): Array<ShapeOverlapResult>;
          private shapeOverlap;
          /** Whether the physics engine is enabled. */
          enabled: boolean;
          /** Get access to the rapier world */
          get world(): World | undefined;
          private _tempPosition;
          private _tempQuaternion;
          private _tempScale;
          private _tempMatrix;
          private static _didLoadPhysicsEngine;
          private _isUpdatingPhysicsWorld;
          /** True while the physics world is being updated. */
          get isUpdating(): boolean;
          private _world?;
          private _hasCreatedWorld;
          private eventQueue?;
          private collisionHandler?;
          private objects;
          private bodies;
          private _meshCache;
          private _gravity;
          /** The gravity vector applied to the physics world. */
          get gravity(): Vec3;
          set gravity(value: Vec3);
          /** Clears internally cached physics data. */
          clearCaches(): void;
          addBoxCollider(collider: ICollider, size: Vector3): Promise<void>;
          addSphereCollider(collider: ICollider): Promise<void>;
          addCapsuleCollider(collider: ICollider, height: number, radius: number): Promise<void>;
          addMeshCollider(collider: ICollider, mesh: Mesh, convex: boolean, extraScale?: Vector3): Promise<void>;
          updatePhysicsMaterial(col: ICollider): void;
          /** Get the rapier body for a Needle component */
          getBody(obj: ICollider | IRigidbody): null | any;
          /** Get the Needle Engine component for a rapier object */
          getComponent(rapierObject: object): IComponent | null;
          /**
           * Creates a collider in the physics world.
           *
           * @param collider - The collider component.
           * @param desc - The collider description.
           * @returns The created collider.
           *
           * @throws Will throw an error if the physics world is not initialized. Make sure to call `initialize()` before creating colliders.
           */
          createCollider(collider: ICollider, desc: ColliderDesc): Collider_2 | null;
          /**
           * Updates the collision groups of a collider.
           *
           * @param collider - The collider to update.
           */
          private updateColliderCollisionGroups;
          private getRigidbody;
          private internal_getRigidbody;
          private internalUpdateColliderProperties;
          private internalUpdateRigidbodyProperties;
          private lines?;
          private disabledLines?;
          /** Advances the physics simulation by one step. */
          step(dt?: number): void;
          /** Called after {@link step}. */
          postStep(): void;
          private updateDebugRendering;
          /** sync rendered objects with physics world (except for colliders without rigidbody) */
          private syncObjects;
          private syncPhysicsBody;
          private readonly _tempCenterPos;
          private readonly _tempCenterVec;
          private readonly _tempCenterQuaternion;
          private tryApplyCenter;
          private static _matricesBuffer;
          private getRigidbodyRelativeMatrix;
          private static centerConnectionPos;
          private static centerConnectionRot;
          /** Creates a fixed joint that rigidly connects the two rigidbodies. */
          addFixedJoint(body1: IRigidbody, body2: IRigidbody): void;
          /** The joint prevents any relative movement between two rigid-bodies, except for relative rotations along one axis. This is typically used to simulate wheels, fans, etc. They are characterized by one local anchor as well as one local axis on each rigid-body. */
          addHingeJoint(body1: IRigidbody, body2: IRigidbody, anchor: {
              x: number;
              y: number;
              z: number;
          }, axis: {
              x: number;
              y: number;
              z: number;
          }): void;
          private calculateJointRelativeMatrices;
      }

      /**
       * @category User Interface
       * @group Components
       */
      export declare class RawImage extends MaskableGraphic {
          /** The texture displayed by this graphic. */
          get mainTexture(): Texture | undefined;
          set mainTexture(texture: Texture | undefined);
          private _mainTexture?;
          protected onAfterCreated(): void;
      }

     /**
       * [Raycaster](https://engine.needle.tools/docs/api/Raycaster) Base class for raycasters that detect pointer interactions.
      * Derive from this class to create custom raycasting logic.
      *
      * **Built-in raycasters:**
      * - {@link ObjectRaycaster} - Raycasts against 3D objects
      * - {@link GraphicRaycaster} - Raycasts against UI elements
      * - {@link SpatialGrabRaycaster} - Sphere overlap for XR grab
      *
      * **Important:** If you override `awake`, `onEnable`, or `onDisable`,
      * call the base class methods to ensure proper registration with {@link EventSystem}.
      *
      * @category Interactivity
      * @group Components
      * @see {@link EventSystem} for the event dispatch system
      */
      declare abstract class Raycaster_2 extends Component {
          /** Call the base method when overriding — required for {@link EventSystem} registration. */
          awake(): void;
          /** Call the base method when overriding — required for {@link EventSystem} registration. */
          onEnable(): void;
          /** Call the base method when overriding — required for {@link EventSystem} registration. */
          onDisable(): void;
          /** Performs the raycast. Implement in derived classes to provide custom raycasting logic. */
          abstract performRaycast(_opts?: IRaycastOptions | RaycastOptions | null): Intersection[] | null;
      }

      /** Options controlling how a raycast is performed (ray source, targets, distance limits, layer filtering). */
      export declare class RaycastOptions implements IRaycastOptions {
          /** Layer mask with all 32 layers enabled (0xFFFFFFFF). */
          static readonly AllLayers = 4294967295;
          /** Explicit ray to cast. */
          ray?: Ray;
          /** Camera used for the raycast. */
          cam?: Camera_2 | null;
          /** Screen point used for the raycast. */
          screenPoint?: Vector2;
          /** three.js raycaster instance to use. */
          raycaster?: Raycaster;
          /** Array to collect results into. */
          results?: Array<Intersection>;
          /** Objects to raycast against. */
          targets?: Array<Object3D>;
          /** Whether to also test children of the targets. */
          recursive?: boolean;
          minDistance?: number;
          maxDistance?: number;
          /** Threshold used when raycasting against lines. */
          lineThreshold?: number;
          layerMask?: Layers | number;
          /** Objects to exclude from the raycast. */
          ignore?: Object3D[];
          /** Callback to filter objects during the raycast (see {@link RaycastTestObjectCallback}). */
          testObject?: RaycastTestObjectCallback;
          useAcceleratedRaycast?: boolean | undefined;
          allowSlowRaycastFallback?: boolean;
          /** Sets the screen point from the given offset values. */
          screenPointFromOffset(ox: number, oy: number): void;
          /** sets one layer for raycasting (e.g. layer 4, only objects on layer 4 will then be hit) */
          setLayer(layer: number): void;
          /** sets the layer.mask value directly, use setLayer if you want to set e.g. an individual layer only active. See https://threejs.org/docs/#api/en/core/Layers for more information about layers */
          setMask(mask: number): void;
      }

      /**
       * Result of a physics raycast: `null` when nothing was hit, otherwise the hit point and collider.
       * `normal` is only present when the raycast variant computes it (see {@link RapierPhysics.raycastAndGetNormal}).
       */
      export declare type RaycastResult = null | {
          point: Vector3;
          collider: ICollider;
          normal?: Vector3;
      };

      /** Callback used to filter objects during raycasting; see {@link RaycastTestObjectReturnType} for the meaning of the return value. */
      export declare type RaycastTestObjectCallback = (obj: Object3D) => RaycastTestObjectReturnType;

      /** Return value of a {@link RaycastTestObjectCallback}. Presumably `false` skips the object while `"continue in children"` still tests its children — verify against the raycaster implementation. */
      export declare type RaycastTestObjectReturnType = void | boolean | "continue in children";

      /** A simple 2D rectangle defined by position (x, y) and dimensions (width, height). */
      export declare class Rect {
          x: number;
          y: number;
          width: number;
          height: number;
      }

     /**
      * [RectTransform](https://engine.needle.tools/docs/api/RectTransform) defines a rectangle for UI layout with anchoring, pivoting, and positioning capabilities.
      * @summary UI Rectangle Transform
      * @category User Interface
      * @group Components
      */
      export declare class RectTransform extends BaseUIComponent implements IRectTransform, IRectTransformChangedReceiver {
          /** The parent RectTransform, if any. */
          get parent(): RectTransform | undefined;
          get translation(): Vector3;
          get rotation(): Quaternion;
          get scale(): Vector3;
          private _anchoredPosition;
          /** Position of the rect relative to its anchors (Unity RectTransform semantics). */
          get anchoredPosition(): Vector2;
          private set anchoredPosition(value);
          /** Size of the rect relative to the anchors (Unity RectTransform semantics). */
          sizeDelta: Vector2;
          /** Pivot point of the rect (Unity RectTransform semantics). */
          pivot: Vector2;
          /** Lower anchor of the rect (Unity RectTransform semantics). */
          anchorMin: Vector2;
          /** Upper anchor of the rect (Unity RectTransform semantics). */
          anchorMax: Vector2;
          /** Optional min width in pixel, set to undefined to disable it */
          minWidth?: number;
          /** Optional min height in pixel, set to undefined to disable it */
          minHeight?: number;
          get width(): number;
          get height(): number;
          private lastMatrix;
          private rectBlock;
          private _transformNeedsUpdate;
          private _initialPosition;
          private _parentRectTransform?;
          private _lastUpdateFrame;
          awake(): void;
          onEnable(): void;
          onDisable(): void;
          /** Invoked when the parent RectTransform changed. */
          onParentRectTransformChanged(comp: IRectTransform): void;
          /** Whether the transform needs to be updated. */
          get isDirty(): boolean;
          /** Marks the transform as needing an update. */
          markDirty(): void;
          /** Will update the transforms if it changed or is dirty */
          updateTransform(): void;
          private canUpdate;
          private onApplyTransform;
          private _lastAnchoring;
          /** applies the position offset to the passed in vector */
          private applyAnchoring;
          /** applies the pivot offset to the passed in vector */
          private applyPivot;
          getBasicOptions(): DocumentedOptions;
          private ensureValidSize;
          private _createdBlocks;
          private _createdTextBlocks;
          /** Creates a new three-mesh-ui Block with the given options. */
          createNewBlock(opts?: DocumentedOptions | object): ThreeMeshUI.Block;
          /** Creates a new three-mesh-ui text Block with the given options. */
          createNewText(opts?: DocumentedOptions | object): ThreeMeshUI.Block;
      }

     /**
      * The [ReflectionProbe](https://engine.needle.tools/docs/api/ReflectionProbe) provides environment reflection data to materials within its defined area.
      * Use for chrome-like materials that need accurate environment reflections.
      *
      * **Setup:**
      * 1. Add ReflectionProbe component to an object
      * 2. Assign a cubemap or HDR texture
      * 3. In Renderer components, assign the probe as anchor override
      *
      * **Note:** Volume-based automatic assignment is not fully supported yet.
      * Objects (Renderer components) can explicitly reference their reflection probe.
      *
      * **Debug options:**
      * - `?debugreflectionprobe` - Log probe info
      * - `?noreflectionprobe` - Disable all reflection probes
      *
      * - Example: https://engine.needle.tools/samples/reflection-probes
      *
      * @summary Provides reflection data to materials
      * @category Rendering
      * @group Components
      * @see {@link Renderer} for material and rendering control
      * @see {@link Light} for scene lighting
      */
      export declare class ReflectionProbe extends Component {
          private static _probes;
          private static testBox;
          /**
           * Checks if the given material is currently using a reflection probe.
           * This is determined by checking for an override on the material's "envMap" property, which is set by the Renderer component when applying a reflection probe.
           */
          static isUsingReflectionProbe(material: Material): boolean;
          /**
           * Event invoked when a reflection probe is enabled. Used internally by Renderer components to update probes when they become active. Not recommended to call this directly in most cases.
           * @see {@link onDisabled} for the corresponding disable event.
           */
          static readonly onEnabled: EventList<ReflectionProbe>;
          /**
           * Event invoked when a reflection probe is disabled. Used internally by Renderer components to update probes when they become inactive. Not recommended to call this directly in most cases.
           * @see {@link onEnabled} for the corresponding enable event.
           */
          static readonly onDisabled: EventList<ReflectionProbe>;
          /**
           * Gets the active reflection probe for the given object and context. If `isAnchor` is true, it will only return a probe if the object is the anchor of that probe. Otherwise, it checks if the object is within the probe's influence area.
           *
           * Note: This method is used internally by the Renderer component to determine which reflection probe to apply. It is not recommended to call this method directly in most cases. Instead, assign probes to renderers using the "anchor" property or rely on automatic assignment when supported.
           * Note: Volume-based automatic assignment is not fully supported yet, so explicit assignment is recommended for now.
           *
           * @param object The object to find a reflection probe for
           * @param context The context to search within
           * @param isAnchor If true, only return a probe if the object is the anchor of that probe
           * @param anchor Optional anchor object to match against probes
           */
          static get(object: Object3D | null | undefined, context: Context, isAnchor: boolean, anchor?: Object3D): ReflectionProbe | null;
          private _texture;
          private _textureUrlInFlight?;
          /**
           * The cubemap or HDR texture used for reflections. Can be assigned directly or via a URL string.
           * When assigning via URL, the texture will be loaded asynchronously and applied once ready.
           * @param tex The texture or URL to assign to this reflection probe
           * @default null
           */
          set texture(tex: Texture | null);
          get texture(): Texture | null;
          /**
           * The intensity of the reflection probe's influence.
           *  Higher values will make reflections brighter, while lower values will make them dimmer.
           * The default value is 1, which means the reflections will be applied at their original brightness. Adjust this value to achieve the desired look for your scene.
           * @default 1
           */
          intensity: number;
          /**
           * Defines the center of the reflection probe's influence area relative to the GameObject's position.
           * The probe will affect objects within a box defined by this center and the `size` property.
           * Note: The actual influence area is determined by both the `center` and `size` properties. The `center` defines the offset from the GameObject's position, while the `size` defines the dimensions of the box around that center. Objects within this box will be influenced by the reflection probe.
           */
          center: Vector3;
          /**
           * Defines the size of the reflection probe's influence area. Objects within this box will be affected by the probe's reflections.
           * Note: The actual influence area is determined by both the `center` and `size` properties. The `center` defines the offset from the GameObject's position, while the `size` defines the dimensions of the box around that center. Objects within this box will be influenced by the reflection probe.
           */
          size: Vector3;
          /**
           * Workaround for lightmap. Compensates for the fact that lightmaps are pre-multiplied by intensity, while reflection probes are not. This means that if you use both lightmaps and reflection probes, you may need to adjust this value to get the correct balance between them. The default value of `Math.PI` is a good starting point for most cases, but you may need to tweak it based on your specific lighting setup and artistic needs.
           * NOTE(review): declared as `boolean` here although the description above refers to a numeric default (`Math.PI`) — confirm the intended type/default.
           */
          __lightmapIntensityScale: boolean;
          private isInBox;
          constructor();
          /* Excluded from this release type: awake */
          /* Excluded from this release type: update */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /* Excluded from this release type: start */
          /* Excluded from this release type: onDestroy */
          /**
           * Applies this reflection probe to the given object by setting material property overrides for "envMap", "envMapRotation", and "envMapIntensity".
           * This is typically called by the Renderer component when determining which reflection probe to use for a given object.
           * @param object The object to apply the reflection probe to
           * @see {@link unapply} for the corresponding method to remove the probe's influence from an object.
           */
          apply(object: Object3D): void;
          /**
           * Removes the reflection probe overrides from the given object.
           * This is typically called by the Renderer component when an object is no longer influenced by this probe or when the probe is disabled.
           * @param object The object to remove the reflection probe overrides from
           * @see {@link apply} for the corresponding method to apply the probe's influence to an object.
           */
          unapply(obj: Object3D): void;
      }

      /** How reflection probes are applied by a {@link Renderer}. */
      declare enum ReflectionProbeUsage {
          Off = 0,
          BlendProbes = 1,
          BlendProbesAndSkybox = 2,
          Simple = 3
      }

      /** Registers a cast function that decodes binary (flatbuffer) data for the given type identifier. */
      export declare function registerBinaryType(identifier: string, cast: (bin: flatbuffers.ByteBuffer) => object): void;

      /** Registers a component instance, optionally with an explicit context. */
      export declare function registerComponent(script: IComponent, context?: Context): void;

      /** Registers the Needle Engine components extension on the given loader. @returns the extension, or null if it could not be registered. */
      export declare function registerComponentExtension(loader: GLTFLoader | Loader | object): NEEDLE_components | null;

      /** Registers a custom postprocessing effect type under the given name. */
      export declare function registerCustomEffectType(name: string, effect: typeof PostProcessingEffect): void;

      /** Holds information about an animation clip registered for a root object via an {@link AnimationExtension}. */
      export declare class RegisteredAnimationInfo {
          private _start?;
          get start(): number;
          get duration(): number;
          get nearestAnimatedRoot(): Object3D | undefined;
          get clipName(): string;
          private ext;
          private root;
          private _nearestAnimatedRoot?;
          private clip;
          /** Optional playback speed. */
          speed?: number;
          constructor(ext: AnimationExtension, root: Object3D, clip: AnimationClip | null);
          private static isDescendantOf;
          /** Finds the nearest actually animated object under root based on the tracks in the AnimationClip. */
          getNearestAnimatedRoot(): Object3D<Object3DEventMap> | undefined;
      }

      /** Registers the Needle Engine export extensions on the given GLTFExporter. */
      export declare function registerExportExtensions(exp: GLTFExporter, context: Context): void;

      /** Registers the Needle Engine loader extensions on the given GLTFLoader for the given url/source. */
      export declare function registerExtensions(loader: GLTFLoader, context: Context, url: string, sourceId: SourceIdentifier): Promise<void>;

     /* Excluded from this release type: registerHotReloadType */

      /** Registers a custom gltf loader implementation. */
      export declare function registerLoader<T extends INeedleGltfLoader>(loader: ConstructorConcrete<T>): void;

      /** Registers a provider callback that resolves prefabs for the given key. */
      export declare function registerPrefabProvider(key: string, fn: PrefabProviderCallback): void;

     /* Excluded from this release type: registerPrototypeExtensions */

      /**
       * Class decorator: add to a class declaration to automatically register it in the TypeStore (required for HMR right now).
       *
       * @example
       * ```ts
       * @registerType
       * export class MyType extends Behaviour { ... }
       * ```
       */
      export declare const registerType: (constructor: Type) => void;

      /** Prefix ("rel:") used to mark relative paths. */
      export declare const relativePathPrefix = "rel:";

     /**
      * The [RemoteSkybox](https://engine.needle.tools/docs/api/RemoteSkybox) component allows you to set the skybox or environment texture of a scene from a URL, a local file or a static skybox name.
      * It supports .hdr, .exr, .jpg, .png, and .ktx2 files.
      *
      * **HTML Attributes:**
      * You can control skybox and environment from HTML using `<needle-engine>` attributes:
      * - `background-image`: Sets the scene background/skybox image
      * - `environment-image`: Sets the scene environment map (for reflections and lighting)
      *
      * These attributes accept URLs or magic skybox names (see examples below).
      *
      * **Magic Skybox Names:**
      * Built-in optimized skyboxes hosted on Needle CDN:
      * - `"studio"` - Neutral studio lighting (default)
      * - `"blurred-skybox"` - Blurred environment
      * - `"quicklook"` - Apple QuickLook object mode style
      * - `"quicklook-ar"` - Apple QuickLook AR mode style
      *
      * ### Events
      * - `dropped-unknown-url`: Emitted when a file is dropped on the scene. The event detail contains the sender, the url and a function to apply the url.
      *
      * @example Using HTML attributes
      * ```html
      * <needle-engine
      *   background-image="https://example.com/skybox.hdr"
      *   environment-image="studio">
      * </needle-engine>
      * ```
      *
      * @example Using magic skybox names
      * ```html
      * <needle-engine background-image="studio"></needle-engine>
      * <needle-engine environment-image="quicklook"></needle-engine>
      * ```
      *
      * @example Adding via code
      * ```ts
      * GameObject.addComponent(gameObject, RemoteSkybox, {
      *   url: "https://example.com/skybox.hdr",
      *   background: true,
      *   environment: true
      * });
      * ```
      *
      * @example Handle custom dropped URL
      * ```ts
      * const skybox = GameObject.addComponent(gameObject, RemoteSkybox);
      * skybox.addEventListener("dropped-unknown-url", (evt) => {
      *    let url = evt.detail.url;
      *    console.log("User dropped file", url);
      *    // change url or resolve it differently
      *    url = "https://example.com/skybox.hdr";
      *    // apply the url
      *    evt.detail.apply(url);
      * });
      * ```
      *
      * @example Update skybox at runtime
      * ```ts
      * skybox.setSkybox("https://example.com/skybox.hdr");
      * // Or use a magic name:
      * skybox.setSkybox("studio");
      * ```
      *
      * @summary Sets the skybox or environment texture of a scene
      * @category Rendering
      * @group Components
      * @see {@link Camera} for clearFlags and background control
      * @link https://engine.needle.tools/docs/html.html#needle-engine-element
      */
      export declare class RemoteSkybox extends Component {
          /**
           * URL to a remote skybox.
           * To update the skybox/environment map use `setSkybox(url)`.
           *
           * The url can also be set to a magic skybox name.
           * Magic name options are: "quicklook", "quicklook-ar", "studio", "blurred-skybox".
           * These will resolve to built-in skyboxes hosted on the Needle CDN that are static, optimized for the web and will never change.
           *
           * @example
           * ```ts
           * skybox.url = "https://example.com/skybox.hdr";
           * ```
           */
          url: MagicSkyboxName | AnyString;
          /**
           * When enabled a user can drop a link to a skybox image on the scene to set the skybox.
           * @default true
           */
          allowDrop: boolean;
          /**
           * When enabled the skybox will be set as the background of the scene.
           * @default true
           */
          background: boolean;
          /**
           * When enabled the skybox will be set as the environment of the scene (to be used as environment map for reflections and lighting)
           * @default true
           */
          environment: boolean;
          /**
           * When enabled dropped skybox urls (or assigned skybox urls) will be networked to other users in the same networked room.
           * @default true
           */
          allowNetworking: boolean;
          private _prevUrl?;
          private _prevLoadedEnvironment?;
          private _prevEnvironment;
          private _prevBackground;
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          private urlChangedSyncField;
          /**
           * Set the skybox from a given url
           * @param url The url of the skybox image
           * @param name Name of the file including the extension, if it isn't a part of the url
           * @returns Whether the skybox was successfully set
           */
          setSkybox(url: MagicSkyboxName | AnyString | undefined | null, name?: string): Promise<boolean>;
          private apply;
          private readonly validProtocols;
          private readonly validTextureTypes;
          private isRemoteTexture;
          private isValidTextureType;
          private registerDropEvents;
          private unregisterDropEvents;
          private onDragOverEvent;
          private onDrop;
      }

      /**
       * Unregister a callback previously registered with {@link addAttributeChangeCallback}.
       * @param domElement The element the callback was registered on
       * @param name The attribute name the callback was registered for
       * @param callback The callback to remove
       */
      export declare function removeAttributeChangeCallback(domElement: HTMLElement, name: string, callback: AttributeChangeCallback): void;

      /** Removes the given component instance from the object. @returns the removed component. */
      export declare function removeComponent<T extends IComponent>(go: Object3D, componentInstance: T): T;

      /** Unregisters a previously registered custom gltf importer/exporter plugin. */
      export declare function removeCustomImportExtensionType(ext: INeedleGLTFExtensionPlugin): void;

      /** Removes a previously applied prefix or postfix patch from the given field on the prototype. */
      export declare function removePatch(prototype: object, fieldName: string, prefixOrPostfix: Prefix | Postfix): void;

     /**
      * The [Renderer](https://engine.needle.tools/docs/api/Renderer) component controls rendering properties of meshes including materials,
      * lightmaps, reflection probes, and GPU instancing.
      *
      * **Materials:**
      * Access materials via `sharedMaterials` array. Changes affect all instances.
      * Use material cloning for per-instance variations.
      *
      * **Instancing:**
      * Enable GPU instancing for improved performance with many identical objects.
      * Use `Renderer.setInstanced(obj, true)` or `enableInstancing` property.
      *
      * **Lightmaps:**
      * Baked lighting is automatically applied when exported from Unity or Blender.
      * Access via the associated {@link RendererLightmap} component.
      *
      * [![](https://cloud.needle.tools/-/media/Vk944XVswtPEuxlNPLMxPQ.gif)](https://engine.needle.tools/samples/multiple-lightmaps/)
      *
      * **Debug options:**
      * - `?debugrenderer` - Log renderer info
      * - `?wireframe` - Show wireframe rendering
      * - `?noinstancing` - Disable GPU instancing
      *
      * @example Change material at runtime
      * ```ts
      * const renderer = myObject.getComponent(Renderer);
      * renderer.sharedMaterials[0] = newMaterial;
      * ```
      *
      * @example Enable instancing
      * ```ts
      * Renderer.setInstanced(myObject, true);
      * ```
      *
      * @category Rendering
      * @group Components
      * @see {@link ReflectionProbe} for environment reflections
      * @see {@link Light} for scene lighting
      */
     export declare class Renderer extends Component implements IRenderer {
         /** Enable or disable instancing for an object. This will create a Renderer component if it does not exist yet.
          * @returns the Renderer component that was created or already existed on the object
          */
         static setInstanced(obj: Object3D, enableInstancing: boolean): Renderer;
         /** Check if an object is currently rendered using instancing
          * @returns true if the object is rendered using instancing
          */
         static isInstanced(obj: Object3D): boolean;
         /** Set the rendering state only of an object (makes it visible or invisible) without affecting component state or child hierarchy visibility! You can also just enable/disable the Renderer component on that object for the same effect!
          *
          * If you want to activate or deactivate a complete object you can use obj.visible as usual (it acts the same as setActive in Unity) */
         static setVisible(obj: Object3D, visible: boolean): void;
         /** When enabled the meshes rendered by this renderer receive shadows cast by other objects */
         receiveShadows: boolean;
         /** Controls whether and how the meshes of this renderer cast shadows (see {@link ShadowCastingMode}) */
         shadowCastingMode: ShadowCastingMode;
         /** Index of the baked lightmap used by this renderer. NOTE(review): presumably a negative value means "no lightmap" — confirm against the implementation */
         lightmapIndex: number;
         /** Per-object UV transform applied when sampling the lightmap texture (see {@link RendererLightmap}) */
         lightmapScaleOffset: Vector4;
         /** If the renderer should use instancing
          * If this is a boolean (true) all materials will be instanced or (false) none of them.
          * If this is an array of booleans the materials will be instanced based on the index of the material.
          */
         enableInstancing: boolean | boolean[] | undefined;
         /** Optional render order override(s). NOTE(review): presumably indexed parallel to the materials — confirm */
         renderOrder: number[] | undefined;
         /** Serialized occlusion setting of the source file. NOTE(review): effect inside the engine is not visible here — confirm before relying on it */
         allowOcclusionWhenDynamic: boolean;
         /** Optional anchor object used when sampling reflection probes (used together with {@link reflectionProbeUsage}) */
         probeAnchor?: Object3D;
         /** Controls if and how this renderer uses reflection probes */
         reflectionProbeUsage: ReflectionProbeUsage;
         private _lightmaps?;
         /** Get the mesh Object3D for this renderer
          * Warn: if this is a multimaterial object it will return the first mesh only
          * @returns a mesh object3D.
          * */
         get sharedMesh(): Mesh | SkinnedMesh | undefined;
         private readonly _sharedMeshes;
         /** Get all the mesh Object3D for this renderer
          * @returns an array of mesh object3D.
          */
         get sharedMeshes(): Mesh[];
         /** The main shared material of this renderer. Changes affect all objects using this material */
         get sharedMaterial(): SharedMaterial;
         set sharedMaterial(mat: SharedMaterial);
         /**@deprecated Use sharedMaterial */
         get material(): SharedMaterial;
         /**@deprecated Use sharedMaterial */
         set material(mat: SharedMaterial);
         private _sharedMaterials;
         private _originalMaterials?;
         private _probeAnchorLastFrame?;
         private set sharedMaterials(value);
         /** All shared materials of this renderer. Changes affect all objects using these materials */
         get sharedMaterials(): SharedMaterialArray;
         /** Truthy when instancing is globally suppressed (e.g. via the `?noinstancing` debug flag) */
         static get shouldSuppressInstancing(): string | number | boolean;
         private _lightmapTextureOverride;
         /** The lightmap texture currently used by this renderer (override or default), or null if none */
         get lightmap(): Texture | null;
         /** set undefined to return to default lightmap */
         set lightmap(tex: Texture | null | undefined);
         /** True if this renderer currently has a lightmap assigned */
         get hasLightmap(): boolean;
         /** When enabled, assets of this renderer may be loaded progressively. NOTE(review): exact behavior is defined by the progressive-loading system — not visible here */
         allowProgressiveLoading: boolean;
         private _firstFrame;
         registering(): void;
         awake(): void;
         private applyLightmapping;
         private _isInstancingEnabled;
         private _handles;
         /**
          * @returns true if this renderer has instanced objects
          */
         get isInstancingActive(): boolean;
         /** @returns the instancing handles */
         get instances(): InstanceHandle[] | null;
         private _handlesTempArray;
         /** Enable or disable instancing for this renderer.
          * @param enabled true to enable instancing, false to disable it
          */
         setInstancingEnabled(enabled: boolean): boolean;
         private clearInstancingState;
         /** Return true to wrap matrix update events for instanced rendering to update instance matrices automatically when matrixWorld changes
          * This is a separate method to be overrideable from user code
          */
         useInstanceMatrixWorldAutoUpdate(): boolean;
         start(): void;
         onEnable(): void;
         onDisable(): void;
         onDestroy(): void;
         private readonly onReflectionProbeEnabled;
         private onReflectionProbeDisabled;
         onBeforeRender(): void;
         private onBeforeRenderThree;
         onAfterRender(): void;
         /** Applies stencil settings for this renderer's objects (if stencil settings are available) */
         applyStencil(): void;
         /** Apply the settings of this renderer to the given object
          * Settings include shadow casting and receiving (e.g. this.receiveShadows, this.shadowCastingMode)
          */
         applySettings(go: Object3D): void;
         private _reflectionProbe;
         private updateReflectionProbe;
         private _updateReflectionProbe;
         private setVisibility;
         private isMultiMaterialObject;
         private isMeshOrSkinnedMesh;
     }

     /**
      * The RendererData class is used to manage the lighting settings of a scene.
      * It is created and used within the Needle Engine Context.
      */
     export declare class RendererData {
         private context;
         /** @param context The engine context this lighting data belongs to */
         constructor(context: Context);
         /**
          * The id of the currently active scene light settings (source identifier).
          */
         private _currentLightSettingsId?;
         private _sceneLightSettings?;
         /** The source identifier of the currently active scene light settings, or undefined if none is active */
         get currentLightSettingsId(): SourceIdentifier | undefined;
         private preUpdate;
         private _timevec4;
         /** Time data used for custom shaders
          * x: time
          * y: sin(time)
          * z: cos(time)
          * w: deltaTime
          */
         get timeVec4(): Vector4;
         /** the current environment intensity */
         get environmentIntensity(): number;
         /** Get all currently registered scene light settings */
         get sceneLightSettings(): MapIterator<SceneLightSettings> | undefined;
         /** set the scene lighting from a specific scene. Will disable any previously enabled lighting settings */
         enable(sourceId: SourceIdentifier | AssetReference): boolean;
         /** disable the lighting of a specific scene, will only have any effect if it is currently active */
         disable(sourceId: SourceIdentifier | AssetReference): boolean;
         /**
          * Enables the currently active scene lighting (if any), returns the id of the enabled lighting.
          * @returns The id of the enabled lighting, or null if no lighting is currently active.
          */
         enableCurrent(): SourceIdentifier | null;
         /** Disables the currently active scene lighting (if any), returns the id of the previously active lighting
          * @returns The id of the previously active lighting, or null if no lighting was active.
          */
         disableCurrent(): SourceIdentifier | null;
         /* Excluded from this release type: internalRegisterSceneLightSettings */
         /* Excluded from this release type: internalUnregisterSceneLightSettings */
         /* Excluded from this release type: internalRegisterReflection */
         /* Excluded from this release type: internalGetReflection */
         private __currentReflectionId;
         /* Excluded from this release type: internalEnableReflection */
         /* Excluded from this release type: internalDisableReflection */
         private _lighting;
     }

     /**
      * This component is automatically added by the {@link Renderer} component if the object has lightmap uvs AND we have a lightmap.
      *
      * @category Rendering
      * @group Components
      */
     export declare class RendererLightmap {
         /** The lightmap texture applied to this object, or null if none */
         get lightmap(): Texture | null;
         /** Assign a new lightmap texture (or null to remove it) */
         set lightmap(tex: Texture | null);
         private lightmapIndex;
         private lightmapScaleOffset;
         private readonly renderer;
         private _isApplied;
         private get context();
         private get gameObject();
         private lightmapTexture;
         /** @param renderer The owning {@link Renderer} this lightmap belongs to */
         constructor(renderer: Renderer);
         /** Initialize lightmap state.
          * @param lightmapIndex Index of the lightmap in the source file
          * @param lightmapScaleOffset Per-object lightmap UV transform
          * @param lightmapTexture The baked lightmap texture to apply
          */
         init(lightmapIndex: number, lightmapScaleOffset: Vector4, lightmapTexture: Texture): void;
         /** Push the current lightmap state into the given material's uniforms */
         updateLightmapUniforms(_material: any): void;
         /**
          * Apply the lightmap to the object using MaterialPropertyBlock instead of cloning materials.
          * The lightmap texture and its per-object UV transform are set as overrides via PropertyBlock.
          * Three.js reads material.lightMap to determine shader defines and upload uniforms,
          * and uses texture.offset/repeat to compute lightMapTransform in the vertex shader.
          */
         applyLightmap(): void;
         /** Update the lightMap override on all property blocks (e.g. after LOD swap) */
         private updatePropertyBlockTexture;
         /**
          * Remove the lightmap from the object
          */
         onUnset(): void;
         private ensureLightmapUvs;
         private setLightmapDebugMaterial;
     }

     /** How UI content is rendered. The values mirror Unity's Canvas render modes
      * (screen-space overlay, screen-space camera, world space). */
     declare enum RenderMode {
         ScreenSpaceOverlay = 0,
         ScreenSpaceCamera = 1,
         WorldSpace = 2,
         /** No render mode has been assigned */
         Undefined = -1
     }

     /**
      * A RenderTexture can be used to render a scene to a texture automatically by assigning it to the `Camera` component's `targetTexture` property.
      * You can then assign the `RenderTexture.texture` to materials to be displayed
      * @example Create a new RenderTexture and assign it to a camera and material
      * ```typescript
      * // create new RenderTexture with a resolution
      * const rt = new RenderTexture(256, 256);
      * // assign to a camera
      * myCameraComponent.targetTexture = rt;
      * // assign to a material
      * myMaterial.map = rt.texture;
      * ```
      */
     export declare class RenderTexture extends WebGLRenderTarget {
         /**
          * Render the scene to the texture
          * @param scene The scene to render
          * @param camera The camera to render from
          * @param renderer The renderer or effectcomposer to use
          */
         render(scene: Object3D, camera: Camera_2, renderer: WebGLRenderer | EffectComposer | EffectComposer_2): void;
         private static _userSet;
         private onBeforeRender;
         private onAfterRender;
     }

     /** Serializer that handles {@link RenderTexture} values during (de)serialization of component data */
     export declare class RenderTextureSerializer extends TypeSerializer {
         constructor();
         /** Serialize a RenderTexture value into the given serialization context */
         onSerialize(_data: any, _context: SerializationContext): void;
         /** Deserialize data into a {@link RenderTexture}, or undefined if the data cannot be deserialized */
         onDeserialize(data: any, context: SerializationContext): RenderTexture | undefined;
     }

     /**
      * Use to resolve a url serialized in a glTF file
      * @param source The uri of the loading file (the glTF the reference was serialized in), or undefined
      * @param uri The uri of the file to resolve, can be absolute or relative
      * @returns The resolved uri
      */
     export declare function resolveUrl(source: SourceIdentifier | undefined, uri: string): string;

     /**
      * RGBAColor is a class that represents a color with red, green, blue and alpha components.
      */
     export declare class RGBAColor extends Color {
         /** The alpha component of this color, in the range [0, 1] */
         alpha: number;
         /** Type guard flag identifying RGBAColor instances */
         get isRGBAColor(): boolean;
         /** Shorthand alias for {@link alpha} */
         set a(val: number);
         get a(): number;
         constructor(color: ColorRepresentation);
         /**
          * Creates a new RGBAColor with the given red, green, blue and alpha components.
          * Color values should be in the range [0, 1].
          */
         constructor(r: number, g: number, b: number, a: number);
         /** Create a copy of this color including its alpha component */
         clone(): this;
         /** Copy RGB (and alpha, when available) from another color */
         copy(col: RGBAColor | Color): this;
         lerp(color: Color, alpha: number): this;
         lerpColors(color1: Color, color2: Color, alpha: number): this;
         multiply(color: Color): this;
         fromArray(array: number[], offset?: number): this;
         /** Create an RGBAColor from any three.js color representation */
         static fromColorRepresentation(col: ColorRepresentation): RGBAColor;
     }

     /**
      * Rigidbody component for realistic physics simulation and dynamic interactions.
      * Used together with a {@link Collider} to enable physical behavior like gravity, collisions,
      * forces, and constraints. Powered by the Rapier physics engine.
      *
      * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
      *
      * **Key features:**
      * - Dynamic, kinematic, or static body types
      * - Automatic or manual mass calculation
      * - Gravity, drag, and angular drag control
      * - Position and rotation constraints (locking axes)
      * - Force, impulse, and velocity manipulation
      * - Sleep/wake optimization for performance
      * - Continuous collision detection (CCD) support
      *
      * @example Basic dynamic rigidbody
      * ```ts
      * const rb = this.gameObject.getComponent(Rigidbody);
      * rb.useGravity = true;
      * rb.mass = 2.0;
      * rb.drag = 0.5;
      * ```
      *
      * @example Apply force to move object
      * ```ts
      * const rb = this.gameObject.getComponent(Rigidbody);
      * rb.applyForce(new Vector3(0, 10, 0)); // Upward force
      * rb.applyImpulse(new Vector3(5, 0, 0)); // Instant velocity change
      * ```
      *
      * @example Kinematic rigidbody (manually controlled)
      * ```ts
      * const rb = this.gameObject.getComponent(Rigidbody);
      * rb.isKinematic = true; // Not affected by forces
      * rb.teleport({ x: 0, y: 5, z: 0 }); // Move without physics
      * ```
      *
      * @example Lock rotation on Y axis (useful for characters)
      * ```ts
      * const rb = this.gameObject.getComponent(Rigidbody);
      * rb.lockRotationY = true;
      * // Or use constraints for multiple axes:
      * rb.constraints = RigidbodyConstraints.FreezeRotationY | RigidbodyConstraints.FreezePositionZ;
      * ```
      *
      * @summary Enables physics simulation with forces, gravity, and collisions
      * @category Physics
      * @group Components
      * @see {@link BoxCollider} for box-shaped colliders
      * @see {@link SphereCollider} for sphere-shaped colliders
      * @see {@link CapsuleCollider} for capsule-shaped colliders
      * @see {@link MeshCollider} for mesh-based colliders
      * @see {@link Collider} for collider base class
      * @see {@link CharacterController} for character movement
      * @see {@link Joint} for connecting bodies
      * @see {@link RapierPhysics} for physics engine implementation
      * @see {@link Physics} for raycasting and physics utilities
      * @link https://engine.needle.tools/samples/physics-basic/
      * @link https://engine.needle.tools/samples/physics-playground/
      * @link https://engine.needle.tools/samples/physics-&-animation/
      * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies
      */
     export declare class Rigidbody extends Component implements IRigidbody {
         /** Type guard flag identifying this component as a rigidbody */
         get isRigidbody(): boolean;
         /** When true the mass will be automatically calculated by the attached colliders */
         autoMass: boolean;
         /** By default the mass will be automatically calculated (see `autoMass`) by the physics engine using the collider sizes
          * To set the mass manually you can either set the `mass` value or set `autoMass` to `false`
          */
         set mass(value: number);
         get mass(): number;
         private _mass;
         /**
          * Use gravity is a flag that can be set to false to disable gravity for a specific rigid-body.
          */
         useGravity: boolean;
         /**
          * The center of mass is the point around which the mass of the rigid-body is evenly distributed. It is used to compute the torque applied to the rigid-body when forces are applied to it.
          */
         centerOfMass: Vector3;
         /**
          * Constraints are used to lock the position or rotation of an object in a specific axis.
          */
         constraints: RigidbodyConstraints;
         /**
          * IsKinematic is a flag that can be set to true to make a rigid-body kinematic. Kinematic rigid-bodies are not affected by forces and collisions. They are meant to be animated by the user.
          */
         isKinematic: boolean;
         /** Drag is a force that resists the motion of the rigid-body. It is applied to the center-of-mass of the rigid-body.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#damping */
         drag: number;
         /** Angular drag is a force that resists the rotation of the rigid-body. It is applied to the center-of-mass of the rigid-body.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#damping */
         angularDrag: number;
         /**
          * Detect collisions is a flag that can be set to false to disable collision detection for a specific rigid-body.
          */
         detectCollisions: boolean;
         /** The sleeping threshold is the minimum velocity below which a dynamic rigid-body will be put to sleep by the physics engine.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#sleeping */
         sleepThreshold: number;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#continuous-collision-detection */
         collisionDetectionMode: CollisionDetectionMode;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockPositionX(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockPositionY(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockPositionZ(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockRotationX(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockRotationY(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         get lockRotationZ(): boolean;
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockPositionX(v: boolean);
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockPositionY(v: boolean);
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockPositionZ(v: boolean);
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockRotationX(v: boolean);
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockRotationY(v: boolean);
         /** @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#locking-translationsrotations */
         set lockRotationZ(v: boolean);
         /** Gravity is such a common force that it is implemented as a special case (even if it could easily be implemented by the user using force application). Note however that a change of gravity won't automatically wake-up the sleeping bodies so keep in mind that you may want to wake them up manually before a gravity change.
          *
          * It is possible to change the way gravity affects a specific rigid-body by setting the rigid-body's gravity scale to a value other than 1.0. The magnitude of the gravity applied to this body will be multiplied by this scaling factor. Therefore, a gravity scale set to 0.0 will disable gravity for the rigid-body whereas a gravity scale set to 2.0 will make it twice as strong. A negative value will flip the direction of the gravity for this rigid-body.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#gravity
          */
         set gravityScale(val: number);
         get gravityScale(): number;
         private _gravityScale;
         /** Rigidbodies with higher dominance will be immune to forces originating from contacts with rigidbodies of lower dominance.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#dominance
          */
         dominanceGroup: number;
         private static tempPosition;
         private _propertiesChanged;
         private _currentVelocity;
         private _smoothedVelocity;
         private _smoothedVelocityGetter;
         private _lastPosition;
         private _watch?;
         awake(): void;
         onEnable(): void;
         onDisable(): void;
         onDestroy(): void;
         onValidate(): void;
         beforePhysics(): Generator<undefined, void, unknown>;
         /** Teleport the rigidbody to a new position in the world.
          * Will reset forces before setting the object world position
          * @param pt The new position to teleport the object to (world space)
          * @param localspace When true the object will be teleported in local space, otherwise in world space
          * */
         teleport(pt: {
             x: number;
             y: number;
             z: number;
         }, localspace?: boolean): void;
         /** Reset the accumulated external forces applied to this body
          * @param wakeup when true the body will be woken up */
         resetForces(wakeup?: boolean): void;
         /** Reset the accumulated external torques applied to this body
          * @param wakeup when true the body will be woken up */
         resetTorques(wakeup?: boolean): void;
         /** Reset the linear and angular velocities of this body */
         resetVelocities(): void;
         /** Reset both accumulated forces and torques of this body */
         resetForcesAndTorques(): void;
         /** When a dynamic rigid-body doesn't move (or moves very slowly) during a few seconds, it will be marked as sleeping by the physics pipeline. Rigid-bodies marked as sleeping are no longer simulated by the physics engine until they are woken up. That way the physics engine doesn't waste any computational resources simulating objects that don't actually move. They are woken up automatically whenever another non-sleeping rigid-body starts interacting with them (either with a joint, or with one of its attached colliders generating contacts).
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#sleeping */
         wakeUp(): void;
         /** True when the body is currently sleeping. NOTE(review): presumably undefined when the physics body has not been created yet — confirm */
         get isSleeping(): boolean | undefined;
         /** Call to force an update of the rigidbody properties in the physics engine */
         updateProperties(): any;
         /** Forces affect the rigid-body's acceleration whereas impulses affect the rigid-body's velocity
          * the acceleration change is equal to the force divided by the mass:
          * @link see https://rapier.rs/docs/user_guides/javascript/rigid_bodies#forces-and-impulses */
         applyForce(vec: Vector3 | Vec3, _rel?: Vector3, wakeup?: boolean): void;
         /** Forces affect the rigid-body's acceleration whereas impulses affect the rigid-body's velocity
          * the velocity change is equal to the impulse divided by the mass
          * @link see https://rapier.rs/docs/user_guides/javascript/rigid_bodies#forces-and-impulses */
         applyImpulse(vec: Vector3 | Vec3, wakeup?: boolean): void;
         /** @link see https://rapier.rs/docs/user_guides/javascript/rigid_bodies#forces-and-impulses */
         setForce(x: Vector3 | Vec3 | number, y?: number, z?: number, wakeup?: boolean): void;
         /** The velocity of a dynamic rigid-body controls how fast it is moving in time. The velocity is applied at the center-of-mass of the rigid-body. This method returns the current linear velocity of the rigid-body.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#velocity */
         getVelocity(): Vector3;
         /** Set the linear velocity of the rigid-body, either from a Vector3 or from individual x/y/z components
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#velocity */
         setVelocity(x: number | Vector3, y?: number, z?: number, wakeup?: boolean): void;
         /** The velocity of a dynamic rigid-body controls how fast it is moving in time. The velocity is applied at the center-of-mass of the rigid-body. This method returns the current angular velocity of the rigid-body.
          * @link https://rapier.rs/docs/user_guides/javascript/rigid_bodies#velocity */
         getAngularVelocity(): Vector3;
         /** Set the angular velocity of the rigid-body */
         setAngularVelocity(x: Vec3, wakeup?: boolean): any;
         setAngularVelocity(x: number, y: number, z: number, wakeup?: boolean): any;
         /** Set the angular velocity of a rigidbody (equivalent to calling `setAngularVelocity`) */
         setTorque(x: Vec3): any;
         setTorque(x: number, y: number, z: number): any;
         /**
          * Returns the rigidbody velocity smoothed over ~ 10 frames
          */
         get smoothedVelocity(): Vector3;
         /**
          * @deprecated not used anymore and will be removed in a future update
          */
         setBodyFromGameObject(_velocity?: Vector3 | null | {
             x: number;
             y: number;
             z: number;
         }): void;
         private captureVelocity;
     }

     /** Bit flags used to lock translation/rotation of a {@link Rigidbody} on specific axes.
      * Combine individual flags with bitwise OR, e.g. `FreezePositionX | FreezeRotationY`. */
     export declare enum RigidbodyConstraints {
         /** No constraints applied */
         None = 0,
         FreezePositionX = 2,
         FreezePositionY = 4,
         FreezePositionZ = 8,
         /** FreezePositionX | FreezePositionY | FreezePositionZ */
         FreezePosition = 14,
         FreezeRotationX = 16,
         FreezeRotationY = 32,
         FreezeRotationZ = 64,
         /** FreezeRotationX | FreezeRotationY | FreezeRotationZ */
         FreezeRotation = 112,
         /** FreezePosition | FreezeRotation */
         FreezeAll = 126
     }

     /** Use to listen to room networking events like joining a networked room
      *  For example: `this.context.connection.beginListen(RoomEvents.JoinedRoom, () => { })`
      * @link https://engine.needle.tools/docs/networking.html#manual-networking
      * */
     export declare enum RoomEvents {
         /** Internal: Sent to the server when attempting to join a room */
         Join = "join-room",
         /** Internal: Sent to the server when attempting to leave a room */
         Leave = "leave-room",
         /** Incoming: When the local user has joined a room */
         JoinedRoom = "joined-room",
         /** Incoming: When the local user has left a room */
         LeftRoom = "left-room",
         /** Incoming: When another user has joined the room */
         UserJoinedRoom = "user-joined-room",
         /** Incoming: When another user has left the room */
         UserLeftRoom = "user-left-room",
         /** When a user joins a room, the server sends the entire room state. Afterwards, the server sends the room-state-sent event. */
         RoomStateSent = "room-state-sent"
     }

     /** Particle system module that controls particle rotation based on particle speed.
      * NOTE(review): presumably mirrors Unity's ParticleSystem RotationBySpeedModule — confirm */
     export declare class RotationBySpeedModule {
         /** When false this module has no effect */
         enabled: boolean;
         /** Speed range (min/max) the curves are mapped over */
         range: Vec2;
         /** When true x/y/z curves are evaluated per axis, otherwise a single curve is used */
         separateAxes: boolean;
         x: MinMaxCurve;
         xMultiplier: number;
         y: MinMaxCurve;
         yMultiplier: number;
         z: MinMaxCurve;
         zMultiplier: number;
         /** Evaluate the rotation for a particle
          * @param _t01 normalized lifetime [0, 1]
          * @param speed current particle speed */
         evaluate(_t01: number, speed: number): number;
     }

     /** Particle system module that controls particle rotation over the particle's lifetime.
      * NOTE(review): presumably mirrors Unity's ParticleSystem RotationOverLifetimeModule — confirm */
     export declare class RotationOverLifetimeModule {
         /** When false this module has no effect */
         enabled: boolean;
         /** When true x/y/z curves are evaluated per axis, otherwise a single curve is used */
         separateAxes: boolean;
         x: MinMaxCurve;
         xMultiplier: number;
         y: MinMaxCurve;
         yMultiplier: number;
         z: MinMaxCurve;
         zMultiplier: number;
         /** Evaluate the rotation for a particle
          * @param t01 normalized lifetime [0, 1]
          * @param t time value passed to the curves */
         evaluate(t01: number, t: number): number;
     }

     /** Coerce an arbitrary value into a sanitized string.
      * NOTE(review): the exact sanitization rules live in the implementation and are not visible from this declaration — confirm before relying on specific escaping behavior. */
     export declare function sanitizeString(str: any): string;

     /** Download an image (must be a data url).
      * @param dataUrl The data url of the image (when null, nothing is saved)
      * @param filename The filename of the image
      * @example
      * ```ts
      * const dataUrl = screenshot();
      * saveImage(dataUrl, "screenshot.png");
      * ```
      */
     export declare function saveImage(dataUrl: string | null, filename: string): void;

     /** Options for creating a scale animation clip */
     declare type ScaleClipOptions = {
         /**
          * Type of scale animation to create.
          * - "linear": Simple linear scale up animation.
          * - "spring": Spring-like scale animation with overshoot and settling.
          */
         type?: ScaleClipType;
         /** Duration of the animation. NOTE(review): presumably in seconds — confirm */
         duration?: number;
         /** Target scale, either uniform (number) or per axis */
         scale?: number | Vector3Like;
         /** Multiplier applied to the scale. NOTE(review): exact interaction with `scale` not visible here — confirm */
         scaleFactor?: number;
     };

     /**
      * Type of scale animation to create.
      * - "linear": Simple linear scale up animation.
      * - "spring": Spring-like scale animation with overshoot and settling.
      * @see {@link ScaleClipOptions}
      */
     declare type ScaleClipType = "linear" | "spring";

     /** Holds the ambient/environment lighting settings of a scene.
      * Registered with and activated through {@link RendererData} (see `RendererData.sceneLightSettings`). */
     export declare class SceneLightSettings extends Component {
         /** How ambient light is provided (see {@link AmbientMode}) */
         ambientMode: AmbientMode;
         /** Flat ambient light color (used depending on {@link ambientMode}) */
         ambientLight?: Color;
         /** Ambient gradient colors. NOTE(review): presumably sky/equator/ground trilight colors — confirm */
         ambientTrilight?: Color[];
         /** Intensity multiplier applied to the ambient light */
         ambientIntensity: number;
         /** Source used for environment reflections (see {@link DefaultReflectionMode}) */
         environmentReflectionSource: DefaultReflectionMode;
         private _hasReflection;
         private _ambientLightObj?;
         private _hemisphereLightObj?;
         awake(): void;
         onDestroy(): void;
         private calculateIntensityFactor;
         onEnable(): void;
         onDisable(): void;
     }

     /** The SceneSwitcher can be used to dynamically load and unload extra content
      * Available scenes are defined in the `scenes` array.
      * Loaded scenes will be added to the SceneSwitcher's GameObject as a child and removed when another scene is loaded by the same SceneSwitcher.
      * Live Examples
      * - [Multi Scenes Sample](https://engine.needle.tools/samples/multi-scene-example) (source code available)
      * - [Needle Website](https://needle.tools)
      * - [Songs Of Cultures](https://app.songsofcultures.com)
      *
      * ![](https://cloud.needle.tools/-/media/I-atrBcwIcg2kfvPl8c71A.gif)
      * *Replace entire scenes with the SceneSwitcher.*
      *
      * ![](https://cloud.needle.tools/-/media/_FbH-7pkDmluTdfphXlP-A.gif)
      * *Multiple SceneSwitcher components can be used at the same time and they can also be nested
      * (a scene loaded by a SceneSwitcher can also have a SceneSwitcher to load sub-scenes).*
      *
      * ### Interfaces
      * Use the {@link ISceneEventListener} interface to listen to scene open and closing events with the ability to modify transitions and stall the scene loading process.
      *
      * ### Events
      * - `loadscene-start`: Called when a scene starts loading
      * - `loadscene-finished`: Called when a scene finished loading
      * - `progress`: Called when a scene is loading and the progress changes
      * - `scene-opened`: Called when a scene is loaded and added to the SceneSwitcher's GameObject
      * @example
      * ```ts
      * sceneSwitcher.addEventListener("loadscene-start", (e) => {
      *    console.log("Loading scene", e.detail.scene.url);
      * });
      * sceneSwitcher.addEventListener("loadscene-finished", (e) => {
      *   console.log("Finished loading scene", e.detail.scene.url);
      * });
      * sceneSwitcher.addEventListener("progress", (e) => {
      *  console.log("Loading progress", e.loaded, e.total);
      * });
      * sceneSwitcher.addEventListener("scene-opened", (e) => {
      * console.log("Scene opened", e.detail.scene.url);
      * });
      * ```
      *
      * @summary Dynamically loads and switches between multiple scenes
      * @category Asset Management
      * @group Components
      * @see {@link ISceneEventListener} for scene transition callbacks
      * @see {@link AssetReference} for loading individual assets
      * @see {@link NestedGltf} for embedding static glTF content
      * @link https://engine.needle.tools/docs/how-to-guides/components/scene-switcher.html
      */
      export declare class SceneSwitcher extends Component {
          /** When enabled the first scene will be loaded when the SceneSwitcher becomes active
           * @default true
           */
          autoLoadFirstScene: boolean;
          /**
           * The scenes that can be loaded by the SceneSwitcher.
           * @default []
           */
          scenes: AssetReference[];
          /**
           * The scene that is displayed while a scene is loading.
           * @default undefined
           */
          loadingScene?: AssetReference;
          /** the url parameter that is set/used to store the currently loaded scene in, set to "" to disable
           * @default "scene"
           */
          queryParameterName: string;
          /**
           * when enabled the scene name will be used as the query parameter (otherwise the scene index will be used)
           * Needs `queryParameterName` set
           * @default true
           */
          useSceneName: boolean;
          /**
           * When enabled the current scene index will be clamped to the scenes array bounds.
           * For example when the last scene is loaded and `clamp` is true then trying to load the `next()` scene will not change the scene.
           * When `clamp` is false and the last scene is loaded then the first scene will be loaded instead.
           * @default true
           */
          clamp: boolean;
          /** when enabled the new scene is pushed to the browser navigation history, only works with a valid query parameter set
           * @default true
           */
          useHistory: boolean;
          /** when enabled you can switch between scenes using keyboard left, right, A and D or number keys
           * @default true
           */
          useKeyboard: boolean;
          /** when enabled you can switch between scenes using swipe (mobile only)
           * @default true
           */
          useSwipe: boolean;
          /** when enabled will automatically apply the environment scene lights
           * @default true
           */
          useSceneLighting: boolean;
          /** When enabled will automatically apply the skybox from the loaded scene
           * @default true
           */
          useSceneBackground: boolean;
          /** how many scenes after the currently active scene should be preloaded
           * @default 1
           */
          preloadNext: number;
          /** how many scenes before the currently active scene should be preloaded
           * @default 1
           */
          preloadPrevious: number;
          /** how many scenes can be loaded in parallel
           * @default 2
           */
          preloadConcurrent: number;
          /**
           * When enabled will create a button for the Needle menu to switch to the next or previous scene
           * @default false
           */
          createMenuButtons: boolean;
          /** The index of the currently loaded and active scene */
          get currentIndex(): number;
          /** Get the progress of the currently loading scene. This is undefined if no scene is loading
           * You can also subscribe to the loading event by adding an event listener to the scene switcher.
           * For example like this `sceneSwitcher.addEventListener("progress", (e) => {...})`
           */
          get currentLoadingProgress(): ProgressEvent<EventTarget> | undefined;
          /** The currently loading scene. This is undefined if no scene is loading. */
          get currentlyLoadingScene(): AssetReference | undefined;
          /**
           * The currently loaded scene. This is undefined if no scene is loaded.
           */
          get currentlyLoadedScene(): AssetReference | undefined;
          /**
           * Called when a scene starts loading
           */
          sceneLoadingStart: EventList<LoadSceneEvent>;
          /**
           * Raised with {@link ProgressEvent} updates while a scene is loading
           */
          sceneLoadingProgress: EventList<ProgressEvent>;
          /**
           * The sceneLoaded event is called when a scene/glTF is loaded and added to the scene
           */
          sceneLoaded: EventList<SceneSwitcher>;
          private _currentIndex;
          private _currentScene;
          private _currentSceneAsset;
          private _engineElementOverserver;
          private _preloadScheduler?;
          private _menuButtons?;
          private __lastSwitchScene?;
          private __lastSwitchScenePromise?;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          private onPopState;
          private normalizedSwipeThresholdX;
          private _didSwipe;
          private onInputPointerMove;
          private onInputPointerUp;
          private onInputKeyDown;
          /**
           * Add a scene to the SceneSwitcher.
           * If the scene is already added it will be added again.
           * @param urlOrAssetReference The url of the scene or an AssetReference to the scene
           * @returns The AssetReference of the scene that was added
           * @example
           * ```ts
           * // adding a scene:
           * sceneSwitcher.addScene("scene1.glb");
           * // add another scene and load it:
           * const scene2 = sceneSwitcher.addScene("scene2.glb");
           * sceneSwitcher.switchScene(scene2).then(res => { console.log("Scene loaded", res); });
           * ```
           */
          addScene(urlOrAssetReference: string | AssetReference): AssetReference;
          /**
           * Load the next scene in the scenes array ({@link currentIndex} + 1)
           * If the current scene is the last scene in the array and {@link clamp} is disabled then the first scene will be loaded.
           * @returns a promise that resolves to true if the scene was loaded successfully
           */
          selectNext(): Promise<boolean>;
          /**
           * Load the previous scene in the scenes array ({@link currentIndex} - 1)
           * If the current scene is the first scene in the array and {@link clamp} is disabled then the last scene will be loaded.
           * @returns a promise that resolves to true if the scene was loaded successfully
           */
          selectPrev(): Promise<boolean>;
          /**
           * Load a scene by its index in the scenes array.
           * @param index The index of the scene or a string that represents the scene uri (if the url is not known to the SceneSwitcher it will try to load the scene by its uri but it won't be added to the current scenes array. Use {@link addScene} to add a scene to the SceneSwitcher)
           * @returns a promise that resolves to true if the scene was loaded successfully
           */
          select(index: number | string): Promise<boolean>;
          /**
           * Unload the currently loaded scene.
           */
          unload(): Promise<void>;
          /**
           * Reload the last scene that was loaded
           * @returns a promise that resolves to true if the scene was loaded successfully
           */
          reload(): Promise<boolean>;
          /**
           * Switch to a scene by its AssetReference.
           * If the scene is already loaded it will be unloaded and the new scene will be loaded.
           * If the scene is already loading it will wait for the scene to be loaded.
           * If the scene is already loaded and the same scene is requested again it will return the same promise that was returned the first time the scene was requested.
           * @param scene The AssetReference of the scene to switch to
           * @returns a promise that resolves to true if the scene was loaded successfully
           * @example
           * ```ts
           * const myAssetReference = new AssetReference("scene1.glb");
           * sceneSwitcher.switchScene(myAssetReference).then(res => { console.log("Scene loaded", res); });
           * ```
           */
          switchScene(scene: AssetReference): Promise<boolean>;
          private __unloadCurrentScene;
          private _currentlyLoadingScene?;
          /* Excluded from this release type: __internalSwitchScene */
          /** Preload the scene at the given index without making it the active scene (see {@link preloadNext} / {@link preloadPrevious}).
           * @param index the index in {@link scenes} to preload
           * @returns a promise resolving to the preload result — a success flag or the raw preloaded data, depending on the load path taken internally
           */
          preload(index: number): Promise<boolean> | Promise<ArrayBufferLike | null>;
          private tryLoadFromQueryParam;
          /** try to select a scene from a string or index */
          private trySelectSceneFromValue;
          private _lastLoadingScene;
          private _loadingScenePromise;
          private _isCurrentlyLoading;
          private _currentLoadingProgress;
          private onStartLoading;
          private onEndLoading;
          private tryGetSceneEventListener;
      }

      /**
       * Internal helper that fades the rendered image to a solid color to mask
       * scene transitions or teleportation — see {@link fadeTransition}.
       */
      declare class SceneTransition {
          private readonly _fadeToColorQuad;
          private readonly _fadeToColorMaterial;
          constructor();
          /** Dispose the internally created fullscreen quad and material */
          dispose(): void;
          /** Advance the fade for the current frame.
           * @param camera the camera being rendered
           * @param dt frame delta time (presumably in seconds — confirm against the implementation)
           */
          update(camera: Camera_2, dt: number): void;
          /** Remove the transition overlay from rendering (counterpart to {@link update}) */
          remove(): void;
          /** Call to fade rendering to black for a short moment (the returned promise will be resolved when fully black)
           * This can be used to mask scene transitions or teleportation
           * @returns a promise that is resolved when the screen is fully black
           * @example `fadeTransition().then(() => { <fully_black> })`
           */
          fadeTransition(): Promise<void>;
          private _requestedFadeValue;
          private _transitionPromise;
          private _transitionResolve;
      }

     /**
      * ScreenCapture enables sharing screen, camera, or microphone with users in a networked room.
      * The stream is displayed via a {@link VideoPlayer} component on the same GameObject.
      *
      * **Supported capture devices:**
      * - `Screen` - Share desktop/window/tab
      * - `Camera` - Share webcam feed
      * - `Microphone` - Audio only
      * - `Canvas` - Share the 3D canvas (experimental)
      *
      * ![](https://cloud.needle.tools/-/media/Ugw6sKj3KNeLMzl0yKQXig.gif)
      *
      * **How it works:**
      * - Click the object to start/stop sharing (if `allowStartOnClick` is true)
      * - Or call `share()` / `close()` programmatically
      * - Stream is sent to all users in the same room via WebRTC
      * - Receiving clients see the video on their VideoPlayer
      *
      * **Debug:** Append `?debugscreensharing` to the URL for console logging.
      *
      * @example Start screen sharing programmatically
      * ```ts
      * const capture = myScreen.getComponent(ScreenCapture);
      * await capture?.share({ device: "Screen" });
      *
      * // Later, stop sharing
      * capture?.close();
      * ```
      *
      * @example Share webcam with constraints
      * ```ts
      * await capture?.share({
      *   device: "Camera",
      *   constraints: { width: 1280, height: 720 }
      * });
      * ```
      *
      * @summary Share screen, camera or microphone in a networked room
      * @category Networking
      * @category Multimedia
      * @group Components
      * @see {@link VideoPlayer} for displaying the received stream
      * @see {@link Voip} for voice-only communication
      * @see {@link SyncedRoom} for room management
      * @link https://engine.needle.tools/docs/networking.html
      */
      export declare class ScreenCapture extends Component implements IPointerClickHandler {
          /**
           * When enabled the stream will start when the user clicks on the object this component is attached to
           * It is also possible to start the stream manually from your code by calling the {@link share} method
           * To modify what type of device is shared you can set the {@link device} property.
           * @default true
           */
          allowStartOnClick: boolean;
          /* Excluded from this release type: onPointerEnter */
          /* Excluded from this release type: onPointerExit */
          /* Excluded from this release type: onPointerClick */
          /** When enabled the stream will start when this component becomes active (enabled in the scene) */
          autoConnect: boolean;
          /**
           * If a VideoPlayer component is assigned to this property the video will be displayed on the VideoPlayer component.
           */
          set videoPlayer(val: VideoPlayer | undefined);
          get videoPlayer(): VideoPlayer | undefined;
          private _videoPlayer?;
          private _audioSource?;
          /**
           * When enabled the video will be displayed in the screenspace of the VideoPlayer component.
           */
          get screenspace(): boolean;
          set screenspace(v: boolean);
          /**
           * Which streaming device type should be used when starting to share (if {@link share} is called without a device option). Options are Screen, Camera, Microphone.
           * This is e.g. used if `allowStartOnClick` is enabled and the user clicks on the object.
           * @default Screen
           */
          device: ScreenCaptureDeviceTypes;
          /**
           * If assigned, the device will be selected by this id or label when starting to share.
           * Note: This is only supported for `Camera` devices
           */
          deviceName?: string;
          /**
           * Filter which device should be chosen for sharing by id or label.
           * Assign a method to this property to manually filter the available devices.
           */
          deviceFilter?: (device: MediaDeviceInfo) => boolean;
          /**
           * the current stream that is being shared or received
           * NOTE(review): the getter name "currentScream" looks like a typo of "currentStream" — renaming here would be a breaking API change, so it is kept as declared; confirm against the implementation.
           * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaStream
           */
          get currentScream(): MediaStream | null;
          /** The current {@link ScreenCaptureMode} of this component (Idle, Sending or Receiving) */
          get currentMode(): ScreenCaptureMode;
          /**
           * @returns true if the component is currently sending a stream
           */
          get isSending(): boolean | undefined;
          /**
           * @returns true if the component is currently receiving a stream
           */
          get isReceiving(): boolean;
          private get requiresVideoPlayer();
          private _net?;
          private _requestOpen;
          private _currentStream;
          private _currentMode;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          private onJoinedRoom;
          private _ensureVideoPlayer;
          private _activeShareRequest;
          /** Call to begin screensharing
           * @param opts optional {@link ScreenCaptureOptions} — device type, stream constraints and device filtering
           */
          share(opts?: ScreenCaptureOptions): Promise<void | null>;
          private internalShare;
          /** Stop sharing and close the currently open stream */
          close(): void;
          private setStream;
          private onReceiveStream;
          private onCallEnded;
          private tryShareUserCamera;
      }

      /**
       * The device types that can be captured and shared by the {@link ScreenCapture} component.
       */
      declare enum ScreenCaptureDevice {
          /**
           * Capture the screen of the user.
           */
          Screen = 0,
          /**
           * Capture the camera of the user.
           */
          Camera = 1,
          /** Please note that canvas streaming might not work reliably on chrome: https://bugs.chromium.org/p/chromium/issues/detail?id=1156408 */
          Canvas = 2,
          /** When using Microphone only the voice will be sent */
          Microphone = 3
      }

      /**
       * The names of {@link ScreenCaptureDevice} as a string union ("Screen" | "Camera" | "Canvas" | "Microphone").
       * {@link ScreenCapture} allows you to share your screen, camera or microphone with other users in the networked room.
       */
      declare type ScreenCaptureDeviceTypes = keyof typeof ScreenCaptureDevice;

      /**
       * The current mode of the {@link ScreenCapture} component.
       */
      declare enum ScreenCaptureMode {
          /** No stream is currently active */
          Idle = 0,
          /** This client is sending a stream (see {@link ScreenCapture.isSending}) */
          Sending = 1,
          /** This client is receiving a remote stream (see {@link ScreenCapture.isReceiving}) */
          Receiving = 2
      }

      /**
       * Options for the {@link ScreenCapture} component when starting to share a stream by calling the {@link ScreenCapture.share} method.
       */
      declare type ScreenCaptureOptions = {
          /**
           * You can specify the device type to capture (e.g. Screen, Camera, Microphone)
           */
          device?: ScreenCaptureDeviceTypes;
          /**
           * Constraints for the media stream like resolution, frame rate, etc.
           * @see https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamConstraints
           */
          constraints?: MediaTrackConstraints;
          /** Filter video device by id. Alternatively pass in a deviceFilter callback to manually filter available devices */
          deviceId?: string;
          /** Return false to skip the available device */
          deviceFilter?: (device: MediaDeviceInfo) => boolean;
      };

      /**
       * Take a screenshot from the current scene.
       * **NOTE**: Use {@link screenshot2} for more options.
       *
       * @param context The context to take the screenshot from
       * @param width The width of the screenshot
       * @param height The height of the screenshot
       * @param mimeType The mime type of the image
       * @param camera The camera to use for the screenshot
       * @returns The data url of the screenshot. Returns null if the screenshot could not be taken.
       * @category Utilities
       * @example
       * ```ts
       * const dataUrl = screenshot();
       * saveImage(dataUrl, "screenshot.png");
       * ```
       *
       */
      export declare function screenshot(context?: Context, width?: number, height?: number, mimeType?: ScreenshotImageMimeType, camera?: Camera_2 | null): string | null;

      /**
       * Take a screenshot from the current scene and return a {@link Texture}. This can be applied to a surface in 3D space.
       * @param opts Provide `{ type: "texture" }` to get a texture instead of a data url.
       * @returns The texture of the screenshot. Returns null if the screenshot could not be taken.
       * @category Utilities
       * @see {@link ScreenshotOptionsTexture} for all available options
       * @example
       * ```ts
       * // Create a texture from the current view
       * const screenshotTexture = screenshot2({ type: "texture", width: 512, height: 512 });
       * if (screenshotTexture) {
       *   myMaterial.map = screenshotTexture;
       *   myMaterial.needsUpdate = true;
       * }
       *
       * // Update an existing texture
       * const existingTexture = new Texture();
       * screenshot2({ type: "texture", target: existingTexture, transparent: true });
       * ```
       */
      export declare function screenshot2(opts: ScreenshotOptionsTexture): Texture | null;

      /**
       * Take a screenshot from the current scene and return a data URL string.
       *
       * @param opts Screenshot options. All properties are optional.
       * @returns The data URL of the screenshot (e.g., "data:image/png;base64,..."). Returns null if the screenshot could not be taken.
       * @category Utilities
       * @see {@link ScreenshotOptionsDataUrl} for all available options
       *
       * @example Basic screenshot
       * ```ts
       * // Take a simple screenshot with default settings
       * const dataUrl = screenshot2({});
       * console.log(dataUrl); // "data:image/webp;base64,..."
       * ```
       *
       * @example High-resolution screenshot with transparent background
       * ```ts
       * const dataUrl = screenshot2({
       *   width: 2048,
       *   height: 2048,
       *   mimeType: "image/png",
       *   transparent: true,
       *   trim: true, // Remove transparent edges
       * });
       * ```
       *
       * @example Screenshot with custom background color
       * ```ts
       * import { Color } from "three";
       *
       * const dataUrl = screenshot2({
       *   width: 1024,
       *   height: 1024,
       *   background: new Color(0x00ff00), // Green background
       * });
       * ```
       *
       * @example Download screenshot automatically
       * ```ts
       * screenshot2({
       *   width: 1920,
       *   height: 1080,
       *   mimeType: "image/jpeg",
       *   download_filename: "my-scene.jpg",
       * });
       * ```
       *
       * @example Manual download using saveImage
       * ```ts
       * const dataUrl = screenshot2({
       *   width: 1024,
       *   height: 1024,
       *   mimeType: "image/webp",
       *   transparent: true,
       * });
       * if (dataUrl) {
       *   saveImage(dataUrl, "screenshot.webp");
       * }
       * ```
       *
       * @example Screenshot from specific camera
       * ```ts
       * const myCamera = this.gameObject.getComponent(Camera);
       * const dataUrl = screenshot2({
       *   camera: myCamera,
       *   width: 1024,
       *   height: 1024,
       * });
       * ```
       */
      export declare function screenshot2(opts: ScreenshotOptionsDataUrl): string | null;

      /**
       * Take a screenshot asynchronously and return a Blob. This is useful when you need to process or upload the image data.
       *
       * @param opts Set `{ type: "blob" }` to get a blob instead of a data url. All other {@link ScreenshotOptions} are also available.
       * @returns A Promise that resolves with the Blob of the screenshot. Returns null if the screenshot could not be taken.
       * @category Utilities
       * @see {@link ScreenshotOptionsBlob} for all available options
       *
       * @example Upload screenshot to server
       * ```ts
       * const blob = await screenshot2({ type: "blob", mimeType: "image/png" });
       * if (blob) {
       *   const formData = new FormData();
       *   formData.append("screenshot", blob, "screenshot.png");
       *   await fetch("/api/upload", { method: "POST", body: formData });
       * }
       * ```
       *
       * @example Save blob to file (browser download)
       * ```ts
       * const blob = await screenshot2({
       *   type: "blob",
       *   width: 1920,
       *   height: 1080,
       *   transparent: true
       * });
       * if (blob) {
       *   const url = URL.createObjectURL(blob);
       *   saveImage(url, "screenshot.png");
       *   URL.revokeObjectURL(url); // Clean up
       * }
       * ```
       */
      export declare function screenshot2(opts: ScreenshotOptionsBlob): Promise<Blob | null>;

      /**
       * Take a screenshot and share it using the Web Share API (mobile-friendly).
       *
       * **Note**: The Web Share API is only available in secure contexts (HTTPS) and may not be supported on all platforms/browsers.
       *
       * @param opts Set `{ type: "share" }` to share the screenshot. Additional options like `filename`, `title`, `text`, and `url` can be provided.
       * @returns A Promise that resolves with an object containing the blob and whether it was successfully shared.
       * @category Utilities
       * @see {@link ScreenshotOptionsShare} for all available options
       *
       * @example Share screenshot on mobile
       * ```ts
       * const result = await screenshot2({
       *   type: "share",
       *   filename: "my-creation.png",
       *   title: "Check out my 3D scene!",
       *   text: "I created this with Needle Engine",
       *   url: "https://engine.needle.tools",
       *   mimeType: "image/png",
       * });
       *
       * if (result.shared) {
       *   console.log("Screenshot shared successfully!");
       * } else {
       *   console.log("User cancelled or sharing not supported");
       * }
       * ```
       *
       * @example Share with fallback
       * ```ts
       * const result = await screenshot2({
       *   type: "share",
       *   filename: "screenshot.webp",
       *   file_type: "image/webp",
       * });
       *
       * if (!result.shared && result.blob) {
       *   // Fallback: download the image instead
       *   const url = URL.createObjectURL(result.blob);
       *   saveImage(url, "screenshot.webp");
       *   URL.revokeObjectURL(url);
       * }
       * ```
       */
      export declare function screenshot2(opts: ScreenshotOptionsShare): Promise<ScreenshotOptionsShareReturnType>;

      /** Image mime types supported by the screenshot functions */
      declare type ScreenshotImageMimeType = "image/webp" | "image/png" | "image/jpeg";

      /**
       * Options for the {@link screenshot2} function.
       * Use one of the derived option types ({@link ScreenshotOptionsTexture}, {@link ScreenshotOptionsBlob}, {@link ScreenshotOptionsShare} or {@link ScreenshotOptionsDataUrl}) to select the result type of the call.
       */
      export declare type ScreenshotOptions = {
          /**
           * The context to take the screenshot from. If not provided, the current context will be used.
           */
          context?: Pick<Context, "scene" | "renderer" | "mainCamera" | "renderNow" | "updateAspect" | "updateSize" | "currentFrameEvent" | "devicePixelRatio">;
          /**
           * The width of the screenshot - if not provided, the width of the current renderer will be used.
           */
          width?: number;
          /**
           * The height of the screenshot - if not provided, the height of the current renderer will be used.
           */
          height?: number;
          /**
           * The mime type of the image
           */
          mimeType?: ScreenshotImageMimeType;
          /**
           * The camera to use for the screenshot. If not provided, the main camera of the context will be used.
           */
          camera?: Camera_2 | ICamera | null;
          /**
           * If true, the background will be transparent.
           */
          transparent?: boolean;
          /**
           * If true, the image will be trimmed to the non-transparent area. Has no effect if `transparent` is false.
           */
          trim?: boolean;
          /**
           * The background of the screenshot. If not provided, the currently set background of the renderer/scene will be used
           */
          background?: Color | RGBAColor | ColorRepresentation;
          /**
           * If true onBeforeRender and onAfterRender will be invoked on all renderers in the scene.
           * @default true
           */
          render_events?: boolean;
      };

      /** Options for {@link screenshot2} when requesting the result as a Blob */
      export declare type ScreenshotOptionsBlob = ScreenshotOptions & {
          /** Set to "blob" to receive the screenshot as a Blob */
          type: "blob";
      };

      /** Options for {@link screenshot2} when requesting the result as a data URL string */
      export declare type ScreenshotOptionsDataUrl = ScreenshotOptions & {
          /**
           * If set the screenshot will be downloaded using the provided filename.
           * NOTE: if you need more control you can manually download the returned image using {@link saveImage}
           * @default undefined
           */
          download_filename?: string;
      };

      /** Options for {@link screenshot2} when sharing the screenshot via the Web Share API */
      export declare type ScreenshotOptionsShare = ScreenshotOptions & {
          /**
           * Set `{ type: "share" }` to share the screenshot using the Web Share API. The promise will resolve with the blob of the screenshot and whether it was shared successfully or not. Note that the Web Share API is only available in secure contexts (HTTPS) and on some platforms.
           */
          type: "share";
          /**
           * The filename to use when sharing the screenshot. If not provided, a default filename will be used.
           */
          filename?: string;
          /**
           * The mime type of the shared file. If not provided, the mime type will be inferred from the screenshot options or default to "image/png".
           */
          file_type?: ScreenshotImageMimeType;
          /**
           * The title to use when sharing the screenshot. This is optional and may not be supported by all platforms.
           */
          title?: string;
          /**
           * The text to use when sharing the screenshot. This is optional and may not be supported by all platforms.
           */
          text?: string;
          /**
           * The URL to use when sharing the screenshot. This is optional and may not be supported by all platforms.
           */
          url?: string;
      };

      /** Result of {@link screenshot2} when called with `{ type: "share" }` */
      declare type ScreenshotOptionsShareReturnType = {
          /** The captured image, or null if the screenshot could not be taken */
          blob: Blob | null;
          /** True if the screenshot was successfully shared via the Web Share API */
          shared: boolean;
      };

      /** Options for {@link screenshot2} when requesting the result as a {@link Texture} */
      export declare type ScreenshotOptionsTexture = ScreenshotOptions & {
          /** Set to "texture" to receive the screenshot as a Texture */
          type: "texture";
          /**
           * If set the screenshot will be saved to the provided texture.
           * @default undefined
           */
          target?: Texture;
      };

      /** [ScreenSpaceAmbientOcclusion](https://engine.needle.tools/docs/api/ScreenSpaceAmbientOcclusion) is a screenspace ambient occlusion post-processing effect.
       * We recommend using {@link ScreenSpaceAmbientOcclusionN8} instead.
       * @category Effects
       * @group Components
       */
      export declare class ScreenSpaceAmbientOcclusion extends PostProcessingEffect {
          get typeName(): string;
          readonly intensity: VolumeParameter;
          readonly falloff: VolumeParameter;
          readonly samples: VolumeParameter;
          readonly color: VolumeParameter;
          readonly luminanceInfluence: VolumeParameter;
          onBeforeRender(): void;
          private _ssao?;
          /** Creates the underlying SSAO effect instance — presumably invoked by the postprocessing pipeline (see {@link PostProcessingEffect}) */
          onCreateEffect(): EffectProviderResult;
      }

      /** [ScreenSpaceAmbientOcclusionN8](https://engine.needle.tools/docs/api/ScreenSpaceAmbientOcclusionN8) is a screen space ambient occlusion (SSAO) effect.
       * Ambient Occlusion is a shading method used to calculate how exposed each point in a scene is to ambient lighting.
       * The effect enhances the depth and realism of 3D scenes by simulating the soft shadows that occur in crevices, corners, and areas where objects are close to each other.
       * This implementation uses the N8AO library to provide high-quality SSAO with various quality settings.
       * @summary Screen Space Ambient Occlusion (SSAO) Post-Processing Effect
       * @category Effects
       * @group Components
       * @link [N8AO documentation](https://github.com/N8python/n8ao)
       */
      export declare class ScreenSpaceAmbientOcclusionN8 extends PostProcessingEffect {
          get typeName(): string;
          /** The underlying {@link N8AOPostPass} instance */
          get pass(): N8AOPostPass;
          /** Whether gamma correction is applied to the AO output (see the N8AO documentation) */
          gammaCorrection: boolean;
          /** The most important parameter for your ambient occlusion effect.
           * Controls the radius/size of the ambient occlusion in world units.
           * Should be set to how far you want the occlusion to extend from a given object.
           * Set it too low, and AO becomes an edge detector.
           * Too high, and the AO becomes "soft" and might not highlight the details you want.
           * The radius should be one or two magnitudes less than scene scale:
           * if your scene is 10 units across, the radius should be between 0.1 and 1; if it's 100, between 1 and 10.
           * @default 1
           */
          aoRadius: VolumeParameter;
          /** The second most important parameter for your ambient occlusion effect.
           * Controls how fast the ambient occlusion fades away with distance in proportion to its radius.
           * Defaults to 1, and behind-the-scenes, is calculated as a ratio of your radius (0.2 * distanceFalloff is the size used for attenuation).
           * Decreasing it reduces "haloing" artifacts and improves the accuracy of your occlusion,
           * but making it too small makes the ambient occlusion disappear entirely.
           * @default 1
           */
          falloff: VolumeParameter;
          /** A purely artistic control for the intensity of the AO - runs the ao through the function pow(ao, intensity),
           * which has the effect of darkening areas with more ambient occlusion.
           * Useful to make the effect more pronounced.
           * An intensity of 2 generally produces soft ambient occlusion that isn't too noticeable,
           * whereas one of 5 produces heavily prominent ambient occlusion.
           * @default 1
           */
          intensity: VolumeParameter;
          /** The color of the ambient occlusion. By default, it is black, but it can be changed to any color
           * to offer a crude approximation of global illumination.
           * Recommended in scenes where bounced light has a uniform "color",
           * for instance a scene that is predominantly lit by a blue sky.
           * The color is expected to be in the sRGB color space, and is automatically converted to linear space for you.
           * Keep the color pretty dark for sensible results.
           * @default new Color(0, 0, 0)
           */
          color: VolumeParameter;
          /** If you want the AO to calculate the radius based on screen space, you can do so by setting {@link screenspaceRadius} to true.
           * This is useful for scenes where the camera is moving across different scales a lot,
           * or for scenes where the camera is very close to the objects.
           * @default false
           */
          screenspaceRadius: boolean;
          /**
           * The quality of the ambient occlusion effect.
           * @default ScreenSpaceAmbientOcclusionN8QualityMode.Medium
           */
          quality: ScreenSpaceAmbientOcclusionN8QualityMode;
          private _ssao?;
          onValidate(): void;
          /** Creates the underlying N8AO effect instance — presumably invoked by the postprocessing pipeline (see {@link PostProcessingEffect}) */
          onCreateEffect(): EffectProviderResult;
      }

      /** Quality presets for the N8AO ambient occlusion effect, ordered from fastest (Performance) to highest fidelity (Ultra). See the [N8AO documentation](https://github.com/N8python/n8ao). */
      declare enum ScreenSpaceAmbientOcclusionN8QualityMode {
          Performance = 0,
          Low = 1,
          Medium = 2,
          High = 3,
          Ultra = 4
      }

     /**
      * The [ScrollFollow](https://engine.needle.tools/docs/api/ScrollFollow) component allows you to link the scroll position of the page (or a specific element) to one or more target objects.

      * This can be used to create scroll-based animations, audio playback, or other effects. For example you can link the scroll position to a timeline (PlayableDirector) to create scroll-based storytelling effects or to an Animator component to change the animation state based on scroll.
      *
      * ![](https://cloud.needle.tools/-/media/SYuH-vXxO4Jf30oU1HhjKQ.gif)
      * ![](https://cloud.needle.tools/-/media/RplmU_j7-xb8XHXkOzc9PA.gif)
      *
      * Assign {@link target} objects to the component to have them updated based on the current scroll position (check the 'target' property for supported types).
      *
      * @link Example at https://scrollytelling-2-z23hmxby7c6x-u30ld.needle.run/
      * @link Template at https://github.com/needle-engine/scrollytelling-template
      * @link [Scrollytelling Bike Demo](https://scrollytelling-bike-z23hmxb2gnu5a.needle.run/)
      *
      * ## How to use with an Animator
      * 1. Create an Animator component and set up a float parameter named "scroll".
      * 2. Create transitions between animation states based on the "scroll" parameter (e.g. from 0 to 1).
      * 3. Add a ScrollFollow component to the same GameObject or another GameObject in the scene.
      * 4. Assign the Animator component to the ScrollFollow's target property.
      *
      * ## How to use with a PlayableDirector (timeline)
      * 1. Create a PlayableDirector component and set up a timeline asset.
      * 2. Add a ScrollFollow component to the same GameObject or another GameObject in the scene.
      * 3. Assign the PlayableDirector component to the ScrollFollow's target property.
      * 4. The timeline will now scrub based on the scroll position of the page.
      * 5. (Optional) Add ScrollMarker markers to your HTML to define specific points in the timeline that correspond to elements on the page. For example:
       *   ```html
       *   <div data-timeline-marker="0.0">Start of Timeline</div>
       *   <div data-timeline-marker="0.5">Middle of Timeline</div>
       *   <div data-timeline-marker="1.0">End of Timeline</div>
       *   ```
      *
      * @summary Links scroll position to target objects
      * @category Web
      * @category Interaction
      * @group Components
      * @component
      */
      export declare class ScrollFollow extends Component {
          /**
           * Target object(s) to follow the scroll position of the page.
           *
           * Supported target types:
           * - PlayableDirector (timeline), the scroll position will be mapped to the timeline time
           * - Animator, the scroll position will be set to a float parameter named "scroll"
           * - Animation, the scroll position will be mapped to the animation time
           * - AudioSource, the scroll position will be mapped to the audio time
           * - SplineWalker, the scroll position will be mapped to the position01 property
           * - Light, the scroll position will be mapped to the intensity property
           * - Object3D, the object will move vertically based on the scroll position
           * - Any object with a `scroll` property (number or function)
           */
          target: object[] | object | null;
          /**
           * Damping for the movement, set to 0 for instant movement
           * @default 0
           */
          damping: number;
          /**
           * If true, the scroll value will be inverted (e.g. scrolling down will result in a value of 0)
           * @default false
           */
          invert: boolean;
          /**
           * **Experimental - might change in future updates**
           * If set, the scroll position will be read from the specified element instead of the window.
           * Use a CSS selector to specify the element, e.g. `#my-scrollable-div` or `.scroll-container`.
           * @default null
           */
          htmlSelector: string | null;
          /**
           * Scroll source mode. The type currently permits only "window": the scroll position is read from the window,
           * unless {@link htmlSelector} specifies an element to read from instead.
           */
          mode: "window";
          /**
           * Event fired when the scroll position changes
           */
          changed: EventList<ScrollFollowEvent>;
          /**
           * Current scroll value in "pages" (0 = top of page, 1 = bottom of page)
           */
          get currentValue(): number;
          private _current_value;
          private _target_value;
          private _appliedValue;
          private _needsUpdate;
          private _firstUpdate;
          awake(): void;
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /* Excluded from this release type: lateUpdate */
          private _lastSelectorValue;
          private _lastSelectorElement;
          private updateCurrentScrollValue;
          private applyScroll;
          private handleTimelineTarget;
      }

      /** Event payload passed to listeners of {@link ScrollFollow.changed}. */
      declare type ScrollFollowEvent = {
          /** Event type */
          type: "change";
          /** Current scroll value */
          value: number;
          /** ScrollFollow component that raised the event */
          component: ScrollFollow;
          /** Call to prevent invocation of default (e.g. updating targets) */
          preventDefault: () => void;
          /** Whether the default behaviour has been prevented */
          defaultPrevented: boolean;
      };

      /**
       * Marker with a name, used for scroll-driven timelines. It is used together with elements in your HTML to define what time in the timeline should be active when the element is in the scroll view.
       *
       * @example Mark html elements to define scroll positions
       * ```html
       * <div data-timeline-marker>...</div>
       * ```
       *
       * @link [Example Project using ScrollMarker](https://scrollytelling-bike-z23hmxb2gnu5a.needle.run/)
       * @category Animation and Sequencing
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare type ScrollMarkerModel = MarkerModel & {
          /** Name of the marker (optional) */
          name?: string;
      };

     /**
      * Automatically fades objects to transparent when they obscure a reference point from the camera's view.
      * Perfect for architectural visualization, third-person games, or any scenario where objects should
      * become see-through when blocking the view of important content.
      *
      * [![](https://cloud.needle.tools/-/media/1gbMOJLgTlXOug_g6xeKfg.gif)](https://engine.needle.tools/samples/see-through)
      *
      * **How it works:**
      * - Monitors the angle between the camera, this object, and a reference point
      * - When the object blocks the view to the reference point, it fades out
      * - Automatically affects all {@link Renderer} components on this object and children
      * - Supports both transparent fading and alpha hash (dithered) fading
      *
      * **Key Features:**
      * - Smooth fade transitions with configurable duration
      * - Optional alpha hash for maintaining opaque rendering (better performance)
      * - Automatic or manual update modes
      * - Disables raycasting when faded (objects become click-through)
      * - Preserves original material properties when re-enabled
      *
      * **Configuration:**
      * - `referencePoint` - Object to keep visible (defaults to scene root)
      * - `fadeDuration` - Transition speed (default: 0.05 seconds)
      * - `minAlpha` - Minimum opacity when faded (default: 0 = fully transparent)
      * - `useAlphaHash` - Use dithered transparency instead of true transparency (default: true)
      *
      * **Performance:**
      * - Materials are cloned once per renderer to avoid affecting shared materials
      * - Updates direction calculation every 20 frames by default (configurable via `autoUpdate`)
      * - Use `needsUpdate = true` to force immediate recalculation
      *
      * **Requirements:**
      * Requires at least one {@link Renderer} component on the same object or child objects.
      *
      * @example Make walls transparent when blocking view
      * ```ts
      * // Add to walls or obstacles
      * const seeThrough = wall.addComponent(SeeThrough);
      * seeThrough.referencePoint = player; // Keep player visible
      * seeThrough.fadeDuration = 0.2; // Smooth fade
      * seeThrough.minAlpha = 0.2; // Slightly visible when faded
      * ```
      *
      * @example Third-person camera with see-through objects
      * ```ts
      * const character = GameObject.findByName("Character");
      * const obstacles = GameObject.findByTag("Obstacle");
      *
      * for (const obstacle of obstacles) {
      *   const st = obstacle.addComponent(SeeThrough);
      *   st.referencePoint = character;
      *   st.useAlphaHash = true; // Better performance
      * }
      * ```
      *
      * @example Manual control of see-through effect
      * ```ts
      * const seeThrough = this.gameObject.getComponent(SeeThrough);
      * if (seeThrough) {
      *   seeThrough.autoUpdate = false; // Disable automatic fading
      *
      *   // Manually control transparency
      *   seeThrough.updateAlpha(0.5, 0.3); // Fade to 50% over 0.3 seconds
      *
      *   // Or use override for precise control
      *   seeThrough.overrideAlpha = 0.8; // Force 80% opacity
      * }
      * ```
      *
      * @summary Fades objects when they obscure the camera's view of a reference point
      * @category Rendering
      * @group Components
      * @see {@link Renderer} for material/rendering control (required)
      * @see {@link Camera} for camera setup and configuration
      * @see {@link OrbitControls} for camera controls in similar use cases
      * @link https://see-through-walls-z23hmxbz1kjfjn.needle.run/ for live demo
      * @link https://engine.needle.tools/samples/see-through for sample project
      */
      export declare class SeeThrough extends Component {
          /**
           * Assign a reference point - if this point will be obscured from the camera by this object then this object will fade out.
           * If no reference point is assigned the scene's root object will be used as reference point.
           */
          referencePoint: Object3D | null;
          /**
           * Fade Duration in seconds
           * @default 0.05
           */
          fadeDuration: number;
          /**
           * Minimum alpha value when fading out (0-1)
           * @default 0
           */
          minAlpha: number;
          /**
           * When useAlphaHash is enabled the object will fade out using alpha hashing, this means the object can stay opaque. If disabled the object will set to be transparent when fading out.
           * @default true
           */
          useAlphaHash: boolean;
          /**
           * Set this to force updating the reference point position and direction
           */
          set needsUpdate(val: boolean);
          get needsUpdate(): boolean;
          /**
           * Override the alpha value, -1 means no override
           * @default -1
           */
          overrideAlpha: number;
          /**
           * When enabled the fading is updated automatically (see the class docs: the direction is recalculated every 20 frames by default).
           * Set to false to control transparency manually via {@link updateAlpha} or {@link overrideAlpha}.
           */
          autoUpdate: boolean;
          private readonly _referencePointVector;
          private readonly _referencePointDir;
          private _distance;
          private _renderer;
          private _needsUpdate;
          private _id;
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /* Excluded from this release type: update */
          private updateDirection;
          /**
           * Update the alpha of the object's materials towards the target alpha over the given duration.
           * @param targetAlpha Target alpha value (0-1)
           * @param duration Duration in seconds to reach the target alpha. 0 means immediate. Default is the component's fadeDuration.
           */
          updateAlpha(targetAlpha: number, duration?: number): void;
      }

      /** Sends a network message over the given connection notifying other clients that the object with the given guid was destroyed — NOTE(review): confirm exact message semantics against the INetworkConnection implementation. */
      export declare function sendDestroyed(guid: string, con: INetworkConnection, opts?: SyncDestroyOptions): void;

      /** Controls when a networking message is sent/delivered (e.g. immediately, queued, or deferred until a connection is established or a room is joined) — NOTE(review): semantics inferred from member names; confirm in the networking implementation. */
      export declare enum SendQueue {
          OnConnection = 0,
          OnRoomJoin = 1,
          Queued = 2,
          Immediate = 3
      }

      /**
       * Marks a field for serialization and editor exposure. Required for fields that reference
       * other objects, components, or assets. Primitive types (string, number, boolean) work without a type argument.
       *
       * @param type The constructor type for complex objects. Omit for primitives.
       * @returns A property decorator to be applied to the field.
       *
       * @example Primitive types (no type needed)
       * ```ts
       * @serializable()
       * speed: number = 1;
       *
       * @serializable()
       * label: string = "Hello";
       * ```
       * @example Object references
       * ```ts
       * @serializable(Object3D)
       * target: Object3D | null = null;
       *
       * @serializable(Renderer)
       * myRenderer: Renderer | null = null;
       * ```
       * @example Arrays
       * ```ts
       * @serializable([Object3D])
       * waypoints: Object3D[] = [];
       * ```
       * @see {@link syncField} for automatic network synchronization
       * @link https://engine.needle.tools/docs/reference/typescript-decorators.html#serializable
       */
      export declare const serializable: <T>(type?: Constructor<T> | null | Array<Constructor<any> | TypeResolver<T>> | TypeResolver<T>) => (_target: any, _propertyKey: string | {
          name: string;
      }) => void;

      /**
       * Carries state while (de)serializing a single object/field, e.g. during glTF loading
       * (see {@link gltf} and {@link gltfId}).
       */
      export declare class SerializationContext {
          /** Root object the (de)serialization started from (set via the constructor) */
          root: Object3D;
          gltf?: GLTF_2;
          /** the url of the glb that is currently being loaded */
          gltfId?: SourceIdentifier;
          object: Object3D;
          target?: object;
          nodeId?: number;
          nodeToObject?: NodeToObjectMap;
          objectToNode?: ObjectToNodeMap;
          context?: Context;
          path?: string;
          type?: ConstructorConcrete<any>;
          /** the serializable attribute for this field (target.path) */
          serializable?: any;
          /** holds information if a field was undefined before serialization. This gives us info if we might want to warn the user about missing attributes */
          implementationInformation?: ImplementationInformation;
          constructor(root: Object3D);
      }

      /** Please use {@link serializable} - this version has a typo and will be removed in future versions */
      export declare const serializeable: <T>(type?: Constructor<T> | null | Array<Constructor<any> | TypeResolver<T>> | TypeResolver<T>) => (_target: any, _propertyKey: string | {
          name: string;
      }) => void;

      /** Serializes the given object using the provided context.
       * @returns the serialized representation, or null — NOTE(review): confirm under which conditions null is returned. */
      export declare function serializeObject(obj: ISerializable, context: SerializationContext): object | null;

      /** Callback invoked when the XR session changes. */
      export declare type SessionChangedEvt = (args: NeedleXREventArgs) => void;

      /** Callback invoked when ending an XR session has been requested. NOTE(review): the mutable `newSession` field presumably allows handlers to provide a follow-up session — confirm. */
      export declare type SessionRequestedEndEvent = (args: {
          readonly mode: XRSessionMode;
          readonly init: XRSessionInit;
          newSession: XRSession | null;
      }) => void;

      /** Callback invoked when an XR session has been requested. */
      export declare type SessionRequestedEvent = (args: {
          readonly mode: XRSessionMode;
          readonly init: XRSessionInit;
      }) => void;

      /** Sets the active state of the given object. NOTE(review): the return value semantics and the meaning of a numeric `active` are not visible here — confirm against the implementation. */
      export declare function setActive(go: Object3D, active: boolean | number): boolean;

     /**
      * Shows or hides a target object when this object is clicked.
      * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
      *
      * Optionally hides itself after being clicked (`hideSelf`), or toggles the target's visibility on each click (`toggleOnClick`).
      *
       * @see {@link HideOnStart} to hide an object when the scene starts
      * @see {@link PlayAnimationOnClick} to play animations when clicked
      * @see {@link ChangeMaterialOnClick} to change material when clicked
      * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
      * @summary Sets the active state of an object when clicked
      * @category Everywhere Actions
      * @group Components
      */
      export declare class SetActiveOnClick extends Component implements IPointerClickHandler, UsdzBehaviour {
          /** The target object to show or hide. */
          target?: Object3D;
          /** If true, the target's visibility will be toggled on each click. When enabled, `hideSelf` and `targetState` are ignored. */
          toggleOnClick: boolean;
          /** The visibility state to apply to the target when clicked. Only used when `toggleOnClick` is false. */
          targetState: boolean;
          /** If true, this object will hide itself after being clicked. Only used when `toggleOnClick` is false. */
          hideSelf: boolean;
          /** {@link IPointerClickHandler} implementation */
          onPointerEnter(): void;
          /** {@link IPointerClickHandler} implementation */
          onPointerExit(): void;
          /** Applies the configured visibility change when this object is clicked ({@link IPointerClickHandler} implementation) */
          onPointerClick(args: PointerEventData): void;
          private selfModel;
          private selfModelClone;
          private targetModel?;
          private toggleModel?;
          /** {@link UsdzBehaviour} implementation: creates the USDZ/QuickLook (Everywhere Actions) behaviours for this component */
          createBehaviours(_: any, model: USDObject, _context: USDZExporterContext): void;
          private stateBeforeCreatingDocument;
          private targetStateBeforeCreatingDocument;
          private static clonedToggleIndex;
          private static wasVisible;
          private static toggleClone;
          private static reverseToggleClone;
          /** {@link UsdzBehaviour} implementation */
          beforeCreateDocument(): void;
          /** {@link UsdzBehaviour} implementation */
          afterCreateDocument(ext: BehaviorExtension, context: USDZExporterContext): void;
          /** {@link UsdzBehaviour} implementation */
          afterSerialize(_ext: BehaviorExtension, _context: USDZExporterContext): void;
      }

      /** Set false to prevent overlay messages from being shown */
      export declare function setAllowBalloonMessages(allow: boolean): void;

      /**
       * @deprecated Use {@link setAllowBalloonMessages} instead
       */
      export declare function setAllowOverlayMessages(allow: boolean): void;

      /**
       * Enable or disable autofitting for the given object. Objects that are 'disabled' will be excluded in getBoundingBox calculations.
       * This is used by ContactShadows or OrbitControls when fitting the shadow plane or camera to the given objects or scene.
       * @see useForAutoFit
       */
      export declare function setAutoFitEnabled(obj: Object3D, enabled: boolean): void;

      /** Set the camera controller for the given camera */
      export declare function setCameraController(cam: Camera_2, cameraController: ICameraController, active: boolean): void;

      /** Marks the given object as destroyed (or clears the flag) — NOTE(review): confirm how this flag is consumed. */
      export declare function setDestroyed(go: Object3D, value: boolean): void;

      /** Enforce the dev environment flag to be true or false */
      export declare function setDevEnvironment(val: boolean): void;

      /** Marks an object as disposable — NOTE(review): presumably controls whether its resources may be disposed automatically; confirm the consumer. */
      export declare function setDisposable(obj: object | null | undefined, disposable: boolean): void;

      /** Mark an Object3D or component as not destroyable
       * @param instance the object to be marked as not destroyable
       * @param value true if the object should not be destroyed in `destroy`
       */
      export declare function setDontDestroy(instance: Object3D | IComponent, value?: boolean): void;

      /** Sets or adds an URL query parameter */
      export declare function setOrAddParamsToUrl(url: URLSearchParams, paramName: string, paramValue: string | number): void;

      /** Sets an URL query parameter. See also {@link setParamWithoutReload}. */
      export declare function setParam(paramName: string, paramValue: string): void;

      /** Sets an URL parameter without reloading the website */
      export declare function setParamWithoutReload(paramName: string, paramValue: string | null, appendHistory?: boolean): void;

      /** Overrides the PeerJS options used for networking connections */
      export declare function setPeerOptions(opts: PeerJSOption): void;

      /**
       * Enable or disable resource usage tracking
       */
      export declare function setResourceTrackingEnabled(enabled: boolean): void;

      /** Replaces the current entry in the browser history. Internally uses `window.history.replaceState` */
      export declare function setState(title: string, urlParams: URLSearchParams, state?: any): void;

      /** A function that assigns a value */
      declare type setter = (v: any) => void;

      /** Controls whether the object is visible during custom shadow rendering — NOTE(review): confirm which render passes consume this flag. */
      export declare function setVisibleInCustomShadowRendering(obj: Object3D, enabled: boolean): void;

      /** Set the world rotation of an object from euler angles */
      export declare function setWorldEuler(obj: Object3D, val: Euler): void;

      /**
       * Set the world position of an object
       * @param obj the object to set the world position of
       * @param val the world position to set
       */
      export declare function setWorldPosition(obj: Object3D, val: Vector3): Object3D;

      /**
       * Set the world position of an object
       * @param obj the object to set the world position of
       * @param x the x position
       * @param y the y position
       * @param z the z position
       */
      export declare function setWorldPositionXYZ(obj: Object3D, x: number, y: number, z: number): Object3D;

      /** Set the world rotation of an object from a quaternion */
      export declare function setWorldQuaternion(obj: Object3D, val: Quaternion): void;

      /** Set the world rotation of an object from quaternion components */
      export declare function setWorldQuaternionXYZW(obj: Object3D, x: number, y: number, z: number, w: number): void;

      /** Set the world rotation of an object from euler angles given as a Vector3 — NOTE(review): confirm radians vs degrees (compare {@link setWorldRotationXYZ}). */
      export declare function setWorldRotation(obj: Object3D, val: Vector3): void;

      /** Set the world rotation of an object from euler angle components
       * @param degrees if true, x/y/z are interpreted as degrees — NOTE(review): confirm the default
       */
      export declare function setWorldRotationXYZ(obj: Object3D, x: number, y: number, z: number, degrees?: boolean): void;

      /** Set the world scale of an object */
      export declare function setWorldScale(obj: Object3D, vec: Vector3): void;

      /**
       * How shadows are cast by a renderer.
       * NOTE(review): semantics inferred from member names — Off: casts no shadows, On: casts shadows,
       * TwoSided: casts from both faces, ShadowsOnly: only the shadow is rendered. Confirm against the Renderer implementation.
       */
      declare enum ShadowCastingMode {
          Off = 0,
          On = 1,
          TwoSided = 2,
          ShadowsOnly = 3
      }

     /**
      * ShadowCatcher renders real-time shadows cast by lights onto a mesh surface.
      * Captures actual shadow data from the scene's lighting system (directional lights, point lights, spot lights).
      *
      * If the GameObject is a Mesh, it applies a shadow-catching material to it.
      * Otherwise, it creates a quad mesh with the shadow-catching material automatically.
      *
      * [![](https://cloud.needle.tools/-/media/pFXPchA4vynNKOjgG_KucQ.gif)](https://engine.needle.tools/samples/shadow-catcher/)
      * *Additive ShadowCatcher mode with point light shadows*
      *
      * [![](https://cloud.needle.tools/-/media/oIWgEU49rEA0xJ2TrbzVlg.gif)](https://engine.needle.tools/samples/transmission/)
      * *ShadowCatcher with directional light shadows*
      *
      * **Shadow Modes:**
      * - `ShadowMask` - Only renders shadows (works best with directional lights)
      * - `Additive` - Renders light additively (works best with point/spot lights)
      * - `Occluder` - Occludes light without rendering shadows
      *
      * **ShadowCatcher vs ContactShadows:**
      * - **ShadowCatcher**: Real-time shadows from actual lights. Accurate directional shadows that match light sources. Requires lights with shadows enabled. Updates every frame.
      * - **{@link ContactShadows}**: Proximity-based ambient occlusion-style shadows. Extremely soft and diffuse, ideal for subtle grounding. Better performance, works without lights.
      *
      * **When to use ShadowCatcher:**
      * - You need accurate shadows that match specific light directions
      * - Scene has real-time lighting with shadow-casting lights
      * - Shadows need to follow light attenuation and angles
      * - AR/VR scenarios where light estimation is available
      * - Hard or semi-hard shadow edges are desired
      *
      * **When to use ContactShadows instead:**
      * - You want very soft, ambient occlusion-style ground shadows
      * - Performance is critical (no per-frame shadow rendering)
      * - Scene doesn't have shadow-casting lights
      * - Product visualization or configurators (subtle grounding effect)
      * - Soft, diffuse shadows are more visually appealing than accurate ones
      *
      * **Note:** ShadowCatcher meshes are not raycastable by default (layer 2). Change layers in `onEnable()` if raycasting is needed.
      *
      * @example Basic shadow catcher plane
      * ```ts
      * const plane = new Object3D();
      * const catcher = addComponent(plane, ShadowCatcher);
      * catcher.mode = ShadowMode.ShadowMask;
      * catcher.shadowColor = new RGBAColor(0, 0, 0, 0.8);
      * ```
      *
      * @example Apply to existing mesh
      * ```ts
      * const mesh = this.gameObject.getComponent(Mesh);
      * const catcher = addComponent(mesh, ShadowCatcher);
      * // The mesh will now catch shadows from scene lights
      * ```
      *
      * @summary Renders real-time shadows from lights onto surfaces
      * @category Rendering
      * @group Components
      * @see {@link ContactShadows} for proximity-based fake shadows (better performance)
      * @see {@link Light} for shadow-casting light configuration
      * @see {@link Renderer} for shadow receiving settings
      * @link https://engine.needle.tools/samples/shadow-catcher/
      * @link https://engine.needle.tools/samples/transmission/
      */
      export declare class ShadowCatcher extends Component {
          /** The shadow rendering mode. See {@link ShadowMode}. */
          mode: ShadowMode;
          /** Color (including alpha) used when rendering the caught shadows */
          shadowColor: RGBAColor;
          private targetMesh?;
          /* Excluded from this release type: start */
          /** Applies the light-blending (additive) material */
          applyLightBlendMaterial(): void;
          /** Applies the shadow-mask material */
          applyShadowMaterial(): void;
          /** Applies the occluder material */
          applyOccluderMaterial(): void;
          private applyMaterialOptions;
      }

      /**
       * The mode of the ShadowCatcher.
       * - ShadowMask: only renders shadows.
       * - Additive: renders shadows additively.
       * - Occluder: occludes light.
       */
      declare enum ShadowMode {
          /** Only renders shadows */
          ShadowMask = 0,
          /** Renders shadows additively */
          Additive = 1,
          /** Occludes light */
          Occluder = 2
      }

      /**
       * Emitter shape of the particle system (three.quarks {@link EmitterShape} implementation).
       * Controls where particles spawn (see {@link shapeType}, e.g. sphere, circle, cone, box or mesh)
       * and their initial direction (see {@link getDirection}).
       */
      export declare class ShapeModule implements EmitterShape {
          get type(): string;
          initialize(particle: QParticle): void;
          toJSON(): ShapeJSON;
          clone(): EmitterShape;
          shapeType: ParticleSystemShapeType;
          enabled: boolean;
          alignToDirection: boolean;
          angle: number;
          arc: number;
          arcSpread: number;
          arcSpeedMultiplier: number;
          arcMode: ParticleSystemShapeMultiModeValue;
          boxThickness: Vector3;
          position: Vector3;
          rotation: Vector3;
          private _rotation;
          scale: Vector3;
          radius: number;
          radiusThickness: number;
          sphericalDirectionAmount: number;
          randomDirectionAmount: number;
          randomPositionAmount: number;
          /** Controls if particles should spawn off vertices, faces or edges. `shapeType` must be set to `MeshRenderer` */
          meshShapeType?: ParticleSystemMeshShapeType;
          /** When assigned and `shapeType` is set to `MeshRenderer` particles will spawn using a mesh in the scene.
           * Use the `meshShapeType` to choose if particles should be spawned from vertices, faces or edges
           * To re-assign use the `setMesh` function to cache the mesh and geometry
           * */
          meshRenderer?: MeshRenderer;
          private _meshObj?;
          private _meshGeometry?;
          /** Assigns the mesh to spawn particles from and caches its mesh and geometry (see {@link meshRenderer}) */
          setMesh(mesh: MeshRenderer): void;
          private system;
          private _space?;
          private readonly _worldSpaceMatrix;
          private readonly _worldSpaceMatrixInverse;
          constructor();
          update(_system: IParticleSystem_2, _delta: number): void;
          onUpdate(system: IParticleSystem, _context: Context, simulationSpace: ParticleSystemSimulationSpace, obj: Object3D): void;
          private applyRotation;
          /** nebula implementations: */
          /** initializer implementation */
          private _vector;
          private _temp;
          private _triangle;
          onInitialize(particle: QParticle): void;
          private _dir;
          /** Returns the spawn direction for the given particle */
          getDirection(particle: QParticle, pos: Vec3): Vector3;
          private static _randomQuat;
          private static _tempVec;
          private randomizePosition;
          private randomizeDirection;
          private spherizeDirection;
          private randomSpherePoint;
          private randomCirclePoint;
          private _loopTime;
          private _loopDirection;
          private randomConePoint;
      }

      /** Result of a physics shape overlap query: the overlapping object and its collider */
      export declare class ShapeOverlapResult {
          readonly object: Object3D;
          readonly collider: ICollider;
          constructor(object: Object3D, collider: ICollider);
      }

      /** Material type that also exposes the optional properties of the common three.js material subclasses for convenient access */
      declare type SharedMaterial = (Material & Partial<MeshStandardMaterial> & Partial<MeshPhysicalMaterial> & Partial<ShaderMaterial> & Partial<RawShaderMaterial>);

      /**
       * Indexable collection of a {@link Renderer}'s materials ({@link ISharedMaterials} implementation).
       * Material assignments are tracked via the {@link changed} flag.
       */
      declare class SharedMaterialArray implements ISharedMaterials {
          [num: number]: Material;
          private _renderer;
          private _targets;
          private _indexMapMaxIndex?;
          private _indexMap?;
          private _changed;
          /** Whether materials have changed — NOTE(review): confirm when this flag is reset */
          get changed(): boolean;
          set changed(value: boolean);
          /** Returns true if this collection belongs to the given renderer */
          is(renderer: Renderer): boolean;
          constructor(renderer: Renderer, originalMaterials: Material[]);
          /** Number of material slots */
          get length(): number;
          [Symbol.iterator](): Generator<SharedMaterial | null, void, unknown>;
          private resolveIndex;
          private setMaterial;
          private getMaterial;
      }

      /**
       * [SharpeningEffect](https://engine.needle.tools/docs/api/SharpeningEffect) Sharpening effect enhances the details and edges in the rendered scene by increasing the contrast between adjacent pixels.
       * This effect can make textures and fine details appear clearer and more defined, improving the overall visual quality of the scene.
       * It is particularly useful in scenes where details may be lost due to blurriness or low resolution.
       * @summary Sharpening Post-Processing Effect
       * @category Effects
       * @group Components
       */
      export declare class SharpeningEffect extends PostProcessingEffect {
          get typeName(): string;
          order: number | undefined;
          private _effect?;
          onCreateEffect(): any;
          private get effect();
          /** Strength of the sharpening */
          set amount(value: number);
          get amount(): number;
          private _amount;
          /** Radius of the sharpening — NOTE(review): confirm units (pixels vs normalized) */
          set radius(value: number);
          get radius(): number;
          private _radius;
      }

      /** Displays an error message on screen for a certain amount of time */
      export declare function showBalloonError(text: string, options?: Partial<BalloonOptions>): void;

      /** Displays a debug message on screen for a certain amount of time */
      export declare function showBalloonMessage(text: string, options?: Partial<BalloonOptions>): void;

      /** Displays a warning message on screen for a certain amount of time */
      export declare function showBalloonWarning(text: string, options?: Partial<BalloonOptions>): void;

      /** Shows the on-screen debug console */
      export declare function showDebugConsole(): void;

      /**
       * Used to reference a signal asset in a SignalReceiver. This is internally used by the {@link SignalReceiverEvent}.
       */
      export declare class SignalAsset {
          /** Unique id of the referenced signal asset */
          guid: string;
      }

      /**
       * Timeline marker model used for emitting signals.
       * @category Animation and Sequencing
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare class SignalMarkerModel extends MarkerModel {
          /** NOTE(review): presumably emits the signal retroactively when the playhead jumps past the marker — confirm */
          retroActive: boolean;
          /** NOTE(review): presumably restricts the signal to be emitted only once — confirm */
          emitOnce: boolean;
          /** References the signal asset to emit — NOTE(review): presumably its guid; confirm */
          asset: string;
      }

     /** SignalReceiver is a component that listens for signals and invokes a reaction when a signal is received.
      * Signals can be added to a signal track on a {@link PlayableDirector}
      *
      * @summary Receives signals and invokes reactions
      * @category Animation and Sequencing
      * @group Components
      */
     export declare class SignalReceiver extends Component {
         /** Internal registry of active SignalReceiver instances */
         private static receivers;
         /**
          * Invokes reactions on registered receivers for the signal with the given guid
          * @param guid The guid of the signal to invoke
          */
         static invoke(guid: string): void;
         /** The signal-to-reaction mappings of this receiver */
         events?: SignalReceiverEvent[];
         /* Excluded from this release type: awake */
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         /**
          * Invokes the reactions configured for the given signal
          * @param sig The signal asset or its guid string
          */
         invoke(sig: SignalAsset | string): void;
     }

     /**
      * An event that links a signal to a reaction.
      * Used internally by {@link SignalReceiver}.
      */
     export declare class SignalReceiverEvent {
         /** The signal that triggers the reaction */
         signal: SignalAsset;
         /** The reaction invoked when the signal is received */
         reaction: EventList<void>;
     }

     /**
      * Track handler that evaluates signal markers on a timeline track and triggers the
      * associated {@link SignalReceiver} reactions.
      */
     export declare class SignalTrackHandler extends TrackHandler {
         /** The signal marker models of this track */
         models: Models.SignalMarkerModel[];
         /** Flags marking markers that have already triggered (appears to parallel {@link models} — confirm) */
         didTrigger: boolean[];
         /** Receivers resolved per marker; entries may be null when no receiver was found */
         receivers: Array<SignalReceiver | null>;
         /**
          * Evaluates the track at the given time
          * @param time The timeline time to evaluate at
          */
         evaluate(time: number): void;
     }

     /** Simple width/height value pair */
     export declare class Size {
         width: number;
         height: number;
     }

     /**
      * Particle module that derives particle size from particle velocity (see {@link evaluate}).
      * When {@link separateAxes} is enabled the per-axis curves (x/y/z) are used instead of {@link size} —
      * TODO confirm against the implementation.
      */
     export declare class SizeBySpeedModule {
         /** Whether this module is active */
         enabled: boolean;
         /** Speed range the curves are mapped over — confirm exact semantics */
         range: Vector2;
         /** When true, size is evaluated per axis using the x/y/z curves */
         separateAxes: boolean;
         /** Uniform size curve */
         size: MinMaxCurve;
         sizeMultiplier: number;
         /** Per-axis size curve (x) */
         x: MinMaxCurve;
         xMultiplier: number;
         /** Per-axis size curve (y) */
         y: MinMaxCurve;
         yMultiplier: number;
         /** Per-axis size curve (z) */
         z: MinMaxCurve;
         zMultiplier: number;
         /**
          * Evaluates the size for the given velocity.
          * @param vel The particle velocity
          * @param _t01 Normalized time (appears unused by name convention — confirm)
          * @param lerpFactor Interpolation factor for curve evaluation
          * @param size Output vector — presumably written in place and returned; verify
          * @returns The evaluated size
          */
         evaluate<T extends Vector3 | Vector3_2>(vel: T, _t01: number, lerpFactor: number, size: T): T;
     }

     /**
      * Particle module that evaluates particle size over the particle's normalized lifetime.
      * When {@link separateAxes} is enabled the per-axis curves (x/y/z) are used instead of {@link size} —
      * TODO confirm against the implementation.
      */
     export declare class SizeOverLifetimeModule {
         /** Whether this module is active */
         enabled: boolean;
         /** When true, size is evaluated per axis using the x/y/z curves */
         separateAxes: boolean;
         /** Uniform size curve */
         size: MinMaxCurve;
         sizeMultiplier: number;
         /** Per-axis size curve (x) */
         x: MinMaxCurve;
         xMultiplier: number;
         /** Per-axis size curve (y) */
         y: MinMaxCurve;
         yMultiplier: number;
         /** Per-axis size curve (z) */
         z: MinMaxCurve;
         zMultiplier: number;
         private _time;
         private _temp;
         /**
          * Evaluates the size at the given normalized lifetime.
          * @param t01 Normalized lifetime, presumably in [0, 1]
          * @param target Optional output vector (avoids allocation)
          * @param lerpFactor Optional interpolation factor for curve evaluation
          * @returns The evaluated size
          */
         evaluate(t01: number, target?: Vec3, lerpFactor?: number): Vec3;
     }

     /**
      * Renders deformable meshes that deform via bones and/or blend shapes.
      * @summary Renderer for deformable meshes
      * @category Rendering
      * @group Components
      **/
     export declare class SkinnedMeshRenderer extends MeshRenderer {
         // Internal flag — when set, the bounding sphere is recomputed (see markBoundsDirty)
         private _needUpdateBoundingSphere;
         /** Component lifecycle hook: initialization */
         awake(): void;
         /** Component lifecycle hook: called after this renderer has been rendered */
         onAfterRender(): void;
         /** Requests a bounds update for the deformed mesh */
         markBoundsDirty(): void;
     }

     /**
      * Slerp between two vectors
      * @param vec The start vector (NOTE(review): may be modified in place — confirm with implementation)
      * @param end The end vector
      * @param t Interpolation factor, typically in [0, 1]
      * @returns The spherically interpolated vector
      */
     export declare function slerp(vec: Vector3, end: Vector3, t: number): Vector3;

     /**
      * [SmoothFollow](https://engine.needle.tools/docs/api/SmoothFollow) makes this GameObject smoothly follow another target object's position and/or rotation.
      *
      * **Position Following:**
      * When enabled (`followFactor > 0`), this object will move towards the target's world position.
      * The object interpolates from its current position to the target's position each frame.
      * Use `positionAxes` to restrict following to specific axes (e.g., only horizontal movement).
      *
      * **Rotation Following:**
      * When enabled (`rotateFactor > 0`), this object will rotate to match the target's world rotation.
      * The object smoothly interpolates from its current rotation to the target's rotation each frame.
      * This makes the object face the same direction as the target, not look at it (use {@link LookAt} for that).
      *
      * **Smoothing:**
      * Both position and rotation use time-based interpolation (lerp/slerp).
      * Higher factor values = faster following (less lag), lower values = slower following (more lag).
      * Set a factor to 0 to disable that type of following entirely.
      *
      * **Common Use Cases:**
      * - Camera following a player character
      * - UI elements tracking world objects
      * - Delayed motion effects (ghost trails, spring arms)
      * - Smoothed object attachment
      *
      * @example Follow a target with smooth position
      * ```ts
      * const follower = myObject.addComponent(SmoothFollow);
      * follower.target = playerObject;
      * follower.followFactor = 5;  // Higher = faster following
      * follower.rotateFactor = 0;  // Don't follow rotation
      * ```
      *
      * @example Follow only on horizontal plane
      * ```ts
      * follower.positionAxes = Axes.X | Axes.Z; // Follow X and Z only (no vertical)
      * ```
      *
      * @example Follow both position and rotation
      * ```ts
      * follower.target = targetObject;
      * follower.followFactor = 3;  // Smooth position following
      * follower.rotateFactor = 2;  // Smooth rotation following
      * ```
      *
      * @summary Smoothly follows a target object's position and/or rotation
      * @category Interactivity
      * @group Components
      * @see {@link Mathf} for the interpolation used
      */
     export declare class SmoothFollow extends Component {
         /**
          * The target to follow. If null, the GameObject will not move.
          */
         target: Object3D | null;
         /**
          * Speed factor for position following.
          * Controls how quickly this object moves to match the target's position.
          * Higher values = faster/tighter following (less lag), lower = slower/looser (more lag).
          * Set to 0 to disable position following entirely.
          * @default 0.1
          */
         followFactor: number;
         /**
          * Speed factor for rotation following.
          * Controls how quickly this object rotates to match the target's rotation.
          * Higher values = faster/tighter following (less lag), lower = slower/looser (more lag).
          * Set to 0 to disable rotation following entirely.
          * @default 0.1
          */
         rotateFactor: number;
         /**
          * Which position axes to follow. Use bitwise OR to combine:
          * `Axes.X | Axes.Y` follows only X and Y axes.
          * @default Axes.All
          */
         positionAxes: Axes;
         /** When true, rotates 180° around Y axis (useful for mirrored setups) */
         flipForward: boolean;
         // Internal: shared helper used to implement flipForward (presumably a cached 180° Y rotation — confirm)
         private static _invertForward;
         // Internal: tracks whether the first update has happened yet
         private _firstUpdate;
         /**
          * Update the position and rotation of the GameObject to follow the target.
          */
         onBeforeRender(): void;
         /**
          * Manually update the position/rotation to follow the target.
          * @param hard If true, snaps instantly to target without smoothing
          */
         updateNow(hard: boolean): void;
     }

     /**
      * Identifier used to look up data registered via glTF files — e.g. to find lightmaps for a
      * Renderer component that were shipped inside a glTF.
      */
     export declare type SourceIdentifier = string;

     /* Excluded from this release type: Space */

     /**
      * SpatialGrabRaycaster enables direct grab interactions in VR/AR.
      * Uses sphere overlap detection around the controller/hand position
      * to allow grabbing objects by reaching into them.
      *
      * **Features:**
      * - Active only during XR sessions
      * - Can be globally disabled via `SpatialGrabRaycaster.allow`
      * - Works alongside ray-based interaction
      *
      * @category XR
      * @group Components
      * @see {@link WebXR} for XR session management
      * @see {@link DragControls} for object manipulation
      */
     export declare class SpatialGrabRaycaster extends Raycaster_2 {
         /**
          * Use to disable SpatialGrabRaycaster globally
          */
         static allow: boolean;
         /**
          * Performs the grab test.
          * @param _opts Optional raycast options
          * @returns The intersections found, or null when nothing was hit
          */
         performRaycast(_opts?: IRaycastOptions | RaycastOptions | null): Intersection[] | null;
     }

     /**
      * [SpatialHtml](https://engine.needle.tools/docs/api/SpatialHtml) is a component that allows you to integrate HTML elements into a 3D scene.
      * By specifying the ID of an existing HTML element, you can render it as a 3D object within the scene.
      * @summary Render HTML elements as 3D objects in the scene
      * @category User Interface
      * @group Components
      */
     export declare class SpatialHtml extends Component {
         /** ID of the existing HTML element to render in the scene */
         id: string | null;
         /** Whether to keep the HTML element's aspect ratio */
         keepAspect: boolean;
         // Internal: the 3D object created for the HTML element (see onEnable/onDisable)
         private _object;
         /** Component lifecycle hook: creates/attaches the spatial representation */
         onEnable(): void;
         /** Component lifecycle hook: removes the spatial representation */
         onDisable(): void;
     }

     /**
      * A spatial trigger component that detects objects within a box-shaped area.
      * Used to trigger events when objects enter, stay in, or exit the defined area.
      *
      * The trigger area is defined by the GameObject's bounding box (uses {@link BoxHelperComponent}).
      * Objects with {@link SpatialTriggerReceiver} components are tested against this area.
      *
      * **Mask system:** Both trigger and receiver have a `triggerMask` - they only interact
      * when their masks have overlapping bits set. This allows selective triggering.
      *
      * **Debug:** Use `?debugspatialtrigger` URL parameter to visualize trigger zones.
      *
      * @example Create a pickup zone
      * ```ts
      * // On the pickup zone object
      * const trigger = pickupZone.addComponent(SpatialTrigger);
      * trigger.triggerMask = 1; // Layer 1 for pickups
      *
      * // On the player
      * const receiver = player.addComponent(SpatialTriggerReceiver);
      * receiver.triggerMask = 1; // Match the pickup layer
      * ```
      *
      * @summary Define a trigger zone that detects entering objects
      * @category Interactivity
      * @group Components
      * @see {@link SpatialTriggerReceiver} for objects that respond to triggers
      * @see {@link BoxHelperComponent} for the underlying box used to define the trigger area
      * @link https://engine.needle.tools/samples/spatial-triggers/
      */
     export declare class SpatialTrigger extends Component {
         /** Global registry of all active spatial triggers in the scene */
         static triggers: SpatialTrigger[];
         /**
          * Bitmask determining which receivers this trigger affects.
          * Only receivers with matching masks will be triggered.
          * NOTE(review): behavior when undefined is not visible here — confirm with implementation.
          */
         triggerMask?: number;
         /** Box helper component used to visualize and calculate the trigger area */
         private boxHelper?;
         /**
          * Initializes the trigger and logs debug info if enabled
          */
         start(): void;
         /**
          * Registers this trigger in the global registry and sets up debug visualization if enabled
          */
         onEnable(): void;
         /**
          * Removes this trigger from the global registry when disabled
          */
         onDisable(): void;
         /**
          * Tests if an object is inside this trigger's box
          * @param obj The object to test against this trigger
          * @returns True if the object is inside the trigger box
          */
         test(obj: Object3D): boolean;
         /**
          * Raises the onEnter event on any SpatialTriggerReceiver components attached to this trigger's GameObject
          * @param rec The receiver that entered this trigger
          */
         raiseOnEnterEvent(rec: SpatialTriggerReceiver): void;
         /**
          * Raises the onStay event on any SpatialTriggerReceiver components attached to this trigger's GameObject
          * @param rec The receiver that is staying in this trigger
          */
         raiseOnStayEvent(rec: SpatialTriggerReceiver): void;
         /**
          * Raises the onExit event on any SpatialTriggerReceiver components attached to this trigger's GameObject
          * @param rec The receiver that exited this trigger
          */
         raiseOnExitEvent(rec: SpatialTriggerReceiver): void;
     }

     /**
      * Component that receives and responds to spatial events, like entering or exiting a trigger zone.
      * Used in conjunction with {@link SpatialTrigger} to create interactive spatial events.
      *
      * Place this on objects that should react when entering trigger zones. The receiver checks
      * against all active SpatialTriggers each frame and fires events when intersections change.
      *
      * Events can be connected via {@link EventList} in the editor or listened to in code.
      *
      * @example Listen to trigger events
      * ```ts
      * export class DoorTrigger extends Behaviour {
      *   @serializable(SpatialTriggerReceiver)
      *   receiver?: SpatialTriggerReceiver;
      *
      *   start() {
      *     this.receiver?.onEnter?.addEventListener(() => {
      *       console.log("Player entered door zone");
      *     });
      *   }
      * }
      * ```
      *
      * @summary Receives spatial trigger events
      * @category Interactivity
      * @group Components
      * @see {@link SpatialTrigger} for defining trigger zones
      * @see {@link EventList} for event handling
      * @link https://engine.needle.tools/samples/spatial-triggers/
      */
     export declare class SpatialTriggerReceiver extends Component {
         /**
          * Bitmask determining which triggers this receiver responds to.
          * Only triggers with matching masks will interact with this receiver.
          */
         triggerMask: number;
         /** Event invoked when this object enters a trigger zone */
         onEnter?: EventList<any>;
         /** Event invoked continuously while this object is inside a trigger zone */
         onStay?: EventList<any>;
         /** Event invoked when this object exits a trigger zone */
         onExit?: EventList<any>;
         /* Excluded from this release type: start */
         /* Excluded from this release type: update */
         /** Array of triggers currently intersecting with this receiver */
         readonly currentIntersected: SpatialTrigger[];
         /** Array of triggers that intersected with this receiver in the previous frame */
         readonly lastIntersected: SpatialTrigger[];
         /**
          * Handles trigger enter events.
          * @param trigger The spatial trigger that was entered
          */
         onEnterTrigger(trigger: SpatialTrigger): void;
         /**
          * Handles trigger exit events.
          * @param trigger The spatial trigger that was exited
          */
         onExitTrigger(trigger: SpatialTrigger): void;
         /**
          * Handles trigger stay events.
          * @param trigger The spatial trigger that the receiver is staying in
          */
         onStayTrigger(trigger: SpatialTrigger): void;
     }

     /**
      * SpectatorCamera enables following and spectating other users in networked sessions.
      * Switch between first-person (see what they see) and third-person (orbit around them) views.
      *
      * **Keyboard controls** (when `useKeys = true`):
      * - `F` - Request all users to follow the local player
      * - `ESC` - Stop spectating
      *
      * **Spectator modes:**
      * - `FirstPerson` - View from the followed player's perspective
      * - `ThirdPerson` - Freely orbit around the followed player
      *
      * **Debug:** Use `?debugspectator` URL parameter for logging.
      *
      * @example Start spectating a user
      * ```ts
      * const spectator = camera.getComponent(SpectatorCamera);
      * spectator.follow(targetUserId);
      * spectator.mode = SpectatorMode.ThirdPerson;
      * ```
      *
      * @summary Spectator camera for following other users
      * @category Networking
      * @group Components
      * @see {@link SpectatorMode} for view options
      * @see {@link SyncedRoom} for networked sessions
      * @see {@link OrbitControls} for third-person orbit
      */
     export declare class SpectatorCamera extends Component {
         /** Reference to the Camera component on this GameObject */
         cam: Camera | null;
         /**
          * When enabled, pressing F will send a request to all connected users to follow the local player.
          * Pressing ESC will stop spectating.
          */
         useKeys: boolean;
         // Backing field for the mode accessor
         private _mode;
         /** Gets the current spectator perspective mode */
         get mode(): SpectatorMode;
         /** Sets the current spectator perspective mode */
         set mode(val: SpectatorMode);
         /** Returns whether this user is currently spectating another user */
         get isSpectating(): boolean;
         /**
          * Checks if this instance is spectating the user with the given ID
          * @param userId The user ID to check
          * @returns True if spectating the specified user, false otherwise
          */
         isSpectatingUser(userId: string): boolean;
         /**
          * Checks if the user with the specified ID is following this user
          * @param userId The user ID to check
          * @returns True if the specified user is following this user, false otherwise
          */
         isFollowedBy(userId: string): boolean;
         /** List of user IDs that are currently following this user */
         get followers(): string[];
         /** Stops the current spectating session */
         stopSpectating(): void;
         /** Gets the local player's connection ID */
         private get localId();
         /**
          * Sets the player view to follow
          * @param target The PlayerView to follow, or undefined to stop spectating
          */
         set target(target: PlayerView | undefined);
         /** Gets the currently followed player view */
         get target(): PlayerView | undefined;
         /** Sends a network request for all users to follow this player */
         requestAllFollowMe(): void;
         /** Determines if the camera is spectating the local player */
         private get isSpectatingSelf();
         // Internal: orbit controls used for third-person viewing — see {@link OrbitControls}
         private orbit;
         // Internal implementation detail (type not visible here)
         private _handler?;
         // Internal: subscription for the WebXR request-start event
         private eventSub_WebXRRequestStartEvent;
         // Internal: subscription for the WebXR start event
         private eventSub_WebXRStartEvent;
         // Internal: subscription for the WebXR end event
         private eventSub_WebXREndEvent;
         // Internal: debug state (see ?debugspectator URL parameter)
         private _debug?;
         // Internal: networking accessor used for follow requests and user lookups — confirm exact type
         private _networking;
         /** Component lifecycle hook: initialization */
         awake(): void;
         /** Component lifecycle hook: cleanup on destruction */
         onDestroy(): void;
         /**
          * Checks if the current platform supports spectator mode
          * @returns True if the platform is supported, false otherwise
          */
         private isSupportedPlatform;
         /**
          * Called before entering WebXR mode
          * @param _evt The WebXR event
          */
         onBeforeXR(_evt: any): void;
         /**
          * Called when entering WebXR mode
          * @param _evt The WebXR event
          */
         onEnterXR(_evt: any): void;
         /**
          * Called when exiting WebXR mode
          * @param _evt The WebXR event
          */
         onLeaveXR(_evt: any): void;
         /**
          * Sets the target to follow the local player
          */
         private followSelf;
         /**
          * Called after the main rendering pass to render the spectator view
          */
         onAfterRender(): void;
         /**
          * Updates avatar visibility flags for rendering in spectator mode
          */
         private setAvatarFlagsBeforeRender;
         /**
          * Restores avatar visibility flags after spectator rendering
          */
         private resetAvatarFlags;
     }

     /**
      * Defines the viewing perspective in spectator mode.
      * @see {@link SpectatorCamera.mode}
      */
     declare enum SpectatorMode {
         /** View from the perspective of the followed player */
         FirstPerson = 0,
         /** Freely view from a third-person perspective */
         ThirdPerson = 1
     }

     /**
      * SphereCollider represents a sphere-shaped collision volume.
      * Efficient and suitable for balls, projectiles, or approximate collision bounds.
      *
      * ![](https://cloud.needle.tools/-/media/slYWnXyaxdlrCqu8GP_lFQ.gif)
      *
      * @example Create a bouncing ball
      * ```ts
      * const sphere = ball.addComponent(SphereCollider);
      * sphere.radius = 0.5;
      * const rb = ball.addComponent(Rigidbody);
      * rb.mass = 1;
      * ```
      *
      * - Example: https://samples.needle.tools/physics-basic
      *
      * @summary Sphere-shaped physics collider
      * @category Physics
      * @group Components
      * @see {@link Collider} for base collider functionality
      * @see {@link CapsuleCollider} for elongated sphere shapes
      */
     export declare class SphereCollider extends Collider implements ISphereCollider {
         /**
          * The radius of the sphere collider.
          * NOTE: scale changes on the object are monitored (see {@link onEnable}), so object scale
          * affects the effective collider size.
          */
         radius: number;
         /**
          * The center position of the sphere collider relative to the transform's position.
          */
         center: Vector3;
         /**
          * Registers the sphere collider with the physics engine and sets up scale change monitoring.
          */
         onEnable(): void;
         /**
          * Removes scale change monitoring when the collider is disabled.
          */
         onDisable(): void;
         /**
          * Updates collider properties when validated in the editor or inspector.
          */
         onValidate(): void;
     }

     /**
      * Simple data holder implementing {@link Intersection}: the hit object, the distance to it,
      * and the intersection point.
      */
     export declare class SphereIntersection implements Intersection {
         /** Distance to the intersection */
         distance: number;
         /** The intersection point */
         point: Vector3;
         /** The intersected object */
         object: Object3D;
         constructor(object: Object3D, distance: number, point: Vector3);
     }

     /* Excluded from this release type: SphericalHarmonicsData */

     /**
      * [SplineContainer](https://engine.needle.tools/docs/api/SplineContainer) manages spline curves defined by a series of knots (control points).
      * This component stores spline data and generates smooth curves that can be used for animation paths, camera paths, racing tracks, or any curved path in 3D space.
      *
      * ![](https://cloud.needle.tools/-/media/XIHaiNFsA1IbMZVJepp1aQ.gif)
      *
      * **How It Works:**
      * The spline is defined by an array of {@link SplineData} knots. Each knot contains:
      * - **Position**: The location of the control point
      * - **Rotation**: Orientation at that point (useful for banking/tilting objects along the path)
      * - **Tangents**: Handles that control the curve's smoothness and shape
      *
      * The component uses Catmull-Rom interpolation to create smooth curves between knots. The curve is automatically
      * rebuilt when knots are added, removed, or marked dirty, and all sampling methods return positions in world space.
      *
      * **Key Features:**
      * - Smooth Catmull-Rom curve interpolation
      * - Support for open and closed curves
      * - Dynamic knot management (add/remove at runtime)
      * - World-space sampling with {@link getPointAt} and {@link getTangentAt}
      * - Automatic curve regeneration when modified
      * - Built-in debug visualization
      * - Integrates seamlessly with {@link SplineWalker}
      *
      * **Common Use Cases:**
      * - Camera paths and cinematics
      * - Object movement along curved paths
      * - Racing game tracks and racing lines
      * - Character patrol routes
      * - Procedural road/path generation
      * - Animation curves for complex motion
      * - Cable/rope visualization
      *
      * @example Basic spline setup with knots
      * ```ts
      * const splineObj = new Object3D();
      * const spline = splineObj.addComponent(SplineContainer);
      *
      * // Add knots to define the path
      * spline.addKnot({ position: new Vector3(0, 0, 0) });
      * spline.addKnot({ position: new Vector3(2, 1, 0) });
      * spline.addKnot({ position: new Vector3(4, 0, 2) });
      * spline.addKnot({ position: new Vector3(6, -1, 1) });
      *
      * // Sample a point halfway along the spline
      * const midpoint = spline.getPointAt(0.5);
      * console.log("Midpoint:", midpoint);
      * ```
      *
      * @example Creating a closed loop spline
      * ```ts
      * const loopSpline = gameObject.addComponent(SplineContainer);
      * loopSpline.closed = true; // Makes the spline loop back to the start
      *
      * // Add circular path knots
      * for (let i = 0; i < 8; i++) {
      *   const angle = (i / 8) * Math.PI * 2;
      *   const pos = new Vector3(Math.cos(angle) * 5, 0, Math.sin(angle) * 5);
      *   loopSpline.addKnot({ position: pos });
      * }
      * ```
      *
      * @example Sampling points along a spline
      * ```ts
      * const spline = gameObject.getComponent(SplineContainer);
      *
      * // Sample 10 points along the spline
      * const points: Vector3[] = [];
      * for (let i = 0; i <= 10; i++) {
      *   const t = i / 10; // 0 to 1
      *   const point = spline.getPointAt(t);
      *   points.push(point);
      * }
      *
      * // Get tangent (direction) at 75% along the spline
      * const tangent = spline.getTangentAt(0.75);
      * console.log("Direction at 75%:", tangent);
      * ```
      *
      * @example Dynamic knot manipulation
      * ```ts
      * const spline = gameObject.getComponent(SplineContainer);
      *
      * // Add a new knot dynamically
      * const newKnot = new SplineData();
      * newKnot.position.set(10, 5, 0);
      * spline.addKnot(newKnot);
      *
      * // Remove the first knot
      * spline.removeKnot(0);
      *
      * // Modify existing knot
      * spline.spline[1].position.y += 2;
      * spline.markDirty(); // Tell the spline to rebuild
      * ```
      *
      * @example Using with SplineWalker for animation
      * ```ts
      * // Set up spline path
      * const spline = pathObject.addComponent(SplineContainer);
      * spline.addKnot({ position: new Vector3(0, 0, 0) });
      * spline.addKnot({ position: new Vector3(5, 2, 5) });
      * spline.addKnot({ position: new Vector3(10, 0, 0) });
      *
      * // Make object follow the spline
      * const walker = movingObject.addComponent(SplineWalker);
      * walker.spline = spline;
      * walker.speed = 2; // Units per second
      * walker.loop = true;
      * ```
      *
      * **Debug Visualization:**
      * Add `?debugsplines` to your URL to enable debug visualization, which draws the spline curve as a purple line.
      * You can also enable it programmatically:
      * ```ts
      * spline.debug = true; // Show debug visualization
      * ```
      *
      * @see {@link SplineWalker} - Component for moving objects along a spline path
      * @see {@link SplineData} - The knot data structure used to define spline points
      * @see {@link getPointAt} - Sample positions along the spline
      * @see {@link getTangentAt} - Get direction vectors along the spline
      * @see {@link addKnot} - Add control points to the spline
      * @see {@link removeKnot} - Remove control points from the spline
      *
      * @summary Manages smooth spline curves defined by control point knots
      * @category Splines
      * @group Components
      * @component
      */
     export declare class SplineContainer extends Component {
         /**
          * Adds a knot (control point) to the end of the spline.
          *
          * You can pass either a full {@link SplineData} object or a simple object with just a position.
          * When passing a simple object, default values are used for rotation and tangents.
          *
          * The spline curve is automatically marked dirty and will be rebuilt on the next update.
          *
          * @param knot - Either a SplineData object or an object with at least a `position` property
          * @returns This SplineContainer for method chaining
          *
          * @example Add knots with positions only
          * ```ts
          * spline.addKnot({ position: new Vector3(0, 0, 0) })
          *       .addKnot({ position: new Vector3(5, 0, 0) })
          *       .addKnot({ position: new Vector3(5, 0, 5) });
          * ```
          *
          * @example Add a full SplineData knot
          * ```ts
          * const knot = new SplineData();
          * knot.position.set(10, 2, 5);
          * knot.rotation.setFromEuler(new Euler(0, Math.PI / 4, 0));
          * spline.addKnot(knot);
          * ```
          */
         addKnot(knot: SplineData | {
             position: Vector3;
         }): SplineContainer;
         /**
          * Removes a knot (control point) from the spline.
          *
          * You can remove a knot either by its numeric index in the spline array or by passing
          * a reference to the SplineData object itself.
          *
          * The spline curve is automatically marked dirty and will be rebuilt on the next update.
          *
          * @param index - Either the numeric index of the knot to remove, or the SplineData object reference
          * @returns This SplineContainer for method chaining
          *
          * @example Remove knot by index
          * ```ts
          * spline.removeKnot(0); // Remove first knot
          * spline.removeKnot(spline.spline.length - 1); // Remove last knot
          * ```
          *
          * @example Remove knot by reference
          * ```ts
          * const knotToRemove = spline.spline[2];
          * spline.removeKnot(knotToRemove);
          * ```
          */
         removeKnot(index: number | SplineData): SplineContainer;
         /**
          * Samples a point on the spline at a given parametric position (in world space).
          *
          * The parameter `t` ranges from 0 to 1, where:
          * - `0` = start of the spline
          * - `0.5` = middle of the spline
          * - `1` = end of the spline
          *
          * The returned position is in world space, accounting for the SplineContainer's transform.
          * Values outside 0-1 are clamped to the valid range.
          *
          * @param to01 - Parametric position along the spline (0 to 1)
          * @param target - Optional Vector3 to store the result (avoids allocation)
          * @returns The world-space position at parameter `t`
          *
          * @example Sample multiple points along the spline
          * ```ts
          * // Sample 20 evenly-spaced points
          * const points: Vector3[] = [];
          * for (let i = 0; i <= 20; i++) {
          *   const t = i / 20;
          *   points.push(spline.getPointAt(t));
          * }
          * ```
          *
          * @example Using a target vector for efficiency
          * ```ts
          * const reusableVector = new Vector3();
          * for (let i = 0; i < 100; i++) {
          *   const point = spline.getPointAt(i / 100, reusableVector);
          *   // Use point...
          * }
          * ```
          *
          * @see {@link getTangentAt} to get the direction at a point
          */
         getPointAt(to01: number, target?: Vector3): Vector3;
          /**
           * Marks the spline as dirty, causing it to be rebuilt on the next update frame.
           *
           * Call this method whenever you manually modify the spline data (knot positions, rotations, or tangents)
           * to ensure the curve is regenerated. This is done automatically when using {@link addKnot} or {@link removeKnot}.
           *
           * @example Modifying knots and marking dirty
           * ```ts
           * // Modify existing knot positions
           * spline.spline[0].position.y += 2;
           * spline.spline[1].position.x -= 1;
           *
           * // Tell the spline to rebuild
           * spline.markDirty();
           * ```
           *
           * @example Animating knot positions
           * ```ts
           * update() {
           *   const time = this.context.time.time;
           *   // Animate knot positions
           *   for (let i = 0; i < spline.spline.length; i++) {
           *     spline.spline[i].position.y = Math.sin(time + i) * 2;
           *   }
           *   spline.markDirty(); // Rebuild curve each frame
           * }
           * ```
           *
           * @see {@link isDirty} to check whether a rebuild is pending
           */
          markDirty(): void;
          /**
           * Samples the tangent (direction) vector on the spline at a given parametric position (in world space).
           *
           * The tangent represents the forward direction of the curve at point `t`. This is useful for:
           * - Orienting objects along the spline (facing the direction of travel)
           * - Calculating banking/tilting for vehicles on the path
           * - Understanding the curve's direction at any point
           *
           * The parameter `t` ranges from 0 to 1 (same as {@link getPointAt}).
           * The returned vector is normalized and in world space, accounting for the SplineContainer's rotation.
           *
           * @param t - Parametric position along the spline (0 to 1)
           * @param target - Optional Vector3 to store the result (avoids allocation)
           * @returns The normalized tangent vector in world space at parameter `t`
           *
           * @example Orient an object along the spline
           * ```ts
           * const position = spline.getPointAt(0.5);
           * const tangent = spline.getTangentAt(0.5);
           *
           * object.position.copy(position);
           * object.lookAt(position.clone().add(tangent)); // Face along the spline
           * ```
           *
           * @example Calculate velocity direction for a moving object
           * ```ts
           * let t = 0;
           * update() {
           *   t += this.context.time.deltaTime * 0.2; // Speed
           *   if (t > 1) t = 0; // Loop
           *
           *   const pos = spline.getPointAt(t);
           *   const direction = spline.getTangentAt(t);
           *
           *   movingObject.position.copy(pos);
           *   // direction is already normalized (see above), so it is safe for setFromUnitVectors
           *   movingObject.quaternion.setFromUnitVectors(
           *     new Vector3(0, 0, 1),
           *     direction
           *   );
           * }
           * ```
           *
           * @see {@link getPointAt} to get the position at a point
           */
          getTangentAt(t: number, target?: Vector3): Vector3;
          /**
           * Whether the spline forms a closed loop.
           *
           * **When `true`:**
           * - The spline connects the last knot back to the first knot, forming a continuous loop
           * - Perfect for racing tracks, patrol routes, or any circular path
           * - Parameter `t=1` will smoothly connect back to `t=0`
           *
           * **When `false` (default):**
           * - The spline is open, with distinct start and end points
           * - Suitable for one-way paths, camera movements, or linear progressions
           *
           * Changing this property marks the spline as dirty and triggers a rebuild.
           *
           * @example Create a circular patrol route
           * ```ts
           * const patrol = gameObject.addComponent(SplineContainer);
           * patrol.closed = true; // Loop back to start
           *
           * // Add points in a circle
           * for (let i = 0; i < 8; i++) {
           *   const angle = (i / 8) * Math.PI * 2;
           *   patrol.addKnot({
           *     position: new Vector3(Math.cos(angle) * 10, 0, Math.sin(angle) * 10)
           *   });
           * }
           * ```
           *
           * @default false
           */
          set closed(value: boolean);
          get closed(): boolean;
          /** Backing field for {@link closed} */
          private _closed;
          /**
           * Array of knots (control points) that define the spline curve.
           *
           * Each element is a {@link SplineData} object containing position, rotation, and tangent information.
           * You can directly access and modify this array, but remember to call {@link markDirty} afterwards
           * to trigger a curve rebuild.
           *
           * **Best practices:**
           * - Use {@link addKnot} and {@link removeKnot} methods for automatic dirty marking
           * - If modifying knots directly, always call {@link markDirty} afterwards
           * - The order of knots determines the path direction
           * - Knot positions are in local space relative to the SplineContainer (see {@link SplineData.position})
           *
           * @example Direct array access
           * ```ts
           * console.log(`Spline has ${spline.spline.length} knots`);
           *
           * // Access first knot
           * const firstKnot = spline.spline[0];
           * console.log("Start position:", firstKnot.position);
           *
           * // Modify and mark dirty
           * spline.spline[2].position.y += 5;
           * spline.markDirty();
           * ```
           *
           * @see {@link SplineData} for the knot data structure
           * @see {@link addKnot} for adding knots (auto marks dirty)
           * @see {@link removeKnot} for removing knots (auto marks dirty)
           * @see {@link markDirty} to trigger rebuild after manual modifications
           */
          spline: SplineData[];
          /**
           * Enables visual debug rendering of the spline curve.
           *
           * When enabled, the spline is rendered as a purple line in the scene, making it easy to
           * visualize the path during development. The debug line automatically updates when the spline is modified.
           *
           * **Debug visualization:**
           * - Purple line showing the complete curve path
           * - Automatically rebuilds when spline changes
           * - Line resolution based on number of knots (10 segments per knot)
           *
           * **Tip:** You can also enable debug visualization globally for all splines by adding `?debugsplines`
           * to your URL.
           *
           * Note: this property is declared write-only — there is no corresponding getter.
           *
           * @example Enable debug visualization
           * ```ts
           * const spline = gameObject.addComponent(SplineContainer);
           * spline.debug = true; // Show purple debug line
           *
           * // Add some knots to see the visualization
           * spline.addKnot({ position: new Vector3(0, 0, 0) });
           * spline.addKnot({ position: new Vector3(5, 2, 0) });
           * spline.addKnot({ position: new Vector3(10, 0, 5) });
           * ```
           */
          set debug(debug: boolean);
          /**
           * The Three.js Curve object generated from the spline knots.
           *
           * This is the underlying curve implementation (typically a CatmullRomCurve3) that's used for
           * all position and tangent sampling. The curve is automatically regenerated when the spline
           * is marked dirty.
           *
           * **Note:** This curve is in local space relative to the SplineContainer. Use {@link getPointAt}
           * and {@link getTangentAt} methods to get world-space results.
           *
           * @returns The generated Three.js Curve, or null if not yet built
           * @see {@link markDirty} to force a rebuild
           */
          get curve(): Curve<Vector3> | null;
          /**
           * Whether the spline needs to be rebuilt due to modifications.
           *
           * The spline is marked dirty when:
           * - Knots are added via {@link addKnot}
           * - Knots are removed via {@link removeKnot}
           * - {@link markDirty} is called manually
           * - The {@link closed} property is changed
           *
           * The curve is automatically rebuilt on the next update frame when dirty.
           *
           * @returns `true` if the spline needs rebuilding, `false` otherwise
           */
          get isDirty(): boolean;
          /** Backing field for {@link isDirty} */
          private _isDirty;
          /** Backing field for {@link curve} */
          private _curve;
          /* NOTE(review): presumably the most recently built curve — implementation not visible here; verify */
          private _builtCurve;
          /** Line object used for the {@link debug} visualization */
          private _debugLine;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: update */
          /* Rebuilds the internal curve from the current knots — NOTE(review): implementation not visible here */
          private buildCurve;
          /* Rebuilds the debug visualization — NOTE(review): implementation not visible here */
          private buildDebugCurve;
      }

      /**
       * Represents a single knot (control point) in a spline curve.
       *
       * Each knot defines a point along the spline with its position, rotation, and tangent handles
       * that control the curve's shape entering and leaving the knot.
       *
       * **Properties:**
       * - **position**: The 3D position of this knot in local space
       * - **rotation**: The orientation at this knot (useful for rotating objects along the spline)
       * - **tangentIn**: The incoming tangent handle controlling the curve shape before this knot
       * - **tangentOut**: The outgoing tangent handle controlling the curve shape after this knot
       *
       * @category Splines
       * @see {@link SplineContainer} for the container that holds and manages multiple knots
       */
      export declare class SplineData {
          /**
           * The 3D position of this knot in local space relative to the SplineContainer.
           */
          position: Vector3;
          /**
           * The orientation at this knot. Can be used to rotate objects following the spline.
           */
          rotation: Quaternion;
          /**
           * The incoming tangent handle controlling the curve shape as it approaches this knot.
           * The magnitude and direction affect the smoothness and curvature of the spline.
           */
          tangentIn: Vector3;
          /**
           * The outgoing tangent handle controlling the curve shape as it leaves this knot.
           * The magnitude and direction affect the smoothness and curvature of the spline.
           */
          tangentOut: Vector3;
      }

      /**
       * @category Splines
       * @see {@link SplineContainer} for the main spline component that defines the path and knots
       */
      export declare namespace SplineUtils {
          /**
           * Creates a SplineContainer from an array of points.
           * @param positions The positions of the knots.
           * @param closed Whether the spline is closed (the last knot connects to the first).
           * @param tension The tension of the spline. 0 is no tension, 1 is high tension (straight lines between knots). Default is 0.75.
           * @returns The created SplineContainer component - add it to an Object3D to use it.
           */
          export function createFromPoints(positions: Vector3[], closed?: boolean, tension?: number): SplineContainer;
      }

      /**
       * [SplineWalker](https://engine.needle.tools/docs/api/SplineWalker) Moves an object along a {@link SplineContainer}.
       * Use this with a SplineContainer component.
       *
       * ![](https://cloud.needle.tools/-/media/XIHaiNFsA1IbMZVJepp1aQ.gif)
       *
       * - Example http://samples.needle.tools/splines
       *
       * @summary Moves an object along a spline
       * @category Splines
       * @group Components
       */
      export declare class SplineWalker extends Component {
          /**
           * The spline to use/walk along. Add a SplineContainer component to an object and assign it here.
           */
          spline: SplineContainer | null;
          /**
           * The object to move along the spline.
           * If object is undefined then the spline walker will use its own object (gameObject).
           * If object is null the spline walker will not move any object.
           * @default undefined
           */
          object?: Object3D | null;
          /**
           * If true the object will rotate to look in the direction of the spline while moving along it.
           * @default true
           */
          useLookAt: boolean;
          /**
           * The object to look at while moving along the spline.
           * If null the object will look in the direction of the spline.
           * This can be disabled by setting useLookAt to false.
           * @default null
           */
          lookAt: Object3D | null;
          /**
           * When clamp is set to true, the position01 value will be clamped between 0 and 1 and the object will not loop the spline.
           * @default false
           */
          clamp: boolean;
          /**
           * The current position on the spline. The value ranges from 0 (start of the spline curve) to 1 (end of the spline curve)
           *
           * When setting this value, the position will be updated in the next frame.
           * @default 0
           */
          get position01(): number;
          set position01(v: number);
          /** Resets the position to 0 */
          reset(): void;
          /**
           * If true the SplineWalker will automatically move along the spline
           * @default true
           */
          autoRun: boolean;
          /**
           * The duration in seconds it takes to complete the whole spline when {@link autoRun} is enabled.
           * @default 10
           */
          duration: number;
          /**
           * The strength with which the object is pulled to the spline.
           * This can be used to create a "rubber band" effect when the object is moved away from the spline by other forces.
           * A value of 0 means no pull, a value of 1 means the object is always on the spline.
           * @default 1
           */
          pullStrength: number;
          /** Backing field for {@link position01} */
          private _position01;
          private _needsUpdate;
          /* Excluded from this release type: start */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          private onUserInput;
          /* Excluded from this release type: update */
          /* Excluded from this release type: updateFromPosition */
          private _lastPosition01;
          private _requiredUpdates;
          private _performedUpdates;
          private _lastPositionVector;
      }

      /**
       * A sprite is a mesh that represents a 2D image. Used by the {@link SpriteRenderer} to render 2D images in the scene.
       * @summary 2D image renderer
       * @category Rendering
       * @group Components
       */
      export declare class Sprite {
          constructor(texture?: Texture);
          /** Unique identifier of this sprite — NOTE(review): where/when this is assigned is not visible here; verify */
          guid?: string;
          /** The texture this sprite displays */
          texture?: Texture;
          /** Triangle index list — NOTE(review): presumably indices into {@link vertices}; verify */
          triangles: Array<number>;
          /** Per-vertex UV coordinates (2D) */
          uv: Array<Vec2_3>;
          /** 2D vertex positions of the sprite geometry */
          vertices: Array<Vec2_3>;
          /* Excluded from this release type: __cached_geometry */
          /**
           * The mesh that represents the sprite
           */
          get mesh(): Mesh;
          /** Backing field for {@link mesh} */
          private _mesh;
          /**
           * The material used to render the sprite
           */
          get material(): MeshBasicMaterial;
          /** Backing field for {@link material} */
          private _material;
          /**
           * The geometry of the sprite that can be used to create a mesh
           */
          getGeometry(): BufferGeometry<NormalBufferAttributes>;
      }

      /** Internal (non-exported) sprite shape holding a texture and an optional rect. */
      declare class Sprite_3 {
          texture: Texture | null;
          /** Optional rectangle size — NOTE(review): units (pixels vs. normalized) are not visible here; verify */
          rect?: {
              width: number;
              height: number;
          };
      }

      /**
       * Used by the {@link SpriteRenderer} to hold the sprite sheet and the currently active sprite index.
       *
       * @category Sprites
       */
      export declare class SpriteData {
          /** Factory for a new SpriteData instance */
          static create(): SpriteData;
          constructor();
          /** Returns a copy of this SpriteData */
          clone(): SpriteData;
          /**
           * Set the sprite to be rendered in the currently assigned sprite sheet at the currently active index {@link index}
           */
          set sprite(sprite: Sprite | undefined);
          /** The currently active sprite */
          get sprite(): Sprite | undefined;
          /**
           * The spritesheet holds all sprites that can be rendered by the sprite renderer
           */
          spriteSheet?: SpriteSheet;
          /**
           * The index of the sprite to be rendered in the currently assigned sprite sheet
           */
          index: number;
          /** Applies the currently active sprite to the given material — NOTE(review): exact effect on the material is not visible here; verify */
          update(material: Material | undefined): void;
      }

      /**
       * How a sprite is drawn by the {@link SpriteRenderer}.
       * NOTE(review): member semantics inferred from the names (they mirror Unity's SpriteDrawMode) — verify.
       */
      declare enum SpriteDrawMode {
          /** Draw the sprite as a single plain quad */
          Simple = 0,
          /** Draw the sprite 9-sliced */
          Sliced = 1,
          /** Draw the sprite tiled/repeated */
          Tiled = 2
      }

      /**
       * The sprite renderer renders a sprite on a GameObject using an assigned spritesheet ({@link SpriteData}).
       *
       * - Example: https://engine.needle.tools/samples/spritesheet-animation
       *
       * @summary Renders 2D images from a sprite sheet
       * @category Rendering
       * @group Components
       */
      export declare class SpriteRenderer extends Component {
          /* Excluded from this release type: drawMode */
          /* Excluded from this release type: size */
          /** Color applied to the sprite — NOTE(review): presumably used as a tint on the sprite material; verify */
          color?: RGBAColor;
          /**
           * The material that is used to render the sprite
           */
          sharedMaterial?: Material;
          /** Whether the sprite is rendered with transparency */
          transparent: boolean;
          /** Alpha threshold for cutout rendering — NOTE(review): presumably maps to the material's alphaTest; verify */
          cutoutThreshold: number;
          /** Whether the sprite casts shadows */
          castShadows: boolean;
          /** Render order of the sprite — NOTE(review): presumably forwarded to the sprite mesh's three.js `renderOrder`; verify */
          renderOrder: number;
          /** Whether the sprite is affected by tone mapping — NOTE(review): presumably forwarded to the material's three.js `toneMapped`; verify */
          toneMapped: boolean;
          /**
           * Assign a new texture to the currently active sprite
           */
          set texture(value: Texture | undefined);
          /**
           * Add a new sprite to the currently assigned sprite sheet. The sprite will be added to the end of the sprite sheet.
           * Note that the sprite will not be rendered by default - set the `spriteIndex` to the index of the sprite to be rendered.
           * @param sprite The sprite to be added
           * @returns The index of the sprite in the sprite sheet
           * @example
           * ```typescript
           * const spriteRenderer = gameObject.addComponent(SpriteRenderer);
           * const index = spriteRenderer.addSprite(mySprite);
           * if(index >= 0)
           *   spriteRenderer.spriteIndex = index;
           * ```
           */
          addSprite(sprite: Sprite, setActive?: boolean): number;
          /**
           * Get the currently active sprite
           */
          get sprite(): SpriteData | undefined;
          /**
           * Set the sprite to be rendered in the currently assigned sprite sheet at the currently active index {@link spriteIndex}
           */
          set sprite(value: Sprite | SpriteData | undefined | number);
          /**
           * Set the index of the sprite to be rendered in the currently assigned sprite sheet
           */
          set spriteIndex(value: number);
          get spriteIndex(): number;
          /**
           * Get the number of sprites in the currently assigned sprite sheet
           */
          get spriteFrames(): number;
          private _spriteSheet?;
          private _currentSprite?;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: start */
          /**
           * Update the sprite. Modified properties will be applied to the sprite mesh. This method is called automatically when the sprite is changed.
           * @param force If true, the sprite will be forced to update.
           * @returns True if the sprite was updated successfully
           */
          updateSprite(force?: boolean): boolean;
      }

      /**
       * Holds the list of sprites available to a {@link SpriteRenderer} (see {@link SpriteData.spriteSheet}).
       * @category Sprites
       */
      export declare class SpriteSheet {
          /** All sprites contained in this sheet */
          sprites: Sprite[];
          constructor();
      }

      /** A single animator state: its identity, the motion it plays, and its transitions/behaviours. */
      export declare type State = {
          /** Name of the state */
          name: string;
          /** Numeric hash identifying the state — NOTE(review): presumably a hash of {@link name}; verify */
          hash: number;
          /** The motion played by this state */
          motion: Motion;
          /** Outgoing transitions from this state */
          transitions: Transition[];
          /** State machine behaviours attached to this state */
          behaviours: StateMachineBehaviourModel[];
          /** The base speed of the animation */
          speed?: number;
          /** Set to an animator controller float parameter name to multiply on top of the speed value */
          speedParameter?: string;
          /** Cycle offset normalized 0-1, used when starting an animation */
          cycleOffset?: number;
          /** If set to a parameter then this is used instead of the CycleOffset value to offset the animation start time */
          cycleOffsetParameter?: string;
      };

      /**
       * A set of animator {@link State}s and the default state to start in.
       * NOTE(review): whether `defaultState` is an index into `states` or a state hash is not visible here — verify.
       */
      export declare type StateMachine = {
          defaultState: number;
          states: State[];
      };

      /**
       * Base class for behaviours that can be attached to animator states (see {@link StateMachineBehaviourModel}).
       * Subclasses may implement the optional state lifecycle callbacks below.
       */
      export declare abstract class StateMachineBehaviour {
          _context?: Context;
          get context(): Context;
          get isStateMachineBehaviour(): boolean;
          /** Callback invoked when the associated animator state is entered */
          onStateEnter?(animator: Animator, _animatorStateInfo: AnimatorStateInfo, layerIndex: number): any;
          /** Callback invoked while the associated animator state is active */
          onStateUpdate?(animator: Animator, animatorStateInfo: AnimatorStateInfo, _layerIndex: number): any;
          /** Callback invoked when the associated animator state is exited */
          onStateExit?(animator: Animator, animatorStateInfo: AnimatorStateInfo, layerIndex: number): any;
      }

      /**
       * Serialized reference to a {@link StateMachineBehaviour}: the type name used to resolve the class,
       * its serialized properties, and (once created) the live instance.
       */
      export declare type StateMachineBehaviourModel = {
          typeName: string;
          properties: object;
          /** The instantiated behaviour, if created — NOTE(review): creation timing not visible here; verify */
          instance?: StateMachineBehaviour;
      };

      /** Needle-defined names for XR controller stick/touchpad inputs */
      declare type StickName = "xr-standard-thumbstick" | "xr-standard-touchpad";

      /** Event raised when a networked media stream ended (type: {@link NetworkedStreamEvents.StreamEnded}) */
      export declare class StreamEndedEvent {
          readonly type = NetworkedStreamEvents.StreamEnded;
          /** Id of the user whose stream ended */
          readonly userId: string;
          /** Direction of the call (see {@link CallDirection}) */
          readonly direction: CallDirection;
          constructor(userId: string, direction: CallDirection);
      }

      /** Event raised when a networked media stream was received (type: {@link NetworkedStreamEvents.StreamReceived}) */
      export declare class StreamReceivedEvent {
          readonly type = NetworkedStreamEvents.StreamReceived;
          /** Id of the user the stream belongs to */
          readonly userId: string;
          /** The received media stream */
          readonly stream: MediaStream;
          /** The call handle this stream belongs to */
          readonly target: CallHandle;
          constructor(userId: string, stream: MediaStream, target: CallHandle);
      }

      /** Needle-defined names for stylus (MX Ink) */
      export declare type StylusButtonName = "stylus-touch" | "stylus-tip";

      /* Excluded from this release type: SubEmitterSystem */

      /**
       * Defines when a sub-emitter spawns particles relative to the parent particle's lifecycle.
       * Used to create complex effects like explosions on impact or trails following particles.
       * @see SubEmitterSystem (excluded from this release type)
       */
      declare enum SubEmitterType {
          /** Sub-emitter triggers when the parent particle is born */
          Birth = 0,
          /** Sub-emitter triggers when the parent particle collides */
          Collision = 1,
          /** Sub-emitter triggers when the parent particle dies */
          Death = 2,
          /** Sub-emitter triggers when the parent particle enters a trigger zone */
          Trigger = 3,
          /** Sub-emitter is triggered manually via code */
          Manual = 4
      }

      /**
       * Destroy an object across the network. See also {@link syncInstantiate}.
       * @param obj The object or component to be destroyed
       * @param con The network connection to send the destroy event to
       * @param recursive If true, all children will be destroyed as well. Default is true
       * @param opts Options for the destroy operation (see {@link SyncDestroyOptions})
       * @category Networking
       * @example
       * ```ts
       * syncDestroy(myObject, this.context.connection);
       * ```
       */
      export declare function syncDestroy(obj: IGameObject | IComponent, con: INetworkConnection, recursive?: boolean, opts?: SyncDestroyOptions): void;

      /** Options for {@link syncDestroy} */
      declare type SyncDestroyOptions = {
          /** When true the state will be saved in the networking backend */
          saveInRoom?: boolean;
      };

      /**
       * SyncedCamera is a component that syncs the camera position and rotation of all users in the room.
       * A prefab can be set to represent the remote cameras visually in the scene.
       *
       * @summary Syncs camera position and rotation of users in a networked room
       * @category Networking
       * @group Components
       */
      export declare class SyncedCamera extends Component {
          /** All currently known user camera infos — NOTE(review): entry lifecycle is not visible here; verify */
          static instances: UserCamInfo[];
          /** Returns the camera object for the given user id, or null if no camera is known for that user */
          getCameraObject(userId: string): Object3D | null;
          /**
           * The prefab to visually represent the remote cameras in the scene.
           */
          cameraPrefab: Object3D | null | AssetReference;
          private _lastWorldPosition;
          private _lastWorldQuaternion;
          private _model;
          private _needsUpdate;
          private _lastUpdateTime;
          private remoteCams;
          private userToCamMap;
          /* NOTE(review): presumably the seconds after which a remote camera is considered stale — inferred from the name; verify */
          private _camTimeoutInSeconds;
          private _receiveCallback;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /* Excluded from this release type: update */
          private onReceivedRemoteCameraInfoBin;
      }

      /**
       * [SyncedRoom](https://engine.needle.tools/docs/api/SyncedRoom) is a behaviour that will attempt to join a networked room based on the URL parameters or a random room.
       * It will also create a button in the menu to join or leave the room.
       * You can also join a networked room by calling the core methods like `this.context.connection.joinRoom("roomName")`.
       *
       * @example Join a networked room
       * ```typescript
       * const myObject = new Object3D();
       * myObject.addComponent(SyncedRoom, { roomName: "myRoom" });
       * ```
       *
       * @example Join a random networked room
       * ```typescript
       * const myObject = new Object3D();
       * myObject.addComponent(SyncedRoom, { joinRandomRoom: true });
       * ```
       *
       * @example Join a random networked room with prefix - this ensures that no room name collisions happen when running multiple applications on the same server instance
       * ```typescript
       * const myObject = new Object3D();
       * myObject.addComponent(SyncedRoom, { joinRandomRoom: true, roomPrefix: "myApp_" });
       * ```
       *
       * **Debug:** Use `?debugsyncedroom` URL parameter for logging.
       *
       * @summary Joins a networked room based on URL parameters or a random room
       * @category Networking
       * @group Components
       * @see {@link NetworkConnection} for the main networking API (`this.context.connection`)
       * @see {@link SyncedTransform} for synchronizing object transforms
       * @see {@link Voip} for voice communication in rooms
       * @see {@link ScreenCapture} for screen/video sharing
       * @link https://engine.needle.tools/docs/networking.html
       */
      export declare class SyncedRoom extends Component {
          /**
           * The name of the room to join.
           * @default ""
           */
          roomName: string;
          /**
           * The URL parameter name to use for the room name. E.g. if set to "room" the URL will look like `?room=roomName`.
           * @default "room"
           */
          urlParameterName: string;
          /**
           * If true, the room will be joined automatically when this component becomes active.
           * @default undefined — a random room will be joined if no roomName is set.
           */
          joinRandomRoom?: boolean;
          /**
           * If true and no room parameter is found in the URL then no room will be joined.
           * @default false
           */
          requireRoomParameter: boolean;
          /**
           * If true, the room will be rejoined automatically when disconnected.
           * @default true
           */
          autoRejoin: boolean;
          /**
           * If true, a join/leave room button will be created in the menu.
           * @default true
           */
          createJoinButton: boolean;
          /**
           * If true, a join/leave room button for the view only URL will be created in the menu.
           * @default false
           */
          createViewOnlyButton: boolean;
          /**
           * Get current room name from the URL parameter or the view parameter.
           */
          get currentRoomName(): string | null;
          private _lastJoinedRoom?;
          /** The room prefix to use for the room name. E.g. if set to "room_" and the room name is "name" the final room name will be "room_name". */
          set roomPrefix(val: string);
          get roomPrefix(): string;
          /** Backing field for {@link roomPrefix} */
          private _roomPrefix;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /* Excluded from this release type: onDestroy */
          /** Will generate a random room name, set it as an URL parameter and attempt to join the room */
          tryJoinRandomRoom(): void;
          /**
           * Try to join the currently set roomName
           * @param call - NOTE(review): presumably an internal retry/attempt counter — verify before relying on it
           */
          tryJoinRoom(call?: number): boolean;
          private _lastPingTime;
          private _lastRoomTime;
          private _userWantsToBeInARoom;
          /* Excluded from this release type: update */
          /**
           * Get the URL to view the current room in view only mode.
           */
          getViewOnlyUrl(): string | null;
          private setRandomRoomUrlParameter;
          private generateRoomName;
          private _roomButton?;
          private _roomButtonIconJoin?;
          private _roomButtonIconLeave?;
          private createRoomButton;
          private updateRoomButtonState;
          private destroyRoomButton;
          private _viewOnlyButton?;
          private onEnableViewOnlyButton;
          private onDisableViewOnlyButton;
          private onCreateViewOnlyButton;
      }

     /**
      * SyncedTransform synchronizes position and rotation of a GameObject across the network.
      * When users interact with an object (e.g., via {@link DragControls}), they automatically
      * take ownership and their changes are broadcast to other users.
      *
      * **Features:**
      * - Automatic ownership transfer when interacting
      * - Smooth interpolation of remote updates
      * - Physics integration (can override kinematic state)
      * - Fast mode for rapidly moving objects
      *
      * **Requirements:**
      * - Active network connection via {@link SyncedRoom}
      * - Objects must have unique GUIDs (set automatically in Unity/Blender export)
      *
      * **Ownership:**
      * This component uses {@link OwnershipModel} internally to manage object ownership.
      * Only the client that owns an object can send transform updates. Use `requestOwnership()`
      * before modifying the transform, or check `hasOwnership()` to see if you can modify it.
      *
      * **Debug:** Use `?debugsync` URL parameter for logging.
      *
      * @example Basic networked object
      * ```ts
      * // Add to any object you want synced
      * const sync = myObject.addComponent(SyncedTransform);
      * sync.fastMode = true; // For fast-moving objects
      *
      * // Request ownership before modifying
      * sync.requestOwnership();
      * myObject.position.x += 1;
      * ```
      *
      * - Example: https://engine.needle.tools/samples/collaborative-sandbox
      *
      * @summary Synchronizes object transform over the network with ownership management
      * @category Networking
      * @group Components
      * @see {@link SyncedRoom} for room/session management
      * @see {@link OwnershipModel} for ownership management details
      * @see {@link DragControls} for interactive dragging with sync
      * @see {@link Duplicatable} for networked object spawning
      * @link https://engine.needle.tools/docs/networking.html
      */
      export declare class SyncedTransform extends Component {
          /** When true, overrides physics behavior when this object is owned by the local user */
          overridePhysics: boolean;
          /** Whether to smoothly interpolate position changes when receiving updates */
          interpolatePosition: boolean;
          /** Whether to smoothly interpolate rotation changes when receiving updates */
          interpolateRotation: boolean;
          /** When true, sends updates at a higher frequency, useful for fast-moving objects */
          fastMode: boolean;
          /** When true, notifies other clients when this object is destroyed */
          syncDestroy: boolean;
          // Backing OwnershipModel serving requestOwnership/freeOwnership/hasOwnership/isOwned
          private _model;
          // Presumably set when the local transform changed and an update should be sent — verify against implementation
          private _needsUpdate;
          // Cached rigidbody reference, if this object has one (used with overridePhysics)
          private rb;
          // Remembers the rigidbody's kinematic state — presumably restored later; TODO confirm
          private _wasKinematic;
          // True once at least one network update has been received for this object
          private _receivedDataBefore;
          // Interpolation targets applied when interpolatePosition/interpolateRotation are enabled
          private _targetPosition;
          private _targetRotation;
          // True when the last received update was sent in fast mode
          private _receivedFastUpdate;
          // Presumably defers an ownership request until connected — TODO confirm
          private _shouldRequestOwnership;
          /**
           * Requests ownership of this object on the network.
           * You need to be connected to a room for this to work.
           * Call this before modifying the object's transform to ensure your changes are synchronized.
           *
           * @example
           * ```ts
           * // Request ownership before modifying
           * syncedTransform.requestOwnership();
           * this.gameObject.position.y += 1;
           * ```
           * @see {@link OwnershipModel.requestOwnership} for more details
           */
          requestOwnership(): void;
          /**
           * Free ownership of this object on the network.
           * You need to be connected to a room for this to work.
           * This will also be called automatically when the component is disabled.
           * Call this when you're done modifying an object to allow other users to interact with it.
           * @see {@link OwnershipModel.freeOwnership} for more details
           */
          freeOwnership(): void;
          /**
           * Checks if this client has ownership of the object.
           * @returns `true` if this client has ownership, `false` if not, `undefined` if ownership state is unknown
           * @see {@link OwnershipModel.hasOwnership} for more details
           */
          hasOwnership(): boolean | undefined;
          /**
           * Checks if the object is owned by any client (local or remote).
           * @returns `true` if the object is owned, `false` if not, `undefined` if ownership state is unknown
           * @see {@link OwnershipModel.isOwned} for more details
           */
          isOwned(): boolean | undefined;
          // Bound event handlers — presumably registered with the networking connection for room-join and incoming-data events
          private joinedRoomCallback;
          private receivedDataCallback;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onDestroy */
          /**
           * Attempts to retrieve and apply the last known network state for this transform
           */
          private tryGetLastState;
          // Reusable scratch Euler for rotation conversion
          private tempEuler;
          /**
           * Handles incoming network data for this transform
           * @param data The model containing transform information
           */
          private onReceivedData;
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          // Presumably tracks the time/frame of the last received update — TODO confirm
          private receivedUpdate;
          // Last known local transform values — presumably used to detect changes worth broadcasting
          private lastPosition;
          private lastRotation;
          private lastScale;
          /* Excluded from this release type: onBeforeRender */
      }

      /**
       * Marks a field for automatic network synchronization across connected clients.
       * When a synced field changes, the new value is automatically broadcast to all users in the room.
       *
       * Primitives (string, number, boolean) sync automatically.
       * For arrays/objects, reassign to trigger sync: `this.myArray = this.myArray`
       *
       * @param onFieldChanged Optional callback when the field changes (locally or from network).
       * Return `false` to prevent syncing this change to others.
       * @returns A property decorator to apply to the field that should be synchronized.
       *
       * @example Basic sync
       * ```ts
       * class MyComponent extends Behaviour {
       *   @syncField() playerScore: number = 0;
       * }
       * ```
       * @example With change callback
       * ```ts
       * class MyComponent extends Behaviour {
       *   @syncField("onHealthChanged") health: number = 100;
       *
       *   onHealthChanged(newValue: number, oldValue: number) {
       *     console.log(`Health: ${oldValue} → ${newValue}`);
       *   }
       * }
       * ```
       * @example Preventing sync (one-way)
       * ```ts
       * class MyComponent extends Behaviour {
       *   @syncField(function(newVal, oldVal) {
       *     // Process incoming value but don't sync our changes
       *     return false;
       *   }) serverControlled: string = "";
       * }
       * ```
       * @see {@link serializable} for editor serialization
       * @category Networking
       * @link https://engine.needle.tools/docs/how-to-guides/networking/
       */
      export declare const syncField: (onFieldChanged?: string | FieldChangedCallbackFn | undefined | null) => (target: any, _propertyKey: string | {
          name: string;
      }) => void;

      /**
       * Instantiate an object across the network. See also {@link syncDestroy}.
       * @param object The object to instantiate for all connected clients
       * @param opts Instantiation options (parent, transform, `deleteOnDisconnect`, …) — see {@link SyncInstantiateOptions}
       * @param hostData Optional data sent along with the instantiate message — TODO confirm exact semantics
       * @param save Presumably controls whether the instantiation is persisted in the room state — verify against implementation
       * @returns The newly created object, or `null` if instantiation failed
       * @category Networking
       */
      export declare function syncInstantiate(object: IGameObject | Object3D, opts: SyncInstantiateOptions, hostData?: HostData, save?: boolean): IGameObject | null;

      /**
       * Instantiation options for {@link syncInstantiate}.
       * Combines the regular {@link IInstantiateOptions} with the networking-specific
       * `deleteOnDisconnect` flag picked from {@link IModel}.
       * @category Networking
       */
      export declare type SyncInstantiateOptions = IInstantiateOptions & Pick<IModel, "deleteOnDisconnect">;

      /**
       * Triggers a {@link PreliminaryAction} (such as {@link VisibilityAction}) when the object is tapped or clicked.
       * Works in the browser and in USDZ/QuickLook (Everywhere Actions).
       *
       * This is a marker component without configurable properties — add it to the object
       * that should react to taps.
       *
       * @see {@link VisibilityAction} for controlling object visibility on tap
       * @see {@link SetActiveOnClick} for a combined trigger and action component
       * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
       * @summary Triggers an action when the object is tapped/clicked
       * @category Everywhere Actions
       * @group Components
       */
      export declare class TapGestureTrigger extends PreliminaryTrigger {
      }

     declare type Target = USDObject | USDObject[] | Object3D | Object3D[];

      /**
       * Marks a GameObject as a valid teleportation target for VR locomotion.
       * Add this component to objects or surfaces where users should be able to teleport.
       *
       * **Usage:**
       * - Add to floor surfaces, platforms, or designated teleport spots
       * - Works with {@link XRControllerMovement} component's teleport system
       * - Can be used to restrict teleportation to specific areas
       *
       * **Setup:**
       * 1. Add this component to GameObjects that should be teleport destinations
       * 2. Configure {@link XRControllerMovement} to use teleport targets (optional)
       * 3. Test teleportation in VR mode
       *
       * This is a marker component without configurable properties; its presence alone
       * marks the object as a teleport destination.
       *
       * @example
       * ```ts
       * // Make a platform teleportable
       * const platform = myPlatform.addComponent(TeleportTarget);
       * ```
       *
       * @summary Marker component for valid VR teleportation destinations
       * @category XR
       * @group Components
       * @see {@link XRControllerMovement} for VR locomotion and teleport configuration
       * @see {@link WebXR} for general WebXR setup
       */
      export declare class TeleportTarget extends Component {
      }

     /* Excluded from this release type: TestRunner */

     /* Excluded from this release type: TestSimulateUserData */

      /**
       * [Text](https://engine.needle.tools/docs/api/Text) displays text content in the UI. Supports custom fonts, colors,
       * alignment, and basic rich text formatting.
       *
       * **Text properties:**
       * - `text` - The string content to display
       * - `fontSize` - Size of the text in pixels
       * - `color` - Text color (inherited from Graphic)
       * - `alignment` - Text anchor position (UpperLeft, MiddleCenter, etc.)
       *
       * **Fonts:**
       * Set the `font` property to a URL pointing to a font file.
       * Supports MSDF (Multi-channel Signed Distance Field) fonts for crisp rendering.
       *
       * @example Update text at runtime
       * ```ts
       * const text = myLabel.getComponent(Text);
       * text.text = "Score: " + score;
       * text.fontSize = 24;
       * text.color = new RGBAColor(1, 1, 1, 1);
       * ```
       *
       * @summary Display text in the UI
       * @category User Interface
       * @group Components
       * @see {@link Canvas} for the UI root
       * @see {@link TextAnchor} for alignment options
       * @see {@link FontStyle} for bold/italic styles
       */
      declare class Text_2 extends Graphic implements IHasAlphaFactor, ICanvasEventReceiver {
          /** Text anchor position (UpperLeft, MiddleCenter, etc.) */
          alignment: TextAnchor_2;
          /** Behavior for text exceeding the rect vertically — TODO confirm overflow semantics */
          verticalOverflow: VerticalWrapMode;
          /** Behavior for text exceeding the rect horizontally — TODO confirm overflow semantics */
          horizontalOverflow: HorizontalWrapMode;
          /** Additional spacing applied between lines */
          lineSpacing: number;
          /** When true, basic rich text tags in `text` are parsed */
          supportRichText: boolean;
          /** URL pointing to a font file (MSDF fonts supported) */
          font?: string;
          /** Bold/italic font styling */
          fontStyle: FontStyle;
          /** Applies an alpha multiplier to the rendered text (implements {@link IHasAlphaFactor}) */
          setAlphaFactor(factor: number): void;
          /** The string content to display */
          get text(): string;
          set text(val: string);
          private set_text;
          /** Size of the text in pixels */
          get fontSize(): number;
          set fontSize(val: number);
          // Presumably the text color converted to sRGB — verify against implementation
          private sRGBTextColor;
          protected onColorChanged(): void;
          onParentRectTransformChanged(): void;
          onBeforeCanvasRender(_canvas: ICanvas): void;
          private updateOverflow;
          protected onCreate(_opts: any): void;
          onAfterAddedToScene(): void;
          // Backing fields for the `text` and `fontSize` accessors
          private _text;
          private _fontSize;
          // Underlying three-mesh-ui text element
          private _textMeshUi;
          private getTextOpts;
          onEnable(): void;
          onDisable(): void;
          onDestroy(): void;
          private getAlignment;
          private feedText;
          private _didHandleTextRenderOnTop;
          private handleTextRenderOnTop;
          private renderOnTopCoroutine;
          // Rich-text tag parsing helpers
          private handleTag;
          private getText;
          private getNextTag;
          /**
           * Update provided opts to have a proper fontDefinition : family+weight+style
           * Ensure Family and Variant are registered in FontLibrary
           *
           * @param opts
           * @param fontStyle
           * @private
           */
          private setFont;
          private getFamilyNameWithCorrectSuffix;
      }
      export { Text_2 as Text }

      /**
       * Anchor/alignment positions for text, combining vertical (Upper/Middle/Lower)
       * and horizontal (Left/Center/Right) placement.
       * `Custom` presumably allows a user-defined anchor — TODO confirm.
       */
      declare enum TextAnchor {
          UpperLeft = 0,
          UpperCenter = 1,
          UpperRight = 2,
          MiddleLeft = 3,
          MiddleCenter = 4,
          MiddleRight = 5,
          LowerLeft = 6,
          LowerCenter = 7,
          LowerRight = 8,
          Custom = 9
      }

      /**
       * Anchor/alignment positions used by {@link Text.alignment} —
       * same layout as {@link TextAnchor} but without a `Custom` entry.
       */
      declare enum TextAnchor_2 {
          UpperLeft = 0,
          UpperCenter = 1,
          UpperRight = 2,
          MiddleLeft = 3,
          MiddleCenter = 4,
          MiddleRight = 5,
          LowerLeft = 6,
          LowerCenter = 7,
          LowerRight = 8
      }

      /** Builds {@link USDZText} objects, either as a single line or as a size-constrained multi-line block. */
      export declare class TextBuilder {
          /** Creates a single-line text object. @param pointSize Text size in points — TODO confirm units @param depth Extrusion depth */
          static singleLine(str: string, pointSize?: number, depth?: number): USDZText;
          /** Creates a multi-line text block constrained to `width` × `height` with the given alignment and wrap mode */
          static multiLine(str: string, width: number, height: number, horizontal: HorizontalAlignment, vertical: VerticalAlignment, wrapMode?: TextWrapMode): USDZText;
      }

      /**
       * USD exporter extension that converts text content of objects into USD
       * during export — presumably for USDZ/QuickLook; verify against the exporter pipeline.
       */
      export declare class TextExtension implements IUSDExporterExtension {
          /** Name under which this extension is registered with the exporter */
          get extensionName(): string;
          /** Writes the text representation of `object` into the given USD object */
          exportText(object: Object3D, newModel: USDObject, _context: USDZExporterContext): void;
          private convertToTextSize;
          private setOverflow;
          private setTextAlignment;
      }

      /**
       * Options to create a 3D text object. Used by {@link ObjectUtils.createText}
       */
      export declare type TextOptions = Omit<ObjectOptions, "texture"> & {
          /**
           * Optional: The font to use for the text. If not provided, the default font will be used
           */
          font?: Font;
          /**
           * If the font is not provided, `familyFamily` can be used to load a font from the default list.
           * NOTE(review): the property name looks like a typo for `fontFamily` — kept as-is for API compatibility.
           */
          familyFamily?: "OpenSans" | "Helvetiker";
          /**
           * Optional: The depth of the text.
           * @default .1
           */
          depth?: number;
          /**
           * Optional: If the text should have a bevel effect
           * @default false
           */
          bevel?: boolean;
          /**
           * Invoked when the font geometry is loaded
           */
          onGeometry?: (obj: Mesh) => void;
      };

      /** Maps texture slot names to a texture plus an optional Vector4 scale — TODO confirm how the scale components are interpreted. */
      declare type TextureMap = {
          [name: string]: {
              texture: Texture;
              scale?: Vector4;
          };
      };

      /**
       * Particle-system module that animates particle UVs over a sprite-sheet grid
       * of `numTilesX` × `numTilesY` frames (API mirrors Unity's TextureSheetAnimationModule).
       */
      export declare class TextureSheetAnimationModule {
          animation: ParticleSystemAnimationType;
          /** Whether this module is active */
          enabled: boolean;
          /** Number of times the animation cycles over the particle lifetime — TODO confirm */
          cycleCount: number;
          /** Curve selecting the frame over the particle's lifetime */
          frameOverTime: MinMaxCurve;
          frameOverTimeMultiplier: number;
          /** Columns in the sprite sheet */
          numTilesX: number;
          /** Rows in the sprite sheet */
          numTilesY: number;
          /** Initial frame selection */
          startFrame: MinMaxCurve;
          startFrameMultiplier: number;
          rowMode: ParticleSystemAnimationRowMode;
          rowIndex: number;
          spriteCount: number;
          timeMode: ParticleSystemAnimationTimeMode;
          private sampleOnceAtStart;
          /** @returns the first frame index for a new particle */
          getStartIndex(): number;
          /** Evaluates the frame index at normalized time `t01`; may return undefined — TODO confirm when */
          evaluate(t01: number): number | undefined;
          private getIndex;
      }

      /** @deprecated use Graphics.textureToCanvas */
      export declare function textureToCanvas(texture: Texture, force?: boolean): HTMLCanvasElement | null;

      /**
       * Defines offset and repeat transformations for texture coordinates.
       * Both properties are optional — when omitted, presumably no transform is applied
       * for that component; TODO confirm defaults.
       */
      declare interface TextureTransform {
          /** UV offset applied to the texture */
          offset?: Vector2;
          /** UV repeat/scale applied to the texture */
          repeat?: Vector2;
      }

      /**
       * Line-wrapping behavior for built text: a single unwrapped line, breaks only at
       * explicit line breaks, or free-flowing word wrap — naming suggests USD text
       * wrap modes; TODO confirm exact semantics.
       */
      declare enum TextWrapMode {
          singleLine = "singleLine",
          hardBreaks = "hardBreaks",
          flowing = "flowing"
      }

      /**
       * [TiltShiftEffect](https://engine.needle.tools/docs/api/TiltShiftEffect) Tilt Shift effect simulates a miniature scene by applying a selective focus blur to the rendered image.
       * This effect creates a shallow depth of field, making real-world scenes appear as if they are small-scale models.
       * It is often used in photography and cinematography to draw attention to specific areas of the scene while blurring out the rest.
       * @summary Tilt Shift Post-Processing Effect
       * @category Effects
       * @group Components
       */
      export declare class TiltShiftEffect extends PostProcessingEffect {
          /** The registered effect type name */
          get typeName(): string;
          /** Offset of the in-focus band — TODO confirm axis/units */
          offset: VolumeParameter;
          /** Rotation of the focus band — TODO confirm units (radians vs degrees) */
          rotation: VolumeParameter;
          /** Size of the area that stays in focus */
          focusArea: VolumeParameter;
          /** Softness of the transition between focused and blurred regions */
          feather: VolumeParameter;
          /** Blur kernel size — presumably larger means stronger blur; TODO confirm */
          kernelSize: VolumeParameter;
          /** Resolution scale of the effect — TODO confirm what it scales */
          resolutionScale: VolumeParameter;
          init(): void;
          onCreateEffect(): EffectProviderResult | undefined;
      }

      /**
       * Provides time-related information for frame-based game logic.
       * Access via `this.context.time` from any component.
       *
       * @example Using deltaTime for frame-rate independent movement
       * ```ts
       * update() {
       *   // Move 1 unit per second regardless of frame rate
       *   this.gameObject.position.x += 1 * this.context.time.deltaTime;
       * }
       * ```
       * @example Checking elapsed time
       * ```ts
       * start() {
       *   console.log(`Time since start: ${this.context.time.time}s`);
       *   console.log(`Current frame: ${this.context.time.frameCount}`);
       * }
       * ```
       */
      export declare class Time implements ITime {
          /** The time in seconds since the start of Needle Engine. */
          get time(): number;
          private set time(value);
          private _time;
          /** The time in seconds it took to complete the last frame (Read Only). */
          get deltaTime(): number;
          private set deltaTime(value);
          private _deltaTime;
          /** The time in seconds it took to complete the last frame (Read Only). Timescale is not applied. */
          get deltaTimeUnscaled(): number;
          private _deltaTimeUnscaled;
          /**
           * The scale at which time passes. Default is 1.
           * - Values < 1 create slow motion (e.g. 0.5 = half speed)
           * - Values > 1 speed up time (e.g. 2 = double speed)
           * - Value of 0 effectively pauses time-dependent logic
           */
          timeScale: number;
          /** same as frameCount */
          get frame(): number;
          private set frame(value);
          private _frame;
          /** The total number of frames that have passed (Read Only). Same as frame */
          get frameCount(): number;
          /** The real time in seconds since startup (Read Only) — presumably unaffected by {@link timeScale}; TODO confirm against implementation. */
          get realtimeSinceStartup(): number;
          /**
           * @returns FPS for this frame.
           * Note that this returns the raw value (e.g. 59.88023952362959) and will fluctuate a lot between frames.
           * If you want a more stable FPS, use `smoothedFps` instead.
           */
          get fps(): number;
          /**
           * Approximated frames per second
           * @returns the smoothed FPS value over the last 60 frames with decimals.
           */
          get smoothedFps(): number;
          /** The smoothed time in seconds it took to complete the last frame (Read Only). */
          get smoothedDeltaTime(): number;
          // Underlying clock used as the time source — presumably a three.js Clock
          private clock;
          // Backing fields for the smoothed getters
          private _smoothedFps;
          private _smoothedDeltaTime;
          // Ring buffer of recent frame timings used to compute smoothedFps
          private readonly _fpsSamples;
          private _fpsSampleIndex;
          constructor();
          /* Excluded from this release type: update */
      }

      /**
       * Serialized timeline asset: a named collection of tracks.
       * @category Animation and Sequencing
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare type TimelineAssetModel = {
          /** Display name of the timeline asset */
          name: string;
          /** The tracks contained in this timeline */
          tracks: TrackModel[];
      };

     declare type TonemappingAttributeOptions = "none" | "linear" | "neutral" | "agx";

      /**
       * [ToneMappingEffect](https://engine.needle.tools/docs/api/ToneMappingEffect) adjusts the brightness and contrast of the rendered scene to map high dynamic range (HDR) colors to a displayable range.
       * This effect is essential for achieving realistic lighting and color representation in 3D scenes, as it helps to preserve details in both bright and dark areas.
       * Various tonemapping algorithms can be applied to achieve different visual styles and effects.
       * @summary Tonemapping Post-Processing Effect
       * @category Effects
       * @group Components
       */
      export declare class ToneMappingEffect extends PostProcessingEffect {
          /** The registered effect type name */
          get typeName(): string;
          /** Selected tonemapping algorithm (see {@link setMode}) */
          readonly mode: VolumeParameter;
          /** Exposure applied by the tonemapper */
          readonly exposure: VolumeParameter;
          /** Set the tonemapping mode to e.g. "agx" */
          setMode(mode: NEToneMappingModeNames): this;
          /** Identifies this effect as a tonemapping effect */
          get isToneMapping(): boolean;
          onEffectEnabled(): void;
          // Underlying postprocessing effect instance
          private _tonemappingEffect;
          onCreateEffect(): EffectProviderResult | undefined;
          onBeforeRender(): void;
      }

     export declare function toSourceId(src: string | null): SourceIdentifier | undefined;

      /**
       * A TrackHandler is responsible for evaluating a specific type of timeline track.
       * A timeline track can be an animation track, audio track, signal track, control track etc and is controlled by a {@link PlayableDirector}.
       */
      export declare abstract class TrackHandler {
          /** The director that owns and drives this track */
          director: PlayableDirector;
          /** The serialized track data this handler evaluates */
          track: Models.TrackModel;
          /** Whether this track is muted — presumably muted tracks are skipped during evaluation; TODO confirm */
          get muted(): boolean;
          set muted(val: boolean);
          /** Iterates the track's clips, optionally in reverse order */
          forEachClip(backwards?: boolean): IterableIterator<Models.ClipModel>;
          onEnable?(): any;
          onDisable?(): any;
          onDestroy?(): any;
          /** Evaluates the track at the given timeline time (in seconds — TODO confirm unit) */
          abstract evaluate(time: number): any;
          onMuteChanged?(): any;
          onPauseChanged?(): any;
          /** invoked when PlayableDirectory playmode state changes (paused, playing, stopped) */
          onStateChanged?(isPlaying: boolean): any;
          /** @returns the local time within the given clip for the given timeline time */
          getClipTime(time: number, model: Models.ClipModel): number;
          /** @returns the normalized (0..1) local time within the given clip */
          getClipTimeNormalized(time: number, model: Models.ClipModel): number;
          /** Computes the blend weight of the clip at `index` at the given time — TODO confirm blending behavior */
          evaluateWeight(time: number, index: number, models: Array<Models.ClipModel>, isActive?: boolean): number;
      }

      /**
       * Serialized timeline track data.
       * @category Animation and Sequencing
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare type TrackModel = {
          /** Display name of the track */
          name: string;
          /** The kind of track (animation, audio, signal, …) */
          type: TrackType;
          /** Muted tracks are presumably not evaluated — TODO confirm */
          muted: boolean;
          /** Output bindings of the track — TODO confirm element semantics */
          outputs: Array<null | string | object>;
          /** Clips placed on this track */
          clips?: Array<ClipModel>;
          /** Markers placed on this track */
          markers?: Array<MarkerModel>;
          /** Optional transform offset applied by the track */
          trackOffset?: TrackOffset;
          /** Volume — presumably only meaningful for audio tracks */
          volume?: number;
      };

      /**
       * Transform offset applied by a timeline track.
       * @category Animation and Sequencing
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare type TrackOffset = {
          /** Position offset */
          position: Vec3_3 | Vector3;
          /** Rotation offset */
          rotation: Quat | Quaternion;
      };

      /** Built-in timeline track handler implementations, grouped as a namespace. */
      declare namespace Tracks {
          export {
              TrackHandler,
              AnimationTrackHandler,
              AudioTrackHandler,
              MarkerTrackHandler,
              SignalTrackHandler,
              ControlTrackHandler
          }
      }

      /**
       * The kind of a timeline track; each value is the serialized track type name
       * found in exported timeline data.
       * @category Animation and Sequencing
       * @see {@link TimelineAssetModel} for the data structure of a timeline asset, which can be played using the PlayableDirector component.
       * @see {@link PlayableDirector} for the main component to control timelines in Needle Engine.
       */
      export declare enum TrackType {
          Activation = "ActivationTrack",
          Animation = "AnimationTrack",
          Audio = "AudioTrack",
          Control = "ControlTrack",
          Marker = "MarkerTrack",
          Signal = "SignalTrack"
      }

      /**
       * Particle-system module configuring trails/ribbons rendered behind particles
       * (API mirrors Unity's ParticleSystem.TrailModule).
       */
      export declare class TrailModule {
          /** Whether this module is active */
          enabled: boolean;
          attachRibbonToTransform: boolean;
          colorOverLifetime: MinMaxGradient;
          colorOverTrail: MinMaxGradient;
          dieWithParticles: boolean;
          inheritParticleColor: boolean;
          lifetime: MinMaxCurve;
          lifetimeMultiplier: number;
          minVertexDistance: number;
          mode: ParticleSystemTrailMode;
          ratio: number;
          ribbonCount: number;
          shadowBias: number;
          sizeAffectsLifetime: boolean;
          sizeAffectsWidth: boolean;
          splitSubEmitterRibbons: boolean;
          textureMode: ParticleSystemTrailTextureMode;
          widthOverTrail: MinMaxCurve;
          widthOverTrailMultiplier: number;
          worldSpace: boolean;
          /** Samples the trail width — `pos01` is presumably the normalized position along the trail; TODO confirm parameter semantics */
          getWidth(size: number, _life01: number, pos01: number, t: number): number;
          /** Evaluates the trail color at the given life/position — presumably written into `color` in place; TODO confirm */
          getColor(color: Vector4 | Vector4_2, life01: number, pos01: number): void;
      }

      /**
       * Gathers position/rotation/scale keyframe tracks for one target object and
       * resamples them into combined per-time TRS values — presumably used by the
       * animation exporters; verify against exporter code.
       */
      export declare class TransformData {
          /** The clip the tracks originate from, or null — presumably for generated rest poses; TODO confirm */
          clip: AnimationClip | null;
          /** Position keyframe track, if any */
          pos?: KeyframeTrack;
          /** Rotation keyframe track, if any */
          rot?: KeyframeTrack;
          /** Scale keyframe track, if any */
          scale?: KeyframeTrack;
          private root;
          private target;
          private duration;
          private useRootMotion;
          /** This value can theoretically be anything – a value of 1 is good to clearly see animation gaps.
           * For production, a value of 1/60 is enough, since the files can then still properly play back at 60fps.
           */
          static frameRate: number;
          // Extra duration appended to animations — TODO confirm purpose
          static animationDurationPadding: number;
          // Duration used for generated rest-pose clips
          static restPoseClipDuration: number;
          /**
           * @param root Root object — presumably used for root-motion handling; TODO confirm
           * @param target The object whose transform the tracks animate
           * @param clip The source animation clip, or null
           */
          constructor(root: Object3D | null, target: Object3D, clip: AnimationClip | null);
          /** Registers a keyframe track — presumably assigned to pos/rot/scale by its type; TODO confirm */
          addTrack(track: KeyframeTrack): void;
          /** @returns the number of frames over the duration at {@link frameRate} */
          getFrames(): number;
          /** @returns the total duration in seconds */
          getDuration(): number;
          /** @returns the merged, sorted list of sample times across the requested track kinds */
          getSortedTimesArray(generatePos?: boolean, generateRot?: boolean, generateScale?: boolean): number[];
          /**
           * Returns an iterator that yields the values for each time sample.
           * Values are reused objects - if you want to append them to some array
           * instead of processing them right away, clone() them.
           * @param timesArray
           * @param generatePos
           * @param generateRot
           * @param generateScale
           */
          getValues(timesArray: number[], generatePos?: boolean, generateRot?: boolean, generateScale?: boolean): Generator<{
              time: number;
              translation: Vector3;
              rotation: Quaternion;
              scale: Vector3;
              index: number;
          }, void, unknown>;
      }

      /**
       * The [TransformGizmo](https://engine.needle.tools/docs/api/TransformGizmo) displays manipulation controls for translating, rotating, and scaling objects.
       * Wraps three.js {@link TransformControls} with keyboard shortcuts and snapping support.
       *
       * **Keyboard shortcuts:**
       * - `W` - Translate mode
       * - `E` - Rotate mode
       * - `R` - Scale mode
       * - `Q` - Toggle local/world space
       * - `Shift` (hold) - Enable grid snapping
       * - `+/-` - Adjust gizmo size
       * - `X/Y/Z` - Toggle axis visibility
       * - `Space` - Toggle controls enabled
       *
       * **Snapping:**
       * Configure grid snapping with `translationSnap`, `rotationSnapAngle`, and `scaleSnap`.
       *
       * **Networking:**
       * Automatically works with {@link SyncedTransform} for multiplayer editing.
       *
       * @example Add transform gizmo to an object
       * ```ts
       * const gizmo = editableObject.addComponent(TransformGizmo);
       * gizmo.translationSnap = 0.5; // Snap to 0.5 unit grid
       * gizmo.rotationSnapAngle = 45; // Snap to 45° increments
       * ```
       *
       * @summary Object manipulation gizmo for translate/rotate/scale
       * @category Helpers
       * @group Components
       * @see {@link DragControls} for simpler drag-only interaction
       * @see {@link SyncedTransform} for network synchronization
       * @see {@link OrbitControls} - automatically disabled during gizmo drag
       * @link https://threejs.org/docs/index.html#examples/en/controls/TransformControls for underlying three.js controls and additional features
       */
      export declare class TransformGizmo extends Component {
          /**
           * When true, this is considered a helper gizmo and will only be shown if showGizmos is enabled in engine parameters.
           */
          isGizmo: boolean;
          /**
           * Specifies the translation grid snap value in world units.
           * Applied when holding Shift while translating an object.
           */
          translationSnap: number;
          /**
           * Specifies the rotation snap angle in degrees.
           * Applied when holding Shift while rotating an object.
           */
          rotationSnapAngle: number;
          /**
           * Specifies the scale snapping value.
           * Applied when holding Shift while scaling an object.
           */
          scaleSnap: number;
          /**
           * Gets the underlying three.js {@link TransformControls} instance.
           * @returns The TransformControls instance or undefined if not initialized.
           */
          get control(): TransformControls | undefined;
          // Backing field for the `control` getter; undefined until initialized
          private _control?;
          // Cached OrbitControls reference — disabled while the gizmo is being dragged
          private orbit?;
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /**
           * Enables grid snapping for transform operations according to set snap values.
           * This applies the translationSnap, rotationSnapAngle, and scaleSnap properties to the controls.
           */
          enableSnapping(): void;
          /**
           * Disables grid snapping for transform operations.
           * Removes all snapping constraints from the transform controls.
           */
          disableSnapping(): void;
          /**
           * Event handler for when dragging state changes.
           * Disables orbit controls during dragging and requests ownership of the transform if it's synchronized.
           * @param event The drag change event
           */
          private onControlChangedEvent;
          /**
           * Handles keyboard shortcuts for transform operations:
           * - Q: Toggle local/world space
           * - W: Translation mode
           * - E: Rotation mode
           * - R: Scale mode
           * - Shift: Enable snapping (while held)
           * - +/-: Adjust gizmo size
           * - X/Y/Z: Toggle visibility of respective axis
           * - Spacebar: Toggle controls enabled state
           * @param event The keyboard event
           */
          private windowKeyDownListener;
          /**
           * Handles keyboard key release events.
           * Currently only handles releasing Shift key to disable snapping.
           * @param event The keyboard event
           */
          private windowKeyUpListener;
      }

      /** Animator state-machine transition data — field names mirror Unity's AnimatorStateTransition. */
      export declare type Transition = {
          /** True when this transition exits the state machine — TODO confirm */
          isExit?: boolean;
          /** Time in the source state after which the transition may trigger — presumably normalized; TODO confirm */
          exitTime: number;
          hasFixedDuration?: boolean;
          /** Start time offset into the destination state */
          offset: number;
          /** Blend duration of the transition */
          duration: number;
          /** Whether `exitTime` is respected */
          hasExitTime: number | boolean;
          /** Index of (or reference to) the destination state */
          destinationState: number | State;
          /** Conditions evaluated for this transition — presumably all must be met; TODO confirm */
          conditions: Condition[];
      };

      /** Visual transition mode of a selectable UI element (mirrors Unity's Selectable.Transition). */
      declare enum Transition_2 {
          None = 0,
          ColorTint = 1,
          SpriteSwap = 2,
          Animation = 3
      }

      /**
       * Factory for {@link TriggerModel}s (scene-start, tap and camera-proximity triggers)
       * used by Everywhere Actions.
       */
      export declare class TriggerBuilder {
          // Cached singleton instance for the scene-start trigger
          private static __sceneStartTrigger?;
          /** @returns a trigger that fires when the scene starts */
          static sceneStartTrigger(): TriggerModel;
          /** Trigger that fires when an object has been tapped/clicked.
           * @param targetObject The object or list of objects that can be interacted with.
           * @param inputMode Input Mode (direct and/or indirect). Only available for USDObject targets. Only supported on Vision OS at the moment. */
          static tapTrigger(targetObject: Target, inputMode?: {
              direct: boolean;
              indirect: boolean;
          }): TriggerModel;
          /** @returns true if the given trigger was created as a tap trigger */
          static isTapTrigger(trigger?: TriggerModel): boolean;
          /** @returns a trigger that fires when the camera comes within `distance` of the target object */
          static proximityToCameraTrigger(targetObject: Target, distance: number): TriggerModel;
      }

      /** Pairs an {@link EventType} with the callback list invoked when that event fires. */
      declare class TriggerEvent {
          eventID: EventType;
          callback: EventList<any>;
      }

      /**
       * Serialized trigger definition written into a USD document
       * (see {@link TriggerBuilder} for creating common trigger types).
       */
      export declare class TriggerModel implements IBehaviorElement {
          // Counter presumably used to generate unique trigger ids
          static global_id: number;
          /** Unique id of this trigger */
          id: string;
          /** Id of (or reference to) the object this trigger observes */
          targetId?: string | Target;
          tokenId?: string;
          /** Trigger type name — TODO confirm allowed values */
          type?: string;
          /** Distance threshold — presumably for proximity triggers */
          distance?: number;
          constructor(targetId?: string | Target, id?: string);
          /** Serializes this trigger into the given USD document */
          writeTo(document: USDDocument, writer: USDWriter): void;
      }

     export declare function tryCastBinary(bin: flatbuffers.ByteBuffer): object;

      /** Attempts to determine the file type of a binary file by looking at the first few bytes of the file.
       * @param url The URL the data was loaded from
       * @param data The binary content (only the first few bytes are inspected)
       * @param response The HTTP response the data originated from
       * @returns the detected mimetype
       * @hidden
       */
      export declare function tryDetermineMimetypeFromBinary(url: string, data: ArrayBuffer, response: Response): NeedleMimetype;

      /**
       * Tries to determine the file type of a file from its URL.
       * This method does perform a range request to the server to get the first few bytes of the file.
       * If the file type can not be determined it will return "unknown".
       * @param url The URL of the file
       * @param opts `useExtension`: if true the file type will be determined by the file extension first - if the file extension is not known it will then check the header
       * @returns the detected mimetype, or "unknown" if it could not be determined
       * @example
       * ```typescript
       * const url = "https://example.com/model.glb";
       * const fileType = await tryDetermineMimetypeFromURL(url, { useExtension: true });
       * console.log(fileType); // "glb"
       * ```
       */
      export declare function tryDetermineMimetypeFromURL(url: string, opts: {
          useExtension: boolean;
      }): Promise<NeedleMimetype>;

      /**
       * Searches an object hierarchy for the object with the given guid.
       * @param globalObjectIdentifier The guid of the object to find
       * @param obj The object to search in
       * @param recursive If true the search will be recursive
       * @param searchComponents If true the search will also search components
       * @returns the first object that has the globalObjectIdentifier as a guid */
      export declare function tryFindObject(globalObjectIdentifier: string, obj: any, recursive?: boolean, searchComponents?: boolean): any;

     /** @returns the guid of the given object if one is available, otherwise undefined or null. */
     export declare function tryGetGuid(obj: any): string | undefined | null;

     /** Any concrete (newable) constructor. */
     declare type Type = new (...args: any[]) => any;

     /** Resolves a constructor for the given raw data, or null when the data can not be resolved. */
     declare type TypeResolver<T> = (data: any) => Constructor<T> | null;

     /**
      * implement and call super(<type string or array>) with the type names this serializer can handle
      * for example:
      * class ColorSerializer extends TypeSerializer {
      *  constructor() {
      *      super("Color")
      *  }
      * }
      */
     declare abstract class TypeSerializer implements ITypeSerializer {
         /** Optional serializer name passed to the constructor */
         readonly name?: string;
         constructor(type: Constructor<any> | Constructor<any>[], name?: string);
         /** Converts the given data into its serialized representation. */
         abstract onSerialize(data: any, context: SerializationContext): any | void;
         /** Reconstructs a value from previously serialized data. */
         abstract onDeserialize(data: any, context: SerializationContext): any | void;
     }

     /** Global registry mapping type names to constructors — see {@link _TypeStore}. */
     export declare const TypeStore: _TypeStore;

     /** Registry mapping string keys to constructors, with support for lazily loaded types. */
     declare class _TypeStore {
         private _types;
         private _reverseTypes;
         private _lazyLoaders;
         constructor();
         /**
          * add a type to the store
          */
         add(key: string, type: Type): void;
         /**
          * Register a lazy-loadable type. The loader is called on first use via {@link getAsync}.
          * Once resolved, the type is cached for synchronous access via {@link get}.
          */
         addLazy(key: string, loader: () => Promise<Type>): void;
         /**
          * @returns the type for the given key if registered
          */
         get(key: string): Type | null;
         /**
          * Async version of {@link get} that also resolves lazy-registered types.
          * After resolving, the type is cached for future synchronous access.
          */
         getAsync(key: string): Promise<Type | null>;
         /**
          * @returns the key/name for the given type if registered
          */
         getKey(type: Type): string | null;
     }

     /** Generates unique ids. */
     export declare interface UIDProvider {
         /** Seed value used by the generator */
         seed: number;
         generateUUID(): string;
     }

     /** Helpers for raycasting against UI objects. */
     export declare class UIRaycastUtils {
         /** returns the real object when dealing with shadow UI */
         static getObject(obj: Object3D): Object3D;
         /** @returns true when the given object can receive UI interaction; optionally reports the canvas group / graphic found via `out`. */
         static isInteractable(obj: Object3D, out?: {
             canvasGroup?: ICanvasGroup;
             graphic?: IGraphic;
         }): boolean;
         private static tryFindCanvasGroup;
     }

     /** Root component of a UI hierarchy — extends {@link BaseUIComponent}. */
     export declare class UIRootComponent extends BaseUIComponent {
         awake(): void;
     }

     /* Excluded from this release type: unregisterHotReloadType */

     /** Removes a write-watch callback for the given vector (presumably the counterpart to a watchWrite registration — confirm in implementation). */
     export declare function unwatchWrite(vec: Vector_2, cb: Function): void;

     /** {@link TypeSerializer} for URI strings. */
     export declare class UriSerializer extends TypeSerializer {
         constructor();
         onSerialize(_data: string, _context: SerializationContext): null;
         onDeserialize(data: string, _context: SerializationContext): string | undefined;
     }

     /**
      * UsageMarker indicates an object is currently being interacted with.
      * Components like {@link DragControls} add this to prevent accidental deletion
      * by {@link DeleteBox} while the user is dragging.
      *
      * @example Check if object is in use
      * ```ts
      * const marker = object.getComponent(UsageMarker);
      * if (marker?.isUsed) {
      *   console.log("Object is being used by:", marker.usedBy);
      * }
      * ```
      *
      * @summary Marks object as currently being interacted with
      * @category Interactivity
      * @group Components
      * @see {@link DeleteBox} respects this marker
      * @see {@link DragControls} adds this during drag
      */
     export declare class UsageMarker extends Component {
         /** True while the object is currently being interacted with */
         isUsed: boolean;
         /** The entity currently using the object */
         usedBy: any;
     }

     /** Root of a USD scene description; owns the {@link USDObject} hierarchy written during export. */
     export declare class USDDocument extends USDObject {
         stageLength: number;
         get isDocumentRoot(): boolean;
         get isDynamic(): boolean;
         constructor();
         add(child: USDObject): void;
         remove(child: USDObject): void;
         /** Visits objects in the hierarchy; `current` can be passed to restrict the starting point. */
         traverse(callback: (object: USDObject) => void, current?: USDObject | null): void;
         /** @returns the object with the given uuid, if present in the hierarchy */
         findById(uuid: string): USDObject | undefined;
         buildHeader(_context: USDZExporterContext): string;
     }

     /** A node in the USD export hierarchy: transform plus optional geometry, material, camera and children. */
     export declare class USDObject {
         static USDObject_export_id: number;
         uuid: string;
         name: string;
         /** If no type is provided, type is chosen automatically (Xform or Mesh) */
         type?: string;
         /** MaterialBindingAPI and SkelBindingAPI are handled automatically, extra schemas can be added here */
         extraSchemas: string[];
         displayName?: string;
         visibility?: "inherited" | "invisible";
         getMatrix(): Matrix4;
         setMatrix(value: any): void;
         /** @deprecated Use `transform`, or `getMatrix()` if you really need the matrix */
         get matrix(): Matrix4;
         /** @deprecated Use `transform`, or `setMatrix()` if you really need the matrix */
         set matrix(value: Matrix4);
         transform: USDObjectTransform | null;
         private _isDynamic;
         get isDynamic(): boolean;
         private set isDynamic(value);
         geometry: BufferGeometry | null;
         material: MeshStandardMaterial | MeshBasicMaterial | Material | MeshPhysicalNodeMaterial | null;
         camera: PerspectiveCamera | OrthographicCamera | null;
         parent: USDObject | null;
         skinnedMesh: SkinnedMesh | null;
         children: Array<USDObject | null>;
         animations: AnimationClip[] | null;
         /** Listeners per event name — see {@link addEventListener} */
         _eventListeners: Record<USDObjectEventType, Function[]>;
         needsTranslate: boolean;
         needsOrient: boolean;
         needsScale: boolean;
         static createEmptyParent(object: USDObject): USDObject;
         static createEmpty(): USDObject;
         constructor(id: any, name: any, transform?: USDObjectTransform | null, mesh?: BufferGeometry | null, material?: MeshStandardMaterial | MeshBasicMaterial | MeshPhysicalNodeMaterial | Material | null, camera?: PerspectiveCamera | OrthographicCamera | null, skinnedMesh?: SkinnedMesh | null, animations?: AnimationClip[] | null);
         is(obj: any): boolean;
         isEmpty(): boolean;
         // NOTE(review): clone vs deepClone semantics presumed from naming — confirm in implementation
         clone(): USDObject;
         deepClone(): USDObject;
         /** @returns the path of this object within the document hierarchy */
         getPath(): string;
         add(child: any): void;
         remove(child: any): void;
         addEventListener(evt: USDObjectEventType, listener: (writer: USDWriter, context: USDZExporterContext) => void): void;
         removeEventListener(evt: any, listener: (writer: USDWriter, context: USDZExporterContext) => void): void;
         onSerialize(writer: any, context: any): void;
     }

     /**
      * Event names accepted by {@link USDObject.addEventListener}. "serialize" is the known
      * built-in event; the `({} & string)` branch keeps the type open to custom event names
      * while preserving IDE autocomplete for the literal. The previous `&` collapsed the
      * whole type to just "serialize", defeating the open-string idiom.
      */
     declare type USDObjectEventType = "serialize" | ({} & string);

     /** Decomposed transform (position / quaternion / scale) of a {@link USDObject}. */
     declare type USDObjectTransform = {
         position: Vector3 | null;
         quaternion: Quaternion | null;
         scale: Vector3 | null;
     };

     /** Small helper for building indented USD text output. */
     export declare class USDWriter {
         /** The accumulated output text */
         str: string;
         /** Current indentation level */
         indent: number;
         constructor();
         /** Resets the accumulated output. */
         clear(): void;
         beginBlock(str?: string | undefined, char?: string, createNewLine?: boolean): void;
         closeBlock(char?: string): void;
         beginArray(str: any): void;
         closeArray(): void;
         appendLine(str?: string): void;
         /** @returns the accumulated output text */
         toString(): string;
         applyIndent(str: any): string;
     }

     /** Implemented by components that contribute animations to a USDZ export. */
     declare interface UsdzAnimation {
         createAnimation(ext: AnimationExtension, model: USDObject, context: any): any;
     }

     /** Implemented by components that emit behaviours during USDZ export; all hooks are optional. */
     export declare interface UsdzBehaviour {
         /** Called to create behaviours for the given model */
         createBehaviours?(ext: BehaviorExtension, model: USDObject, context: USDZExporterContext): void;
         /** Called before the USD document is created */
         beforeCreateDocument?(ext: BehaviorExtension, context: USDZExporterContext): void | Promise<void>;
         /** Called after the USD document has been created */
         afterCreateDocument?(ext: BehaviorExtension, context: USDZExporterContext): void | Promise<void>;
         /** Called after serialization has finished */
         afterSerialize?(ext: BehaviorExtension, context: USDZExporterContext): void;
     }

     /**
      * USDZExporter creates USDZ files and opens them in Apple QuickLook on iOS/iPadOS/visionOS.
      * Enables "View in AR" functionality for Apple devices directly from web experiences.
      *
      * **Key features:**
      * - Auto-exports animations and audio sources
      * - Interactive behaviors via Needle's "Everywhere Actions" system
      * - RealityKit physics support (iOS 18+, visionOS 1+)
      * - Custom QuickLook overlay with call-to-action buttons
      * - Progressive texture/mesh LOD handling
      *
      * [![](https://cloud.needle.tools/-/media/YyUz1lUVlOhEY4fWZ-oMsA.gif)](https://engine.needle.tools/samples/?overlay=samples&tag=usdz)
      *
      * **Automatic setup:**
      * - Creates QuickLook button on compatible devices
      * - Respects {@link XRFlag} for AR-specific visibility
      * - Handles {@link WebARSessionRoot} scale
      *
      * **Custom extensions:**
      * Add custom behaviors by implementing {@link IUSDExporterExtension} and adding to {@link extensions} array.
      *
      * **Debug:** Use `?debugusdz` URL parameter. Press 'T' to trigger export.
      *
      * @example Basic USDZ export
      * ```ts
      * const exporter = myObject.addComponent(USDZExporter);
      * exporter.objectToExport = productModel;
      * exporter.autoExportAnimations = true;
      * exporter.interactive = true; // Enable QuickLook behaviors
      *
      * // Trigger export
      * await exporter.exportAndOpen();
      * ```
      *
      * @example Custom branding
      * ```ts
      * exporter.customBranding = {
      *   callToAction: "Buy Now",
      *   checkoutTitle: "Product Name",
      *   callToActionURL: "https://shop.example.com"
      * };
      * ```
      *
      * @summary Export 3D objects as USDZ files for Apple QuickLook AR
      * @category XR
      * @group Components
      * @see {@link WebXR} for WebXR-based AR/VR
      * @see {@link WebARSessionRoot} for AR placement and scaling
      * @see {@link XRFlag} for AR-specific object visibility
      * @see {@link CustomBranding} for QuickLook overlay customization
      * @link https://engine.needle.tools/samples/?overlay=samples&tag=usdz
      */
     export declare class USDZExporter extends Component {
         /** Called before the USDZ file is exported */
         static readonly beforeExport: EventList<{
             readonly exporter: USDZExporter;
         }>;
         /** Called after the USDZ file has been exported */
         static readonly afterExport: EventList<{
             readonly exporter: USDZExporter;
         }>;
         /** Called before LOD levels are exported. Can be used to override the LOD export settings */
         static readonly beforeLODExport: EventList<BeforeLODExportArguments>;
         /**
          * Assign the object to export as USDZ file. If undefined or null, the whole scene will be exported.
          */
         objectToExport: Object3D | null | undefined;
         /** Collect all Animations/Animators automatically on export and emit them as playing at the start.
          * Animator state chains and loops will automatically be collected and exported in order as well.
          * If this setting is off, Animators need to be registered by components – for example from PlayAnimationOnClick.
          */
         autoExportAnimations: boolean;
         /** Collect all AudioSources automatically on export and emit them as playing at the start.
          * They will loop according to their settings.
          * If this setting is off, Audio Sources need to be registered by components – for example from PlayAudioOnClick.
          */
         autoExportAudioSources: boolean;
         /** Filename used for the exported USDZ file (NOTE(review): with-or-without extension not visible here — confirm) */
         exportFileName: string | null | undefined;
         /** When set, this file is used instead of exporting the scene (NOTE(review): presumed from name — confirm) */
         customUsdzFile: string | null | undefined;
         /** Customization of the QuickLook overlay — see {@link CustomBranding} */
         customBranding?: CustomBranding;
         /** AR anchoring type written to the USDZ file */
         anchoringType: "plane" | "image" | "face" | "none";
         /** Maximum texture resolution used during export */
         maxTextureSize: 256 | 512 | 1024 | 2048 | 4096 | 8192;
         /** Plane alignment used when {@link anchoringType} is "plane" */
         planeAnchoringAlignment: "horizontal" | "vertical" | "any";
         /** Enabling this option will export QuickLook-specific preliminary behaviours along with the USDZ files.
          * These extensions are only supported on QuickLook on iOS/visionOS/MacOS.
          * Keep this option off for general USDZ usage.
          */
         interactive: boolean;
         /** Enabling this option will export the USDZ file with RealityKit physics components.
          * Rigidbody and Collider components will be converted to their RealityKit counterparts.
          * Physics are supported on QuickLook in iOS 18+ and VisionOS 1+.
          * Physics export is automatically turned off when there are no Rigidbody components anywhere on the exported object.
          */
         physics: boolean;
         /** If false, no QuickLook button is created on compatible devices */
         allowCreateQuicklookButton: boolean;
         quickLookCompatible: boolean;
         /**
          * Extensions to add custom behaviors and interactions to the USDZ file.
          * You can add your own extensions here by extending {@link IUSDExporterExtension}.
          */
         extensions: IUSDExporterExtension[];
         private link;
         private button?;
         /* Excluded from this release type: start */
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         private onClickedOpenInARElement;
         /**
          * Creates an USDZ file from the current scene or assigned objectToExport and opens it in QuickLook.
          * Use the various public properties of USDZExporter to customize export behaviour.
          * @deprecated use {@link exportAndOpen} instead
          */
         exportAsync(): Promise<Blob | null>;
         /**
          * Creates an USDZ file from the current scene or assigned objectToExport and opens it in QuickLook.
          * @returns a Promise<Blob> containing the USDZ file
          */
         exportAndOpen(): Promise<Blob | null>;
         /**
          * Creates an USDZ file from the current scene or assigned objectToExport and opens it in QuickLook.
          * @returns a Promise<Blob> containing the USDZ file
          */
         export(objectToExport: Object3D | undefined): Promise<Blob | null>;
         private readonly _currentExportTasks;
         private _previousTimeScale;
         private internalExport;
         /**
          * Opens QuickLook on iOS/iPadOS/visionOS with the given content in AR mode.
          * @param content The URL to the .usdz or .reality file or a blob containing an USDZ file.
          * @param name Download filename
          */
         openInQuickLook(content: Blob | string, name: string): void;
         /**
          * Downloads the given blob as a file.
          */
         download(blob: Blob, name: string): void;
         private static save;
         private lastCallback?;
         private quicklookCallback;
         private buildQuicklookOverlay;
         private static invertForwardMatrix;
         private static invertForwardQuaternion;
         private _rootSessionRootWasAppliedTo;
         private _rootPositionBeforeExport;
         private _rootRotationBeforeExport;
         private _rootScaleBeforeExport;
         /** @returns the AR scale and target configuration, including the session root if one exists */
         getARScaleAndTarget(): {
             scale: number;
             _invertForward: boolean;
             target: Object3D;
             sessionRoot: Object3D | null;
         };
         private applyWebARSessionRoot;
         private revertWebARSessionRoot;
         private createQuicklookButton;
     }

     /** Shared state passed through a USDZ export: document, files, materials, textures and extensions. */
     export declare class USDZExporterContext {
         root?: Object3D;
         exporter: NeedleUSDZExporter;
         extensions: Array<IUSDExporterExtension>;
         quickLookCompatible: boolean;
         exportInvisible: boolean;
         materials: Map<string, Material>;
         textures: TextureMap;
         /** Files to pack into the USDZ archive, keyed by path */
         files: {
             [path: string]: Uint8Array | [Uint8Array, fflate.ZipOptions] | null | any;
         };
         document: USDDocument;
         /** The generated USD text output */
         output: string;
         animations: AnimationClip[];
         constructor(root: Object3D | null | undefined, exporter: NeedleUSDZExporter, options: {
             extensions?: Array<IUSDExporterExtension>;
             quickLookCompatible: boolean;
             exportInvisible: boolean;
         });
         /** Makes the given name safe for use in the USD output. */
         makeNameSafe(str: any): any;
     }

     /** Options controlling a USDZ export (AR anchoring, QuickLook compatibility, extensions, texture size). */
     declare type USDZExporterOptions = {
         ar: {
             anchoring: {
                 type: Anchoring;
             };
             planeAnchoring: {
                 alignment: Alignment;
             };
         };
         quickLookCompatible: boolean;
         extensions: Array<IUSDExporterExtension>;
         maxTextureSize: number;
         exportInvisible: boolean;
     };

     /** Text element serialized into a USD document via {@link writeTo}. */
     export declare class USDZText implements IBehaviorElement {
         static global_id: number;
         static getId(): number;
         id: string;
         /** The text content */
         content: string;
         font?: string[];
         pointSize: number;
         width?: number;
         height?: number;
         depth?: number;
         wrapMode?: TextWrapMode;
         horizontalAlignment?: HorizontalAlignment;
         verticalAlignment?: VerticalAlignment;
         material?: Material;
         /** Sets {@link depth} and returns this element for chaining. */
         setDepth(depth: number): USDZText;
         /** Sets {@link pointSize} and returns this element for chaining. */
         setPointSize(pointSize: number): USDZText;
         setHorizontalAlignment(align: HorizontalAlignment): this;
         setVerticalAlignment(align: VerticalAlignment): this;
         constructor(id: string);
         /** Writes this text element to the given writer. */
         writeTo(_document: USDDocument | undefined, writer: USDWriter): void;
     }

     /** USDZ exporter extension that processes UI objects during export. */
     export declare class USDZUIExtension implements IUSDExporterExtension {
         get extensionName(): string;
         onExportObject(object: Object3D, model: USDObject, _context: USDZExporterContext): void;
         private flipWindingOrder;
     }

     /* Excluded from this release type: useForAutoFit */

     /** Associates a user id with an object and the time it was last updated. */
     declare type UserCamInfo = {
         obj: Object3D;
         lastUpdate: number;
         userId: string;
     };

     /** Predicate used to filter users. */
     export declare type UserFilter = (user: object) => boolean;

     /** Model describing a user that joined or left a room. */
     export declare class UserJoinedOrLeftRoomModel {
         userId: string;
     }

     /**
      * Marks a field to trigger the `onValidate` callback when its value changes.
      * Useful for reacting to property changes from the editor or at runtime.
      *
      * Your component must implement `onValidate(property?: string)` to receive notifications.
      *
      * @param set Optional custom setter called before the value is assigned
      * @param get Optional custom getter called when the value is read
      * @returns A property decorator that installs the change notification
      *
      * @example Basic usage
      * ```ts
      * export class MyComponent extends Behaviour {
      *   @serializable()
      *   @validate()
      *   color: Color = new Color(1, 0, 0);
      *
      *   onValidate(property?: string) {
      *     if (property === "color") {
      *       console.log("Color changed to:", this.color);
      *     }
      *   }
      * }
      * ```
      * @example With custom setter
      * ```ts
      * @validate(function(value) {
      *   console.log("Setting speed to", value);
      * })
      * speed: number = 1;
      * ```
      */
     export declare const validate: (set?: setter, get?: getter) => (target: IComponent | any, propertyKey: string, descriptor?: undefined) => void;

     /** Result types a custom loader may return. */
     declare type ValidLoaderReturnType = CustomModel | Object3D | BufferGeometry;

     /** {@link DocumentAction} capturing an object's matrix/material/geometry variant with enable/disable action models. */
     export declare class VariantAction extends DocumentAction {
         constructor(obj: Object3D, matrix?: Matrix4, material?: Material, geometry?: BufferGeometry);
         private matrix;
         private material;
         private geometry;
         protected onApply(_: USDDocument): void;
         private _enableAction?;
         private _disableAction?;
         /** @returns the action model enabling this variant */
         enable(): ActionModel;
         /** @returns the action model disabling this variant */
         disable(): ActionModel;
     }

     /** Plain 2D vector (x, y). */
     export declare type Vec2 = {
         x: number;
         y: number;
     };

     /** Internal 2D vector class (x, y). */
     declare class Vec2_2 {
         x: number;
         y: number;
     }

     /** Internal 2D vector class (x, y). */
     declare class Vec2_3 {
         x: number;
         y: number;
     }

     /** Plain 3D vector (x, y, z). */
     export declare type Vec3 = {
         x: number;
         y: number;
         z: number;
     };

     /** Internal 3D vector class with common direction constants. */
     declare class Vec3_2 {
         x: number;
         y: number;
         z: number;
         constructor(x: number, y: number, z: number);
         static get up(): Vec3_2;
         static get right(): Vec3_2;
         static get forward(): Vec3_2;
         static get back(): Vec3_2;
         static get zero(): Vec3_2;
     }

     /** Plain 3D vector shape (x, y, z). */
     declare type Vec3_3 = {
         x: number;
         y: number;
         z: number;
     };

     /** Plain 4D vector (x, y, z, w). */
     export declare type Vec4 = {
         x: number;
         y: number;
         z: number;
         w: number;
     };

     /** Any three.js vector or quaternion type. */
     declare type Vector = Vector3 | Vector4 | Vector2 | Quaternion;

     /** Any three.js vector or quaternion type. */
     declare type Vector_2 = Vector2 | Vector3 | Vector4 | Quaternion;

     /** Particle system module applying linear and orbital velocity over a particle's lifetime. */
     export declare class VelocityOverLifetimeModule {
         /** Whether this module is applied */
         enabled: boolean;
         /** Simulation space the velocity is applied in */
         space: ParticleSystemSimulationSpace;
         orbitalX: MinMaxCurve;
         orbitalY: MinMaxCurve;
         orbitalZ: MinMaxCurve;
         orbitalXMultiplier: number;
         orbitalYMultiplier: number;
         orbitalZMultiplier: number;
         orbitalOffsetX: number;
         orbitalOffsetY: number;
         orbitalOffsetZ: number;
         speedModifier: MinMaxCurve;
         speedModifierMultiplier: number;
         x: MinMaxCurve;
         xMultiplier: number;
         y: MinMaxCurve;
         yMultiplier: number;
         z: MinMaxCurve;
         zMultiplier: number;
         private _system?;
         update(system: IParticleSystem): void;
         private _temp;
         private _temp2;
         private _temp3;
         private _hasOrbital;
         private _index;
         private _orbitalMatrix;
         init(particle: object): void;
         /** Applies this module's velocity contribution to a particle for the current step. */
         apply(_particle: object, _index: number, _pos: Vec3, vel: Vec3, _dt: number, age: number, life: number): void;
     }

     /** The version of the Needle engine (runtime version string) */
     export declare const VERSION: string;

     /** Vertical text alignment options. */
     declare enum VerticalAlignment {
         top = "top",
         middle = "middle",
         lowerMiddle = "lowerMiddle",
         baseline = "baseline",
         bottom = "bottom"
     }

     /**
      * [VerticalLayoutGroup](https://engine.needle.tools/docs/api/VerticalLayoutGroup) arranges child UI elements vertically with spacing, padding, and alignment options.
      * @category User Interface
      * @group Components
      */
     export declare class VerticalLayoutGroup extends HorizontalOrVerticalLayoutGroup {
         /** The axis this layout arranges children along (vertical for this group) */
         protected get primaryAxis(): Axis;
     }

     /** Vertical overflow behaviour for text. */
     declare enum VerticalWrapMode {
         Truncate = 0,
         Overflow = 1
     }

     /**
      * [VideoPlayer](https://engine.needle.tools/docs/api/VideoPlayer) plays video clips from URLs, media streams, or HLS playlists (m3u8 livestreams).
      *
      * **Supported formats:**
      * - Standard video files (MP4, WebM, etc.)
      * - Media streams (from webcam, screen capture, etc.)
      * - HLS playlists (m3u8) for livestreaming
      *
      * [![](https://cloud.needle.tools/-/media/w1uHur5yu3Ni7qFaKIFX-g.gif)](https://engine.needle.tools/samples/video-playback/)
      *
      * **Rendering modes:**
      * Video can be rendered to a material texture, render texture, or camera planes.
      * Set `targetMaterialRenderer` to apply video to a specific mesh's material.
      *
      * **Browser autoplay:**
      * Videos may require user interaction to play with audio.
      * Set `playOnAwake = true` for automatic playback (muted if needed).
      *
      * @example Basic video playback
      * ```ts
      * const videoPlayer = addComponent(obj, VideoPlayer, {
      *   url: "https://example.com/video.mp4",
      *   playOnAwake: true,
      *   loop: true,
      * });
      * ```
      *
      * @example Play video on a 3D surface
      * ```ts
      * const video = myScreen.getComponent(VideoPlayer);
      * video.targetMaterialRenderer = myScreen.getComponent(Renderer);
      * video.play();
      * ```
      *
      * @summary Plays video clips from URLs or streams
      * @category Multimedia
      * @group Components
      * @see {@link AudioSource} for audio-only playback
      * @see {@link ScreenCapture} for capturing and sharing video
      * @see {@link Renderer} for video texture targets
      */
     export declare class VideoPlayer extends Component {
         /**
          * When true the video will start playing as soon as the component is enabled
          */
         playOnAwake: boolean;
         /**
          * The aspect mode to use for the video.
          */
         aspectMode: AspectMode;
         private clip?;
         private source;
         /**
          * The video clip url to play.
          */
         get url(): string | null;
         /**
          * The video clip to play.
          */
         set url(val: string | null);
         private _url;
         private renderMode;
         private targetMaterialProperty?;
         private targetMaterialRenderer?;
         private targetTexture?;
         private time;
         private _playbackSpeed;
         /**
          * Get the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
          * @default 1
          */
         get playbackSpeed(): number;
         /**
          * Set the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
          */
         set playbackSpeed(val: number);
         private _isLooping;
         /** @returns true when the video is set to loop */
         get isLooping(): boolean;
         set isLooping(val: boolean);
         /**
          * @returns the current time of the video in seconds
          */
         get currentTime(): number;
         /**
          * set the current time of the video in seconds
          */
         set currentTime(val: number);
         /**
          * @returns true if the video is currently playing
          */
         get isPlaying(): boolean;
         /** The crossOrigin setting applied to the video element */
         get crossOrigin(): string | null;
         set crossOrigin(val: string | null);
         /**
          * the material that is used to render the video
          */
         get videoMaterial(): Material | null;
         /**
          * the video texture that is used to render the video
          */
         get videoTexture(): VideoTexture | null;
         /**
          * the HTMLVideoElement that is used to play the video
          */
         get videoElement(): HTMLVideoElement | null;
         /**
          * Request the browser to enter picture in picture mode
          * @link https://developer.mozilla.org/en-US/docs/Web/API/Picture-in-Picture_API
          * @returns the promise returned by the browser
          */
         requestPictureInPicture(): Promise<PictureInPictureWindow> | null;
         /**
          * @returns true if the video is muted
          */
         get muted(): boolean;
         /**
          * set the video to be muted
          */
         set muted(val: boolean);
         private _muted;
         /**
          * The current video clip that is being played
          */
         get currentVideo(): string | MediaStream | null | undefined;
         private set audioOutputMode(value);
         private get audioOutputMode();
         private _audioOutputMode;
         /** Set this to false to pause video playback while the tab is not active
          * @default true
          */
         playInBackground: boolean;
         private _crossOrigin;
         private _videoElement;
         private _videoTexture;
         private _videoMaterial;
         private _isPlaying;
         private wasPlaying;
         /** ensures the video element has been created and will start loading the clip */
         preloadVideo(): void;
         /** @deprecated use `preloadVideo()` */
         preload(): void;
         /** Set a new video stream
          * starts to play automatically if the videoplayer hasn't been active before and playOnAwake is true */
         setVideo(video: MediaStream): void;
         /** Assigns a new clip URL to play. */
         setClipURL(url: string): void;
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         private visibilityChanged;
         /* Excluded from this release type: onDestroy */
         private _receivedInput;
         /* Excluded from this release type: __constructor */
         private _playErrors;
         /** start playing the video source */
         play(): void;
         /**
          * Stop the video playback. This will reset the video to the beginning
          */
         stop(): void;
         /**
          * Pause the video playback
          */
         pause(): void;
         /** create the video element and assign the video source url or stream */
         create(playAutomatically: boolean): boolean;
         /** Re-evaluates and applies the video aspect ratio. */
         updateAspect(): void;
         private _overlay;
         /**
          * If true the video will be rendered in screenspace mode and overlayed on top of the scene.
          * Alternatively you can also request the video to be played in PictureInPicture mode by calling `requestPictureInPicture()`
          */
         get screenspace(): boolean;
         set screenspace(val: boolean);
         private _targetObjects;
         private createVideoElement;
         private handleBeginPlaying;
         private updateVideoElementSettings;
         private updateVideoElementStyles;
         private _updateAspectRoutineId;
         private updateAspectImpl;
         private get shouldUseM3U();
         private ensureM3UCanBePlayed;
         private _hls?;
         private onHlsAvailable;
     }

     /**
      * [ViewBox](https://engine.needle.tools/docs/api/ViewBox) automatically fits a defined box area into the camera view regardless of screen size or aspect ratio.
      * This component is useful for framing characters, objects, or scenes in the center of the view while ensuring they remain fully visible.
      * You can animate or scale the viewbox to create dynamic zoom effects, cinematic transitions, or responsive framing.
      *
      * [![](https://cloud.needle.tools/-/media/Thy6svVftsIC6Z_wIxUJMA.gif)](https://engine.needle.tools/samples/bike-scrollytelling-responsive-3d)
      *
      * The ViewBox component works by adjusting the camera's focus rect settings (offset and zoom) to ensure that the box defined by the
      * GameObject's position, rotation, and scale fits perfectly within the visible viewport. It supports different modes for one-time
      * fitting or continuous adjustment, making it versatile for both static compositions and animated sequences.
      *
      * **Key Features:**
      * - Automatically adjusts camera framing to fit the box area
      * - Works with any screen size and aspect ratio
      * - Supports one-time fitting or continuous updates
      * - Can be animated for dynamic zoom and framing effects
      * - Multiple ViewBoxes can be active, with the most recently enabled taking priority
      * - Handles camera positioning to ensure the box is visible (moves camera if inside the box)
      *
      * **Common Use Cases:**
      * - Character framing in cutscenes or dialogue
      * - Product showcases with guaranteed visibility
      * - Scrollytelling experiences with animated camera movements
      * - Responsive layouts that adapt to different screen sizes
      * - UI-driven camera transitions
      *
      * - [Example on needle.run](https://viewbox-demo-z23hmxbz2gkayo-z1nyzm6.needle.run/)
      * - [Scrollytelling Demo using animated Viewbox](https://scrollytelling-bike-z23hmxb2gnu5a.needle.run/)
      * - [Example on Stackblitz](https://stackblitz.com/edit/needle-engine-view-box-example)
      *
      * @example Basic setup - Add a ViewBox component to frame an object
      * ```ts
      * const viewBox = new Object3D();
      * viewBox.position.set(0, 1, 0); // Position the viewbox center
      * viewBox.scale.set(2, 2, 2);    // Define the box size
      * viewBox.addComponent(ViewBox, { debug: true });
      * scene.add(viewBox);
      * ```
      *
      * @example Animated ViewBox for zoom effects
      * ```ts
      * const viewBox = new Object3D();
      * viewBox.addComponent(ViewBox, { mode: "continuous" });
      * scene.add(viewBox);
      *
      * // Animate the viewbox scale over time
      * function update() {
      *   const scale = 1 + Math.sin(Date.now() * 0.001) * 0.5;
      *   viewBox.scale.setScalar(scale);
      * }
      * ```
      *
      * @example One-time fitting with user control afterwards
      * ```ts
      * const viewBox = new Object3D();
      * viewBox.addComponent(ViewBox, {
      *   mode: "once", // Fit once, then allow free camera control
      *   referenceFieldOfView: 60
      * });
      * scene.add(viewBox);
      * ```
      *
      * @see {@link CameraComponent} - The camera component that ViewBox controls
      * @see {@link OrbitControls} - Camera controls that work alongside ViewBox
      * @see {@link DragControls} - Alternative camera controls compatible with ViewBox
      * @see {@link SceneSwitcher} - Can be combined with ViewBox for scene transitions
      * @see {@link Context.setCameraFocusRect} - The underlying focus rect API used by ViewBox
      * @see {@link Context.focusRectSettings} - Manual control of focus rect settings
      * @see {@link ViewBoxMode} - The mode type for controlling ViewBox behavior
      *
      * @summary Automatically fits a box area into the camera view
      * @category Camera and Controls
      * @group Components
      * @component
      */
     export declare class ViewBox extends Component {
         /**
          * Array of all active ViewBox instances in the scene.
          * When multiple ViewBoxes are enabled, the last one in the array (most recently enabled) takes priority and controls the camera.
          * Other ViewBoxes remain registered but inactive, displayed with a dimmed gizmo color when debug visualization is enabled.
          */
         static readonly instances: ViewBox[];
         /**
          * The reference field of view (in degrees) used to calculate how the box should fit within the camera view.
          * This determines the baseline camera FOV for fitting calculations.
          *
          * **Behavior:**
          * - If set to `-1` (default), the component will automatically use the camera's FOV on the first frame
          * - Should typically match your camera's FOV for predictable framing
          * - Can be set to a different value to create specific framing effects
          *
          * **Example:**
          * If your camera has an FOV of 60° and you set `referenceFieldOfView` to 60, the ViewBox will fit objects
          * as they would appear with that field of view. Setting it to a wider FOV (e.g., 90) makes objects appear
          * smaller, while a narrower FOV (e.g., 30) makes them appear larger.
          *
          * @see {@link CameraComponent} for the camera component and its FOV property
          * @default -1 (automatically uses the camera's FOV on the first frame)
          */
         referenceFieldOfView: number;
         /**
          * Controls how the ViewBox applies camera adjustments.
          *
          * **Modes:**
          * - `"once"`: Applies the framing adjustment once when the ViewBox becomes active, then stops updating.
          *   This is ideal when you want to frame the view initially but allow users to freely zoom, pan, or orbit afterwards.
          *   Perfect for interactive scenes where you want a good starting view but full user control.
          *
          * - `"continuous"`: Continuously updates the camera framing while this ViewBox is active.
          *   Use this when animating or scaling the ViewBox over time, or when you need the framing to constantly adjust.
          *   Great for cutscenes, scrollytelling, or any scenario with animated ViewBoxes.
          *
          * **Example Use Cases:**
          * - Set to `"once"` for: Initial scene framing, product showcases where users explore freely after initial framing
          * - Set to `"continuous"` for: Animated zoom effects, scrollytelling sequences, dynamic camera movements tied to ViewBox transforms
          *
          * @see {@link ViewBoxMode} for the type definition
          * @default "continuous"
          */
         get mode(): ViewBoxMode;
         set mode(v: ViewBoxMode);
         /** Backing field for {@link mode}. */
         private _mode;
         /**
          * Enables debug visualization and logging for this ViewBox instance.
          *
          * **When enabled, you will see:**
          * - A yellow wireframe box showing the active ViewBox bounds in 3D space
          * - Gray wireframe boxes for inactive ViewBox instances
          * - A red dashed outline on screen showing the projected box in 2D (when using `?debugviewbox` URL parameter)
          * - Console logs for mode changes, FOV settings, and camera adjustments
          *
          * **Tip:** You can also enable debug visualization globally for all ViewBoxes by adding `?debugviewbox` to your URL.
          *
          * @see {@link Gizmos} for the gizmo rendering system used for debug visualization
          * @default false
          */
         debug: boolean;
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         /** Presumably unregisters the per-frame update callback registered while this ViewBox is active — implementation not visible, TODO confirm. */
         private removeUpdateCallback;
         /** Shared scratch matrix — presumably reused during fitting math to avoid per-call allocations. */
         private static readonly _tempProjectionMatrix;
         /** Shared scratch matrix (inverse) — presumably reused during fitting math to avoid per-call allocations. */
         private static readonly _tempProjectionMatrixInverse;
         /** NOTE(review): looks like it counts how often the fit has been applied (relevant for `"once"` mode) — confirm against implementation. */
         private _applyCount;
         /** Internal per-frame update that performs the framing adjustment — implementation not visible. */
         private internalUpdate;
         /**
          * Cover fit: computes the camera focus rect (offset/zoom) so the box fits within the view — implementation not visible.
          */
         private fit;
         /** Presumably projects the box into camera/screen space for fitting and for the debug visualization. */
         private projectBoxIntoCamera;
         /** NOTE(review): likely the DOM element used for the red dashed 2D debug outline (see {@link debug}) — confirm. */
         private _projectedBoxElement;
     }

     /**
      * Defines how the {@link ViewBox} component applies camera framing adjustments.
      *
      * - `"continuous"`: Camera framing is continuously updated while the ViewBox is active. Use for animated or dynamic ViewBoxes.
      * - `"once"`: Camera framing is applied once when the ViewBox becomes active, then updates stop. Use for initial framing with subsequent user control.
      *
      * @see {@link ViewBox.mode} — defaults to `"continuous"`
      */
     export declare type ViewBoxMode = "continuous" | "once";

     /**
      * Describes the kind of device a user is viewing the experience on.
      * NOTE(review): member semantics inferred from the value names — confirm against usage.
      */
     export declare enum ViewDevice {
         /** A regular 2D browser (no XR device). */
         Browser = "browser",
         /** A head-mounted XR device (VR/AR headset). */
         Headset = "headset",
         /** A handheld device such as a phone or tablet. */
         Handheld = "handheld"
     }

     /**
      * [Vignette](https://engine.needle.tools/docs/api/Vignette) darkens the edges of the rendered scene to draw attention to the center.
      * This effect simulates the natural vignetting that occurs in photography and cinematography, where the corners of an image are darker than the center.
      * It can be used to enhance the visual focus on the main subject of the scene and create a more immersive viewing experience.
      * @summary Vignette Post-Processing Effect
      * @category Effects
      * @group Components
      */
     export declare class Vignette extends PostProcessingEffect {
         /** Unique type name of this effect. */
         get typeName(): string;
         /** Vignette color parameter. */
         color: VolumeParameter;
         /** Vignette intensity (darkening strength) parameter. */
         intensity: VolumeParameter;
         /** Vignette center parameter. */
         center: VolumeParameter;
         /** Initializes the effect's parameters — implementation not visible. */
         init(): void;
         /** Creates the underlying postprocessing effect instance(s). */
         onCreateEffect(): EffectProviderResult;
         /** Presumably pushes the current intensity into the underlying effect's darkness setting — confirm against implementation. */
         private updateDarkness;
     }

     /**
      * Action to show or hide an object.
      * Use together with a {@link TapGestureTrigger} to show or hide objects when tapped or clicked.
      *
      * @see {@link TapGestureTrigger} to trigger actions on tap/click
      * @see {@link SetActiveOnClick} for a combined trigger and action component
      * @see [Everywhere Actions](https://engine.needle.tools/docs/everywhere-actions)
      * @summary Hides or shows the object when clicked
      * @category Everywhere Actions
      * @group Components
      */
     export declare class VisibilityAction extends PreliminaryAction {
         /** The type of visibility action to apply. */
         type: VisibilityActionType;
         /** The duration of the fade animation in seconds. */
         duration: number;
         /** @returns `"show"` or `"hide"`, depending on {@link type}. */
         getType(): "show" | "hide";
         /** @returns the fade duration in seconds (see {@link duration}). */
         getDuration(): number;
     }

     /* Excluded from this release type: VisibilityActionMotionType */

     /** Kind of visibility change applied by {@link VisibilityAction}. */
     declare enum VisibilityActionType {
         /** Make the object visible. */
         Show = 0,
         /** Hide the object. */
         Hide = 1
     }

     /* Excluded from this release type: VisibilityMode */

     /**
      * [Voip](https://engine.needle.tools/docs/api/Voip) Voice over IP (VoIP) component for real-time audio communication between users.
      * Allows sending and receiving audio streams in networked rooms.
      *
      * **Requirements:**
      * - Active network connection (via {@link SyncedRoom} or manual connection)
      * - User permission for microphone access (requested automatically)
      * - HTTPS connection (required for WebRTC)
      *
      * **Features:**
      * - Automatic connection when joining rooms (`autoConnect`)
      * - Background audio support (`runInBackground`)
      * - Optional UI toggle button (`createMenuButton`)
      * - Mute/unmute control
      *
      * **Debug:** Use `?debugvoip` URL parameter or set `debug = true` for logging.
      * Press 'v' to toggle mute, 'c' to connect/disconnect when debugging.
      *
      * @example Enable VoIP in your scene
      * ```ts
      * const voip = myObject.addComponent(Voip);
      * voip.autoConnect = true;
      * voip.createMenuButton = true;
      *
      * // Manual control
      * voip.connect();    // Start sending audio
      * voip.disconnect(); // Stop sending
      * voip.setMuted(true); // Mute microphone
      * ```
      *
      * @summary Voice over IP for networked audio communication
      * @category Networking
      * @group Components
      * @see {@link SyncedRoom} for room management
      * @see {@link NetworkedStreams} for the underlying streaming
      * @see {@link ScreenCapture} for video streaming
      * @link https://engine.needle.tools/docs/networking.html
      */
     export declare class Voip extends Component {
         /** When enabled, VoIP will start when a room is joined or when this component is enabled while already in a room.
          * @default true
          */
         autoConnect: boolean;
         /**
          * When enabled, VoIP will stay connected even when the browser tab is not focused/active anymore.
          * @default true
          */
         runInBackground: boolean;
         /**
          * When enabled, a menu button will be created to allow the user to toggle VoIP on and off.
          */
         createMenuButton: boolean;
         /**
          * When enabled debug messages will be printed to the console. This is useful for debugging audio issues. You can also append ?debugvoip to the URL to enable this.
          */
         debug: boolean;
         /** NOTE(review): presumably the underlying networked-streams handle (see {@link NetworkedStreams} in the class docs) — confirm. */
         private _net?;
         /** The button created when {@link createMenuButton} is enabled — implementation not visible. */
         private _menubutton?;
         /* Excluded from this release type: awake */
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         /* Excluded from this release type: onDestroy */
         /** Set via the mic button (e.g. when the websocket connection closes and rejoins but the user was muted before we don't want to enable VOIP again automatically) */
         private _allowSending;
         /** The outgoing microphone stream while sending — implementation not visible. */
         private _outputStream;
         /**
          * @returns true if the component is currently sending audio
          */
         get isSending(): boolean;
         /** Start sending audio.
          * @param audioSource optional constraints applied when requesting the microphone
          * @returns presumably resolves to true when sending started successfully — confirm against implementation
          */
         connect(audioSource?: MediaTrackConstraints): Promise<boolean>;
         /** Stop sending audio (muting your own microphone)
          * @param opts NOTE(review): `remember` presumably persists the muted state across reconnects (see {@link _allowSending}) — confirm
          */
         disconnect(opts?: {
             remember: boolean;
         }): void;
         /**
          * Mute or unmute the audio stream (this will only mute incoming streams and not mute your own microphone. Use disconnect() to mute your own microphone)
          */
         setMuted(mute: boolean): void;
         /** Returns true if the audio stream is currently muted */
         get isMuted(): boolean;
         /** Presumably syncs the menu button label/state with the current connection state. */
         private updateButton;
         /** @deprecated */
         getFrequency(_userId: string | null): number | null;
         /** Presumably acquires the microphone MediaStream — implementation not visible. */
         private getAudioStream;
         /** Network event handler: invoked when the local user joined a room. */
         private onJoinedRoom;
         /** Network event handler: invoked when the local user left a room. */
         private onLeftRoom;
         /** Incoming audio streams of remote users — implementation not visible. */
         private _incomingStreams;
         /** Invoked when a remote audio stream is received. */
         private onReceiveStream;
         /** Invoked when a remote audio stream has ended. */
         private onStreamEnded;
         /** Invoked when this component's enabled state changes. */
         private onEnabledChanged;
         /** Invoked when the browser tab visibility changes (see {@link runInBackground}). */
         private onVisibilityChanged;
     }

     /** [Volume](https://engine.needle.tools/docs/api/Volume) The Volume/PostprocessingManager component is responsible for managing post processing effects.
      * Add this component to any object in your scene to enable post processing effects.
      *
      * @example Add bloom
      * ```ts
      * const volume = new Volume();
      * volume.addEffect(new BloomEffect({
      *   intensity: 3,
      *   luminanceThreshold: .2
      * }));
      * gameObject.addComponent(volume);
      * ```
      *
      * @example Remove bloom
      * ```ts
      * volume.removeEffect(bloom);
      * ```
      *
      * @example Add pixelation
      * ```ts
      * const pixelation = new PixelationEffect();
      * pixelation.granularity.value = 10;
      * volume.addEffect(pixelation);
      * ```
      *
      * @summary Manage Post-Processing Effects
      * @category Rendering
      * @category Effects
      * @group Components
      */
     declare class Volume extends Component implements IEditorModification, IPostProcessingManager {
         /** Marks this component as a postprocessing manager (see {@link IPostProcessingManager}). */
         get isPostProcessingManager(): boolean;
         /** Currently active postprocessing effects */
         get effects(): PostProcessingEffect[];
         /** When true, the post processing effects are scheduled to be re-applied (see {@link markDirty}). */
         get dirty(): boolean;
         set dirty(value: boolean);
         /** Optional shared profile providing effect settings — NOTE(review): exact semantics not visible from this declaration; confirm. */
         sharedProfile?: VolumeProfile;
         /**
          * Set multisampling to "auto" to automatically adjust the multisampling level based on performance.
          * Set to a number to manually set the multisampling level.
          * @default "auto"
          * @min 0
          * @max renderer.capabilities.maxSamples
          */
         multisampling: "auto" | number;
         /** When enabled, the device pixel ratio will be gradually reduced when FPS is low
          * and restored when performance recovers. This helps maintain smooth frame rates
          * on devices where full retina resolution is too expensive for postprocessing.
          * Disable this if you need a fixed resolution and prefer consistent quality over frame rate.
          * @default true
          */
         adaptiveResolution: boolean;
         /**
          * Add a post processing effect to the stack and schedules the effect stack to be re-created.
          */
         addEffect<T extends PostProcessingEffect | Effect>(effect: T & {
             order?: number;
         }): T;
         /**
          * Remove a post processing effect from the stack and schedules the effect stack to be re-created.
          */
         removeEffect<T extends PostProcessingEffect | Effect>(effect: T): T;
         /** Handle to the underlying postprocessing state/composer — implementation not visible. */
         private _postprocessing?;
         /** Effects currently applied to the composer — implementation not visible. */
         private readonly _activeEffects;
         /** All effects registered on this volume (see {@link effects}). */
         private readonly _effects;
         /**
          * When dirty the post processing effects will be re-applied
          */
         markDirty(): void;
         /* Excluded from this release type: awake */
         /** Time when this component was enabled — presumably used by the "auto" multisampling heuristics. */
         private _componentEnabledTime;
         /** Time of the last automatic multisampling change ("auto" mode) — implementation not visible. */
         private _multisampleAutoChangeTime;
         /** Time of the last automatic multisampling decrease ("auto" mode) — implementation not visible. */
         private _multisampleAutoDecreaseTime;
         /* Excluded from this release type: onEnable */
         /* Excluded from this release type: onDisable */
         /* Excluded from this release type: onBeforeRender */
         /* Excluded from this release type: onDestroy */
         /** Time of the last stack re-apply — presumably used to detect rapid re-applies. */
         private _lastApplyTime?;
         /** Counts re-applies happening in quick succession — implementation not visible. */
         private _rapidApplyCount;
         /** Backing field for {@link dirty}. */
         private _isDirty;
         /** Re-creates/applies the effect stack — implementation not visible. */
         private apply;
         /** Work queued to run after {@link apply} — implementation not visible. */
         private _applyPostQueue;
         /** called from needle editor sync package if its active */
         onEditorModification(modification: EditorModification): void | boolean | undefined;
         /** Queue of pending editor modifications — implementation not visible. */
         private _modificationQueue?;
         /** Handle/id of a scheduled re-create (see {@link scheduleRecreate}). */
         private _recreateId;
         /** Schedules the effect stack to be re-created on a later frame. */
         private scheduleRecreate;
     }
     export { Volume as PostProcessingManager }
     export { Volume }

     /**
      * Holds a single parameter value of a post processing effect (e.g. the fields of {@link Vignette}).
      * Supports a default value, an override state and change notification via {@link onValueChanged}.
      */
     export declare class VolumeParameter {
         /** Type marker — presumably used for runtime type checks. */
         readonly isVolumeParameter = true;
         /** @param value optional initial value */
         constructor(value?: any);
         /** Backing field for {@link isInitialized}. */
         private _isInitialized;
         /** True once {@link initialize} has been called. */
         get isInitialized(): boolean;
         /** Initialize the parameter with a value — NOTE(review): relation to the constructor value not visible from this declaration; confirm. */
         initialize(value?: any): void;
         /** Whether this parameter currently overrides its default value — NOTE(review): semantics inferred from the name; confirm. */
         get overrideState(): boolean;
         set overrideState(val: boolean);
         /** Backing field presumably tied to {@link overrideState} — implementation not visible. */
         private _active;
         /** The current value. Per the callback docs below, assignments presumably run {@link valueProcessor} and notify {@link onValueChanged}. */
         get value(): any;
         set value(val: any);
         /** Backing field for {@link value} (processed). */
         private _value;
         /** Presumably the raw, unprocessed value as assigned — confirm. */
         private _valueRaw?;
         /** Fallback value used when no override is active — NOTE(review): inferred; confirm. */
         set defaultValue(val: any);
         /** Backing field for {@link defaultValue}. */
         private _defaultValue;
         /** enforce the value to be set and onValueChanged to be called if assigned */
         __init(): void;
         /** called to modify a changing value before it is saved */
         valueProcessor: VolumeParameterValueProcessor | undefined;
         /** called when a value has changed (with the final value) */
         onValueChanged: VolumeParameterChangedEvent | undefined;
         /** Applies {@link valueProcessor} to an incoming value — implementation not visible. */
         private processValue;
         /** Checks whether a newly assigned value differs from the stored value — implementation not visible. */
         private testIfValueChanged;
     }

     /**
      * Callback invoked when a {@link VolumeParameter} value has changed.
      * @param newValue the new (final) value
      * @param oldValue the previous value
      * @param parameter the parameter whose value changed
      */
     export declare type VolumeParameterChangedEvent = (newValue: any, oldValue: any, parameter: VolumeParameter) => void;

     /**
      * Callback used to transform a {@link VolumeParameter} value before it is saved
      * (see {@link VolumeParameter.valueProcessor}).
      */
     export declare type VolumeParameterValueProcessor = (value: any) => any;

     /* Excluded from this release type: VolumeProfile */

     /** Wait for a number of frames to pass.
      * @param frames the number of frames to wait before the coroutine resumes
      * @returns Generator function — use with `yield*` inside a coroutine
      * @example
      * ```typescript
      * function *myCoroutineFunction() {
      *    yield* WaitForFrames(10);
      * }
      * ```
      */
     export declare function WaitForFrames(frames: number): Generator<undefined, void, unknown>;

     /** Wait for a promise to resolve.
      * @param promise the promise to await before the coroutine resumes
      * @returns Generator function — use with `yield*` inside a coroutine
      * @example
      * ```typescript
      * function *myCoroutineFunction() {
      *   yield* WaitForPromise(fetch("https://jsonplaceholder.typicode.com/todos/1"));
      * }
      * ```
      */
     export declare function WaitForPromise(promise: Promise<any>): Generator<undefined, void, unknown>;

     /** Wait for a number of seconds to pass.
      * @param seconds the number of seconds to wait before the coroutine resumes
      * @param context optional context providing the time source — NOTE(review): presumably defaults to the current context; confirm
      * @returns Generator function — use with `yield*` inside a coroutine
      * @example
      * ```typescript
      * function *myCoroutineFunction() {
      *   yield* WaitForSeconds(1);
      * }
      * ```
      */
     export declare function WaitForSeconds(seconds: number, context?: Context | null): Generator<undefined, void, unknown>;

     /**
      * Watches one or more properties of an object for write access.
      * NOTE(review): presumably instruments the property setters so registered
      * {@link WriteCallback}s fire whenever a watched property is assigned — implementation not visible.
      */
     export declare class Watch implements IWatch {
         /** Internal bookkeeping for the watched properties. */
         private readonly _watches;
         /**
          * @param object the object whose properties should be watched
          * @param str the property name (or names) to watch
          */
         constructor(object: object, str: string[] | string);
         /** Register a callback that is invoked when a watched property is written to. */
         subscribeWrite(callback: WriteCallback): void;
         /** Remove a previously registered write callback. */
         unsubscribeWrite(callback: WriteCallback): void;
         /** Presumably (re-)installs the property instrumentation — implementation not visible. */
         apply(): void;
         /** Presumably restores the original properties, undoing {@link apply} — implementation not visible. */
         revoke(): void;
         /** Releases resources and callbacks held by this watch. */
         dispose(): void;
     }

     /** Subscribe to an object being written to.
      * Currently supporting Vector3.
      * @param vec the vector object to observe
      * @param cb callback invoked when the object is written to
      * @returns presumably true if the watch could be established — confirm against implementation
      */
     export declare function watchWrite(vec: Vector_2, cb: Function): boolean;

     /**
      * WebARCameraBackground is a component that allows to display the camera feed as a background in an AR session to more easily blend the real world with the virtual world or applying effects to the camera feed.
      *
      * This component automatically requests `camera-access` permission when entering AR mode, which is required to:
      * - Display the real-world camera feed as a background
      * - Include the camera feed in AR screenshots taken with {@link screenshot2}
      *
      * **Note**: If you want to take AR screenshots with the camera feed but don't need to display it as a background,
      * you can still add this component to your scene (it will request camera access) or manually request the
      * `camera-access` feature in your `onBeforeXR` method.
      *
      * - Example: https://samples.needle.tools/ar-camera-background
      *
      * @summary Displays the camera feed as background in WebAR sessions
      * @category XR
      * @group Components
      * @see {@link screenshot2} for taking screenshots in AR (requires camera access for camera feed compositing)
      */
     export declare class WebARCameraBackground extends Component {
         /* Excluded from this release type: onBeforeXR */
         /* Excluded from this release type: onEnterXR */
         /* Excluded from this release type: onLeaveXR */
         /**
          * The tint color of the camera feed
          */
         backgroundTint: RGBAColor;
         /** The mesh displaying the camera feed — NOTE(review): presumably undefined until an AR session with camera access is active; confirm. */
         get background(): Mesh<BufferGeometry<NormalBufferAttributes>, Material | Material[], Object3DEventMap> | undefined;
         /** Backing field for {@link background}. */
         private backgroundPlane?;
         /** Texture holding the camera image — implementation not visible. */
         private threeTexture?;
         /** Presumably forces (re-)initialization/upload of the camera texture — implementation not visible. */
         private forceTextureInitialization;
         /* Excluded from this release type: preRender */
         /* Excluded from this release type: onBeforeRender */
         /** Presumably copies the current XR frame's camera image into the background texture. */
         private updateFromFrame;
         /** Assigns the texture displayed on the background plane. */
         setTexture(texture: Texture): void;
     }

     /**
      * The WebARSessionRoot is the root object for a WebAR session and used to place the scene in AR.
      * It is also responsible for scaling the user in AR and to define the center of the AR scene.
      * If not present in the scene it will be created automatically by the WebXR component when entering an AR session.
      *
      * **Note**: If the WebXR component {@link WebXR.autoCenter} option is enabled the scene will be automatically centered based on the content in the scene.
      *
      * @example Callback when the scene has been placed in AR:
      * ```ts
      * WebARSessionRoot.onPlaced((args) => {
      *    console.log("Scene has been placed in AR");
      * });
      * ```
      *
      * @summary Root object for WebAR sessions, managing scene placement and user scaling in AR.
      * @category XR
      * @group Components
      */
     export declare class WebARSessionRoot extends Component {
         /** Listeners registered via {@link onPlaced}. */
         private static _eventListeners;
         /**
          * Event that is called when the scene has been placed in AR.
          * @param cb the callback that is called when the scene has been placed
          * @returns a function to remove the event listener
          */
         static onPlaced(cb: (args: {
             instance: WebARSessionRoot;
         }) => void): () => void;
         /** Backing field for {@link hasPlaced}. */
         private static _hasPlaced;
         /**
          * @returns true if the scene has been placed in AR by the user or automatic placement
          */
         static get hasPlaced(): boolean;
         /** The scale of the user in AR.
          * **NOTE**: a large value makes the scene appear smaller
          * @default 1
          */
         get arScale(): number;
         set arScale(val: number);
         /** Backing field for {@link arScale}. */
         private _arScale;
         /** When enabled the placed scene forward direction will point towards the XRRig
          * @deprecated
          * @default false
          */
         invertForward: boolean;
         /** When assigned this asset will be loaded and visualize the placement while in AR
          * @default null
          */
         customReticle?: AssetReference;
         /** Enable touch transform to translate, rotate and scale the scene in AR with multitouch
          * @default true
          */
         arTouchTransform: boolean;
         /** When enabled the scene will be placed automatically when a point in the real world is found
          * @default false
          */
         autoPlace: boolean;
         /** When enabled the scene center will be automatically calculated from the content in the scene */
         autoCenter: boolean;
         /** Experimental: When enabled we will create a XR anchor for the scene placement
          * and make sure the scene is at that anchored point during a XR session
          * @default false
          **/
         useXRAnchor: boolean;
         /** true if we're currently placing the scene */
         private _isPlacing;
         /** This is the world matrix of the ar session root when entering webxr
          * it is applied when the scene has been placed (e.g. if the session root is x:10, z:10 we want this position to be the center of the scene)
          */
         private readonly _startOffset;
         /** Placement helper object created during the session — implementation not visible. */
         private _createdPlacementObject;
         /** Components temporarily reparented during placement — implementation not visible. */
         private readonly _reparentedComponents;
         /** Scene used while visualizing placement — implementation not visible. */
         private readonly _placementScene;
         /** the reticles used for placement */
         private readonly _reticle;
         /** needs to be in sync with the reticles */
         private readonly _hits;
         /** Time when placement started — implementation not visible. */
         private _placementStartTime;
         /** NOTE(review): presumably a rig matrix captured for placement — confirm against implementation. */
         private _rigPlacementMatrix?;
         /** if useAnchor is enabled this is the anchor we have created on placing the scene using the placement hit */
         private _anchor;
         /** user input is used for ar touch transform */
         private userInput?;
         /** Engine callback: invoked when this component becomes enabled. */
         onEnable(): void;
         /** @returns whether this component supports the given XR session mode — presumably AR only; confirm. */
         supportsXR(mode: XRSessionMode): boolean;
         /** Engine XR callback: invoked when an XR session starts. */
         onEnterXR(_args: NeedleXREventArgs): void;
         /** Engine XR callback: invoked when the XR session ends. */
         onLeaveXR(): void;
         /** Engine XR callback: invoked every XR frame while a session is active. */
         onUpdateXR(args: NeedleXREventArgs): void;
         /** Presumably updates the placement reticle(s) from the latest hit-test results. */
         private updateReticleAndHits;
         /** Invoked to place the scene at the current hit position — implementation not visible. */
         private onPlaceScene;
         /** Presumably applies {@link arScale} to the session/scene — implementation not visible. */
         private onSetScale;
         /** Reverts scene changes made for the AR session (e.g. reparenting) — implementation not visible. */
         private onRevertSceneChanges;
         /** Creates the XRAnchor for the placement when {@link useXRAnchor} is enabled. */
         private onCreateAnchor;
         /** Scratch vector — presumably reused to avoid allocations. */
         private upVec;
         /** Scratch vector — presumably reused to avoid allocations. */
         private lookPoint;
         /** Scratch vector — presumably reused to avoid allocations. */
         private worldUpVec;
         /** Presumably orients the placed content relative to the viewer — implementation not visible. */
         private applyViewBasedTransform;
         /** Presumably applies an anchored/placement pose to the session root. */
         private onApplyPose;
     }

     /** Union of payload types accepted for sending over the networking websocket — NOTE(review): inferred from the name; confirm against the networking implementation. */
     declare type WebsocketSendType = IModel | object | boolean | null | string | number;

     /**
      * Use the [WebXR](https://engine.needle.tools/docs/api/WebXR) component to enable VR and AR on **iOS and Android** in your scene. VisionOS support is also provided via QuickLook USDZ export.
      *
      * The WebXR component is a simple to use wrapper around the {@link NeedleXRSession} API and adds some additional features like creating buttons for AR, VR, enabling default movement behaviour ({@link XRControllerMovement}) and controller rendering ({@link XRControllerModel}), as well as handling AR placement and Quicklook USDZ export.
      *
      * ![](https://cloud.needle.tools/-/media/gcj_YoSns8FivafQRiCiOQ.gif)
      *
      *
      * @example Enable VR and AR support using code
      * ```ts
      * import { onStart, WebXR } from "@needle-tools/engine";
      * onStart(context => {
      *    const webxr = context.scene.addComponent(WebXR, { createVRButton: true, createARButton: true });
      * });
      * ```
      *
      * @example Customize VR movement
      * ```ts
      * import { onStart, WebXR } from "@needle-tools/engine";
      * onStart(context => {
      *   const webxr = context.scene.addComponent(WebXR, { createVRButton: true });
      *   const movement = webxr.setDefaultMovementEnabled(true);
      *   if (movement) {
      *    movement.enableTeleport = false; // disable teleport, only use smooth locomotion
      *    movement.smoothMovementSpeed = 2; // increase speed
      *    // NOTE: you can also disable default movement and write your own movement component (or derive and extend the {@link XRControllerMovement} class)
      *  }
      * });
      * ```
      *
      *
      * @example Start AR session with placement reticle and touch to place and adjust the scene
      * ```ts
      * import { onStart, WebXR } from "@needle-tools/engine";
      * onStart(context => {
      *  const webxr = context.scene.addComponent(WebXR);
      *  webxr.autoPlace = false; // disable auto placement, we want the user to tap to place the scene
      *  webxr.usePlacementReticle = true; // show the placement reticle to help the user find surfaces to place the scene
      *  webxr.usePlacementAdjustment = true; // allow the user to adjust the position, rotation and scale of the scene with touch after placing
      *  webxr.arScale = 2; // set the initial scale of the scene in AR. Larger values make the scene appear smaller in AR.
      *  webxr.enterAR(); // start AR session
      * });
      * ```
      *
      * @summary WebXR Component for VR and AR support
      * @category XR
      * @group Components
      * @see {@link NeedleXRSession} for low-level XR API
      * @see {@link XRControllerMovement} for VR locomotion
      * @see {@link WebARSessionRoot} for AR session configuration and an AR content placement event
      * @see {@link Avatar} for networked user avatars
      * @see {@link screenshot2} for taking screenshots in XR (including AR camera feed compositing)
      * @link https://engine.needle.tools/docs/xr.html
      * @link https://engine.needle.tools/samples/?overlay=samples&tag=xr
      * @link https://engine.needle.tools/samples/collaborative-sandbox
      */
      export declare class WebXR extends Component {
          /**
           * When enabled, a button will be automatically added to {@link NeedleMenu} that allows users to enter VR mode.
           */
          createVRButton: boolean;
          /**
           * When enabled, a button will be automatically added to {@link NeedleMenu} that allows users to enter AR mode.
           */
          createARButton: boolean;
          /**
           * When enabled, a button to send the experience to an Oculus Quest will be shown if the current device does not support VR.
           * This helps direct users to compatible devices for optimal VR experiences.
           */
          createSendToQuestButton: boolean;
          /**
           * When enabled, a QR code will be generated and displayed on desktop devices to allow easy opening of the experience on mobile devices.
           */
          createQRCode: boolean;
          /**
           * When enabled, default movement controls will be automatically added to the scene when entering VR.
           * This includes teleportation and smooth locomotion options for VR controllers.
           */
          useDefaultControls: boolean;
          /**
           * When enabled, 3D models representing the user's VR controllers will be automatically created and rendered in the scene.
           */
          showControllerModels: boolean;
          /**
           * When enabled, 3D models representing the user's hands will be automatically created and rendered when hand tracking is available.
           */
          showHandModels: boolean;
          /**
           * When enabled, a reticle will be displayed to help place the scene in AR. The user must tap on a detected surface to position the scene.
           */
          usePlacementReticle: boolean;
          /**
           * Optional custom 3D object to use as the AR placement reticle instead of the default one.
           */
          customARPlacementReticle?: AssetReference;
          /**
           * When enabled, users can adjust the position, rotation, and scale of the AR scene with one or two fingers after initial placement.
           */
          usePlacementAdjustment: boolean;
          /**
           * Determines the scale of the user relative to the scene in AR. Larger values make the 3D content appear smaller.
           * Only applies when `usePlacementReticle` is enabled.
           */
          arScale: number;
          /**
           * When enabled, an XRAnchor will be created for the AR scene and its position will be regularly updated to match the anchor.
           * This can help with spatial persistence in AR experiences.
           * @experimental
           */
          useXRAnchor: boolean;
          /**
           * When enabled, the scene will be automatically placed as soon as a suitable surface is detected in AR,
           * without requiring the user to tap to confirm placement.
           */
          autoPlace: boolean;
          /**
           * When enabled, the AR session root center will be automatically adjusted to place the center of the scene.
           * This helps ensure the scene is properly aligned with detected surfaces.
           *
           * **Note**: This option overrides the placement of the {@link WebARSessionRoot} component if both are used.
           */
          autoCenter: boolean;
          /**
           * When enabled, a USDZExporter component will be automatically added to the scene if none is found.
           * This allows iOS and visionOS devices to view 3D content using Apple's AR QuickLook.
           */
          useQuicklookExport: boolean;
          /**
           * When enabled, the 'depth-sensing' WebXR feature will be requested to provide real-time depth occlusion.
           * Currently only supported on Oculus Quest devices.
           * @see https://developer.mozilla.org/en-US/docs/Web/API/XRDepthInformation
           * @experimental
           */
          useDepthSensing: boolean;
          /**
           * When enabled, a {@link SpatialGrabRaycaster} will be added or enabled in the scene,
           * allowing users to interact with objects at a distance in VR/AR.
           * @default true
           */
          useSpatialGrab: boolean;
          /**
           * Specifies the avatar representation that will be created when entering a WebXR session.
           * Can be a reference to a 3D model or a boolean to use the default avatar.
           */
          defaultAvatar?: AssetReference | boolean;
          // Presumably handles networked synchronization of the local avatar (see createLocalAvatar) — TODO confirm
          private _playerSync?;
          /** these components were created by the WebXR component on session start and will be cleaned up again in session end */
          private readonly _createdComponentsInSession;
          // USDZExporter created when `useQuicklookExport` is enabled and none exists in the scene — presumably cached here; verify
          private _usdzExporter?;
          /** The WebXR component that currently drives the active XR session, or null when no session is active. */
          static activeWebXRComponent: WebXR | null;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          /**
           * Checks if WebXR is supported and offers an appropriate session.
           * This is used to show the WebXR session joining prompt in browsers that support it.
           * @returns A Promise that resolves to true if a session was offered, false otherwise
           */
          private handleOfferSession;
          /** the currently active webxr input session */
          get session(): NeedleXRSession | null;
          /** immersive-vr or immersive-ar */
          get sessionMode(): XRSessionMode | null;
          /** While AR: this will return the currently active WebARSessionRoot component.
           * You can also query this component in your scene with `findObjectOfType(WebARSessionRoot)`
           */
          get arSessionRoot(): WebARSessionRoot | null;
          /** Call to start a WebVR session.
           *
           * This is a shorthand for `NeedleXRSession.start("immersive-vr", init, this.context)`
           */
          enterVR(init?: XRSessionInit): Promise<NeedleXRSession | null>;
          /** Call to start a WebAR session
           *
           * This is a shorthand for `NeedleXRSession.start("immersive-ar", init, this.context)`
           */
          enterAR(init?: XRSessionInit): Promise<NeedleXRSession | null>;
          /** Call to end a WebXR (AR or VR) session.
           *
           * This is a shorthand for `NeedleXRSession.stop()`
           */
          exitXR(): void;
          // Menu button used to leave the XR session — presumably added to the Needle menu on session start; verify
          private _exitXRMenuButton?;
          // State captured before entering XR so it can be restored afterwards — TODO confirm what it stores
          private _previousXRState;
          // SpatialGrabRaycaster instance managed when `useSpatialGrab` is enabled
          private _spatialGrabRaycaster?;
          // Backing field for the `arSessionRoot` getter (the WebARSessionRoot active during the AR session)
          private _activeWebARSessionRoot;
          // True when this component is the `activeWebXRComponent` — presumably; verify against implementation
          private get isActiveWebXR();
          /* Excluded from this release type: onBeforeXR */
          /* Excluded from this release type: onEnterXR */
          /* Excluded from this release type: onUpdateXR */
          /* Excluded from this release type: onLeaveXR */
          /** Call to enable or disable default controller behaviour */
          setDefaultMovementEnabled(enabled: boolean): XRControllerMovement | null;
          /** Call to enable or disable default controller rendering */
          setDefaultControllerRenderingEnabled(enabled: boolean): XRControllerModel | null;
          /**
           * Creates and instantiates the user's avatar representation in the WebXR session.
           * @param xr The active session
           */
          protected createLocalAvatar(xr: NeedleXRSession): Promise<void>;
          /**
           * Event handler called when a player avatar is spawned.
           * Ensures the avatar has the necessary Avatar component.
           * @param instance The spawned avatar 3D object
           */
          private onAvatarSpawned;
          /** @deprecated use {@link getButtonsFactory} or directly access {@link WebXRButtonFactory.getOrCreate} */
          getButtonsContainer(): WebXRButtonFactory;
          /**
           * Returns the WebXR button factory, creating one if it doesn't exist.
           * Use this to access and modify WebXR UI buttons.
           * @returns The WebXRButtonFactory instance
           */
          getButtonsFactory(): WebXRButtonFactory;
          /**
           * Reference to the WebXR button factory used by this component.
           */
          private _buttonFactory?;
          /**
           * Creates and sets up UI elements for WebXR interaction based on component settings
           * and device capabilities. Handles creating AR, VR, QuickLook buttons and utility buttons like QR codes.
           */
          private handleCreatingHTML;
          /**
           * Storage for UI buttons created by this component.
           */
          private readonly _buttons;
          /**
           * Adds a button to the UI with the specified priority.
           * @param button The HTML element to add
           * @param priority The button's priority value (lower numbers appear first)
           */
          private addButton;
          /**
           * Removes all buttons created by this component from the UI.
           */
          private removeButtons;
      }

      /**
       * Factory to create WebXR buttons for AR, VR, QuickLook and Send to Quest.
       * The buttons are created as HTMLButtonElements and can be added to the DOM.
       * The buttons will automatically hide when an XR session is started and show again when the session ends.
       */
      export declare class WebXRButtonFactory {
          // Singleton instance backing getOrCreate()
          private static _instance;
          // Private factory used by getOrCreate() to construct the singleton
          private static create;
          /** Returns the shared WebXRButtonFactory instance, creating it on first access. */
          static getOrCreate(): WebXRButtonFactory;
          // Presumably whether the page is served over a secure context (required for WebXR) — TODO confirm
          private get isSecureConnection();
          /** The QuickLook button, or undefined if {@link createQuicklookButton} has not been called yet. */
          get quicklookButton(): HTMLButtonElement | undefined;
          private _quicklookButton?;
          /** The AR button, or undefined if {@link createARButton} has not been called yet. */
          get arButton(): HTMLButtonElement | undefined;
          private _arButton?;
          /** The VR button, or undefined if {@link createVRButton} has not been called yet. */
          get vrButton(): HTMLButtonElement | undefined;
          private _vrButton?;
          /** The Send-to-Quest button, or undefined if {@link createSendToQuestButton} has not been called yet. */
          get sendToQuestButton(): HTMLButtonElement | undefined;
          private _sendToQuestButton?;
          /** The QR code button. */
          get qrButton(): HTMLButtonElement;
          /** get or create the quicklook button
           * Behaviour of the button:
           * - if the button is clicked a USDZExporter component will be searched for in the scene and if found, it will be used to export the scene to USDZ / Quicklook
           */
          createQuicklookButton(): HTMLButtonElement;
          /** get or create the WebXR AR button
           * @param init optional session init options
           * Behaviour of the button:
           * - if the device supports AR, the button will be visible and clickable
           * - if the device does not support AR, the button will be hidden
           * - if the device changes and now supports AR, the button will be visible
           */
          createARButton(init?: XRSessionInit): HTMLButtonElement;
          /** get or create the WebXR VR button
           * @param init optional session init options
           * Behaviour of the button:
           * - if the device supports VR, the button will be visible and clickable
           * - if the device does not support VR, the button will be hidden
           * - if the device changes and now supports VR, the button will be visible
           */
          createVRButton(init?: XRSessionInit): HTMLButtonElement;
          /** get or create the Send To Quest button
           * Behaviour of the button:
           * - if the button is clicked, the current URL will be sent to the Oculus Browser on the Quest
           */
          createSendToQuestButton(): HTMLButtonElement;
          /**
           * @deprecated please use ButtonsFactory.getOrCreate().createQRCode(). This method will be removed in a future update
           * NOTE(review): the suggested replacement appears to reference this same method — confirm the intended replacement API before removal.
           */
          createQRCode(): HTMLButtonElement;
          // Updates button visibility when session support changes — presumably reacts to device/support changes; verify
          private updateSessionSupported;
          // Hides the given element while an XR session is active and shows it again when the session ends
          private hideElementDuringXRSession;
          // Subscribes to XR session start/end state changes — presumably to drive button visibility; verify
          private listenToXRSessionState;
      }

     /**
      * Create powerful AR image tracking experiences with just a few lines of code!
      * WebXRImageTracking makes it incredibly easy to detect marker images in the real world and anchor 3D content to them.
      * Needle Engine automatically handles all the complexity across different platforms and fallback modes for you.
      *
      * [![Image Tracking Demo](https://cloud.needle.tools/-/media/vRUf9BmqW_bgNARATjmfCQ.gif)](https://engine.needle.tools/samples/image-tracking)
      *
      * **What makes Needle Engine special:**
      * - **Write once, run everywhere**: The same code works across iOS, Android, and visionOS
      * - **Automatic platform optimization**: Seamlessly switches between WebXR, ARKit, and QuickLook
      * - **Flexible deployment options**: From full WebXR with unlimited markers to QuickLook fallback
      * - **Production ready**: Battle-tested tracking with adaptive smoothing and stability features
      *
      * **Platform Support & Options:**
      * - **iOS (WebXR via AppClip)**: Full WebXR support - track unlimited markers simultaneously via native ARKit!
      * - **iOS (QuickLook mode)**: Instant AR without app installation - perfect for quick demos (tracks first marker)
      * - **Android (WebXR)**: Native WebXR Image Tracking API - unlimited markers (requires browser flag during early access)
      * - **visionOS (QuickLook)**: Spatial image anchoring with Apple's AR QuickLook
      *
      * **Simple 3-Step Setup:**
      * 1. Add this component to any GameObject in your scene
      * 2. Configure your markers in the `trackedImages` array:
      *    - `image`: URL to your marker image
      *    - `widthInMeters`: Physical size of your printed marker
      *    - `object`: The 3D content to display
      * 3. Export and test - Needle handles the rest!
      *
      * **Pro Tips for Best Results:**
      * - Use high-contrast markers with unique features for reliable tracking
      * - Match `widthInMeters` to your actual physical marker size for accurate positioning
      * - Enable `imageDoesNotMove` for wall posters or floor markers - significantly improves stability
      * - Use `smooth` (enabled by default) for professional-looking, jitter-free tracking
      * - Test with different marker sizes and lighting - Needle's adaptive tracking handles various conditions
      *
      * ![](https://cloud.needle.tools/-/media/V-2UxGVRJxvH9oDnXGnIdg.png)
      * *WebXRImageTracking component in Unity Editor*
      *
      * ![](https://cloud.needle.tools/-/media/poDPca1bI1an4SBY7LtKNA.png)
      * *WebXRImageTracking panel/component in Blender*
      *
      * @example Getting started - it's this easy!
      * ```ts
      * // Just add markers and Needle handles everything else
      * const imageTracking = myObject.addComponent(WebXRImageTracking);
      * const marker = new WebXRImageTrackingModel({
      *   url: "https://example.com/my-poster.png",
      *   widthInMeters: 0.3, // 30cm poster
      *   object: my3DContent
      * });
      * imageTracking.addImage(marker);
      * // Done! Works on iOS, Android, and visionOS automatically
      * ```
      *
      * @example Track multiple markers (WebXR mode)
      * ```ts
      * const imageTracking = myObject.addComponent(WebXRImageTracking);
      *
      * // In WebXR mode (iOS AppClip, Android), all markers work simultaneously!
      * const productBox = new WebXRImageTrackingModel({ url: "product-box.png", widthInMeters: 0.15, object: productInfo });
      * const businessCard = new WebXRImageTrackingModel({ url: "business-card.png", widthInMeters: 0.09, object: contactCard });
      * const poster = new WebXRImageTrackingModel({ url: "poster.png", widthInMeters: 0.5, object: videoPlayer });
      *
      * imageTracking.addImage(productBox);
      * imageTracking.addImage(businessCard);
      * imageTracking.addImage(poster);
      *
      * // For QuickLook fallback mode, optionally set which marker is primary
      * imageTracking.setPrimaryImage(poster); // This will be used in QuickLook
      * ```
      *
      * @example Professional setup for static markers
      * ```ts
      * // Perfect for museums, retail displays, or permanent installations
      * const wallArt = new WebXRImageTrackingModel({
      *   url: "gallery-painting.png",
      *   widthInMeters: 0.6,
      *   object: interactiveExplanation,
      *   imageDoesNotMove: true, // Rock-solid tracking for static markers!
      *   hideWhenTrackingIsLost: false // Content stays visible even if temporarily occluded
      * });
      * ```
      *
      * **Why developers love Needle's image tracking:**
      * - Zero platform-specific code required
      * - Automatic graceful degradation across deployment modes
      * - Built-in jitter reduction and stability features
      * - Works with any image - posters, packaging, business cards, artwork
      * - Export once, deploy everywhere
      *
      * @summary The easiest way to create cross-platform AR image tracking experiences
      * @category XR
      * @group Components
      * @see {@link WebXRImageTrackingModel} for marker configuration options
      * @see {@link WebXRTrackedImage} for runtime tracking data and events
      * @see {@link WebXR} for general WebXR setup and session management
      * @link https://engine.needle.tools/docs/xr.html#image-tracking - Full Documentation
      * @link https://engine.needle.tools/samples/image-tracking - Try Live Demo
      * @link https://github.com/immersive-web/marker-tracking/blob/main/explainer.md - WebXR Marker Tracking Specification
      */
      export declare class WebXRImageTracking extends Component {
          /**
           * Set which marker should be primary (first in the list).
           * Useful when deploying to QuickLook mode where one marker is tracked at a time.
           * In full WebXR mode (iOS AppClip, Android), all markers track simultaneously regardless of order.
           *
           * **Note:** Needle Engine automatically adapts - in WebXR all markers work, in QuickLook the primary is used.
           *
           * @param image The marker model to set as primary
           *
           * @example
           * ```ts
           * // Great for offering different AR experiences from one deployment
           * imageTracking.setPrimaryImage(businessCardMarker); // Use this for QuickLook
           * // In WebXR mode, all markers still work simultaneously!
           * ```
           */
          setPrimaryImage(image: WebXRImageTrackingModel): void;
          /**
           * Add a marker to track - it's that simple!
           * Needle Engine handles all the platform differences automatically.
           *
           * **Tip:** Add all your markers upfront. In WebXR mode they all work simultaneously.
           * In QuickLook mode, the first (primary) marker is used.
           *
           * @param image The marker configuration to add
           * @param asPrimary Set to true to make this the primary marker (for QuickLook fallback)
           *
           * @example
           * ```ts
           * // Super simple - works across all platforms
           * const marker = new WebXRImageTrackingModel({
           *   url: "https://mysite.com/poster.png",
           *   widthInMeters: 0.42, // A3 poster width
           *   object: cool3DModel
           * });
           * imageTracking.addImage(marker);
           * // That's it! Needle does the rest.
           * ```
           */
          addImage(image: WebXRImageTrackingModel, asPrimary?: boolean): void;
          /**
           * Your list of markers to track. Add as many as you need!
           *
           * **How it works across platforms:**
           * - **WebXR mode** (iOS AppClip, Android): All markers are tracked simultaneously - amazing for multi-marker experiences!
           * - **QuickLook mode** (iOS fallback, visionOS): First marker is used - perfect for quick demos without app installation
           *
           * **Needle's smart deployment:** Configure all your markers once, and Needle automatically uses the best
           * tracking mode available on each platform. No platform-specific code needed!
           *
           * @see {@link WebXRImageTrackingModel} for marker configuration
           * @see {@link addImage} and {@link setPrimaryImage} for runtime management
           */
          readonly trackedImages: WebXRImageTrackingModel[];
          /**
           * Enable Needle's professional-grade adaptive smoothing for rock-solid tracking.
           * Automatically reduces jitter while staying responsive to real movement.
           *
           * **Pro tip:** Keep this enabled (default) for production experiences!
           *
           * @default true
           */
          smooth: boolean;
          // Presumably maps tracked image models to their index in the session's tracked-image list — TODO confirm
          private readonly trackedImageIndexMap;
          /**
           * Check if image tracking is available on this device right now.
           *
           * **Note:** On Android Chrome, WebXR Image Tracking is currently behind a flag during the early access period.
           * Needle automatically falls back to other modes when needed, so your experience keeps working!
           */
          get supported(): boolean;
          // Backing field for the `supported` getter
          private _supported;
          /* Excluded from this release type: awake */
          /* Excluded from this release type: onEnable */
          /* Excluded from this release type: onDisable */
          // Invoked before USDZ export — presumably configures the QuickLook image anchor for the primary marker; verify
          private onBeforeUSDZExport;
          /* Excluded from this release type: onBeforeXR */
          /* Excluded from this release type: onEnterXR */
          /* Excluded from this release type: onLeaveXR */
          // Presumably maps marker images to the instantiated 3D objects displayed for them — TODO confirm
          private readonly imageToObjectMap;
          // Presumably the set of images detected in the current frame/session — TODO confirm
          private readonly currentImages;
          // Warning state for the Android "WebXR Incubations" flag requirement (see `supported` note above)
          private readonly webXRIncubationsWarning;
          /* Excluded from this release type: onUpdateXR */
          // Per-frame handler applying tracking results to the configured objects — presumably; verify
          private onImageTrackingUpdate;
      }

     /**
      * Configuration model for a tracked image marker.
      * Defines which image to track, its physical size, and which 3D content to display when detected.
      *
      * **Important:** The physical size (`widthInMeters`) must match your printed marker size for accurate tracking.
      * Mismatched sizes cause the tracked object to appear to "float" above or below the marker.
      *
      * **Best practices for marker images:**
      * - Use high-contrast images with distinct features
      * - Avoid repetitive patterns or solid colors
      * - Test images at intended viewing distances
      * - Ensure good lighting conditions
      *
      * @summary Configuration for a single trackable image marker
      * @category XR
      * @see {@link WebXRImageTracking} for the component that uses these models
      * @link https://engine.needle.tools/docs/xr.html#image-tracking
      * @link https://engine.needle.tools/samples/image-tracking
      */
      export declare class WebXRImageTrackingModel {
          /**
           * Creates a new image tracking configuration.
           *
           * @param params Configuration parameters
           * @param params.url URL to the marker image to track
           * @param params.widthInMeters Physical width of the printed marker in meters (must match real size!)
           * @param params.object The 3D object or AssetReference to display when this image is detected
           * @param params.createObjectInstance If true, creates a new instance for each detection (useful for tracking multiple instances of the same marker)
           * @param params.imageDoesNotMove Enable for static markers (floor/wall mounted) to improve tracking stability
           * @param params.hideWhenTrackingIsLost If true, hides the object when tracking is lost; if false, leaves it at the last known position
           */
          constructor(params: {
              /** URL of the marker image to track */
              url: string;
              /** Physical width of the printed marker in meters */
              widthInMeters: number;
              /** The 3D content to display when the image is detected */
              object: AssetReference | Object3D;
              createObjectInstance?: boolean;
              imageDoesNotMove?: boolean;
              hideWhenTrackingIsLost?: boolean;
          });
          /**
           * URL to the marker image to track.
           * **Important:** Use images with high contrast and unique features to improve tracking quality.
           * Avoid repetitive patterns, solid colors, or low-contrast images.
           */
          image?: string;
          /**
           * Physical width of the printed marker in meters.
           * **Critical:** This must match your actual printed marker size!
           * If mismatched, the tracked object will appear to "float" above or below the marker.
           *
           * @default 0.25 (25cm)
           * @example
           * ```ts
           * // For a business card sized marker (9cm wide)
           * widthInMeters = 0.09;
           *
           * // For an A4 page width (21cm)
           * widthInMeters = 0.21;
           * ```
           */
          widthInMeters: number;
          /**
           * The 3D object or prefab to display when this marker is detected.
           * The object will be positioned and rotated to match the tracked image in the real world.
           *
           * **Note:** Scale your 3D content appropriately relative to `widthInMeters`.
           */
          object?: AssetReference;
          /**
           * When enabled, creates a new instance of the referenced object each time this image is detected.
           * Enable this if you want to track multiple instances of the same marker simultaneously,
           * or if the same object is used for multiple different markers.
           *
           * @default false
           */
          createObjectInstance: boolean;
          /**
           * Enable for static markers that don't move (e.g., posters on walls or markers on the floor).
           * When enabled, only the first few tracking frames are used to position the object,
           * resulting in more stable tracking by ignoring subsequent minor position changes.
           *
           * **Use cases:**
           * - Wall-mounted posters or artwork
           * - Floor markers for persistent AR content
           * - Product packaging on shelves
           *
           * **Don't use for:**
           * - Handheld cards or objects
           * - Moving markers
           *
           * @default false
           */
          imageDoesNotMove: boolean;
          /**
           * Controls visibility behavior when tracking is lost.
           * - When `true`: Object is hidden when the marker is no longer visible
           * - When `false`: Object remains visible at its last tracked position
           *
           * @default true
           */
          hideWhenTrackingIsLost: boolean;
          /**
           * Extracts the filename from the marker image URL.
           * @returns The filename (last part of the URL path), or null if no image URL is set
           * @example
           * ```ts
           * // URL: "https://example.com/markers/business-card.png"
           * // Returns: "business-card.png"
           * ```
           */
          getNameFromUrl(): string | null;
      }

     /**
      * [WebXRPlaneTracking](https://engine.needle.tools/docs/api/WebXRPlaneTracking) tracks planes and meshes in the real world when in immersive-ar (e.g. on Oculus Quest).
      * @category XR
      * @group Components
      */
      export declare class WebXRPlaneTracking extends Component {
          /**
           * Optional: if assigned it will be instantiated per tracked plane/tracked mesh.
           * If not assigned a simple mesh will be used. Use `occluder` to create occlusion meshes that don't render color but only depth.
           */
          dataTemplate?: AssetReference;
          /**
           * If true an occluder material will be applied to the tracked planes/meshes.
           * Note: this will only be applied if dataTemplate is not assigned
           */
          occluder: boolean;
          /**
           * If true the system will try to initiate room capture if no planes are detected.
           */
          initiateRoomCaptureIfNoData: boolean;
          /**
           * If true plane tracking will be enabled
           */
          usePlaneData: boolean;
          /**
           * If true mesh tracking will be enabled
           */
          useMeshData: boolean;
          /** when enabled mesh or plane tracking will also be used in VR */
          runInVR: boolean;
          /**
           * Returns all tracked planes
           */
          get trackedPlanes(): MapIterator<XRPlaneContext>;
          /** Returns all tracked meshes */
          get trackedMeshes(): MapIterator<XRPlaneContext>;
          /* Excluded from this release type: onBeforeXR */
          /* Excluded from this release type: onEnterXR */
          /** Called when the XR session ends. */
          onLeaveXR(_args: NeedleXREventArgs): void;
          /* Excluded from this release type: onUpdateXR */
          // Scratch state used while processing tracked geometry — presumably Box3/Vector3 temporaries; verify
          private bounds;
          private center;
          private labelOffset;
          // Removes the scene data created for a tracked plane/mesh — presumably; verify
          private removeData;
          private _dataId;
          // Backing maps for the `trackedPlanes` / `trackedMeshes` getters
          private readonly _allPlanes;
          private readonly _allMeshes;
          // Timestamp/flag used with `initiateRoomCaptureIfNoData` to decide when to prompt room capture — TODO confirm
          private firstTimeNoPlanesDetected;
          // Applies the depth-only occluder material (see `occluder`)
          private makeOccluder;
          // Consumes XRFrame plane/mesh data and updates the tracked contexts — presumably; verify
          private processFrameData;
          private _flipForwardMatrix;
          private checkIfContextShouldBeConvex;
          // Geometry builders for tracked planes and meshes
          private createGeometry;
          private _verticesCache;
          private createMeshGeometry;
          private createPlaneGeometry;
      }

      /**
       * Event payload emitted by {@link WebXRPlaneTracking} when real-world planes are added, updated or removed.
       */
      export declare type WebXRPlaneTrackingEvent = {
          /** Which lifecycle change occurred for the tracked plane */
          type: "plane-added" | "plane-updated" | "plane-removed";
          /** The plane context this event refers to */
          context: XRPlaneContext;
      };

     /**
      * Represents a tracked image detected during a WebXR session.
      * Contains position, rotation, and tracking state information for a detected marker image.
      *
      * **Properties:**
      * - Access image URL and physical dimensions
      * - Get current position and rotation in world space
      * - Check tracking state (tracked vs emulated)
      * - Apply transform to 3D objects
      *
      * @summary Runtime data for a detected marker image in WebXR
      * @category XR
      */
      export declare class WebXRTrackedImage {
          /** URL of the tracked marker image */
          get url(): string;
          /** Physical width of the marker in meters */
          get widthInMeters(): number;
          /** The ImageBitmap used for tracking */
          get bitmap(): ImageBitmap;
          /** The {@link WebXRImageTrackingModel} configuration for this tracked image */
          get model(): WebXRImageTrackingModel;
          /**
           * The measured size of the detected image in the real world.
           * May differ from `widthInMeters` if the physical marker doesn't match the configured size.
           */
          readonly measuredSize: number;
          /**
           * Current tracking state of the image:
           * - `tracked` - Image is currently being tracked by the system
           * - `emulated` - Tracking is being emulated (less accurate)
           */
          readonly state: "tracked" | "emulated";
          /**
           * Copy the current world position of the tracked image to a Vector3.
           * @param vec The vector to store the position in
           * @returns The input vector with the position copied to it
           */
          getPosition(vec: Vector3): Vector3;
          /**
           * Copy the current world rotation of the tracked image to a Quaternion.
           * @param quat The quaternion to store the rotation in
           * @returns The input quaternion with the rotation copied to it
           */
          getQuaternion(quat: Quaternion): Quaternion;
          /**
           * Apply the tracked image's position and rotation to a 3D object.
           * Optionally applies smoothing to reduce jitter.
           *
           * @param object The 3D object to update
           * @param t01 Interpolation factor (0-1) for smoothing. If undefined or >= 1, no smoothing is applied. When smoothing is enabled, larger position/rotation changes will automatically reduce the smoothing to prevent lag.
           */
          applyToObject(object: Object3D, t01?: number | undefined): void;
          // Shared scratch buffers reused across instances to avoid per-frame allocations — presumably; verify
          private static _positionBuffer;
          private static _rotationBuffer;
          // Cached transform extracted from the pose (see ensureTransformData)
          private _position;
          private _rotation;
          // Lazily extracts position/rotation from the stored pose — presumably; verify
          private ensureTransformData;
          // References captured at construction time (see constructor parameters below)
          private readonly _trackingComponent;
          private readonly _trackedImage;
          private readonly _bitmap;
          private readonly _pose;
          /**
           * @param context The owning {@link WebXRImageTracking} component
           * @param trackedImage The marker configuration this result belongs to
           * @param bitmap The ImageBitmap used for tracking
           * @param measuredSize The real-world measured size of the detected image
           * @param state Current tracking state ("tracked" or "emulated")
           * @param pose The pose of the tracked image — untyped here; presumably an XRPose from the WebXR frame, TODO confirm
           */
          constructor(context: WebXRImageTracking, trackedImage: WebXRImageTrackingModel, bitmap: ImageBitmap, measuredSize: number, state: "tracked" | "emulated", pose: any);
      }

      /** Callback invoked with a target data object and the name of the property being written. */
      export declare type WriteCallback = (data: any, prop: string) => void;

      /** Button names as used in the xr profile — the "xr-standard-*" names follow the WebXR Gamepads Module standard mapping. */
      export declare type XRControllerButtonName = "thumbrest" | "xr-standard-trigger" | "xr-standard-squeeze" | "xr-standard-thumbstick" | "xr-standard-touchpad" | "menu" | GamepadButtonName | StylusButtonName;

     /**
      * Add this script to an object and set `side` to make the object follow a specific controller.
      *
      * This can be useful to attach objects to controllers, for example a laser pointer or a 3D model of a tool.
      *
      * @example Make an object follow the right controller
      * ```ts
      * import { onStart, XRControllerFollow } from "@needle-tools/engine";
      * onStart(context => {
      *   const obj = context.scene.getObjectByName("MyObject");
      *   obj?.addComponent(XRControllerFollow, { side: "right", controller: true, hands: true });
      * });
      * ```
      *
      * @summary Makes the object follow a specific XR controller or hand
      * @category XR
      * @group Components
      * */
     export declare class XRControllerFollow extends Component {
         /** @returns whether this component is currently active in the hierarchy and enabled */
         get activeAndEnabled(): boolean;
         /** Should this object follow a right hand/controller or left hand/controller.
          * When a number is provided, the controller with that index is followed.
          * @default "none"
          **/
         side: XRHandedness | number;
         /** should it follow controllers (the physics controller)
          * @default true
          */
         controller: boolean;
         /** should it follow hands (when using hand tracking in WebXR)
          * @default false
          */
         hands: boolean;
         /** Disable if you don't want this script to modify the object's visibility
          * If enabled the object will be hidden when the configured controller or hand is not available
          * If disabled this script will not modify the object's visibility
          * @default true
          */
         controlVisibility: boolean;
         /** when true it will use the grip space, otherwise the ray space
          * @default false
          */
         useGripSpace: boolean;
         /** when enabled the position, rotation and scale of this object will be set to the position it was at when it entered the XR session
          * @default true
          */
         resetTransformAfterXRSession: boolean;
         // NOTE(review): _start* presumably cache the transform on entering XR so it can be
         // restored when resetTransformAfterXRSession is enabled — confirm in implementation
         private readonly _startPosition;
         private readonly _startRotation;
         private readonly _startScale;
         /* Excluded from this release type: onEnterXR */
         /* Excluded from this release type: onUpdateXR */
         /* Excluded from this release type: onLeaveXR */
     }

     /**
      * XRControllerModel is a component that allows to display controller models or hand models in an XR session.
      * It automatically loads the appropriate model for the connected controller or hand.
      *
      * You can configure if controller models or hand models should be created.
      *
      * @summary Displays controller or hand models in XR
      * @category XR
      * @group Components
      */
     export declare class XRControllerModel extends Component {
         /**
          * If true, the controller model will be created when a controller is added/connected
          * @default true
          */
         createControllerModel: boolean;
         /**
          * If true, the hand model will be created when a hand is "added"/tracked
          * @default true
          */
         createHandModel: boolean;
         /** assign a model or model url to create custom hand models */
         customLeftHand?: AssetReference;
         /** assign a model or model url to create custom hand models */
         customRightHand?: AssetReference;
         /** Shared factory used to create the three.js controller models */
         static readonly factory: XRControllerModelFactory;
         /** @returns whether this component supports the given XR session mode */
         supportsXR(mode: XRSessionMode): boolean;
         // NOTE(review): presumably the created models, stored per controller — confirm in implementation
         private readonly _models;
         /** Called when an XR controller or hand is connected; loads the appropriate model for it */
         onXRControllerAdded(args: NeedleXRControllerEventArgs): Promise<void>;
         /** Called when an XR controller or hand is disconnected */
         onXRControllerRemoved(args: NeedleXRControllerEventArgs): void;
         /** Called before an XR session starts with the session init options (note the non-standard `trackedImages` array) */
         onBeforeXR(_mode: XRSessionMode, args: XRSessionInit & {
             trackedImages: Array<any>;
         }): void;
         /** Called when the XR session ends */
         onLeaveXR(_args: NeedleXREventArgs): void;
         onBeforeRender(): void;
         private updateRendering;
         /** Loads a controller model from the given url for the given controller */
         protected loadModel(controller: NeedleXRController, url: string): Promise<IGameObject | null>;
         /** Loads the (custom or default) hand model for the given controller */
         protected loadHandModel(comp: Component, controller: NeedleXRController): Promise<{
             handObject: IGameObject;
             handmesh: XRHandMeshModel;
         } | null>;
         private makeOccluder;
     }

     /**
      * XRControllerMovement is a component that allows to move the XR rig using the XR controller input.
      *
      * It supports movement using the left controller's thumbstick and rotation using the right controller's thumbstick.
      *
      * Additionally it supports teleporting using the right controller's thumbstick or by pinching the index finger tip in front of the hand (if hand tracking is enabled).
      * It also visualizes controller rays and hit points in the scene.
      *
      *
      * @summary Move the XR rig using controller input
      * @category XR
      * @group Components
      */
     export declare class XRControllerMovement extends Component implements XRMovementBehaviour {
         /** Movement speed in meters per second
          * @default 1.5
          */
         movementSpeed: number;
         /** How many degrees to rotate the XR rig when using the rotation trigger
          * @default 30
          */
         rotationStep: number;
         /** When enabled you can teleport using the right XR controller's thumbstick by pressing forward
          * @default true
          */
         useTeleport: boolean;
         /**
          * When enabled you can teleport by pinching the right XR controller's index finger tip in front of the hand
          * @default true
          */
         usePinchToTeleport: boolean;
         /** Enable to only allow teleporting on objects with a TeleportTarget component (see {@link TeleportTarget})
          * @default false
          */
         useTeleportTarget: boolean;
         /** Enable to fade out the scene when teleporting
          * @default false
          */
         useTeleportFade: boolean;
         /** enable to visualize controller rays in the 3D scene
          * @default true
          */
         showRays: boolean;
         /** enable to visualize pointer targets in the 3D scene
          * @default false
          */
         showHits: boolean;
         /** Marker identifying this component as an XR movement handler (see {@link XRMovementBehaviour}) */
         readonly isXRMovementHandler: true;
         /** This movement behaviour applies to immersive VR sessions */
         readonly xrSessionMode = "immersive-vr";
         // NOTE(review): _didApplyRotation/_didTeleport presumably latch input so one thumbstick
         // deflection triggers a single step — confirm in implementation
         private _didApplyRotation;
         private _didTeleport;
         /** Per-frame XR update: processes movement, rotation and teleport input */
         onUpdateXR(args: NeedleXREventArgs): void;
         /** Called when the XR session ends */
         onLeaveXR(_: NeedleXREventArgs): void;
         onBeforeRender(): void;
         /** Applies thumbstick movement from the given controller to the rig (see {@link movementSpeed}) */
         protected onHandleMovement(controller: NeedleXRController, rig: IGameObject): void;
         /** Applies rotation from the given controller to the rig (see {@link rotationStep}) */
         protected onHandleRotation(controller: NeedleXRController, rig: IGameObject): void;
         private readonly _teleportBuffer;
         /** Handles teleport input for the given controller (see {@link useTeleport} and {@link usePinchToTeleport}) */
         protected onHandleTeleport(controller: NeedleXRController, rig: IGameObject): void;
         private _plane;
         // visualization objects for rays and hit points
         private readonly _lines;
         private readonly _hitDiscs;
         private readonly _hitDistances;
         private readonly _lastHitDistances;
         /** Renders the controller ray visuals (see {@link showRays}) */
         protected renderRays(session: NeedleXRSession): void;
         /** Renders the pointer hit visuals (see {@link showHits}) */
         protected renderHits(session: NeedleXRSession): void;
         private isObjectWithInteractiveComponent;
         private updateHitPointerPosition;
         /** Raycast filter used when positioning the hit point visuals */
         protected hitPointRaycastFilter: RaycastTestObjectCallback;
         /** create an object to visualize hit points in the scene */
         protected createHitPointObject(): HitPointObject;
         /** create an object to visualize controller rays */
         protected createRayLineObject(): Line2;
     }

     /** Discriminates whether an XR input source is a tracked hand or a physical controller */
     declare type XRControllerType = "hand" | "controller";

     /**
      * XRFlag shows or hides GameObjects based on the current XR state or session.
      * Use for XR-responsive content that should only appear in specific modes.
      *
      * **XR states:**
      * - `Browser` - Normal web browsing (no XR)
      * - `AR` - Augmented reality session
      * - `VR` - Virtual reality session
      * - `FirstPerson` - First-person view mode
      * - `ThirdPerson` - Third-person/spectator view mode
      * - Combine with bitwise OR: `AR | VR`
      *
      * **Debug options:**
      * - `?debugxrflags` - Log flag changes
      * - `?disablexrflags` - Disable all XR flags
      *
      * @example Show only in VR
      * ```ts
      * const flag = myObject.addComponent(XRFlag);
      * flag.visibleIn = XRStateFlag.VR;
      * ```
      *
      * @example Show in AR and VR, hide in browser
      * ```ts
      * flag.visibleIn = XRStateFlag.AR | XRStateFlag.VR;
      * ```
      *
      * @category XR
      * @category Utilities
      * @group Components
      * @see {@link XRStateFlag} for state options
      * @see {@link XRState} for global state management
      * @see {@link DeviceFlag} for device-based visibility
      * @see {@link WebXR} for XR session management
      */
     export declare class XRFlag extends Component {
         // NOTE(review): presumably tracks all XRFlag instances so Apply() can update them together — confirm
         private static registry;
         /** Re-applies visibility on all registered {@link XRFlag} instances for the current XR state */
         static Apply(): void;
         private static firstApply;
         private static buffer;
         /** Bitmask of {@link XRStateFlag} values in which the object should be visible */
         visibleIn: number;
         awake(): void;
         onEnable(): void;
         onDestroy(): void;
         /** Whether the object is currently visible according to this flag */
         get isOn(): boolean;
         /** Updates the object's visibility for the given XR state (falls back to the global state when omitted — confirm in implementation) */
         UpdateVisible(state?: XRState | XRStateFlag | null): void;
     }

     /** Names of recognized XR hand gestures (currently only "pinch") */
     export declare type XRGestureName = "pinch";

     /** Data of a real-world mesh detected by the XR runtime — mirrors the experimental WebXR XRMesh (mesh detection) shape */
     declare type XRMesh_2 = {
         /** XRSpace the mesh geometry is expressed in */
         meshSpace: XRSpace;
         /** Time of the last change to this mesh */
         lastChangedTime: number;
         /** Vertex positions as a flat array — presumably x,y,z triplets per the WebXR mesh-detection spec, confirm */
         vertices: Float32Array;
         /** Triangle indices into {@link vertices} */
         indices: Uint32Array;
         /** Optional semantic label provided by the runtime (e.g. floor/wall) — values depend on the XR runtime */
         semanticLabel?: string;
     };

     /** Implemented by components that handle XR rig movement (see {@link XRControllerMovement}) */
     declare interface XRMovementBehaviour {
         /** Marker property identifying a component as an XR movement handler */
         isXRMovementHandler: true;
     }

     /**
      * Used by {@link WebXRPlaneTracking} to track planes in the real world.
      */
     export declare type XRPlaneContext = {
         /** Id of this tracked plane/mesh */
         id: number;
         /** The raw WebXR plane or mesh data (may carry a semantic label, e.g. floor/wall) */
         xrData: (XRPlane & {
             semanticLabel?: string;
         }) | XRMesh_2;
         /** Time of the last update to this entry — time base (epoch vs session-relative) not visible here, confirm */
         timestamp: number;
         /** Optional scene object visualizing the tracked geometry */
         mesh?: Mesh | Group;
         /** Optional collider generated for the tracked geometry */
         collider?: MeshCollider;
     };

     /**
      * A user in XR (VR or AR) is parented to an XR rig during the session.
      * When moving through the scene the rig is moved instead of the user.
      *
      * You can create multiple rigs in your scene and switch between them during an active XR session by calling {@link setAsActiveXRRig} on the XRRig instance.
      * For advanced use-cases you can also provide your own XRRig class by implementing the {@link IXRRig} interface and adding it to your scene.
      *
      * @category XR
      * @group Components
      */
     export declare class XRRig extends Component implements IXRRig {
         /** Priority of this rig — {@link setAsActiveXRRig} may raise it to the highest value (see its docs) */
         priority: number;
         /** Whether this rig is currently the active XR rig */
         get isActive(): boolean;
         /**
          * Sets this rig to be the active XR rig (needs to be called during an active XR session)
          * Note that this might modify the priority of this rig to be the highest.
          */
         setAsActiveXRRig(): void;
         /**
          * Sets the priority of the rig.
          */
         setPriority(value: number): void;
         /* Excluded from this release type: awake */
         /** Marker method identifying this component as an XR rig (see {@link IXRRig}) */
         isXRRig(): boolean;
         /** @returns whether this rig supports the given XR session mode */
         supportsXR(_mode: XRSessionMode): boolean;
         // NOTE(review): presumably caches the scale on entering XR so it can be restored on exit — confirm
         private _startScale?;
         /* Excluded from this release type: onEnterXR */
         /* Excluded from this release type: onLeaveXR */
     }

     /** Event arguments for XR session events, carrying the active {@link NeedleXRSession} */
     export declare type XRSessionEventArgs = {
         session: NeedleXRSession;
     };

     /**
      * Holds a bitmask of {@link XRStateFlag} values describing the current XR state.
      * Used together with {@link XRFlag} to control object visibility per XR mode.
      * NOTE(review): method semantics below are inferred from their names — confirm in implementation.
      */
     export declare class XRState {
         /** The globally shared state instance */
         static Global: XRState;
         /** Current state bitmask */
         Mask: XRStateFlag;
         /** @returns whether the given state bit(s) are set in {@link Mask} (any-vs-all bit semantics not visible here) */
         Has(state: XRStateFlag): boolean;
         /** Replaces {@link Mask} with the given state */
         Set(state: number): void;
         /** Adds the given state bit(s) to {@link Mask} */
         Enable(state: number): void;
         /** Removes the given state bit(s) from {@link Mask} */
         Disable(state: number): void;
         /** Toggles the given state bit(s) in {@link Mask} */
         Toggle(state: number): void;
         /** Sets all state bits */
         EnableAll(): void;
         /** Clears all state bits */
         DisableAll(): void;
     }

     /**
      * Bit flags describing the current XR/viewing state.
      * Values are powers of two and can be combined with bitwise OR (e.g. `AR | VR`).
      * @see {@link XRFlag} and {@link XRState}
      */
     export declare enum XRStateFlag {
         /** Matches no state */
         Never = 0,
         /** Normal web browsing (no XR session) */
         Browser = 1,
         /** Augmented reality session */
         AR = 2,
         /** Virtual reality session */
         VR = 4,
         /** First-person view mode */
         FirstPerson = 8,
         /** Third-person/spectator view mode */
         ThirdPerson = 16,
         /** All bits set (0xFFFFFFFF) */
         All = 4294967295
     }

     export { }


declare module 'three' {
    interface Object3D {
        /** Globally unique id of this object, added by Needle Engine — presumably used for serialization/networking, confirm at call sites */
        get guid(): string | undefined;
        set guid(value: string | undefined);
        /**
         * Allows to control e.g. if an object should be exported
         */
        hideFlags: HideFlags;
        /**
         * If false the object will be ignored for raycasting (e.g. pointer events). Default is true.
         * @default true
         */
        raycastAllowed: boolean;
        /**
         * Set a raycast preference for the object:
         * - `lod` will use the raycast mesh lod if available (default). This is usually a simplified mesh for raycasting.
         * - `bounds` will use the bounding box of the object for raycasting. This is very fast but not very accurate.
         * - `full` will use the full mesh for raycasting. This is the most accurate but also the slowest option.
         *
         * **NOTE:** Needle Engine's Raycast system will use Mesh BVH by default - so even 'full' is usually faster than default three.js raycasting.
         */
        raycastPreference?: 'lod' | 'bounds' | 'full';
        /**
         * Add a Needle Engine component to the {@link Object3D}.
         * @param comp The component instance or constructor to add.
         * @param init Optional initialization data for the component.
         * @returns The added component instance.
         * @example Directly pass in constructor and properties:
         * ```ts
         * const obj = new Object3D();
         * obj.addComponent(MyComponent, { myProperty: 42 });
         * ```
         * @example Create a component instance, assign properties and then add it:
         * ```ts
         * const obj = new Object3D();
         * const comp = new MyComponent();
         * comp.myProperty = 42;
         * obj.addComponent(comp);
         * ```
         */
        addComponent<T extends IComponent>(comp: T | ConstructorConcrete<T>, init?: ComponentInit<T>): T;
        /**
         * Remove a Needle Engine component from the {@link Object3D}.
         */
        removeComponent(inst: IComponent): IComponent;
        /**
         * Get or add a Needle Engine component to the Object3D.
         * If the component already exists, it will be returned. Otherwise, a new component will be added.
         * @param typeName The component constructor to get or add.
         * @param init Optional initialization data for the component.
         * @returns The component instance.
         */
        getOrAddComponent<T extends IComponent>(typeName: ConstructorConcrete<T>, init?: ComponentInit<T>): T;
        /**
         * Get a Needle Engine component from the {@link Object3D}.
         * @returns The component instance or null if not found.
         */
        getComponent<T extends IComponent>(type: Constructor<T>): T | null;
        /**
         * Get all components of a specific type from the {@link Object3D}.
         * @param arr Optional array to fill with the found components.
         * @returns An array of components.
         */
        getComponents<T extends IComponent>(type: Constructor<T>, arr?: []): T[];
        /**
         * Get a Needle Engine component from the {@link Object3D} or its children. This will search on the current Object and all its children.
         * @param type The type of the component to search for.
         * @param includeInactive If true, also inactive components are considered. Default is false.
         * @returns The component instance or null if not found.
         */
        getComponentInChildren<T extends IComponent>(type: Constructor<T>, includeInactive?: boolean): T | null;
        /**
         * Get all components of a specific type from the {@link Object3D} or its children. This will search on the current Object and all its children.
         * @param arr Optional array to fill with the found components.
         * @returns An array of components.
         */
        getComponentsInChildren<T extends IComponent>(type: Constructor<T>, arr?: []): T[];
        /**
         * Get a Needle Engine component from the {@link Object3D} or its parents. This will search on the current Object and all its parents.
         * @param type The type of the component to search for.
         * @param includeInactive If true, also inactive components are considered. Default is false.
         * @returns The component instance or null if not found.
         */
        getComponentInParent<T extends IComponent>(type: Constructor<T>, includeInactive?: boolean): T | null;
        /**
         * Get all Needle Engine components of a specific type from the {@link Object3D} or its parents. This will search on the current Object and all its parents.
         * @param arr Optional array to fill with the found components.
         * @returns An array of components.
         */
        getComponentsInParent<T extends IComponent>(type: Constructor<T>, arr?: []): T[];
        /**
         * Destroys the {@link Object3D} and all its Needle Engine components.
         */
        destroy(): void;
        /**
         * Get or set the world position of the {@link Object3D}.
         * Added by Needle Engine.
         */
        worldPosition: Vector3;
        /**
         * Get or set the world quaternion of the {@link Object3D}.
         * Added by Needle Engine.
         */
        worldQuaternion: Quaternion;
        /**
         * Get or set the world rotation of the {@link Object3D}.
         * Added by Needle Engine.
         * NOTE(review): represented as a Vector3 (euler-style); units (degrees vs radians) are not visible here — confirm.
         */
        worldRotation: Vector3;
        /**
         * Get or set the world scale of the {@link Object3D}.
         * Added by Needle Engine.
         */
        worldScale: Vector3;
        /**
         * Get the world forward vector of the {@link Object3D}.
         * Added by Needle Engine.
         */
        get worldForward(): Vector3;
        set worldForward(v: Vector3);
        /**
         * Get the world right vector of the {@link Object3D}.
         * Added by Needle Engine.
         */
        get worldRight(): Vector3;
        /**
         * Get the world up vector of the {@link Object3D}.
         * Added by Needle Engine.
         */
        get worldUp(): Vector3;
        /**
         * Check if the given object is contained in the hierarchy of this object or if it's the same object.
         * @param object The object to check.
         * @returns True if the object is contained in the hierarchy, false otherwise.
         */
        contains(object: Object3D | null | undefined): boolean;
    }
}


declare module 'three' {
    interface Vector3 {
        /**
         * Spherically interpolates this vector towards `end` by factor `t` (0 = unchanged, 1 = `end`).
         * Added by Needle Engine.
         * @returns a Vector3 — NOTE(review): returning `this` would follow the usual three.js convention, confirm in implementation
         */
        slerp(end: Vector3, t: number): Vector3;
    }
}


declare module 'three' {
    interface SkinnedMesh {
        /** Generator producing a static (baked) copy of the skinned geometry (see {@link StaticGeometryGenerator}) */
        staticGenerator?: StaticGeometryGenerator;
        /** Baked static geometry produced from the skinned mesh */
        staticGeometry?: BufferGeometry;
        /** Time of the last static geometry update — units not visible here, presumably milliseconds (confirm) */
        staticGeometryLastUpdate?: number;
    }
    interface Mesh {
        /** Raycast override for BVH-accelerated raycasting — presumably three-mesh-bvh's acceleratedRaycast, confirm */
        acceleratedRaycast?: any;
    }
    interface SkinnedMesh {
        /** @deprecated use autoUpdateMeshBvhInterval */
        autoUpdateMeshBVH?: boolean;
        /**
         * Interval in milliseconds to automatically update the mesh BVH. When set to >= 0 the BVH will be updated every x milliseconds.
         * @default undefined (disabled)
         */
        autoUpdateMeshBvhInterval?: number;
        /** Flag requesting a BVH rebuild — assumption from naming, confirm when/where it is consumed */
        bvhNeedsUpdate?: boolean;
    }
}
