import { ResourceSource } from '../types/common';
import { ChatConfig, LLMTool, Message, ToolsConfig } from '../types/llm';
/**
 * Controller for a locally running LLM, backed by a native module.
 *
 * Exposes model loading, chat/tools configuration, token-streaming
 * generation, interruption, and message-history management. State changes
 * (response text, readiness, generation status, history) are surfaced both
 * through getters and through the optional callbacks passed to the
 * constructor.
 *
 * NOTE(review): this is a generated declaration file — the documented
 * contracts below are inferred from the signatures; confirm details against
 * the implementing source.
 */
export declare class LLMController {
    /** Handle to the underlying native LLM implementation. */
    private nativeModule;
    /** Active chat configuration; set via {@link configure}. */
    private chatConfig;
    /** Active tools configuration; set via {@link configure}. */
    private toolsConfig;
    /** Tokenizer configuration loaded alongside the model. */
    private tokenizerConfig;
    // Presumably a per-token streaming hook distinct from `tokenCallback`;
    // settable via setTokenCallback — TODO confirm relationship.
    private onToken?;
    /** Backing field for the {@link response} getter. */
    private _response;
    /** Backing field for the {@link isReady} getter. */
    private _isReady;
    /** Backing field for the {@link isGenerating} getter. */
    private _isGenerating;
    /** Backing field for the {@link messageHistory} getter. */
    private _messageHistory;
    /** Constructor-supplied callback fired per generated token. */
    private tokenCallback;
    /** Constructor-supplied callback fired with the accumulated response. */
    private responseCallback;
    /** Constructor-supplied callback fired when the history changes. */
    private messageHistoryCallback;
    /** Constructor-supplied callback fired when readiness changes. */
    private isReadyCallback;
    /** Constructor-supplied callback fired when generation starts/stops. */
    private isGeneratingCallback;
    /**
     * Creates a controller. All callbacks are optional observers of
     * controller state.
     *
     * @param tokenCallback - Invoked with each token as it is produced.
     * @param responseCallback - Invoked with the current response string.
     * @param messageHistoryCallback - Invoked with the updated message history.
     * @param isReadyCallback - Invoked when model readiness changes.
     * @param isGeneratingCallback - Invoked when generation starts or stops.
     */
    constructor({ tokenCallback, responseCallback, messageHistoryCallback, isReadyCallback, isGeneratingCallback, }: {
        tokenCallback?: (token: string) => void;
        responseCallback?: (response: string) => void;
        messageHistoryCallback?: (messageHistory: Message[]) => void;
        isReadyCallback?: (isReady: boolean) => void;
        isGeneratingCallback?: (isGenerating: boolean) => void;
    });
    /** Current (possibly partial) model response text. */
    get response(): string;
    /** Whether the model is loaded and ready to generate. */
    get isReady(): boolean;
    /** Whether a generation is currently in progress. */
    get isGenerating(): boolean;
    /** The conversation history managed by this controller. */
    get messageHistory(): Message[];
    /**
     * Loads the model and tokenizer from the given sources.
     *
     * @param modelSource - Source of the model weights/binary.
     * @param tokenizerSource - Source of the tokenizer.
     * @param tokenizerConfigSource - Source of the tokenizer configuration.
     * @param onDownloadProgressCallback - Optional progress observer;
     *   presumably reports a fraction in [0, 1] — TODO confirm.
     * @returns Resolves once loading completes.
     */
    load({ modelSource, tokenizerSource, tokenizerConfigSource, onDownloadProgressCallback, }: {
        modelSource: ResourceSource;
        tokenizerSource: ResourceSource;
        tokenizerConfigSource: ResourceSource;
        onDownloadProgressCallback?: (downloadProgress: number) => void;
    }): Promise<void>;
    /** Replaces the per-token callback used during generation. */
    setTokenCallback(tokenCallback: (token: string) => void): void;
    /**
     * Applies chat and/or tools configuration. `chatConfig` is a
     * `Partial`, so omitted fields presumably keep their previous or
     * default values — TODO confirm merge semantics.
     */
    configure({ chatConfig, toolsConfig, }: {
        chatConfig?: Partial<ChatConfig>;
        toolsConfig?: ToolsConfig;
    }): void;
    /** Releases the model/native resources held by this controller. */
    delete(): void;
    /**
     * Runs the model on a raw input string (no chat templating implied by
     * the signature; contrast with {@link generate}/{@link sendMessage}).
     */
    forward(input: string): Promise<void>;
    /** Interrupts an in-progress generation. */
    interrupt(): void;
    /**
     * Generates a response for the given messages, optionally exposing
     * tools to the model.
     */
    generate(messages: Message[], tools?: LLMTool[]): Promise<void>;
    /** Sends a single user message and generates a response. */
    sendMessage(message: string): Promise<void>;
    /** Removes the message at `index` from the history. */
    deleteMessage(index: number): void;
    // Formats messages using the configured chat template before inference
    // — presumably; name-based inference, TODO confirm.
    private applyChatTemplate;
}
