/**
 * Prompt Caching for LLM Providers
 * Implements caching strategies to reduce token usage and costs
 */
import { Message } from '../../types/index.js';
/**
 * Configuration accepted by the `PromptCache` constructor.
 * All fields are optional; defaults are supplied by the implementation
 * (not visible in this declaration file).
 */
interface CacheOptions {
    /** Maximum number of cached entries before oldest entries are evicted. */
    maxSize?: number;
    /** Entry time-to-live — presumably milliseconds; TODO confirm against the implementation. */
    ttl?: number;
    /** When true, lookups may also match semantically similar prompts (see `PromptCache`'s similarity search). */
    enableSimilarity?: boolean;
    /** Minimum similarity score for a fuzzy match — presumably in [0, 1]; TODO confirm. */
    threshold?: number;
}
export declare class PromptCache {
    /** Backing store for cached responses. Concrete container type is private to the implementation. */
    private cache;
    /** Logger instance used for cache diagnostics. */
    private logger;
    /** Resolved `CacheOptions` (user-supplied values merged with defaults, presumably). */
    private options;
    /** Running counters backing `getStats()` (hits, misses, tokensSaved, costSaved). */
    private stats;
    /**
     * @param options Optional tuning knobs (size limit, TTL, similarity matching).
     */
    constructor(options?: CacheOptions);
    /**
     * Generate cache key from messages
     * (keyed per model/provider as well, judging by the `get`/`set` signatures).
     */
    private generateKey;
    /**
     * Check if cache entry is still valid
     * (i.e. not past its TTL — see `CacheOptions.ttl`).
     */
    private isValid;
    /**
     * Get cached response if available
     *
     * @param messages Conversation to look up.
     * @param model    Model identifier; part of the cache key.
     * @param provider Provider identifier; part of the cache key.
     * @returns The cached response text, or `null` on a cache miss.
     */
    get(messages: Message[], model: string, provider: string): string | null;
    /**
     * Store response in cache
     *
     * @param messages   Conversation that produced the response.
     * @param response   Response text to cache.
     * @param model      Model identifier; part of the cache key.
     * @param provider   Provider identifier; part of the cache key.
     * @param tokenCount Token count of the entry — feeds the `tokensSaved` statistic, presumably.
     */
    set(messages: Message[], response: string, model: string, provider: string, tokenCount: number): void;
    /**
     * Find semantically similar cached entry
     * (only consulted when `enableSimilarity` is set, presumably — verify in implementation).
     */
    private findSimilar;
    /**
     * Calculate simple similarity between two strings
     * (compared against `CacheOptions.threshold`).
     */
    private calculateSimilarity;
    /**
     * Evict oldest entries
     * (invoked when the cache exceeds `maxSize`, presumably).
     */
    private evictOldest;
    /**
     * Clear expired entries
     * (removes entries whose TTL has elapsed; safe to call periodically).
     */
    clearExpired(): void;
    /**
     * Get cache statistics
     *
     * @returns A snapshot of counters and derived metrics. `hitRate` is
     * presumably hits / (hits + misses); `costSaved` units (e.g. USD) are not
     * stated in this declaration — confirm against the implementation.
     */
    getStats(): {
        totalEntries: number;
        avgHits: number;
        hitRate: number;
        memorySizeEstimate: number;
        hits: number;
        misses: number;
        tokensSaved: number;
        costSaved: number;
    };
    /**
     * Estimate memory usage
     * (backs `memorySizeEstimate` in `getStats()`; an estimate, not an exact byte count).
     */
    private estimateMemorySize;
    /**
     * Clear all cache
     * (NOTE(review): whether `stats` counters are also reset is not visible here — confirm).
     */
    clear(): void;
    /**
     * Export cache for persistence
     *
     * @returns A serialized snapshot of the cache, suitable for `import()`.
     */
    export(): string;
    /**
     * Import cache from persistence
     *
     * @param data A string previously produced by `export()`.
     */
    import(data: string): void;
}
/**
 * Anthropic-specific prompt caching
 * Implements Anthropic's beta caching feature
 */
export declare class AnthropicPromptCache {
    /** Logger instance used for cache diagnostics. */
    private logger;
    /** Tracks where cache-control breakpoints are placed in the message list, presumably. */
    private cacheBreakpoints;
    constructor();
    /**
     * Prepare messages with cache control for Anthropic API
     *
     * @param messages Conversation to annotate with Anthropic `cache_control` markers.
     * @returns Provider-shaped message objects.
     *   NOTE(review): `any[]` loses all type safety for callers; consider declaring
     *   the Anthropic message shape in the implementation so it propagates here.
     */
    prepareCachedMessages(messages: Message[]): any[];
    /**
     * Calculate potential token savings from caching
     *
     * @param messages     Conversation being sent.
     * @param cachedTokens Number of tokens already covered by the provider-side cache.
     * @returns Estimated savings — presumably a token count; confirm units against the implementation.
     */
    estimateSavings(messages: Message[], cachedTokens: number): number;
}
export {};
//# sourceMappingURL=prompt-cache.d.ts.map