import type { GuardianResult } from "../types/guardian.js";

/**
 * Configuration for a single LLM call.
 *
 * NOTE(review): semantics of `timeoutMs` / retry fields are defined by the
 * implementation (llm-client.ts), not visible here — confirm against it.
 */
export interface LLMConfig {
    /** Which backend to call. */
    provider: "openai" | "anthropic";
    /** API key for the selected provider. */
    apiKey: string;
    /** Provider-specific model identifier; implementation default applies when omitted. */
    model?: string;
    /** Request timeout in milliseconds. */
    timeoutMs?: number;
    /** Expected response format; presumably influences prompt/parse behavior — verify in implementation. */
    responseFormat?: "text" | "json";
    /** Retry/backoff tuning; all fields optional, implementation defaults apply. */
    retryConfig?: {
        maxRetries?: number;
        initialDelayMs?: number;
        maxDelayMs?: number;
        backoffMultiplier?: number;
    };
}

/** One chat-style message in the conversation sent to the model. */
export interface LLMMessage {
    role: "system" | "user" | "assistant";
    content: string;
}

/** Result of a completed LLM call. */
export interface LLMResponse {
    /** Raw text content returned by the model. */
    content: string;
    /** Token accounting, when the provider reports it. */
    usage?: {
        promptTokens: number;
        completionTokens: number;
        totalTokens: number;
    };
}

/**
 * Invoke the configured LLM with the given messages.
 *
 * Fixed: the original declaration was `Promise` with no type argument, which
 * does not compile (`Generic type 'Promise<T>' requires 1 type argument(s)`).
 * `LLMResponse` is the response shape declared in this file.
 */
export declare function callLLM(config: LLMConfig, messages: LLMMessage[]): Promise<LLMResponse>;

/**
 * Parse a guardian JSON payload out of raw model output.
 * Returns null when the content cannot be parsed into a GuardianResult.
 */
export declare function parseGuardianJSON(content: string): GuardianResult | null;

/**
 * Look up the API key for a provider (presumably from the environment —
 * confirm in implementation). Returns null when none is configured.
 */
export declare function getApiKey(provider: "openai" | "anthropic"): string | null;
//# sourceMappingURL=llm-client.d.ts.map