import { ChatLogs, ChatMessage, Tokenizer, LlmApi, LlmModel } from '../index.js';
/**
 * Ambient declaration of a query helper that wraps an {@link LlmApi} together
 * with a {@link Tokenizer} and an accumulating {@link ChatLogs} transcript.
 *
 * Fix applied: the original declaration used bare `Promise` (no type
 * argument) on all three methods, which is invalid TypeScript (error 2314:
 * generic type 'Promise<T>' requires 1 type argument). Return types below are
 * reconstructed: `content`/`ask` return the model's text, and `response`
 * returns the raw message — `ChatMessage` was imported but otherwise unused,
 * which is why it is assumed to be `response`'s payload.
 */
export declare class LlmQuery {
    private readonly llm;
    private tokenizer;
    private logs;
    /**
     * @param llm - The LLM backend used to execute queries.
     * @param tokenizer - Tokenizer used for token accounting.
     * @param logs - Optional pre-existing chat transcript to continue from.
     */
    constructor(llm: LlmApi, tokenizer: Tokenizer, logs?: ChatLogs);
    /**
     * Resolves to the textual content of the model's reply.
     * @param opts - Optional response-token cap and model override.
     */
    content(opts?: {
        maxResponseTokens?: number;
        model?: LlmModel;
    }): Promise<string>;
    /**
     * Resolves to the full response message, or `undefined` when the model
     * produced none.
     * NOTE(review): return type inferred from the otherwise-unused
     * `ChatMessage` import — confirm against the implementation.
     * @param opts - Optional response-token cap and model override.
     */
    response(opts?: {
        maxResponseTokens?: number;
        model?: LlmModel;
    }): Promise<ChatMessage | undefined>;
    /**
     * Appends `question` to the transcript, queries the model, and resolves
     * to the reply text.
     * @param question - The user question to send.
     * @param opts - Optional response-token cap and model override.
     */
    ask(question: string, opts?: {
        maxResponseTokens?: number;
        model?: LlmModel;
    }): Promise<string>;
}