import { LLM as BaseLLM, AssistantMessage, ContinueConversationOptions, ToolDefinition, Resource } from 'wirejs-resources';

/**
 * LLM resource that extends the base `wirejs-resources` LLM.
 *
 * The private member names (`bedrockClient`, `invokeBedrock`, `streamBedrock`,
 * `getModelId`, `getRegionalPrefix`) indicate an AWS Bedrock-backed
 * implementation — the implementation itself is not visible from this
 * declaration file, so the exact semantics of each private member are
 * documented by the `.ts` source, not here.
 */
export declare class LLM extends BaseLLM {
    private bedrockClient;
    /**
     * @param scope   Parent resource or scope identifier.
     * @param id      Logical id of this resource within its scope.
     * @param options Model list is required; system prompt, target context
     *                size, and tool definitions are optional.
     */
    constructor(scope: Resource | string, id: string, options: {
        models: string[];
        systemPrompt?: string;
        targetContextSize?: number;
        tools?: ToolDefinition[];
    });
    private createBedrockInstructionMessage;
    private convertToBedrockFormat;
    private getModelAliases;
    private getRegionalPrefix;
    private getModelId;
    private invokeBedrock;
    private streamBedrock;
    private invokeModel;
    /**
     * Continue an in-progress conversation.
     *
     * NOTE(review): the original declaration returned a bare `Promise`, which
     * is invalid TypeScript (the generic requires a type argument). The
     * `AssistantMessage` import was otherwise unused, so `Promise<AssistantMessage>`
     * is the evident intent — confirm against the implementing `.ts` file.
     */
    continueConversation({ history, onChunk, timeoutSeconds, systemPrompt, models, tools, }: ContinueConversationOptions): Promise<AssistantMessage>;
}