import { BaseMessage, BasePromptValue, ChatResult, BaseMessageChunk, LLMResult, ChatGenerationChunk } from "../schema/index.js";
import { BaseLanguageModel, BaseLanguageModelCallOptions, BaseLanguageModelInput, BaseLanguageModelParams } from "../base_language/index.js";
import { CallbackManagerForLLMRun, Callbacks } from "../callbacks/manager.js";
import { RunnableConfig } from "../schema/runnable.js";
export type SerializedChatModel = {
    _model: string;
    _type: string;
} & Record<string, any>;
export type SerializedLLM = {
    _model: string;
    _type: string;
} & Record<string, any>;
export type BaseChatModelParams = BaseLanguageModelParams;
export type BaseChatModelCallOptions = BaseLanguageModelCallOptions;
export declare function createChatMessageChunkEncoderStream(): TransformStream<BaseMessageChunk, Uint8Array>;
export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions> extends BaseLanguageModel<BaseMessageChunk, CallOptions> {
    ParsedCallOptions: Omit<CallOptions, keyof RunnableConfig>;
    lc_namespace: string[];
    constructor(fields: BaseChatModelParams);
    abstract _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
    protected _separateRunnableConfigFromCallOptions(options: CallOptions): [RunnableConfig, this["ParsedCallOptions"]];
    invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<BaseMessageChunk>;
    _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
    _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<BaseMessageChunk>;
    generate(messages: BaseMessage[][], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;
    /**
     * Get the parameters used to invoke the model
     */
    invocationParams(_options?: this["ParsedCallOptions"]): any;
    _modelType(): string;
    abstract _llmType(): string;
    generatePrompt(promptValues: BasePromptValue[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;
    abstract _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
    call(messages: BaseMessage[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
    callPrompt(promptValue: BasePromptValue, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
    predictMessages(messages: BaseMessage[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
    predict(text: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;
}
export declare abstract class SimpleChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions> extends BaseChatModel<CallOptions> {
    abstract _call(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
}
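/**
 * Usage sketch: the simplest way to build a custom chat model on these
 * declarations is to subclass SimpleChatModel, implementing only `_call`
 * and `_llmType`; SimpleChatModel derives `_generate` from `_call`, and
 * BaseChatModel supplies `call`, `invoke`, `generate`, and the predict
 * helpers on top. The class name `ParrotChatModel` and its echo behavior
 * below are hypothetical, for illustration only.
 *
 * ```typescript
 * import { SimpleChatModel } from "langchain/chat_models/base";
 * import { BaseMessage, HumanMessage, LLMResult } from "langchain/schema";
 *
 * // Hypothetical model that parrots the last message back to the caller.
 * class ParrotChatModel extends SimpleChatModel {
 *   _llmType(): string {
 *     return "parrot";
 *   }
 *
 *   _combineLLMOutput(): LLMResult["llmOutput"] {
 *     return {};
 *   }
 *
 *   // Echo the content of the most recent message as the reply.
 *   async _call(messages: BaseMessage[]): Promise<string> {
 *     return messages[messages.length - 1].content;
 *   }
 * }
 *
 * const model = new ParrotChatModel({});
 * const reply = await model.call([new HumanMessage("Polly wants a cracker")]);
 * ```
 */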