import { ChatCompletionRequestMessage, CreateChatCompletionRequest, ConfigurationParameters, CreateChatCompletionResponse } from "openai";
import { AzureOpenAIInput, OpenAICallOptions, OpenAIChatInput } from "../types/openai-types.js";
import type { StreamingAxiosConfiguration } from "../util/axios-types.js";
import { BaseLLMParams, LLM } from "./base.js";
import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
import { LLMResult, GenerationChunk } from "../schema/index.js";
export { OpenAIChatInput, AzureOpenAIInput };
/**
 * Per-call options accepted by {@link OpenAIChat} in addition to the
 * shared {@link OpenAICallOptions}.
 */
export interface OpenAIChatCallOptions extends OpenAICallOptions {
    /**
     * Index of the prompt this call corresponds to.
     * NOTE(review): semantics inferred from the name only — confirm against
     * the implementation before relying on it.
     */
    promptIndex?: number;
}
/**
 * Wrapper around OpenAI large language models that use the Chat endpoint.
 *
 * To use you should have the `openai` package installed, with the
 * `OPENAI_API_KEY` environment variable set.
 *
 * To use with Azure you should have the `openai` package installed, with the
 * `AZURE_OPENAI_API_KEY`,
 * `AZURE_OPENAI_API_INSTANCE_NAME`,
 * `AZURE_OPENAI_API_DEPLOYMENT_NAME`
 * and `AZURE_OPENAI_API_VERSION` environment variable set.
 *
 * @remarks
 * Any parameters that are valid to be passed to {@link
 * https://platform.openai.com/docs/api-reference/chat/create |
 * `openai.createCompletion`} can be passed through {@link modelKwargs}, even
 * if not explicitly available on this class.
* * @augments BaseLLM * @augments OpenAIInput * @augments AzureOpenAIChatInput */ export declare class OpenAIChat extends LLM implements OpenAIChatInput, AzureOpenAIInput { get callKeys(): (keyof OpenAIChatCallOptions)[]; lc_serializable: boolean; get lc_secrets(): { [key: string]: string; } | undefined; get lc_aliases(): Record; temperature: number; topP: number; frequencyPenalty: number; presencePenalty: number; n: number; logitBias?: Record; maxTokens?: number; modelName: string; prefixMessages?: ChatCompletionRequestMessage[]; modelKwargs?: OpenAIChatInput["modelKwargs"]; timeout?: number; stop?: string[]; streaming: boolean; openAIApiKey?: string; azureOpenAIApiVersion?: string; azureOpenAIApiKey?: string; azureOpenAIApiInstanceName?: string; azureOpenAIApiDeploymentName?: string; azureOpenAIBasePath?: string; private client; private clientConfig; constructor(fields?: Partial & Partial & BaseLLMParams & { configuration?: ConfigurationParameters; }, /** @deprecated */ configuration?: ConfigurationParameters); /** * Get the parameters used to invoke the model */ invocationParams(options?: this["ParsedCallOptions"]): Omit; /** @ignore */ _identifyingParams(): { apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise) | undefined; organization?: string | undefined; username?: string | undefined; password?: string | undefined; accessToken?: string | Promise | ((name?: string | undefined, scopes?: string[] | undefined) => string) | ((name?: string | undefined, scopes?: string[] | undefined) => Promise) | undefined; basePath?: string | undefined; baseOptions?: any; formDataCtor?: (new () => any) | undefined; function_call?: import("openai").CreateChatCompletionRequestFunctionCall | undefined; stop?: import("openai").CreateChatCompletionRequestStop | undefined; stream?: boolean | null | undefined; user?: string | undefined; functions?: import("openai").ChatCompletionFunctions[] | undefined; model: string; temperature?: number | null | 
undefined; top_p?: number | null | undefined; n?: number | null | undefined; max_tokens?: number | undefined; presence_penalty?: number | null | undefined; frequency_penalty?: number | null | undefined; logit_bias?: object | null | undefined; model_name: string; }; /** * Get the identifying parameters for the model */ identifyingParams(): { apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise) | undefined; organization?: string | undefined; username?: string | undefined; password?: string | undefined; accessToken?: string | Promise | ((name?: string | undefined, scopes?: string[] | undefined) => string) | ((name?: string | undefined, scopes?: string[] | undefined) => Promise) | undefined; basePath?: string | undefined; baseOptions?: any; formDataCtor?: (new () => any) | undefined; function_call?: import("openai").CreateChatCompletionRequestFunctionCall | undefined; stop?: import("openai").CreateChatCompletionRequestStop | undefined; stream?: boolean | null | undefined; user?: string | undefined; functions?: import("openai").ChatCompletionFunctions[] | undefined; model: string; temperature?: number | null | undefined; top_p?: number | null | undefined; n?: number | null | undefined; max_tokens?: number | undefined; presence_penalty?: number | null | undefined; frequency_penalty?: number | null | undefined; logit_bias?: object | null | undefined; model_name: string; }; private formatMessages; _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator; startStream(request: CreateChatCompletionRequest, options?: StreamingAxiosConfiguration): { next(): Promise; [Symbol.asyncIterator](): any; }; /** @ignore */ _call(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise; /** @ignore */ completionWithRetry(request: CreateChatCompletionRequest, options?: StreamingAxiosConfiguration): Promise; _llmType(): string; } /** * 
PromptLayer wrapper to OpenAIChat
 */
export declare class PromptLayerOpenAIChat extends OpenAIChat {
    get lc_secrets(): {
        [key: string]: string;
    } | undefined;
    lc_serializable: boolean;
    /** API key for the PromptLayer logging service. */
    promptLayerApiKey?: string;
    /** Tags attached to requests logged to PromptLayer. */
    plTags?: string[];
    returnPromptLayerId?: boolean;
    // NOTE(review): generic arguments were stripped in the original file
    // (`ConstructorParameters[0]`, bare `Promise`); reconstructed from the
    // base-class constructor and the imported result types — confirm upstream.
    constructor(fields?: ConstructorParameters<typeof OpenAIChat>[0] & {
        promptLayerApiKey?: string;
        plTags?: string[];
        returnPromptLayerId?: boolean;
    });
    completionWithRetry(request: CreateChatCompletionRequest, options?: StreamingAxiosConfiguration): Promise<CreateChatCompletionResponse>;
    _generate(prompts: string[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;
}