import { LLM, BaseLLMParams } from "./base.js";

/**
 * Penalty configuration accepted by the AI21 complete endpoint.
 * `scale` sets the penalty strength; the `applyTo*` flags select which
 * token categories the penalty affects.
 */
export type AI21PenaltyData = {
  scale: number;
  applyToWhitespaces: boolean;
  applyToPunctuations: boolean;
  applyToNumbers: boolean;
  applyToStopwords: boolean;
  applyToEmojis: boolean;
};

/**
 * Input parameters for constructing an {@link AI21} model wrapper.
 * All fields are optional; unset fields fall back to the class defaults.
 */
export interface AI21Input extends BaseLLMParams {
  /** API key for the AI21 service; may also come from the environment. */
  ai21ApiKey?: string;
  /** Model name to invoke on the AI21 API. */
  model?: string;
  temperature?: number;
  minTokens?: number;
  maxTokens?: number;
  topP?: number;
  presencePenalty?: AI21PenaltyData;
  countPenalty?: AI21PenaltyData;
  frequencyPenalty?: AI21PenaltyData;
  /** Number of completions to request. */
  numResults?: number;
  /** Map from token string to logit bias value. */
  logitBias?: Record<string, number>;
  /** Stop sequences that end generation. */
  stop?: string[];
  /** Override for the AI21 API base URL. */
  baseUrl?: string;
}

/**
 * LLM wrapper around AI21's complete endpoint.
 */
export declare class AI21 extends LLM implements AI21Input {
  model: string;
  temperature: number;
  maxTokens: number;
  minTokens: number;
  topP: number;
  presencePenalty: AI21PenaltyData;
  countPenalty: AI21PenaltyData;
  frequencyPenalty: AI21PenaltyData;
  numResults: number;
  logitBias?: Record<string, number>;
  ai21ApiKey?: string;
  stop?: string[];
  baseUrl?: string;
  constructor(fields?: AI21Input);
  /** Throws if required environment configuration (e.g. the API key) is missing. */
  validateEnvironment(): void;
  /** Default penalty configuration used when none is supplied. */
  static getDefaultAI21PenaltyData(): AI21PenaltyData;
  /** Get the type of LLM. */
  _llmType(): string;
  /** Get the default parameters for calling AI21 API. */
  get defaultParams(): {
    temperature: number;
    maxTokens: number;
    minTokens: number;
    topP: number;
    presencePenalty: AI21PenaltyData;
    countPenalty: AI21PenaltyData;
    frequencyPenalty: AI21PenaltyData;
    numResults: number;
    logitBias: Record<string, number> | undefined;
  };
  /** Get the identifying parameters for this LLM. */
  get identifyingParams(): {
    model: string;
    temperature: number;
    maxTokens: number;
    minTokens: number;
    topP: number;
    presencePenalty: AI21PenaltyData;
    countPenalty: AI21PenaltyData;
    frequencyPenalty: AI21PenaltyData;
    numResults: number;
    logitBias: Record<string, number> | undefined;
  };
  /**
   * Call out to AI21's complete endpoint.
   *
   * @param prompt - The prompt to pass into the model.
   * @param options - Parsed call options (may include stop words to use when generating).
   * @returns The string generated by the model.
   *
   * @example
   * ```ts
   * let response = ai21._call("Tell me a joke.");
   * ```
   */
  _call(prompt: string, options: this["ParsedCallOptions"]): Promise<string>;
}