import { AudioToTextProps, StreamMessageCallback } from '../types';
import { LanguageModel, LanguageModelUsage, ToolChoice, ToolSet } from 'ai';
import { VercelAi } from './vercelai';
/**
 * Configuration properties for VercelAiClient
 * @interface
 */
export type VercelAiClientConfigureProps = {
    /** API key for authentication */
    apiKey?: string;
    /** Model name to use */
    model?: string;
    /** System instructions for the model */
    instructions?: string;
    /** Temperature for controlling randomness (0-1) */
    temperature?: number;
    /** Top P sampling parameter (0-1) */
    topP?: number;
    /** Description of the assistant */
    description?: string;
    /** Version of the model */
    version?: string;
    /** Maximum tokens to generate */
    maxTokens?: number;
    /** Base URL for API requests */
    baseURL?: string;
    /** Tool choice configuration. `ToolChoice` is generic in the Vercel AI SDK;
     * `ToolSet` supplies the required type argument (bare `ToolChoice` does not compile). */
    toolChoice?: ToolChoice<ToolSet>;
    /** Maximum number of tool call steps */
    maxSteps?: number;
    /** Tool call streaming */
    toolCallStreaming?: boolean;
    /** Custom headers to include in API requests (header name -> header value) */
    headers?: Record<string, string>;
    /** Custom model context windows to extend or override the default mapping
     * (model name -> context window size in tokens) */
    modelContextWindows?: Record<string, number>;
};
/**
 * Abstract Vercel AI Client for client-side usage. Extends the VercelAi class to handle
 * LLM interactions directly from the browser using Vercel AI SDK instead of API endpoints.
* * @abstract * @extends {VercelAi} */ export declare abstract class VercelAiClient extends VercelAi { /** API key for authentication */ protected static apiKey: string; /** Model name to use */ protected static model: string; /** Base URL for API requests */ protected static baseURL: string; /** Custom headers for API requests */ protected static headers: Record; /** Language model instance */ llm: LanguageModel | null; /** Singleton instance */ protected static instance: VercelAiClient | null; /** * Gets the base URL for API requests * @abstract * @throws {Error} Always throws as this is an abstract class */ static getBaseURL(): void; /** * Validates that a model has been configured * @protected * @throws {Error} If model is not configured */ protected static checkModel(): void; /** * Validates that an API key has been configured * @protected * @throws {Error} If API key is not configured */ protected static checkApiKey(): void; /** * Protected constructor to prevent direct instantiation * @protected */ protected constructor(); /** * Configures the client with the provided settings * @param {VercelAiClientConfigureProps} config - Configuration options */ static configure(config: VercelAiClientConfigureProps): void; /** * Restarts the chat by clearing messages and resetting the LLM instance */ restart(): void; protected handleToolCallStart(toolCallId: string, toolName: string, args: Record, streamMessageCallback: StreamMessageCallback): Promise; protected handleToolCallFinish(toolCallId: string, result: unknown, streamMessageCallback: StreamMessageCallback, onToolFinished: (toolCallId: string, additionalData: unknown) => void): Promise; protected handleTextStreaming(textDelta: string, messageContent: string, streamMessageCallback: StreamMessageCallback): Promise; /** * Triggers a request to the Vercel AI API using the local LLM instance */ protected triggerRequest({ streamMessageCallback, onToolFinished, }: { streamMessageCallback: StreamMessageCallback; 
onToolFinished: (toolCallId: string, additionalData: unknown) => void; }): Promise<{ tokensUsed: LanguageModelUsage; }>; /** * Converts audio to text using the configured LLM * @param {AudioToTextProps} params - Audio conversion parameters * @returns {Promise} Transcribed text * @throws {Error} If LLM is not configured or audio blob is missing */ audioToText({ audioBlob, }: AudioToTextProps): Promise; temporaryPrompt({ prompt, temperature, }: { prompt: string; temperature?: number; }): Promise; }