import { VercelAiClient, VercelAiClientConfigureProps } from './vercelai-client';
import type { LanguageModelV1 } from 'ai';
/**
 * Configuration for {@link OllamaAssistant.configure}: the base client's
 * props plus Ollama-specific options.
 */
type ConfigureProps = {
    /** Base URL of the Ollama server endpoint. */
    baseURL?: string;
    /** Whether to send prompts in raw mode (no template applied). */
    raw?: boolean;
} & VercelAiClientConfigureProps;
/** Per-model chat settings forwarded to the Ollama provider factory. */
type OllamaChatSettings = {
    raw?: boolean;
    simulateStreaming?: boolean;
    stream?: boolean;
};
/** Factory producing a language model instance for a given Ollama model name. */
type OllamaProvider = (model: string, settings?: OllamaChatSettings) => LanguageModelV1;
/**
 * Ollama Assistant LLM for Client only.
 *
 * Singleton (`private constructor` + static `instance`) specialization of
 * {@link VercelAiClient} backed by an Ollama provider.
 */
export declare class OllamaAssistant extends VercelAiClient {
    protected static baseURL: string;
    protected static raw: boolean;
    /** Singleton instance; `null` until first {@link getInstance} call. */
    protected static instance: OllamaAssistant | null;
    protected providerInstance: OllamaProvider | null;
    protected static checkBaseURL(): void;
    static getBaseURL(): string;
    private static loadModule;
    // FIX: bare `Promise` is invalid TS (generic requires a type argument).
    // NOTE(review): assumed boolean (connection success flag) — confirm
    // against the implementation.
    static testConnection(apiKey: string, model: string): Promise<boolean>;
    static configure(config: ConfigureProps): void;
    private constructor();
    private initializeProvider;
    // FIX: bare `Promise` is invalid TS; the singleton accessor resolves to
    // the class instance (matches the static `instance` field above).
    static getInstance(): Promise<OllamaAssistant>;
    restart(): void;
}
export {};