/**
 * @license
 * Copyright 2025 Vybestack LLC
 * SPDX-License-Identifier: Apache-2.0
 */
import type { ProviderManager } from '../ProviderManager.js';
import type { ConversationCache } from './ConversationCache.js';

/**
 * Structural ("duck-typed") view of an OpenAI-compatible provider.
 *
 * Only the members this module actually reads are declared; the index
 * signature admits the provider's remaining members without naming them.
 * NOTE(review): presumably matches the concrete OpenAIProvider class —
 * confirm against the provider implementation.
 */
type OpenAIProviderLike = {
  name: string;
  getCurrentModel?: () => string;
  getConversationCache?: () => ConversationCache;
  shouldUseResponses?: (model: string) => boolean;
  [key: string]: unknown;
};

/**
 * Snapshot of OpenAI provider state extracted from a ProviderManager.
 *
 * All members are nullable/defaulted so callers can consume the result
 * unconditionally even when no OpenAI provider is active.
 */
export interface OpenAIProviderInfo {
  /** The active OpenAI-compatible provider, or null when unavailable. */
  provider: OpenAIProviderLike | null;
  /** The provider's conversation cache, or null when unavailable. */
  conversationCache: ConversationCache | null;
  /** True when the current model is served via the OpenAI Responses API. */
  isResponsesAPI: boolean;
  /** Identifier of the currently selected model, or null when unknown. */
  currentModel: string | null;
  /** Token usage reported by the remote API; fields absent until reported. */
  remoteTokenInfo: {
    promptTokenCount?: number;
    candidatesTokenCount?: number;
    totalTokenCount?: number;
  };
}

/**
 * Retrieves OpenAI provider information from the current ProviderManager instance
 * @param providerManager The ProviderManager instance
 * @returns OpenAI provider info if available, null values otherwise
 */
export declare function getOpenAIProviderInfo(
  providerManager?: ProviderManager | null,
): OpenAIProviderInfo;

/**
 * Example usage:
 *
 * const openAIInfo = getOpenAIProviderInfo(providerManager);
 * if (openAIInfo.provider && openAIInfo.conversationCache) {
 *   // Access conversation cache
 *   const cachedMessages = openAIInfo.conversationCache.get(conversationId, parentId);
 *
 *   // Check if using Responses API
 *   if (openAIInfo.isResponsesAPI) {
 *     console.log('Using OpenAI Responses API');
 *   }
 * }
 */