import type { Span } from '@opentelemetry/api';

/**
 * Static helper surface for OpenLIT instrumentation: token counting,
 * per-model cost estimation, OTel GenAI message formatting, span error
 * recording, and stream proxying. Declaration-only (implementations live
 * in the corresponding source module).
 */
export default class OpenLitHelper {
    /**
     * Divisor applied to raw token counts when computing costs.
     * NOTE(review): presumably pricing tables are per-1000 tokens — confirm
     * against the implementation of the cost methods below.
     */
    static readonly PROMPT_TOKEN_FACTOR = 1000;
    /** Token count for `text` under the tokenizer of the given OpenAI `model`. */
    static openaiTokens(text: string, model: string): number;
    /** Model-agnostic (approximate) token count for `text`. */
    static generalTokens(text: string): number;
    /**
     * Cost of a chat completion for `model`, given its entry in `pricingInfo`
     * and the prompt/completion token counts.
     */
    static getChatModelCost(model: string, pricingInfo: any, promptTokens: number, completionTokens: number): number;
    /** Cost of an embedding call for `model` given `promptTokens`. */
    static getEmbedModelCost(model: string, pricingInfo: any, promptTokens: number): number;
    /**
     * Cost of an image generation for `model`.
     * @param size    Requested image size (e.g. a dimensions string).
     * @param quality Quality tier; numeric per this signature.
     */
    static getImageModelCost(model: string, pricingInfo: any, size: string, quality: number): number;
    /** Cost of an audio request for `model`, derived from the `prompt` text. */
    static getAudioModelCost(model: string, pricingInfo: any, prompt: string): number;
    /**
     * Resolve pricing information, possibly fetching it remotely.
     * (Was bare `Promise` — TS2314; `Promise<any>` keeps all callers compatible.)
     */
    static fetchPricingInfo(pricingJson: any): Promise<any>;
    /**
     * Build OTel-spec input messages JSON string from provider messages array.
     * Format: [{"role": "user", "parts": [{"type": "text", "content": "..."}]}]
     */
    static buildInputMessages(messages: any[], system?: string): string;
    /**
     * Build OTel-spec output messages JSON string from provider response.
     * Format: [{"role": "assistant", "parts": [{"type": "text", "content": "..."}], "finish_reason": "stop"}]
     */
    static buildOutputMessages(text: string, finishReason: string, toolCalls?: any[]): string;
    /** Record `error` on `span` (exception event / error status). */
    static handleException(span: Span, error: Error): void;
    /**
     * Wrap `stream` in a proxy driven by `generatorFuncResponse`.
     * (Was bare `Promise` — TS2314; `Promise<any>` keeps all callers compatible.)
     */
    static createStreamProxy(stream: any, generatorFuncResponse: any): Promise<any>;
}