import { Run, GetRunContext } from '../context.js';
import { FrameworkError } from '../errors.js';
import { Serializable } from '../internals/serializable.js';
import { OneOf } from '../internals/types.js';
import { C as Callback, E as Emitter } from '../emitter-l0W9gC1A.js';
import { BaseCache } from '../cache/base.js';
import { Task } from 'promise-based-task';
import '../internals/helpers/promise.js';
import '../internals/helpers/guards.js';

interface BaseLLMEvents<TInput = any, TOutput extends BaseLLMOutput = BaseLLMOutput> {
    newToken?: Callback<{
        value: TOutput;
        callbacks: {
            abort: () => void;
        };
    }>;
    success?: Callback<{
        value: TOutput;
    }>;
    start?: Callback<{
        input: TInput;
        options: unknown;
    }>;
    error?: Callback<{
        input: TInput;
        error: FrameworkError;
        options: unknown;
    }>;
    finish?: Callback<null>;
}
/**
 * @deprecated Use BaseLLMEvents instead
 */
type GenerateCallbacks = BaseLLMEvents;
type GuidedOptions = OneOf<[
    {
        json?: string | Record<string, any>;
    },
    {
        regex?: string;
    },
    {
        choice?: string[];
    },
    {
        grammar?: string;
    },
    {
        decoding_backend?: string;
    },
    {
        whitespace_pattern?: string;
    }
]>;
interface GenerateOptions {
    stream?: boolean;
    signal?: AbortSignal;
    guided?: GuidedOptions;
}
interface InternalGenerateOptions {
    signal?: AbortSignal;
}
interface StreamGenerateOptions {
    signal?: AbortSignal;
    guided?: GuidedOptions;
}
type AsyncStream<T, T2 = any> = AsyncGenerator<T, T2, any>;
declare class LLMError extends FrameworkError {
}
declare class LLMFatalError extends LLMError {
    constructor(message: string, errors?: Error[]);
}
declare class LLMOutputError extends LLMFatalError {
}
interface BaseLLMTokenizeOutput {
    tokensCount: number;
    tokens?: string[];
}
declare abstract class BaseLLMOutput extends Serializable {
    mergeImmutable<T extends BaseLLMOutput>(this: T, other: T): T;
    abstract merge(other: BaseLLMOutput): void;
    abstract getTextContent(): string;
    abstract toString(): string;
}
interface ExecutionOptions {
    maxRetries?: number;
}
interface EmbeddingOptions {
    signal?: AbortSignal;
}
interface EmbeddingOutput {
    embeddings: number[][];
}
interface LLMMeta {
    tokenLimit: number;
}
type LLMCache<TOutput extends BaseLLMOutput> = BaseCache<Task<TOutput[]>>;
declare abstract class BaseLLM<TInput, TOutput extends BaseLLMOutput, TGenerateOptions extends GenerateOptions = GenerateOptions> extends Serializable {
    readonly modelId: string;
    readonly executionOptions: ExecutionOptions;
    readonly cache: LLMCache<TOutput>;
    abstract readonly emitter: Emitter<BaseLLMEvents<TInput, TOutput>>;
    constructor(modelId: string, executionOptions?: ExecutionOptions, cache?: LLMCache<TOutput>);
    abstract meta(): Promise<LLMMeta>;
    abstract embed(input: TInput[], options?: EmbeddingOptions): Promise<EmbeddingOutput>;
    abstract tokenize(input: TInput): Promise<BaseLLMTokenizeOutput>;
    generate(input: TInput, options?: Partial<TGenerateOptions>): Run<TOutput, [input: TInput, options?: Partial<TGenerateOptions>]>;
    stream(input: TInput, options?: Partial<StreamGenerateOptions>): AsyncStream<TOutput>;
    protected abstract _generate(input: TInput, options: Partial<TGenerateOptions>, run: GetRunContext<typeof this>): Promise<TOutput>;
    protected abstract _stream(input: TInput, options: Partial<StreamGenerateOptions>, run: GetRunContext<typeof this>): AsyncStream<TOutput, void>;
    protected _mergeChunks(chunks: TOutput[]): TOutput;
    static cast<T extends BaseLLM<unknown, BaseLLMOutput>>(this: new (...args: any[]) => T, value: unknown): asserts value is T;
    static castInput<A>(this: new (...args: any[]) => BaseLLM<A, BaseLLMOutput>, value: unknown): asserts value is A;
    static castOutput<T extends BaseLLM<unknown, BaseLLMOutput>>(this: new (...args: any[]) => T, value: BaseLLMOutput): asserts value is InferLLMOutput<T>;
    createSnapshot(): {
        modelId: string;
        executionOptions: ExecutionOptions;
        emitter: Emitter<BaseLLMEvents<TInput, TOutput>>;
        cache: LLMCache<TOutput>;
    };
    loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void;
    protected createCacheAccessor(input: TInput, options: Partial<TGenerateOptions> | Partial<StreamGenerateOptions>, ...extra: any[]): Promise<{
        key: string;
        value: TOutput[] | undefined;
        resolve: <T2 extends TOutput>(value: T2 | T2[]) => void;
        reject: (error: Error) => Promise<void>;
    }>;
}
type AnyLLM<T = string> = BaseLLM<T, BaseLLMOutput>;
type InferLLMOutput<T> = T extends BaseLLM<any, infer A, any> ? A : never;
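/*
 * Implementation sketch (illustrative only; nothing in this comment is part of
 * this module's API). A concrete provider extends BaseLLM and fills in the
 * abstract members declared above. The names `EchoOutput` and `EchoLLM` are
 * hypothetical, and the `Emitter.root.child(...)` wiring is an assumption
 * about the runtime emitter API rather than something declared in this file:
 *
 *   class EchoOutput extends BaseLLMOutput {
 *     constructor(public text: string) { super(); }
 *     merge(other: BaseLLMOutput): void { this.text += other.getTextContent(); }
 *     getTextContent(): string { return this.text; }
 *     toString(): string { return this.getTextContent(); }
 *   }
 *
 *   class EchoLLM extends BaseLLM<string, EchoOutput> {
 *     readonly emitter = Emitter.root.child<BaseLLMEvents<string, EchoOutput>>({
 *       namespace: ["llm", "echo"], // assumed child() signature
 *       creator: this,
 *     });
 *     async meta() { return { tokenLimit: 4096 }; }
 *     async embed(): Promise<EmbeddingOutput> { return { embeddings: [] }; }
 *     async tokenize(input: string) { return { tokensCount: input.split(/\s+/).length }; }
 *     protected async _generate(input: string) { return new EchoOutput(input); }
 *     protected async *_stream(input: string): AsyncStream<EchoOutput, void> {
 *       for (const word of input.split(" ")) yield new EchoOutput(word);
 *     }
 *   }
 */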

export { type AnyLLM, type AsyncStream, BaseLLM, type BaseLLMEvents, BaseLLMOutput, type BaseLLMTokenizeOutput, type EmbeddingOptions, type EmbeddingOutput, type ExecutionOptions, type GenerateCallbacks, type GenerateOptions, type GuidedOptions, type InferLLMOutput, type InternalGenerateOptions, type LLMCache, LLMError, LLMFatalError, type LLMMeta, LLMOutputError, type StreamGenerateOptions };
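/*
 * Usage sketch for the public surface above (assumes the hypothetical EchoLLM
 * from the earlier note; the `.observe(...)` call on the returned Run is an
 * assumption about the framework's Run API and is not declared in this file):
 *
 *   const llm = new EchoLLM("echo-model");
 *
 *   // One-shot generation; event names and payloads follow BaseLLMEvents.
 *   const output = await llm
 *     .generate("Hello world", { guided: { choice: ["yes", "no"] } })
 *     .observe((emitter) => {
 *       emitter.on("newToken", ({ value }) => console.log(value.getTextContent()));
 *     });
 *   console.log(output.getTextContent());
 *
 *   // Streaming; each chunk is a TOutput instance, mergeable via merge()/mergeImmutable().
 *   for await (const chunk of llm.stream("Hello world", { signal: AbortSignal.timeout(5_000) })) {
 *     process.stdout.write(chunk.getTextContent());
 *   }
 */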