import { GenerateOptions } from '../llms/base.js';
import { BaseMessage } from '../llms/primitives/message.js';
import { BaseMemory } from './base.js';
import { ChatLLM, ChatLLMOutput } from '../llms/chat.js';
import '../context.js';
import '../emitter-l0W9gC1A.js';
import '../internals/types.js';
import '../internals/helpers/guards.js';
import '../internals/serializable.js';
import '../internals/helpers/promise.js';
import '../errors.js';
import '../cache/base.js';
import 'promise-based-task';

// NOTE(review): this generated .d.ts reached us with every generic argument
// list stripped (`Partial;`, `WeakMap;`, `Promise;`, ...), which is invalid
// TypeScript. The arguments below are reconstructed from internal evidence
// (e.g. `ChatLLMOutput` and `GenerateOptions` are imported but were otherwise
// unused) — confirm against the original build output.

/**
 * Caller-overridable hooks used by {@link TokenMemory} for token accounting
 * and eviction.
 */
interface Handlers {
    /**
     * Returns the estimated token count for a message.
     * NOTE(review): the parameter is named `messages` (plural) but typed as a
     * single `BaseMessage` — likely a naming slip in the source; verify.
     */
    estimate: (messages: BaseMessage) => number;
    /** Selects which message to evict when capacity is exceeded. */
    removalSelector: (messages: BaseMessage[]) => BaseMessage;
}

/** Constructor options for {@link TokenMemory}. */
interface TokenMemoryInput {
    /** Chat model whose tokenizer/limits drive the memory's accounting. */
    llm: ChatLLM<ChatLLMOutput, GenerateOptions>;
    /** Hard token budget; when omitted it is presumably derived from the LLM — confirm. */
    maxTokens?: number;
    syncThreshold?: number;
    capacityThreshold?: number;
    /** Optional overrides for the default estimation/eviction hooks. */
    handlers?: Partial<Handlers>;
}

/** Per-message bookkeeping entry kept in the internal WeakMap. */
interface TokenByMessage {
    /** Tokens attributed to the message. */
    tokensCount: number;
    /** True when the count is an estimate not yet synced against the LLM. */
    dirty: boolean;
}

/**
 * A memory implementation that tracks an (estimated) token footprint per
 * message and evicts messages via {@link Handlers.removalSelector} to stay
 * within `maxTokens`.
 */
declare class TokenMemory extends BaseMemory {
    readonly messages: BaseMessage[];
    protected llm: ChatLLM<ChatLLMOutput, GenerateOptions>;
    protected threshold: number;
    protected syncThreshold: number;
    protected maxTokens: number | null;
    /** Token bookkeeping keyed weakly by message so entries die with messages. */
    protected tokensByMessage: WeakMap<BaseMessage, TokenByMessage>;
    readonly handlers: Handlers;
    constructor(config: TokenMemoryInput);
    /** Total tokens currently attributed to stored messages. */
    get tokensUsed(): number;
    /** True while any per-message count is still an unsynced estimate. */
    get isDirty(): boolean;
    add(message: BaseMessage, index?: number): Promise<void>;
    // NOTE(review): Promise result types were stripped; `boolean` (deleted or
    // not) assumed for delete, `void` elsewhere — confirm against BaseMemory.
    delete(message: BaseMessage): Promise<boolean>;
    /** Replaces dirty estimates with exact counts from the LLM tokenizer. */
    sync(): Promise<void>;
    reset(): void;
    stats(): {
        tokensUsed: number;
        maxTokens: number | null;
        messagesCount: number;
        isDirty: boolean;
    };
    createSnapshot(): {
        llm: ChatLLM<ChatLLMOutput, GenerateOptions>;
        maxTokens: number | null;
        threshold: number;
        syncThreshold: number;
        messages: BaseMessage[];
        handlers: Handlers;
        tokensByMessage: [BaseMessage, number][];
    };
    loadSnapshot({ tokensByMessage, ...state }: ReturnType<TokenMemory['createSnapshot']>): void;
}

export { type Handlers, TokenMemory, type TokenMemoryInput };