import { type ITokenizer } from './ITokenizer.js';

/**
 * Tokenizer for Anthropic (Claude) models.
 *
 * Declaration-only: the implementation lives in the corresponding .js file.
 */
export declare class AnthropicTokenizer implements ITokenizer {
  /**
   * Count tokens for Anthropic models
   *
   * Anthropic uses a tokenizer similar to Claude's tiktoken-based tokenizer.
   * Since there's no official @anthropic-ai/tokenizer package available,
   * we use a character-based estimation that aligns with typical Claude tokenization:
   * - Average ~3.5-4 characters per token for English text
   * - More conservative estimate for code and special characters
   *
   * This estimation should be within 10-15% of actual token counts for most content.
   *
   * @param text The text to tokenize
   * @param _model The model name (currently unused; reserved for model-specific tokenization)
   * @returns Estimated token count
   */
  countTokens(text: string, _model: string): Promise<number>;
}