import { Embedding, Linear, RMSNorm, Tensor, TinyJit, type Variable } from '@jsgrad/jsgrad/base';

// NOTE(review): the original declaration file had every generic type argument
// on `Promise` and `Record` stripped (bare `Promise`/`Record` is compile error
// TS2314), most likely by a tool that ate `<...>` sequences — `TinyJit<...>`
// survived. They are restored below as `Promise<Tensor>` and
// `Record<string, Tensor>`, inferred from the surrounding signatures (the
// non-async `call`s return Tensor; the weight converters already return
// `{ [k: string]: Tensor }`). Confirm against the implementing .ts file.

/**
 * Precomputes the rotary-position-embedding (RoPE) frequency table.
 * @param dim   per-head dimension the frequencies are generated for
 * @param end   maximum sequence position to precompute
 * @param theta base of the geometric frequency progression (default presumably 10000 — confirm in impl)
 */
export declare const precompute_freqs_cis: (dim: number, end: number, theta?: number) => Tensor;

/** Complex-multiplication helper used when applying rotary embeddings (A against component tensors c, d). */
export declare const complex_mult: (A: Tensor, c: Tensor, d: Tensor) => Tensor;

/** Applies rotary position embeddings to the query/key tensors; returns the rotated pair [xq, xk]. */
export declare const apply_rotary_emb: (xq: Tensor, xk: Tensor, freqs_cis: Tensor) => [Tensor, Tensor];

/** Repeats each KV head `n_rep` times (grouped-query-attention broadcast of keys/values). */
export declare const repeat_kv: (x: Tensor, n_rep: number) => Tensor;

/**
 * Multi-head attention layer with rotary embeddings and an internal KV cache.
 * `n_kv_heads` may be undefined (constructor) — presumably falls back to `n_heads`; confirm in impl.
 */
export declare class Attention {
  n_heads: number;
  max_context: number;
  n_kv_heads: number;
  head_dim: number;
  n_rep: number;
  wq: Linear;
  wk: Linear;
  wv: Linear;
  wo: Linear;
  /** Optional fused QKV weight (set when wq/wk/wv are combined — confirm in impl). */
  wqkv?: Tensor;
  /** Lazily allocated key/value cache, sized by `max_context` — confirm in impl. */
  cache_kv?: Tensor;
  constructor(dim: number, n_heads: number, n_kv_heads: number | undefined, max_context: number, linear?: typeof Linear);
  /** Async because cache realization may require device work — `start_pos` is the decode offset. */
  call: (x: Tensor, start_pos: number | Variable, freqs_cis: Tensor, mask?: Tensor) => Promise<Tensor>;
}

/** Feed-forward block with three projections (w1/w3 gate + w2 down — SwiGLU-style; confirm in impl). */
export declare class FeedForward {
  w1: Linear;
  w2: Linear;
  w3: Linear;
  constructor(dim: number, hidden_dim: number, linear?: typeof Linear);
  call: (x: Tensor) => Tensor;
}

/** One transformer layer: RMS-normed attention plus RMS-normed feed-forward (pre-norm residual style — confirm in impl). */
export declare class TransformerBlock {
  attention: Attention;
  feed_forward: FeedForward;
  attention_norm: RMSNorm;
  ffn_norm: RMSNorm;
  constructor(dim: number, hidden_dim: number, n_heads: number, n_kv_heads: number | undefined, norm_eps: number, max_context: number, linear?: typeof Linear, feed_forward?: typeof FeedForward);
  call: (x: Tensor, start_pos: number | Variable, freqs_cis: Tensor, mask?: Tensor) => Promise<Tensor>;
}

/**
 * Samples a token from `logits`.
 * @param temp temperature (0 presumably means argmax — confirm in impl)
 * @param k    top-k cutoff
 * @param p    top-p (nucleus) cutoff
 * @param af   alpha-frequency penalty
 * @param ap   alpha-presence penalty
 */
export declare const sample: (logits: Tensor, temp: number, k: number, p: number, af: number, ap: number) => Tensor;

/** Decoder-only transformer (llama-style stack) with an optional TinyJit-compiled forward pass. */
export declare class Transformer {
  max_context: number;
  layers: TransformerBlock[];
  norm: RMSNorm;
  tok_embeddings: Embedding;
  output: Linear;
  freqs_cis: Tensor;
  /** Present when constructed with `jit: true`; same signature as `forward`. */
  forward_jit?: TinyJit<[tokens: Tensor, start_pos: number | Variable, temperature: number, top_k: number, top_p: number, alpha_f: number, alpha_p: number], Tensor>;
  constructor(dim: number, hidden_dim: number, n_heads: number, n_layers: number, norm_eps: number, vocab_size: number, linear?: typeof Linear, embedding?: typeof Embedding, n_kv_heads?: number, rope_theta?: number, max_context?: number, jit?: boolean, feed_forward?: typeof FeedForward);
  /** Full forward pass ending in sampling; all sampling knobs are required here. */
  forward: (tokens: Tensor, start_pos: number | Variable, temperature: number, top_k: number, top_p: number, alpha_f: number, alpha_p: number) => Promise<Tensor>;
  /** Convenience entry point: sampling parameters optional (defaults presumably match greedy-ish decoding — confirm in impl). */
  call: (tokens: Tensor, start_pos: number, temperature?: number, top_k?: number, top_p?: number, alpha_f?: number, alpha_p?: number) => Promise<Tensor>;
}

/** Remaps a HuggingFace checkpoint's weight names/layouts onto this model's parameters (optionally permuting per-layer QK weights — confirm in impl). */
export declare const convert_from_huggingface: (weights: Record<string, Tensor>, model: Transformer, n_heads: number, n_kv_heads: number, permute_layers?: boolean) => Record<string, Tensor>;

/** Remaps a GGUF checkpoint's weight names onto this model's parameters. */
export declare const convert_from_gguf: (weights: Record<string, Tensor>, model: Transformer) => {
  [k: string]: Tensor;
};

/** Converts bf16 weights to a supported dtype (target dtype not visible here — confirm in impl). */
export declare const fix_bf16: (weights: Record<string, Tensor>) => {
  [k: string]: Tensor;
};