import { Tensor } from '../tensor.ts';

/**
 * Base class for all optimizers: holds the parameter tensors, their device,
 * any internal state buffers, and the learning rate (stored as a `Tensor`).
 */
export declare class Optimizer {
    params: Tensor[];
    device: string | string[];
    buffers: Tensor[];
    /** Learning rate, kept as a Tensor (not a plain number). */
    lr: Tensor;
    constructor(params: Tensor[], lr: number);
    /**
     * Zeroes the gradients of all the parameters.
     */
    zero_grad: () => void;
    /**
     * Performs a single optimization step.
     *
     * NOTE(review): the original declaration was the bare generic `Promise`,
     * which does not compile ("Generic type 'Promise<T>' requires 1 type
     * argument(s)"). `Promise<void>` is assumed here; confirm against the
     * implementation — `schedule_step` returning `Tensor[]` suggests the
     * resolved value could also be `Tensor[]`.
     */
    step: () => Promise<void>;
    /**
     * Returns the tensors that need to be realized to perform a single optimization step.
     */
    schedule_step: () => Tensor[];
    /** Like `schedule_step`, but with gradients supplied explicitly. */
    schedule_step_with_grads: (grads: Tensor[]) => Tensor[];
}

/**
 * Combines multiple optimizers into one.
 */
export declare class OptimizerGroup extends Optimizer {
    optimizers: Optimizer[];
    constructor(optimizers: Optimizer[]);
    /** Returns the i-th wrapped optimizer. */
    get: (i: number) => Optimizer;
    // Returns void[] — consistent with mapping zero_grad over each wrapped
    // optimizer, though the implementation is not visible here.
    zero_grad: () => void[];
    schedule_step: () => Tensor[];
}

/**
 * Layer-wise Adaptive Rate Scaling (LARS) optimizer with optional momentum and weight decay.
 *
 * - Described: https://paperswithcode.com/method/lars
 * - Paper: https://arxiv.org/abs/1708.03888v3
 */
export declare class LARS extends Optimizer {
    momentum: number;
    weight_decay: number;
    nesterov: boolean;
    classic: boolean;
    tcoef: number;
    /** Per-parameter momentum buffers. */
    b: Tensor[];
    constructor(params: Tensor[], lr?: number, momentum?: number, weight_decay?: number, nesterov?: boolean, classic?: boolean, tcoef?: number);
    schedule_step_with_grads: (grads: Tensor[]) => Tensor[];
}

/**
 * Stochastic Gradient Descent — declared as LARS with the trust-ratio
 * coefficient (`tcoef`) dropped from the constructor.
 */
export declare class SGD extends LARS {
    constructor(params: Tensor[], lr?: number, momentum?: number, weight_decay?: number, nesterov?: boolean, classic?: boolean);
}

/**
 * LAMB optimizer with optional weight decay.
 *
 * - Described: https://paperswithcode.com/method/lamb
 * - Paper: https://arxiv.org/abs/1904.00962
 */
declare class LAMB extends Optimizer {
    b1: number;
    b2: number;
    eps: number;
    wd: number;
    /** When true, skips the LAMB trust-ratio, reducing to Adam-style updates — TODO confirm against implementation. */
    adam: boolean;
    /** Running powers of b1/b2 (bias-correction terms), kept as Tensors. */
    b1_t: Tensor;
    b2_t: Tensor;
    /** First-moment estimates, one per parameter. */
    m: Tensor[];
    /** Second-moment estimates, one per parameter. */
    v: Tensor[];
    constructor(params: Tensor[], lr?: number, b1?: number, b2?: number, eps?: number, weight_decay?: number, adam?: boolean);
    schedule_step_with_grads: (grads: Tensor[]) => Tensor[];
}

/**
 * AdamW optimizer with optional weight decay.
 *
 * - Described: https://paperswithcode.com/method/adamw
 * - Paper: https://arxiv.org/abs/1711.05101v3
 */
export declare class AdamW extends LAMB {
    constructor(params: Tensor[], lr?: number, b1?: number, b2?: number, eps?: number, weight_decay?: number);
}

/**
 * Adam optimizer.
 *
 * - Described: https://paperswithcode.com/method/adam
 * - Paper: https://arxiv.org/abs/1412.6980
 */
export declare class Adam extends LAMB {
    constructor(params: Tensor[], lr?: number, b1?: number, b2?: number, eps?: number);
}

export {};