import { Transform } from 'stream';

/**
 * Internal scanner states for the tokenizer.
 * NOTE(review): the state names (control words, group delimiters, HEX/BINARY
 * payloads) suggest an RTF-style lexer — confirm against the implementation.
 */
export declare const enum Mode {
    NORMAL = 0,
    CONTROL_START = 1,
    CONTROL_WORD = 2,
    CONTROL_PARAM = 3,
    BINARY = 4,
    HEX = 5
}

/** Discriminant tag shared by every token variant (see {@link Token}). */
export declare const enum TokenType {
    GROUP_START = 0,
    GROUP_END = 1,
    CONTROL = 2,
    TEXT = 3
}

/**
 * Common shape of all tokens. The optional fields here are tightened to
 * required fields by the specific variants below.
 */
export interface BaseToken {
    type: TokenType;
    word?: string;
    data?: Buffer;
    length?: number;
    param?: number;
}

/** Emitted when a group opens. */
export interface GroupStartToken extends BaseToken {
    type: TokenType.GROUP_START;
}

/** Emitted when a group closes. */
export interface GroupEndToken extends BaseToken {
    type: TokenType.GROUP_END;
}

/** Carries raw text bytes; `data` and `length` are required for this variant. */
export interface TextToken extends BaseToken {
    type: TokenType.TEXT;
    data: Buffer;
    length: number;
}

/** A control word; `word` is required, `param` stays optional (from BaseToken). */
export interface ControlToken extends BaseToken {
    type: TokenType.CONTROL;
    word: string;
}

/** Discriminated union of all token variants, keyed on `type`. */
export declare type Token = GroupStartToken | GroupEndToken | TextToken | ControlToken;

/**
 * Transform stream that converts an incoming byte/string stream into
 * {@link Token} objects.
 *
 * NOTE(review): method bodies are not visible from this declaration file —
 * the per-member notes below are inferred from names/signatures only.
 */
export declare class Tokenize extends Transform {
    /** Current scanner state; presumably starts in Mode.NORMAL — confirm. */
    protected _mode: Mode | undefined;
    /** Token currently being accumulated, if any. */
    protected _token: Token | null | undefined;
    /** Remaining byte count expected while in Mode.BINARY — confirm. */
    protected _expectedBinaryBytes: number;
    /** Hex digits consumed so far while in Mode.HEX — confirm. */
    protected _readHexDigitsCount: number;
    /** Accumulator for a control word's numeric parameter text — confirm. */
    protected _paramStr: string;
    constructor();
    /** Pushes the currently accumulated token downstream, if one exists. */
    _flushToken(): void;
    _handleSpecialOrPush(): void;
    /** stream.Transform hook: tokenizes `chunk`, then invokes `cb`. */
    _transform(chunk: Buffer | string, encoding: string | undefined, cb: (err?: any) => void): void;
    /** Internal synchronous tokenization core used by `_transform`. */
    __transform(chunk: Buffer | string, encoding: string | undefined): void;
    /** stream.Transform hook: flushes any pending token at end of input. */
    _flush(cb: () => void): void;
}
export default Tokenize;