/**
 * @license
 * Copyright Google LLC
 * SPDX-License-Identifier: BSD-3-Clause
 */

/**
 * Receives tokenization events. Each `handle*` method is invoked
 * synchronously by the tokenizer as the corresponding token is recognized
 * in the input stream. String content may arrive in multiple
 * `handleStringMiddle` chunks between `handleStringStart` and
 * `handleStringEnd`, depending on how the input stream is chunked.
 */
export interface TokenHandler {
  handleNull(): void;
  handleBoolean(value: boolean): void;
  handleNumber(value: number): void;
  handleStringStart(): void;
  handleStringMiddle(value: string): void;
  handleStringEnd(): void;
  handleArrayStart(): void;
  handleArrayEnd(): void;
  handleObjectStart(): void;
  handleObjectEnd(): void;
}

/**
 * Read tokens from an async iterable of strings and forward them to the given
 * {@link TokenHandler}. The handler is invoked synchronously as tokens are
 * recognized. Throws if the input is not valid JSON, including if it has
 * trailing content.
 */
export declare function tokenize(
  stream: AsyncIterable<string>,
  handler: TokenHandler,
): Tokenizer;

/**
 * Incrementally tokenizes a stream of JSON text, dispatching each recognized
 * token to the supplied {@link TokenHandler}.
 */
export declare class Tokenizer {
  readonly input: Input;
  private readonly handler;
  private stack;
  private emittedTokens;
  constructor(stream: AsyncIterable<string>, handler: TokenHandler);
  /** Whether tokenization has consumed the entire input. */
  isDone(): boolean;
  // NOTE(review): the type argument was missing in the mangled source;
  // `Promise<void>` is assumed here — confirm against the implementation.
  pump(): Promise<void>;
  private tokenizeMore;
  private tokenizeValue;
  private tokenizeString;
  private tokenizeArrayStart;
  private tokenizeAfterArrayValue;
  private tokenizeObjectStart;
  private tokenizeAfterObjectKey;
  private tokenizeAfterObjectValue;
  private tokenizeBeforeObjectKey;
}

/**
 * A part of the input that has been unambiguously decoded.
 *
 * Note that, due to StringMiddleToken the same input may be tokenized in
 * multiple equivalent ways depending on how it's chunked up in the input
 * stream.
 *
 * Implementation note: every token has a `value`, though most are undefined.
 * This is to give all tokens the same shape, to aid VM optimizations.
 */
export declare const enum JsonTokenType {
  Null = 0,
  Boolean = 1,
  Number = 2,
  StringStart = 3,
  StringMiddle = 4,
  StringEnd = 5,
  ArrayStart = 6,
  ArrayEnd = 7,
  ObjectStart = 8,
  ObjectEnd = 9,
}

/** Returns a human-readable name for the given {@link JsonTokenType}. */
export declare function jsonTokenTypeToString(type: JsonTokenType): string;

/** A complete boolean literal. */
export interface BooleanToken {
  readonly type: JsonTokenType.Boolean;
  readonly value: boolean;
}

/** A complete number literal. */
export interface NumberToken {
  readonly type: JsonTokenType.Number;
  readonly value: number;
}

/**
 * Our input buffer.
 *
 * This was more feature rich when we interleaved awaits while tokenizing.
 * Now that we're doing all the work synchronously, it's a bit overkill.
 */
declare class Input {
  private buffer;
  private startIndex;
  bufferComplete: boolean;
  moreContentExpected: boolean;
  private stream;
  constructor(stream: AsyncIterable<string>);
  /** Number of characters currently available in the buffer. */
  get length(): number;
  /** Advances the read position by `len` characters. */
  advance(len: number): void;
  /** Returns the character at `offset` without consuming it, if available. */
  peek(offset: number): string | undefined;
  peekCharCode(offset: number): number;
  slice(start: number, end: number): string;
  commit(): void;
  remaining(): string;
  /**
   * Throws if there's any non-whitespace content left in the buffer or the
   * input stream.
   */
  // NOTE(review): type argument was missing in the mangled source; assumed
  // `Promise<void>` — confirm against the implementation.
  expectEndOfContent(): Promise<void>;
  /**
   * Tries to read more content into the buffer.
   *
   * Returns false if the stream is exhausted.
   */
  tryToExpandBuffer(): Promise<boolean>;
  skipPastWhitespace(): void;
  /**
   * If the buffer starts with `prefix`, consumes it and returns true.
   */
  tryToTakePrefix(prefix: string): boolean;
  /**
   * Tries to take `len` characters from the buffer.
   *
   * If there are fewer than `len` characters in the buffer, returns undefined.
   */
  tryToTake(len: number): string | undefined;
  /**
   * Tries to take a single character from the buffer and returns its code.
   *
   * If there are no characters in the buffer, returns undefined.
   */
  tryToTakeCharCode(): number | undefined;
  /**
   * Consumes and returns the input up to the first quote or backslash.
   *
   * If neither is found, consumes the entire buffer and returns it.
   *
   * Returns a tuple of the consumed content and a boolean indicating whether
   * the pattern was found.
   */
  takeUntilQuoteOrBackslash(): [string, boolean];
}
export {};