import { Records as FernRecords } from "../../api/resources/records/client/Client";
import { Flatfile } from "../..";
import { GetRecordsRequestOptions, JsonlRecord, WriteRecordsRequestOptions, WriteRecordsResponse, WriteStreamingOptions } from "./types";
interface RecordsOptions extends FernRecords.Options {
}
export declare class RecordsV2 {
    private readonly options;
    constructor(options?: RecordsOptions);
    /**
     * Retrieve records from a sheet in raw JSONL format.
     *
     * This method fetches all records at once and returns them as an array of
     * JsonlRecord objects, which contain the raw data structure from the API
     * including special fields like __k (record ID), __v (version), etc.
     *
     * @param sheetId - The ID of the sheet to retrieve records from
     * @param options - Optional request parameters for filtering, pagination, etc.
     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
     * @returns Promise that resolves to an array of JsonlRecord objects
     *
     * @example
     * ```typescript
     * const rawRecords = await recordsV2.getRaw('us_sh_123', {
     *     fields: ['firstName', 'lastName'],
     *     pageSize: 1000
     * });
     * rawRecords.forEach(record => {
     *     console.log(`Record ID: ${record.__k}`);
     *     console.log(`Field values:`, record);
     * });
     * ```
     */
    getRaw(sheetId: Flatfile.SheetId, options?: GetRecordsRequestOptions, requestOptions?: FernRecords.RequestOptions): Promise<JsonlRecord[]>;
    /**
     * Stream records from a sheet in raw JSONL format.
     *
     * This method provides an async generator that yields JsonlRecord objects
     * as they are received from the server. This is the most memory-efficient
     * way to process large datasets as records are yielded individually rather
     * than loading all records into memory at once.
     *
     * @param sheetId - The ID of the sheet to retrieve records from
     * @param options - Optional request parameters for filtering, pagination, etc.
     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
     * @returns AsyncGenerator that yields JsonlRecord objects
     *
     * @example
     * ```typescript
     * for await (const rawRecord of recordsV2.getRawStreaming('us_sh_123', {
     *     includeTimestamps: true
     * })) {
     *     console.log(`Record ID: ${rawRecord.__k}`);
     *     console.log(`Updated at: ${rawRecord.__u}`);
     *     // Process each record as it streams in
     * }
     * ```
     */
    getRawStreaming(sheetId: Flatfile.SheetId, options?: GetRecordsRequestOptions, requestOptions?: FernRecords.RequestOptions): AsyncGenerator<JsonlRecord, void, unknown>;
    /**
     * Stream JSONL response using ReadableStream (modern browsers)
     */
    private _streamJsonlResponse;
    /**
     * Parse JSONL text into an array of JsonlRecord objects
     */
    private _parseJsonlText;
    /**
     * Fallback JSONL processing for browsers without streaming support
     */
    private _fallbackJsonlResponse;
    /**
     * Write records to a sheet in raw JSONL format.
     *
     * This method takes an array of JsonlRecord objects and writes them to the specified sheet.
     * Records can be inserts (no __k field) or updates (with __k field for existing record ID).
     * Supports various write options like truncate, snapshot, and sheet targeting.
     *
     * @param records - Array of JsonlRecord objects to write
     * @param options - Write configuration options
     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
     * @returns Promise that resolves to WriteRecordsResponse with operation results
     *
     * @example
     * ```typescript
     * const records: JsonlRecord[] = [
     *     { firstName: 'John', lastName: 'Doe', __s: 'us_sh_123' },
     *     { __k: 'us_rc_456', firstName: 'Jane', lastName: 'Smith' } // Update existing
     * ];
     * const result = await recordsV2.writeRaw(records, {
     *     sheetId: 'us_sh_123',
     *     truncate: false
     * });
     * console.log(`Created: ${result.created}, Updated: ${result.updated}`);
     * ```
     */
    writeRaw(records: JsonlRecord[], options?: WriteRecordsRequestOptions, requestOptions?: FernRecords.RequestOptions): Promise<WriteRecordsResponse>;
    /**
     * Stream records to a sheet in raw JSONL format using HTTP body streaming.
     *
     * This method accepts an async generator/iterator of records and streams them
     * directly to the server using a ReadableStream as the HTTP request body.
     * This approach is memory efficient for large datasets as records are processed
     * and transmitted without loading all data into memory at once.
     *
     * The operation is atomic - all records are sent in a single HTTP request,
     * ensuring consistent write semantics. Records can be new inserts (without __k)
     * or updates to existing records (with __k field containing the record ID).
     *
     * @param recordsStream - Async generator/iterator that yields JsonlRecord objects
     * @param options - Write configuration options (sheetId, truncate, etc.)
     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
     * @returns Promise that resolves to WriteRecordsResponse with operation results
     *
     * @example
     * ```typescript
     * async function* generateRecords() {
     *     for (let i = 0; i < 100000; i++) {
     *         yield {
     *             firstName: `User${i}`,
     *             email: `user${i}@example.com`,
     *             __s: 'us_sh_123'
     *         };
     *     }
     * }
     *
     * const result = await recordsV2.writeRawStreaming(generateRecords(), {
     *     sheetId: 'us_sh_123',
     *     truncate: false
     * });
     * console.log(`Created: ${result.created}, Updated: ${result.updated}`);
     * ```
     */
    writeRawStreaming(recordsStream: AsyncIterable<JsonlRecord>, options?: WriteStreamingOptions, requestOptions?: FernRecords.RequestOptions): Promise<WriteRecordsResponse>;
    private _prepareHeaders;
    /**
     * Get authorization header from options
     */
    private _getAuthorizationHeader;
    /**
     * Build full URL from path
     */
    private _buildUrl;
    /**
     * Build query parameters string from options object
     */
    private _buildQueryParams;
    /**
     * Execute HTTP request with retry logic and timeout handling
     */
    private _executeRequest;
    /**
     * Parse error body from response
     */
    private _parseErrorBody;
    /**
     * Throw appropriate error based on HTTP status code
     */
    private _throwErrorForStatus;
}
export {};