interface CacheImpl { defaultQueryTTL: number; getAll(): Promise<Record<string, unknown>>; get: <T>(key: string) => Promise<T | null>; set: <T>(key: string, value: T) => Promise<void>; delete: (key: string) => Promise<void>; clear: () => Promise<void>; } interface SimpleCacheOptions { max?: number; defaultQueryTTL?: number; } declare class SimpleCache implements CacheImpl { #private; capacity: number; defaultQueryTTL: number; constructor(options?: SimpleCacheOptions); getAll(): Promise<Record<string, unknown>>; get<T>(key: string): Promise<T | null>; set<T>(key: string, value: T): Promise<void>; delete(key: string): Promise<void>; clear(): Promise<void>; } declare abstract class XataPlugin { abstract build(options: XataPluginOptions): unknown; } type XataPluginOptions = ApiExtraProps & { cache: CacheImpl; host: HostProvider; tables: Table[]; branch: string; }; type AttributeDictionary = Record<string, string | number | boolean | undefined>; type TraceFunction = <T>(name: string, fn: (options: { name?: string; setAttributes: (attrs: AttributeDictionary) => void; }) => T, options?: AttributeDictionary) => Promise<T>; type RequestInit = { body?: any; headers?: Record<string, string>; method?: string; signal?: any; }; type Response = { ok: boolean; status: number; url: string; json(): Promise<any>; text(): Promise<string>; blob(): Promise<Blob>; headers?: { get(name: string): string | null; }; }; type FetchImpl = (url: string, init?: RequestInit) => Promise<Response>; /** * Generated by @openapi-codegen * * @version 1.0 */ /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. * * @maxLength 511 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+:[a-zA-Z0-9_\-~]+ */ type DBBranchName = string; type ApplyMigrationResponse = { /** * The id of the migration job */ jobID: string; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type TableName = string; type MigrationJobType = 'apply' | 'start' | 'complete' | 'rollback'; type MigrationJobStatus = 'pending' | 'in_progress' | 'completed' | 'failed'; type MigrationJobStatusResponse = { /** * The id of the migration job */ jobID: string; /** * The type of the migration job */ type: MigrationJobType; /** * The status of the migration job */ status: MigrationJobStatus; /** * The error message associated with the migration job */ error?: string; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type MigrationJobID = string; type MigrationType = 'pgroll' | 'inferred'; type MigrationHistoryItem = { /** * The name of the migration */ name: string; /** * The pgroll migration that was applied */ migration: string; /** * The timestamp at which the migration was started * * @format date-time */ startedAt: string; /** * The name of the parent migration, if any */ parent?: string; /** * Whether the migration is completed or not */ done: boolean; /** * The type of the migration */ migrationType: MigrationType; }; type MigrationHistoryResponse = { /** * The migrations that have been applied to the branch */ migrations: MigrationHistoryItem[]; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type DBName$1 = string; /** * @format date-time * @x-go-type string */ type DateTime$1 = string; type Branch = { name: string; /** * The cluster where this branch resides.
Value of 'shared-cluster' for branches in shared clusters * * @minLength 1 */ clusterID?: string; createdAt: DateTime$1; }; type ListBranchesResponse = { databaseName: string; branches: Branch[]; }; type DatabaseSettings = { searchEnabled: boolean; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type BranchName$1 = string; /** * @example {"repository":"github.com/my/repository","branch":"feature-login","stage":"testing","labels":["epic-100"]} * @x-go-type xata.BranchMetadata */ type BranchMetadata$1 = { /** * @minLength 1 */ repository?: string; branch?: BranchName$1; /** * @minLength 1 */ stage?: string; labels?: string[]; }; type StartedFromMetadata = { branchName: BranchName$1; dbBranchID: string; migrationID: string; }; type ColumnLink = { table: string; }; type ColumnVector = { /** * @maximum 10000 * @minimum 2 */ dimension: number; }; type ColumnFile = { defaultPublicAccess?: boolean; }; type Column = { name: string; type: string; link?: ColumnLink; vector?: ColumnVector; file?: ColumnFile; ['file[]']?: ColumnFile; notNull?: boolean; defaultValue?: string; unique?: boolean; }; type RevLink = { table: string; column: string; }; type Table = { id?: string; name: TableName; columns: Column[]; revLinks?: RevLink[]; }; /** * @x-go-type xata.Schema */ type Schema = { tables: Table[]; tablesOrder?: string[]; }; type DBBranch = { databaseName: DBName$1; branchName: BranchName$1; createdAt: DateTime$1; id: string; /** * The cluster where this branch resides. Value of 'shared-cluster' for branches in shared clusters * * @minLength 1 * @x-internal true */ clusterID?: string; version: number; lastMigrationID: string; metadata?: BranchMetadata$1; startedFrom?: StartedFromMetadata; schema: Schema; }; type MigrationStatus$1 = 'completed' | 'pending' | 'failed'; type BranchSchema = { name: string; tables: { [key: string]: { oid: string; name: string; xataCompatible: boolean; comment: string; columns: { [key: string]: { name: string; type: string; ['default']: string | null; nullable: boolean; unique: boolean; comment: string; }; }; indexes: { [key: string]: { name: string; unique: boolean; columns: string[]; }; }; primaryKey: string[]; foreignKeys: { [key: string]: { name: string; columns: string[]; referencedTable: string; referencedColumns: string[]; }; }; checkConstraints: { [key: string]: { name: string; columns: string[]; definition: string; }; }; uniqueConstraints: { [key: string]: { name: string; columns: string[]; }; }; }; }; }; type BranchWithCopyID = { branchName: BranchName$1; dbBranchID: string; copyID: string; }; type MetricsDatapoint = { timestamp: string; value: number; }; type MetricsLatency = { p50?: MetricsDatapoint[]; p90?: MetricsDatapoint[]; }; type ListGitBranchesResponse = { mapping: { gitBranch: string; xataBranch: string; }[]; }; type ColumnMigration = { old: Column; ['new']: Column; }; type TableMigration = { newColumns?: { [key: string]: Column; }; removedColumns?: string[]; modifiedColumns?: ColumnMigration[]; newColumnOrder: string[]; }; /** * @example {"newName":"newName","oldName":"oldName"} */ type TableRename = { /** * @minLength 1 */ newName: string; /** * @minLength 1 */ oldName: string; }; type BranchMigration = { id?: string; parentID?: string; status: string; title?: string; lastGitRevision?: string; localChanges: boolean; createdAt?: DateTime$1; newTables?: { [key: string]: Table; }; removedTables?: string[]; tableMigrations?: { [key: string]: TableMigration; }; newTableOrder: string[]; renamedTables?: TableRename[]; }; /** * 
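* For illustration, assuming hypothetical columns `name`, `age` and `email`, a valid filter
* combining top-level operators with per-column predicates might look like:
*
* @example {"$any":[{"name":{"$contains":"fruit"}},{"age":{"$gt":21}}],"$exists":"email"}
*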
@minProperties 1 */ type FilterExpression = { $exists?: string; $existsNot?: string; $any?: FilterList; $all?: FilterList; $none?: FilterList; $not?: FilterList; } & { [key: string]: FilterColumn; }; type FilterList = FilterExpression | FilterExpression[]; type FilterValue = number | string | boolean; type FilterPredicate = FilterValue | FilterPredicate[] | FilterPredicateOp | FilterPredicateRangeOp; type FilterRangeValue = number | string; /** * @maxProperties 1 * @minProperties 1 */ type FilterPredicateOp = { $any?: FilterPredicate[]; $all?: FilterPredicate[]; $none?: FilterPredicate | FilterPredicate[]; $not?: FilterPredicate | FilterPredicate[]; $is?: FilterValue | FilterValue[]; $isNot?: FilterValue | FilterValue[]; $lt?: FilterRangeValue; $le?: FilterRangeValue; $gt?: FilterRangeValue; $ge?: FilterRangeValue; $contains?: string; $iContains?: string; $startsWith?: string; $endsWith?: string; $pattern?: string; $iPattern?: string; }; /** * @maxProperties 2 * @minProperties 2 */ type FilterPredicateRangeOp = { $lt?: FilterRangeValue; $le?: FilterRangeValue; $gt?: FilterRangeValue; $ge?: FilterRangeValue; }; /** * @maxProperties 1 * @minProperties 1 */ type FilterColumnIncludes = { $includes?: FilterPredicate; $includesAny?: FilterPredicate; $includesAll?: FilterPredicate; $includesNone?: FilterPredicate; }; type FilterColumn = FilterColumnIncludes | FilterPredicate | FilterList; type SortOrder = 'asc' | 'desc' | 'random'; type SortExpression = string[] | { [key: string]: SortOrder; } | { [key: string]: SortOrder; }[]; /** * Pagination settings. */ type PageConfig = { /** * Query the next page that follows the cursor. */ after?: string; /** * Query the previous page before the cursor. */ before?: string; /** * Query the first page from the cursor. */ start?: string; /** * Query the last page from the cursor. */ end?: string; /** * Set page size. If the size is missing it is read from the cursor. If no cursor is given, Xata will choose the default page size. * * @default 20 */ size?: number; /** * Use offset to skip entries. To skip pages, set offset to a multiple of size. * * @default 0 */ offset?: number; }; /** * @example name * @example email * @example created_at */ type ColumnsProjection = string[]; /** * The migration request number. * * @minimum 0 * @x-go-type migration.RequestNumber */ type MigrationRequestNumber = number; type MigrationRequest = { number?: MigrationRequestNumber; /** * Migration request creation timestamp. */ createdAt?: DateTime$1; /** * Last modified timestamp. */ modifiedAt?: DateTime$1; /** * Timestamp when the migration request was closed. */ closedAt?: DateTime$1; /** * Timestamp when the migration request was merged. */ mergedAt?: DateTime$1; status?: 'open' | 'closed' | 'merging' | 'merged' | 'failed'; /** * The migration request title. */ title?: string; /** * The migration request body with detailed description. */ body?: string; /** * Name of the source branch. */ source?: string; /** * Name of the target branch.
*/ target?: string; }; /** * Records metadata */ type RecordsMetadata = { page: { /** * last record id */ cursor: string; /** * true if more records can be fetched */ more: boolean; /** * the number of records returned per page */ size: number; }; }; type TableOpAdd = { table: string; }; type TableOpRemove = { table: string; }; type TableOpRename = { oldName: string; newName: string; }; type MigrationTableOp = { addTable: TableOpAdd; } | { removeTable: TableOpRemove; } | { renameTable: TableOpRename; }; type ColumnOpAdd = { table: string; column: Column; }; type ColumnOpRemove = { table: string; column: string; }; type ColumnOpRename = { table: string; oldName: string; newName: string; }; type MigrationColumnOp = { addColumn: ColumnOpAdd; } | { removeColumn: ColumnOpRemove; } | { renameColumn: ColumnOpRename; }; /** * Branch schema migration operations. */ type MigrationOp = MigrationTableOp | MigrationColumnOp; type Commit = { title?: string; message?: string; id: string; parentID?: string; checksum: string; mergeParentID?: string; createdAt: DateTime$1; operations: MigrationOp[]; }; type SchemaEditScript = { sourceMigrationID?: string; targetMigrationID?: string; operations: MigrationOp[]; }; type BranchOp = { id: string; parentID?: string; title?: string; message?: string; status: MigrationStatus$1; createdAt: DateTime$1; modifiedAt?: DateTime$1; migration?: Commit; }; /** * Branch schema migration. */ type Migration = { parentID?: string; operations: MigrationOp[]; }; type MigrationObject = { title?: string; message?: string; id: string; parentID?: string; checksum: string; operations: MigrationOp[]; }; /** * @pattern [a-zA-Z0-9_\-~\.]+ */ type ColumnName = string; /** * Insert operation */ type TransactionInsertOp = { /** * The table name */ table: string; /** * The record to insert. The `id` field is optional; when specified, it will be used as the ID for the record. */ record: { [key: string]: any; }; /** * The version of the record you expect to be overwriting. Only valid when an * explicit ID is also set in the `record` key. */ ifVersion?: number; /** * createOnly is used to change how Xata acts when an explicit ID is set in the `record` key. * * If `createOnly` is set to `true`, Xata will only attempt to insert the record. If there's a conflict, Xata * will cancel the transaction. * * If `createOnly` is set to `false`, Xata will attempt to insert the record. If there's no * conflict, the record is inserted. If there is a conflict, Xata will replace the record. */ createOnly?: boolean; /** * If set, the call will return the requested fields as part of the response. */ columns?: string[]; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_-~:]+ */ type RecordID = string; /** * Update operation */ type TransactionUpdateOp = { /** * The table name */ table: string; id: RecordID; /** * The fields of the record you'd like to update */ fields: { [key: string]: any; }; /** * The version of the record you expect to be updating */ ifVersion?: number; /** * Xata will insert this record if it cannot be found. */ upsert?: boolean; /** * If set, the call will return the requested fields as part of the response. */ columns?: string[]; }; /** * A delete operation. By default, the transaction will continue if no record matches the ID. To override this behaviour, set failIfMissing to true. */ type TransactionDeleteOp = { /** * The table name */ table: string; id: RecordID; /** * If true, the transaction will fail when the record doesn't exist.
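* For illustration, a delete that must match, submitted alongside an insert in one transaction (table, ID and column names are hypothetical):
*
* @example {"operations":[{"insert":{"table":"users","record":{"name":"Ada"}}},{"delete":{"table":"users","id":"rec_1234567890","failIfMissing":true,"columns":["name"]}}]}
*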
*/ failIfMissing?: boolean; /** * If set, the call will return the requested fields as part of the response. */ columns?: string[]; }; /** * Get by id operation. */ type TransactionGetOp = { /** * The table name */ table: string; id: RecordID; /** * If set, the call will return the requested fields as part of the response. */ columns?: string[]; }; /** * A transaction operation */ type TransactionOperation$1 = { insert: TransactionInsertOp; } | { update: TransactionUpdateOp; } | { ['delete']: TransactionDeleteOp; } | { get: TransactionGetOp; }; /** * Fields to return in the transaction result. */ type TransactionResultColumns = { [key: string]: any; }; /** * A result from an insert operation. */ type TransactionResultInsert = { /** * The type of operation whose result is being returned. */ operation: 'insert'; /** * The number of affected rows */ rows: number; id: RecordID; columns?: TransactionResultColumns; }; /** * A result from an update operation. */ type TransactionResultUpdate = { /** * The type of operation whose result is being returned. */ operation: 'update'; /** * The number of updated rows */ rows: number; id: RecordID; columns?: TransactionResultColumns; }; /** * A result from a delete operation. */ type TransactionResultDelete = { /** * The type of operation whose result is being returned. */ operation: 'delete'; /** * The number of deleted rows */ rows: number; columns?: TransactionResultColumns; }; /** * A result from a get operation. */ type TransactionResultGet = { /** * The type of operation whose result is being returned. */ operation: 'get'; columns?: TransactionResultColumns; }; /** * An ordered array of results from the submitted operations. */ type TransactionSuccess = { results: (TransactionResultInsert | TransactionResultUpdate | TransactionResultDelete | TransactionResultGet)[]; }; /** * An error message from a failing transaction operation */ type TransactionError = { /** * The index of the failing operation */ index: number; /** * The error message */ message: string; }; /** * An array of errors, with indices, from the transaction. */ type TransactionFailure = { /** * The request ID. */ id: string; /** * An array of errors from the submitted operations.
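* For illustration, a failure in which the second submitted operation (index 1) was rejected might be reported as (ID and message are hypothetical):
*
* @example {"id":"request_123","errors":[{"index":1,"message":"table [users]: record not found"}]}
*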
*/ errors: TransactionError[]; }; /** * Object column value */ type ObjectValue = { [key: string]: string | boolean | number | string[] | number[] | DateTime$1 | ObjectValue; }; /** * Unique file identifier * * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_-~:]+ */ type FileItemID = string; /** * File name * * @maxLength 1024 * @minLength 0 * @pattern [0-9a-zA-Z!\-_\.\*'\(\)]* */ type FileName = string; /** * Media type * * @maxLength 255 * @minLength 3 * @pattern ^\w+/[-+.\w]+$ */ type MediaType = string; /** * Object representing a file in an array */ type InputFileEntry = { id?: FileItemID; name?: FileName; mediaType?: MediaType; /** * Base64 encoded content * * @maxLength 20971520 */ base64Content?: string; /** * Enable public access to the file */ enablePublicUrl?: boolean; /** * Time to live for signed URLs */ signedUrlTimeout?: number; /** * Time to live for upload URLs */ uploadUrlTimeout?: number; }; /** * Array of file entries * * @maxItems 50 */ type InputFileArray = InputFileEntry[]; /** * Object representing a file * * @x-go-type file.InputFile */ type InputFile = { name: FileName; mediaType?: MediaType; /** * Base64 encoded content * * @maxLength 20971520 */ base64Content?: string; /** * Enable public access to the file */ enablePublicUrl?: boolean; /** * Time to live for signed URLs */ signedUrlTimeout?: number; /** * Time to live for upload URLs */ uploadUrlTimeout?: number; }; /** * Xata input record */ type DataInputRecord = { [key: string]: RecordID | string | boolean | number | string[] | number[] | DateTime$1 | ObjectValue | InputFileArray | InputFile | null; }; /** * Xata Table Record Metadata */ type RecordMeta = { id: RecordID; xata: { /** * The record's version. Can be used for optimistic concurrency control. */ version: number; /** * The time when the record was created. */ createdAt?: string; /** * The time when the record was last updated. */ updatedAt?: string; /** * The record's table name. APIs that return records from multiple tables will set this field accordingly. */ table?: string; /** * Highlights of the record. This is used by the search APIs to indicate which fields and parts of the fields have matched the search. */ highlight?: { [key: string]: string[] | { [key: string]: any; }; }; /** * The record's relevancy score. This is returned by the search APIs. */ score?: number; /** * Encoding/Decoding errors */ warnings?: string[]; }; } | { xata_id: RecordID; /** * The record's version. Can be used for optimistic concurrency control. */ xata_version: number; /** * The time when the record was created. */ xata_createdat?: string; /** * The time when the record was last updated. */ xata_updatedat?: string; /** * The record's table name. APIs that return records from multiple tables will set this field accordingly. */ xata_table?: string; /** * Highlights of the record. This is used by the search APIs to indicate which fields and parts of the fields have matched the search. */ xata_highlight?: { [key: string]: string[] | { [key: string]: any; }; }; /** * The record's relevancy score. This is returned by the search APIs. 
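* For illustration, the metadata columns on a search hit might look like (all values are hypothetical):
*
* @example {"xata_id":"rec_1234567890","xata_version":2,"xata_table":"posts","xata_score":0.78,"xata_highlight":{"title":["a <em>fruit</em> salad"]}}
*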
*/ xata_score?: number; /** * Encoding/Decoding errors */ xata_warnings?: string[]; }; /** * File metadata */ type FileResponse = { id?: FileItemID; name: FileName; mediaType: MediaType; /** * Enable public access to the file */ enablePublicUrl: boolean; /** * Time to live for signed URLs */ signedUrlTimeout: number; /** * Time to live for upload URLs */ uploadUrlTimeout: number; /** * @format int64 */ size: number; /** * @format int64 */ version: number; /** * File access URL * * @format uri */ url: string; /** * Signed file access URL * * @format uri */ signedUrl: string; /** * Upload file URL * * @format uri */ uploadUrl: string; attributes?: Record<string, any>; }; type QueryColumnsProjection = (string | ProjectionConfig)[]; /** * A structured projection that allows for some configuration. */ type ProjectionConfig = { /** * The name of the column to project or a reverse link specification, see [API Guide](https://xata.io/docs/concepts/data-model#links-and-relations). */ name?: string; columns?: QueryColumnsProjection; /** * An alias for the projected field, this is how it will be returned in the response. */ as?: string; sort?: SortExpression; /** * @default 20 */ limit?: number; /** * @default 0 */ offset?: number; }; /** * The target expression is used to filter the search results by the target columns. */ type TargetExpression = (string | { /** * The name of the column. */ column: string; /** * The weight of the column. * * @default 1 * @maximum 10 * @minimum 1 */ weight?: number; })[]; /** * Boost records with a particular value for a column. */ type ValueBooster$1 = { /** * The column in which to look for the value. */ column: string; /** * The exact value to boost. */ value: string | number | boolean; /** * The factor with which to multiply the added boost. */ factor: number; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: FilterExpression; }; /** * Boost records based on the value of a numeric column. */ type NumericBooster$1 = { /** * The column in which to look for the value. */ column: string; /** * The factor with which to multiply the value of the column before adding it to the item score. */ factor: number; /** * Modifier to be applied to the column value, before being multiplied with the factor. The possible values are: * - none (default). * - log: common logarithm (base 10) * - log1p: add 1 then take the common logarithm. This ensures that the value is positive if the * value is between 0 and 1. * - ln: natural logarithm (base e) * - ln1p: add 1 then take the natural logarithm. This ensures that the value is positive if the * value is between 0 and 1. * - square: raise the value to the power of two. * - sqrt: take the square root of the value. * - reciprocal: reciprocate the value (if the value is `x`, the reciprocal is `1/x`). */ modifier?: 'none' | 'log' | 'log1p' | 'ln' | 'ln1p' | 'square' | 'sqrt' | 'reciprocal'; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: FilterExpression; }; /** * Boost records based on the value of a datetime column. It is configured via "origin", "scale", and "decay". The further away from the "origin", * the more the score is decayed. The decay function uses an exponential function. For example if origin is "now", and scale is 10 days and decay is 0.5, it * should be interpreted as: a record with a date 10 days before/after origin will be boosted 2 times less than a record with the date at origin.
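* For illustration, that configuration (with a hypothetical `publishedAt` column, omitting `origin` so it defaults to the current time) would be: `{"dateBooster":{"column":"publishedAt","scale":"10d","decay":0.5,"factor":2}}`.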
* The result of the exponential function is a boost between 0 and 1. The "factor" allows you to control how impactful this boost is, by multiplying it with a given value. */ type DateBooster$1 = { /** * The column in which to look for the value. */ column: string; /** * The datetime (formatted as RFC3339) from where to apply the score decay function. The maximum boost will be applied for records with values at this time. * If it is not specified, the current date and time is used. */ origin?: string; /** * The duration at which distance from origin the score is decayed with factor, using an exponential function. It is formatted as number + units, for example: `5d`, `20m`, `10s`. * * @pattern ^(\d+)(d|h|m|s|ms)$ */ scale: string; /** * The decay factor to expect at "scale" distance from the "origin". */ decay: number; /** * The factor with which to multiply the added boost. * * @minimum 0 */ factor?: number; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: FilterExpression; }; /** * Booster Expression * * @x-go-type xata.BoosterExpression */ type BoosterExpression = { valueBooster?: ValueBooster$1; } | { numericBooster?: NumericBooster$1; } | { dateBooster?: DateBooster$1; }; /** * Maximum [Levenshtein distance](https://en.wikipedia.org/wiki/Levenshtein_distance) for the search terms. The Levenshtein * distance is the number of one character changes needed to make two strings equal. The default is 1, meaning that single * character typos per word are tolerated by search. You can set it to 0 to remove the typo tolerance or set it to 2 * to allow two typos in a word. * * @default 1 * @maximum 2 * @minimum 0 */ type FuzzinessExpression = number; /** * If the prefix type is set to "disabled" (the default), the search only matches full words. If the prefix type is set to "phrase", the search will return results that match prefixes of the search phrase. */ type PrefixExpression = 'phrase' | 'disabled'; type HighlightExpression = { /** * Set to `false` to disable highlighting. By default it is `true`. */ enabled?: boolean; /** * Set to `false` to disable HTML encoding in highlight snippets. By default it is `true`. */ encodeHTML?: boolean; }; /** * Pagination settings for the search endpoints. */ type SearchPageConfig = { /** * Set page size. * * @default 25 * @maximum 200 */ size?: number; /** * Use offset to skip entries. To skip pages, set offset to a multiple of size. * * @default 0 * @maximum 800 */ offset?: number; }; /** * A summary expression is the description of a single summary operation. It consists of a single * key representing the operation, and a value representing the column to be operated on. * * The column being summarized cannot be an internal column (id, xata.*), nor the base of * an object, i.e. if `settings` is an object with `dark_mode` as a field, you may summarize * `settings.dark_mode` but not `settings` nor `settings.*`. * * We currently support several aggregation functions. Not all functions can be run on all column * types. * * - `count` is used to count the number of records in each group. Use `{"count": "*"}` to count * all columns present, otherwise `{"count": "<column_name>"}` to count the number of non-null * values present at the column path. * * Count can be used on any column type, and always returns an int. * * - `min` calculates the minimum value in each group. `min` is compatible with most types; * string, multiple, text, email, int, float, and datetime. It returns a value of the same * type as operated on.
This means that `{"lowest_latency": {"min": "latency"}}` where * `latency` is an int, will always return an int. * * - `max` calculates the maximum value in each group. `max` shares the same compatibility as * `min`. * * - `sum` adds up all values in a group. `sum` can be run on `int` and `float` types, and will * return a value of the same type as requested. * * - `average` averages all values in a group. `average` can be run on `int` and `float` types, and * always returns a float. * * @example {"count":"deleted_at"} * @x-go-type xbquery.Summary */ type SummaryExpression = Record<string, any>; /** * The description of the summaries you wish to receive. Set each key to be the field name * you'd like for the summary. These names must not collide with other columns you've * requested from `columns`; including implicit requests like `settings.*`. * * The value for each key needs to be an object. This object should contain one key and one * value only. In this object, the key should be set to the summary function you wish to use * and the value set to the column name to be summarized. * * The column being summarized cannot be an internal column (id, xata.*), nor the base of * an object, i.e. if `settings` is an object with `dark_mode` as a field, you may summarize * `settings.dark_mode` but not `settings` nor `settings.*`. * * @example {"all_users":{"count":"*"},"total_created":{"count":"created_at"},"min_cost":{"min":"cost"},"max_happiness":{"max":"happiness"},"total_revenue":{"sum":"revenue"},"average_speed":{"average":"speed"}} * @x-go-type xbquery.SummaryList */ type SummaryExpressionList = { [key: string]: SummaryExpression; }; /** * Count the number of records with an optional filter. */ type CountAgg = { filter?: FilterExpression; } | '*'; /** * The sum of the numeric values in a particular column. */ type SumAgg = { /** * The column on which to compute the sum. Must be a numeric type. */ column: string; }; /** * The max of the numeric values in a particular column. */ type MaxAgg = { /** * The column on which to compute the max. Must be a numeric type. */ column: string; }; /** * The min of the numeric values in a particular column. */ type MinAgg = { /** * The column on which to compute the min. Must be a numeric type. */ column: string; }; /** * The average of the numeric values in a particular column. */ type AverageAgg = { /** * The column on which to compute the average. Must be a numeric type. */ column: string; }; /** * Calculate given percentiles of the numeric values in a particular column. */ type PercentilesAgg = { /** * The column on which to compute the percentiles. Must be a numeric type. */ column: string; percentiles: number[]; }; /** * Count the number of distinct values in a particular column. */ type UniqueCountAgg = { /** * The column from where to count the unique values. */ column: string; /** * The threshold under which the unique count is exact. If the number of unique * values in the column is higher than this threshold, the results are approximate. * Maximum value is 40,000, default value is 3000. */ precisionThreshold?: number; }; /** * The description of the aggregations you wish to receive. * * @example {"totalCount":{"count":"*"},"dailyActiveUsers":{"dateHistogram":{"column":"date","interval":"1d","aggs":{"uniqueUsers":{"uniqueCount":{"column":"userID"}}}}}} */ type AggExpressionMap = { [key: string]: AggExpression; }; /** * Split data into buckets by a datetime column. Accepts sub-aggregations for each bucket.
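* For illustration, daily buckets with a per-bucket unique count (column names are hypothetical):
*
* @example {"dateHistogram":{"column":"createdAt","calendarInterval":"day","aggs":{"uniqueUsers":{"uniqueCount":{"column":"userID"}}}}}
*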
*/ type DateHistogramAgg = { /** * The column to use for bucketing. Must be of type datetime. */ column: string; /** * The fixed interval to use when bucketing. * It is formatted as number + units, for example: `5d`, `20m`, `10s`. * * @pattern ^(\d+)(d|h|m|s|ms)$ */ interval?: string; /** * The calendar-aware interval to use when bucketing. Possible values are: `minute`, * `hour`, `day`, `week`, `month`, `quarter`, `year`. */ calendarInterval?: 'minute' | 'hour' | 'day' | 'week' | 'month' | 'quarter' | 'year'; /** * The timezone to use for bucketing. By default, UTC is assumed. * The accepted format is as an ISO 8601 UTC offset. For example: `+01:00` or * `-08:00`. * * @pattern ^[+-][01]\d:[0-5]\d$ */ timezone?: string; aggs?: AggExpressionMap; }; /** * Split data into buckets by the unique values in a column. Accepts sub-aggregations for each bucket. * The top values as ordered by the number of records (`$count`) are returned. */ type TopValuesAgg = { /** * The column to use for bucketing. Accepted types are `string`, `email`, `int`, `float`, or `bool`. */ column: string; aggs?: AggExpressionMap; /** * The maximum number of unique values to return. * * @default 10 * @maximum 1000 */ size?: number; }; /** * Split data into buckets by dynamic numeric ranges. Accepts sub-aggregations for each bucket. */ type NumericHistogramAgg = { /** * The column to use for bucketing. Must be of numeric type. */ column: string; /** * The numeric interval to use for bucketing. The resulting buckets will be ranges * with this value as size. * * @minimum 0 */ interval: number; /** * By default the bucket keys start with 0 and then continue in `interval` steps. The bucket * boundaries can be shifted by using the offset option. For example, if the `interval` is 100, * but you prefer the bucket boundaries to be `[50, 150), [150, 250), etc.`, you can set `offset` * to 50. * * @default 0 */ offset?: number; aggs?: AggExpressionMap; }; /** * The description of a single aggregation operation. It is an object with only one key-value pair. * The key represents the aggregation type, while the value is an object with the configuration of * the aggregation. 
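* For illustration, a percentiles aggregation over a hypothetical numeric `latency` column: `{"percentiles":{"column":"latency","percentiles":[50,90,99]}}`.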
* * @x-go-type xata.AggExpression */ type AggExpression = { count?: CountAgg; } | { sum?: SumAgg; } | { max?: MaxAgg; } | { min?: MinAgg; } | { average?: AverageAgg; } | { percentiles?: PercentilesAgg; } | { uniqueCount?: UniqueCountAgg; } | { dateHistogram?: DateHistogramAgg; } | { topValues?: TopValuesAgg; } | { numericHistogram?: NumericHistogramAgg; }; type AggResponse$1 = (number | null) | { values: ({ $key: string | number; $count: number; } & { [key: string]: AggResponse$1; })[] | { [key: string]: number; }; }; /** * File identifier in access URLs * * @maxLength 296 * @minLength 88 * @pattern [a-v0-9=]+ */ type FileAccessID = string; /** * File signature */ type FileSignature = string; /** * Xata Table SQL Record */ type SQLRecord = { [key: string]: any; }; /** * Xata Table Record Metadata */ type XataRecord$1 = RecordMeta & { [key: string]: any; }; /** * Generated by @openapi-codegen * * @version 1.0 */ type BadRequestError$1 = { id?: string; message: string; }; /** * @example {"message":"invalid API key"} */ type AuthError$1 = { id?: string; message: string; }; type SimpleError$1 = { id?: string; message: string; }; type BranchMigrationPlan = { version: number; migration: BranchMigration; }; type SchemaUpdateResponse = { /** * @minLength 1 */ migrationID: string; parentMigrationID: string; status: MigrationStatus$1; }; type SchemaCompareResponse = { source: Schema; target: Schema; edits: SchemaEditScript; }; type RateLimitError = { id?: string; message: string; }; type RecordUpdateResponse = XataRecord$1 | { id: string; xata: { version: number; createdAt: string; updatedAt: string; }; } | { xata_id: string; xata_version: number; xata_createdat: string; xata_updatedat: string; }; type PutFileResponse = FileResponse; type RecordResponse = XataRecord$1; type BulkInsertResponse = { recordIDs: string[]; } | { records: XataRecord$1[]; }; type BulkError = { errors: { message?: string; status?: number; }[]; }; type QueryResponse = { records: XataRecord$1[]; meta: RecordsMetadata; }; type ServiceUnavailableError = { id?: string; message: string; }; type SearchResponse = { records: XataRecord$1[]; warning?: string; /** * The total count of records matched. It will be accurately returned up to 10000 records. 
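* For illustration, a query matching 12345 records with the default search page size of 25 would report `"totalCount": 12345` while `records` holds only the first 25 hits (numbers are hypothetical).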
*/ totalCount: number; }; type SummarizeResponse = { summaries: Record<string, any>[]; }; /** * @example {"aggs":{"dailyUniqueUsers":{"values":[{"$count":321,"$key":"2022-02-22T22:22:22Z","uniqueUsers":134},{"$count":202,"$key":"2022-02-23T22:22:22Z","uniqueUsers":90}]}}} */ type AggResponse = { aggs?: { [key: string]: AggResponse$1; }; }; type SQLResponse = { records?: SQLRecord[]; rows?: any[][]; /** * Name of the column and its PostgreSQL type */ columns?: { name?: string; type?: string; }[]; /** * Number of selected columns */ total?: number; warning?: string; }; /** * Generated by @openapi-codegen * * @version 1.0 */ type OAuthResponseType = 'code'; type OAuthScope = 'admin:all'; type AuthorizationCodeResponse = { state?: string; redirectUri?: string; scopes?: OAuthScope[]; clientId?: string; /** * @format date-time */ expires?: string; code?: string; }; type AuthorizationCodeRequest = { state?: string; redirectUri?: string; scopes?: OAuthScope[]; clientId: string; responseType: OAuthResponseType; }; type User = { /** * @format email */ email: string; fullname: string; image: string; }; /** * @pattern [a-zA-Z0-9_-~:]+ */ type UserID = string; type UserWithID = User & { id: UserID; }; /** * @format date-time * @x-go-type string */ type DateTime = string; /** * @pattern [a-zA-Z0-9_\-~]* */ type APIKeyName = string; type OAuthClientPublicDetails = { name?: string; description?: string; icon?: string; clientId: string; }; type OAuthClientID = string; type OAuthAccessToken = { token: string; scopes: string[]; /** * @format date-time */ createdAt: string; /** * @format date-time */ updatedAt: string; /** * @format date-time */ expiresAt: string; clientId: string; }; type AccessToken = string; /** * @pattern ^([a-zA-Z0-9][a-zA-Z0-9_\-~]+-)?[a-zA-Z0-9]{6} * @x-go-type auth.WorkspaceID */ type WorkspaceID = string; /** * @x-go-type auth.Role */ type Role = 'owner' | 'maintainer'; type WorkspacePlan = 'free' | 'pro'; type WorkspaceMeta = { name: string; slug?: string; }; type Workspace = WorkspaceMeta & { id: WorkspaceID; memberCount: number; plan: WorkspacePlan; }; type WorkspaceSettings = { postgresEnabled: boolean; dedicatedClusters: boolean; }; type WorkspaceMember = { userId: UserID; fullname: string; /** * @format email */ email: string; role: Role; }; /** * @pattern [a-zA-Z0-9]+ */ type InviteID = string; type WorkspaceInvite = { inviteId: InviteID; /** * @format email */ email: string; /** * @format date-time */ expires: string; role: Role; }; type WorkspaceMembers = { members: WorkspaceMember[]; invites: WorkspaceInvite[]; }; /** * @pattern ^ik_[a-zA-Z0-9]+ */ type InviteKey = string; /** * Page size.
* * @x-internal true * @default 25 * @minimum 0 */ type PageSize = number; /** * Page token * * @x-internal true * @maxLength 255 * @minLength 24 */ type PageToken = string; /** * @x-internal true * @pattern [a-zA-Z0-9_-~:]+ */ type ClusterID = string; /** * @x-internal true */ type ClusterShortMetadata = { id: ClusterID; state: string; region: string; name: string; /** * @format int64 */ branches: number; }; /** * @x-internal true */ type PageResponse = { size: number; hasMore: boolean; token?: string; }; /** * @x-internal true */ type ListClustersResponse = { clusters: ClusterShortMetadata[]; page: PageResponse; }; /** * @x-internal true */ type AutoscalingConfig = { /** * @format double * @default 2 */ minCapacity?: number; /** * @format double * @default 16 */ maxCapacity?: number; }; /** * @x-internal true */ type WeeklyTimeWindow = { day: 'mon' | 'tue' | 'wed' | 'thu' | 'fri' | 'sat' | 'sun'; /** * @maximum 24 * @minimum 0 */ hour: number; /** * @maximum 60 * @minimum 0 */ minute: number; /** * @format float * @maximum 23.5 * @minimum 0.5 */ duration: number; }; /** * @x-internal true */ type DailyTimeWindow = { /** * @maximum 24 * @minimum 0 */ hour: number; /** * @maximum 60 * @minimum 0 */ minute: number; /** * @format float */ duration: number; }; /** * @x-internal true */ type MaintenanceConfig = { /** * @default false */ autoMinorVersionUpgrade?: boolean; /** * @default false */ applyImmediately?: boolean; maintenanceWindow?: WeeklyTimeWindow; backupWindow?: DailyTimeWindow; }; /** * @x-internal true */ type ClusterConfiguration = { engineVersion: string; instanceType: string; /** * @format int64 */ replicas?: number; /** * @format int64 * @default 1 * @maximum 3 * @minimum 1 */ instanceCount?: number; /** * @default false */ deletionProtection?: boolean; autoscaling?: AutoscalingConfig; maintenance?: MaintenanceConfig; }; /** * @x-internal true */ type ClusterCreateDetails = { /** * @minLength 1 */ region: string; /** * @maxLength 63 * @minLength 1 * @pattern [a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)* */ name: string; configuration: ClusterConfiguration; }; /** * @x-internal true */ type ClusterResponse = { state: string; clusterID: string; }; /** * @x-internal true */ type AutoscalingConfigResponse = { /** * @format double * @default 0.5 */ minCapacity: number; /** * @format double * @default 4 */ maxCapacity: number; }; /** * @x-internal true */ type MaintenanceConfigResponse = { /** * @default false */ autoMinorVersionUpgrade: boolean; /** * @default false */ applyImmediately: boolean; maintenanceWindow: WeeklyTimeWindow; backupWindow: DailyTimeWindow; }; /** * @x-internal true */ type ClusterConfigurationResponse = { engineVersion: string; instanceType: string; /** * @format int64 */ replicas: number; /** * @format int64 */ instanceCount: number; /** * @default false */ deletionProtection: boolean; autoscaling?: AutoscalingConfigResponse; maintenance: MaintenanceConfigResponse; }; /** * @x-internal true */ type ClusterMetadata = { id: ClusterID; state: string; region: string; name: string; /** * @format int64 */ branches: number; configuration: ClusterConfigurationResponse; }; /** * @x-internal true */ type ClusterUpdateDetails = { /** * @pattern ^[Ss][Tt][Oo][Pp]|[Ss][Tt][Aa][Rr][Tt]$ */ command: string; }; /** * @x-internal true */ type ClusterUpdateMetadata = { id: ClusterID; state: string; }; /** * Metadata of databases */ type DatabaseMetadata = { /** * The machine-readable name of a database */ name: string; /** * Region where this database is 
hosted */ region: string; /** * The time this database was created */ createdAt: DateTime; /** * @x-internal true */ newMigrations?: boolean; /** * The default cluster ID where branches from this database reside. Value of 'shared-cluster' for branches in shared clusters. */ defaultClusterID?: string; /** * The database is accessible via the Postgres protocol */ postgresEnabled?: boolean; /** * Metadata about the database for display in Xata user interfaces */ ui?: { /** * The user-selected color for this database across interfaces */ color?: string; }; }; type ListDatabasesResponse = { /** * A list of databases in a Xata workspace */ databases: DatabaseMetadata[]; }; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type DBName = string; /** * @maxLength 255 * @minLength 1 * @pattern [a-zA-Z0-9_\-~]+ */ type BranchName = string; /** * @example {"repository":"github.com/my/repository","branch":"feature-login","stage":"testing","labels":["epic-100"]} * @x-go-type xata.BranchMetadata */ type BranchMetadata = { /** * @minLength 1 */ repository?: string; branch?: BranchName; /** * @minLength 1 */ stage?: string; labels?: string[]; }; type MigrationStatus = 'completed' | 'pending' | 'failed'; /** * Github repository settings for this database (optional) */ type DatabaseGithubSettings = { /** * Repository owner (user or organization) */ owner: string; /** * Repository name */ repo: string; }; type Region = { id: string; name: string; }; type ListRegionsResponse = { /** * A list of regions where databases can be created */ regions: Region[]; }; declare class ErrorWithCause extends Error { cause?: Error; constructor(message?: string, options?: { cause?: Error; }); } declare class FetcherError extends ErrorWithCause { status: number | string; requestId: string | undefined; errors: BulkError['errors'] | undefined; constructor(status: number, data?: unknown, requestId?: string); toString(): string; } type WorkspaceApiUrlBuilder = (path: string, pathParams: Partial<Record<string, string>>) => string; type FetcherExtraProps = { endpoint: 'controlPlane' | 'dataPlane'; apiUrl: string; workspacesApiUrl: string | WorkspaceApiUrlBuilder; fetch: FetchImpl; apiKey: string; trace: TraceFunction; signal?: AbortSignal; clientID?: string; sessionID?: string; clientName?: string; xataAgentExtra?: Record<string, string>; fetchOptions?: Record<string, unknown>; rawResponse?: boolean; headers?: Record<string, unknown>; }; type ControlPlaneFetcherExtraProps = { apiUrl: string; workspacesApiUrl: string | WorkspaceApiUrlBuilder; fetch: FetchImpl; apiKey: string; trace: TraceFunction; signal?: AbortSignal; clientID?: string; sessionID?: string; clientName?: string; xataAgentExtra?: Record<string, string>; }; type ErrorWrapper$1<TError> = TError | { status: 'unknown'; payload: string; }; /** * Generated by @openapi-codegen * * @version 1.0 */ type BadRequestError = { id?: string; message: string; }; /** * @example {"message":"invalid API key"} */ type AuthError = { id?: string; message: string; }; type SimpleError = { id?: string; message: string; }; /** * Generated by @openapi-codegen * * @version 1.0 */ type GetAuthorizationCodeQueryParams = { clientID: string; responseType: OAuthResponseType; redirectUri?: string; scopes?: OAuthScope[]; state?: string; }; type GetAuthorizationCodeError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 409; payload: SimpleError; }>; type GetAuthorizationCodeVariables = { queryParams: GetAuthorizationCodeQueryParams; } & ControlPlaneFetcherExtraProps; /** * Creates,
stores and returns an authorization code to be used by a third party app. Supporting use of GET is required by OAuth2 spec */ declare const getAuthorizationCode: (variables: GetAuthorizationCodeVariables, signal?: AbortSignal) => Promise<AuthorizationCodeResponse>; type GrantAuthorizationCodeError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 409; payload: SimpleError; }>; type GrantAuthorizationCodeVariables = { body: AuthorizationCodeRequest; } & ControlPlaneFetcherExtraProps; /** * Creates, stores and returns an authorization code to be used by a third party app */ declare const grantAuthorizationCode: (variables: GrantAuthorizationCodeVariables, signal?: AbortSignal) => Promise<AuthorizationCodeResponse>; type GetUserError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetUserVariables = ControlPlaneFetcherExtraProps; /** * Return details of the user making the request */ declare const getUser: (variables: GetUserVariables, signal?: AbortSignal) => Promise<UserWithID>; type UpdateUserError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type UpdateUserVariables = { body: User; } & ControlPlaneFetcherExtraProps; /** * Update user info */ declare const updateUser: (variables: UpdateUserVariables, signal?: AbortSignal) => Promise<UserWithID>; type DeleteUserError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteUserVariables = ControlPlaneFetcherExtraProps; /** * Delete the user making the request */ declare const deleteUser: (variables: DeleteUserVariables, signal?: AbortSignal) => Promise<undefined>; type GetUserAPIKeysError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetUserAPIKeysResponse = { keys: { name: string; createdAt: DateTime; }[]; }; type GetUserAPIKeysVariables = ControlPlaneFetcherExtraProps; /** * Retrieve a list of existing user API keys */ declare const getUserAPIKeys: (variables: GetUserAPIKeysVariables, signal?: AbortSignal) => Promise<GetUserAPIKeysResponse>; type CreateUserAPIKeyPathParams = { /** * API Key name */ keyName: APIKeyName; }; type CreateUserAPIKeyError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type CreateUserAPIKeyResponse = { name: string; key: string; createdAt: DateTime; }; type CreateUserAPIKeyVariables = { pathParams: CreateUserAPIKeyPathParams; } & ControlPlaneFetcherExtraProps; /** * Create and return new API key */ declare const createUserAPIKey: (variables: CreateUserAPIKeyVariables, signal?: AbortSignal) => Promise<CreateUserAPIKeyResponse>; type DeleteUserAPIKeyPathParams = { /** * API Key name */ keyName: APIKeyName; }; type DeleteUserAPIKeyError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteUserAPIKeyVariables = { pathParams: DeleteUserAPIKeyPathParams; } & ControlPlaneFetcherExtraProps; /** * Delete an existing API key */ declare const deleteUserAPIKey: (variables: DeleteUserAPIKeyVariables, signal?: AbortSignal) => Promise<undefined>; type GetUserOAuthClientsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload:
SimpleError; }>; type GetUserOAuthClientsResponse = { clients?: OAuthClientPublicDetails[]; }; type GetUserOAuthClientsVariables = ControlPlaneFetcherExtraProps; /** * Retrieve the list of OAuth Clients that a user has authorized */ declare const getUserOAuthClients: (variables: GetUserOAuthClientsVariables, signal?: AbortSignal) => Promise<GetUserOAuthClientsResponse>; type DeleteUserOAuthClientPathParams = { clientId: OAuthClientID; }; type DeleteUserOAuthClientError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteUserOAuthClientVariables = { pathParams: DeleteUserOAuthClientPathParams; } & ControlPlaneFetcherExtraProps; /** * Delete the OAuth client for the user and revoke all access */ declare const deleteUserOAuthClient: (variables: DeleteUserOAuthClientVariables, signal?: AbortSignal) => Promise<undefined>; type GetUserOAuthAccessTokensError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetUserOAuthAccessTokensResponse = { accessTokens: OAuthAccessToken[]; }; type GetUserOAuthAccessTokensVariables = ControlPlaneFetcherExtraProps; /** * Retrieve the list of valid OAuth Access Tokens on the current user's account */ declare const getUserOAuthAccessTokens: (variables: GetUserOAuthAccessTokensVariables, signal?: AbortSignal) => Promise<GetUserOAuthAccessTokensResponse>; type DeleteOAuthAccessTokenPathParams = { token: AccessToken; }; type DeleteOAuthAccessTokenError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 409; payload: SimpleError; }>; type DeleteOAuthAccessTokenVariables = { pathParams: DeleteOAuthAccessTokenPathParams; } & ControlPlaneFetcherExtraProps; /** * Expires the access token for a third party app */ declare const deleteOAuthAccessToken: (variables: DeleteOAuthAccessTokenVariables, signal?: AbortSignal) => Promise<undefined>; type UpdateOAuthAccessTokenPathParams = { token: AccessToken; }; type UpdateOAuthAccessTokenError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 409; payload: SimpleError; }>; type UpdateOAuthAccessTokenRequestBody = { /** * expiration time of the token as a unix timestamp */ expires: number; }; type UpdateOAuthAccessTokenVariables = { body: UpdateOAuthAccessTokenRequestBody; pathParams: UpdateOAuthAccessTokenPathParams; } & ControlPlaneFetcherExtraProps; /** * Partially updates the access token for a third party app */ declare const updateOAuthAccessToken: (variables: UpdateOAuthAccessTokenVariables, signal?: AbortSignal) => Promise<OAuthAccessToken>; type GetWorkspacesListError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetWorkspacesListResponse = { workspaces: { id: WorkspaceID; name: string; slug: string; role: Role; plan: WorkspacePlan; }[]; }; type GetWorkspacesListVariables = ControlPlaneFetcherExtraProps; /** * Retrieve the list of workspaces the user belongs to */ declare const getWorkspacesList: (variables: GetWorkspacesListVariables, signal?: AbortSignal) => Promise<GetWorkspacesListResponse>; type CreateWorkspaceError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type CreateWorkspaceVariables = { body: WorkspaceMeta; } & ControlPlaneFetcherExtraProps; /** *
Creates a new workspace with the user requesting it as its single owner. */ declare const createWorkspace: (variables: CreateWorkspaceVariables, signal?: AbortSignal) => Promise<Workspace>; type GetWorkspacePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type GetWorkspaceError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetWorkspaceVariables = { pathParams: GetWorkspacePathParams; } & ControlPlaneFetcherExtraProps; /** * Retrieve workspace info from a workspace ID */ declare const getWorkspace: (variables: GetWorkspaceVariables, signal?: AbortSignal) => Promise<Workspace>; type UpdateWorkspacePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type UpdateWorkspaceError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type UpdateWorkspaceVariables = { body: WorkspaceMeta; pathParams: UpdateWorkspacePathParams; } & ControlPlaneFetcherExtraProps; /** * Update workspace info */ declare const updateWorkspace: (variables: UpdateWorkspaceVariables, signal?: AbortSignal) => Promise<Workspace>; type DeleteWorkspacePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type DeleteWorkspaceError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteWorkspaceVariables = { pathParams: DeleteWorkspacePathParams; } & ControlPlaneFetcherExtraProps; /** * Delete the workspace with the provided ID */ declare const deleteWorkspace: (variables: DeleteWorkspaceVariables, signal?: AbortSignal) => Promise<undefined>; type GetWorkspaceSettingsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type GetWorkspaceSettingsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetWorkspaceSettingsVariables = { pathParams: GetWorkspaceSettingsPathParams; } & ControlPlaneFetcherExtraProps; /** * Retrieve workspace settings from a workspace ID */ declare const getWorkspaceSettings: (variables: GetWorkspaceSettingsVariables, signal?: AbortSignal) => Promise<WorkspaceSettings>; type UpdateWorkspaceSettingsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type UpdateWorkspaceSettingsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type UpdateWorkspaceSettingsRequestBody = { postgresEnabled: boolean; }; type UpdateWorkspaceSettingsVariables = { body: UpdateWorkspaceSettingsRequestBody; pathParams: UpdateWorkspaceSettingsPathParams; } & ControlPlaneFetcherExtraProps; /** * Update workspace settings */ declare const updateWorkspaceSettings: (variables: UpdateWorkspaceSettingsVariables, signal?: AbortSignal) => Promise<WorkspaceSettings>; type GetWorkspaceMembersListPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type GetWorkspaceMembersListError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetWorkspaceMembersListVariables = { pathParams: GetWorkspaceMembersListPathParams; } & ControlPlaneFetcherExtraProps;
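/**
 * For illustration: each fetcher below is a plain function that receives its
 * path/query params merged with the control-plane transport props
 * (ControlPlaneFetcherExtraProps). A sketch of a call, assuming Node 18+'s
 * global `fetch`, a placeholder workspace ID, and a no-op tracer:
 *
 * @example
 * const members = await getWorkspaceMembersList({
 *   pathParams: { workspaceId: 'my-workspace-abc123' },
 *   apiUrl: 'https://api.xata.io',
 *   workspacesApiUrl: 'https://{workspaceId}.{region}.xata.sh',
 *   fetch: globalThis.fetch,
 *   apiKey: process.env.XATA_API_KEY ?? '',
 *   trace: async (_name, fn) => fn({ setAttributes: () => {} })
 * });
 */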
/** * Retrieve the list of members of the given workspace */ declare const getWorkspaceMembersList: (variables: GetWorkspaceMembersListVariables, signal?: AbortSignal) => Promise<WorkspaceMembers>; type UpdateWorkspaceMemberRolePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * UserID */ userId: UserID; }; type UpdateWorkspaceMemberRoleError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type UpdateWorkspaceMemberRoleRequestBody = { role: Role; }; type UpdateWorkspaceMemberRoleVariables = { body: UpdateWorkspaceMemberRoleRequestBody; pathParams: UpdateWorkspaceMemberRolePathParams; } & ControlPlaneFetcherExtraProps; /** * Update a workspace member role. Workspaces must always have at least one owner, so this operation will fail if trying to remove owner role from the last owner in the workspace. */ declare const updateWorkspaceMemberRole: (variables: UpdateWorkspaceMemberRoleVariables, signal?: AbortSignal) => Promise<undefined>; type RemoveWorkspaceMemberPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * UserID */ userId: UserID; }; type RemoveWorkspaceMemberError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type RemoveWorkspaceMemberVariables = { pathParams: RemoveWorkspaceMemberPathParams; } & ControlPlaneFetcherExtraProps; /** * Remove the member from the workspace */ declare const removeWorkspaceMember: (variables: RemoveWorkspaceMemberVariables, signal?: AbortSignal) => Promise<undefined>; type InviteWorkspaceMemberPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type InviteWorkspaceMemberError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 409; payload: SimpleError; }>; type InviteWorkspaceMemberRequestBody = { /** * @format email */ email: string; role: Role; }; type InviteWorkspaceMemberVariables = { body: InviteWorkspaceMemberRequestBody; pathParams: InviteWorkspaceMemberPathParams; } & ControlPlaneFetcherExtraProps; /** * Invite a user to join the workspace with the given role */ declare const inviteWorkspaceMember: (variables: InviteWorkspaceMemberVariables, signal?: AbortSignal) => Promise<WorkspaceInvite>; type UpdateWorkspaceMemberInvitePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Invite identifier */ inviteId: InviteID; }; type UpdateWorkspaceMemberInviteError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; } | { status: 422; payload: SimpleError; }>; type UpdateWorkspaceMemberInviteRequestBody = { role: Role; }; type UpdateWorkspaceMemberInviteVariables = { body: UpdateWorkspaceMemberInviteRequestBody; pathParams: UpdateWorkspaceMemberInvitePathParams; } & ControlPlaneFetcherExtraProps; /** * This operation provides a way to update an existing invite. Updates are performed in-place; they do not change the invite link, the expiry time, nor do they re-notify the recipient of the invite.
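* For illustration, changing a pending invite to the owner role (IDs are placeholders, and `controlPlaneProps` stands for a hypothetical object carrying the transport props shown above):
*
* @example
* await updateWorkspaceMemberInvite({
*   pathParams: { workspaceId: 'my-workspace-abc123', inviteId: 'abc123' },
*   body: { role: 'owner' },
*   ...controlPlaneProps
* });
*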
*/ declare const updateWorkspaceMemberInvite: (variables: UpdateWorkspaceMemberInviteVariables, signal?: AbortSignal) => Promise; type CancelWorkspaceMemberInvitePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Invite identifier */ inviteId: InviteID; }; type CancelWorkspaceMemberInviteError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type CancelWorkspaceMemberInviteVariables = { pathParams: CancelWorkspaceMemberInvitePathParams; } & ControlPlaneFetcherExtraProps; /** * This operation provides a way to cancel invites by deleting them. Already accepted invites cannot be deleted. */ declare const cancelWorkspaceMemberInvite: (variables: CancelWorkspaceMemberInviteVariables, signal?: AbortSignal) => Promise; type AcceptWorkspaceMemberInvitePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Invite Key (secret) for the invited user */ inviteKey: InviteKey; }; type AcceptWorkspaceMemberInviteError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type AcceptWorkspaceMemberInviteVariables = { pathParams: AcceptWorkspaceMemberInvitePathParams; } & ControlPlaneFetcherExtraProps; /** * Accept the invitation to join a workspace. If the operation succeeds, the user will be a member of the workspace */ declare const acceptWorkspaceMemberInvite: (variables: AcceptWorkspaceMemberInviteVariables, signal?: AbortSignal) => Promise; type ResendWorkspaceMemberInvitePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Invite identifier */ inviteId: InviteID; }; type ResendWorkspaceMemberInviteError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 403; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type ResendWorkspaceMemberInviteVariables = { pathParams: ResendWorkspaceMemberInvitePathParams; } & ControlPlaneFetcherExtraProps; /** * This operation provides a way to resend an Invite notification. Invite notifications can only be sent for Invites not yet accepted. */ declare const resendWorkspaceMemberInvite: (variables: ResendWorkspaceMemberInviteVariables, signal?: AbortSignal) => Promise; type ListClustersPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type ListClustersQueryParams = { /** * Page size */ page?: PageSize; /** * Page token */ token?: PageToken; }; type ListClustersError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; }>; type ListClustersVariables = { pathParams: ListClustersPathParams; queryParams?: ListClustersQueryParams; } & ControlPlaneFetcherExtraProps; /** * List all clusters available in your Workspace.
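 *
 * A usage sketch (hypothetical workspace ID and page size; `props` stands for
 * the elided ControlPlaneFetcherExtraProps):
 *
 * ```ts
 * declare const props: ControlPlaneFetcherExtraProps;
 *
 * const firstPage = await listClusters({
 *   pathParams: { workspaceId: 'workspace_123' },
 *   queryParams: { page: 25 },
 *   ...props
 * });
 * ```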
*/ declare const listClusters: (variables: ListClustersVariables, signal?: AbortSignal) => Promise; type CreateClusterPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type CreateClusterError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 422; payload: SimpleError; } | { status: 423; payload: SimpleError; }>; type CreateClusterVariables = { body: ClusterCreateDetails; pathParams: CreateClusterPathParams; } & ControlPlaneFetcherExtraProps; declare const createCluster: (variables: CreateClusterVariables, signal?: AbortSignal) => Promise; type GetClusterPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Cluster ID */ clusterId: ClusterID; }; type GetClusterError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; }>; type GetClusterVariables = { pathParams: GetClusterPathParams; } & ControlPlaneFetcherExtraProps; /** * Retrieve metadata for given cluster ID */ declare const getCluster: (variables: GetClusterVariables, signal?: AbortSignal) => Promise; type UpdateClusterPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * Cluster ID */ clusterId: ClusterID; }; type UpdateClusterError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; }>; type UpdateClusterVariables = { body: ClusterUpdateDetails; pathParams: UpdateClusterPathParams; } & ControlPlaneFetcherExtraProps; /** * Update cluster for given cluster ID */ declare const updateCluster: (variables: UpdateClusterVariables, signal?: AbortSignal) => Promise; type GetDatabaseListPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type GetDatabaseListError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; }>; type GetDatabaseListVariables = { pathParams: GetDatabaseListPathParams; } & ControlPlaneFetcherExtraProps; /** * List all databases available in your Workspace. */ declare const getDatabaseList: (variables: GetDatabaseListVariables, signal?: AbortSignal) => Promise; type CreateDatabasePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type CreateDatabaseError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 422; payload: SimpleError; } | { status: 423; payload: SimpleError; }>; type CreateDatabaseResponse = { /** * @minLength 1 */ databaseName: string; branchName?: string; status: MigrationStatus; }; type CreateDatabaseRequestBody = { /** * @minLength 1 */ branchName?: string; /** * @minLength 1 */ region: string; /** * The dedicated cluster where branches from this database will be created. Defaults to 'shared-cluster'. 
* * @minLength 1 * @x-internal true */ defaultClusterID?: string; ui?: { color?: string; }; metadata?: BranchMetadata; }; type CreateDatabaseVariables = { body: CreateDatabaseRequestBody; pathParams: CreateDatabasePathParams; } & ControlPlaneFetcherExtraProps; /** * Create Database with identifier name */ declare const createDatabase: (variables: CreateDatabaseVariables, signal?: AbortSignal) => Promise; type DeleteDatabasePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type DeleteDatabaseError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteDatabaseResponse = { status: MigrationStatus; }; type DeleteDatabaseVariables = { pathParams: DeleteDatabasePathParams; } & ControlPlaneFetcherExtraProps; /** * Delete a database and all of its branches and tables permanently. */ declare const deleteDatabase: (variables: DeleteDatabaseVariables, signal?: AbortSignal) => Promise; type GetDatabaseMetadataPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type GetDatabaseMetadataError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetDatabaseMetadataVariables = { pathParams: GetDatabaseMetadataPathParams; } & ControlPlaneFetcherExtraProps; /** * Retrieve metadata of the given database */ declare const getDatabaseMetadata: (variables: GetDatabaseMetadataVariables, signal?: AbortSignal) => Promise; type UpdateDatabaseMetadataPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type UpdateDatabaseMetadataError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type UpdateDatabaseMetadataRequestBody = { ui?: { /** * @minLength 1 */ color?: string; }; /** * The dedicated cluster where branches from this database will be created. Defaults to 'shared-cluster'. 
* * @minLength 1 * @x-internal true */ defaultClusterID?: string; }; type UpdateDatabaseMetadataVariables = { body?: UpdateDatabaseMetadataRequestBody; pathParams: UpdateDatabaseMetadataPathParams; } & ControlPlaneFetcherExtraProps; /** * Update the color of the selected database */ declare const updateDatabaseMetadata: (variables: UpdateDatabaseMetadataVariables, signal?: AbortSignal) => Promise; type RenameDatabasePathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type RenameDatabaseError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 422; payload: SimpleError; } | { status: 423; payload: SimpleError; }>; type RenameDatabaseRequestBody = { /** * @minLength 1 */ newName: string; }; type RenameDatabaseVariables = { body: RenameDatabaseRequestBody; pathParams: RenameDatabasePathParams; } & ControlPlaneFetcherExtraProps; /** * Change the name of an existing database */ declare const renameDatabase: (variables: RenameDatabaseVariables, signal?: AbortSignal) => Promise; type GetDatabaseGithubSettingsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type GetDatabaseGithubSettingsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type GetDatabaseGithubSettingsVariables = { pathParams: GetDatabaseGithubSettingsPathParams; } & ControlPlaneFetcherExtraProps; /** * Retrieve the current GitHub database settings */ declare const getDatabaseGithubSettings: (variables: GetDatabaseGithubSettingsVariables, signal?: AbortSignal) => Promise; type UpdateDatabaseGithubSettingsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type UpdateDatabaseGithubSettingsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 422; payload: SimpleError; } | { status: 423; payload: SimpleError; }>; type UpdateDatabaseGithubSettingsVariables = { body: DatabaseGithubSettings; pathParams: UpdateDatabaseGithubSettingsPathParams; } & ControlPlaneFetcherExtraProps; /** * Map the database to a GitHub repository; Xata will create database branch previews for all new branches/PRs in the repo.
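 *
 * A usage sketch (hypothetical names; the DatabaseGithubSettings body is only
 * assumed to describe the target repository, and `props` stands for the
 * elided ControlPlaneFetcherExtraProps):
 *
 * ```ts
 * declare const props: ControlPlaneFetcherExtraProps;
 * declare const githubSettings: DatabaseGithubSettings; // the repository mapping to set
 *
 * await updateDatabaseGithubSettings({
 *   pathParams: { workspaceId: 'workspace_123', dbName: 'demo' },
 *   body: githubSettings,
 *   ...props
 * });
 * ```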
*/ declare const updateDatabaseGithubSettings: (variables: UpdateDatabaseGithubSettingsVariables, signal?: AbortSignal) => Promise; type DeleteDatabaseGithubSettingsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; /** * The Database Name */ dbName: DBName; }; type DeleteDatabaseGithubSettingsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; } | { status: 404; payload: SimpleError; }>; type DeleteDatabaseGithubSettingsVariables = { pathParams: DeleteDatabaseGithubSettingsPathParams; } & ControlPlaneFetcherExtraProps; /** * Delete any existing database GitHub settings */ declare const deleteDatabaseGithubSettings: (variables: DeleteDatabaseGithubSettingsVariables, signal?: AbortSignal) => Promise; type ListRegionsPathParams = { /** * Workspace ID */ workspaceId: WorkspaceID; }; type ListRegionsError = ErrorWrapper$1<{ status: 400; payload: BadRequestError; } | { status: 401; payload: AuthError; }>; type ListRegionsVariables = { pathParams: ListRegionsPathParams; } & ControlPlaneFetcherExtraProps; /** * List regions available to create a database on */ declare const listRegions: (variables: ListRegionsVariables, signal?: AbortSignal) => Promise; type DataPlaneFetcherExtraProps = { apiUrl: string; workspacesApiUrl: string | WorkspaceApiUrlBuilder; fetch: FetchImpl; apiKey: string; trace: TraceFunction; signal?: AbortSignal; clientID?: string; sessionID?: string; clientName?: string; xataAgentExtra?: Record; rawResponse?: boolean; headers?: Record; }; type ErrorWrapper = TError | { status: 'unknown'; payload: string; }; /** * Generated by @openapi-codegen * * @version 1.0 */ type ApplyMigrationPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type ApplyMigrationError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type ApplyMigrationRequestBody = { /** * Migration name */ name?: string; operations: { [key: string]: any; }[]; adaptTables?: boolean; }; type ApplyMigrationVariables = { body: ApplyMigrationRequestBody; pathParams: ApplyMigrationPathParams; } & DataPlaneFetcherExtraProps; /** * Applies a pgroll migration to the specified database. */ declare const applyMigration: (variables: ApplyMigrationVariables, signal?: AbortSignal) => Promise; type AdaptTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type AdaptTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type AdaptTableVariables = { pathParams: AdaptTablePathParams; } & DataPlaneFetcherExtraProps; /** * Adapt a table to be used from Xata. This will add the Xata metadata fields to the table, making it accessible through the data API. */ declare const adaptTable: (variables: AdaptTableVariables, signal?: AbortSignal) => Promise;
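/*
 * An applyMigration sketch (the pgroll operation shape is hypothetical, since
 * `operations` is typed as an open dictionary; `props` stands for the elided
 * DataPlaneFetcherExtraProps, and the flattened return type is assumed to be
 * ApplyMigrationResponse). The returned jobID can be polled with
 * getMigrationJobStatus, declared below:
 *
 * ```ts
 * declare const props: DataPlaneFetcherExtraProps;
 *
 * const { jobID } = await applyMigration({
 *   pathParams: { dbBranchName: 'demo:main', workspace: 'ws_123', region: 'us-east-1' },
 *   body: {
 *     name: 'add_users_email',
 *     operations: [
 *       { add_column: { table: 'users', column: { name: 'email', type: 'text' } } }
 *     ]
 *   },
 *   ...props
 * });
 * ```
 */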
type AdaptAllTablesPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type AdaptAllTablesError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type AdaptAllTablesVariables = { pathParams: AdaptAllTablesPathParams; } & DataPlaneFetcherExtraProps; /** * Adapt all Xata-incompatible tables present in the branch. This will add the Xata metadata fields to the tables, making them accessible through the data API. */ declare const adaptAllTables: (variables: AdaptAllTablesVariables, signal?: AbortSignal) => Promise; type GetBranchMigrationJobStatusPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchMigrationJobStatusError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchMigrationJobStatusVariables = { pathParams: GetBranchMigrationJobStatusPathParams; } & DataPlaneFetcherExtraProps; declare const getBranchMigrationJobStatus: (variables: GetBranchMigrationJobStatusVariables, signal?: AbortSignal) => Promise; type GetMigrationJobStatusPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The id of the migration job */ jobId: MigrationJobID; workspace: string; region: string; }; type GetMigrationJobStatusError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetMigrationJobStatusVariables = { pathParams: GetMigrationJobStatusPathParams; } & DataPlaneFetcherExtraProps; declare const getMigrationJobStatus: (variables: GetMigrationJobStatusVariables, signal?: AbortSignal) => Promise;
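/*
 * A polling sketch for migration jobs (hypothetical IDs; `props` stands for
 * the elided DataPlaneFetcherExtraProps, and the flattened return type is
 * assumed to be MigrationJobStatusResponse). A caller can loop until the job
 * leaves the 'pending' / 'in_progress' states:
 *
 * ```ts
 * declare const props: DataPlaneFetcherExtraProps;
 *
 * const pathParams = { dbBranchName: 'demo:main', jobId: 'job_123', workspace: 'ws_123', region: 'us-east-1' };
 * let job = await getMigrationJobStatus({ pathParams, ...props });
 * while (job.status === 'pending' || job.status === 'in_progress') {
 *   await new Promise((resolve) => setTimeout(resolve, 1000));
 *   job = await getMigrationJobStatus({ pathParams, ...props });
 * }
 * if (job.status === 'failed') throw new Error(job.error);
 * ```
 */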
type GetMigrationHistoryPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetMigrationHistoryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetMigrationHistoryVariables = { pathParams: GetMigrationHistoryPathParams; } & DataPlaneFetcherExtraProps; declare const getMigrationHistory: (variables: GetMigrationHistoryVariables, signal?: AbortSignal) => Promise; type GetBranchListPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type GetBranchListError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchListVariables = { pathParams: GetBranchListPathParams; } & DataPlaneFetcherExtraProps; /** * List all available Branches */ declare const getBranchList: (variables: GetBranchListVariables, signal?: AbortSignal) => Promise; type GetDatabaseSettingsPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type GetDatabaseSettingsError = ErrorWrapper<{ status: 400; payload: SimpleError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetDatabaseSettingsVariables = { pathParams: GetDatabaseSettingsPathParams; } & DataPlaneFetcherExtraProps; /** * Get database settings */ declare const getDatabaseSettings: (variables: GetDatabaseSettingsVariables, signal?: AbortSignal) => Promise; type UpdateDatabaseSettingsPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type UpdateDatabaseSettingsError = ErrorWrapper<{ status: 400; payload: SimpleError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type UpdateDatabaseSettingsRequestBody = { searchEnabled?: boolean; }; type UpdateDatabaseSettingsVariables = { body?: UpdateDatabaseSettingsRequestBody; pathParams: UpdateDatabaseSettingsPathParams; } & DataPlaneFetcherExtraProps; /** * Update database settings. This endpoint can be used to disable search */ declare const updateDatabaseSettings: (variables: UpdateDatabaseSettingsVariables, signal?: AbortSignal) => Promise; type GetBranchDetailsPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchDetailsError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchDetailsVariables = { pathParams: GetBranchDetailsPathParams; } & DataPlaneFetcherExtraProps; declare const getBranchDetails: (variables: GetBranchDetailsVariables, signal?: AbortSignal) => Promise; type CreateBranchPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type CreateBranchQueryParams = { /** * Name of source branch to branch the new schema from */ from?: string; }; type CreateBranchError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 423; payload: SimpleError$1; }>; type CreateBranchResponse = { /** * @minLength 1 */ databaseName: string; branchName: string; status: MigrationStatus$1; }; type CreateBranchRequestBody = { /** * Select the branch to fork from.
Defaults to 'main' */ from?: string; /** * Select the dedicated cluster to create on. Defaults to 'xata-cloud' * * @minLength 1 * @x-internal true */ clusterID?: string; metadata?: BranchMetadata$1; }; type CreateBranchVariables = { body?: CreateBranchRequestBody; pathParams: CreateBranchPathParams; queryParams?: CreateBranchQueryParams; } & DataPlaneFetcherExtraProps; declare const createBranch: (variables: CreateBranchVariables, signal?: AbortSignal) => Promise; type DeleteBranchPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type DeleteBranchError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 409; payload: SimpleError$1; }>; type DeleteBranchResponse = { status: MigrationStatus$1; }; type DeleteBranchVariables = { pathParams: DeleteBranchPathParams; } & DataPlaneFetcherExtraProps; /** * Delete the branch in the database and all its resources */ declare const deleteBranch: (variables: DeleteBranchVariables, signal?: AbortSignal) => Promise; type GetSchemaPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetSchemaError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetSchemaResponse = { schema: BranchSchema; }; type GetSchemaVariables = { pathParams: GetSchemaPathParams; } & DataPlaneFetcherExtraProps; declare const getSchema: (variables: GetSchemaVariables, signal?: AbortSignal) => Promise; type CopyBranchPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type CopyBranchError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type CopyBranchRequestBody = { destinationBranch: string; limit?: number; }; type CopyBranchVariables = { body: CopyBranchRequestBody; pathParams: CopyBranchPathParams; } & DataPlaneFetcherExtraProps; /** * Create a copy of the branch */ declare const copyBranch: (variables: CopyBranchVariables, signal?: AbortSignal) => Promise; type UpdateBranchMetadataPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type UpdateBranchMetadataError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type UpdateBranchMetadataVariables = { body?: BranchMetadata$1; pathParams: UpdateBranchMetadataPathParams; } & DataPlaneFetcherExtraProps; /** * Update the branch metadata */ declare const updateBranchMetadata: (variables: UpdateBranchMetadataVariables, signal?: AbortSignal) => Promise; type GetBranchMetadataPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchMetadataError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchMetadataVariables = { pathParams: GetBranchMetadataPathParams; } & DataPlaneFetcherExtraProps; declare const getBranchMetadata: (variables: GetBranchMetadataVariables, signal?: AbortSignal) => Promise; type GetBranchStatsPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchStatsError = ErrorWrapper<{ status: 400; payload: SimpleError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchStatsResponse = { timestamp: string; interval: string; resolution: string; numberOfRecords?: MetricsDatapoint[]; writesOverTime?: MetricsDatapoint[]; readsOverTime?: MetricsDatapoint[]; readLatency?: MetricsLatency; writeLatency?: MetricsLatency; warning?: string; }; type GetBranchStatsVariables = { pathParams: GetBranchStatsPathParams; } & DataPlaneFetcherExtraProps; /** * Get branch usage metrics. */ declare const getBranchStats: (variables: GetBranchStatsVariables, signal?: AbortSignal) => Promise; type GetGitBranchesMappingPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type GetGitBranchesMappingError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; }>; type GetGitBranchesMappingVariables = { pathParams: GetGitBranchesMappingPathParams; } & DataPlaneFetcherExtraProps; /** * Lists all the git branches in the mapping, and their associated Xata branches. * * Example response: * * ```json * { * "mappings": [ * { * "gitBranch": "main", * "xataBranch": "main" * }, * { * "gitBranch": "gitBranch1", * "xataBranch": "xataBranch1" * }, * { * "gitBranch": "gitBranch2", * "xataBranch": "xataBranch2" * } * ] * } * ``` */ declare const getGitBranchesMapping: (variables: GetGitBranchesMappingVariables, signal?: AbortSignal) => Promise; type AddGitBranchesEntryPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type AddGitBranchesEntryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; }>; type AddGitBranchesEntryResponse = { /** * Warning message */ warning?: string; }; type AddGitBranchesEntryRequestBody = { /** * The name of the Git branch. */ gitBranch: string; /** * The name of the Xata branch. */ xataBranch: BranchName$1; }; type AddGitBranchesEntryVariables = { body: AddGitBranchesEntryRequestBody; pathParams: AddGitBranchesEntryPathParams; } & DataPlaneFetcherExtraProps; /** * Adds an entry to the mapping of git branches to Xata branches. The git branch and the Xata branch must be present in the body of the request. If the Xata branch doesn't exist, a 400 error is returned. * * If the git branch is already present in the mapping, the old entry is overwritten, and a warning message is included in the response. If the git branch is added and didn't exist before, the response code is 204. If the git branch existed and it was overwritten, the response code is 201.
* * Example request: * * ```json * // POST https://tutorial-ng7s8c.xata.sh/dbs/demo/gitBranches * { * "gitBranch": "fix/bug123", * "xataBranch": "fix_bug" * } * ``` */ declare const addGitBranchesEntry: (variables: AddGitBranchesEntryVariables, signal?: AbortSignal) => Promise; type RemoveGitBranchesEntryPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type RemoveGitBranchesEntryQueryParams = { /** * The git branch to remove from the mapping */ gitBranch: string; }; type RemoveGitBranchesEntryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; }>; type RemoveGitBranchesEntryVariables = { pathParams: RemoveGitBranchesEntryPathParams; queryParams: RemoveGitBranchesEntryQueryParams; } & DataPlaneFetcherExtraProps; /** * Removes an entry from the mapping of git branches to Xata branches. The name of the git branch must be passed as a query parameter. If the git branch is not found, the endpoint returns a 404 status code. * * Example request: * * ```json * // DELETE https://tutorial-ng7s8c.xata.sh/dbs/demo/gitBranches?gitBranch=fix%2Fbug123 * ``` */ declare const removeGitBranchesEntry: (variables: RemoveGitBranchesEntryVariables, signal?: AbortSignal) => Promise; type ResolveBranchPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type ResolveBranchQueryParams = { /** * The Git Branch */ gitBranch?: string; /** * Default branch to fall back to */ fallbackBranch?: string; }; type ResolveBranchError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; }>; type ResolveBranchResponse = { branch: string; reason: { code: 'FOUND_IN_MAPPING' | 'BRANCH_EXISTS' | 'FALLBACK_BRANCH' | 'DEFAULT_BRANCH'; message: string; }; }; type ResolveBranchVariables = { pathParams: ResolveBranchPathParams; queryParams?: ResolveBranchQueryParams; } & DataPlaneFetcherExtraProps; /** * In order to resolve the database branch, the following algorithm is used: * * if the `gitBranch` was provided and is found in the [git branches mapping](/docs/api-reference/dbs/db_name/gitBranches), the associated Xata branch is returned * * else, if a Xata branch with the exact same name as `gitBranch` exists, return it * * else, if `fallbackBranch` is provided and a branch with that name exists, return it * * else, return the default branch of the DB (`main` or the first branch) * * Example call: * * ```json * // GET https://tutorial-ng7s8c.xata.sh/dbs/demo/resolveBranch?gitBranch=test&fallbackBranch=tsg * ``` * * Example response: * * ```json * { * "branch": "main", * "reason": { * "code": "DEFAULT_BRANCH", * "message": "Default branch for this database (main)" * } * } * ``` */ declare const resolveBranch: (variables: ResolveBranchVariables, signal?: AbortSignal) => Promise;
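/*
 * A resolveBranch sketch (hypothetical names; `props` stands for the elided
 * DataPlaneFetcherExtraProps, and the flattened return type is assumed to be
 * ResolveBranchResponse). The reason code reports which rule of the algorithm
 * above matched:
 *
 * ```ts
 * declare const props: DataPlaneFetcherExtraProps;
 *
 * const { branch, reason } = await resolveBranch({
 *   pathParams: { dbName: 'demo', workspace: 'ws_123', region: 'us-east-1' },
 *   queryParams: { gitBranch: 'fix/bug123', fallbackBranch: 'main' },
 *   ...props
 * });
 * console.log(branch, reason.code); // e.g. 'main', 'FALLBACK_BRANCH'
 * ```
 */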
type GetBranchMigrationHistoryPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchMigrationHistoryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchMigrationHistoryResponse = { startedFrom?: StartedFromMetadata; migrations?: BranchMigration[]; }; type GetBranchMigrationHistoryRequestBody = { limit?: number; startFrom?: string; }; type GetBranchMigrationHistoryVariables = { body?: GetBranchMigrationHistoryRequestBody; pathParams: GetBranchMigrationHistoryPathParams; } & DataPlaneFetcherExtraProps; declare const getBranchMigrationHistory: (variables: GetBranchMigrationHistoryVariables, signal?: AbortSignal) => Promise; type GetBranchMigrationPlanPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchMigrationPlanError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchMigrationPlanVariables = { body: Schema; pathParams: GetBranchMigrationPlanPathParams; } & DataPlaneFetcherExtraProps; /** * Compute a migration plan from a target schema that the branch should be migrated to. */ declare const getBranchMigrationPlan: (variables: GetBranchMigrationPlanVariables, signal?: AbortSignal) => Promise; type ExecuteBranchMigrationPlanPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type ExecuteBranchMigrationPlanError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type ExecuteBranchMigrationPlanRequestBody = { version: number; migration: BranchMigration; }; type ExecuteBranchMigrationPlanVariables = { body: ExecuteBranchMigrationPlanRequestBody; pathParams: ExecuteBranchMigrationPlanPathParams; } & DataPlaneFetcherExtraProps; /** * Apply a migration plan to the branch */ declare const executeBranchMigrationPlan: (variables: ExecuteBranchMigrationPlanVariables, signal?: AbortSignal) => Promise; type QueryMigrationRequestsPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type QueryMigrationRequestsError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type QueryMigrationRequestsResponse = { migrationRequests: MigrationRequest[]; meta: RecordsMetadata; }; type QueryMigrationRequestsRequestBody = { filter?: FilterExpression; sort?: SortExpression; page?: PageConfig; columns?: ColumnsProjection; }; type QueryMigrationRequestsVariables = { body?: QueryMigrationRequestsRequestBody; pathParams: QueryMigrationRequestsPathParams; } & DataPlaneFetcherExtraProps; declare const queryMigrationRequests: (variables: QueryMigrationRequestsVariables, signal?: AbortSignal) => Promise;
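/*
 * A queryMigrationRequests sketch (the filter and page shapes are
 * hypothetical, since FilterExpression and PageConfig are declared elsewhere;
 * `props` stands for the elided DataPlaneFetcherExtraProps):
 *
 * ```ts
 * declare const props: DataPlaneFetcherExtraProps;
 *
 * const open = await queryMigrationRequests({
 *   pathParams: { dbName: 'demo', workspace: 'ws_123', region: 'us-east-1' },
 *   body: { filter: { status: 'open' }, page: { size: 10 } },
 *   ...props
 * });
 * ```
 */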
type CreateMigrationRequestPathParams = { /** * The Database Name */ dbName: DBName$1; workspace: string; region: string; }; type CreateMigrationRequestError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type CreateMigrationRequestResponse = { number: number; }; type CreateMigrationRequestRequestBody = { /** * The source branch. */ source: string; /** * The target branch. */ target: string; /** * The title. */ title: string; /** * Optional migration request description. */ body?: string; }; type CreateMigrationRequestVariables = { body: CreateMigrationRequestRequestBody; pathParams: CreateMigrationRequestPathParams; } & DataPlaneFetcherExtraProps; declare const createMigrationRequest: (variables: CreateMigrationRequestVariables, signal?: AbortSignal) => Promise; type GetMigrationRequestPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type GetMigrationRequestError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetMigrationRequestVariables = { pathParams: GetMigrationRequestPathParams; } & DataPlaneFetcherExtraProps; declare const getMigrationRequest: (variables: GetMigrationRequestVariables, signal?: AbortSignal) => Promise; type UpdateMigrationRequestPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type UpdateMigrationRequestError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type UpdateMigrationRequestRequestBody = { /** * New migration request title. */ title?: string; /** * New migration request description. */ body?: string; /** * Change the migration request status. */ status?: 'open' | 'closed'; }; type UpdateMigrationRequestVariables = { body?: UpdateMigrationRequestRequestBody; pathParams: UpdateMigrationRequestPathParams; } & DataPlaneFetcherExtraProps; declare const updateMigrationRequest: (variables: UpdateMigrationRequestVariables, signal?: AbortSignal) => Promise; type ListMigrationRequestsCommitsPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type ListMigrationRequestsCommitsError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type ListMigrationRequestsCommitsResponse = { meta: { /** * last record id */ cursor: string; /** * true if more records can be fetched */ more: boolean; }; logs: Commit[]; }; type ListMigrationRequestsCommitsRequestBody = { page?: { /** * Query the next page that follows the cursor. */ after?: string; /** * Query the previous page before the cursor. */ before?: string; /** * Set page size. If the size is missing it is read from the cursor. If no cursor is given, Xata will choose the default page size. * * @default 20 */ size?: number; }; }; type ListMigrationRequestsCommitsVariables = { body?: ListMigrationRequestsCommitsRequestBody; pathParams: ListMigrationRequestsCommitsPathParams; } & DataPlaneFetcherExtraProps; declare const listMigrationRequestsCommits: (variables: ListMigrationRequestsCommitsVariables, signal?: AbortSignal) => Promise;
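/*
 * A cursor-pagination sketch over migration request commits (hypothetical
 * names; `props` stands for the elided DataPlaneFetcherExtraProps, and the
 * flattened return type is assumed to be ListMigrationRequestsCommitsResponse):
 * keep requesting the page after the returned cursor while meta.more is true.
 *
 * ```ts
 * declare const props: DataPlaneFetcherExtraProps;
 *
 * let after: string | undefined;
 * do {
 *   const { meta, logs } = await listMigrationRequestsCommits({
 *     pathParams: { dbName: 'demo', mrNumber: 1, workspace: 'ws_123', region: 'us-east-1' },
 *     body: { page: { after, size: 20 } },
 *     ...props
 *   });
 *   logs.forEach((commit) => console.log(commit));
 *   after = meta.more ? meta.cursor : undefined;
 * } while (after);
 * ```
 */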
type CompareMigrationRequestPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type CompareMigrationRequestError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type CompareMigrationRequestVariables = { pathParams: CompareMigrationRequestPathParams; } & DataPlaneFetcherExtraProps; declare const compareMigrationRequest: (variables: CompareMigrationRequestVariables, signal?: AbortSignal) => Promise; type GetMigrationRequestIsMergedPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type GetMigrationRequestIsMergedError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetMigrationRequestIsMergedResponse = { merged?: boolean; }; type GetMigrationRequestIsMergedVariables = { pathParams: GetMigrationRequestIsMergedPathParams; } & DataPlaneFetcherExtraProps; declare const getMigrationRequestIsMerged: (variables: GetMigrationRequestIsMergedVariables, signal?: AbortSignal) => Promise; type MergeMigrationRequestPathParams = { /** * The Database Name */ dbName: DBName$1; /** * The migration request number. */ mrNumber: MigrationRequestNumber; workspace: string; region: string; }; type MergeMigrationRequestError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type MergeMigrationRequestVariables = { pathParams: MergeMigrationRequestPathParams; } & DataPlaneFetcherExtraProps; declare const mergeMigrationRequest: (variables: MergeMigrationRequestVariables, signal?: AbortSignal) => Promise; type GetBranchSchemaHistoryPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type GetBranchSchemaHistoryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetBranchSchemaHistoryResponse = { meta: { /** * last record id */ cursor: string; /** * true if more records can be fetched */ more: boolean; }; logs: Commit[]; }; type GetBranchSchemaHistoryRequestBody = { page?: { /** * Query the next page that follows the cursor. */ after?: string; /** * Query the previous page before the cursor. */ before?: string; /** * Set page size. If the size is missing it is read from the cursor. If no cursor is given, Xata will choose the default page size. * * @default 20 */ size?: number; }; /** * Report only migrations that have been added since the given Migration ID. */ since?: string; }; type GetBranchSchemaHistoryVariables = { body?: GetBranchSchemaHistoryRequestBody; pathParams: GetBranchSchemaHistoryPathParams; } & DataPlaneFetcherExtraProps; declare const getBranchSchemaHistory: (variables: GetBranchSchemaHistoryVariables, signal?: AbortSignal) => Promise; type CompareBranchWithUserSchemaPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/ dbBranchName: DBBranchName; workspace: string; region: string; }; type CompareBranchWithUserSchemaError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type CompareBranchWithUserSchemaRequestBody = { schema: Schema; schemaOperations?: MigrationOp[]; branchOperations?: MigrationOp[]; }; type CompareBranchWithUserSchemaVariables = { body: CompareBranchWithUserSchemaRequestBody; pathParams: CompareBranchWithUserSchemaPathParams; } & DataPlaneFetcherExtraProps; declare const compareBranchWithUserSchema: (variables: CompareBranchWithUserSchemaVariables, signal?: AbortSignal) => Promise; type CompareBranchSchemasPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Database Name */ branchName: BranchName$1; workspace: string; region: string; }; type CompareBranchSchemasError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type CompareBranchSchemasRequestBody = { sourceBranchOperations?: MigrationOp[]; targetBranchOperations?: MigrationOp[]; }; type CompareBranchSchemasVariables = { body: CompareBranchSchemasRequestBody; pathParams: CompareBranchSchemasPathParams; } & DataPlaneFetcherExtraProps; declare const compareBranchSchemas: (variables: CompareBranchSchemasVariables, signal?: AbortSignal) => Promise; type UpdateBranchSchemaPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type UpdateBranchSchemaError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type UpdateBranchSchemaVariables = { body: Migration; pathParams: UpdateBranchSchemaPathParams; } & DataPlaneFetcherExtraProps; declare const updateBranchSchema: (variables: UpdateBranchSchemaVariables, signal?: AbortSignal) => Promise; type PreviewBranchSchemaEditPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type PreviewBranchSchemaEditError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type PreviewBranchSchemaEditResponse = { original: Schema; updated: Schema; }; type PreviewBranchSchemaEditRequestBody = { edits?: SchemaEditScript; }; type PreviewBranchSchemaEditVariables = { body?: PreviewBranchSchemaEditRequestBody; pathParams: PreviewBranchSchemaEditPathParams; } & DataPlaneFetcherExtraProps; declare const previewBranchSchemaEdit: (variables: PreviewBranchSchemaEditVariables, signal?: AbortSignal) => Promise; type ApplyBranchSchemaEditPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; workspace: string; region: string; }; type ApplyBranchSchemaEditError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type ApplyBranchSchemaEditRequestBody = { edits: SchemaEditScript; }; type ApplyBranchSchemaEditVariables = { body: ApplyBranchSchemaEditRequestBody; pathParams: ApplyBranchSchemaEditPathParams; } & DataPlaneFetcherExtraProps; declare const applyBranchSchemaEdit: (variables: ApplyBranchSchemaEditVariables, signal?: AbortSignal) => Promise; type PushBranchMigrationsPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type PushBranchMigrationsError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type PushBranchMigrationsRequestBody = { migrations: MigrationObject[]; }; type PushBranchMigrationsVariables = { body: PushBranchMigrationsRequestBody; pathParams: PushBranchMigrationsPathParams; } & DataPlaneFetcherExtraProps; /** * The `schema/push` API accepts a list of migrations to be applied to the * current branch. A list of applicable migrations can be fetched using * the `schema/history` API from another branch or database. * * The most recent migration must be part of the list or referenced (via * `parentID`) by the first migration in the list of migrations to be pushed. * * Each migration in the list has an `id`, `parentID`, and `checksum`. The * checksums for migrations are generated and verified by Xata. The * operation fails if any migration in the list has an invalid checksum. */ declare const pushBranchMigrations: (variables: PushBranchMigrationsVariables, signal?: AbortSignal) => Promise; type CreateTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type CreateTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type CreateTableResponse = { branchName: string; /** * @minLength 1 */ tableName: string; status: MigrationStatus$1; }; type CreateTableVariables = { pathParams: CreateTablePathParams; } & DataPlaneFetcherExtraProps; /** * Creates a new table with the given name. Returns 422 if a table with the same name already exists. */ declare const createTable: (variables: CreateTableVariables, signal?: AbortSignal) => Promise; type DeleteTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type DeleteTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; }>; type DeleteTableResponse = { status: MigrationStatus$1; }; type DeleteTableVariables = { pathParams: DeleteTablePathParams; } & DataPlaneFetcherExtraProps; /** * Deletes the table with the given name. */ declare const deleteTable: (variables: DeleteTableVariables, signal?: AbortSignal) => Promise; type UpdateTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type UpdateTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type UpdateTableRequestBody = { /** * @minLength 1 */ name: string; }; type UpdateTableVariables = { body: UpdateTableRequestBody; pathParams: UpdateTablePathParams; } & DataPlaneFetcherExtraProps; /** * Update table. Currently there is only one update operation supported: renaming the table by providing a new name. * * In the example below, we rename a table from “users” to “people”: * * ```json * // PATCH /db/test:main/tables/users * * { * "name": "people" * } * ``` */ declare const updateTable: (variables: UpdateTableVariables, signal?: AbortSignal) => Promise; type GetTableSchemaPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type GetTableSchemaError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetTableSchemaResponse = { columns: Column[]; }; type GetTableSchemaVariables = { pathParams: GetTableSchemaPathParams; } & DataPlaneFetcherExtraProps; declare const getTableSchema: (variables: GetTableSchemaVariables, signal?: AbortSignal) => Promise; type SetTableSchemaPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type SetTableSchemaError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 409; payload: SimpleError$1; }>; type SetTableSchemaRequestBody = { columns: Column[]; }; type SetTableSchemaVariables = { body: SetTableSchemaRequestBody; pathParams: SetTableSchemaPathParams; } & DataPlaneFetcherExtraProps; declare const setTableSchema: (variables: SetTableSchemaVariables, signal?: AbortSignal) => Promise; type GetTableColumnsPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type GetTableColumnsError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetTableColumnsResponse = { columns: Column[]; }; type GetTableColumnsVariables = { pathParams: GetTableColumnsPathParams; } & DataPlaneFetcherExtraProps; /** * Retrieves the list of table columns and their definition. This endpoint returns the column list with object columns being reported with their * full dot-separated path (flattened). */ declare const getTableColumns: (variables: GetTableColumnsVariables, signal?: AbortSignal) => Promise; type AddTableColumnPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type AddTableColumnError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type AddTableColumnVariables = { body: Column; pathParams: AddTableColumnPathParams; } & DataPlaneFetcherExtraProps; /** * Adds a new column to the table. The body of the request should contain the column definition. */ declare const addTableColumn: (variables: AddTableColumnVariables, signal?: AbortSignal) => Promise; type GetColumnPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type GetColumnError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetColumnVariables = { pathParams: GetColumnPathParams; } & DataPlaneFetcherExtraProps; /** * Get the definition of a single column. */ declare const getColumn: (variables: GetColumnVariables, signal?: AbortSignal) => Promise; type UpdateColumnPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type UpdateColumnError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type UpdateColumnRequestBody = { /** * @minLength 1 */ name: string; }; type UpdateColumnVariables = { body: UpdateColumnRequestBody; pathParams: UpdateColumnPathParams; } & DataPlaneFetcherExtraProps; /** * Update column with partial data. Can be used for renaming the column by providing a new "name" field. */ declare const updateColumn: (variables: UpdateColumnVariables, signal?: AbortSignal) => Promise; type DeleteColumnPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type DeleteColumnError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type DeleteColumnVariables = { pathParams: DeleteColumnPathParams; } & DataPlaneFetcherExtraProps; /** * Deletes the specified column. */ declare const deleteColumn: (variables: DeleteColumnVariables, signal?: AbortSignal) => Promise; type BranchTransactionPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; workspace: string; region: string; }; type BranchTransactionError = ErrorWrapper<{ status: 400; payload: TransactionFailure; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 429; payload: RateLimitError; }>; type BranchTransactionRequestBody = { operations: TransactionOperation$1[]; }; type BranchTransactionVariables = { body: BranchTransactionRequestBody; pathParams: BranchTransactionPathParams; } & DataPlaneFetcherExtraProps; declare const branchTransaction: (variables: BranchTransactionVariables, signal?: AbortSignal) => Promise; type InsertRecordPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type InsertRecordQueryParams = { /** * Column filters */ columns?: ColumnsProjection; }; type InsertRecordError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type InsertRecordVariables = { body?: DataInputRecord; pathParams: InsertRecordPathParams; queryParams?: InsertRecordQueryParams; } & DataPlaneFetcherExtraProps; /** * Insert a new Record into the Table */ declare const insertRecord: (variables: InsertRecordVariables, signal?: AbortSignal) => Promise; type GetFileItemPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; /** * The File Identifier */ fileId: FileItemID; workspace: string; region: string; }; type GetFileItemError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetFileItemVariables = { pathParams: GetFileItemPathParams; } & DataPlaneFetcherExtraProps; /** * Retrieves file content from an array by file ID */ declare const getFileItem: (variables: GetFileItemVariables, signal?: AbortSignal) => Promise; type PutFileItemPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; /** * The File Identifier */ fileId: FileItemID; workspace: string; region: string; }; type PutFileItemError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type PutFileItemVariables = { body?: Blob; pathParams: PutFileItemPathParams; } & DataPlaneFetcherExtraProps; /** * Uploads the file content to an array given the file ID */ declare const putFileItem: (variables: PutFileItemVariables, signal?: AbortSignal) => Promise; type DeleteFileItemPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; /** * The File Identifier */ fileId: FileItemID; workspace: string; region: string; }; type DeleteFileItemError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type DeleteFileItemVariables = { pathParams: DeleteFileItemPathParams; } & DataPlaneFetcherExtraProps; /** * Deletes an item from a file array column given the file ID */ declare const deleteFileItem: (variables: DeleteFileItemVariables, signal?: AbortSignal) => Promise; type GetFilePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type GetFileError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetFileVariables = { pathParams: GetFilePathParams; } & DataPlaneFetcherExtraProps; /** * Retrieves the file content from a file column */ declare const getFile: (variables: GetFileVariables, signal?: AbortSignal) => Promise; type PutFilePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type PutFileError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type PutFileVariables = { body?: Blob; pathParams: PutFilePathParams; } & DataPlaneFetcherExtraProps; /** * Uploads the file content to the given file column */ declare const putFile: (variables: PutFileVariables, signal?: AbortSignal) => Promise; type DeleteFilePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; /** * The Column name */ columnName: ColumnName; workspace: string; region: string; }; type DeleteFileError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type DeleteFileVariables = { pathParams: DeleteFilePathParams; } & DataPlaneFetcherExtraProps; /** * Deletes a file referenced by a file column */ declare const deleteFile: (variables: DeleteFileVariables, signal?: AbortSignal) => Promise; type GetRecordPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; workspace: string; region: string; }; type GetRecordQueryParams = { /** * Column filters */ columns?: ColumnsProjection; }; type GetRecordError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type GetRecordVariables = { pathParams: GetRecordPathParams; queryParams?: GetRecordQueryParams; } & DataPlaneFetcherExtraProps; /** * Retrieve record by ID */ declare const getRecord: (variables: GetRecordVariables, signal?: AbortSignal) => Promise; type InsertRecordWithIDPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; workspace: string; region: string; }; type InsertRecordWithIDQueryParams = { /** * Column filters */ columns?: ColumnsProjection; createOnly?: boolean; ifVersion?: number; }; type InsertRecordWithIDError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type InsertRecordWithIDVariables = { body?: DataInputRecord; pathParams: InsertRecordWithIDPathParams; queryParams?: InsertRecordWithIDQueryParams; } & DataPlaneFetcherExtraProps; /** * By default, IDs are auto-generated when data is inserted into Xata. Sending a request to this endpoint allows us to insert a record with a pre-existing ID, bypassing the default automatic ID generation. */ declare const insertRecordWithID: (variables: InsertRecordWithIDVariables, signal?: AbortSignal) => Promise; type UpdateRecordWithIDPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; workspace: string; region: string; }; type UpdateRecordWithIDQueryParams = { /** * Column filters */ columns?: ColumnsProjection; ifVersion?: number; }; type UpdateRecordWithIDError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type UpdateRecordWithIDVariables = { body?: DataInputRecord; pathParams: UpdateRecordWithIDPathParams; queryParams?: UpdateRecordWithIDQueryParams; } & DataPlaneFetcherExtraProps; declare const updateRecordWithID: (variables: UpdateRecordWithIDVariables, signal?: AbortSignal) => Promise; type UpsertRecordWithIDPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. 
*/ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; workspace: string; region: string; }; type UpsertRecordWithIDQueryParams = { /** * Column filters */ columns?: ColumnsProjection; ifVersion?: number; }; type UpsertRecordWithIDError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type UpsertRecordWithIDVariables = { body?: DataInputRecord; pathParams: UpsertRecordWithIDPathParams; queryParams?: UpsertRecordWithIDQueryParams; } & DataPlaneFetcherExtraProps; declare const upsertRecordWithID: (variables: UpsertRecordWithIDVariables, signal?: AbortSignal) => Promise; type DeleteRecordPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * The Record name */ recordId: RecordID; workspace: string; region: string; }; type DeleteRecordQueryParams = { /** * Column filters */ columns?: ColumnsProjection; }; type DeleteRecordError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type DeleteRecordVariables = { pathParams: DeleteRecordPathParams; queryParams?: DeleteRecordQueryParams; } & DataPlaneFetcherExtraProps; declare const deleteRecord: (variables: DeleteRecordVariables, signal?: AbortSignal) => Promise; type BulkInsertTableRecordsPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type BulkInsertTableRecordsQueryParams = { /** * Column filters */ columns?: ColumnsProjection; }; type BulkInsertTableRecordsError = ErrorWrapper<{ status: 400; payload: BulkError; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 422; payload: SimpleError$1; }>; type BulkInsertTableRecordsRequestBody = { records: DataInputRecord[]; }; type BulkInsertTableRecordsVariables = { body: BulkInsertTableRecordsRequestBody; pathParams: BulkInsertTableRecordsPathParams; queryParams?: BulkInsertTableRecordsQueryParams; } & DataPlaneFetcherExtraProps; /** * Bulk insert records */ declare const bulkInsertTableRecords: (variables: BulkInsertTableRecordsVariables, signal?: AbortSignal) => Promise; type QueryTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type QueryTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 503; payload: ServiceUnavailableError; }>; type QueryTableRequestBody = { filter?: FilterExpression; sort?: SortExpression; page?: PageConfig; columns?: QueryColumnsProjection; /** * The consistency level for this request. * * @default strong */ consistency?: 'strong' | 'eventual'; }; type QueryTableVariables = { body?: QueryTableRequestBody; pathParams: QueryTablePathParams; } & DataPlaneFetcherExtraProps; /** * The Query Table API can be used to retrieve all records in a table. * The API supports filtering, sorting, selecting a subset of columns, and pagination.
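* * As a minimal usage sketch (hedged: the workspace, region, and the spread `fetcherProps` value stand in for the required `DataPlaneFetcherExtraProps`, whose fields are defined elsewhere in the SDK), the generated `queryTable` helper can be called like this: * * ```ts * // Hypothetical call; only pathParams and body are declared in this file. * const response = await queryTable({ * pathParams: { * workspace: 'my-workspace', * region: 'us-east-1', * dbBranchName: 'mydb:main', * tableName: 'users' * }, * body: { page: { size: 20 } }, * ...fetcherProps // placeholder for DataPlaneFetcherExtraProps * }); * ```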
* * The overall structure of the request looks like this: * * ```json * // POST /db/{db_name}:{branch_name}/tables/{table_name}/query * { * "columns": [...], * "filter": { * "$all": [...], * "$any": [...] * ... * }, * "sort": { * "multiple": [...] * ... * }, * "page": { * ... * } * } * ``` * * For usage, see also the [Xata SDK documentation](https://xata.io/docs/sdk/get). * * ### Column selection * * If the `columns` array is not specified, all columns are included. For link * fields, only the ID column of the linked records is included in the response. * * If the `columns` array is specified, only the selected columns and the internal * columns `id` and `xata` are included. The `*` wildcard can be used to * select all columns. * * For objects and link fields, if the column name of the object is specified, we * include all of its sub-keys. If only some sub-keys are specified (via dotted * notation, e.g. `"settings.plan"`), then only those sub-keys from the object * are included. * * By way of example, assuming two tables like this: * * ```json {"truncate": true} * { * "tables": [ * { * "name": "teams", * "columns": [ * { * "name": "name", * "type": "string" * }, * { * "name": "owner", * "type": "link", * "link": { * "table": "users" * } * }, * { * "name": "foundedDate", * "type": "datetime" * } * ] * }, * { * "name": "users", * "columns": [ * { * "name": "email", * "type": "email" * }, * { * "name": "full_name", * "type": "string" * }, * { * "name": "address", * "type": "object", * "columns": [ * { * "name": "street", * "type": "string" * }, * { * "name": "number", * "type": "int" * }, * { * "name": "zipcode", * "type": "int" * } * ] * }, * { * "name": "team", * "type": "link", * "link": { * "table": "teams" * } * } * ] * } * ] * } * ``` * * A query like this: * * ```json * POST /db/{db_name}:{branch_name}/tables/{table_name}/query * { * "columns": [ * "name", * "address.*" * ] * } * ``` * * returns objects like: * * ```json * { * "name": "Kilian", * "address": { * "street": "New street", * "number": 41, * "zipcode": 10407 * } * } * ``` * * while a query like this: * * ```json * POST /db/{db_name}:{branch_name}/tables/{table_name}/query * { * "columns": [ * "name", * "address.street" * ] * } * ``` * * returns objects like: * * ```json * { * "id": "id1", * "xata": { * "version": 0 * }, * "name": "Kilian", * "address": { * "street": "New street" * } * } * ``` * * If you want to return all columns from the main table and selected columns from the linked table, you can do it like this: * * ```json * { * "columns": ["*", "team.name"] * } * ``` * * The `"*"` in the above means all columns, including columns of objects. This returns data like: * * ```json * { * "id": "id1", * "xata": { * "version": 0 * }, * "name": "Kilian", * "email": "kilian@gmail.com", * "address": { * "street": "New street", * "number": 41, * "zipcode": 10407 * }, * "team": { * "id": "XX", * "xata": { * "version": 0 * }, * "name": "first team" * } * } * ``` * * If you want all columns of the linked table, you can do: * * ```json * { * "columns": ["*", "team.*"] * } * ``` * * This returns, for example: * * ```json * { * "id": "id1", * "xata": { * "version": 0 * }, * "name": "Kilian", * "email": "kilian@gmail.com", * "address": { * "street": "New street", * "number": 41, * "zipcode": 10407 * }, * "team": { * "id": "XX", * "xata": { * "version": 0 * }, * "name": "first team", * "code": "A1", * "foundedDate": "2020-03-04T10:43:54.32Z" * } * } * ``` * * ### Filtering * * There are two types of operators: * * - Operators that work on a single column: `$is`, `$contains`, `$pattern`, * `$includes`, `$gt`, etc. * - Control operators that combine multiple conditions: `$any`, `$all`, `$not`, * `$none`, etc. * * All operators start with a `$` to differentiate them from column names * (which are not allowed to start with a dollar sign). * * #### Exact matching and control operators * * Filter by one column: * * ```json * { * "filter": { * "<column_name>": "value" * } * } * ``` * * This is equivalent to using the `$is` operator: * * ```json * { * "filter": { * "<column_name>": { * "$is": "value" * } * } * } * ``` * * For example: * * ```json * { * "filter": { * "name": "r2" * } * } * ``` * * Or: * * ```json * { * "filter": { * "name": { * "$is": "r2" * } * } * } * ``` * * For objects, both dotted and nested versions work: * * ```json * { * "filter": { * "settings.plan": "free" * } * } * ``` * * ```json * { * "filter": { * "settings": { * "plan": "free" * } * } * } * ``` * * If you want to OR together multiple values, you can use the `$any` operator with an array of values: * * ```json * { * "filter": { * "settings.plan": { "$any": ["free", "paid"] } * } * } * ``` * * If you specify multiple columns in the same filter, they are logically ANDed together: * * ```json * { * "filter": { * "settings.dark": true, * "settings.plan": "free" * } * } * ``` * * The above matches if both conditions are met.
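* * As a hedged sketch, the same implicit-AND filter can be expressed as a typed request body for the generated `queryTable` helper (assuming `FilterExpression` accepts plain filter objects like the JSON above): * * ```ts * const body: QueryTableRequestBody = { * filter: { * 'settings.dark': true, * 'settings.plan': 'free' * }, * columns: ['name', 'settings.plan'], * page: { size: 20 } * }; * ```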
* * To be more explicit about it, you can use `$all` or `$any`: * * ```json * { * "filter": { * "$any": { * "settings.dark": true, * "settings.plan": "free" * } * } * } * ``` * * The `$all` and `$any` operators can also receive an array of objects, which allows for repeating column names: * * ```json * { * "filter": { * "$any": [ * { * "name": "r1" * }, * { * "name": "r2" * } * ] * } * } * ``` * * You can check for a value being not-null with `$exists`: * * ```json * { * "filter": { * "$exists": "settings" * } * } * ``` * * This can be combined with `$all` or `$any`: * * ```json * { * "filter": { * "$all": [ * { * "$exists": "settings" * }, * { * "$exists": "name" * } * ] * } * } * ``` * * Or you can use the inverse operator `$notExists`: * * ```json * { * "filter": { * "$notExists": "settings" * } * } * ``` * * #### Partial match * * `$contains` is the simplest operator for partial matching. Note that the `$contains` operator can * cause performance issues at scale, because indices cannot be used. * * ```json * { * "filter": { * "<column_name>": { * "$contains": "value" * } * } * } * ``` * * Wildcards are supported via the `$pattern` operator: * * ```json * { * "filter": { * "<column_name>": { * "$pattern": "v*alu?" * } * } * } * ``` * * The `$pattern` operator accepts two wildcard characters: * * `*` matches zero or more characters * * `?` matches exactly one character * * If you want to match a string that contains a wildcard character, you can escape it using a backslash (`\`). You can escape a backslash by using another backslash. * * You can also use the `$endsWith` and `$startsWith` operators: * * ```json * { * "filter": { * "<column_name1>": { * "$endsWith": ".gz" * }, * "<column_name2>": { * "$startsWith": "tmp-" * } * } * } * ``` * * #### Numeric or datetime ranges * * ```json * { * "filter": { * "<column_name>": { * "$ge": 0, * "$lt": 100 * } * } * } * ``` * Date ranges support the same operators, with the date using the format defined in * [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339): * ```json * { * "filter": { * "<column_name>": { * "$gt": "2019-10-12T07:20:50.52Z", * "$lt": "2021-10-12T07:20:50.52Z" * } * } * } * ``` * The supported operators are `$gt`, `$lt`, `$ge`, `$le`. * * #### Negations * * A general `$not` operator can invert any operation. * * ```json * { * "filter": { * "$not": { * "<column_name1>": "value1", * "<column_name2>": "value1" * } * } * } * ``` * * Note: in the above, the two conditions are ANDed together, so this means NOT (... AND ...). * * Or more complex: * * ```json * { * "filter": { * "$not": { * "$any": [ * { * "<column_name1>": "value1" * }, * { * "$all": [ * { * "<column_name2>": "value2" * }, * { * "<column_name3>": "value3" * } * ] * } * ] * } * } * } * ``` * * The `$not: { $any: {} }` form can be shortened using the `$none` operator: * * ```json * { * "filter": { * "$none": { * "<column_name1>": "value1", * "<column_name2>": "value1" * } * } * } * ``` * * In addition, you can use operators like `$isNot` or `$notExists` to simplify expressions: * * ```json * { * "filter": { * "<column_name>": { * "$isNot": "2019-10-12T07:20:50.52Z" * } * } * } * ``` * * #### Working with arrays * * To test that an array contains a value, use `$includesAny`. * * ```json * { * "filter": { * "<column_name>": { * "$includesAny": "value" * } * } * } * ``` * * ##### `includesAny` * * The `$includesAny` operator accepts a custom predicate that will check if * any value in the array column matches the predicate. The `$includes` operator is a * synonym for the `$includesAny` operator.
* * For example, a complex predicate can include * the `$all`, `$contains`, and `$endsWith` operators: * * ```json * { * "filter": { * "<column_name>": { * "$includes": { * "$all": [ * { "$contains": "label" }, * { "$not": { "$endsWith": "-debug" } } * ] * } * } * } * } * ``` * * ##### `includesNone` * * The `$includesNone` operator succeeds if no array item matches the * predicate. * * ```json * { * "filter": { * "settings.labels": { * "$includesNone": [{ "$contains": "label" }] * } * } * } * ``` * The above matches if none of the array values contain the string "label". * * ##### `includesAll` * * The `$includesAll` operator succeeds if all array items match the * predicate. * * Here is an example of using the `$includesAll` operator: * * ```json * { * "filter": { * "settings.labels": { * "$includesAll": [{ "$contains": "label" }] * } * } * } * ``` * * The above matches if all array values contain the string "label". * * ### Sorting * * Sorting by one element: * * ```json * POST /db/demo:main/tables/table/query * { * "sort": { * "index": "asc" * } * } * ``` * * or in descending order: * * ```json * POST /db/demo:main/tables/table/query * { * "sort": { * "index": "desc" * } * } * ``` * * Sorting by multiple fields: * * ```json * POST /db/demo:main/tables/table/query * { * "sort": [ * { * "index": "desc" * }, * { * "createdAt": "desc" * } * ] * } * ``` * * It is also possible to sort results randomly: * * ```json * POST /db/demo:main/tables/table/query * { * "sort": { * "*": "random" * } * } * ``` * * Note that a random sort does not apply to a specific column, hence the special column name `"*"`. * * A random sort can be combined with an ascending or descending sort on a specific column: * * ```json * POST /db/demo:main/tables/table/query * { * "sort": [ * { * "name": "desc" * }, * { * "*": "random" * } * ] * } * ``` * * This will sort on the `name` column, breaking ties randomly. * * ### Pagination * * We offer cursor pagination and offset pagination. The cursor pagination method can be used for sequential scrolling with unrestricted depth. The offset pagination can be used to skip pages and is limited to 1000 records. * * Example of cursor pagination: * * ```json * POST /db/demo:main/tables/table/query * { * "page": { * "after": "fMoxCsIwFIDh3WP8c4amDai5hO5SJCRNfaVSeC9b6d1FD" * } * } * ``` * * In the above example, the value of the `page.after` parameter is the cursor returned by the previous query. A sample response is shown below: * * ```json * { * "meta": { * "page": { * "cursor": "fMoxCsIwFIDh3WP8c4amDai5hO5SJCRNfaVSeC9b6d1FD", * "more": true * } * }, * "records": [...] * } * ``` * * The `page` object might contain the following keys, in addition to `size` and `offset` that were introduced before: * * - `after`: Return the next page 'after' the current cursor. * - `before`: Return the previous page 'before' the current cursor. * - `start`: Resets the given cursor position to the beginning of the query result set. * Will return the first N records from the query result, where N is the `page.size` parameter. * - `end`: Resets the given cursor position to the end of the query result set. * Returns the last N records from the query result, where N is the `page.size` parameter. * * The request will fail if an invalid cursor value is given to `page.before`, * `page.after`, `page.start`, or `page.end`. No other cursor setting can be * used if `page.start` or `page.end` is set in a query.
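* * As a hedged sketch (assuming the response carries the `meta.page` object shown above; `vars` stands in for the path parameters and fetcher configuration), sequential scrolling with cursor pagination looks like this: * * ```ts * // Hypothetical loop: follow meta.page.cursor until no more pages remain. * let after: string | undefined; * do { * const res: any = await queryTable({ * ...vars, * body: { page: { size: 100, after } } * }); * // ... process res.records ... * after = res.meta.page.more ? res.meta.page.cursor : undefined; * } while (after); * ```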
* * If both `page.before` and `page.after` parameters are present, we treat the * request as a range query. The range query will return all entries after * `page.after`, but before `page.before`, up to `page.size` or the maximum * page size. This query requires both cursors to use the same filters and sort * settings, plus we require `page.after < page.before`. The range query returns * a new cursor. If the range encompasses multiple pages, the next page in the range * can be queried by updating `page.after` to the returned cursor while keeping the * `page.before` cursor from the first range query. * * The `filter`, `columns`, `sort`, and `page.size` configuration will be * encoded with the cursor. The pagination request will be invalid if * `filter` or `sort` is also set. The columns returned and page size can be changed * anytime by passing the `columns` or `page.size` settings to the next query. * * In the following example of size + offset pagination we retrieve the third page of up to 100 results: * * ```json * POST /db/demo:main/tables/table/query * { * "page": { * "size": 100, * "offset": 200 * } * } * ``` * * The `page.size` parameter represents the maximum number of records returned by this query. It has a default value of 20 and a maximum value of 200. * The `page.offset` parameter represents the number of matching records to skip. It has a default value of 0 and a maximum value of 800. * * Cursor pagination also works in combination with offset pagination. For example, starting from a specific cursor position, using a page size of 200 and an offset of 800, you can skip up to 5 pages of 200 records forwards or backwards from the cursor's position: * * ```json * POST /db/demo:main/tables/table/query * { * "page": { * "size": 200, * "offset": 800, * "after": "fMoxCsIwFIDh3WP8c4amDai5hO5SJCRNfaVSeC9b6d1FD" * } * } * ``` * * **Special cursors:** * * - `page.after=end`: Result points past the last entry. The list of records * returned is empty, but `page.meta.cursor` will include a cursor that can be * used to "tail" the table from the end waiting for new data to be inserted. * - `page.before=end`: This cursor returns the last page. * - `page.start=$cursor`: Start at the beginning of the result set of the $cursor query. This is equivalent to querying the * first page without a cursor but applying `filter` and `sort`. Yet the `page.start` * cursor can be convenient at times as user code does not need to remember the * filter, sort, columns or page size configuration. All of this information is * read from the cursor. * - `page.end=$cursor`: Move to the end of the result set of the $cursor query. This is equivalent to querying the * last page with `page.before=end`, `filter`, and `sort`. Yet the * `page.end` cursor can be more convenient at times as user code does not * need to remember the filter, sort, columns or page size configuration. All * of this information is read from the cursor. * * When using special cursors like `page.after="end"` or `page.before="end"`, we * still allow `filter` and `sort` to be set. * * Example of getting the last page: * * ```json * POST /db/demo:main/tables/table/query * { * "page": { * "size": 10, * "before": "end" * } * } * ``` */ declare const queryTable: (variables: QueryTableVariables, signal?: AbortSignal) => Promise; type SearchBranchPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/ dbBranchName: DBBranchName; workspace: string; region: string; }; type SearchBranchError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 503; payload: ServiceUnavailableError; }>; type SearchBranchRequestBody = { /** * An array with the tables in which to search. By default, all tables are included. Optionally, filters can be included that apply to each table. */ tables?: (string | { /** * The name of the table. */ table: string; filter?: FilterExpression; target?: TargetExpression; boosters?: BoosterExpression[]; })[]; /** * The query string. * * @minLength 1 */ query: string; fuzziness?: FuzzinessExpression; prefix?: PrefixExpression; highlight?: HighlightExpression; page?: SearchPageConfig; }; type SearchBranchVariables = { body: SearchBranchRequestBody; pathParams: SearchBranchPathParams; } & DataPlaneFetcherExtraProps; /** * Run a free text search operation across the database branch. */ declare const searchBranch: (variables: SearchBranchVariables, signal?: AbortSignal) => Promise; type SearchTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type SearchTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type SearchTableRequestBody = { /** * The query string. * * @minLength 1 */ query: string; fuzziness?: FuzzinessExpression; target?: TargetExpression; prefix?: PrefixExpression; filter?: FilterExpression; highlight?: HighlightExpression; boosters?: BoosterExpression[]; page?: SearchPageConfig; }; type SearchTableVariables = { body: SearchTableRequestBody; pathParams: SearchTablePathParams; } & DataPlaneFetcherExtraProps; /** * Run a free text search operation in a particular table. * * The endpoint accepts a `query` parameter that is used for the free text search and a set of structured filters (via the `filter` parameter) that are applied before the search. The `filter` parameter uses the same syntax as the [query endpoint](/docs/api-reference/db/db_branch_name/tables/table_name/query#filtering) with the following exceptions: * * filters `$contains`, `$startsWith`, `$endsWith` don't work on columns of type `text` * * filtering on columns of type `multiple` is currently unsupported */ declare const searchTable: (variables: SearchTableVariables, signal?: AbortSignal) => Promise; type VectorSearchTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type VectorSearchTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type VectorSearchTableRequestBody = { /** * The vector to search for similarities. Must have the same dimension as * the vector column used. */ queryVector: number[]; /** * The vector column in which to search. It must be of type `vector`. */ column: string; /** * The function used to measure the distance between two points. Can be one of: * `cosineSimilarity`, `l1`, `l2`. The default is `cosineSimilarity`. * * @default cosineSimilarity */ similarityFunction?: string; /** * Number of results to return. 
* * @default 10 * @maximum 100 * @minimum 1 */ size?: number; filter?: FilterExpression; }; type VectorSearchTableVariables = { body: VectorSearchTableRequestBody; pathParams: VectorSearchTablePathParams; } & DataPlaneFetcherExtraProps; /** * This endpoint can be used to perform vector-based similarity searches in a table. * It can be used for implementing semantic search and product recommendation. To use this * endpoint, you need a column of type `vector`. The input vector must have the same * dimension as the vector column. */ declare const vectorSearchTable: (variables: VectorSearchTableVariables, signal?: AbortSignal) => Promise; type AskTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type AskTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 429; payload: RateLimitError; }>; type AskTableResponse = { /** * The answer to the input question */ answer: string; /** * The session ID for the chat session. */ sessionId: string; }; type AskTableRequestBody = { /** * The question you'd like to ask. * * @minLength 3 */ question: string; /** * The type of search to use. If set to `keyword` (the default), the search can be configured by passing * a `search` object with the following fields. For more details about each, see the Search endpoint documentation. * All fields are optional. * * fuzziness - typo tolerance * * target - columns to search into, and weights. * * prefix - prefix search type. * * filter - pre-filter before searching. * * boosters - control relevancy. * If set to `vector`, a `vectorSearch` object must be passed, with the following parameters. For more details, see the Vector * Search endpoint documentation. The `column` and `contentColumn` parameters are required. * * column - the vector column containing the embeddings. * * contentColumn - the column that contains the text from which the embeddings were computed. * * filter - pre-filter before searching. * * @default keyword */ searchType?: 'keyword' | 'vector'; search?: { fuzziness?: FuzzinessExpression; target?: TargetExpression; prefix?: PrefixExpression; filter?: FilterExpression; boosters?: BoosterExpression[]; }; vectorSearch?: { /** * The column to use for vector search. It must be of type `vector`. */ column: string; /** * The column containing the text for vector search. Must be of type `text`. */ contentColumn: string; filter?: FilterExpression; }; rules?: string[]; }; type AskTableVariables = { body: AskTableRequestBody; pathParams: AskTablePathParams; } & DataPlaneFetcherExtraProps; /** * Ask your table a question. If the `Accept` header is set to `text/event-stream`, Xata will stream the results back as SSEs. */ declare const askTable: (variables: AskTableVariables, signal?: AbortSignal) => Promise;
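/** * A hedged sketch of an `AskTableRequestBody` using the vector search mode (the column names are illustrative, not defined in this file): * * ```ts * const body: AskTableRequestBody = { * question: 'Which products were sold in July 2022?', * searchType: 'vector', * vectorSearch: { * column: 'embedding', // must be of type `vector` * contentColumn: 'content' // the text the embeddings were computed from * } * }; * ``` */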
type AskTableSessionPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; /** * @maxLength 36 * @minLength 36 */ sessionId: string; workspace: string; region: string; }; type AskTableSessionError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 429; payload: RateLimitError; } | { status: 503; payload: ServiceUnavailableError; }>; type AskTableSessionResponse = { /** * The answer to the input question */ answer: string; }; type AskTableSessionRequestBody = { /** * The question you'd like to ask. * * @minLength 3 */ message?: string; }; type AskTableSessionVariables = { body?: AskTableSessionRequestBody; pathParams: AskTableSessionPathParams; } & DataPlaneFetcherExtraProps; /** * Ask a follow-up question. If the `Accept` header is set to `text/event-stream`, Xata will stream the results back as SSEs. */ declare const askTableSession: (variables: AskTableSessionVariables, signal?: AbortSignal) => Promise; type SummarizeTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type SummarizeTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type SummarizeTableRequestBody = { filter?: FilterExpression; columns?: ColumnsProjection; summaries?: SummaryExpressionList; sort?: SortExpression; summariesFilter?: FilterExpression; /** * The consistency level for this request. * * @default strong */ consistency?: 'strong' | 'eventual'; page?: { /** * The number of records returned by summarize. If the amount of data you have exceeds this, or you have * more complex reporting requirements, we recommend that you use the aggregate endpoint instead. * * @default 20 * @maximum 1000 * @minimum 1 */ size?: number; }; }; type SummarizeTableVariables = { body?: SummarizeTableRequestBody; pathParams: SummarizeTablePathParams; } & DataPlaneFetcherExtraProps; /** * This endpoint allows you to (optionally) define groups, and then to run * calculations on the values in each group. This is most helpful when * you'd like to understand the data you have in your database. * * A group is a combination of unique values. If you create a group for * `sold_by`, `product_name`, we will return one row for every combination * of `sold_by` and `product_name` you have in your database. When you * want to calculate statistics, you define these groups and ask Xata to * calculate data on each group. * * **Some questions you can ask of your data:** * * How many records do I have in this table? * - Set `columns: []` as we want data from the entire table, so we ask * for no groups. * - Set `summaries: {"total": {"count": "*"}}` in order to see the count * of all records. We use `count: *` here because we'd like to know the total * number of rows, ignoring whether they are `null` or not. * * What are the top total sales for each product in July 2022, counting only * products that sold more than 10 units? (The full request body for this * example is sketched after this list.) * - Set `filter: {soldAt: { * "$ge": "2022-07-01T00:00:00.000Z", * "$lt": "2022-08-01T00:00:00.000Z"} * }` * in order to limit the result set to sales recorded in July 2022. * - Set `columns: [product_name]` as we'd like to run calculations on * each unique product name in our table. Setting `columns` like this will * produce one row per unique product name. * - Set `summaries: {"total_sales": {"count": "product_name"}}` as we'd * like to create a field called "total_sales" for each group. This field * will count all rows in each group with non-null product names. * - Set `sort: [{"total_sales": "desc"}]` in order to bring the rows with * the highest total_sales field to the top. * - Set `summariesFilter: {"total_sales": {"$ge": 10}}` to only send back data * with greater than or equal to 10 units.
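* * As a hedged sketch, the July 2022 example above maps onto a `SummarizeTableRequestBody` like this (the column names are taken from the example itself): * * ```ts * const body: SummarizeTableRequestBody = { * filter: { * soldAt: { * $ge: '2022-07-01T00:00:00.000Z', * $lt: '2022-08-01T00:00:00.000Z' * } * }, * columns: ['product_name'], * summaries: { total_sales: { count: 'product_name' } }, * sort: [{ total_sales: 'desc' }], * summariesFilter: { total_sales: { $ge: 10 } } * }; * ```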
* * `columns`: tells Xata how to create each group. If you add `product_id` * we will create a new group for every unique `product_id`. * * `summaries`: tells Xata which calculations to run on each group. Xata * currently supports count, min, max, sum, average. * * `sort`: tells Xata in which order you'd like to see results. You may * sort by fields specified in `columns` as well as the summary names * defined in `summaries`. * * note: Sorting on summarized values can be slower on very large tables; * this will impact your rate limit significantly more than other queries. * Try using `filter` to reduce the amount of data being processed in order * to reduce the impact on your limits. * * `summariesFilter`: tells Xata how to filter the results of a summary. * It has the same syntax as `filter`, however, by using `summariesFilter` * you may also filter on the results of a query. * * note: This is much slower than `filter`. We recommend using * `filter` wherever possible and `summariesFilter` when it's not * possible to use `filter`. * * `page.size`: tells Xata how many records to return. If unspecified, Xata * will return the default size. */ declare const summarizeTable: (variables: SummarizeTableVariables, signal?: AbortSignal) => Promise; type AggregateTablePathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; /** * The Table name */ tableName: TableName; workspace: string; region: string; }; type AggregateTableError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type AggregateTableRequestBody = { filter?: FilterExpression; aggs?: AggExpressionMap; }; type AggregateTableVariables = { body?: AggregateTableRequestBody; pathParams: AggregateTablePathParams; } & DataPlaneFetcherExtraProps; /** * This endpoint allows you to run aggregations (analytics) on the data from one table. * While the summary endpoint is served from a transactional store and the results are strongly * consistent, the aggregate endpoint is served from our columnar store and the results are * only eventually consistent. On the other hand, the aggregate endpoint uses a * store that is more appropriate for analytics, makes use of approximation algorithms * (e.g. for cardinality), and is generally faster and can do more complex aggregations. * * For usage, see the [Aggregation documentation](https://xata.io/docs/sdk/aggregate).
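* * As a hedged sketch (the aggregation shapes are assumed to follow the schema types such as `CountAgg` and `UniqueCountAgg` declared later in this file; the column names are illustrative), an `AggregateTableRequestBody` might look like: * * ```ts * const body: AggregateTableRequestBody = { * filter: { status: 'active' }, // optional pre-filter * aggs: { * totalRows: { count: '*' }, * distinctUsers: { uniqueCount: { column: 'user_id' } } // assumed shape * } * }; * ```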
*/ declare const aggregateTable: (variables: AggregateTableVariables, signal?: AbortSignal) => Promise; type FileAccessPathParams = { /** * The File Access Identifier */ fileId: FileAccessID; workspace: string; region: string; }; type FileAccessQueryParams = { /** * File access signature */ verify?: FileSignature; }; type FileAccessError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type FileAccessVariables = { pathParams: FileAccessPathParams; queryParams?: FileAccessQueryParams; } & DataPlaneFetcherExtraProps; /** * Retrieve file content by access id */ declare const fileAccess: (variables: FileAccessVariables, signal?: AbortSignal) => Promise; type FileUploadPathParams = { /** * The File Access Identifier */ fileId: FileAccessID; workspace: string; region: string; }; type FileUploadQueryParams = { /** * File access signature */ verify?: FileSignature; }; type FileUploadError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; }>; type FileUploadVariables = { body?: Blob; pathParams: FileUploadPathParams; queryParams?: FileUploadQueryParams; } & DataPlaneFetcherExtraProps; /** * Upload file using an upload url */ declare const fileUpload: (variables: FileUploadVariables, signal?: AbortSignal) => Promise; type SqlQueryPathParams = { /** * The DBBranchName matches the pattern `{db_name}:{branch_name}`. */ dbBranchName: DBBranchName; workspace: string; region: string; }; type SqlQueryError = ErrorWrapper<{ status: 400; payload: BadRequestError$1; } | { status: 401; payload: AuthError$1; } | { status: 404; payload: SimpleError$1; } | { status: 503; payload: ServiceUnavailableError; }>; type SqlQueryRequestBody = { /** * The SQL statement. * * @minLength 1 */ statement: string; /** * The query parameter list. */ params?: any[] | null; /** * The consistency level for this request. * * @default strong */ consistency?: 'strong' | 'eventual'; /** * The response type. * * @default json */ responseType?: 'json' | 'array'; }; type SqlQueryVariables = { body: SqlQueryRequestBody; pathParams: SqlQueryPathParams; } & DataPlaneFetcherExtraProps; /** * Run an SQL query across the database branch. 
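* * A hedged sketch of a `SqlQueryRequestBody` using positional parameters (the table and values are illustrative): * * ```ts * const body: SqlQueryRequestBody = { * statement: 'SELECT * FROM "users" WHERE email = $1 LIMIT 10;', * params: ['kilian@gmail.com'], * consistency: 'eventual', * responseType: 'json' * }; * ```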
*/ declare const sqlQuery: (variables: SqlQueryVariables, signal?: AbortSignal) => Promise; declare const operationsByTag: { branch: { getBranchList: (variables: GetBranchListVariables, signal?: AbortSignal | undefined) => Promise; getBranchDetails: (variables: GetBranchDetailsVariables, signal?: AbortSignal | undefined) => Promise; createBranch: (variables: CreateBranchVariables, signal?: AbortSignal | undefined) => Promise; deleteBranch: (variables: DeleteBranchVariables, signal?: AbortSignal | undefined) => Promise; copyBranch: (variables: CopyBranchVariables, signal?: AbortSignal | undefined) => Promise; updateBranchMetadata: (variables: UpdateBranchMetadataVariables, signal?: AbortSignal | undefined) => Promise; getBranchMetadata: (variables: GetBranchMetadataVariables, signal?: AbortSignal | undefined) => Promise; getBranchStats: (variables: GetBranchStatsVariables, signal?: AbortSignal | undefined) => Promise; getGitBranchesMapping: (variables: GetGitBranchesMappingVariables, signal?: AbortSignal | undefined) => Promise; addGitBranchesEntry: (variables: AddGitBranchesEntryVariables, signal?: AbortSignal | undefined) => Promise; removeGitBranchesEntry: (variables: RemoveGitBranchesEntryVariables, signal?: AbortSignal | undefined) => Promise; resolveBranch: (variables: ResolveBranchVariables, signal?: AbortSignal | undefined) => Promise; }; workspaces: { getWorkspacesList: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; createWorkspace: (variables: CreateWorkspaceVariables, signal?: AbortSignal | undefined) => Promise; getWorkspace: (variables: GetWorkspaceVariables, signal?: AbortSignal | undefined) => Promise; updateWorkspace: (variables: UpdateWorkspaceVariables, signal?: AbortSignal | undefined) => Promise; deleteWorkspace: (variables: DeleteWorkspaceVariables, signal?: AbortSignal | undefined) => Promise; getWorkspaceSettings: (variables: GetWorkspaceSettingsVariables, signal?: AbortSignal | undefined) => Promise; updateWorkspaceSettings: (variables: UpdateWorkspaceSettingsVariables, signal?: AbortSignal | undefined) => Promise; getWorkspaceMembersList: (variables: GetWorkspaceMembersListVariables, signal?: AbortSignal | undefined) => Promise; updateWorkspaceMemberRole: (variables: UpdateWorkspaceMemberRoleVariables, signal?: AbortSignal | undefined) => Promise; removeWorkspaceMember: (variables: RemoveWorkspaceMemberVariables, signal?: AbortSignal | undefined) => Promise; }; migrations: { applyMigration: (variables: ApplyMigrationVariables, signal?: AbortSignal | undefined) => Promise; adaptTable: (variables: AdaptTableVariables, signal?: AbortSignal | undefined) => Promise; adaptAllTables: (variables: AdaptAllTablesVariables, signal?: AbortSignal | undefined) => Promise; getBranchMigrationJobStatus: (variables: GetBranchMigrationJobStatusVariables, signal?: AbortSignal | undefined) => Promise; getMigrationJobStatus: (variables: GetMigrationJobStatusVariables, signal?: AbortSignal | undefined) => Promise; getMigrationHistory: (variables: GetMigrationHistoryVariables, signal?: AbortSignal | undefined) => Promise; getSchema: (variables: GetSchemaVariables, signal?: AbortSignal | undefined) => Promise; getBranchMigrationHistory: (variables: GetBranchMigrationHistoryVariables, signal?: AbortSignal | undefined) => Promise; getBranchMigrationPlan: (variables: GetBranchMigrationPlanVariables, signal?: AbortSignal | undefined) => Promise; executeBranchMigrationPlan: (variables: ExecuteBranchMigrationPlanVariables, signal?: AbortSignal | 
undefined) => Promise; getBranchSchemaHistory: (variables: GetBranchSchemaHistoryVariables, signal?: AbortSignal | undefined) => Promise; compareBranchWithUserSchema: (variables: CompareBranchWithUserSchemaVariables, signal?: AbortSignal | undefined) => Promise; compareBranchSchemas: (variables: CompareBranchSchemasVariables, signal?: AbortSignal | undefined) => Promise; updateBranchSchema: (variables: UpdateBranchSchemaVariables, signal?: AbortSignal | undefined) => Promise; previewBranchSchemaEdit: (variables: PreviewBranchSchemaEditVariables, signal?: AbortSignal | undefined) => Promise; applyBranchSchemaEdit: (variables: ApplyBranchSchemaEditVariables, signal?: AbortSignal | undefined) => Promise; pushBranchMigrations: (variables: PushBranchMigrationsVariables, signal?: AbortSignal | undefined) => Promise; }; records: { branchTransaction: (variables: BranchTransactionVariables, signal?: AbortSignal | undefined) => Promise; insertRecord: (variables: InsertRecordVariables, signal?: AbortSignal | undefined) => Promise; getRecord: (variables: GetRecordVariables, signal?: AbortSignal | undefined) => Promise; insertRecordWithID: (variables: InsertRecordWithIDVariables, signal?: AbortSignal | undefined) => Promise; updateRecordWithID: (variables: UpdateRecordWithIDVariables, signal?: AbortSignal | undefined) => Promise; upsertRecordWithID: (variables: UpsertRecordWithIDVariables, signal?: AbortSignal | undefined) => Promise; deleteRecord: (variables: DeleteRecordVariables, signal?: AbortSignal | undefined) => Promise; bulkInsertTableRecords: (variables: BulkInsertTableRecordsVariables, signal?: AbortSignal | undefined) => Promise; }; database: { getDatabaseSettings: (variables: GetDatabaseSettingsVariables, signal?: AbortSignal | undefined) => Promise; updateDatabaseSettings: (variables: UpdateDatabaseSettingsVariables, signal?: AbortSignal | undefined) => Promise; }; migrationRequests: { queryMigrationRequests: (variables: QueryMigrationRequestsVariables, signal?: AbortSignal | undefined) => Promise; createMigrationRequest: (variables: CreateMigrationRequestVariables, signal?: AbortSignal | undefined) => Promise; getMigrationRequest: (variables: GetMigrationRequestVariables, signal?: AbortSignal | undefined) => Promise; updateMigrationRequest: (variables: UpdateMigrationRequestVariables, signal?: AbortSignal | undefined) => Promise; listMigrationRequestsCommits: (variables: ListMigrationRequestsCommitsVariables, signal?: AbortSignal | undefined) => Promise; compareMigrationRequest: (variables: CompareMigrationRequestVariables, signal?: AbortSignal | undefined) => Promise; getMigrationRequestIsMerged: (variables: GetMigrationRequestIsMergedVariables, signal?: AbortSignal | undefined) => Promise; mergeMigrationRequest: (variables: MergeMigrationRequestVariables, signal?: AbortSignal | undefined) => Promise; }; table: { createTable: (variables: CreateTableVariables, signal?: AbortSignal | undefined) => Promise; deleteTable: (variables: DeleteTableVariables, signal?: AbortSignal | undefined) => Promise; updateTable: (variables: UpdateTableVariables, signal?: AbortSignal | undefined) => Promise; getTableSchema: (variables: GetTableSchemaVariables, signal?: AbortSignal | undefined) => Promise; setTableSchema: (variables: SetTableSchemaVariables, signal?: AbortSignal | undefined) => Promise; getTableColumns: (variables: GetTableColumnsVariables, signal?: AbortSignal | undefined) => Promise; addTableColumn: (variables: AddTableColumnVariables, signal?: AbortSignal | undefined) => Promise; getColumn: 
(variables: GetColumnVariables, signal?: AbortSignal | undefined) => Promise; updateColumn: (variables: UpdateColumnVariables, signal?: AbortSignal | undefined) => Promise; deleteColumn: (variables: DeleteColumnVariables, signal?: AbortSignal | undefined) => Promise; }; files: { getFileItem: (variables: GetFileItemVariables, signal?: AbortSignal | undefined) => Promise; putFileItem: (variables: PutFileItemVariables, signal?: AbortSignal | undefined) => Promise; deleteFileItem: (variables: DeleteFileItemVariables, signal?: AbortSignal | undefined) => Promise; getFile: (variables: GetFileVariables, signal?: AbortSignal | undefined) => Promise; putFile: (variables: PutFileVariables, signal?: AbortSignal | undefined) => Promise; deleteFile: (variables: DeleteFileVariables, signal?: AbortSignal | undefined) => Promise; fileAccess: (variables: FileAccessVariables, signal?: AbortSignal | undefined) => Promise; fileUpload: (variables: FileUploadVariables, signal?: AbortSignal | undefined) => Promise; }; searchAndFilter: { queryTable: (variables: QueryTableVariables, signal?: AbortSignal | undefined) => Promise; searchBranch: (variables: SearchBranchVariables, signal?: AbortSignal | undefined) => Promise; searchTable: (variables: SearchTableVariables, signal?: AbortSignal | undefined) => Promise; vectorSearchTable: (variables: VectorSearchTableVariables, signal?: AbortSignal | undefined) => Promise; askTable: (variables: AskTableVariables, signal?: AbortSignal | undefined) => Promise; askTableSession: (variables: AskTableSessionVariables, signal?: AbortSignal | undefined) => Promise; summarizeTable: (variables: SummarizeTableVariables, signal?: AbortSignal | undefined) => Promise; aggregateTable: (variables: AggregateTableVariables, signal?: AbortSignal | undefined) => Promise; }; sql: { sqlQuery: (variables: SqlQueryVariables, signal?: AbortSignal | undefined) => Promise; }; oAuth: { getAuthorizationCode: (variables: GetAuthorizationCodeVariables, signal?: AbortSignal | undefined) => Promise; grantAuthorizationCode: (variables: GrantAuthorizationCodeVariables, signal?: AbortSignal | undefined) => Promise; getUserOAuthClients: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; deleteUserOAuthClient: (variables: DeleteUserOAuthClientVariables, signal?: AbortSignal | undefined) => Promise; getUserOAuthAccessTokens: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; deleteOAuthAccessToken: (variables: DeleteOAuthAccessTokenVariables, signal?: AbortSignal | undefined) => Promise; updateOAuthAccessToken: (variables: UpdateOAuthAccessTokenVariables, signal?: AbortSignal | undefined) => Promise; }; users: { getUser: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; updateUser: (variables: UpdateUserVariables, signal?: AbortSignal | undefined) => Promise; deleteUser: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; }; authentication: { getUserAPIKeys: (variables: ControlPlaneFetcherExtraProps, signal?: AbortSignal | undefined) => Promise; createUserAPIKey: (variables: CreateUserAPIKeyVariables, signal?: AbortSignal | undefined) => Promise; deleteUserAPIKey: (variables: DeleteUserAPIKeyVariables, signal?: AbortSignal | undefined) => Promise; }; invites: { inviteWorkspaceMember: (variables: InviteWorkspaceMemberVariables, signal?: AbortSignal | undefined) => Promise; updateWorkspaceMemberInvite: (variables: UpdateWorkspaceMemberInviteVariables, signal?: 
AbortSignal | undefined) => Promise; cancelWorkspaceMemberInvite: (variables: CancelWorkspaceMemberInviteVariables, signal?: AbortSignal | undefined) => Promise; acceptWorkspaceMemberInvite: (variables: AcceptWorkspaceMemberInviteVariables, signal?: AbortSignal | undefined) => Promise; resendWorkspaceMemberInvite: (variables: ResendWorkspaceMemberInviteVariables, signal?: AbortSignal | undefined) => Promise; }; xbcontrolOther: { listClusters: (variables: ListClustersVariables, signal?: AbortSignal | undefined) => Promise; createCluster: (variables: CreateClusterVariables, signal?: AbortSignal | undefined) => Promise; getCluster: (variables: GetClusterVariables, signal?: AbortSignal | undefined) => Promise; updateCluster: (variables: UpdateClusterVariables, signal?: AbortSignal | undefined) => Promise; }; databases: { getDatabaseList: (variables: GetDatabaseListVariables, signal?: AbortSignal | undefined) => Promise; createDatabase: (variables: CreateDatabaseVariables, signal?: AbortSignal | undefined) => Promise; deleteDatabase: (variables: DeleteDatabaseVariables, signal?: AbortSignal | undefined) => Promise; getDatabaseMetadata: (variables: GetDatabaseMetadataVariables, signal?: AbortSignal | undefined) => Promise; updateDatabaseMetadata: (variables: UpdateDatabaseMetadataVariables, signal?: AbortSignal | undefined) => Promise; renameDatabase: (variables: RenameDatabaseVariables, signal?: AbortSignal | undefined) => Promise; getDatabaseGithubSettings: (variables: GetDatabaseGithubSettingsVariables, signal?: AbortSignal | undefined) => Promise; updateDatabaseGithubSettings: (variables: UpdateDatabaseGithubSettingsVariables, signal?: AbortSignal | undefined) => Promise; deleteDatabaseGithubSettings: (variables: DeleteDatabaseGithubSettingsVariables, signal?: AbortSignal | undefined) => Promise; listRegions: (variables: ListRegionsVariables, signal?: AbortSignal | undefined) => Promise; }; }; type HostAliases = 'production' | 'staging' | 'dev' | 'local'; type ProviderBuilder = { main: string; workspaces: string; }; type HostProvider = HostAliases | ProviderBuilder; declare function getHostUrl(provider: HostProvider, type: keyof ProviderBuilder): string; declare function isHostProviderAlias(alias?: HostProvider | string): alias is HostAliases; declare function isHostProviderBuilder(builder: HostProvider): builder is ProviderBuilder; declare function parseProviderString(provider?: string): HostProvider | null; declare function buildProviderString(provider: HostProvider): string; declare function parseWorkspacesUrlParts(url: string): { workspace: string; region: string; database: string; branch?: string; host: HostAliases; } | null; type responses_AggResponse = AggResponse; type responses_BranchMigrationPlan = BranchMigrationPlan; type responses_BulkError = BulkError; type responses_BulkInsertResponse = BulkInsertResponse; type responses_PutFileResponse = PutFileResponse; type responses_QueryResponse = QueryResponse; type responses_RateLimitError = RateLimitError; type responses_RecordResponse = RecordResponse; type responses_RecordUpdateResponse = RecordUpdateResponse; type responses_SQLResponse = SQLResponse; type responses_SchemaCompareResponse = SchemaCompareResponse; type responses_SchemaUpdateResponse = SchemaUpdateResponse; type responses_SearchResponse = SearchResponse; type responses_ServiceUnavailableError = ServiceUnavailableError; type responses_SummarizeResponse = SummarizeResponse; declare namespace responses { export type { responses_AggResponse as AggResponse, AuthError$1 as 
AuthError, BadRequestError$1 as BadRequestError, responses_BranchMigrationPlan as BranchMigrationPlan, responses_BulkError as BulkError, responses_BulkInsertResponse as BulkInsertResponse, responses_PutFileResponse as PutFileResponse, responses_QueryResponse as QueryResponse, responses_RateLimitError as RateLimitError, responses_RecordResponse as RecordResponse, responses_RecordUpdateResponse as RecordUpdateResponse, responses_SQLResponse as SQLResponse, responses_SchemaCompareResponse as SchemaCompareResponse, responses_SchemaUpdateResponse as SchemaUpdateResponse, responses_SearchResponse as SearchResponse, responses_ServiceUnavailableError as ServiceUnavailableError, SimpleError$1 as SimpleError, responses_SummarizeResponse as SummarizeResponse }; } type schemas_APIKeyName = APIKeyName; type schemas_AccessToken = AccessToken; type schemas_AggExpression = AggExpression; type schemas_AggExpressionMap = AggExpressionMap; type schemas_ApplyMigrationResponse = ApplyMigrationResponse; type schemas_AuthorizationCodeRequest = AuthorizationCodeRequest; type schemas_AuthorizationCodeResponse = AuthorizationCodeResponse; type schemas_AutoscalingConfig = AutoscalingConfig; type schemas_AutoscalingConfigResponse = AutoscalingConfigResponse; type schemas_AverageAgg = AverageAgg; type schemas_BoosterExpression = BoosterExpression; type schemas_Branch = Branch; type schemas_BranchMigration = BranchMigration; type schemas_BranchOp = BranchOp; type schemas_BranchSchema = BranchSchema; type schemas_BranchWithCopyID = BranchWithCopyID; type schemas_ClusterConfiguration = ClusterConfiguration; type schemas_ClusterConfigurationResponse = ClusterConfigurationResponse; type schemas_ClusterCreateDetails = ClusterCreateDetails; type schemas_ClusterID = ClusterID; type schemas_ClusterMetadata = ClusterMetadata; type schemas_ClusterResponse = ClusterResponse; type schemas_ClusterShortMetadata = ClusterShortMetadata; type schemas_ClusterUpdateDetails = ClusterUpdateDetails; type schemas_ClusterUpdateMetadata = ClusterUpdateMetadata; type schemas_Column = Column; type schemas_ColumnFile = ColumnFile; type schemas_ColumnLink = ColumnLink; type schemas_ColumnMigration = ColumnMigration; type schemas_ColumnName = ColumnName; type schemas_ColumnOpAdd = ColumnOpAdd; type schemas_ColumnOpRemove = ColumnOpRemove; type schemas_ColumnOpRename = ColumnOpRename; type schemas_ColumnVector = ColumnVector; type schemas_ColumnsProjection = ColumnsProjection; type schemas_Commit = Commit; type schemas_CountAgg = CountAgg; type schemas_DBBranch = DBBranch; type schemas_DBBranchName = DBBranchName; type schemas_DailyTimeWindow = DailyTimeWindow; type schemas_DataInputRecord = DataInputRecord; type schemas_DatabaseGithubSettings = DatabaseGithubSettings; type schemas_DatabaseMetadata = DatabaseMetadata; type schemas_DatabaseSettings = DatabaseSettings; type schemas_DateHistogramAgg = DateHistogramAgg; type schemas_FileAccessID = FileAccessID; type schemas_FileItemID = FileItemID; type schemas_FileName = FileName; type schemas_FileResponse = FileResponse; type schemas_FileSignature = FileSignature; type schemas_FilterColumn = FilterColumn; type schemas_FilterColumnIncludes = FilterColumnIncludes; type schemas_FilterExpression = FilterExpression; type schemas_FilterList = FilterList; type schemas_FilterPredicate = FilterPredicate; type schemas_FilterPredicateOp = FilterPredicateOp; type schemas_FilterPredicateRangeOp = FilterPredicateRangeOp; type schemas_FilterRangeValue = FilterRangeValue; type schemas_FilterValue = FilterValue; type 
schemas_FuzzinessExpression = FuzzinessExpression; type schemas_HighlightExpression = HighlightExpression; type schemas_InputFile = InputFile; type schemas_InputFileArray = InputFileArray; type schemas_InputFileEntry = InputFileEntry; type schemas_InviteID = InviteID; type schemas_InviteKey = InviteKey; type schemas_ListBranchesResponse = ListBranchesResponse; type schemas_ListClustersResponse = ListClustersResponse; type schemas_ListDatabasesResponse = ListDatabasesResponse; type schemas_ListGitBranchesResponse = ListGitBranchesResponse; type schemas_ListRegionsResponse = ListRegionsResponse; type schemas_MaintenanceConfig = MaintenanceConfig; type schemas_MaintenanceConfigResponse = MaintenanceConfigResponse; type schemas_MaxAgg = MaxAgg; type schemas_MediaType = MediaType; type schemas_MetricsDatapoint = MetricsDatapoint; type schemas_MetricsLatency = MetricsLatency; type schemas_Migration = Migration; type schemas_MigrationColumnOp = MigrationColumnOp; type schemas_MigrationHistoryItem = MigrationHistoryItem; type schemas_MigrationHistoryResponse = MigrationHistoryResponse; type schemas_MigrationJobID = MigrationJobID; type schemas_MigrationJobStatus = MigrationJobStatus; type schemas_MigrationJobStatusResponse = MigrationJobStatusResponse; type schemas_MigrationJobType = MigrationJobType; type schemas_MigrationObject = MigrationObject; type schemas_MigrationOp = MigrationOp; type schemas_MigrationRequest = MigrationRequest; type schemas_MigrationRequestNumber = MigrationRequestNumber; type schemas_MigrationTableOp = MigrationTableOp; type schemas_MigrationType = MigrationType; type schemas_MinAgg = MinAgg; type schemas_NumericHistogramAgg = NumericHistogramAgg; type schemas_OAuthAccessToken = OAuthAccessToken; type schemas_OAuthClientID = OAuthClientID; type schemas_OAuthClientPublicDetails = OAuthClientPublicDetails; type schemas_OAuthResponseType = OAuthResponseType; type schemas_OAuthScope = OAuthScope; type schemas_ObjectValue = ObjectValue; type schemas_PageConfig = PageConfig; type schemas_PageResponse = PageResponse; type schemas_PageSize = PageSize; type schemas_PageToken = PageToken; type schemas_PercentilesAgg = PercentilesAgg; type schemas_PrefixExpression = PrefixExpression; type schemas_ProjectionConfig = ProjectionConfig; type schemas_QueryColumnsProjection = QueryColumnsProjection; type schemas_RecordID = RecordID; type schemas_RecordMeta = RecordMeta; type schemas_RecordsMetadata = RecordsMetadata; type schemas_Region = Region; type schemas_RevLink = RevLink; type schemas_Role = Role; type schemas_SQLRecord = SQLRecord; type schemas_Schema = Schema; type schemas_SchemaEditScript = SchemaEditScript; type schemas_SearchPageConfig = SearchPageConfig; type schemas_SortExpression = SortExpression; type schemas_SortOrder = SortOrder; type schemas_StartedFromMetadata = StartedFromMetadata; type schemas_SumAgg = SumAgg; type schemas_SummaryExpression = SummaryExpression; type schemas_SummaryExpressionList = SummaryExpressionList; type schemas_Table = Table; type schemas_TableMigration = TableMigration; type schemas_TableName = TableName; type schemas_TableOpAdd = TableOpAdd; type schemas_TableOpRemove = TableOpRemove; type schemas_TableOpRename = TableOpRename; type schemas_TableRename = TableRename; type schemas_TargetExpression = TargetExpression; type schemas_TopValuesAgg = TopValuesAgg; type schemas_TransactionDeleteOp = TransactionDeleteOp; type schemas_TransactionError = TransactionError; type schemas_TransactionFailure = TransactionFailure; type schemas_TransactionGetOp 
= TransactionGetOp; type schemas_TransactionInsertOp = TransactionInsertOp; type schemas_TransactionResultColumns = TransactionResultColumns; type schemas_TransactionResultDelete = TransactionResultDelete; type schemas_TransactionResultGet = TransactionResultGet; type schemas_TransactionResultInsert = TransactionResultInsert; type schemas_TransactionResultUpdate = TransactionResultUpdate; type schemas_TransactionSuccess = TransactionSuccess; type schemas_TransactionUpdateOp = TransactionUpdateOp; type schemas_UniqueCountAgg = UniqueCountAgg; type schemas_User = User; type schemas_UserID = UserID; type schemas_UserWithID = UserWithID; type schemas_WeeklyTimeWindow = WeeklyTimeWindow; type schemas_Workspace = Workspace; type schemas_WorkspaceID = WorkspaceID; type schemas_WorkspaceInvite = WorkspaceInvite; type schemas_WorkspaceMember = WorkspaceMember; type schemas_WorkspaceMembers = WorkspaceMembers; type schemas_WorkspaceMeta = WorkspaceMeta; type schemas_WorkspacePlan = WorkspacePlan; type schemas_WorkspaceSettings = WorkspaceSettings; declare namespace schemas { export type { schemas_APIKeyName as APIKeyName, schemas_AccessToken as AccessToken, schemas_AggExpression as AggExpression, schemas_AggExpressionMap as AggExpressionMap, AggResponse$1 as AggResponse, schemas_ApplyMigrationResponse as ApplyMigrationResponse, schemas_AuthorizationCodeRequest as AuthorizationCodeRequest, schemas_AuthorizationCodeResponse as AuthorizationCodeResponse, schemas_AutoscalingConfig as AutoscalingConfig, schemas_AutoscalingConfigResponse as AutoscalingConfigResponse, schemas_AverageAgg as AverageAgg, schemas_BoosterExpression as BoosterExpression, schemas_Branch as Branch, BranchMetadata$1 as BranchMetadata, schemas_BranchMigration as BranchMigration, BranchName$1 as BranchName, schemas_BranchOp as BranchOp, schemas_BranchSchema as BranchSchema, schemas_BranchWithCopyID as BranchWithCopyID, schemas_ClusterConfiguration as ClusterConfiguration, schemas_ClusterConfigurationResponse as ClusterConfigurationResponse, schemas_ClusterCreateDetails as ClusterCreateDetails, schemas_ClusterID as ClusterID, schemas_ClusterMetadata as ClusterMetadata, schemas_ClusterResponse as ClusterResponse, schemas_ClusterShortMetadata as ClusterShortMetadata, schemas_ClusterUpdateDetails as ClusterUpdateDetails, schemas_ClusterUpdateMetadata as ClusterUpdateMetadata, schemas_Column as Column, schemas_ColumnFile as ColumnFile, schemas_ColumnLink as ColumnLink, schemas_ColumnMigration as ColumnMigration, schemas_ColumnName as ColumnName, schemas_ColumnOpAdd as ColumnOpAdd, schemas_ColumnOpRemove as ColumnOpRemove, schemas_ColumnOpRename as ColumnOpRename, schemas_ColumnVector as ColumnVector, schemas_ColumnsProjection as ColumnsProjection, schemas_Commit as Commit, schemas_CountAgg as CountAgg, schemas_DBBranch as DBBranch, schemas_DBBranchName as DBBranchName, DBName$1 as DBName, schemas_DailyTimeWindow as DailyTimeWindow, schemas_DataInputRecord as DataInputRecord, schemas_DatabaseGithubSettings as DatabaseGithubSettings, schemas_DatabaseMetadata as DatabaseMetadata, schemas_DatabaseSettings as DatabaseSettings, DateBooster$1 as DateBooster, schemas_DateHistogramAgg as DateHistogramAgg, DateTime$1 as DateTime, schemas_FileAccessID as FileAccessID, schemas_FileItemID as FileItemID, schemas_FileName as FileName, schemas_FileResponse as FileResponse, schemas_FileSignature as FileSignature, schemas_FilterColumn as FilterColumn, schemas_FilterColumnIncludes as FilterColumnIncludes, schemas_FilterExpression as FilterExpression, 
schemas_FilterList as FilterList, schemas_FilterPredicate as FilterPredicate, schemas_FilterPredicateOp as FilterPredicateOp, schemas_FilterPredicateRangeOp as FilterPredicateRangeOp, schemas_FilterRangeValue as FilterRangeValue, schemas_FilterValue as FilterValue, schemas_FuzzinessExpression as FuzzinessExpression, schemas_HighlightExpression as HighlightExpression, schemas_InputFile as InputFile, schemas_InputFileArray as InputFileArray, schemas_InputFileEntry as InputFileEntry, schemas_InviteID as InviteID, schemas_InviteKey as InviteKey, schemas_ListBranchesResponse as ListBranchesResponse, schemas_ListClustersResponse as ListClustersResponse, schemas_ListDatabasesResponse as ListDatabasesResponse, schemas_ListGitBranchesResponse as ListGitBranchesResponse, schemas_ListRegionsResponse as ListRegionsResponse, schemas_MaintenanceConfig as MaintenanceConfig, schemas_MaintenanceConfigResponse as MaintenanceConfigResponse, schemas_MaxAgg as MaxAgg, schemas_MediaType as MediaType, schemas_MetricsDatapoint as MetricsDatapoint, schemas_MetricsLatency as MetricsLatency, schemas_Migration as Migration, schemas_MigrationColumnOp as MigrationColumnOp, schemas_MigrationHistoryItem as MigrationHistoryItem, schemas_MigrationHistoryResponse as MigrationHistoryResponse, schemas_MigrationJobID as MigrationJobID, schemas_MigrationJobStatus as MigrationJobStatus, schemas_MigrationJobStatusResponse as MigrationJobStatusResponse, schemas_MigrationJobType as MigrationJobType, schemas_MigrationObject as MigrationObject, schemas_MigrationOp as MigrationOp, schemas_MigrationRequest as MigrationRequest, schemas_MigrationRequestNumber as MigrationRequestNumber, MigrationStatus$1 as MigrationStatus, schemas_MigrationTableOp as MigrationTableOp, schemas_MigrationType as MigrationType, schemas_MinAgg as MinAgg, NumericBooster$1 as NumericBooster, schemas_NumericHistogramAgg as NumericHistogramAgg, schemas_OAuthAccessToken as OAuthAccessToken, schemas_OAuthClientID as OAuthClientID, schemas_OAuthClientPublicDetails as OAuthClientPublicDetails, schemas_OAuthResponseType as OAuthResponseType, schemas_OAuthScope as OAuthScope, schemas_ObjectValue as ObjectValue, schemas_PageConfig as PageConfig, schemas_PageResponse as PageResponse, schemas_PageSize as PageSize, schemas_PageToken as PageToken, schemas_PercentilesAgg as PercentilesAgg, schemas_PrefixExpression as PrefixExpression, schemas_ProjectionConfig as ProjectionConfig, schemas_QueryColumnsProjection as QueryColumnsProjection, schemas_RecordID as RecordID, schemas_RecordMeta as RecordMeta, schemas_RecordsMetadata as RecordsMetadata, schemas_Region as Region, schemas_RevLink as RevLink, schemas_Role as Role, schemas_SQLRecord as SQLRecord, schemas_Schema as Schema, schemas_SchemaEditScript as SchemaEditScript, schemas_SearchPageConfig as SearchPageConfig, schemas_SortExpression as SortExpression, schemas_SortOrder as SortOrder, schemas_StartedFromMetadata as StartedFromMetadata, schemas_SumAgg as SumAgg, schemas_SummaryExpression as SummaryExpression, schemas_SummaryExpressionList as SummaryExpressionList, schemas_Table as Table, schemas_TableMigration as TableMigration, schemas_TableName as TableName, schemas_TableOpAdd as TableOpAdd, schemas_TableOpRemove as TableOpRemove, schemas_TableOpRename as TableOpRename, schemas_TableRename as TableRename, schemas_TargetExpression as TargetExpression, schemas_TopValuesAgg as TopValuesAgg, schemas_TransactionDeleteOp as TransactionDeleteOp, schemas_TransactionError as TransactionError, schemas_TransactionFailure as 
TransactionFailure, schemas_TransactionGetOp as TransactionGetOp, schemas_TransactionInsertOp as TransactionInsertOp, TransactionOperation$1 as TransactionOperation, schemas_TransactionResultColumns as TransactionResultColumns, schemas_TransactionResultDelete as TransactionResultDelete, schemas_TransactionResultGet as TransactionResultGet, schemas_TransactionResultInsert as TransactionResultInsert, schemas_TransactionResultUpdate as TransactionResultUpdate, schemas_TransactionSuccess as TransactionSuccess, schemas_TransactionUpdateOp as TransactionUpdateOp, schemas_UniqueCountAgg as UniqueCountAgg, schemas_User as User, schemas_UserID as UserID, schemas_UserWithID as UserWithID, ValueBooster$1 as ValueBooster, schemas_WeeklyTimeWindow as WeeklyTimeWindow, schemas_Workspace as Workspace, schemas_WorkspaceID as WorkspaceID, schemas_WorkspaceInvite as WorkspaceInvite, schemas_WorkspaceMember as WorkspaceMember, schemas_WorkspaceMembers as WorkspaceMembers, schemas_WorkspaceMeta as WorkspaceMeta, schemas_WorkspacePlan as WorkspacePlan, schemas_WorkspaceSettings as WorkspaceSettings, XataRecord$1 as XataRecord }; } type ApiExtraProps = Omit; interface XataApiClientOptions { fetch?: FetchImpl; apiKey?: string; host?: HostProvider; trace?: TraceFunction; clientName?: string; xataAgentExtra?: Record; } declare class XataApiClient { #private; constructor(options?: XataApiClientOptions); get user(): UserApi; get authentication(): AuthenticationApi; get workspaces(): WorkspaceApi; get invites(): InvitesApi; get database(): DatabaseApi; get branches(): BranchApi; get migrations(): MigrationsApi; get migrationRequests(): MigrationRequestsApi; get tables(): TableApi; get records(): RecordsApi; get files(): FilesApi; get searchAndFilter(): SearchAndFilterApi; } declare class UserApi { private extraProps; constructor(extraProps: ApiExtraProps); getUser(): Promise; updateUser({ user }: { user: User; }): Promise; deleteUser(): Promise; } declare class AuthenticationApi { private extraProps; constructor(extraProps: ApiExtraProps); getUserAPIKeys(): Promise; createUserAPIKey({ name }: { name: APIKeyName; }): Promise; deleteUserAPIKey({ name }: { name: APIKeyName; }): Promise; } declare class WorkspaceApi { private extraProps; constructor(extraProps: ApiExtraProps); getWorkspacesList(): Promise; createWorkspace({ data }: { data: WorkspaceMeta; }): Promise; getWorkspace({ workspace }: { workspace: WorkspaceID; }): Promise; updateWorkspace({ workspace, update }: { workspace: WorkspaceID; update: WorkspaceMeta; }): Promise; deleteWorkspace({ workspace }: { workspace: WorkspaceID; }): Promise; getWorkspaceMembersList({ workspace }: { workspace: WorkspaceID; }): Promise; updateWorkspaceMemberRole({ workspace, user, role }: { workspace: WorkspaceID; user: UserID; role: Role; }): Promise; removeWorkspaceMember({ workspace, user }: { workspace: WorkspaceID; user: UserID; }): Promise; } declare class InvitesApi { private extraProps; constructor(extraProps: ApiExtraProps); inviteWorkspaceMember({ workspace, email, role }: { workspace: WorkspaceID; email: string; role: Role; }): Promise; updateWorkspaceMemberInvite({ workspace, invite, role }: { workspace: WorkspaceID; invite: InviteID; role: Role; }): Promise; cancelWorkspaceMemberInvite({ workspace, invite }: { workspace: WorkspaceID; invite: InviteID; }): Promise; acceptWorkspaceMemberInvite({ workspace, key }: { workspace: WorkspaceID; key: InviteKey; }): Promise; resendWorkspaceMemberInvite({ workspace, invite }: { workspace: WorkspaceID; invite: InviteID; }): 
Promise; } declare class BranchApi { private extraProps; constructor(extraProps: ApiExtraProps); getBranchList({ workspace, region, database }: { workspace: WorkspaceID; region: string; database: DBName$1; }): Promise; getBranchDetails({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; createBranch({ workspace, region, database, branch, from, metadata }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; from?: string; metadata?: BranchMetadata$1; }): Promise; deleteBranch({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; copyBranch({ workspace, region, database, branch, destinationBranch, limit }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; destinationBranch: BranchName$1; limit?: number; }): Promise; updateBranchMetadata({ workspace, region, database, branch, metadata }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; metadata: BranchMetadata$1; }): Promise; getBranchMetadata({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; getBranchStats({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; getGitBranchesMapping({ workspace, region, database }: { workspace: WorkspaceID; region: string; database: DBName$1; }): Promise; addGitBranchesEntry({ workspace, region, database, gitBranch, xataBranch }: { workspace: WorkspaceID; region: string; database: DBName$1; gitBranch: string; xataBranch: BranchName$1; }): Promise; removeGitBranchesEntry({ workspace, region, database, gitBranch }: { workspace: WorkspaceID; region: string; database: DBName$1; gitBranch: string; }): Promise; resolveBranch({ workspace, region, database, gitBranch, fallbackBranch }: { workspace: WorkspaceID; region: string; database: DBName$1; gitBranch?: string; fallbackBranch?: string; }): Promise; pgRollMigrationHistory({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; applyMigration({ workspace, region, database, branch, migration }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; migration: Migration; }): Promise; } declare class TableApi { private extraProps; constructor(extraProps: ApiExtraProps); createTable({ workspace, region, database, branch, table }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; }): Promise; deleteTable({ workspace, region, database, branch, table }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; }): Promise; updateTable({ workspace, region, database, branch, table, update }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; update: UpdateTableRequestBody; }): Promise; getTableSchema({ workspace, region, database, branch, table }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; }): Promise; setTableSchema({ workspace, region, database, branch, table, schema }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; schema: SetTableSchemaRequestBody; }): Promise; 
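/*
 * Usage sketch for the table management API above. This is illustrative only:
 * the response generics were stripped from these generated declarations, and
 * the workspace, region, database, and branch identifiers below are
 * hypothetical placeholders.
 *
 *   const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });
 *   await api.tables.createTable({
 *     workspace: 'my-workspace',
 *     region: 'us-east-1',
 *     database: 'my-db',
 *     branch: 'main',
 *     table: 'users'
 *   });
 */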
getTableColumns({ workspace, region, database, branch, table }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; }): Promise; addTableColumn({ workspace, region, database, branch, table, column }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; column: Column; }): Promise; getColumn({ workspace, region, database, branch, table, column }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; column: ColumnName; }): Promise; updateColumn({ workspace, region, database, branch, table, column, update }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; column: ColumnName; update: UpdateColumnRequestBody; }): Promise; deleteColumn({ workspace, region, database, branch, table, column }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; column: ColumnName; }): Promise; } declare class RecordsApi { private extraProps; constructor(extraProps: ApiExtraProps); insertRecord({ workspace, region, database, branch, table, record, columns }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: Record; columns?: ColumnsProjection; }): Promise; getRecord({ workspace, region, database, branch, table, id, columns }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; id: RecordID; columns?: ColumnsProjection; }): Promise; insertRecordWithID({ workspace, region, database, branch, table, id, record, columns, createOnly, ifVersion }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; id: RecordID; record: Record; columns?: ColumnsProjection; createOnly?: boolean; ifVersion?: number; }): Promise; updateRecordWithID({ workspace, region, database, branch, table, id, record, columns, ifVersion }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; id: RecordID; record: Record; columns?: ColumnsProjection; ifVersion?: number; }): Promise; upsertRecordWithID({ workspace, region, database, branch, table, id, record, columns, ifVersion }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; id: RecordID; record: Record; columns?: ColumnsProjection; ifVersion?: number; }): Promise; deleteRecord({ workspace, region, database, branch, table, id, columns }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; id: RecordID; columns?: ColumnsProjection; }): Promise; bulkInsertTableRecords({ workspace, region, database, branch, table, records, columns }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; records: Record[]; columns?: ColumnsProjection; }): Promise; branchTransaction({ workspace, region, database, branch, operations }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; operations: TransactionOperation$1[]; }): Promise; } declare class FilesApi { private extraProps; constructor(extraProps: ApiExtraProps); getFileItem({ workspace, region, database, branch, table, record, column, fileId }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; fileId: string; }): Promise; putFileItem({ workspace, 
region, database, branch, table, record, column, fileId, file }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; fileId: string; file: any; }): Promise; deleteFileItem({ workspace, region, database, branch, table, record, column, fileId }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; fileId: string; }): Promise; getFile({ workspace, region, database, branch, table, record, column }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; }): Promise; putFile({ workspace, region, database, branch, table, record, column, file }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; file: Blob; }): Promise; deleteFile({ workspace, region, database, branch, table, record, column }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; record: RecordID; column: ColumnName; }): Promise; fileAccess({ workspace, region, fileId, verify }: { workspace: WorkspaceID; region: string; fileId: string; verify?: FileSignature; }): Promise; } declare class SearchAndFilterApi { private extraProps; constructor(extraProps: ApiExtraProps); queryTable({ workspace, region, database, branch, table, filter, sort, page, columns, consistency }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; filter?: FilterExpression; sort?: SortExpression; page?: PageConfig; columns?: ColumnsProjection; consistency?: 'strong' | 'eventual'; }): Promise; searchTable({ workspace, region, database, branch, table, query, fuzziness, target, prefix, filter, highlight, boosters }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; query: string; fuzziness?: FuzzinessExpression; target?: TargetExpression; prefix?: PrefixExpression; filter?: FilterExpression; highlight?: HighlightExpression; boosters?: BoosterExpression[]; }): Promise; searchBranch({ workspace, region, database, branch, tables, query, fuzziness, prefix, highlight }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; tables?: (string | { table: string; filter?: FilterExpression; target?: TargetExpression; boosters?: BoosterExpression[]; })[]; query: string; fuzziness?: FuzzinessExpression; prefix?: PrefixExpression; highlight?: HighlightExpression; }): Promise; vectorSearchTable({ workspace, region, database, branch, table, queryVector, column, similarityFunction, size, filter }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; queryVector: number[]; column: string; similarityFunction?: string; size?: number; filter?: FilterExpression; }): Promise; askTable({ workspace, region, database, branch, table, options }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; options: AskTableRequestBody; }): Promise; askTableSession({ workspace, region, database, branch, table, sessionId, message }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; sessionId: string; message: string; }): Promise; summarizeTable({ workspace, region, database, branch, table, filter, columns, summaries, sort, 
summariesFilter, page, consistency }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; filter?: FilterExpression; columns?: ColumnsProjection; summaries?: SummaryExpressionList; sort?: SortExpression; summariesFilter?: FilterExpression; page?: { size?: number; }; consistency?: 'strong' | 'eventual'; }): Promise; aggregateTable({ workspace, region, database, branch, table, filter, aggs }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; table: TableName; filter?: FilterExpression; aggs?: AggExpressionMap; }): Promise; } declare class MigrationRequestsApi { private extraProps; constructor(extraProps: ApiExtraProps); queryMigrationRequests({ workspace, region, database, filter, sort, page, columns }: { workspace: WorkspaceID; region: string; database: DBName$1; filter?: FilterExpression; sort?: SortExpression; page?: PageConfig; columns?: ColumnsProjection; }): Promise; createMigrationRequest({ workspace, region, database, migration }: { workspace: WorkspaceID; region: string; database: DBName$1; migration: CreateMigrationRequestRequestBody; }): Promise; getMigrationRequest({ workspace, region, database, migrationRequest }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; }): Promise; updateMigrationRequest({ workspace, region, database, migrationRequest, update }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; update: UpdateMigrationRequestRequestBody; }): Promise; listMigrationRequestsCommits({ workspace, region, database, migrationRequest, page }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; page?: { after?: string; before?: string; size?: number; }; }): Promise; compareMigrationRequest({ workspace, region, database, migrationRequest }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; }): Promise; getMigrationRequestIsMerged({ workspace, region, database, migrationRequest }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; }): Promise; mergeMigrationRequest({ workspace, region, database, migrationRequest }: { workspace: WorkspaceID; region: string; database: DBName$1; migrationRequest: MigrationRequestNumber; }): Promise; } declare class MigrationsApi { private extraProps; constructor(extraProps: ApiExtraProps); getBranchMigrationHistory({ workspace, region, database, branch, limit, startFrom }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; limit?: number; startFrom?: string; }): Promise; getBranchMigrationPlan({ workspace, region, database, branch, schema }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; schema: Schema; }): Promise; executeBranchMigrationPlan({ workspace, region, database, branch, plan }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; plan: ExecuteBranchMigrationPlanRequestBody; }): Promise; getBranchSchemaHistory({ workspace, region, database, branch, page }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; page?: { after?: string; before?: string; size?: number; }; }): Promise; compareBranchWithUserSchema({ workspace, region, database, branch, schema, schemaOperations, branchOperations }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: 
BranchName$1; schema: Schema; schemaOperations?: MigrationOp[]; branchOperations?: MigrationOp[]; }): Promise; compareBranchSchemas({ workspace, region, database, branch, compare, sourceBranchOperations, targetBranchOperations }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; compare: BranchName$1; sourceBranchOperations?: MigrationOp[]; targetBranchOperations?: MigrationOp[]; }): Promise; updateBranchSchema({ workspace, region, database, branch, migration }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; migration: Migration; }): Promise; previewBranchSchemaEdit({ workspace, region, database, branch, data }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; data: { edits?: SchemaEditScript; }; }): Promise; applyBranchSchemaEdit({ workspace, region, database, branch, edits }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; edits: SchemaEditScript; }): Promise; pushBranchMigrations({ workspace, region, database, branch, migrations }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; migrations: MigrationObject[]; }): Promise; getSchema({ workspace, region, database, branch }: { workspace: WorkspaceID; region: string; database: DBName$1; branch: BranchName$1; }): Promise; }
declare class DatabaseApi { private extraProps; constructor(extraProps: ApiExtraProps); getDatabaseList({ workspace }: { workspace: WorkspaceID; }): Promise; createDatabase({ workspace, database, data, headers }: { workspace: WorkspaceID; database: DBName$1; data: CreateDatabaseRequestBody; headers?: Record; }): Promise; deleteDatabase({ workspace, database }: { workspace: WorkspaceID; database: DBName$1; }): Promise; getDatabaseMetadata({ workspace, database }: { workspace: WorkspaceID; database: DBName$1; }): Promise; updateDatabaseMetadata({ workspace, database, metadata }: { workspace: WorkspaceID; database: DBName$1; metadata: DatabaseMetadata; }): Promise; renameDatabase({ workspace, database, newName }: { workspace: WorkspaceID; database: DBName$1; newName: DBName$1; }): Promise; getDatabaseGithubSettings({ workspace, database }: { workspace: WorkspaceID; database: DBName$1; }): Promise; updateDatabaseGithubSettings({ workspace, database, settings }: { workspace: WorkspaceID; database: DBName$1; settings: DatabaseGithubSettings; }): Promise; deleteDatabaseGithubSettings({ workspace, database }: { workspace: WorkspaceID; database: DBName$1; }): Promise; listRegions({ workspace }: { workspace: WorkspaceID; }): Promise; }
declare class XataApiPlugin implements XataPlugin { build(options: XataPluginOptions): XataApiClient; }
type StringKeys<O> = Extract<keyof O, string>; type Values<O> = O[StringKeys<O>]; type UnionToIntersection<T> = (T extends any ? (x: T) => any : never) extends (x: infer R) => any ? R : never; type If<Condition, Then, Else> = Condition extends true ? Then : Else; type IsObject<T> = T extends Record<string, any> ? true : false; type IsArray<T> = T extends Array<any> ? true : false; type RequiredBy<T, K extends keyof T> = T & { [P in K]-?: NonNullable<T[P]>; }; type GetArrayInnerType<T extends readonly any[]> = T[number]; type SingleOrArray<T> = T | T[]; type Dictionary<T> = Record<string, T>; type OmitBy<T, K extends keyof T> = T extends any ? Omit<T, K> : never; type Without<T, U> = { [P in Exclude<keyof T, keyof U>]?: never; }; type ExclusiveOr<T, U> = T | U extends object ? (Without<T, U> & U) | (Without<U, T> & T) : T | U; type Explode<T> = keyof T extends infer K ? K extends unknown ? { [I in keyof T]: I extends K ? T[I] : never; } : never : never; type AtMostOne<T> = Explode<Partial<T>>; type AtLeastOne<T, U = { [K in keyof T]: Pick<T, K>; }> = Partial<T> & U[keyof U]; type ExactlyOne<T> = AtMostOne<T> & AtLeastOne<T>; type Fn = (...args: any[]) => any; type NarrowRaw<A> = (A extends [] ? [] : never) | (A extends Narrowable ? A : never) | { [K in keyof A]: A[K] extends Fn ? A[K] : NarrowRaw<A[K]>; }; type Narrowable = string | number | bigint | boolean; type Try<A1, A2, Catch = never> = A1 extends A2 ? A1 : Catch; type Narrow<A> = Try<A, [], NarrowRaw<A>>;
interface ImageTransformations { /** * Whether to preserve animation frames from input files. Default is true. * Setting it to false reduces animations to still images. This setting is * recommended when enlarging images or processing arbitrary user content, * because large GIF animations can weigh tens or even hundreds of megabytes. * It is also useful to set anim:false when using format:"json" to get the * response more quickly, without counting the animation frames. */ anim?: boolean; /** * Background color to add underneath the image. Applies only to images with * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), * hsl(…), etc.) */ background?: string; /** * Radius of a blur filter (approximate gaussian). Maximum supported radius * is 250. */ blur?: number; /** * Increase brightness by a factor. A value of 1.0 equals no change, a value * of 0.5 equals half brightness, and a value of 2.0 equals twice as bright. * 0 is ignored. */ brightness?: number; /** * Slightly reduces latency on a cache miss by selecting a * quickest-to-compress file format, at a cost of increased file size and * lower image quality. It will usually override the format option and choose * JPEG over WebP or AVIF. We do not recommend using this option, except in * unusual circumstances like resizing uncacheable dynamically-generated * images. */ compression?: 'fast'; /** * Increase contrast by a factor. A value of 1.0 equals no change, a value of * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is * ignored. */ contrast?: number; /** * Download file. Forces browser to download the image. * Value is used for the download file name. Extension is optional. */ download?: string; /** * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it * easier to specify higher-DPI sizes in `<img srcset>`. */ dpr?: number; /** * Resizing mode as a string. It affects interpretation of width and height * options: * - scale-down: Similar to contain, but the image is never enlarged. If * the image is larger than given width or height, it will be resized. * Otherwise its original size will be kept. * - contain: Resizes to maximum size that fits within the given width and * height. If only a single dimension is given (e.g. only width), the * image will be shrunk or enlarged to exactly match that dimension. * Aspect ratio is always preserved. * - cover: Resizes (shrinks or enlarges) to fill the entire area of width * and height. If the image has an aspect ratio different from the ratio * of width and height, it will be cropped to fit. * - crop: The image will be shrunk and cropped to fit within the area * specified by width and height. The image will not be enlarged. For images * smaller than the given dimensions it's the same as scale-down. For * images larger than the given dimensions, it's the same as cover. * See also trim. * - pad: Resizes to the maximum size that fits within the given width and * height, and then fills the remaining area with a background color * (white by default).
Use of this mode is not recommended, as the same * effect can be more efficiently achieved with the contain mode and the * CSS object-fit: contain property. */ fit?: 'scale-down' | 'contain' | 'cover' | 'crop' | 'pad'; /** * Output format to generate. It can be: * - avif: generate images in AVIF format. * - webp: generate images in Google WebP format. Set quality to 100 to get * the WebP-lossless format. * - json: instead of generating an image, outputs information about the * image, in JSON format. The JSON object will contain image size * (before and after resizing), source image’s MIME type, file size, etc. * - jpeg: generate images in JPEG format. * - png: generate images in PNG format. */ format?: 'auto' | 'avif' | 'webp' | 'json' | 'jpeg' | 'png'; /** * Increase exposure by a factor. A value of 1.0 equals no change, a value of * 0.5 darkens the image, and a value of 2.0 lightens the image. 0 is ignored. */ gamma?: number; /** * When cropping with fit: "cover", this defines the side or point that should * be left uncropped. The value is either a string * "left", "right", "top", "bottom", "auto", or "center" (the default), * or an object {x, y} containing focal point coordinates in the original * image expressed as fractions ranging from 0.0 (top or left) to 1.0 * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will * crop bottom or left and right sides as necessary, but won’t crop anything * from the top. {fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to * preserve as much as possible around a point at 20% of the height of the * source image. */ gravity?: 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | { x: number; y: number; }; /** * Maximum height in image pixels. The value must be an integer. */ height?: number; /** * What EXIF data should be preserved in the output image. Note that EXIF * rotation and embedded color profiles are always applied ("baked in" into * the image), and aren't affected by this option. Note that if the Polish * feature is enabled, all metadata may have been removed already and this * option may have no effect. * - keep: Preserve most of EXIF metadata, including GPS location if there's * any. * - copyright: Only keep the copyright tag, and discard everything else. * This is the default behavior for JPEG files. * - none: Discard all invisible EXIF metadata. Currently WebP and PNG * output formats always discard metadata. */ metadata?: 'keep' | 'copyright' | 'none'; /** * Quality setting from 1-100 (useful values are in 60-90 range). Lower values * make images look worse, but load faster. The default is 85. It applies only * to JPEG and WebP images. It doesn’t have any effect on PNG. */ quality?: number; /** * Number of degrees (90, 180, 270) to rotate the image by. width and height * options refer to axes after rotation. */ rotate?: 0 | 90 | 180 | 270 | 360; /** * Strength of sharpening filter to apply to the image. Floating-point * number between 0 (no sharpening, default) and 10 (maximum). 1.0 is a * recommended value for downscaled images. */ sharpen?: number; /** * An object with four properties {left, top, right, bottom} that specify * a number of pixels to cut off on each side. Allows removal of borders * or cutting out a specific fragment of an image. Trimming is performed * before resizing or rotation. Takes dpr into account. */ trim?: { left?: number; top?: number; right?: number; bottom?: number; }; /** * Maximum width in image pixels. The value must be an integer. 
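*
* A hedged example of combining these options with the `transformImage` helper
* declared just after this interface (the file URL is a hypothetical
* placeholder):
*
*   transformImage('https://example.xata.sh/some-image', {
*     width: 320,
*     fit: 'cover',
*     format: 'auto'
*   });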
*/ width?: number; } declare function transformImage(url: string, ...transformations: ImageTransformations[]): string; declare function transformImage(url: string | undefined, ...transformations: ImageTransformations[]): string | undefined; declare class Buffer extends Uint8Array { /** * Allocates a new buffer containing the given `str`. * * @param str String to store in buffer. * @param encoding Encoding to use, optional. Default is `utf8`. */ constructor(str: string, encoding?: Encoding); /** * Allocates a new buffer of `size` octets. * * @param size Count of octets to allocate. */ constructor(size: number); /** * Allocates a new buffer containing the given `array` of octets. * * @param array The octets to store. */ constructor(array: Uint8Array); /** * Allocates a new buffer containing the given `array` of octet values. * * @param array */ constructor(array: number[]); /** * Allocates a new buffer containing the given `array` of octet values. * * @param array * @param encoding */ constructor(array: number[], encoding: Encoding); /** * Copies the passed `buffer` data onto a new `Buffer` instance. * * @param buffer */ constructor(buffer: Buffer); /** * When passed a reference to the .buffer property of a TypedArray instance, the newly created Buffer will share * the same allocated memory as the TypedArray. The optional `byteOffset` and `length` arguments specify a memory * range within the `arrayBuffer` that will be shared by the Buffer. * * @param buffer The .buffer property of a TypedArray or a new ArrayBuffer(). * @param byteOffset * @param length */ constructor(buffer: ArrayBuffer, byteOffset?: number, length?: number); /** * Return JSON representation of the buffer. */ toJSON(): { type: 'Buffer'; data: number[]; }; /** * Writes `string` to the buffer, starting at offset 0, according to the character encoding in `encoding`. If the * buffer does not contain enough space to fit the entire string, only part of `string` will be written. However, * partially encoded characters will not be written. * * @param string String to write to `buf`. * @param encoding The character encoding of `string`. Default: `utf8`. */ write(string: string, encoding?: Encoding): number; /** * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length` * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string, * only part of `string` will be written. However, partially encoded characters will not be written. * * @param string String to write to `buf`. * @param offset Number of bytes to skip before starting to write `string`. Default: `0`. * @param length Maximum number of bytes to write. Default: `buf.length - offset`. * @param encoding The character encoding of `string`. Default: `utf8`. */ write(string: string, offset?: number, length?: number, encoding?: Encoding): number; /** * Decodes the buffer to a string according to the specified character encoding. * Passing `start` and `end` will decode only a subset of the buffer. * * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte * will be replaced with `U+FFFD`. * * @param encoding * @param start * @param end */ toString(encoding?: Encoding, start?: number, end?: number): string; /** * Returns true if this buffer is equal to the provided buffer, meaning they share the same exact data.
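*
* A minimal sketch, assuming this polyfill mirrors Node's `Buffer` semantics:
*
*   Buffer.from('xata', 'utf8').equals(Buffer.from('xata', 'utf8')); // true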
* * @param otherBuffer */ equals(otherBuffer: Buffer): boolean; /** * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after, * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each * buffer. * * - `0` is returned if `otherBuffer` is the same as this buffer. * - `1` is returned if `otherBuffer` should come before this buffer when sorted. * - `-1` is returned if `otherBuffer` should come after this buffer when sorted. * * @param otherBuffer The buffer to compare to. * @param targetStart The offset within `otherBuffer` at which to begin comparison. * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive). * @param sourceStart The offset within this buffer at which to begin comparison. * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive). */ compare(otherBuffer: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; /** * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory * region overlaps with this buffer. * * @param targetBuffer The target buffer to copy into. * @param targetStart The offset within `targetBuffer` at which to begin writing. * @param sourceStart The offset within this buffer at which to begin copying. * @param sourceEnd The offset within this buffer at which to end copying (exclusive). */ copy(targetBuffer: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; /** * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start` * and `end` indices. This is the same behavior as `buf.subarray()`. * * This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy * the slice, use `Uint8Array.prototype.slice()`. * * @param start * @param end */ slice(start?: number, end?: number): Buffer; /** * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits * of accuracy. Behavior is undefined when value is anything other than an unsigned integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param byteLength Number of bytes to write, between 0 and 6. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; /** * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param byteLength Number of bytes to write, between 0 and 6. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; /** * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits * of accuracy. Behavior is undefined when `value` is anything other than a signed integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param byteLength Number of bytes to write, between 0 and 6. 
* @param noAssert * @returns `offset` plus the number of bytes written. */ writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; /** * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits * of accuracy. Behavior is undefined when `value` is anything other than a signed integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param byteLength Number of bytes to write, between 0 and 6. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; /** * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an * unsigned, little-endian integer supporting up to 48 bits of accuracy. * * @param offset Number of bytes to skip before starting to read. * @param byteLength Number of bytes to read, between 0 and 6. * @param noAssert */ readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; /** * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an * unsigned, big-endian integer supporting up to 48 bits of accuracy. * * @param offset Number of bytes to skip before starting to read. * @param byteLength Number of bytes to read, between 0 and 6. * @param noAssert */ readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; /** * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a * little-endian, two's complement signed value supporting up to 48 bits of accuracy. * * @param offset Number of bytes to skip before starting to read. * @param byteLength Number of bytes to read, between 0 and 6. * @param noAssert */ readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; /** * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a * big-endian, two's complement signed value supporting up to 48 bits of accuracy. * * @param offset Number of bytes to skip before starting to read. * @param byteLength Number of bytes to read, between 0 and 6. * @param noAssert */ readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; /** * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readUInt8(offset: number, noAssert?: boolean): number; /** * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readUInt16LE(offset: number, noAssert?: boolean): number; /** * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readUInt16BE(offset: number, noAssert?: boolean): number; /** * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readUInt32LE(offset: number, noAssert?: boolean): number; /** * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`. * * @param offset Number of bytes to skip before starting to read. 
* @param noAssert */ readUInt32BE(offset: number, noAssert?: boolean): number; /** * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted * as two's complement signed values. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readInt8(offset: number, noAssert?: boolean): number; /** * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` * are interpreted as two's complement signed values. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readInt16LE(offset: number, noAssert?: boolean): number; /** * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` * are interpreted as two's complement signed values. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readInt16BE(offset: number, noAssert?: boolean): number; /** * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` * are interpreted as two's complement signed values. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readInt32LE(offset: number, noAssert?: boolean): number; /** * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` * are interpreted as two's complement signed values. * * @param offset Number of bytes to skip before starting to read. * @param noAssert */ readInt32BE(offset: number, noAssert?: boolean): number; /** * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place. * Throws a `RangeError` if `buf.length` is not a multiple of 2. */ swap16(): Buffer; /** * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place. * Throws a `RangeError` if `buf.length` is not a multiple of 4. */ swap32(): Buffer; /** * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place. * Throws a `RangeError` if `buf.length` is not a multiple of 8. */ swap64(): Buffer; /** * Swaps two octets. * * @param b * @param n * @param m */ private _swap; /** * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer. * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUInt8(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUInt16LE(value: number | string, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. 
* @param noAssert * @returns `offset` plus the number of bytes written. */ writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer. * Behavior is undefined when `value` is anything other than a signed 8-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeInt8(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeInt16LE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeInt16BE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeInt32LE(value: number, offset: number, noAssert?: boolean): number; /** * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer. * * @param value Number to write. * @param offset Number of bytes to skip before starting to write. * @param noAssert * @returns `offset` plus the number of bytes written. */ writeInt32BE(value: number, offset: number, noAssert?: boolean): number; /** * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. 
If the resulting * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`. * * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that * character that fit into `buf` are written. * * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown. * * @param value * @param encoding */ fill(value: any, offset?: number, end?: number, encoding?: Encoding): this; /** * Returns the index of the specified value. * * If `value` is: * - a string, `value` is interpreted according to the character encoding in `encoding`. * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`. * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`. * * Any other types will throw a `TypeError`. * * @param value What to search for. * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end. * @param encoding If `value` is a string, this is the encoding used to search. * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found. */ indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: Encoding): number; /** * Gets the last index of the specified value. * * @see indexOf() * @param value * @param byteOffset * @param encoding */ lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: Encoding): number; private _bidirectionalIndexOf; /** * Equivalent to `buf.indexOf() !== -1`. * * @param value * @param byteOffset * @param encoding */ includes(value: string | number | Buffer, byteOffset?: number, encoding?: Encoding): boolean; /** * Allocates a new Buffer using an `array` of octet values. * * @param array */ static from(array: number[]): Buffer; /** * When passed a reference to the .buffer property of a TypedArray instance, the newly created Buffer will share * the same allocated memory as the TypedArray. The optional `byteOffset` and `length` arguments specify a memory * range within the `arrayBuffer` that will be shared by the Buffer. * * @param buffer The .buffer property of a TypedArray or a new ArrayBuffer(). * @param byteOffset * @param length */ static from(buffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; /** * Copies the passed `buffer` data onto a new Buffer instance. * * @param buffer */ static from(buffer: Buffer | Uint8Array): Buffer; /** * Creates a new Buffer containing the given string `str`. If provided, the `encoding` parameter identifies the * character encoding. * * @param str String to store in buffer. * @param encoding Encoding to use, optional. Default is `utf8`. */ static from(str: string, encoding?: Encoding): Buffer; /** * Returns true if `obj` is a Buffer. * * @param obj */ static isBuffer(obj: any): obj is Buffer; /** * Returns true if `encoding` is a supported encoding. * * @param encoding */ static isEncoding(encoding: string): encoding is Encoding; /** * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that * returns the number of characters in the string. * * @param string The string to measure. * @param encoding The encoding to use for calculation. Default is `utf8`. */ static byteLength(string: string | Buffer | ArrayBuffer, encoding?: Encoding): number; /** * Returns a Buffer which is the result of concatenating all the buffers in the list together.
* * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer. * - If the list has exactly one item, then the first item is returned. * - If the list has more than one item, then a new buffer is created. * * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at * a small computational expense. * * @param list An array of Buffer objects to concatenate. * @param totalLength Total length of the buffers when concatenated. */ static concat(list: Uint8Array[], totalLength?: number): Buffer; /** * The same as `buf1.compare(buf2)`. */ static compare(buf1: Uint8Array, buf2: Uint8Array): number; /** * Allocates a new buffer of `size` octets. * * @param size The number of octets to allocate. * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise. * @param encoding The encoding used for the call to `buf.fill()` while initializing. */ static alloc(size: number, fill?: string | Buffer | number, encoding?: Encoding): Buffer; /** * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown. * * @param size */ static allocUnsafe(size: number): Buffer; /** * Returns true if the given `obj` is an instance of `type`. * * @param obj * @param type */ private static _isInstance; private static _checked; private static _blitBuffer; private static _utf8Write; private static _asciiWrite; private static _base64Write; private static _ucs2Write; private static _hexWrite; private static _utf8ToBytes; private static _base64ToBytes; private static _asciiToBytes; private static _utf16leToBytes; private static _hexSlice; private static _base64Slice; private static _utf8Slice; private static _decodeCodePointsArray; private static _asciiSlice; private static _latin1Slice; private static _utf16leSlice; private static _arrayIndexOf; private static _checkOffset; private static _checkInt; private static _getEncoding; } /** * The encodings that are supported in both native and polyfilled `Buffer` instances. */ type Encoding = 'ascii' | 'utf8' | 'utf16le' | 'ucs2' | 'binary' | 'hex' | 'latin1' | 'base64'; type XataFileEditableFields = Partial>; type XataFileFields = Partial<Pick<XataArrayFile, { [K in StringKeys<XataArrayFile>]: XataArrayFile[K] extends Function ? never : K; }[keyof XataArrayFile]>>; declare class XataFile { /** * Identifier of the file. */ id?: string; /** * Name of the file. */ name?: string; /** * Media type of the file. */ mediaType?: string; /** * Base64 encoded content of the file. */ base64Content?: string; /** * Whether to enable public url for the file. */ enablePublicUrl?: boolean; /** * Timeout for the signed url in seconds. Default: 60 seconds (1 minute). */ signedUrlTimeout?: number; /** * Time to live for upload URLs in seconds. Default: 86400 seconds (24 hours). */ uploadUrlTimeout?: number; /** * Size of the file. */ size?: number; /** * Version of the file. */ version?: number; /** * Url of the file. */ url?: string; /** * Signed url of the file (if requested, a temporary signed url will be returned). */ signedUrl?: string; /** * Upload url of the file (if requested, a temporary upload url will be returned). */ uploadUrl?: string; /** * Attributes of the file.
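*
* Usage sketch for this class (illustrative; several generic parameters were
* stripped from these generated declarations, so the option fields shown are
* assumptions based on the field list above):
*
*   const file = XataFile.fromBase64('aGVsbG8=', { name: 'photo.jpg' });
*   const { url } = file.transform({ width: 200, height: 200, fit: 'cover' });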
*/ attributes?: Record; constructor(file: Partial); static fromBuffer(buffer: Buffer, options?: XataFileEditableFields): XataFile; toBuffer(): Buffer; static fromArrayBuffer(arrayBuffer: ArrayBuffer, options?: XataFileEditableFields): XataFile; toArrayBuffer(): ArrayBuffer; static fromUint8Array(uint8Array: Uint8Array, options?: XataFileEditableFields): XataFile; toUint8Array(): Uint8Array; static fromBlob(file: Blob, options?: XataFileEditableFields): Promise; toBlob(): Blob; static fromString(string: string, options?: XataFileEditableFields): XataFile; toString(): string; static fromBase64(base64Content: string, options?: XataFileEditableFields): XataFile; toBase64(): string; transform(...options: ImageTransformations[]): { url: string | undefined; signedUrl: string | undefined; metadataUrl: string | undefined; metadataSignedUrl: string | undefined; }; } type XataArrayFile = Identifiable & XataFile; type SelectableColumn = '*' | 'id' | `xata.${'version' | 'createdAt' | 'updatedAt'}` | DataProps | NestedColumns; type ExpandedColumnNotation = { name: string; columns?: SelectableColumn[]; as?: string; limit?: number; offset?: number; order?: { column: string; order: 'asc' | 'desc'; }[]; }; type SelectableColumnWithObjectNotation = SelectableColumn | ExpandedColumnNotation; declare function isValidExpandedColumn(column: any): column is ExpandedColumnNotation; declare function isValidSelectableColumns(columns: any): columns is SelectableColumn[]; type StringColumns = T extends string ? T : never; type ProjectionColumns = T extends string ? never : T extends { as: infer As; } ? NonNullable extends string ? NonNullable : never : never; type WildcardColumns = Values<{ [K in SelectableColumn]: K extends `${string}*` ? K : never; }>; type ColumnsByValue = Values<{ [K in SelectableColumn]: ValueAtColumn extends infer C ? C extends Value ? K extends WildcardColumns ? never : K : never : never; }>; type SelectedPick[]> = XataRecord & UnionToIntersection]: NestedValueAtColumn & XataRecord; }>> & UnionToIntersection]: { [Key in K]: { records: (Record & XataRecord)[]; }; }; }>>; type ValueAtColumn = RecursivePath['length'] extends MAX_RECURSION ? never : Key extends '*' ? Values : Key extends 'id' ? string : Key extends 'xata.version' ? number : Key extends 'xata.createdAt' ? Date : Key extends 'xata.updatedAt' ? Date : Key extends keyof Object ? Object[Key] : Key extends `${infer K}.${infer V}` ? K extends keyof Object ? Values extends infer Item ? Item extends Record ? V extends SelectableColumn ? { V: ValueAtColumn; } : never : Object[K] : never> : never : never; type MAX_RECURSION = 3; type NestedColumns = RecursivePath['length'] extends MAX_RECURSION ? never : If, Values<{ [K in DataProps]: NonNullable extends infer Item ? If, Item extends (infer Type)[] ? Type extends XataArrayFile ? K | `${K}.${keyof XataFileFields | '*'}` : K | `${K}.${StringKeys | '*'}` : never, If, Item extends XataRecord ? SelectableColumn extends infer Column ? Column extends string ? K | `${K}.${Column}` : never : never : Item extends Date ? K : Item extends XataFile ? K | `${K}.${keyof XataFileFields | '*'}` : `${K}.${StringKeys | '*'}`, // This allows usage of objects that are not links K>> : never; }>, never>; type DataProps = Exclude, StringKeys>; type NestedValueAtColumn> = Key extends `${infer N}.${infer M}` ? N extends DataProps ? { [K in N]: M extends SelectableColumn> ? NonNullable extends XataFile ? ForwardNullable : NonNullable extends XataRecord ? 
ForwardNullable, M> & XataRecord> : ForwardNullable, M>> : NonNullable extends (infer ArrayType)[] ? ArrayType extends XataArrayFile ? ForwardNullable : M extends SelectableColumn> ? ForwardNullable, M>[]> : unknown : unknown; } : unknown : Key extends DataProps ? { [K in Key]: NonNullable extends XataRecord ? ForwardNullable, ['*']>, 'xata' | 'getMetadata'>> : O[K]; } : Key extends '*' ? { [K in StringKeys]: NonNullable extends XataRecord ? ForwardNullable>> : O[K]; } : unknown; type ForwardNullable = T extends NonNullable ? R : R | null; declare const RecordColumnTypes: readonly ["bool", "int", "float", "string", "text", "email", "multiple", "link", "datetime", "vector", "file[]", "file", "json"]; type Identifier = string; /** * Represents an identifiable record from the database. */ interface Identifiable { /** * Unique id of this record. */ id: Identifier; } interface BaseData { [key: string]: any; } /** * Represents a persisted record from the database. */ interface XataRecord = XataRecord> extends Identifiable { /** * Metadata of this record. */ xata: XataRecordMetadata; /** * Get metadata of this record. * @deprecated Use `xata` property instead. */ getMetadata(): XataRecordMetadata; /** * Get an object representation of this record. */ toSerializable(): JSONData; /** * Get a string representation of this record. */ toString(): string; /** * Retrieves a refreshed copy of the current record from the database. * @param columns The columns to retrieve. If not specified, all first level properties are retrieved. * @returns The persisted record with the selected columns, null if not found. */ read>(columns: K[]): Promise> | null>; /** * Retrieves a refreshed copy of the current record from the database. * @returns The persisted record with all first level properties, null if not found. */ read(): Promise> | null>; /** * Performs a partial update of the current record. On success a new object is * returned and the current object is not mutated. * @param partialUpdate The columns and their values that have to be updated. * @param columns The columns to retrieve. If not specified, all first level properties are retrieved. * @returns The persisted record with the selected columns, null if not found. */ update>(partialUpdate: Partial>, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; /** * Performs a partial update of the current record. On success a new object is * returned and the current object is not mutated. * @param partialUpdate The columns and their values that have to be updated. * @returns The persisted record with all first level properties, null if not found. */ update(partialUpdate: Partial>, options?: { ifVersion?: number; }): Promise> | null>; /** * Performs a replace of the current record. On success a new object is * returned and the current object is not mutated. * @param partialUpdate The columns and their values that have to be updated. * @param columns The columns to retrieve. If not specified, all first level properties are retrieved. * @returns The persisted record with the selected columns, null if not found. */ replace>(object: Partial>, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; /** * Performs a replace of the current record. On success a new object is * returned and the current object is not mutated. * @param partialUpdate The columns and their values that have to be updated. * @returns The persisted record with all first level properties, null if not found. 
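 *
 * A hypothetical sketch (assumes `user` is a record previously read from a `users` table):
 *
 * ```
 * const replaced = await user.replace({ name: 'Ada' }, { ifVersion: user.xata.version });
 * ```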
*/ replace(object: Partial>, options?: { ifVersion?: number; }): Promise> | null>; /** * Performs a deletion of the current record in the database. * @param columns The columns to retrieve. If not specified, all first level properties are retrieved. * @returns The deleted record, null if not found. */ delete>(columns: K[]): Promise> | null>; /** * Performs a deletion of the current record in the database. * @returns The deleted record, null if not found. */ delete(): Promise> | null>; } type Link = XataRecord; type XataRecordMetadata = { /** * Number that is increased every time the record is updated. */ version: number; /** * Timestamp when the record was created. */ createdAt: Date; /** * Timestamp when the record was last updated. */ updatedAt: Date; }; declare function isIdentifiable(x: any): x is Identifiable & Record; declare function isXataRecord(x: any): x is XataRecord & Record; type NumericOperator = ExclusiveOr<{ $increment?: number; }, ExclusiveOr<{ $decrement?: number; }, ExclusiveOr<{ $multiply?: number; }, { $divide?: number; }>>>; type InputXataFile = Partial | Promise>; type EditableDataFields = T extends XataRecord ? { id: Identifier; } | Identifier : NonNullable extends XataRecord ? { id: Identifier; } | Identifier | null | undefined : T extends Date ? string | Date : NonNullable extends Date ? string | Date | null | undefined : T extends XataFile ? InputXataFile : T extends XataFile[] ? InputXataFile[] : T extends number ? number | NumericOperator : T; type EditableData = Identifiable & Partial; }, keyof XataRecord>>; type JSONDataFile = { [K in keyof XataFile]: XataFile[K] extends Function ? never : XataFile[K]; }; type JSONDataFields = T extends XataFile ? JSONDataFile : NonNullable extends XataFile ? JSONDataFile | null | undefined : T extends XataRecord ? JSONData : NonNullable extends XataRecord ? JSONData | null | undefined : T extends Date ? string : NonNullable extends Date ? string | null | undefined : T; type JSONDataBase = Identifiable & { /** * Metadata about the record. */ xata: { /** * Timestamp when the record was created. */ createdAt: string; /** * Timestamp when the record was last updated. */ updatedAt: string; /** * Number that is increased every time the record is updated. */ version: number; }; }; type JSONData = JSONDataBase & Partial; }, keyof XataRecord>>; type JSONValue = Value & { __json: true; }; type JSONFilterColumns = Values<{ [K in keyof Record]: NonNullable extends JSONValue ? K extends string ? `${K}->${string}` : never : never; }>; type FilterColumns = ColumnsByValue | `xata.${keyof XataRecordMetadata}`; type FilterValueAtColumn = NonNullable> extends JSONValue ? 
PropertyFilter : Filter>>; /** * PropertyMatchFilter * Example: { "filter": { "name": "value", "name": { "$is": "value", "$any": [ "value1", "value2" ], }, } } */ type PropertyAccessFilter = { [key in FilterColumns]?: NestedApiFilter> | PropertyFilter>; } & { [key in JSONFilterColumns]?: PropertyFilter; }; type PropertyFilter = T | { $is: T; } | { $isNot: T; } | { $any: T[]; } | { $none: T[]; } | ValueTypeFilters; type IncludesFilter = PropertyFilter | { [key in '$all' | '$none' | '$any']?: IncludesFilter | Array | { $not: IncludesFilter; }>; }; type StringTypeFilter = { [key in '$contains' | '$iContains' | '$pattern' | '$iPattern' | '$startsWith' | '$endsWith']?: string; }; type ComparableType = number | Date; type ComparableTypeFilter = { [key in '$gt' | '$lt' | '$ge' | '$le']?: T; }; type ArrayFilter = { [key in '$includes']?: SingleOrArray | ValueTypeFilters> | IncludesFilter; } | { [key in '$includesAll' | '$includesNone' | '$includesAny']?: T | Array | { $not: PropertyFilter; }>; }; type ValueTypeFilters = T | T extends string ? StringTypeFilter : T extends number ? ComparableTypeFilter : T extends Date ? ComparableTypeFilter : T extends Array ? ArrayFilter : never; /** * AggregatorFilter * Example: { "filter": { "$any": { "dark": true, "plan": "free" } }, } { "filter": { "$any": [ { "name": "r1", }, { "name": "r2", }, ], } */ type AggregatorFilter = { [key in '$all' | '$any' | '$not' | '$none']?: SingleOrArray>; }; /** * Existance filter * Example: { filter: { $exists: "dark" } } */ type ExistanceFilter = { [key in '$exists' | '$notExists']?: FilterColumns; }; type BaseApiFilter = PropertyAccessFilter | AggregatorFilter | ExistanceFilter; /** * Nested filter * Injects the Api filters on nested properties * Example: { filter: { settings: { plan: { $any: ['free', 'trial'] } } } } */ type NestedApiFilter = { [key in keyof T]?: T[key] extends Record ? SingleOrArray> : PropertyFilter; }; type Filter = T extends Record ? T extends (infer ArrayType)[] ? ArrayType | ArrayType[] | ArrayFilter | ArrayFilter : T extends Date ? PropertyFilter : BaseApiFilter | NestedApiFilter : PropertyFilter; type DateBooster = { origin?: string; scale: string; decay: number; /** * The factor with which to multiply the added boost. * * @minimum 0 */ factor?: number; }; type NumericBooster = { factor: number; /** * Modifier to be applied to the column value, before being multiplied with the factor. The possible values are: * - none (default). * - log: common logarithm (base 10) * - log1p: add 1 then take the common logarithm. This ensures that the value is positive if the * value is between 0 and 1. * - ln: natural logarithm (base e) * - ln1p: add 1 then take the natural logarithm. This ensures that the value is positive if the * value is between 0 and 1. * - square: raise the value to the power of two. * - sqrt: take the square root of the value. * - reciprocal: reciprocate the value (if the value is `x`, the reciprocal is `1/x`). */ modifier?: 'none' | 'log' | 'log1p' | 'ln' | 'ln1p' | 'square' | 'sqrt' | 'reciprocal'; }; type ValueBooster = { value: T; factor: number; /** * Modifier to be applied to the column value, before being multiplied with the factor. The possible values are: * - none (default). * - log: common logarithm (base 10) * - log1p: add 1 then take the common logarithm. This ensures that the value is positive if the * value is between 0 and 1. * - ln: natural logarithm (base e) * - ln1p: add 1 then take the natural logarithm. 
This ensures that the value is positive if the * value is between 0 and 1. * - square: raise the value to the power of two. * - sqrt: take the square root of the value. * - reciprocal: reciprocate the value (if the value is `x`, the reciprocal is `1/x`). */ modifier?: 'none' | 'log' | 'log1p' | 'ln' | 'ln1p' | 'square' | 'sqrt' | 'reciprocal'; }; type Boosters = Values<{ [K in SelectableColumn]: NonNullable> extends Date ? { dateBooster: { column: K; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: Filter; } & DateBooster; } : NonNullable> extends number ? ExclusiveOr<{ numericBooster?: { column: K; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: Filter; } & NumericBooster; }, { valueBooster?: { column: K; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: Filter; } & ValueBooster; }> : NonNullable> extends string | boolean ? { valueBooster: { column: K; /** * Only apply this booster to the records for which the provided filter matches. */ ifMatchesFilter?: Filter; } & ValueBooster>>; } : never; }>; type TargetColumn = SelectableColumn | { /** * The name of the column. */ column: SelectableColumn; /** * The weight of the column. * * @default 1 * @maximum 10 * @minimum 1 */ weight?: number; }; type SearchOptions, Tables extends StringKeys> = { fuzziness?: FuzzinessExpression; prefix?: PrefixExpression; highlight?: HighlightExpression; tables?: Array>]: { table: Model; target?: TargetColumn[]; filter?: Filter>; boosters?: Boosters[]; }; }>>; page?: SearchPageConfig; }; type TotalCount = Pick; type SearchPluginResult> = { all: >(query: string, options?: SearchOptions) => Promise['tables']>>>]: { table: Model; record: Awaited>>; }; }>[]; }>; byTable: >(query: string, options?: SearchOptions) => Promise['tables']>>>]?: Awaited>[]>; }; }>; }; declare class SearchPlugin> extends XataPlugin { #private; private db; constructor(db: SchemaPluginResult); build(pluginOptions: XataPluginOptions): SearchPluginResult; } type SearchXataRecord = Omit & { xata: XataRecordMetadata & SearchExtraProperties; getMetadata: () => XataRecordMetadata & SearchExtraProperties; }; type SearchExtraProperties = { table: string; highlight?: { [key: string]: string[] | { [key: string]: any; }; }; score?: number; }; type ReturnTable = Table extends Tables ? Table : never; type ExtractTables, Tables extends StringKeys, TableOptions extends GetArrayInnerType>['tables']>>> = TableOptions extends `${infer Table}` ? ReturnTable : TableOptions extends { table: infer Table; } ? ReturnTable : never; /** * The description of a single aggregation operation. The key represents the */ type AggregationExpression = ExactlyOne<{ count: CountAggregation; sum: SumAggregation; max: MaxAggregation; min: MinAggregation; average: AverageAggregation; percentiles: PercentilesAggregation; uniqueCount: UniqueCountAggregation; dateHistogram: DateHistogramAggregation; topValues: TopValuesAggregation; numericHistogram: NumericHistogramAggregation; }>; type AggregationResult>> = { aggs: { [K in keyof Expression]: AggregationResultItem; }; }; type AggregationExpressionType> = keyof T; type AggregationResultItem> = AggregationExpressionType extends infer Type ? Type extends keyof AggregationExpressionResultTypes ? AggregationExpressionResultTypes[Type] : never : never; /** * Count the number of records with an optional filter. 
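 *
 * Example (an illustrative sketch; the `xata` client instance and `posts` table are assumptions):
 *
 * ```
 * const { aggs } = await xata.db.posts.aggregate({
 *   total: { count: '*' },
 *   published: { count: { filter: { published: true } } } // hypothetical boolean column
 * });
 * ```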
*/ type CountAggregation = { filter?: Filter; } | '*'; /** * The sum of the numeric values in a particular column. */ type SumAggregation = { /** * The column on which to compute the sum. Must be a numeric type. */ column: ColumnsByValue; }; /** * The max of the numeric values in a particular column. */ type MaxAggregation = { /** * The column on which to compute the max. Must be a numeric type. */ column: ColumnsByValue; }; /** * The min of the numeric values in a particular column. */ type MinAggregation = { /** * The column on which to compute the min. Must be a numeric type. */ column: ColumnsByValue; }; /** * The average of the numeric values in a particular column. */ type AverageAggregation = { /** * The column on which to compute the average. Must be a numeric type. */ column: ColumnsByValue; }; /** * Calculate given percentiles of the numeric values in a particular column. */ type PercentilesAggregation = { /** * The column on which to compute the percentiles. Must be a numeric type. */ column: ColumnsByValue; percentiles: number[]; }; /** * Count the number of distinct values in a particular column. */ type UniqueCountAggregation = { /** * The column from where to count the unique values. */ column: ColumnsByValue; /** * The threshold under which the unique count is exact. If the number of unique * values in the column is higher than this threshold, the results are approximate. * Maximum value is 40,000, default value is 3000. */ precisionThreshold?: number; }; /** * Split data into buckets by a datetime column. Accepts sub-aggregations for each bucket. */ type DateHistogramAggregation = { /** * The column to use for bucketing. Must be of type datetime. */ column: ColumnsByValue; /** * The fixed interval to use when bucketing. * It is formatted as number + units, for example: `5d`, `20m`, `10s`. * * @pattern ^(\d+)(d|h|m|s|ms)$ */ interval?: string; /** * The calendar-aware interval to use when bucketing. Possible values are: `minute`, * `hour`, `day`, `week`, `month`, `quarter`, `year`. */ calendarInterval?: 'minute' | 'hour' | 'day' | 'week' | 'month' | 'quarter' | 'year'; /** * The timezone to use for bucketing. By default, UTC is assumed. * The accepted format is as an ISO 8601 UTC offset. For example: `+01:00` or * `-08:00`. * * @pattern ^[+-][01]\d:[0-5]\d$ */ timezone?: string; aggs?: Dictionary>; }; /** * Split data into buckets by the unique values in a column. Accepts sub-aggregations for each bucket. * The top values, ordered by the number of records (`$count`), are returned. */ type TopValuesAggregation = { /** * The column to use for bucketing. Accepted types are `string`, `email`, `int`, `float`, or `bool`. */ column: ColumnsByValue; aggs?: Dictionary>; /** * The maximum number of unique values to return. * * @default 10 * @maximum 1000 */ size?: number; }; /** * Split data into buckets by dynamic numeric ranges. Accepts sub-aggregations for each bucket. */ type NumericHistogramAggregation = { /** * The column to use for bucketing. Must be of numeric type. */ column: ColumnsByValue; /** * The numeric interval to use for bucketing. The resulting buckets will be ranges * with this value as size. * * @minimum 0 */ interval: number; /** * By default the bucket keys start with 0 and then continue in `interval` steps. The bucket * boundaries can be shifted by using the offset option. For example, if the `interval` is 100, * but you prefer the bucket boundaries to be `[50, 150), [150, 250), etc.`, you can set `offset` * to 50.
* * @default 0 */ offset?: number; aggs?: Dictionary>; }; type AggregationExpressionResultTypes = { count: number; sum: number | null; max: number | null; min: number | null; average: number | null; percentiles: { values: { [key: string]: number; }; }; uniqueCount: number; dateHistogram: ComplexAggregationResult; topValues: ComplexAggregationResult; numericHistogram: ComplexAggregationResult; }; type ComplexAggregationResult = { values: Array<{ $key: string | number; $count: number; [key: string]: any; }>; }; type KeywordAskOptions = { searchType?: 'keyword'; search?: { fuzziness?: FuzzinessExpression; target?: TargetColumn[]; prefix?: PrefixExpression; filter?: Filter; boosters?: Boosters[]; }; }; type VectorAskOptions = { searchType?: 'vector'; vectorSearch?: { /** * The column to use for vector search. It must be of type `vector`. */ column: string; /** * The column containing the text for vector search. Must be of type `text`. */ contentColumn: string; filter?: Filter; }; }; type TypeAskOptions = KeywordAskOptions | VectorAskOptions; type BaseAskOptions = { rules?: string[]; sessionId?: string; }; type AskOptions = TypeAskOptions & BaseAskOptions; type AskResult = { answer?: string; records?: string[]; sessionId?: string; }; type SortDirection = 'asc' | 'desc'; type RandomFilter = { '*': 'random'; }; type RandomFilterExtended = { column: '*'; direction: 'random'; }; type SortColumns = ColumnsByValue | `xata.${keyof XataRecordMetadata}`; type SortFilterExtended> = RandomFilterExtended | { column: Columns; direction?: SortDirection; }; type SortFilter> = Columns | SortFilterExtended | SortFilterBase | RandomFilter; type SortFilterBase> = Values<{ [Key in Columns]: { [K in Key]: SortDirection; }; }>; type SummarizeExpression = ExactlyOne<{ count: ColumnsByValue | '*'; min: ColumnsByValue; max: ColumnsByValue; sum: ColumnsByValue; average: ColumnsByValue; }>; type SummarizeParams>, Columns extends SelectableColumn[]> = { summaries?: Expression; summariesFilter?: SummarizeFilter; filter?: Filter; columns?: Columns; sort?: SummarizeSort; pagination?: { size: number; }; consistency?: 'strong' | 'eventual'; }; type SummarizeResult>, Columns extends SelectableColumn[]> = { summaries: SummarizeResultItem[]; }; type SummarizeExpressionResultTypes = { count: number; min: Value; max: Value; sum: number; average: number; }; type SummarizeSort>> = SingleOrArray | StringKeys>>; type SummarizeValuePick>> = { [K in StringKeys]: StringKeys extends infer SummarizeOperation ? SummarizeOperation extends keyof Expression[K] ? Expression[K][SummarizeOperation] extends infer Column ? Column extends SelectableColumn ? SummarizeOperation extends keyof SummarizeExpressionResultTypes ? SummarizeExpressionResultTypes>[SummarizeOperation] : never : never : never : never : never; }; type SummarizeFilter>> = Filter>; type SummarizeResultItem>, Columns extends SelectableColumn[]> = SummarizeValuePick & SelectedPick; type BaseOptions = { columns?: SelectableColumnWithObjectNotation[]; consistency?: 'strong' | 'eventual'; cache?: number; fetchOptions?: Record; }; type CursorQueryOptions = { pagination?: CursorNavigationOptions & OffsetNavigationOptions; filter?: never; sort?: never; }; type OffsetQueryOptions = { pagination?: OffsetNavigationOptions; filter?: FilterExpression; sort?: SingleOrArray>; }; type QueryOptions = BaseOptions & (CursorQueryOptions | OffsetQueryOptions); /** * Query objects contain the information of all filters, sorting, etc. to be included in the database query. * * Query objects are immutable. 
Any method that adds more constraints or options to the query will return * a new Query object containing both the previous and the new constraints and options. */ declare class Query implements Paginable { #private; readonly meta: PaginationQueryMeta; readonly records: PageRecordArray; constructor(repository: RestRepository | null, table: { name: string; schema?: Table; }, data: Partial>, rawParent?: Partial>); getQueryOptions(): QueryOptions; key(): string; /** * Builds a new query object representing a logical OR between the given subqueries. * @param queries An array of subqueries. * @returns A new Query object. */ any(...queries: Query[]): Query; /** * Builds a new query object representing a logical AND between the given subqueries. * @param queries An array of subqueries. * @returns A new Query object. */ all(...queries: Query[]): Query; /** * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2 * @param queries An array of subqueries. * @returns A new Query object. */ not(...queries: Query[]): Query; /** * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2 * @param queries An array of subqueries. * @returns A new Query object. */ none(...queries: Query[]): Query; /** * Builds a new query object adding one or more constraints. Examples: * * ``` * query.filter("columnName", columnValue) * query.filter("columnName", operator(columnValue)) // Use gt, gte, lt, lte, startsWith,... * ``` * * @param column The name of the column to filter. * @param value The value to filter. * @returns A new Query object. */ filter | JSONFilterColumns>(column: F, value: FilterValueAtColumn): Query; /** * Builds a new query object adding one or more constraints. Examples: * * ``` * query.filter({ "columnName": columnValue }) * query.filter({ * "columnName": operator(columnValue) // Use gt, gte, lt, lte, startsWith,... * }) * ``` * * @param filter A filter object * @returns A new Query object. */ filter(filter?: Filter): Query; /** * Builds a new query with a new sort option. * @param column The column name. * @param direction The direction. Either ascending or descending. * @returns A new Query object. */ sort>(column: F, direction: SortDirection): Query; sort(column: '*', direction: 'random'): Query; sort>(column: F): Query; /** * Builds a new query specifying the set of columns to be returned in the query response. * @param columns Array of column names to be returned by the query. * @returns A new Query object.
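 *
 * Example (a sketch against a hypothetical `users` table and `xata` client):
 *
 * ```
 * const partial = await xata.db.users.select(['name', 'email']).getFirst();
 * ```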
*/ select>(columns: K[]): Query>; /** * Get paginated results * * @returns A page of results */ getPaginated(): Promise>; /** * Get paginated results * * @param options Pagination options * @returns A page of results */ getPaginated(options: OmitBy, 'columns'>): Promise>; /** * Get paginated results * * @param options Pagination options * @returns A page of results */ getPaginated, 'columns'>>(options: Options): Promise>>; /** * Get results in an iterator * * @async * @returns Async iterable of results */ [Symbol.asyncIterator](): AsyncIterableIterator; /** * Build an iterator of results * * @returns Async generator of results array */ getIterator(): AsyncGenerator; /** * Build an iterator of results * * @param options Pagination options with batchSize * @returns Async generator of results array */ getIterator(options: OmitBy, 'columns' | 'pagination'> & { batchSize?: number; }): AsyncGenerator; /** * Build an iterator of results * * @param options Pagination options with batchSize * @returns Async generator of results array */ getIterator, 'pagination'>, 'columns'> & { batchSize?: number; }>(options: Options): AsyncGenerator[]>; /** * Performs the query in the database and returns a set of results. * @returns An array of records from the database. */ getMany(): Promise>; /** * Performs the query in the database and returns a set of results. * @param options Additional options to be used when performing the query. * @returns An array of records from the database. */ getMany, 'columns'>>(options: Options): Promise>>; /** * Performs the query in the database and returns a set of results. * @param options Additional options to be used when performing the query. * @returns An array of records from the database. */ getMany(options: OmitBy, 'columns'>): Promise>; /** * Performs the query in the database and returns all the results. * Warning: If there are a large number of results, this method can have performance implications. * @returns An array of records from the database. */ getAll(): Promise>; /** * Performs the query in the database and returns all the results. * Warning: If there are a large number of results, this method can have performance implications. * @param options Additional options to be used when performing the query. * @returns An array of records from the database. */ getAll, 'pagination'>, 'columns'> & { batchSize?: number; }>(options: Options): Promise>>; /** * Performs the query in the database and returns all the results. * Warning: If there are a large number of results, this method can have performance implications. * @param options Additional options to be used when performing the query. * @returns An array of records from the database. */ getAll(options: OmitBy, 'columns' | 'pagination'> & { batchSize?: number; }): Promise>; /** * Performs the query in the database and returns the first result. * @returns The first record that matches the query, or null if no record matched the query. */ getFirst(): Promise; /** * Performs the query in the database and returns the first result. * @param options Additional options to be used when performing the query. * @returns The first record that matches the query, or null if no record matched the query. */ getFirst, 'pagination'>, 'columns'>>(options: Options): Promise | null>; /** * Performs the query in the database and returns the first result. * @param options Additional options to be used when performing the query. * @returns The first record that matches the query, or null if no record matched the query.
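 *
 * Illustrative sketch (the `users` table and `role` column are assumptions):
 *
 * ```
 * const admin = await xata.db.users.filter({ role: 'admin' }).getFirst();
 * ```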
*/ getFirst(options: OmitBy, 'columns' | 'pagination'>): Promise; /** * Performs the query in the database and returns the first result. * @returns The first record that matches the query. * @throws if there are no results. */ getFirstOrThrow(): Promise; /** * Performs the query in the database and returns the first result. * @param options Additional options to be used when performing the query. * @returns The first record that matches the query. * @throws if there are no results. */ getFirstOrThrow, 'pagination'>, 'columns'>>(options: Options): Promise>; /** * Performs the query in the database and returns the first result. * @param options Additional options to be used when performing the query. * @returns The first record that matches the query. * @throws if there are no results. */ getFirstOrThrow(options: OmitBy, 'columns' | 'pagination'>): Promise; summarize>, Columns extends SelectableColumn[]>(params?: SummarizeParams): Promise>; /** * Builds a new query object adding a cache TTL in milliseconds. * @param ttl The cache TTL in milliseconds. * @returns A new Query object. */ cache(ttl: number): Query; /** * Retrieve next page of records * * @returns A new page object. */ nextPage(size?: number, offset?: number): Promise>; /** * Retrieve previous page of records * * @returns A new page object */ previousPage(size?: number, offset?: number): Promise>; /** * Retrieve start page of records * * @returns A new page object */ startPage(size?: number, offset?: number): Promise>; /** * Retrieve last page of records * * @returns A new page object */ endPage(size?: number, offset?: number): Promise>; /** * @returns Boolean indicating if there is a next page */ hasNextPage(): boolean; } type PaginationQueryMeta = { page: { cursor: string; more: boolean; size: number; }; }; interface Paginable { meta: PaginationQueryMeta; records: PageRecordArray; nextPage(size?: number, offset?: number): Promise>; previousPage(size?: number, offset?: number): Promise>; startPage(size?: number, offset?: number): Promise>; endPage(size?: number, offset?: number): Promise>; hasNextPage(): boolean; } /** * A Page contains a set of results from a query plus metadata about the retrieved * set of values such as the cursor, required to retrieve additional records. */ declare class Page implements Paginable { #private; /** * Page metadata, required to retrieve additional records. */ readonly meta: PaginationQueryMeta; /** * The set of results for this page. */ readonly records: PageRecordArray; constructor(query: Query, meta: PaginationQueryMeta, records?: Result[]); /** * Retrieves the next page of results. * @param size Maximum number of results to be retrieved. * @param offset Number of results to skip when retrieving the results. * @returns The next page of results. */ nextPage(size?: number, offset?: number): Promise>; /** * Retrieves the previous page of results. * @param size Maximum number of results to be retrieved. * @param offset Number of results to skip when retrieving the results. * @returns The previous page of results. */ previousPage(size?: number, offset?: number): Promise>; /** * Retrieves the start page of results. * @param size Maximum number of results to be retrieved. * @param offset Number of results to skip when retrieving the results. * @returns The start page of results. */ startPage(size?: number, offset?: number): Promise>; /** * Retrieves the end page of results.
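 *
 * For example (an illustrative sketch; `xata.db.users` is an assumption):
 *
 * ```
 * let page = await xata.db.users.getPaginated({ pagination: { size: 50 } });
 * const last = await page.endPage(); // jump straight to the final page
 * ```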
* @param size Maximum number of results to be retrieved. * @param offset Number of results to skip when retrieving the results. * @returns The end page of results. */ endPage(size?: number, offset?: number): Promise>; /** * Shortcut method to check if there will be additional results if the next page of results is retrieved. * @returns Whether or not there will be additional results in the next page of results. */ hasNextPage(): boolean; } type CursorNavigationOptions = { start?: string; } | { end?: string; } | { after?: string; before?: string; }; type OffsetNavigationOptions = { size?: number; offset?: number; }; declare const PAGINATION_MAX_SIZE = 1000; declare const PAGINATION_DEFAULT_SIZE = 20; declare const PAGINATION_MAX_OFFSET = 49000; declare const PAGINATION_DEFAULT_OFFSET = 0; declare function isCursorPaginationOptions(options: Record | undefined | null): options is CursorNavigationOptions; declare class RecordArray extends Array { constructor(overrideRecords?: Result[]); static parseConstructorParams(...args: any[]): any[]; toArray(): Result[]; toSerializable(): JSONData[]; toString(): string; map(callbackfn: (value: Result, index: number, array: Result[]) => U, thisArg?: any): U[]; } declare class PageRecordArray extends Array { #private; constructor(page: Paginable, overrideRecords?: Result[]); static parseConstructorParams(...args: any[]): any[]; toArray(): Result[]; toSerializable(): JSONData[]; toString(): string; map(callbackfn: (value: Result, index: number, array: Result[]) => U, thisArg?: any): U[]; /** * Retrieve next page of records * * @returns A new array of objects */ nextPage(size?: number, offset?: number): Promise>; /** * Retrieve previous page of records * * @returns A new array of objects */ previousPage(size?: number, offset?: number): Promise>; /** * Retrieve start page of records * * @returns A new array of objects */ startPage(size?: number, offset?: number): Promise>; /** * Retrieve end page of records * * @returns A new array of objects */ endPage(size?: number, offset?: number): Promise>; /** * @returns Boolean indicating if there is a next page */ hasNextPage(): boolean; } /** * Common interface for performing operations on a table. */ declare abstract class Repository extends Query>> { abstract create>(object: Omit, 'id'> & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; abstract create(object: Omit, 'id'> & Partial, options?: { ifVersion?: number; }): Promise>>; /** * Creates a single record in the table with a unique id. * @param id The unique id. * @param object Object containing the column names with their values to be stored in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. */ abstract create>(id: Identifier, object: Omit, 'id'>, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Creates a single record in the table with a unique id. * @param id The unique id. * @param object Object containing the column names with their values to be stored in the table. * @returns The full persisted record. */ abstract create(id: Identifier, object: Omit, 'id'>, options?: { ifVersion?: number; }): Promise>>; /** * Creates multiple records in the table. * @param objects Array of objects with the column names and the values to be stored in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the persisted records in order.
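 *
 * Example (a sketch, assuming a hypothetical `users` table):
 *
 * ```
 * const created = await xata.db.users.create(
 *   [{ name: 'Ada' }, { name: 'Grace' }],
 *   ['name']
 * );
 * ```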
*/ abstract create>(objects: Array, 'id'> & Partial>, columns: K[]): Promise>[]>; /** * Creates multiple records in the table. * @param objects Array of objects with the column names and the values to be stored in the table. * @returns Array of the persisted records in order. */ abstract create(objects: Array, 'id'> & Partial>): Promise>[]>; /** * Queries a single record from the table given its unique id. * @param id The unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted record for the given id or null if the record could not be found. */ abstract read>(id: Identifier, columns: K[]): Promise | null>>; /** * Queries a single record from the table given its unique id. * @param id The unique id. * @returns The persisted record for the given id or null if the record could not be found. */ abstract read(id: Identifier): Promise | null>>; /** * Queries multiple records from the table given their unique id. * @param ids The unique ids array. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted records for the given ids in order (if a record could not be found null is returned). */ abstract read>(ids: ReadonlyArray, columns: K[]): Promise> | null>>; /** * Queries multiple records from the table given their unique id. * @param ids The unique ids array. * @returns The persisted records for the given ids in order (if a record could not be found null is returned). */ abstract read(ids: ReadonlyArray): Promise> | null>>; /** * Queries a single record from the table by the id in the object. * @param object Object containing the id of the record. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted record for the given id or null if the record could not be found. */ abstract read>(object: Identifiable, columns: K[]): Promise | null>>; /** * Queries a single record from the table by the id in the object. * @param object Object containing the id of the record. * @returns The persisted record for the given id or null if the record could not be found. */ abstract read(object: Identifiable): Promise | null>>; /** * Queries multiple records from the table by the ids in the objects. * @param objects Array of objects containing the ids of the records. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted records for the given ids in order (if a record could not be found null is returned). */ abstract read>(objects: Identifiable[], columns: K[]): Promise> | null>>; /** * Queries multiple records from the table by the ids in the objects. * @param objects Array of objects containing the ids of the records. * @returns The persisted records for the given ids in order (if a record could not be found null is returned). */ abstract read(objects: Identifiable[]): Promise> | null>>; /** * Queries a single record from the table given its unique id. * @param id The unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted record for the given id. * @throws If the record could not be found. */ abstract readOrThrow>(id: Identifier, columns: K[]): Promise>>; /** * Queries a single record from the table given its unique id. * @param id The unique id. * @returns The persisted record for the given id. 
* @throws If the record could not be found. */ abstract readOrThrow(id: Identifier): Promise>>; /** * Queries multiple records from the table given their unique id. * @param ids The unique ids array. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted records for the given ids in order. * @throws If one or more records could not be found. */ abstract readOrThrow>(ids: ReadonlyArray, columns: K[]): Promise>>>; /** * Queries multiple records from the table given their unique id. * @param ids The unique ids array. * @returns The persisted records for the given ids in order. * @throws If one or more records could not be found. */ abstract readOrThrow(ids: ReadonlyArray): Promise>>>; /** * Queries a single record from the table by the id in the object. * @param object Object containing the id of the record. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted record for the given id. * @throws If the record could not be found. */ abstract readOrThrow>(object: Identifiable, columns: K[]): Promise>>; /** * Queries a single record from the table by the id in the object. * @param object Object containing the id of the record. * @returns The persisted record for the given id. * @throws If the record could not be found. */ abstract readOrThrow(object: Identifiable): Promise>>; /** * Queries multiple records from the table by the ids in the objects. * @param objects Array of objects containing the ids of the records. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The persisted records for the given ids in order. * @throws If one or more records could not be found. */ abstract readOrThrow>(objects: Identifiable[], columns: K[]): Promise>>>; /** * Queries multiple records from the table by the ids in the objects. * @param objects Array of objects containing the ids of the records. * @returns The persisted records for the given ids in order. * @throws If one or more records could not be found. */ abstract readOrThrow(objects: Identifiable[]): Promise>>>; /** * Partially update a single record. * @param object An object with its id and the columns to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record, null if the record could not be found. */ abstract update>(object: Partial> & Identifiable, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; /** * Partially update a single record. * @param object An object with its id and the columns to be updated. * @returns The full persisted record, null if the record could not be found. */ abstract update(object: Partial> & Identifiable, options?: { ifVersion?: number; }): Promise> | null>; /** * Partially update a single record given its unique id. * @param id The unique id. * @param object The column names and their values that have to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record, null if the record could not be found. */ abstract update>(id: Identifier, object: Partial>, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; /** * Partially update a single record given its unique id. * @param id The unique id. * @param object The column names and their values that have to be updated. 
* @returns The full persisted record, null if the record could not be found. */ abstract update(id: Identifier, object: Partial>, options?: { ifVersion?: number; }): Promise> | null>; /** * Partially updates multiple records. * @param objects An array of objects with their ids and columns to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the persisted records in order (if a record could not be found null is returned). */ abstract update>(objects: Array> & Identifiable>, columns: K[]): Promise> | null>>; /** * Partially updates multiple records. * @param objects An array of objects with their ids and columns to be updated. * @returns Array of the persisted records in order (if a record could not be found null is returned). */ abstract update(objects: Array> & Identifiable>): Promise> | null>>; /** * Partially update a single record. * @param object An object with its id and the columns to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. * @throws If the record could not be found. */ abstract updateOrThrow>(object: Partial> & Identifiable, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Partially update a single record. * @param object An object with its id and the columns to be updated. * @returns The full persisted record. * @throws If the record could not be found. */ abstract updateOrThrow(object: Partial> & Identifiable, options?: { ifVersion?: number; }): Promise>>; /** * Partially update a single record given its unique id. * @param id The unique id. * @param object The column names and their values that have to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. * @throws If the record could not be found. */ abstract updateOrThrow>(id: Identifier, object: Partial>, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Partially update a single record given its unique id. * @param id The unique id. * @param object The column names and their values that have to be updated. * @returns The full persisted record. * @throws If the record could not be found. */ abstract updateOrThrow(id: Identifier, object: Partial>, options?: { ifVersion?: number; }): Promise>>; /** * Partially updates multiple records. * @param objects An array of objects with their ids and columns to be updated. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the persisted records in order. * @throws If one or more records could not be found. */ abstract updateOrThrow>(objects: Array> & Identifiable>, columns: K[]): Promise>[]>; /** * Partially updates multiple records. * @param objects An array of objects with their ids and columns to be updated. * @returns Array of the persisted records in order. * @throws If one or more records could not be found. */ abstract updateOrThrow(objects: Array> & Identifiable>): Promise>[]>; /** * Creates or updates a single record. If a record exists with the given id, * it will be partially updated, otherwise a new record will be created. * @param object Object containing the column names with their values to be persisted in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. 
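 *
 * Illustrative sketch (hypothetical table and id; the id decides between update and insert):
 *
 * ```
 * const upserted = await xata.db.users.createOrUpdate({ id: 'rec_1', name: 'Ada' }, ['name']);
 * ```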
*/ abstract createOrUpdate>(object: Omit, 'id'> & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Creates or updates a single record. If a record exists with the given id, * it will be partially updated, otherwise a new record will be created. * @param object Object containing the column names with their values to be persisted in the table. * @returns The full persisted record. */ abstract createOrUpdate(object: Omit, 'id'> & Partial, options?: { ifVersion?: number; }): Promise>>; /** * Creates or updates a single record. If a record exists with the given id, * it will be partially updated, otherwise a new record will be created. * @param id A unique id. * @param object The column names and the values to be persisted. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. */ abstract createOrUpdate>(id: Identifier | undefined, object: Omit, 'id'>, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Creates or updates a single record. If a record exists with the given id, * it will be partially updated, otherwise a new record will be created. * @param id A unique id. * @param object The column names and the values to be persisted. * @returns The full persisted record. */ abstract createOrUpdate(id: Identifier | undefined, object: Omit, 'id'>, options?: { ifVersion?: number; }): Promise>>; /** * Creates or updates multiple records. For each object, if a record exists with its id, * it will be partially updated, otherwise a new record will be created. * @param objects Array of objects with the column names and the values to be stored in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the persisted records. */ abstract createOrUpdate>(objects: Array, 'id'> & Partial>, columns: K[]): Promise>[]>; /** * Creates or updates multiple records. For each object, if a record exists with its id, * it will be partially updated, otherwise a new record will be created. * @param objects Array of objects with the column names and the values to be stored in the table. * @returns Array of the persisted records. */ abstract createOrUpdate(objects: Array, 'id'> & Partial>): Promise>[]>; /** * Creates or replaces a single record. If a record exists with the given id, * it will be replaced, otherwise a new record will be created. * @param object Object containing the column names with their values to be persisted in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The full persisted record. */ abstract createOrReplace>(object: Omit, 'id'> & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Creates or replaces a single record. If a record exists with the given id, * it will be replaced, otherwise a new record will be created. * @param object Object containing the column names with their values to be persisted in the table. * @returns The full persisted record. */ abstract createOrReplace(object: Omit, 'id'> & Partial, options?: { ifVersion?: number; }): Promise>>; /** * Creates or replaces a single record. If a record exists with the given id, * it will be replaced, otherwise a new record will be created. * @param id A unique id. * @param object The column names and the values to be persisted. * @param columns Array of columns to be returned. If not specified, first level columns will be returned.
* @returns The full persisted record. */ abstract createOrReplace>(id: Identifier | undefined, object: Omit, 'id'>, columns: K[], options?: { ifVersion?: number; }): Promise>>; /** * Creates or replaces a single record. If a record exists with the given id, * it will be replaced, otherwise a new record will be created. * @param id A unique id. * @param object The column names and the values to be persisted. * @returns The full persisted record. */ abstract createOrReplace(id: Identifier | undefined, object: Omit, 'id'>, options?: { ifVersion?: number; }): Promise>>; /** * Creates or replaces multiple records. For each object, if a record exists with its id, * it will be replaced, otherwise a new record will be created. * @param objects Array of objects with the column names and the values to be stored in the table. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the persisted records. */ abstract createOrReplace>(objects: Array, 'id'> & Partial>, columns: K[]): Promise>[]>; /** * Creates or replaces multiple records. For each object, if a record exists with its id, * it will be replaced, otherwise a new record will be created. * @param objects Array of objects with the column names and the values to be stored in the table. * @returns Array of the persisted records. */ abstract createOrReplace(objects: Array, 'id'> & Partial>): Promise>[]>; /** * Deletes a record given its unique id. * @param object An object with a unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The deleted record, null if the record could not be found. */ abstract delete>(object: Identifiable & Partial>, columns: K[]): Promise> | null>; /** * Deletes a record given its unique id. * @param object An object with a unique id. * @returns The deleted record, null if the record could not be found. */ abstract delete(object: Identifiable & Partial>): Promise> | null>; /** * Deletes a record given a unique id. * @param id The unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The deleted record, null if the record could not be found. */ abstract delete>(id: Identifier, columns: K[]): Promise> | null>; /** * Deletes a record given a unique id. * @param id The unique id. * @returns The deleted record, null if the record could not be found. */ abstract delete(id: Identifier): Promise> | null>; /** * Deletes multiple records given an array of objects with ids. * @param objects An array of objects with unique ids. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the deleted records in order (if a record could not be found null is returned). */ abstract delete>(objects: Array> & Identifiable>, columns: K[]): Promise> | null>>; /** * Deletes multiple records given an array of objects with ids. * @param objects An array of objects with unique ids. * @returns Array of the deleted records in order (if a record could not be found null is returned). */ abstract delete(objects: Array> & Identifiable>): Promise> | null>>; /** * Deletes multiple records given an array of unique ids. * @param objects An array of ids. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the deleted records in order (if a record could not be found null is returned).
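 *
 * Example (illustrative; the ids are hypothetical):
 *
 * ```
 * const deleted = await xata.db.users.delete(['rec_1', 'rec_2'], ['name']);
 * // deleted[i] is null where the corresponding id was not found
 * ```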
*/ abstract delete>(objects: Identifier[], columns: K[]): Promise> | null>>; /** * Deletes multiple records given an array of unique ids. * @param objects An array of ids. * @returns Array of the deleted records in order (if a record could not be found null is returned). */ abstract delete(objects: Identifier[]): Promise> | null>>; /** * Deletes a record given its unique id. * @param object An object with a unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The deleted record. * @throws If the record could not be found. */ abstract deleteOrThrow>(object: Identifiable, columns: K[]): Promise>>; /** * Deletes a record given its unique id. * @param object An object with a unique id. * @returns The deleted record. * @throws If the record could not be found. */ abstract deleteOrThrow(object: Identifiable): Promise>>; /** * Deletes a record given a unique id. * @param id The unique id. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns The deleted record. * @throws If the record could not be found. */ abstract deleteOrThrow>(id: Identifier, columns: K[]): Promise>>; /** * Deletes a record given a unique id. * @param id The unique id. * @returns The deleted record. * @throws If the record could not be found. */ abstract deleteOrThrow(id: Identifier): Promise>>; /** * Deletes multiple records given an array of objects with ids. * @param objects An array of objects with unique ids. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the deleted records in order. * @throws If one or more records could not be found. */ abstract deleteOrThrow>(objects: Array> & Identifiable>, columns: K[]): Promise>>>; /** * Deletes multiple records given an array of objects with ids. * @param objects An array of objects with unique ids. * @returns Array of the deleted records in order. * @throws If one or more records could not be found. */ abstract deleteOrThrow(objects: Array> & Identifiable>): Promise>>>; /** * Deletes multiple records given an array of unique ids. * @param objects An array of ids. * @param columns Array of columns to be returned. If not specified, first level columns will be returned. * @returns Array of the deleted records in order. * @throws If one or more records could not be found. */ abstract deleteOrThrow>(objects: Identifier[], columns: K[]): Promise>>>; /** * Deletes multiple records given an array of unique ids. * @param objects An array of ids. * @returns Array of the deleted records in order. * @throws If one or more records could not be found. */ abstract deleteOrThrow(objects: Identifier[]): Promise>>>; /** * Search for records in the table. * @param query The query to search for. * @param options The options to search with (e.g. fuzziness) * @returns The found records.
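 *
 * A sketch (hypothetical `users` table; the options shown are a subset):
 *
 * ```
 * const { records, totalCount } = await xata.db.users.search('ada', { fuzziness: 1 });
 * ```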
*/ abstract search(query: string, options?: { fuzziness?: FuzzinessExpression; prefix?: PrefixExpression; highlight?: HighlightExpression; filter?: Filter; boosters?: Boosters[]; page?: SearchPageConfig; target?: TargetColumn[]; }): Promise<{ records: SearchXataRecord>[]; } & TotalCount>; /** * Search for vectors in the table. * @param column The column to search for. * @param query The vector to search for similarities. Must have the same dimension as the vector column used. * @param options The options to search with (like: spaceFunction) */ abstract vectorSearch>(column: F, query: number[], options?: { /** * The function used to measure the distance between two points. Can be one of: * `cosineSimilarity`, `l1`, `l2`. The default is `cosineSimilarity`. * * @default cosineSimilarity */ similarityFunction?: string; /** * Number of results to return. * * @default 10 * @maximum 100 * @minimum 1 */ size?: number; filter?: Filter; }): Promise<{ records: SearchXataRecord>[]; } & TotalCount>; /** * Aggregates records in the table. * @param expression The aggregations to perform. * @param filter The filter to apply to the queried records. * @returns The requested aggregations. */ abstract aggregate>>(expression?: Expression, filter?: Filter): Promise>; /** * Experimental: Ask the database to perform a natural language question. */ abstract ask(question: string, options?: AskOptions): Promise; /** * Experimental: Ask the database to perform a natural language question. */ abstract ask(question: string, options: AskOptions): Promise; /** * Experimental: Ask the database to perform a natural language question. */ abstract ask(question: string, options: AskOptions & { onMessage: (message: AskResult) => void; }): void; abstract query(query: Query): Promise>; } declare class RestRepository extends Query> implements Repository { #private; constructor(options: { table: string; db: SchemaPluginResult; pluginOptions: XataPluginOptions; schemaTables?: Table[]; }); create>(object: EditableData & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; create(object: EditableData & Partial, options?: { ifVersion?: number; }): Promise>>; create>(id: Identifier, object: EditableData, columns: K[], options?: { ifVersion?: number; }): Promise>>; create(id: Identifier, object: EditableData, options?: { ifVersion?: number; }): Promise>>; create>(objects: Array & Partial>, columns: K[]): Promise>[]>; create(objects: Array & Partial>): Promise>[]>; read>(id: Identifier, columns: K[]): Promise | null>>; read(id: string): Promise | null>>; read>(ids: ReadonlyArray, columns: K[]): Promise> | null>>; read(ids: ReadonlyArray): Promise> | null>>; read>(object: Identifiable, columns: K[]): Promise | null>>; read(object: Identifiable): Promise | null>>; read>(objects: Identifiable[], columns: K[]): Promise> | null>>; read(objects: Identifiable[]): Promise> | null>>; readOrThrow>(id: Identifier, columns: K[]): Promise>>; readOrThrow(id: Identifier): Promise>>; readOrThrow>(ids: ReadonlyArray, columns: K[]): Promise>>>; readOrThrow(ids: ReadonlyArray): Promise>>>; readOrThrow>(object: Identifiable, columns: K[]): Promise>>; readOrThrow(object: Identifiable): Promise>>; readOrThrow>(objects: Identifiable[], columns: K[]): Promise>>>; readOrThrow(objects: Identifiable[]): Promise>>>; update>(object: Partial> & Identifiable, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; update(object: Partial> & Identifiable, options?: { ifVersion?: number; }): Promise> | null>; update>(id: Identifier, object: 
Partial>, columns: K[], options?: { ifVersion?: number; }): Promise> | null>; update(id: Identifier, object: Partial>, options?: { ifVersion?: number; }): Promise> | null>; update>(objects: Array> & Identifiable>, columns: K[]): Promise> | null>>; update(objects: Array> & Identifiable>): Promise> | null>>; updateOrThrow>(object: Partial> & Identifiable, columns: K[], options?: { ifVersion?: number; }): Promise>>; updateOrThrow(object: Partial> & Identifiable, options?: { ifVersion?: number; }): Promise>>; updateOrThrow>(id: Identifier, object: Partial>, columns: K[], options?: { ifVersion?: number; }): Promise>>; updateOrThrow(id: Identifier, object: Partial>, options?: { ifVersion?: number; }): Promise>>; updateOrThrow>(objects: Array> & Identifiable>, columns: K[]): Promise>[]>; updateOrThrow(objects: Array> & Identifiable>): Promise>[]>; createOrUpdate>(object: EditableData & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; createOrUpdate(object: EditableData & Partial, options?: { ifVersion?: number; }): Promise>>; createOrUpdate>(id: Identifier, object: Omit, 'id'>, columns: K[], options?: { ifVersion?: number; }): Promise>>; createOrUpdate(id: Identifier, object: Omit, 'id'>, options?: { ifVersion?: number; }): Promise>>; createOrUpdate>(objects: Array & Partial>, columns: K[]): Promise>[]>; createOrUpdate(objects: Array & Partial>): Promise>[]>; createOrReplace>(object: EditableData & Partial, columns: K[], options?: { ifVersion?: number; }): Promise>>; createOrReplace(object: EditableData & Partial, options?: { ifVersion?: number; }): Promise>>; createOrReplace>(id: Identifier | undefined, object: Omit, 'id'>, columns: K[], options?: { ifVersion?: number; }): Promise>>; createOrReplace(id: Identifier | undefined, object: Omit, 'id'>, options?: { ifVersion?: number; }): Promise>>; createOrReplace>(objects: Array & Partial>, columns: K[]): Promise>[]>; createOrReplace(objects: Array & Partial>): Promise>[]>; delete>(object: Identifiable, columns: K[]): Promise> | null>; delete(object: Identifiable): Promise> | null>; delete>(id: Identifier, columns: K[]): Promise> | null>; delete(id: Identifier): Promise> | null>; delete>(objects: Array> & Identifiable>, columns: K[]): Promise> | null>>; delete(objects: Array> & Identifiable>): Promise> | null>>; delete>(objects: Identifier[], columns: K[]): Promise> | null>>; delete(objects: Identifier[]): Promise> | null>>; deleteOrThrow>(object: Identifiable, columns: K[]): Promise>>; deleteOrThrow(object: Identifiable): Promise>>; deleteOrThrow>(id: Identifier, columns: K[]): Promise>>; deleteOrThrow(id: Identifier): Promise>>; deleteOrThrow>(objects: Array> & Identifiable>, columns: K[]): Promise>>>; deleteOrThrow(objects: Array> & Identifiable>): Promise>>>; deleteOrThrow>(objects: Identifier[], columns: K[]): Promise>>>; deleteOrThrow(objects: Identifier[]): Promise>>>; search(query: string, options?: { fuzziness?: FuzzinessExpression; prefix?: PrefixExpression; highlight?: HighlightExpression; filter?: Filter; boosters?: Boosters[]; page?: SearchPageConfig; target?: TargetColumn[]; }): Promise<{ records: any; totalCount: number; }>; vectorSearch>(column: F, query: number[], options?: { similarityFunction?: string | undefined; size?: number | undefined; filter?: Filter | undefined; } | undefined): Promise<{ records: SearchXataRecord>[]; } & TotalCount>; aggregate>>(aggs?: Expression, filter?: Filter): Promise; query(query: Query): Promise>; summarizeTable(query: Query, summaries?: Dictionary>, summariesFilter?: 
FilterExpression): Promise<{ summaries: Record[]; }>; ask(question: string, options?: AskOptions & { onMessage?: (message: AskResult) => void; }): any; } type BaseSchema = { name: string; columns: readonly ({ name: string; type: Column['type']; notNull?: boolean; } | { name: string; type: 'link'; link: { table: string; }; })[]; }; type SchemaInference = T extends never[] ? Record> : T extends readonly unknown[] ? T[number] extends { name: string; columns: readonly unknown[]; } ? { [K in T[number]['name']]: TableType; } : never : never; type TableType = Tables & { name: TableName; } extends infer Table ? Table extends { name: string; columns: infer Columns; } ? Columns extends readonly unknown[] ? Columns[number] extends { name: string; type: string; } ? Identifiable & UnionToIntersection; }>> : never : never : never : never; type PropertyType = Properties & { name: PropertyName; } extends infer Property ? Property extends { name: string; type: infer Type; link?: { table: infer LinkedTable; }; notNull?: infer NotNull; } ? NotNull extends true ? { [K in PropertyName]: InnerType; } : { [K in PropertyName]?: InnerType | null; } : never : never; type InnerType = Type extends 'string' | 'text' | 'email' | 'character' | 'varchar' | 'character varying' | `varchar(${number})` | `character(${number})` ? string : Type extends 'int' | 'float' | 'bigint' | 'int8' | 'integer' | 'int4' | 'smallint' | 'double precision' | 'float8' | 'real' | 'numeric' ? number : Type extends 'bool' | 'boolean' ? boolean : Type extends 'datetime' | 'timestamptz' ? Date : Type extends 'multiple' | 'text[]' ? string[] : Type extends 'vector' | 'real[]' | 'float[]' | 'double precision[]' | 'float8[]' | 'numeric[]' ? number[] : Type extends 'int[]' | 'bigint[]' | 'int8[]' | 'integer[]' | 'int4[]' | 'smallint[]' ? number[] : Type extends 'bool[]' | 'boolean[]' ? boolean[] : Type extends 'file' | 'xata_file' ? XataFile : Type extends 'file[]' | 'xata_file_array' ? XataArrayFile[] : Type extends 'json' | 'jsonb' ? JSONValue : Type extends 'link' ? TableType & XataRecord : string; /** * Operator to restrict results to only values that are greater than the given value. */ declare const greaterThan: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are greater than the given value. */ declare const gt: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are greater than or equal to the given value. */ declare const greaterThanEquals: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are greater than or equal to the given value. */ declare const greaterEquals: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are greater than or equal to the given value. */ declare const gte: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are greater than or equal to the given value. */ declare const ge: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are lower than the given value. */ declare const lessThan: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are lower than the given value. */ declare const lt: (value: T) => ComparableTypeFilter; /** * Operator to restrict results to only values that are lower than or equal to the given value. 
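 *
 * @example
 * Editor's sketch, assuming a `users` table with a numeric `age` column:
 * ```ts
 * const minors = await xata.db.users.filter('age', lessThanEquals(17)).getMany();
 * ```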
 */
declare const lessThanEquals: <T extends ComparableType>(value: T) => ComparableTypeFilter<T>;
/**
 * Operator to restrict results to only values that are lower than or equal to the given value.
 */
declare const lessEquals: <T extends ComparableType>(value: T) => ComparableTypeFilter<T>;
/**
 * Operator to restrict results to only values that are lower than or equal to the given value.
 */
declare const lte: <T extends ComparableType>(value: T) => ComparableTypeFilter<T>;
/**
 * Operator to restrict results to only values that are lower than or equal to the given value.
 */
declare const le: <T extends ComparableType>(value: T) => ComparableTypeFilter<T>;
/**
 * Operator to restrict results to only values that are not null.
 */
declare const exists: <T>(column?: FilterColumns<T>) => ExistanceFilter<T>;
/**
 * Operator to restrict results to only values that are null.
 */
declare const notExists: <T>(column?: FilterColumns<T>) => ExistanceFilter<T>;
/**
 * Operator to restrict results to only values that start with the given prefix.
 */
declare const startsWith: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results to only values that end with the given suffix.
 */
declare const endsWith: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results to only values that match the given pattern.
 */
declare const pattern: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results to only values that match the given pattern (case insensitive).
 */
declare const iPattern: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results to only values that are equal to the given value.
 */
declare const is: <T>(value: T) => PropertyFilter<T>;
/**
 * Operator to restrict results to only values that are equal to the given value.
 */
declare const equals: <T>(value: T) => PropertyFilter<T>;
/**
 * Operator to restrict results to only values that are not equal to the given value.
 */
declare const isNot: <T>(value: T) => PropertyFilter<T>;
/**
 * Operator to restrict results to only values that contain the given value.
 */
declare const contains: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results to only values that contain the given value (case insensitive).
 */
declare const iContains: (value: string) => StringTypeFilter;
/**
 * Operator to restrict results if some array items match the predicate.
 */
declare const includes: <T>(value: T) => ArrayFilter<T>;
/**
 * Operator to restrict results if all array items match the predicate.
 */
declare const includesAll: <T>(value: T) => ArrayFilter<T>;
/**
 * Operator to restrict results if no array items match the predicate.
 */
declare const includesNone: <T>(value: T) => ArrayFilter<T>;
/**
 * Operator to restrict results if any array item matches the predicate.
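 *
 * @example
 * Editor's sketch, assuming a `posts` table with a string-array `labels` column:
 * ```ts
 * const flagged = await xata.db.posts.filter('labels', includesAny('bug')).getMany();
 * ```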
*/ declare const includesAny: (value: T) => ArrayFilter; type SchemaDefinition = { table: string; }; type SchemaPluginResult> = { [Key in keyof Schemas]: Repository; }; declare class SchemaPlugin> extends XataPlugin { #private; constructor(); build(pluginOptions: XataPluginOptions): SchemaPluginResult; } type BinaryFile = string | Blob | ArrayBuffer | XataFile | Promise; type FilesPluginResult> = { download: >(location: DownloadDestination) => Promise; upload: >(location: UploadDestination, file: BinaryFile, options?: { mediaType?: string; }) => Promise; delete: >(location: DownloadDestination) => Promise; }; type UploadDestination, Tables extends StringKeys> = Values<{ [Model in GetArrayInnerType>]: { table: Model; column: ColumnsByValue; record: string; } | { table: Model; column: ColumnsByValue; record: string; fileId?: string; }; }>; type DownloadDestination, Tables extends StringKeys> = Values<{ [Model in GetArrayInnerType>]: { table: Model; column: ColumnsByValue; record: string; } | { table: Model; column: ColumnsByValue; record: string; fileId: string; }; }>; declare class FilesPlugin> extends XataPlugin { build(pluginOptions: XataPluginOptions): FilesPluginResult; } type SQLQueryParams = { /** * The SQL statement to execute. * @example * ```ts * const { records } = await xata.sql({ * statement: `SELECT * FROM teams WHERE name = $1`, * params: ['A name'] * }); * ``` * * Be careful when using this with user input and use parametrized statements to avoid SQL injection. */ statement: string; /** * The parameters to pass to the SQL statement. */ params?: T; /** * The consistency level to use when executing the query. */ consistency?: 'strong' | 'eventual'; /** * The response type to use when executing the query. */ responseType?: 'json' | 'array'; }; type SQLQuery = TemplateStringsArray | SQLQueryParams; type SQLResponseType = 'json' | 'array'; type SQLQueryResultJSON = { /** * The records returned by the query. */ records: T[]; /** * The columns metadata returned by the query. */ columns: Array<{ name: string; type: string; }>; /** * Optional warning message returned by the query. */ warning?: string; }; type SQLQueryResultArray = { /** * The records returned by the query. */ rows: any[][]; /** * The columns metadata returned by the query. */ columns: Array<{ name: string; type: string; }>; /** * Optional warning message returned by the query. */ warning?: string; }; type SQLQueryResult = Mode extends 'json' ? SQLQueryResultJSON : Mode extends 'array' ? SQLQueryResultArray : never; type SQLPluginFunction = (query: Query, ...parameters: any[]) => Promise ? Query['responseType'] extends SQLResponseType ? NonNullable : 'json' : 'json'>>; type SQLPluginResult = SQLPluginFunction & { /** * Connection string to use when connecting to the database. * It includes the workspace, region, database and branch. * Connects with the same credentials as the Xata client. 
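 *
 * @example
 * Editor's sketch: the string can be handed to any standard Postgres client,
 * e.g. node-postgres (`pg`), which is not part of these declarations:
 * ```ts
 * import { Client } from 'pg';
 * const client = new Client({ connectionString: xata.sql.connectionString });
 * await client.connect();
 * ```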
*/ connectionString: string; }; declare class SQLPlugin extends XataPlugin { build(pluginOptions: XataPluginOptions): SQLPluginResult; } type TransactionOperation, Tables extends StringKeys> = { insert: Values<{ [Model in GetArrayInnerType>]: { table: Model; } & InsertTransactionOperation; }>; } | { update: Values<{ [Model in GetArrayInnerType>]: { table: Model; } & UpdateTransactionOperation; }>; } | { delete: Values<{ [Model in GetArrayInnerType>]: { table: Model; } & DeleteTransactionOperation; }>; } | { get: Values<{ [Model in GetArrayInnerType>]: { table: Model; } & GetTransactionOperation; }>; }; type InsertTransactionOperation = { record: Partial>; ifVersion?: number; createOnly?: boolean; }; type UpdateTransactionOperation = { id: string; fields: Partial>; ifVersion?: number; upsert?: boolean; }; type DeleteTransactionOperation = { id: string; failIfMissing?: boolean; }; type GetTransactionOperation = { id: string; columns?: SelectableColumn[]; }; type TransactionOperationSingleResult, Tables extends StringKeys, Operation extends TransactionOperation> = Operation extends { insert: { table: Tables; record: { id: infer Id; }; }; } ? { operation: 'insert'; id: Id; rows: number; } : Operation extends { insert: { table: Tables; }; } ? { operation: 'insert'; id: string; rows: number; } : Operation extends { update: { table: Tables; id: infer Id; }; } ? { operation: 'update'; id: Id; rows: number; } : Operation extends { delete: { table: Tables; }; } ? { operation: 'delete'; rows: number; } : Operation extends { get: { table: infer Table; }; } ? Table extends Tables ? { operation: 'get'; columns: SelectedPick; } : never : never; type TransactionOperationResults, Table extends StringKeys, Operations extends TransactionOperation[]> = Operations extends [infer Head, ...infer Rest] ? Head extends TransactionOperation ? Rest extends TransactionOperation[] ? [TransactionOperationSingleResult, ...TransactionOperationResults] : never : never : []; type TransactionResults, Table extends StringKeys, Operations extends TransactionOperation[]> = { results: TransactionOperationResults; }; type TransactionPluginResult> = { run: , Operations extends TransactionOperation[]>(operations: Narrow) => Promise>; }; declare class TransactionPlugin> extends XataPlugin { build(pluginOptions: XataPluginOptions): TransactionPluginResult; } type BaseClientOptions = { fetch?: FetchImpl; host?: HostProvider; apiKey?: string; databaseURL?: string; branch?: string; cache?: CacheImpl; trace?: TraceFunction; enableBrowser?: boolean; clientName?: string; xataAgentExtra?: Record; }; declare const buildClient: = {}>(plugins?: Plugins) => ClientConstructor; interface ClientConstructor> { new = {}>(options?: Partial, schemaTables?: readonly BaseSchema[]): Omit<{ db: Awaited['build']>>; search: Awaited['build']>>; transactions: Awaited['build']>>; sql: Awaited>; files: Awaited['build']>>; }, keyof Plugins> & { [Key in StringKeys>]: Awaited[Key]['build']>>; } & { getConfig(): Promise<{ databaseURL: string; branch: string; }>; }; } declare const BaseClient_base: ClientConstructor<{}>; declare class BaseClient extends BaseClient_base> { } declare class Serializer { classes: Record; add(clazz: any): void; toJSON(data: T): string; fromJSON(json: string): T; } type SerializedString = string | (string & { __type: T; }); type DeserializedType = T extends SerializedString ? 
U : T; declare const serialize: (data: T) => SerializedString; declare const deserialize: >(json: T) => SerializerResult>; type SerializerResult = T extends XataRecord ? Identifiable & Omit<{ [K in keyof T]: SerializerResult; }, keyof XataRecord> : T extends any[] ? SerializerResult[] : T; declare function getDatabaseURL(): string | undefined; declare function getAPIKey(): string | undefined; declare function getBranch(): string | undefined; declare function buildPreviewBranchName({ org, branch }: { org: string; branch: string; }): string; declare function getPreviewBranch(): string | undefined; declare class XataError extends Error { readonly status: number; constructor(message: string, status: number); } export { type AcceptWorkspaceMemberInviteError, type AcceptWorkspaceMemberInvitePathParams, type AcceptWorkspaceMemberInviteVariables, type AdaptAllTablesError, type AdaptAllTablesPathParams, type AdaptAllTablesVariables, type AdaptTableError, type AdaptTablePathParams, type AdaptTableVariables, type AddGitBranchesEntryError, type AddGitBranchesEntryPathParams, type AddGitBranchesEntryRequestBody, type AddGitBranchesEntryResponse, type AddGitBranchesEntryVariables, type AddTableColumnError, type AddTableColumnPathParams, type AddTableColumnVariables, type AggregateTableError, type AggregateTablePathParams, type AggregateTableRequestBody, type AggregateTableVariables, type ApiExtraProps, type ApplyBranchSchemaEditError, type ApplyBranchSchemaEditPathParams, type ApplyBranchSchemaEditRequestBody, type ApplyBranchSchemaEditVariables, type ApplyMigrationError, type ApplyMigrationPathParams, type ApplyMigrationRequestBody, type ApplyMigrationVariables, type AskOptions, type AskResult, type AskTableError, type AskTablePathParams, type AskTableRequestBody, type AskTableResponse, type AskTableSessionError, type AskTableSessionPathParams, type AskTableSessionRequestBody, type AskTableSessionResponse, type AskTableSessionVariables, type AskTableVariables, BaseClient, type BaseClientOptions, type BaseData, type BaseSchema, type BinaryFile, type BranchTransactionError, type BranchTransactionPathParams, type BranchTransactionRequestBody, type BranchTransactionVariables, Buffer, type BulkInsertTableRecordsError, type BulkInsertTableRecordsPathParams, type BulkInsertTableRecordsQueryParams, type BulkInsertTableRecordsRequestBody, type BulkInsertTableRecordsVariables, type CacheImpl, type CancelWorkspaceMemberInviteError, type CancelWorkspaceMemberInvitePathParams, type CancelWorkspaceMemberInviteVariables, type ClientConstructor, type ColumnsByValue, type CompareBranchSchemasError, type CompareBranchSchemasPathParams, type CompareBranchSchemasRequestBody, type CompareBranchSchemasVariables, type CompareBranchWithUserSchemaError, type CompareBranchWithUserSchemaPathParams, type CompareBranchWithUserSchemaRequestBody, type CompareBranchWithUserSchemaVariables, type CompareMigrationRequestError, type CompareMigrationRequestPathParams, type CompareMigrationRequestVariables, type CopyBranchError, type CopyBranchPathParams, type CopyBranchRequestBody, type CopyBranchVariables, type CreateBranchError, type CreateBranchPathParams, type CreateBranchQueryParams, type CreateBranchRequestBody, type CreateBranchResponse, type CreateBranchVariables, type CreateClusterError, type CreateClusterPathParams, type CreateClusterVariables, type CreateDatabaseError, type CreateDatabasePathParams, type CreateDatabaseRequestBody, type CreateDatabaseResponse, type CreateDatabaseVariables, type CreateMigrationRequestError, type 
CreateMigrationRequestPathParams, type CreateMigrationRequestRequestBody, type CreateMigrationRequestResponse, type CreateMigrationRequestVariables, type CreateTableError, type CreateTablePathParams, type CreateTableResponse, type CreateTableVariables, type CreateUserAPIKeyError, type CreateUserAPIKeyPathParams, type CreateUserAPIKeyResponse, type CreateUserAPIKeyVariables, type CreateWorkspaceError, type CreateWorkspaceVariables, type CursorNavigationOptions, type DeleteBranchError, type DeleteBranchPathParams, type DeleteBranchResponse, type DeleteBranchVariables, type DeleteColumnError, type DeleteColumnPathParams, type DeleteColumnVariables, type DeleteDatabaseError, type DeleteDatabaseGithubSettingsError, type DeleteDatabaseGithubSettingsPathParams, type DeleteDatabaseGithubSettingsVariables, type DeleteDatabasePathParams, type DeleteDatabaseResponse, type DeleteDatabaseVariables, type DeleteFileError, type DeleteFileItemError, type DeleteFileItemPathParams, type DeleteFileItemVariables, type DeleteFilePathParams, type DeleteFileVariables, type DeleteOAuthAccessTokenError, type DeleteOAuthAccessTokenPathParams, type DeleteOAuthAccessTokenVariables, type DeleteRecordError, type DeleteRecordPathParams, type DeleteRecordQueryParams, type DeleteRecordVariables, type DeleteTableError, type DeleteTablePathParams, type DeleteTableResponse, type DeleteTableVariables, type DeleteTransactionOperation, type DeleteUserAPIKeyError, type DeleteUserAPIKeyPathParams, type DeleteUserAPIKeyVariables, type DeleteUserError, type DeleteUserOAuthClientError, type DeleteUserOAuthClientPathParams, type DeleteUserOAuthClientVariables, type DeleteUserVariables, type DeleteWorkspaceError, type DeleteWorkspacePathParams, type DeleteWorkspaceVariables, type DeserializedType, type DownloadDestination, type EditableData, type ExecuteBranchMigrationPlanError, type ExecuteBranchMigrationPlanPathParams, type ExecuteBranchMigrationPlanRequestBody, type ExecuteBranchMigrationPlanVariables, type FetchImpl, FetcherError, type FetcherExtraProps, type FileAccessError, type FileAccessPathParams, type FileAccessQueryParams, type FileAccessVariables, type FileUploadError, type FileUploadPathParams, type FileUploadQueryParams, type FileUploadVariables, FilesPlugin, type FilesPluginResult, type GetAuthorizationCodeError, type GetAuthorizationCodeQueryParams, type GetAuthorizationCodeVariables, type GetBranchDetailsError, type GetBranchDetailsPathParams, type GetBranchDetailsVariables, type GetBranchListError, type GetBranchListPathParams, type GetBranchListVariables, type GetBranchMetadataError, type GetBranchMetadataPathParams, type GetBranchMetadataVariables, type GetBranchMigrationHistoryError, type GetBranchMigrationHistoryPathParams, type GetBranchMigrationHistoryRequestBody, type GetBranchMigrationHistoryResponse, type GetBranchMigrationHistoryVariables, type GetBranchMigrationJobStatusError, type GetBranchMigrationJobStatusPathParams, type GetBranchMigrationJobStatusVariables, type GetBranchMigrationPlanError, type GetBranchMigrationPlanPathParams, type GetBranchMigrationPlanVariables, type GetBranchSchemaHistoryError, type GetBranchSchemaHistoryPathParams, type GetBranchSchemaHistoryRequestBody, type GetBranchSchemaHistoryResponse, type GetBranchSchemaHistoryVariables, type GetBranchStatsError, type GetBranchStatsPathParams, type GetBranchStatsResponse, type GetBranchStatsVariables, type GetClusterError, type GetClusterPathParams, type GetClusterVariables, type GetColumnError, type GetColumnPathParams, type 
GetColumnVariables, type GetDatabaseGithubSettingsError, type GetDatabaseGithubSettingsPathParams, type GetDatabaseGithubSettingsVariables, type GetDatabaseListError, type GetDatabaseListPathParams, type GetDatabaseListVariables, type GetDatabaseMetadataError, type GetDatabaseMetadataPathParams, type GetDatabaseMetadataVariables, type GetDatabaseSettingsError, type GetDatabaseSettingsPathParams, type GetDatabaseSettingsVariables, type GetFileError, type GetFileItemError, type GetFileItemPathParams, type GetFileItemVariables, type GetFilePathParams, type GetFileVariables, type GetGitBranchesMappingError, type GetGitBranchesMappingPathParams, type GetGitBranchesMappingVariables, type GetMigrationHistoryError, type GetMigrationHistoryPathParams, type GetMigrationHistoryVariables, type GetMigrationJobStatusError, type GetMigrationJobStatusPathParams, type GetMigrationJobStatusVariables, type GetMigrationRequestError, type GetMigrationRequestIsMergedError, type GetMigrationRequestIsMergedPathParams, type GetMigrationRequestIsMergedResponse, type GetMigrationRequestIsMergedVariables, type GetMigrationRequestPathParams, type GetMigrationRequestVariables, type GetRecordError, type GetRecordPathParams, type GetRecordQueryParams, type GetRecordVariables, type GetSchemaError, type GetSchemaPathParams, type GetSchemaResponse, type GetSchemaVariables, type GetTableColumnsError, type GetTableColumnsPathParams, type GetTableColumnsResponse, type GetTableColumnsVariables, type GetTableSchemaError, type GetTableSchemaPathParams, type GetTableSchemaResponse, type GetTableSchemaVariables, type GetTransactionOperation, type GetUserAPIKeysError, type GetUserAPIKeysResponse, type GetUserAPIKeysVariables, type GetUserError, type GetUserOAuthAccessTokensError, type GetUserOAuthAccessTokensResponse, type GetUserOAuthAccessTokensVariables, type GetUserOAuthClientsError, type GetUserOAuthClientsResponse, type GetUserOAuthClientsVariables, type GetUserVariables, type GetWorkspaceError, type GetWorkspaceMembersListError, type GetWorkspaceMembersListPathParams, type GetWorkspaceMembersListVariables, type GetWorkspacePathParams, type GetWorkspaceSettingsError, type GetWorkspaceSettingsPathParams, type GetWorkspaceSettingsVariables, type GetWorkspaceVariables, type GetWorkspacesListError, type GetWorkspacesListResponse, type GetWorkspacesListVariables, type GrantAuthorizationCodeError, type GrantAuthorizationCodeVariables, type HostProvider, type Identifiable, type ImageTransformations, type InsertRecordError, type InsertRecordPathParams, type InsertRecordQueryParams, type InsertRecordVariables, type InsertRecordWithIDError, type InsertRecordWithIDPathParams, type InsertRecordWithIDQueryParams, type InsertRecordWithIDVariables, type InsertTransactionOperation, type InviteWorkspaceMemberError, type InviteWorkspaceMemberPathParams, type InviteWorkspaceMemberRequestBody, type InviteWorkspaceMemberVariables, type JSONData, type KeywordAskOptions, type Link, type ListClustersError, type ListClustersPathParams, type ListClustersQueryParams, type ListClustersVariables, type ListMigrationRequestsCommitsError, type ListMigrationRequestsCommitsPathParams, type ListMigrationRequestsCommitsRequestBody, type ListMigrationRequestsCommitsResponse, type ListMigrationRequestsCommitsVariables, type ListRegionsError, type ListRegionsPathParams, type ListRegionsVariables, type MergeMigrationRequestError, type MergeMigrationRequestPathParams, type MergeMigrationRequestVariables, type OffsetNavigationOptions, operationsByTag as Operations, 
PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, type Paginable, type PaginationQueryMeta, type PreviewBranchSchemaEditError, type PreviewBranchSchemaEditPathParams, type PreviewBranchSchemaEditRequestBody, type PreviewBranchSchemaEditResponse, type PreviewBranchSchemaEditVariables, type PushBranchMigrationsError, type PushBranchMigrationsPathParams, type PushBranchMigrationsRequestBody, type PushBranchMigrationsVariables, type PutFileError, type PutFileItemError, type PutFileItemPathParams, type PutFileItemVariables, type PutFilePathParams, type PutFileVariables, Query, type QueryMigrationRequestsError, type QueryMigrationRequestsPathParams, type QueryMigrationRequestsRequestBody, type QueryMigrationRequestsResponse, type QueryMigrationRequestsVariables, type QueryTableError, type QueryTablePathParams, type QueryTableRequestBody, type QueryTableVariables, RecordArray, RecordColumnTypes, type RemoveGitBranchesEntryError, type RemoveGitBranchesEntryPathParams, type RemoveGitBranchesEntryQueryParams, type RemoveGitBranchesEntryVariables, type RemoveWorkspaceMemberError, type RemoveWorkspaceMemberPathParams, type RemoveWorkspaceMemberVariables, type RenameDatabaseError, type RenameDatabasePathParams, type RenameDatabaseRequestBody, type RenameDatabaseVariables, Repository, type ResendWorkspaceMemberInviteError, type ResendWorkspaceMemberInvitePathParams, type ResendWorkspaceMemberInviteVariables, type ResolveBranchError, type ResolveBranchPathParams, type ResolveBranchQueryParams, type ResolveBranchResponse, type ResolveBranchVariables, responses as Responses, RestRepository, SQLPlugin, type SQLPluginResult, type SQLQuery, type SQLQueryParams, type SQLQueryResult, type SchemaDefinition, type SchemaInference, SchemaPlugin, type SchemaPluginResult, schemas as Schemas, type SearchBranchError, type SearchBranchPathParams, type SearchBranchRequestBody, type SearchBranchVariables, type SearchOptions, SearchPlugin, type SearchPluginResult, type SearchTableError, type SearchTablePathParams, type SearchTableRequestBody, type SearchTableVariables, type SearchXataRecord, type SelectableColumn, type SelectableColumnWithObjectNotation, type SelectedPick, type SerializedString, Serializer, type SerializerResult, type SetTableSchemaError, type SetTableSchemaPathParams, type SetTableSchemaRequestBody, type SetTableSchemaVariables, SimpleCache, type SimpleCacheOptions, type SqlQueryError, type SqlQueryPathParams, type SqlQueryRequestBody, type SqlQueryVariables, type SummarizeTableError, type SummarizeTablePathParams, type SummarizeTableRequestBody, type SummarizeTableVariables, type TotalCount, type TransactionOperation, TransactionPlugin, type TransactionPluginResult, type TransactionResults, type UpdateBranchMetadataError, type UpdateBranchMetadataPathParams, type UpdateBranchMetadataVariables, type UpdateBranchSchemaError, type UpdateBranchSchemaPathParams, type UpdateBranchSchemaVariables, type UpdateClusterError, type UpdateClusterPathParams, type UpdateClusterVariables, type UpdateColumnError, type UpdateColumnPathParams, type UpdateColumnRequestBody, type UpdateColumnVariables, type UpdateDatabaseGithubSettingsError, type UpdateDatabaseGithubSettingsPathParams, type UpdateDatabaseGithubSettingsVariables, type UpdateDatabaseMetadataError, type UpdateDatabaseMetadataPathParams, type UpdateDatabaseMetadataRequestBody, type UpdateDatabaseMetadataVariables, type UpdateDatabaseSettingsError, type UpdateDatabaseSettingsPathParams, type 
UpdateDatabaseSettingsRequestBody, type UpdateDatabaseSettingsVariables, type UpdateMigrationRequestError, type UpdateMigrationRequestPathParams, type UpdateMigrationRequestRequestBody, type UpdateMigrationRequestVariables, type UpdateOAuthAccessTokenError, type UpdateOAuthAccessTokenPathParams, type UpdateOAuthAccessTokenRequestBody, type UpdateOAuthAccessTokenVariables, type UpdateRecordWithIDError, type UpdateRecordWithIDPathParams, type UpdateRecordWithIDQueryParams, type UpdateRecordWithIDVariables, type UpdateTableError, type UpdateTablePathParams, type UpdateTableRequestBody, type UpdateTableVariables, type UpdateTransactionOperation, type UpdateUserError, type UpdateUserVariables, type UpdateWorkspaceError, type UpdateWorkspaceMemberInviteError, type UpdateWorkspaceMemberInvitePathParams, type UpdateWorkspaceMemberInviteRequestBody, type UpdateWorkspaceMemberInviteVariables, type UpdateWorkspaceMemberRoleError, type UpdateWorkspaceMemberRolePathParams, type UpdateWorkspaceMemberRoleRequestBody, type UpdateWorkspaceMemberRoleVariables, type UpdateWorkspacePathParams, type UpdateWorkspaceSettingsError, type UpdateWorkspaceSettingsPathParams, type UpdateWorkspaceSettingsRequestBody, type UpdateWorkspaceSettingsVariables, type UpdateWorkspaceVariables, type UploadDestination, type UpsertRecordWithIDError, type UpsertRecordWithIDPathParams, type UpsertRecordWithIDQueryParams, type UpsertRecordWithIDVariables, type ValueAtColumn, type VectorAskOptions, type VectorSearchTableError, type VectorSearchTablePathParams, type VectorSearchTableRequestBody, type VectorSearchTableVariables, XataApiClient, type XataApiClientOptions, XataApiPlugin, type XataArrayFile, XataError, XataFile, XataPlugin, type XataPluginOptions, type XataRecord, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, 
isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
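/**
 * Usage sketch (editor's addition; the package name `@xata.io/client`, the
 * `teams` table, and the result shapes are assumptions for illustration):
 * ```ts
 * import { buildClient } from '@xata.io/client';
 *
 * const XataClient = buildClient();
 * const xata = new XataClient({ apiKey: process.env.XATA_API_KEY });
 *
 * // sql accepts tagged templates as well as { statement, params } objects
 * const { records } = await xata.sql<{ name: string }>`SELECT name FROM teams LIMIT 10`;
 *
 * // transactions.run executes a list of operations atomically
 * const { results } = await xata.transactions.run([
 *   { insert: { table: 'teams', record: { name: 'Design' } } },
 *   { delete: { table: 'teams', id: 'rec_old' } }
 * ]);
 * ```
 */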