///
import * as v1 from "./v1";
import { Init } from "./request";
/**
 * Configuration accepted by the {@link OpenAI} constructor.
 */
export type ApiConfig = {
/** Secret API key used to authenticate requests. */
apiKey: string;
/** Optional organization identifier forwarded with each request. */
organization?: string;
/** Optional override for the API base URL. */
endpoint?: string;
/** Extra per-request options merged into every call. */
options?: ApiInit;
};
export type ApiInit = Omit;
/** Supported API version tags. Only `v1()` is declared below — presumably "v2" is reserved for future use; confirm. */
export type ApiVersion = "v1" | "v2";
/**
 * Low-level request dispatcher (produced by `makeClient`): issues a request
 * to `path` with the given options; `direct` presumably bypasses response
 * parsing — confirm against the implementation.
 *
 * NOTE(review): the source read a bare `Promise` (missing type argument),
 * which does not compile under `noImplicitAny`/strict settings. Restored as
 * `Promise<Response>` (the raw fetch response); confirm against ./request.
 */
export type ApiClient = (path: string, options: Init, direct?: boolean) => Promise<Response>;
export declare class OpenAI {
private config;
constructor(config: ApiConfig);
v1(): {
models: {
list: () => Promise<{
id: string;
object: "model";
owned_by: string;
permission: {
id: string;
object: string;
created: number;
allow_create_engine: boolean;
allow_sampling: boolean;
allow_logprobs: boolean;
allow_search_indices: boolean;
allow_view: boolean;
allow_fine_tuning: boolean;
organization: string;
group: string | null;
is_blocking: boolean;
}[];
root: string;
parent: string | null;
}[]>;
retrieve: (id: string) => Promise<{
id: string;
object: "model";
owned_by: string;
permission: {
id: string;
object: string;
created: number;
allow_create_engine: boolean;
allow_sampling: boolean;
allow_logprobs: boolean;
allow_search_indices: boolean;
allow_view: boolean;
allow_fine_tuning: boolean;
organization: string;
group: string | null;
is_blocking: boolean;
}[];
root: string;
parent: string | null;
}>;
delete: (id: string) => Promise<{
id: string;
object: "model";
deleted: boolean;
}>;
};
completions: {
create: (data: {
model: string;
prompt: string;
suffix?: string | undefined;
max_tokens?: number | undefined;
temperature?: number | undefined;
top_p?: number | undefined;
n?: number | undefined;
stream?: boolean | undefined;
logprobs?: number | undefined;
echo?: boolean | undefined;
stop?: string[] | undefined;
presence_penalty?: number | undefined;
frequency_penalty?: number | undefined;
best_of?: number | undefined;
logit_bias?: {
[key: string]: number;
} | undefined;
user?: string | undefined;
}) => Promise<{
id: string;
object: "text_completion";
created: number;
model: string;
choices: {
text: string;
index: number;
logprobs: {
[key: string]: number;
};
finish_reason: string;
}[];
usage: v1.Usage;
}>;
};
chat: {
create: (data: {
model: string;
messages: {
role: "user" | "system" | "assistant";
content: string;
}[];
temperature?: number | undefined;
top_p?: number | undefined;
n?: number | undefined;
stream?: boolean | undefined;
stop?: string[] | undefined;
max_tokens?: number | undefined;
presence_penalty?: number | undefined;
frequency_penalty?: number | undefined;
logit_bias?: {
[key: string]: number;
} | undefined;
user?: string | undefined;
}) => Promise<{
id: string;
object: "chat.completion";
created: number;
choices: {
index: number;
message: {
role: "user" | "system" | "assistant";
content: string;
};
finish_reason: string;
}[];
usage: v1.Usage;
}>;
};
edits: {
create: (data: {
model: string;
input?: string | undefined;
instruction: string;
n?: number | undefined;
temperature?: number | undefined;
top_p?: number | undefined;
}) => Promise<{
object: "edit";
created: number;
choices: {
index: number;
text: string;
}[];
usage: v1.Usage;
}>;
};
images: {
create: (data: {
prompt: string;
n?: number | undefined;
size?: ("256x256" | "512x512" | "1024x1024") | undefined;
response_format?: ("url" | "b64_json") | undefined;
user?: string | undefined;
}) => Promise<{
created: number;
data: Partial<{
url: string;
b64_json: string;
}>[];
}>;
edit: (request: {
prompt: string;
n?: number | undefined;
size?: ("256x256" | "512x512" | "1024x1024") | undefined;
response_format?: ("url" | "b64_json") | undefined;
user?: string | undefined;
}, image: string | Blob, mask?: string | Blob | undefined) => Promise<{
created: number;
data: Partial<{
url: string;
b64_json: string;
}>[];
}>;
createVariation: (request: {
n?: number | undefined;
size?: ("256x256" | "512x512" | "1024x1024") | undefined;
response_format?: ("url" | "b64_json") | undefined;
user?: string | undefined;
}, image: string | Blob) => Promise<{
created: number;
data: Partial<{
url: string;
b64_json: string;
}>[];
}>;
};
embeddings: {
create: (data: {
model: string;
input: string;
user?: string | undefined;
}) => Promise<{
object: "list";
data: {
object: "embedding";
embedding: number[];
index: number;
}[];
model: string;
usage: Omit;
}>;
};
audio: {
createTranscription: (request: {
model: string;
prompt?: string | undefined;
response_format?: ("json" | "text" | "srt" | "verbose_json" | "vtt") | undefined;
temperature?: number | undefined;
language?: string | undefined;
}, file: string | Blob) => Promise>;
createTranslation: (request: {
model: string;
prompt?: string | undefined;
response_format?: ("json" | "text" | "srt" | "verbose_json" | "vtt") | undefined;
temperature?: number | undefined;
}, file: string | Blob) => Promise>;
};
files: {
list: () => Promise<{
data: v1.FileObject[];
object: "list";
}>;
retrieve: (id: string) => Promise;
upload: (file: string | Blob, purpose: string) => Promise;
delete: (id: string) => Promise<{
id: string;
object: string;
deleted: boolean;
}>;
retrieveContent: (id: string) => Promise;
};
fineTunes: {
create: (data: {
training_file: string;
validation_file?: string | undefined;
model?: string | undefined;
compute_classification_metrics?: boolean | undefined;
classification_n_classes?: number | undefined;
classification_positive_class?: string | undefined;
classification_betas?: number[] | undefined;
suffix?: string | undefined;
} & {
learning_rate_multiplier: number;
prompt_loss_weight: number;
batch_size: number;
n_epochs: number;
}) => Promise<{
id: string;
object: "fine-tune";
model: string;
created_at: number;
fine_tuned_model: string | null;
hyperparameters: {
learning_rate_multiplier: number;
prompt_loss_weight: number;
batch_size: number;
n_epochs: number;
};
organization_id: string;
result_files: v1.FileObject[];
status: string;
validation_files: v1.FileObject[];
training_files: v1.FileObject[];
updated_at: number;
events: {
object: "fine-tune-event";
created_at: number;
level: string;
message: string;
}[];
}>;
list: () => Promise<{
object: "list";
data: Omit<{
id: string;
object: "fine-tune";
model: string;
created_at: number;
fine_tuned_model: string | null;
hyperparameters: {
learning_rate_multiplier: number;
prompt_loss_weight: number;
batch_size: number;
n_epochs: number;
};
organization_id: string;
result_files: v1.FileObject[];
status: string;
validation_files: v1.FileObject[];
training_files: v1.FileObject[];
updated_at: number;
events: {
object: "fine-tune-event";
created_at: number;
level: string;
message: string;
}[];
}, "events">[];
}>;
retrieve: (id: string) => Promise<{
id: string;
object: "fine-tune";
model: string;
created_at: number;
fine_tuned_model: string | null;
hyperparameters: {
learning_rate_multiplier: number;
prompt_loss_weight: number;
batch_size: number;
n_epochs: number;
};
organization_id: string;
result_files: v1.FileObject[];
status: string;
validation_files: v1.FileObject[];
training_files: v1.FileObject[];
updated_at: number;
events: {
object: "fine-tune-event";
created_at: number;
level: string;
message: string;
}[];
}>;
cancel: (id: string) => Promise<{
id: string;
object: "fine-tune";
model: string;
created_at: number;
fine_tuned_model: string | null;
hyperparameters: {
learning_rate_multiplier: number;
prompt_loss_weight: number;
batch_size: number;
n_epochs: number;
};
organization_id: string;
result_files: v1.FileObject[];
status: string;
validation_files: v1.FileObject[];
training_files: v1.FileObject[];
updated_at: number;
events: {
object: "fine-tune-event";
created_at: number;
level: string;
message: string;
}[];
}>;
listEvents: (id: string) => Promise<{
object: "list";
data: {
object: "fine-tune-event";
created_at: number;
level: string;
message: string;
}[];
}>;
};
moderations: {
create: (data: {
input: string;
model?: string | undefined;
}) => Promise<{
id: string;
model: string;
results: {
categories: {
[key: string]: boolean;
};
category_scores: {
[key: string]: number;
};
flagged: boolean;
}[];
}>;
};
};
private makeClient;
}