import type { ResponseChatMessage } from './response-chat-message.js';
import type { ChoiceLogprobs } from './choice-logprobs.js';
/**
 * Representation of the 'LlmChoice' schema.
 *
 * One generated completion choice returned by the LLM. Intersected with
 * `Record<string, any>` so the schema tolerates additional properties
 * beyond those declared below (the bare `& Record` in the generated
 * output was invalid — `Record` requires two type arguments).
 */
export type LlmChoice = {
    /**
     * Index of the choice
     */
    index: number;
    /**
     * The message produced by the model for this choice.
     */
    message: ResponseChatMessage;
    /**
     * Log-probability information for the choice, when requested.
     */
    logprobs?: ChoiceLogprobs;
    /**
     * Reason the model stopped generating tokens. 'stop' if the model hit a natural stop point or a provided stop sequence, 'length' if the maximum token number was reached, 'content_filter' if content was omitted due to a filter enforced by the LLM model provider or the content filtering module
     * @example "stop"
     */
    finish_reason: string;
} & Record<string, any>;
//# sourceMappingURL=llm-choice.d.ts.map