/**
 * This file was auto-generated by Fern from our API Definition.
 */

// Type-only import: every reference to `Vellum` in this file is in a type
// position, so `import type` guarantees the import is erased at compile time
// (no runtime dependency on the barrel index; safe under isolatedModules).
import type * as Vellum from "../../../../index";

/**
 * Request body for executing an ad-hoc prompt with a streaming response.
 *
 * NOTE(review): field semantics below are inferred from names/types only —
 * the authoritative contract is the API definition this file was generated from.
 *
 * @example
 *     {
 *         mlModel: "x",
 *         inputValues: [{
 *                 name: "x",
 *                 type: "STRING",
 *                 value: "value"
 *             }, {
 *                 name: "x",
 *                 type: "STRING",
 *                 value: "value"
 *             }],
 *         inputVariables: [{
 *                 id: "x",
 *                 key: "key",
 *                 type: "STRING"
 *             }, {
 *                 id: "x",
 *                 key: "key",
 *                 type: "STRING"
 *             }],
 *         parameters: {},
 *         blocks: [{
 *                 blockType: "JINJA",
 *                 template: "template"
 *             }, {
 *                 blockType: "JINJA",
 *                 template: "template"
 *             }]
 *     }
 */
export interface AdHocExecutePromptStream {
    /** Identifier of the ML model to run the prompt against. */
    mlModel: string;
    /** Concrete values bound to the prompt's inputs (deprecated request shape per the type name). */
    inputValues: Vellum.DeprecatedPromptRequestInput[];
    /** Declarations of the variables the prompt expects. */
    inputVariables: Vellum.VellumVariable[];
    /** Model invocation parameters (e.g. sampling settings — see PromptParameters). */
    parameters: Vellum.PromptParameters;
    /** Optional prompt-level settings; explicit `null` is accepted by the API. */
    settings?: Vellum.PromptSettings | null;
    /** Ordered blocks that make up the prompt body. */
    blocks: Vellum.PromptBlock[];
    /** Optional function/tool definitions exposed to the model. */
    functions?: Vellum.FunctionDefinition[] | null;
    /** Optional flags controlling which metadata is expanded in the response. */
    expandMeta?: Vellum.AdHocExpandMeta | null;
}