/**
 * This file was auto-generated by Fern from our API Definition.
 */

import * as Vellum from "../index";

/**
 * An ML Model that your Workspace has access to.
 */
export interface MlModelRead {
    /** The ML Model's unique identifier. */
    id: string;
    /** The unique name of the ML Model. */
    name: string;
    /**
     * The organization hosting the ML Model.
     *
     * * `ANTHROPIC` - Anthropic
     * * `AWS_BEDROCK` - AWS Bedrock
     * * `AZURE_AI_FOUNDRY` - Azure AI Foundry
     * * `AZURE_OPENAI` - Azure OpenAI
     * * `BASETEN` - BaseTen
     * * `CEREBRAS` - Cerebras
     * * `COHERE` - Cohere
     * * `CUSTOM` - Custom
     * * `DEEP_SEEK` - DeepSeek
     * * `FIREWORKS_AI` - Fireworks AI
     * * `GOOGLE` - Google
     * * `GOOGLE_VERTEX_AI` - Google Vertex AI
     * * `GROQ` - Groq
     * * `HUGGINGFACE` - HuggingFace
     * * `IBM_WATSONX` - IBM WatsonX
     * * `MISTRAL_AI` - Mistral AI
     * * `MOSAICML` - MosaicML
     * * `MYSTIC` - Mystic
     * * `NVIDIA` - NVIDIA
     * * `OPENAI` - OpenAI
     * * `OPEN_ROUTER` - Open Router
     * * `OPENPIPE` - OpenPipe
     * * `PERPLEXITY` - Perplexity
     * * `PYQ` - Pyq
     * * `REPLICATE` - Replicate
     * * `SAMBANOVA` - SambaNova
     * * `TOGETHER_AI` - Together AI
     * * `X_AI` - xAI
     * * `FASTWEB` - Fastweb
     * * `SWISSCOM` - Swisscom
     */
    hostedBy: Vellum.MlModelHostingInterface;
    /** When the ML Model was introduced. */
    introducedOn: Date;
}