import { Command as $Command } from "@smithy/smithy-client";
import { MetadataBearer as __MetadataBearer } from "@smithy/types";
import { CreateInferenceRecommendationsJobRequest, CreateInferenceRecommendationsJobResponse } from "../models/models_1";
import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient";
/**
 * @public
 */
export type { __MetadataBearer };
export { $Command };
/**
 * @public
 *
 * The input for {@link CreateInferenceRecommendationsJobCommand}.
 */
export interface CreateInferenceRecommendationsJobCommandInput extends CreateInferenceRecommendationsJobRequest {
}
/**
 * @public
 *
 * The output of {@link CreateInferenceRecommendationsJobCommand}.
 */
export interface CreateInferenceRecommendationsJobCommandOutput extends CreateInferenceRecommendationsJobResponse, __MetadataBearer {
}
declare const CreateInferenceRecommendationsJobCommand_base: {
    new (input: CreateInferenceRecommendationsJobCommandInput): import("@smithy/smithy-client").CommandImpl<CreateInferenceRecommendationsJobCommandInput, CreateInferenceRecommendationsJobCommandOutput, SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
    new (input: CreateInferenceRecommendationsJobCommandInput): import("@smithy/smithy-client").CommandImpl<CreateInferenceRecommendationsJobCommandInput, CreateInferenceRecommendationsJobCommandOutput, SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
};
/**
 * Starts a recommendation job. You can create either an instance recommendation or load test job.
 *
 * @example
 * Use a bare-bones client and the command you need to make an API call.
 * ```javascript
 * import { SageMakerClient, CreateInferenceRecommendationsJobCommand } from "@aws-sdk/client-sagemaker"; // ES Modules import
 * // const { SageMakerClient, CreateInferenceRecommendationsJobCommand } = require("@aws-sdk/client-sagemaker"); // CommonJS import
 * const client = new SageMakerClient(config);
 * const input = { // CreateInferenceRecommendationsJobRequest
 *   JobName: "STRING_VALUE", // required
 *   JobType: "Default" || "Advanced", // required
 *   RoleArn: "STRING_VALUE", // required
 *   InputConfig: { // RecommendationJobInputConfig
 *     ModelPackageVersionArn: "STRING_VALUE",
 *     ModelName: "STRING_VALUE",
 *     JobDurationInSeconds: Number("int"),
 *     TrafficPattern: { // TrafficPattern
 *       TrafficType: "PHASES" || "STAIRS",
 *       Phases: [ // Phases
 *         { // Phase
 *           InitialNumberOfUsers: Number("int"),
 *           SpawnRate: Number("int"),
 *           DurationInSeconds: Number("int"),
 *         },
 *       ],
 *       Stairs: { // Stairs
 *         DurationInSeconds: Number("int"),
 *         NumberOfSteps: Number("int"),
 *         UsersPerStep: Number("int"),
 *       },
 *     },
 *     ResourceLimit: { // RecommendationJobResourceLimit
 *       MaxNumberOfTests: Number("int"),
 *       MaxParallelOfTests: Number("int"),
 *     },
 *     EndpointConfigurations: [ // EndpointInputConfigurations
 *       { // EndpointInputConfiguration
 *         InstanceType: "ml.t2.medium" || "ml.t2.large" || "ml.t2.xlarge" || "ml.t2.2xlarge" || "ml.m4.xlarge" || "ml.m4.2xlarge" || "ml.m4.4xlarge" || "ml.m4.10xlarge" || "ml.m4.16xlarge" || "ml.m5.large" || "ml.m5.xlarge" || "ml.m5.2xlarge" || "ml.m5.4xlarge" || "ml.m5.12xlarge" || "ml.m5.24xlarge" || "ml.m5d.large" || "ml.m5d.xlarge" || "ml.m5d.2xlarge" || "ml.m5d.4xlarge" || "ml.m5d.12xlarge" || "ml.m5d.24xlarge" || "ml.c4.large" || "ml.c4.xlarge" || "ml.c4.2xlarge" || "ml.c4.4xlarge" || "ml.c4.8xlarge" || "ml.p2.xlarge" || "ml.p2.8xlarge" || "ml.p2.16xlarge" || "ml.p3.2xlarge" || "ml.p3.8xlarge" || "ml.p3.16xlarge" || "ml.c5.large" || "ml.c5.xlarge" || "ml.c5.2xlarge" || "ml.c5.4xlarge" || "ml.c5.9xlarge" || "ml.c5.18xlarge" || "ml.c5d.large" || "ml.c5d.xlarge" || "ml.c5d.2xlarge" || "ml.c5d.4xlarge" || "ml.c5d.9xlarge" || "ml.c5d.18xlarge" || "ml.g4dn.xlarge" || "ml.g4dn.2xlarge" || "ml.g4dn.4xlarge" || "ml.g4dn.8xlarge" || "ml.g4dn.12xlarge" || "ml.g4dn.16xlarge" || "ml.r5.large" || "ml.r5.xlarge" || "ml.r5.2xlarge" || "ml.r5.4xlarge" || "ml.r5.12xlarge" || "ml.r5.24xlarge" || "ml.r5d.large" || "ml.r5d.xlarge" || "ml.r5d.2xlarge" || "ml.r5d.4xlarge" || "ml.r5d.12xlarge" || "ml.r5d.24xlarge" || "ml.inf1.xlarge" || "ml.inf1.2xlarge" || "ml.inf1.6xlarge" || "ml.inf1.24xlarge" || "ml.dl1.24xlarge" || "ml.c6i.large" || "ml.c6i.xlarge" || "ml.c6i.2xlarge" || "ml.c6i.4xlarge" || "ml.c6i.8xlarge" || "ml.c6i.12xlarge" || "ml.c6i.16xlarge" || "ml.c6i.24xlarge" || "ml.c6i.32xlarge" || "ml.m6i.large" || "ml.m6i.xlarge" || "ml.m6i.2xlarge" || "ml.m6i.4xlarge" || "ml.m6i.8xlarge" || "ml.m6i.12xlarge" || "ml.m6i.16xlarge" || "ml.m6i.24xlarge" || "ml.m6i.32xlarge" || "ml.r6i.large" || "ml.r6i.xlarge" || "ml.r6i.2xlarge" || "ml.r6i.4xlarge" || "ml.r6i.8xlarge" || "ml.r6i.12xlarge" || "ml.r6i.16xlarge" || "ml.r6i.24xlarge" || "ml.r6i.32xlarge" || "ml.g5.xlarge" || "ml.g5.2xlarge" || "ml.g5.4xlarge" || "ml.g5.8xlarge" || "ml.g5.12xlarge" || "ml.g5.16xlarge" || "ml.g5.24xlarge" || "ml.g5.48xlarge" || "ml.g6.xlarge" || "ml.g6.2xlarge" || "ml.g6.4xlarge" || "ml.g6.8xlarge" || "ml.g6.12xlarge" || "ml.g6.16xlarge" || "ml.g6.24xlarge" || "ml.g6.48xlarge" || "ml.r8g.medium" || "ml.r8g.large" || "ml.r8g.xlarge" || "ml.r8g.2xlarge" || "ml.r8g.4xlarge" || "ml.r8g.8xlarge" || "ml.r8g.12xlarge" || "ml.r8g.16xlarge" || "ml.r8g.24xlarge" || "ml.r8g.48xlarge" || "ml.g6e.xlarge" || "ml.g6e.2xlarge" || "ml.g6e.4xlarge" || "ml.g6e.8xlarge" || "ml.g6e.12xlarge" || "ml.g6e.16xlarge" || "ml.g6e.24xlarge" || "ml.g6e.48xlarge" || "ml.p4d.24xlarge" || "ml.c7g.large" || "ml.c7g.xlarge" || "ml.c7g.2xlarge" || "ml.c7g.4xlarge" || "ml.c7g.8xlarge" || "ml.c7g.12xlarge" || "ml.c7g.16xlarge" || "ml.m6g.large" || "ml.m6g.xlarge" || "ml.m6g.2xlarge" || "ml.m6g.4xlarge" || "ml.m6g.8xlarge" || "ml.m6g.12xlarge" || "ml.m6g.16xlarge" || "ml.m6gd.large" || "ml.m6gd.xlarge" || "ml.m6gd.2xlarge" || "ml.m6gd.4xlarge" || "ml.m6gd.8xlarge" || "ml.m6gd.12xlarge" || "ml.m6gd.16xlarge" || "ml.c6g.large" || "ml.c6g.xlarge" || "ml.c6g.2xlarge" || "ml.c6g.4xlarge" || "ml.c6g.8xlarge" || "ml.c6g.12xlarge" || "ml.c6g.16xlarge" || "ml.c6gd.large" || "ml.c6gd.xlarge" || "ml.c6gd.2xlarge" || "ml.c6gd.4xlarge" || "ml.c6gd.8xlarge" || "ml.c6gd.12xlarge" || "ml.c6gd.16xlarge" || "ml.c6gn.large" || "ml.c6gn.xlarge" || "ml.c6gn.2xlarge" || "ml.c6gn.4xlarge" || "ml.c6gn.8xlarge" || "ml.c6gn.12xlarge" || "ml.c6gn.16xlarge" || "ml.r6g.large" || "ml.r6g.xlarge" || "ml.r6g.2xlarge" || "ml.r6g.4xlarge" || "ml.r6g.8xlarge" || "ml.r6g.12xlarge" || "ml.r6g.16xlarge" || "ml.r6gd.large" || "ml.r6gd.xlarge" || "ml.r6gd.2xlarge" || "ml.r6gd.4xlarge" || "ml.r6gd.8xlarge" || "ml.r6gd.12xlarge" || "ml.r6gd.16xlarge" || "ml.p4de.24xlarge" || "ml.trn1.2xlarge" || "ml.trn1.32xlarge" || "ml.trn1n.32xlarge" || "ml.trn2.48xlarge" || "ml.inf2.xlarge" || "ml.inf2.8xlarge" || "ml.inf2.24xlarge" || "ml.inf2.48xlarge" || "ml.p5.48xlarge" || "ml.p5e.48xlarge" || "ml.p5en.48xlarge" || "ml.m7i.large" || "ml.m7i.xlarge" || "ml.m7i.2xlarge" || "ml.m7i.4xlarge" || "ml.m7i.8xlarge" || "ml.m7i.12xlarge" || "ml.m7i.16xlarge" || "ml.m7i.24xlarge" || "ml.m7i.48xlarge" || "ml.c7i.large" || "ml.c7i.xlarge" || "ml.c7i.2xlarge" || "ml.c7i.4xlarge" || "ml.c7i.8xlarge" || "ml.c7i.12xlarge" || "ml.c7i.16xlarge" || "ml.c7i.24xlarge" || "ml.c7i.48xlarge" || "ml.r7i.large" || "ml.r7i.xlarge" || "ml.r7i.2xlarge" || "ml.r7i.4xlarge" || "ml.r7i.8xlarge" || "ml.r7i.12xlarge" || "ml.r7i.16xlarge" || "ml.r7i.24xlarge" || "ml.r7i.48xlarge" || "ml.c8g.medium" || "ml.c8g.large" || "ml.c8g.xlarge" || "ml.c8g.2xlarge" || "ml.c8g.4xlarge" || "ml.c8g.8xlarge" || "ml.c8g.12xlarge" || "ml.c8g.16xlarge" || "ml.c8g.24xlarge" || "ml.c8g.48xlarge" || "ml.r7gd.medium" || "ml.r7gd.large" || "ml.r7gd.xlarge" || "ml.r7gd.2xlarge" || "ml.r7gd.4xlarge" || "ml.r7gd.8xlarge" || "ml.r7gd.12xlarge" || "ml.r7gd.16xlarge" || "ml.m8g.medium" || "ml.m8g.large" || "ml.m8g.xlarge" || "ml.m8g.2xlarge" || "ml.m8g.4xlarge" || "ml.m8g.8xlarge" || "ml.m8g.12xlarge" || "ml.m8g.16xlarge" || "ml.m8g.24xlarge" || "ml.m8g.48xlarge" || "ml.c6in.large" || "ml.c6in.xlarge" || "ml.c6in.2xlarge" || "ml.c6in.4xlarge" || "ml.c6in.8xlarge" || "ml.c6in.12xlarge" || "ml.c6in.16xlarge" || "ml.c6in.24xlarge" || "ml.c6in.32xlarge" || "ml.p6-b200.48xlarge" || "ml.p6e-gb200.36xlarge",
 *         ServerlessConfig: { // ProductionVariantServerlessConfig
 *           MemorySizeInMB: Number("int"), // required
 *           MaxConcurrency: Number("int"), // required
 *           ProvisionedConcurrency: Number("int"),
 *         },
 *         InferenceSpecificationName: "STRING_VALUE",
 *         EnvironmentParameterRanges: { // EnvironmentParameterRanges
 *           CategoricalParameterRanges: [ // CategoricalParameters
 *             { // CategoricalParameter
 *               Name: "STRING_VALUE", // required
 *               Value: [ // CategoricalParameterRangeValues // required
 *                 "STRING_VALUE",
 *               ],
 *             },
 *           ],
 *         },
 *       },
 *     ],
 *     VolumeKmsKeyId: "STRING_VALUE",
 *     ContainerConfig: { // RecommendationJobContainerConfig
 *       Domain: "STRING_VALUE",
 *       Task: "STRING_VALUE",
 *       Framework: "STRING_VALUE",
 *       FrameworkVersion: "STRING_VALUE",
 *       PayloadConfig: { // RecommendationJobPayloadConfig
 *         SamplePayloadUrl: "STRING_VALUE",
 *         SupportedContentTypes: [ // RecommendationJobSupportedContentTypes
 *           "STRING_VALUE",
 *         ],
 *       },
 *       NearestModelName: "STRING_VALUE",
 *       SupportedInstanceTypes: [ // RecommendationJobSupportedInstanceTypes
 *         "STRING_VALUE",
 *       ],
 *       SupportedEndpointType: "RealTime" || "Serverless",
 *       DataInputConfig: "STRING_VALUE",
 *       SupportedResponseMIMETypes: [ // RecommendationJobSupportedResponseMIMETypes
 *         "STRING_VALUE",
 *       ],
 *     },
 *     Endpoints: [ // Endpoints
 *       { // EndpointInfo
 *         EndpointName: "STRING_VALUE",
 *       },
 *     ],
 *     VpcConfig: { // RecommendationJobVpcConfig
 *       SecurityGroupIds: [ // RecommendationJobVpcSecurityGroupIds // required
 *         "STRING_VALUE",
 *       ],
 *       Subnets: [ // RecommendationJobVpcSubnets // required
 *         "STRING_VALUE",
 *       ],
 *     },
 *   },
 *   JobDescription: "STRING_VALUE",
 *   StoppingConditions: { // RecommendationJobStoppingConditions
 *     MaxInvocations: Number("int"),
 *     ModelLatencyThresholds: [ // ModelLatencyThresholds
 *       { // ModelLatencyThreshold
 *         Percentile: "STRING_VALUE",
 *         ValueInMilliseconds: Number("int"),
 *       },
 *     ],
 *     FlatInvocations: "Continue" || "Stop",
 *   },
 *   OutputConfig: { // RecommendationJobOutputConfig
 *     KmsKeyId: "STRING_VALUE",
 *     CompiledOutputConfig: { // RecommendationJobCompiledOutputConfig
 *       S3OutputUri: "STRING_VALUE",
 *     },
 *   },
 *   Tags: [ // TagList
 *     { // Tag
 *       Key: "STRING_VALUE", // required
 *       Value: "STRING_VALUE", // required
 *     },
 *   ],
 * };
 * const command = new CreateInferenceRecommendationsJobCommand(input);
 * const response = await client.send(command);
 * // { // CreateInferenceRecommendationsJobResponse
 * //   JobArn: "STRING_VALUE", // required
 * // };
 *
 * ```
 *
 * @param CreateInferenceRecommendationsJobCommandInput - {@link CreateInferenceRecommendationsJobCommandInput}
 * @returns {@link CreateInferenceRecommendationsJobCommandOutput}
 * @see {@link CreateInferenceRecommendationsJobCommandInput} for command's `input` shape.
 * @see {@link CreateInferenceRecommendationsJobCommandOutput} for command's `response` shape.
 * @see {@link SageMakerClientResolvedConfig | config} for SageMakerClient's `config` shape.
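 *
 * The sketch below is an editorial addition rather than generated SDK reference content. It shows one
 * plausible way to follow up on the response: polling the job with `DescribeInferenceRecommendationsJobCommand`
 * until it leaves a pending or running state. The job name, polling interval, and status values are
 * assumptions to adapt to your own workload; it reuses the `client` created in the example above.
 * ```javascript
 * import { DescribeInferenceRecommendationsJobCommand } from "@aws-sdk/client-sagemaker";
 *
 * // Reuse the JobName passed to CreateInferenceRecommendationsJobCommand above.
 * const describeCommand = new DescribeInferenceRecommendationsJobCommand({ JobName: "my-recommendation-job" });
 * let status = "PENDING";
 * while (status === "PENDING" || status === "IN_PROGRESS") {
 *   const { Status } = await client.send(describeCommand);
 *   status = Status;
 *   if (status === "PENDING" || status === "IN_PROGRESS") {
 *     // Recommendation jobs can run for a while; wait before polling again.
 *     await new Promise((resolve) => setTimeout(resolve, 60000));
 *   }
 * }
 * console.log(`Inference recommendations job finished with status ${status}`);
 * ```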

 * @throws {@link ResourceInUse} (client fault)
 *  Resource being accessed is in use.

 * @throws {@link ResourceLimitExceeded} (client fault)
 *  You have exceeded a SageMaker resource limit. For example, you might have created too many training jobs.
 *
 * @throws {@link SageMakerServiceException}
 *  Base exception class for all service exceptions from the SageMaker service.
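 *
 * The snippet below is an editorial sketch, not generated SDK reference content. It illustrates the
 * standard AWS SDK for JavaScript v3 pattern of branching on `error.name` to distinguish the modeled
 * faults listed above; the retry and cleanup comments are placeholders for your own handling, and it
 * assumes the `client` and `input` from the example above.
 * ```javascript
 * try {
 *   await client.send(new CreateInferenceRecommendationsJobCommand(input));
 * } catch (error) {
 *   if (error.name === "ResourceInUse") {
 *     // The job name or a referenced resource is busy; pick a new JobName or retry later.
 *   } else if (error.name === "ResourceLimitExceeded") {
 *     // An account-level SageMaker quota was hit; delete unused resources or request a limit increase.
 *   } else {
 *     throw error; // SageMakerServiceException or an unmodeled error
 *   }
 * }
 * ```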

 *
 * @public
 */
export declare class CreateInferenceRecommendationsJobCommand extends CreateInferenceRecommendationsJobCommand_base {
    /** @internal type navigation helper, not in runtime. */
    protected static __types: {
        api: {
            input: CreateInferenceRecommendationsJobRequest;
            output: CreateInferenceRecommendationsJobResponse;
        };
        sdk: {
            input: CreateInferenceRecommendationsJobCommandInput;
            output: CreateInferenceRecommendationsJobCommandOutput;
        };
    };
}