import * as pulumi from "@pulumi/pulumi";
import * as inputs from "../types/input";
import * as outputs from "../types/output";
/**
 * Manages a Synapse Spark Pool.
 *
 * ## Example Usage
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 *
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example-resources",
 *     location: "West Europe",
 * });
 * const exampleAccount = new azure.storage.Account("example", {
 *     name: "examplestorageacc",
 *     resourceGroupName: example.name,
 *     location: example.location,
 *     accountTier: "Standard",
 *     accountReplicationType: "LRS",
 *     accountKind: "StorageV2",
 *     isHnsEnabled: true,
 * });
 * const exampleDataLakeGen2Filesystem = new azure.storage.DataLakeGen2Filesystem("example", {
 *     name: "example",
 *     storageAccountId: exampleAccount.id,
 * });
 * const exampleWorkspace = new azure.synapse.Workspace("example", {
 *     name: "example",
 *     resourceGroupName: example.name,
 *     location: example.location,
 *     storageDataLakeGen2FilesystemId: exampleDataLakeGen2Filesystem.id,
 *     sqlAdministratorLogin: "sqladminuser",
 *     sqlAdministratorLoginPassword: "H@Sh1CoR3!",
 *     identity: {
 *         type: "SystemAssigned",
 *     },
 * });
 * const exampleSparkPool = new azure.synapse.SparkPool("example", {
 *     name: "example",
 *     synapseWorkspaceId: exampleWorkspace.id,
 *     nodeSizeFamily: "MemoryOptimized",
 *     nodeSize: "Small",
 *     cacheSize: 100,
 *     autoScale: {
 *         maxNodeCount: 50,
 *         minNodeCount: 3,
 *     },
 *     autoPause: {
 *         delayInMinutes: 15,
 *     },
 *     libraryRequirement: {
 *         content: `appnope==0.1.0
 * beautifulsoup4==4.6.3
 * `,
 *         filename: "requirements.txt",
 *     },
 *     sparkConfig: {
 *         content: "spark.shuffle.spill true\n",
 *         filename: "config.txt",
 *     },
 *     sparkVersion: "3.5",
 *     tags: {
 *         ENV: "Production",
 *     },
 * });
 * ```
 *
 * ## Import
 *
 * Synapse Spark Pool can be imported using the `resource id`, e.g.
 *
 * ```sh
 * $ pulumi import azure:synapse/sparkPool:SparkPool example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1/bigDataPools/sparkPool1
 * ```
 */
export declare class SparkPool extends pulumi.CustomResource {
    /**
     * Get an existing SparkPool resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name: string, id: pulumi.Input<pulumi.ID>, state?: SparkPoolState, opts?: pulumi.CustomResourceOptions): SparkPool;
    /**
     * Returns true if the given object is an instance of SparkPool. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj: any): obj is SparkPool;
    /**
     * An `autoPause` block as defined below.
     */
    readonly autoPause: pulumi.Output<outputs.synapse.SparkPoolAutoPause | undefined>;
    /**
     * An `autoScale` block as defined below. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    readonly autoScale: pulumi.Output<outputs.synapse.SparkPoolAutoScale | undefined>;
    /**
     * The cache size in the Spark Pool.
     */
    readonly cacheSize: pulumi.Output<number | undefined>;
    /**
     * Indicates whether compute isolation is enabled or not. Defaults to `false`.
     */
    readonly computeIsolationEnabled: pulumi.Output<boolean | undefined>;
    /**
     * Indicates whether Dynamic Executor Allocation is enabled or not.
     */
    readonly dynamicExecutorAllocationEnabled: pulumi.Output<boolean | undefined>;
    /**
     * A `libraryRequirement` block as defined below.
     */
    readonly libraryRequirement: pulumi.Output<outputs.synapse.SparkPoolLibraryRequirement | undefined>;
    /**
     * The maximum number of executors allocated.
     */
    readonly maxExecutors: pulumi.Output<number | undefined>;
    /**
     * The minimum number of executors allocated.
     */
    readonly minExecutors: pulumi.Output<number | undefined>;
    /**
     * The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
     */
    readonly name: pulumi.Output<string>;
    /**
     * The number of nodes in the Spark Pool. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    readonly nodeCount: pulumi.Output<number | undefined>;
    /**
     * The level of node in the Spark Pool. Possible values are `Small`, `Medium`, `Large`, `None`, `XLarge`, `XXLarge` and `XXXLarge`.
     */
    readonly nodeSize: pulumi.Output<string>;
    /**
     * The kind of nodes that the Spark Pool provides. Possible values are `HardwareAcceleratedFPGA`, `HardwareAcceleratedGPU`, `MemoryOptimized`, and `None`.
     */
    readonly nodeSizeFamily: pulumi.Output<string>;
    /**
     * Indicates whether session level packages are enabled or not.
     */
    readonly sessionLevelPackagesEnabled: pulumi.Output<boolean | undefined>;
    /**
     * A `sparkConfig` block as defined below.
     */
    readonly sparkConfig: pulumi.Output<outputs.synapse.SparkPoolSparkConfig | undefined>;
    /**
     * The Spark events folder.
     */
    readonly sparkEventsFolder: pulumi.Output<string | undefined>;
    /**
     * The default folder where Spark logs will be written.
     */
    readonly sparkLogFolder: pulumi.Output<string | undefined>;
    /**
     * The Apache Spark version. Possible values are `3.4` and `3.5`.
     */
    readonly sparkVersion: pulumi.Output<string>;
    /**
     * The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
     */
    readonly synapseWorkspaceId: pulumi.Output<string>;
    /**
     * A mapping of tags which should be assigned to the Synapse Spark Pool.
     */
    readonly tags: pulumi.Output<{
        [key: string]: string;
    } | undefined>;
    /**
     * Create a SparkPool resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: SparkPoolArgs, opts?: pulumi.CustomResourceOptions);
}
/**
 * Input properties used for looking up and filtering SparkPool resources.
 */
export interface SparkPoolState {
    /**
     * An `autoPause` block as defined below.
     */
    autoPause?: pulumi.Input<inputs.synapse.SparkPoolAutoPause>;
    /**
     * An `autoScale` block as defined below. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    autoScale?: pulumi.Input<inputs.synapse.SparkPoolAutoScale>;
    /**
     * The cache size in the Spark Pool.
     */
    cacheSize?: pulumi.Input<number>;
    /**
     * Indicates whether compute isolation is enabled or not. Defaults to `false`.
     */
    computeIsolationEnabled?: pulumi.Input<boolean>;
    /**
     * Indicates whether Dynamic Executor Allocation is enabled or not.
     */
    dynamicExecutorAllocationEnabled?: pulumi.Input<boolean>;
    /**
     * A `libraryRequirement` block as defined below.
     */
    libraryRequirement?: pulumi.Input<inputs.synapse.SparkPoolLibraryRequirement>;
    /**
     * The maximum number of executors allocated.
     */
    maxExecutors?: pulumi.Input<number>;
    /**
     * The minimum number of executors allocated.
     */
    minExecutors?: pulumi.Input<number>;
    /**
     * The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
     */
    name?: pulumi.Input<string>;
    /**
     * The number of nodes in the Spark Pool. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    nodeCount?: pulumi.Input<number>;
    /**
     * The level of node in the Spark Pool. Possible values are `Small`, `Medium`, `Large`, `None`, `XLarge`, `XXLarge` and `XXXLarge`.
     */
    nodeSize?: pulumi.Input<string>;
    /**
     * The kind of nodes that the Spark Pool provides. Possible values are `HardwareAcceleratedFPGA`, `HardwareAcceleratedGPU`, `MemoryOptimized`, and `None`.
     */
    nodeSizeFamily?: pulumi.Input<string>;
    /**
     * Indicates whether session level packages are enabled or not.
     */
    sessionLevelPackagesEnabled?: pulumi.Input<boolean>;
    /**
     * A `sparkConfig` block as defined below.
     */
    sparkConfig?: pulumi.Input<inputs.synapse.SparkPoolSparkConfig>;
    /**
     * The Spark events folder.
     */
    sparkEventsFolder?: pulumi.Input<string>;
    /**
     * The default folder where Spark logs will be written.
     */
    sparkLogFolder?: pulumi.Input<string>;
    /**
     * The Apache Spark version. Possible values are `3.4` and `3.5`.
     */
    sparkVersion?: pulumi.Input<string>;
    /**
     * The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
     */
    synapseWorkspaceId?: pulumi.Input<string>;
    /**
     * A mapping of tags which should be assigned to the Synapse Spark Pool.
     */
    tags?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
}
/**
 * The set of arguments for constructing a SparkPool resource.
 */
export interface SparkPoolArgs {
    /**
     * An `autoPause` block as defined below.
     */
    autoPause?: pulumi.Input<inputs.synapse.SparkPoolAutoPause>;
    /**
     * An `autoScale` block as defined below. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    autoScale?: pulumi.Input<inputs.synapse.SparkPoolAutoScale>;
    /**
     * The cache size in the Spark Pool.
     */
    cacheSize?: pulumi.Input<number>;
    /**
     * Indicates whether compute isolation is enabled or not. Defaults to `false`.
     */
    computeIsolationEnabled?: pulumi.Input<boolean>;
    /**
     * Indicates whether Dynamic Executor Allocation is enabled or not.
     */
    dynamicExecutorAllocationEnabled?: pulumi.Input<boolean>;
    /**
     * A `libraryRequirement` block as defined below.
     */
    libraryRequirement?: pulumi.Input<inputs.synapse.SparkPoolLibraryRequirement>;
    /**
     * The maximum number of executors allocated.
     */
    maxExecutors?: pulumi.Input<number>;
    /**
     * The minimum number of executors allocated.
     */
    minExecutors?: pulumi.Input<number>;
    /**
     * The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
     */
    name?: pulumi.Input<string>;
    /**
     * The number of nodes in the Spark Pool. Exactly one of `nodeCount` or `autoScale` must be specified.
     */
    nodeCount?: pulumi.Input<number>;
    /**
     * The level of node in the Spark Pool. Possible values are `Small`, `Medium`, `Large`, `None`, `XLarge`, `XXLarge` and `XXXLarge`.
     */
    nodeSize: pulumi.Input<string>;
    /**
     * The kind of nodes that the Spark Pool provides. Possible values are `HardwareAcceleratedFPGA`, `HardwareAcceleratedGPU`, `MemoryOptimized`, and `None`.
     */
    nodeSizeFamily: pulumi.Input<string>;
    /**
     * Indicates whether session level packages are enabled or not.
     */
    sessionLevelPackagesEnabled?: pulumi.Input<boolean>;
    /**
     * A `sparkConfig` block as defined below.
     */
    sparkConfig?: pulumi.Input<inputs.synapse.SparkPoolSparkConfig>;
    /**
     * The Spark events folder.
     */
    sparkEventsFolder?: pulumi.Input<string>;
    /**
     * The default folder where Spark logs will be written.
     */
    sparkLogFolder?: pulumi.Input<string>;
    /**
     * The Apache Spark version. Possible values are `3.4` and `3.5`.
     */
    sparkVersion: pulumi.Input<string>;
    /**
     * The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
     */
    synapseWorkspaceId: pulumi.Input<string>;
    /**
     * A mapping of tags which should be assigned to the Synapse Spark Pool.
     */
    tags?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
}