import * as pulumi from "@pulumi/pulumi";
import * as inputs from "../types/input";
import * as outputs from "../types/output";
/**
 * A Spark application is a single Spark workload run on a GDC cluster.
 *
 * To get more information about SparkApplication, see:
 *
 * * [API documentation](https://cloud.google.com/dataproc-gdc/docs/reference/rest/v1/projects.locations.serviceInstances.sparkApplications)
 * * How-to Guides
 *     * [Dataproc Intro](https://cloud.google.com/dataproc/)
 *
 * ## Example Usage
 *
 * ### Dataprocgdc Sparkapplication Basic
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
 *     sparkApplicationId: "tf-e2e-spark-app-basic",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 *     sparkApplicationConfig: {
 *         mainClass: "org.apache.spark.examples.SparkPi",
 *         jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
 *         args: ["10000"],
 *     },
 * });
 * ```
 * ### Dataprocgdc Sparkapplication
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const appEnv = new gcp.dataproc.GdcApplicationEnvironment("app_env", {
 *     applicationEnvironmentId: "tf-e2e-spark-app-env",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 * });
 * const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
 *     sparkApplicationId: "tf-e2e-spark-app",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 *     labels: {
 *         "test-label": "label-value",
 *     },
 *     annotations: {
 *         an_annotation: "annotation_value",
 *     },
 *     properties: {
 *         "spark.executor.instances": "2",
 *     },
 *     applicationEnvironment: appEnv.name,
 *     version: "1.2",
 *     sparkApplicationConfig: {
 *         mainJarFileUri: "file:///usr/lib/spark/examples/jars/spark-examples.jar",
 *         jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
 *         archiveUris: ["file:///usr/lib/spark/examples/spark-examples.jar"],
 *         fileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
 *     },
 * });
 * ```
 * ### Dataprocgdc Sparkapplication Pyspark
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
 *     sparkApplicationId: "tf-e2e-pyspark-app",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 *     displayName: "A Pyspark application for a Terraform create test",
 *     dependencyImages: ["gcr.io/some/image"],
 *     pysparkApplicationConfig: {
 *         mainPythonFileUri: "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
 *         jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
 *         pythonFileUris: ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"],
 *         fileUris: ["file:///usr/lib/spark/examples/spark-examples.jar"],
 *         archiveUris: ["file:///usr/lib/spark/examples/spark-examples.jar"],
 *         args: ["10"],
 *     },
 * });
 * ```
"tf-e2e-sparkr-app", * serviceinstance: "do-not-delete-dataproc-gdc-instance", * project: "my-project", * location: "us-west2", * namespace: "default", * displayName: "A SparkR application for a Terraform create test", * sparkRApplicationConfig: { * mainRFileUri: "gs://some-bucket/something.R", * fileUris: ["file://usr/lib/spark/examples/spark-examples.jar"], * archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"], * args: ["10"], * }, * }); * ``` * ### Dataprocgdc Sparkapplication Sparksql * * ```typescript * import * as pulumi from "@pulumi/pulumi"; * import * as gcp from "@pulumi/gcp"; * * const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", { * sparkApplicationId: "tf-e2e-sparksql-app", * serviceinstance: "do-not-delete-dataproc-gdc-instance", * project: "my-project", * location: "us-west2", * namespace: "default", * displayName: "A SparkSql application for a Terraform create test", * sparkSqlApplicationConfig: { * jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"], * queryList: { * queries: ["show tables;"], * }, * scriptVariables: { * MY_VAR: "1", * }, * }, * }); * ``` * ### Dataprocgdc Sparkapplication Sparksql Query File * * ```typescript * import * as pulumi from "@pulumi/pulumi"; * import * as gcp from "@pulumi/gcp"; * * const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", { * sparkApplicationId: "tf-e2e-sparksql-app", * serviceinstance: "do-not-delete-dataproc-gdc-instance", * project: "my-project", * location: "us-west2", * namespace: "default", * displayName: "A SparkSql application for a Terraform create test", * sparkSqlApplicationConfig: { * jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"], * queryFileUri: "gs://some-bucket/something.sql", * scriptVariables: { * MY_VAR: "1", * }, * }, * }); * ``` * * ## Import * * SparkApplication can be imported using any of these accepted formats: * * * `projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}` * * * `{{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}` * * * `{{location}}/{{serviceinstance}}/{{spark_application_id}}` * * When using the `pulumi import` command, SparkApplication can be imported using one of the formats above. For example: * * ```sh * $ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}} * ``` * * ```sh * $ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}} * ``` * * ```sh * $ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{location}}/{{serviceinstance}}/{{spark_application_id}} * ``` */ export declare class GdcSparkApplication extends pulumi.CustomResource { /** * Get an existing GdcSparkApplication resource's state with the given name, ID, and optional extra * properties used to qualify the lookup. * * @param name The _unique_ name of the resulting resource. * @param id The _unique_ provider ID of the resource to lookup. * @param state Any extra arguments used during the lookup. * @param opts Optional settings to control the behavior of the CustomResource. 
     */
    static get(name: string, id: pulumi.Input<pulumi.ID>, state?: GdcSparkApplicationState, opts?: pulumi.CustomResourceOptions): GdcSparkApplication;
    /**
     * Returns true if the given object is an instance of GdcSparkApplication. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj: any): obj is GdcSparkApplication;
    /**
     * The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effectiveAnnotations` for all of the annotations present on the resource.
     */
    readonly annotations: pulumi.Output<{
        [key: string]: string;
    } | undefined>;
    /**
     * An ApplicationEnvironment from which to inherit configuration properties.
     */
    readonly applicationEnvironment: pulumi.Output<string | undefined>;
    /**
     * The timestamp when the resource was created.
     */
    readonly createTime: pulumi.Output<string>;
    /**
     * List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
     */
    readonly dependencyImages: pulumi.Output<string[] | undefined>;
    /**
     * User-provided human-readable name to be used in user interfaces.
     */
    readonly displayName: pulumi.Output<string | undefined>;
    readonly effectiveAnnotations: pulumi.Output<{
        [key: string]: string;
    }>;
    /**
     * All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
     */
    readonly effectiveLabels: pulumi.Output<{
        [key: string]: string;
    }>;
    /**
     * The labels to associate with this application. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effectiveLabels` for all of the labels present on the resource.
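     *
     * A minimal sketch of the distinction (resource and label names are assumptions
     * for illustration): only the labels set here are managed by this program, while
     * `effectiveLabels` also reflects provider defaults and labels set elsewhere.
     *
     * ```typescript
     * import * as gcp from "@pulumi/gcp";
     *
     * const app = new gcp.dataproc.GdcSparkApplication("labeled-app", {
     *     sparkApplicationId: "labeled-app",
     *     serviceinstance: "my-instance",
     *     location: "us-west2",
     *     labels: { team: "data-eng" }, // only this key is managed here
     * });
     * export const allLabels = app.effectiveLabels; // includes provider default labels too
     * ```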
     */
    readonly labels: pulumi.Output<{
        [key: string]: string;
    } | undefined>;
    /**
     * The location of the spark application.
     */
    readonly location: pulumi.Output<string>;
    /**
     * URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
     */
    readonly monitoringEndpoint: pulumi.Output<string>;
    /**
     * Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
     */
    readonly name: pulumi.Output<string>;
    /**
     * The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
     */
    readonly namespace: pulumi.Output<string | undefined>;
    /**
     * An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
     */
    readonly outputUri: pulumi.Output<string>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    readonly project: pulumi.Output<string>;
    /**
     * Application-specific properties.
     */
    readonly properties: pulumi.Output<{
        [key: string]: string;
    } | undefined>;
    /**
     * The combination of labels configured directly on the resource
     * and default labels configured on the provider.
     */
    readonly pulumiLabels: pulumi.Output<{
        [key: string]: string;
    }>;
    /**
     * Represents the PySparkApplicationConfig.
     * Structure is documented below.
     */
    readonly pysparkApplicationConfig: pulumi.Output<outputs.dataproc.GdcSparkApplicationPysparkApplicationConfig | undefined>;
    /**
     * Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
     */
    readonly reconciling: pulumi.Output<boolean>;
    /**
     * The ID of the service instance to which this spark application belongs.
     */
    readonly serviceinstance: pulumi.Output<string>;
    /**
     * Represents the SparkApplicationConfig.
     * Structure is documented below.
     */
    readonly sparkApplicationConfig: pulumi.Output<outputs.dataproc.GdcSparkApplicationSparkApplicationConfig | undefined>;
    /**
     * The ID of the application.
     */
    readonly sparkApplicationId: pulumi.Output<string>;
    /**
     * Represents the SparkRApplicationConfig.
     * Structure is documented below.
     */
    readonly sparkRApplicationConfig: pulumi.Output<outputs.dataproc.GdcSparkApplicationSparkRApplicationConfig | undefined>;
    /**
     * Represents the SparkSqlApplicationConfig.
     * Structure is documented below.
     */
    readonly sparkSqlApplicationConfig: pulumi.Output<outputs.dataproc.GdcSparkApplicationSparkSqlApplicationConfig | undefined>;
    /**
     * The current state.
     * Possible values:
     * * `STATE_UNSPECIFIED`
     * * `PENDING`
     * * `RUNNING`
     * * `CANCELLING`
     * * `CANCELLED`
     * * `SUCCEEDED`
     * * `FAILED`
     */
    readonly state: pulumi.Output<string>;
    /**
     * A message explaining the current state.
     */
    readonly stateMessage: pulumi.Output<string>;
    /**
     * System-generated unique identifier for this application, formatted as a UUID4.
     */
    readonly uid: pulumi.Output<string>;
    /**
     * The timestamp when the resource was most recently updated.
     */
    readonly updateTime: pulumi.Output<string>;
    /**
     * The Dataproc version of this application.
     */
    readonly version: pulumi.Output<string>;
    /**
     * Create a GdcSparkApplication resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
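     *
     * A minimal sketch of consuming this resource's computed outputs after creation
     * (the resource names below are assumptions for illustration):
     *
     * ```typescript
     * import * as gcp from "@pulumi/gcp";
     *
     * const app = new gcp.dataproc.GdcSparkApplication("my-app", {
     *     sparkApplicationId: "my-app",
     *     serviceinstance: "my-instance", // illustrative service instance ID
     *     location: "us-west2",
     *     sparkApplicationConfig: {
     *         mainClass: "org.apache.spark.examples.SparkPi",
     *         jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
     *     },
     * });
     * // Server-computed fields are available as outputs once the app is created.
     * export const appState = app.state;
     * export const appOutputUri = app.outputUri;
     * ```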
     */
    constructor(name: string, args: GdcSparkApplicationArgs, opts?: pulumi.CustomResourceOptions);
}
/**
 * Input properties used for looking up and filtering GdcSparkApplication resources.
 */
export interface GdcSparkApplicationState {
    /**
     * The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effectiveAnnotations` for all of the annotations present on the resource.
     */
    annotations?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * An ApplicationEnvironment from which to inherit configuration properties.
     */
    applicationEnvironment?: pulumi.Input<string>;
    /**
     * The timestamp when the resource was created.
     */
    createTime?: pulumi.Input<string>;
    /**
     * List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
     */
    dependencyImages?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * User-provided human-readable name to be used in user interfaces.
     */
    displayName?: pulumi.Input<string>;
    effectiveAnnotations?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
     */
    effectiveLabels?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * The labels to associate with this application. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effectiveLabels` for all of the labels present on the resource.
     */
    labels?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * The location of the spark application.
     */
    location?: pulumi.Input<string>;
    /**
     * URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
     */
    monitoringEndpoint?: pulumi.Input<string>;
    /**
     * Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
     */
    name?: pulumi.Input<string>;
    /**
     * The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
     */
    namespace?: pulumi.Input<string>;
    /**
     * An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
     */
    outputUri?: pulumi.Input<string>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * Application-specific properties.
     */
    properties?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * The combination of labels configured directly on the resource
     * and default labels configured on the provider.
     */
    pulumiLabels?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * Represents the PySparkApplicationConfig.
     * Structure is documented below.
     */
    pysparkApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationPysparkApplicationConfig>;
    /**
     * Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
     */
    reconciling?: pulumi.Input<boolean>;
    /**
     * The ID of the service instance to which this spark application belongs.
     */
    serviceinstance?: pulumi.Input<string>;
    /**
     * Represents the SparkApplicationConfig.
     * Structure is documented below.
     */
    sparkApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkApplicationConfig>;
    /**
     * The ID of the application.
     */
    sparkApplicationId?: pulumi.Input<string>;
    /**
     * Represents the SparkRApplicationConfig.
     * Structure is documented below.
     */
    sparkRApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkRApplicationConfig>;
    /**
     * Represents the SparkSqlApplicationConfig.
     * Structure is documented below.
     */
    sparkSqlApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkSqlApplicationConfig>;
    /**
     * The current state.
     * Possible values:
     * * `STATE_UNSPECIFIED`
     * * `PENDING`
     * * `RUNNING`
     * * `CANCELLING`
     * * `CANCELLED`
     * * `SUCCEEDED`
     * * `FAILED`
     */
    state?: pulumi.Input<string>;
    /**
     * A message explaining the current state.
     */
    stateMessage?: pulumi.Input<string>;
    /**
     * System-generated unique identifier for this application, formatted as a UUID4.
     */
    uid?: pulumi.Input<string>;
    /**
     * The timestamp when the resource was most recently updated.
     */
    updateTime?: pulumi.Input<string>;
    /**
     * The Dataproc version of this application.
     */
    version?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a GdcSparkApplication resource.
 */
export interface GdcSparkApplicationArgs {
    /**
     * The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effectiveAnnotations` for all of the annotations present on the resource.
     */
    annotations?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * An ApplicationEnvironment from which to inherit configuration properties.
     */
    applicationEnvironment?: pulumi.Input<string>;
    /**
     * List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
     */
    dependencyImages?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * User-provided human-readable name to be used in user interfaces.
     */
    displayName?: pulumi.Input<string>;
    /**
     * The labels to associate with this application. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effectiveLabels` for all of the labels present on the resource.
     */
    labels?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * The location of the spark application.
     */
    location: pulumi.Input<string>;
    /**
     * The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
     */
    namespace?: pulumi.Input<string>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * Application-specific properties.
     */
    properties?: pulumi.Input<{
        [key: string]: pulumi.Input<string>;
    }>;
    /**
     * Represents the PySparkApplicationConfig.
     * Structure is documented below.
     */
    pysparkApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationPysparkApplicationConfig>;
    /**
     * The ID of the service instance to which this spark application belongs.
     */
    serviceinstance: pulumi.Input<string>;
    /**
     * Represents the SparkApplicationConfig.
     * Structure is documented below.
     */
    sparkApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkApplicationConfig>;
    /**
     * The ID of the application.
     */
    sparkApplicationId: pulumi.Input<string>;
    /**
     * Represents the SparkRApplicationConfig.
     * Structure is documented below.
     */
    sparkRApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkRApplicationConfig>;
    /**
     * Represents the SparkSqlApplicationConfig.
     * Structure is documented below.
     */
    sparkSqlApplicationConfig?: pulumi.Input<inputs.dataproc.GdcSparkApplicationSparkSqlApplicationConfig>;
    /**
     * The Dataproc version of this application.
     */
    version?: pulumi.Input<string>;
}