26 | 26 | "endpointUrl": "https://dataproc.europe-west9.rep.googleapis.com/",
27 | 27 | "location": "europe-west9"
28 | 28 | },
   | 29 | + {
   | 30 | + "description": "Regional Endpoint",
   | 31 | + "endpointUrl": "https://dataproc.us-central1.rep.googleapis.com/",
   | 32 | + "location": "us-central1"
   | 33 | + },
29 | 34 | {
30 | 35 | "description": "Regional Endpoint",
31 | 36 | "endpointUrl": "https://dataproc.me-central2.rep.googleapis.com/",

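The new us-central1 entry follows the same regional-endpoint (REP) pattern as the surrounding entries. A minimal sketch of how a client might target it, assuming the google-api-python-client package; the project ID is a placeholder:

```python
# Sketch, not the canonical setup: route requests to the new
# us-central1 regional endpoint. "my-project" is hypothetical.
from googleapiclient.discovery import build

dataproc = build(
    "dataproc",
    "v1",
    client_options={"api_endpoint": "https://dataproc.us-central1.rep.googleapis.com/"},
)

# e.g. list batch workloads in the matching region
batches = (
    dataproc.projects()
    .locations()
    .batches()
    .list(parent="projects/my-project/locations/us-central1")
    .execute()
)
```
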
3072 | 3077 | }
3073 | 3078 | }
3074 | 3079 | },
3075 |      | - "revision": "20240617",
     | 3080 | + "revision": "20240821",
3076 | 3081 | "rootUrl": "https://dataproc.googleapis.com/",
3077 | 3082 | "schemas": {
3078 | 3083 | "AcceleratorConfig": {

6375 | 6380 | "description": "Optional. The session template used by the session.Only resource names, including project ID and location, are valid.Example: * https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/sessionTemplates/[template_id] * projects/[project_id]/locations/[dataproc_region]/sessionTemplates/[template_id]The template must be in the same project and Dataproc region as the session.",
6376 | 6381 | "type": "string"
6377 | 6382 | },
     | 6383 | + "sparkConnectSession": {
     | 6384 | + "$ref": "SparkConnectConfig",
     | 6385 | + "description": "Optional. Spark connect session config."
     | 6386 | + },
6378 | 6387 | "state": {
6379 | 6388 | "description": "Output only. A state of the session.",
6380 | 6389 | "enum": [

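This adds the new optional field to the Session schema; the SessionTemplate schema below gets the identical field. A hedged sketch of a sessions.create call that sets it, assuming the discovery-based Python client; all IDs are placeholders, and the empty object matches the SparkConnectConfig schema introduced later in this diff:

```python
# Sketch: create an interactive session with the new optional field set.
# SparkConnectConfig has no properties, so an empty object is a full config.
from googleapiclient.discovery import build

dataproc = build("dataproc", "v1")

operation = (
    dataproc.projects()
    .locations()
    .sessions()
    .create(
        parent="projects/my-project/locations/us-central1",  # placeholder IDs
        sessionId="my-spark-connect-session",
        body={
            "name": "projects/my-project/locations/us-central1/sessions/my-spark-connect-session",
            "sparkConnectSession": {},  # presence of the key is the opt-in
        },
    )
    .execute()  # returns a long-running Operation to poll
)
```
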
6567 | 6576 | "$ref": "RuntimeConfig",
6568 | 6577 | "description": "Optional. Runtime configuration for session execution."
6569 | 6578 | },
     | 6579 | + "sparkConnectSession": {
     | 6580 | + "$ref": "SparkConnectConfig",
     | 6581 | + "description": "Optional. Spark connect session config."
     | 6582 | + },
6570 | 6583 | "updateTime": {
6571 | 6584 | "description": "Output only. The time the template was last updated.",
6572 | 6585 | "format": "google-datetime",

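The same field lands on SessionTemplate, so the opt-in can happen once at the template level. A sketch under the same assumptions as above, with placeholder names:

```python
# Sketch: sessions created from this template inherit Spark Connect.
from googleapiclient.discovery import build

dataproc = build("dataproc", "v1")

template = (
    dataproc.projects()
    .locations()
    .sessionTemplates()
    .create(
        parent="projects/my-project/locations/us-central1",  # placeholder
        body={
            "name": "projects/my-project/locations/us-central1/sessionTemplates/spark-connect-template",
            "sparkConnectSession": {},
        },
    )
    .execute()
)
```
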
6616 | 6629 | "id": "SoftwareConfig",
6617 | 6630 | "properties": {
6618 | 6631 | "imageVersion": {
6619 |      | - "description": "Optional. The version of software inside the cluster. It must be one of the supported Dataproc Versions (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as \"1.2\" (including a subminor version, such as \"1.2.29\"), or the \"preview\" version (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.",
     | 6632 | + "description": "Optional. The version of software inside the cluster. It must be one of the supported Dataproc Versions (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported-dataproc-image-versions), such as \"1.2\" (including a subminor version, such as \"1.2.29\"), or the \"preview\" version (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.",
6620 | 6633 | "type": "string"
6621 | 6634 | },
6622 | 6635 | "optionalComponents": {

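Only the docs-link anchor changes here, but this field is the usual place to pin an image, per the description. A minimal sketch of setting imageVersion on a cluster, assuming the discovery-based Python client; project, region, cluster name, and version are placeholders:

```python
# Sketch: pinning the image described by SoftwareConfig.imageVersion.
# "2.2" could also be a subminor version such as "2.2.29", or "preview".
from googleapiclient.discovery import build

dataproc = build("dataproc", "v1")

operation = (
    dataproc.projects()
    .regions()
    .clusters()
    .create(
        projectId="my-project",  # placeholder
        region="us-central1",
        body={
            "clusterName": "my-cluster",  # placeholder
            "config": {"softwareConfig": {"imageVersion": "2.2"}},
        },
    )
    .execute()
)
```
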
6641 | 6654 | ],
6642 | 6655 | "enumDescriptions": [
6643 | 6656 | "Unspecified component. Specifying this will cause Cluster creation to fail.",
6644 |      | - "The Anaconda python distribution. The Anaconda component is not supported in the Dataproc 2.0 image. The 2.0 image is pre-installed with Miniconda.",
     | 6657 | + "The Anaconda component is no longer supported or applicable to supported Dataproc on Compute Engine image versions (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-version-clusters#supported-dataproc-image-versions). It cannot be activated on clusters created with supported Dataproc on Compute Engine image versions.",
6645 | 6658 | "Docker",
6646 | 6659 | "The Druid query engine. (alpha)",
6647 | 6660 | "Flink",

6713 | 6726 | },
6714 | 6727 | "type": "object"
6715 | 6728 | },
     | 6729 | + "SparkConnectConfig": {
     | 6730 | + "description": "Spark connect configuration for an interactive session.",
     | 6731 | + "id": "SparkConnectConfig",
     | 6732 | + "properties": {},
     | 6733 | + "type": "object"
     | 6734 | + },
6716 | 6735 | "SparkHistoryServerConfig": {
6717 | 6736 | "description": "Spark History Server configuration for the workload.",
6718 | 6737 | "id": "SparkHistoryServerConfig",

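SparkConnectConfig carries no fields ("properties": {}), so it acts as a presence marker: setting it, even empty, opts a session in to Spark Connect. A sketch of checking it on a fetched session, under the same client assumptions and placeholder IDs as the earlier sketches:

```python
# Sketch: because SparkConnectConfig is an empty message, the field's
# presence on a fetched Session is the whole signal.
from googleapiclient.discovery import build

dataproc = build("dataproc", "v1")

session = (
    dataproc.projects()
    .locations()
    .sessions()
    .get(name="projects/my-project/locations/us-central1/sessions/my-spark-connect-session")
    .execute()
)
if "sparkConnectSession" in session:
    print("Spark Connect session")
```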