feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
"""Manage Service Instance resources."""
from googlecloudsdk.calliope import base
from surface.dataproc_gdc.service_instances import _init_extensions as extensions
# ALPHA-track command group for Dataproc GDC service instances. All behavior
# is inherited from the hand-maintained extension base class imported above;
# this shim only attaches the release-track and visibility decorators.
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
@base.Autogenerated  # produced by the CLI autogen pipeline (see AUTOGEN_CLI_VERSION header)
@base.Hidden  # not listed in help output
@base.DefaultUniverseOnly  # NOTE(review): restricted to the default universe — confirm intent
class ServiceInstancesAlpha(extensions.ServiceInstancesAlpha):
  """Manage Service Instance resources."""
# GA-track command group for Dataproc GDC service instances; inherits all
# behavior from the hand-maintained extension base class.
# NOTE(review): unlike the ALPHA group in this file, this class does not carry
# @base.Autogenerated — confirm with the generator whether that is intentional.
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.Hidden  # not listed in help output
@base.DefaultUniverseOnly
class ServiceInstancesGA(extensions.ServiceInstancesGa):
  """Manage Service Instance resources."""

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
"""File to add optional custom code to extend __init__.py."""
from googlecloudsdk.calliope import base
# Hand-editable extension point subclassed by the autogenerated ALPHA surface
# group; custom behavior for the ALPHA track goes here.
class ServiceInstancesAlpha(base.Group):
  """Optional no-auto-generated code for ALPHA."""

  # Places the group under the Data Analytics category in `gcloud` help.
  category = base.DATA_ANALYTICS_CATEGORY
# Hand-editable extension point for a BETA surface group. NOTE(review): no
# BETA surface class subclasses this in the generated __init__.py — it exists
# so a BETA track can be added without editing the generated file.
class ServiceInstancesBeta(base.Group):
  """Optional no-auto-generated code for BETA."""

  # Places the group under the Data Analytics category in `gcloud` help.
  category = base.DATA_ANALYTICS_CATEGORY
# Hand-editable extension point subclassed by the autogenerated GA surface
# group; custom behavior for the GA track goes here.
class ServiceInstancesGa(base.Group):
  """Optional no-auto-generated code for GA."""

  # Places the group under the Data Analytics category in `gcloud` help.
  category = base.DATA_ANALYTICS_CATEGORY

View File

@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Create dataproc gdc service instance.
description: |
Create dataproc gdc service instance.
examples: |-
Creates service instance.
$ {command} my-instance --location=my-location --project=my-project --gdce-cluster=my-cluster
arguments:
params:
- group:
mutex: true
help_text: |-
Arguments for the target.
params:
- group:
required: false
help_text: |2-
Gdce cluster information.
params:
- arg_name: gdce-cluster
help_text: |-
Gdce cluster resource id.
is_positional: false
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.default_resources:project_location_cluster
resource_method_params:
serviceInstance.gdceCluster.gdceCluster: '{__relative_name__}'
required: true
- help_text: |-
Identifier. The name of the service instance.
is_positional: true
request_id_field: serviceInstanceId
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance
required: true
- arg_name: display-name
api_field: serviceInstance.displayName
required: false
repeated: false
help_text: |-
User-provided human-readable name to be used in user interfaces.
- arg_name: labels
api_field: serviceInstance.labels
required: false
repeated: true
help_text: |-
The labels to associate with this service instance. Labels may be used for
filtering and billing tracking.
spec:
- api_field: key
- api_field: value
- arg_name: annotations
api_field: serviceInstance.annotations
required: false
repeated: true
help_text: |-
The annotations to associate with this service instance. Annotations may be
used to store client information, but are not used by the server.
spec:
- api_field: key
- api_field: value
- arg_name: service-account
api_field: serviceInstance.serviceAccount
required: false
repeated: false
help_text: |-
Requested service account to associate with ServiceInstance.
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances
async:
collection:
- dataprocgdc.projects.locations.operations

View File

@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Delete dataproc gdc service instance.
description: |
Delete dataproc gdc service instance.
examples: |-
Deletes service instance.
$ {command} my-instance --location=my-location --project=my-project
arguments:
params:
- help_text: |-
Name of the resource
is_positional: true
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance
required: true
- arg_name: force
api_field: force
action: store_true
required: false
type: bool
help_text: |-
If set to true, any jobs and job environments associated with this service
instance
will also be deleted. If false (default) the service instance can only be
deleted if there are no job environments or jobs associated with the
service instance.
- arg_name: allow-missing
api_field: allowMissing
action: store_true
required: false
type: bool
help_text: |-
If set to true, and the service instance is not found, the request will
succeed but no action will be taken on the server
- arg_name: etag
api_field: etag
required: false
repeated: false
help_text: |-
The etag of the service instance. If this is provided, it must
match the server etag.
- arg_name: request-id
api_field: requestId
required: false
repeated: false
help_text: |-
An optional request ID to identify requests. Specify a unique request ID
so that if you must retry your request, the server will know to ignore
the request if it has already been completed. The server will guarantee
that for at least 60 minutes after the first request.
For example, consider a situation where you make an initial request and the
request times out. If you make the request again with the same request
ID, the server can check if original operation with the same request ID
was received, and if so, will ignore the second request. This prevents
clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is
not supported (00000000-0000-0000-0000-000000000000).
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances
async:
collection:
- dataprocgdc.projects.locations.operations

View File

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Get dataproc gdc service instance.
description: |
Get dataproc gdc service instance.
examples: |-
Gets service instance.
$ {command} my-instance --location=my-location --project=my-project
arguments:
params:
- help_text: |-
Name of the resource
is_positional: true
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance
required: true
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances

View File

@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: List dataproc gdc service instances.
description: |
List dataproc gdc service instances.
examples: |-
Lists service instances with given location and project.
$ {command} --location=my-location --project=my-project
arguments:
params:
- help_text: |-
Parent value for ListServiceInstancesRequest
is_positional: false
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location
required: true
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances
response:
id_field: name

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
"""Manage Application Environment resources."""
from googlecloudsdk.calliope import base
from surface.dataproc_gdc.service_instances.application_environments import _init_extensions as extensions
# ALPHA-track command group for application environments under a service
# instance; all behavior is inherited from the hand-maintained extension class.
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
@base.Autogenerated  # produced by the CLI autogen pipeline (see AUTOGEN_CLI_VERSION header)
@base.Hidden  # not listed in help output
class ApplicationEnvironmentsAlpha(extensions.ApplicationEnvironmentsAlpha):
  """Manage Application Environment resources."""

View File

@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
"""File to add optional custom code to extend __init__.py."""
from googlecloudsdk.calliope import base
# Hand-editable extension point subclassed by the autogenerated ALPHA surface
# group; custom behavior for the ALPHA track goes here.
class ApplicationEnvironmentsAlpha(base.Group):
  """Optional no-auto-generated code for ALPHA."""
# Hand-editable extension point for a BETA surface group. NOTE(review): no
# BETA surface class subclasses this in the generated __init__.py — kept so a
# BETA track can be added without touching generated code.
class ApplicationEnvironmentsBeta(base.Group):
  """Optional no-auto-generated code for BETA."""
# Hand-editable extension point for a GA surface group. NOTE(review): no GA
# surface class subclasses this in the generated __init__.py — kept so a GA
# track can be added without touching generated code.
class ApplicationEnvironmentsGa(base.Group):
  """Optional no-auto-generated code for GA."""

View File

@@ -0,0 +1,100 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Create dataproc gdc application environment controller.
description: |
Create dataproc gdc application environment controller.
examples: |-
Creates application environment controller.
$ {command} my-environment --location=my-location --project=my-project --instance=my-instance --namespace=default
arguments:
params:
- help_text: |-
Identifier. Fields 1-6 should exist for all declarative friendly resources per
aip.dev/148 The name of the application environment.
Format:
projects/{project}/locations/{location}/serviceInstances/{service_instance}/applicationEnvironments/{application_environment_id}
is_positional: true
request_id_field: applicationEnvironmentId
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance_application_environment
required: true
- arg_name: display-name
api_field: applicationEnvironment.displayName
required: false
repeated: false
help_text: |-
User-provided human-readable name to be used in user interfaces.
- arg_name: labels
api_field: applicationEnvironment.labels
required: false
repeated: true
help_text: |-
The labels to associate with this application environment. Labels may be
used for filtering and billing tracking.
spec:
- api_field: key
- api_field: value
- arg_name: annotations
api_field: applicationEnvironment.annotations
required: false
repeated: true
help_text: |-
The annotations to associate with this application environment. Annotations
may be used to store client information, but are not used by the server.
spec:
- api_field: key
- api_field: value
- group:
required: false
help_text: |-
Represents the SparkApplicationEnvironmentConfig.
params:
- arg_name: spark-application-environment-config-default-properties
api_field: applicationEnvironment.sparkApplicationEnvironmentConfig.defaultProperties
required: false
repeated: true
help_text: |-
A map of default Spark properties to apply to workloads in this application
environment. These defaults may be overridden by per-application
properties.
spec:
- api_field: key
- api_field: value
- arg_name: spark-application-environment-config-default-version
api_field: applicationEnvironment.sparkApplicationEnvironmentConfig.defaultVersion
required: false
repeated: false
help_text: |-
The default Dataproc version to use for applications submitted to this
application environment
- arg_name: namespace
api_field: applicationEnvironment.namespace
required: false
repeated: false
help_text: |-
The name of the namespace in which to create this ApplicationEnvironment.
This namespace must already exist in the cluster
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances.applicationEnvironments

View File

@@ -0,0 +1,73 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Delete dataproc gdc application environment controller.
description: |
Delete dataproc gdc application environment controller.
examples: |-
Deletes application environment controller.
$ {command} my-environment --location=my-location --project=my-project --instance=my-instance
arguments:
params:
- help_text: |-
The name of the application to delete.
is_positional: true
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance_application_environment
required: true
- arg_name: allow-missing
api_field: allowMissing
action: store_true
required: false
type: bool
help_text: |-
If set to true, and the application is not found, the request will succeed
but no action will be taken on the server
- arg_name: etag
api_field: etag
required: false
repeated: false
help_text: |-
The etag of the application. If this is provided, it must match
the server etag.
- arg_name: request-id
api_field: requestId
required: false
repeated: false
help_text: |-
An optional request ID to identify requests. Specify a unique request ID
so that if you must retry your request, the server will know to ignore
the request if it has already been completed. The server will guarantee
      that for at least 60 minutes after the first request.
For example, consider a situation where you make an initial request and the
request times out. If you make the request again with the same request
ID, the server can check if original operation with the same request ID
was received, and if so, will ignore the second request. This prevents
clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is
not supported (00000000-0000-0000-0000-000000000000).
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances.applicationEnvironments

View File

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Get dataproc gdc application environment controller.
description: |
Get dataproc gdc application environment controller.
examples: |-
Gets application environment controller.
$ {command} my-environment --location=my-location --project=my-project --instance=my-instance
arguments:
params:
- help_text: |-
Name of the resource
is_positional: true
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance_application_environment
required: true
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances.applicationEnvironments

View File

@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: List dataproc gdc application environment controllers.
description: |
List dataproc gdc application environment controllers.
examples: |-
    Lists application environment controllers.
$ {command} --location=my-location --project=my-project --instance=my-instance
arguments:
params:
- help_text: |-
      Parent value for ListApplicationEnvironmentsRequest
is_positional: false
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance
required: true
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances.applicationEnvironments
response:
id_field: name

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
- release_tracks:
- ALPHA
auto_generated: true
hidden: true
help_text:
brief: Update dataproc gdc application environment controller.
description: |
Update dataproc gdc application environment controller.
examples: |-
Updates application environment controller.
$ {command} my-environment --location=my-location --project=my-project --instance=my-instance
arguments:
params:
- help_text: |-
Identifier. Fields 1-6 should exist for all declarative friendly resources per
aip.dev/148 The name of the application environment.
Format:
projects/{project}/locations/{location}/serviceInstances/{service_instance}/applicationEnvironments/{application_environment_id}
is_positional: true
resource_spec: !REF googlecloudsdk.command_lib.dataproc_gdc.v1alpha1_resources:project_location_service_instance_application_environment
required: true
- arg_name: display-name
api_field: applicationEnvironment.displayName
required: false
repeated: false
help_text: |-
User-provided human-readable name to be used in user interfaces.
- arg_name: labels
api_field: applicationEnvironment.labels
required: false
repeated: true
help_text: |-
The labels to associate with this application environment. Labels may be
used for filtering and billing tracking.
clearable: true
spec:
- api_field: key
- api_field: value
- arg_name: annotations
api_field: applicationEnvironment.annotations
required: false
repeated: true
help_text: |-
The annotations to associate with this application environment. Annotations
may be used to store client information, but are not used by the server.
clearable: true
spec:
- api_field: key
- api_field: value
- group:
required: false
help_text: |-
Represents the SparkApplicationEnvironmentConfig.
params:
- arg_name: spark-application-environment-config-default-properties
api_field: applicationEnvironment.sparkApplicationEnvironmentConfig.defaultProperties
required: false
repeated: true
help_text: |-
A map of default Spark properties to apply to workloads in this application
environment. These defaults may be overridden by per-application
properties.
clearable: true
spec:
- api_field: key
- api_field: value
- arg_name: spark-application-environment-config-default-version
api_field: applicationEnvironment.sparkApplicationEnvironmentConfig.defaultVersion
required: false
repeated: false
help_text: |-
The default Dataproc version to use for applications submitted to this
application environment
- arg_name: namespace
api_field: applicationEnvironment.namespace
required: false
repeated: false
help_text: |-
The name of the namespace in which to create this ApplicationEnvironment.
This namespace must already exist in the cluster
request:
api_version: v1alpha1
collection:
- dataprocgdc.projects.locations.serviceInstances.applicationEnvironments
update:
read_modify_update: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This file is autogenerated and should not be edited by hand.
# AUTOGEN_CLI_VERSION: HEAD
_PARTIALS_: true

View File

@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for managing Spark Applciations on Dataproc GDC service instances."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Hidden, default-universe-only group exposed on the ALPHA and GA tracks.
@base.Hidden
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class SparkApplications(base.Group):
  """Manage Dataproc GDC Spark Applications.

  Parent command group; it defines no flags of its own and only anchors the
  per-application subcommands beneath it.
  """

  # Surfaces this group under the Data Analytics section of `gcloud help`.
  category = base.DATA_ANALYTICS_CATEGORY

View File

@@ -0,0 +1,40 @@
- release_tracks: [ALPHA, GA]
help_text:
brief: Delete a Dataproc GDC spark application.
description: Delete a Dataproc GDC spark application.
examples: |
To delete a Dataproc GDC spark application named 'my-application' in 'my-instance' service instance in project `test-project` in `us-central1`, run:
$ {command} my-application --service-instance=my-instance --project=test-project --location=us-central1
request:
ALPHA:
api_version: v1alpha1
collection: dataprocgdc.projects.locations.serviceInstances.sparkApplications
method: delete
arguments:
resource:
help_text: |
The Dataproc GDC spark application to delete.
spec: !REF googlecloudsdk.command_lib.dataproc_gdc.resources:spark_application
params:
- api_field: allowMissing
arg_name: allow-missing
help_text: |
If set to true, and the service instance is not found, the request will
succeed but no action will be taken on the server
- api_field: etag
arg_name: etag
help_text: |
        Optional. The etag of the spark application. If this is provided, it must
        match the server etag.
- api_field: requestId
arg_name: request-id
help_text: |
        An optional request ID to identify requests. If the service receives two identical
        spark application delete requests with the same request_id, the second request is
ignored and the operation that corresponds to the first request is returned for both.
The request ID must be a valid UUID with the exception that zero UUID is
not supported (00000000-0000-0000-0000-000000000000).
async:
collection: dataprocgdc.projects.locations.operations

View File

@@ -0,0 +1,18 @@
- release_tracks: [ALPHA, GA]
help_text:
brief: Describe a Dataproc GDC spark application
description: Describe a Dataproc GDC spark application
examples: |
To describe a Dataproc GDC spark application named 'my-application' in service instance 'my-instance' in project `test-project` in `us-central1`, run:
$ {command} my-application --service-instance=my-instance --project=test-project --location=us-central1
request:
ALPHA:
api_version: v1alpha1
collection: dataprocgdc.projects.locations.serviceInstances.sparkApplications
method: get
arguments:
resource:
help_text: |
The Dataproc GDC spark application to describe.
spec: !REF googlecloudsdk.command_lib.dataproc_gdc.resources:spark_application

View File

@@ -0,0 +1,27 @@
- release_tracks: [ALPHA, GA]
help_text:
brief: List Dataproc GDC Spark applications.
description: |
List all Dataproc GDC Spark applications in a service instance.
examples: |
To list Dataproc Spark Applications in service instance in project `test-project` in `us-central1`, run:
$ {command} --project=test-project --location=us-central1 --service-instance=test-service-instance
request:
collection: dataprocgdc.projects.locations.serviceInstances.sparkApplications
ALPHA:
api_version: v1alpha1
response:
id_field: name
arguments:
resource:
help_text: service instance to list all Spark applications.
spec: !REF googlecloudsdk.command_lib.dataproc_gdc.resources:service_instance
output:
format: |
table(
name.basename():label=NAME,
displayName:label=DISPLAY_NAME,
createTime:label=CREATE_TIME,
state:label=STATE
)

View File

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for Submitting Spark Applciations on Dataproc GDC service instances."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
# Hidden, default-universe-only group exposed on the ALPHA and GA tracks.
@base.Hidden
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class SparkApplications(base.Group):
  """Submit Dataproc GDC Spark Applications."""

  # Surfaces this group under the Data Analytics section of `gcloud help`.
  category = base.DATA_ANALYTICS_CATEGORY

  @staticmethod
  def Args(parser):
    """Registers flags shared by every submit command in this group.

    Args:
      parser: argparse-style parser supplied by calliope.
    """
    # Comma-separated on the command line; ArgList yields a Python list.
    parser.add_argument(
        '--dependency-images',
        help=(
            'Comma separated list of images containing dependencies for the'
            ' Spark Application.'
        ),
        metavar='IMAGE',
        type=arg_parsers.ArgList(),
    )

View File

@@ -0,0 +1,164 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud dataproc-gdc instances create` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
from apitools.base.py import encoding
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc_gdc.spark_applications import basecreate as baseSparkApplication
from googlecloudsdk.command_lib.util.args import labels_util
DATAPROCGDC_API_NAME = 'dataprocgdc'
VERSION_MAP = {
base.ReleaseTrack.ALPHA: 'v1alpha1',
base.ReleaseTrack.GA: 'v1',
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class PySpark(baseSparkApplication.BaseGDCSparkApplicationCommand):
  """Create a Dataproc GDC PySpark application.

  A PySpark application that runs locally on the Dataproc
  GDC cluster.
  """

  detailed_help = {'EXAMPLES': """\
          To create a Dataproc GDC pyspark application with name
          `my-application` in location `us-central1` running on a service
          instance `my-instance`, run:

          $ {command} my-application --service-instance=my-instance
          --location=us-central1 --project=test-project
          """}

  @staticmethod
  def Args(parser):
    """Registers PySpark-specific flags on top of the shared base flags.

    Args:
      parser: argparse-style parser supplied by calliope.
    """
    baseSparkApplication.BaseGDCSparkApplicationCommand.Args(parser)
    # Everything remaining on the command line is forwarded verbatim to the
    # driver as positional job arguments.
    parser.add_argument(
        'job_args',
        nargs=argparse.REMAINDER,
        help='Arguments to pass to the driver.',
    )
    parser.add_argument(
        '--py-file',
        help='Main .py file to run as the driver.',
    )
    parser.add_argument(
        '--py-files',
        type=arg_parsers.ArgList(),
        metavar='PY_FILE',
        default=[],
        help=(
            'Comma separated list of Python files to be provided to the job. '
            'Must be one of the following file formats '
            '".py, .zip, or .egg".'
        ),
    )
    parser.add_argument(
        '--jars',
        type=arg_parsers.ArgList(),
        metavar='JAR',
        default=[],
        help=(
            'Comma separated list of jar files to be provided to the '
            'executor and driver classpaths.'
        ),
    )
    parser.add_argument(
        '--files',
        type=arg_parsers.ArgList(),
        metavar='FILE',
        default=[],
        help=(
            'Comma separated list of files to be placed in the working '
            'directory of both the app driver and executors.'
        ),
    )
    parser.add_argument(
        '--archives',
        type=arg_parsers.ArgList(),
        metavar='ARCHIVE',
        default=[],
        help=(
            'Comma separated list of archives to be extracted into the working '
            'directory of each executor. '
            'Must be one of the following file formats: .zip, .tar, .tar.gz, '
            'or .tgz.'
        ),
    )

  def Run(self, args):
    """Builds the SparkApplication create request and submits it.

    Args:
      args: parsed command-line arguments for this invocation.
    """
    # API version is keyed off the release track: v1alpha1 (ALPHA) or v1 (GA).
    api_version = VERSION_MAP.get(self.ReleaseTrack())
    messages = apis.GetMessagesModule(DATAPROCGDC_API_NAME, api_version)
    # Resource references parsed from the declarative concept arguments.
    application_ref = args.CONCEPTS.application.Parse()
    application_environment_ref = args.CONCEPTS.application_environment.Parse()
    service_instance_ref = args.CONCEPTS.service_instance.Parse()
    # Convert the annotations dict flag into the proto map message.
    if args.annotations:
      annotations = encoding.DictToAdditionalPropertyMessage(
          args.annotations,
          messages.SparkApplication.AnnotationsValue,
          sort_items=True,
      )
    else:
      annotations = None
    # Only the short (unqualified) name of the application environment is sent.
    application_environment = None
    if application_environment_ref:
      application_environment = application_environment_ref.Name()
    spark_app_properties = None
    if args.properties:
      spark_app_properties = encoding.DictToAdditionalPropertyMessage(
          args.properties,
          messages.SparkApplication.PropertiesValue,
      )
    create_req = messages.DataprocgdcProjectsLocationsServiceInstancesSparkApplicationsCreateRequest(
        parent=service_instance_ref.RelativeName(),
        sparkApplication=messages.SparkApplication(
            applicationEnvironment=application_environment,
            displayName=args.display_name,
            labels=labels_util.ParseCreateArgs(
                args, messages.SparkApplication.LabelsValue
            ),
            annotations=annotations,
            namespace=args.namespace,
            properties=spark_app_properties,
            version=args.version,
            dependencyImages=args.dependency_images or [],
            pysparkApplicationConfig=messages.PySparkApplicationConfig(
                args=args.job_args or [],
                mainPythonFileUri=args.py_file,
                fileUris=args.files or [],
                jarFileUris=args.jars or [],
                pythonFileUris=args.py_files or [],
                archiveUris=args.archives or [],
            ),
        ),
    )
    # Submission (sync/async handling) is delegated to the shared base command.
    super().Submit(args, application_ref, create_req)

View File

@@ -0,0 +1,150 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud dataproc-gdc instances create` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
from apitools.base.py import encoding
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc_gdc.spark_applications import basecreate as baseSparkApplication
from googlecloudsdk.command_lib.util.args import labels_util
DATAPROCGDC_API_NAME = 'dataprocgdc'
VERSION_MAP = {
base.ReleaseTrack.ALPHA: 'v1alpha1',
base.ReleaseTrack.GA: 'v1',
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class Spark(baseSparkApplication.BaseGDCSparkApplicationCommand):
  """Create a Dataproc GDC Spark application.

  A Spark application that runs locally on the Dataproc
  GDC cluster.
  """

  detailed_help = {'EXAMPLES': """\
          To create a Dataproc GDC spark application with name
          `my-application` in location `us-central1` running on a service
          instance `my-instance`, run:

          $ {command} my-application --service-instance=my-instance
          --location=us-central1 --project=test-project
          """}

  @staticmethod
  def Args(parser):
    """Registers Spark-specific flags on top of the shared base flags.

    Args:
      parser: argparse-style parser supplied by calliope.
    """
    baseSparkApplication.BaseGDCSparkApplicationCommand.Args(parser)
    # Everything remaining on the command line is forwarded verbatim to the
    # driver as positional job arguments.
    parser.add_argument(
        'job_args',
        nargs=argparse.REMAINDER,
        help='Arguments to pass to the driver.')
    parser.add_argument(
        '--jar',
        help='The HCFS URI of the jar file that contains the main class.',
    )
    parser.add_argument(
        '--main-class',
        # Fix: the original string concatenation was missing a space between
        # 'contains' and 'the class', rendering as "containsthe class".
        help=(
            'The name of the driver main class. The jar file that contains '
            'the class must be in the classpath or specified in jar_file_uris'
        ),
    )
    parser.add_argument(
        '--jars',
        type=arg_parsers.ArgList(),
        metavar='JAR',
        default=[],
        help=('Comma separated list of jar files to be provided to the '
              'executor and driver classpaths.'))
    parser.add_argument(
        '--files',
        type=arg_parsers.ArgList(),
        metavar='FILE',
        default=[],
        help=('Comma separated list of files to be placed in the working '
              'directory of both the app driver and executors.'))
    parser.add_argument(
        '--archives',
        type=arg_parsers.ArgList(),
        metavar='ARCHIVE',
        default=[],
        help=(
            'Comma separated list of archives to be extracted into the working '
            'directory of each executor. '
            'Must be one of the following file formats: .zip, .tar, .tar.gz, '
            'or .tgz.'))

  def Run(self, args):
    """Builds the SparkApplication create request and submits it.

    Args:
      args: parsed command-line arguments for this invocation.
    """
    # API version is keyed off the release track: v1alpha1 (ALPHA) or v1 (GA).
    api_version = VERSION_MAP.get(self.ReleaseTrack())
    messages = apis.GetMessagesModule(DATAPROCGDC_API_NAME, api_version)
    # Resource references parsed from the declarative concept arguments.
    application_ref = args.CONCEPTS.application.Parse()
    application_environment_ref = args.CONCEPTS.application_environment.Parse()
    service_instance_ref = args.CONCEPTS.service_instance.Parse()
    # Convert the annotations dict flag into the proto map message.
    if args.annotations:
      annotations = encoding.DictToAdditionalPropertyMessage(
          args.annotations,
          messages.SparkApplication.AnnotationsValue,
          sort_items=True,
      )
    else:
      annotations = None
    # Only the short (unqualified) name of the application environment is sent.
    application_environment = None
    if application_environment_ref:
      application_environment = application_environment_ref.Name()
    spark_app_properties = None
    if args.properties:
      spark_app_properties = encoding.DictToAdditionalPropertyMessage(
          args.properties,
          messages.SparkApplication.PropertiesValue,
      )
    create_req = messages.DataprocgdcProjectsLocationsServiceInstancesSparkApplicationsCreateRequest(
        parent=service_instance_ref.RelativeName(),
        sparkApplication=messages.SparkApplication(
            applicationEnvironment=application_environment,
            displayName=args.display_name,
            labels=labels_util.ParseCreateArgs(
                args, messages.SparkApplication.LabelsValue
            ),
            annotations=annotations,
            namespace=args.namespace,
            properties=spark_app_properties,
            version=args.version,
            dependencyImages=args.dependency_images or [],
            sparkApplicationConfig=messages.SparkApplicationConfig(
                args=args.job_args or [],
                mainJarFileUri=args.jar,
                fileUris=args.files or [],
                jarFileUris=args.jars or [],
                mainClass=args.main_class,
                archiveUris=args.archives or [],
            ),
        ),
    )
    # Submission (sync/async handling) is delegated to the shared base command.
    super().Submit(args, application_ref, create_req)

View File

@@ -0,0 +1,143 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud dataproc-gdc instances create` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
from apitools.base.py import encoding
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc_gdc.spark_applications import basecreate as baseSparkApplication
from googlecloudsdk.command_lib.util.args import labels_util
DATAPROCGDC_API_NAME = 'dataprocgdc'
VERSION_MAP = {
base.ReleaseTrack.ALPHA: 'v1alpha1',
base.ReleaseTrack.GA: 'v1',
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class SparkR(baseSparkApplication.BaseGDCSparkApplicationCommand):
  """Create a Dataproc GDC SparkR application.

  A SparkR application that runs locally on the Dataproc
  GDC cluster.
  """

  # Fix: the docstring and examples were copy-pasted from the PySpark command
  # and wrongly described this as a "pyspark" application.
  detailed_help = {'EXAMPLES': """\
          To create a Dataproc GDC SparkR application with name
          `my-application` in location `us-central1` running on a service
          instance `my-instance`, run:

          $ {command} my-application --service-instance=my-instance
          --location=us-central1 --project=test-project
          """}

  @staticmethod
  def Args(parser):
    """Registers SparkR-specific flags on top of the shared base flags.

    Args:
      parser: argparse-style parser supplied by calliope.
    """
    baseSparkApplication.BaseGDCSparkApplicationCommand.Args(parser)
    # Everything remaining on the command line is forwarded verbatim to the
    # driver as positional job arguments.
    parser.add_argument(
        'job_args',
        nargs=argparse.REMAINDER,
        help='Arguments to pass to the driver.',
    )
    parser.add_argument(
        '--r-file',
        help=(
            'Required. The HCFS URI of the main R file to use as the'
            'driver. Must be a .R file.'
        ),
    )
    parser.add_argument(
        '--files',
        type=arg_parsers.ArgList(),
        metavar='FILE',
        default=[],
        help=(
            'Comma separated list of files to be placed in the working '
            'directory of both the app driver and executors.'
        ),
    )
    parser.add_argument(
        '--archives',
        type=arg_parsers.ArgList(),
        metavar='ARCHIVE',
        default=[],
        help=(
            'Comma separated list of archives to be extracted into the working '
            'directory of each executor. '
            'Must be one of the following file formats: .zip, .tar, .tar.gz, '
            'or .tgz.'
        ),
    )

  def Run(self, args):
    """Builds the SparkApplication create request and submits it.

    Args:
      args: parsed command-line arguments for this invocation.
    """
    # API version is keyed off the release track: v1alpha1 (ALPHA) or v1 (GA).
    api_version = VERSION_MAP.get(self.ReleaseTrack())
    messages = apis.GetMessagesModule(DATAPROCGDC_API_NAME, api_version)
    # Resource references parsed from the declarative concept arguments.
    application_ref = args.CONCEPTS.application.Parse()
    application_environment_ref = args.CONCEPTS.application_environment.Parse()
    service_instance_ref = args.CONCEPTS.service_instance.Parse()
    # Convert the annotations dict flag into the proto map message.
    if args.annotations:
      annotations = encoding.DictToAdditionalPropertyMessage(
          args.annotations,
          messages.SparkApplication.AnnotationsValue,
          sort_items=True,
      )
    else:
      annotations = None
    # Only the short (unqualified) name of the application environment is sent.
    application_environment = None
    if application_environment_ref:
      application_environment = application_environment_ref.Name()
    spark_app_properties = None
    if args.properties:
      spark_app_properties = encoding.DictToAdditionalPropertyMessage(
          args.properties,
          messages.SparkApplication.PropertiesValue,
      )
    create_req = messages.DataprocgdcProjectsLocationsServiceInstancesSparkApplicationsCreateRequest(
        parent=service_instance_ref.RelativeName(),
        sparkApplication=messages.SparkApplication(
            applicationEnvironment=application_environment,
            displayName=args.display_name,
            labels=labels_util.ParseCreateArgs(
                args, messages.SparkApplication.LabelsValue
            ),
            annotations=annotations,
            namespace=args.namespace,
            properties=spark_app_properties,
            version=args.version,
            dependencyImages=args.dependency_images or [],
            sparkRApplicationConfig=messages.SparkRApplicationConfig(
                args=args.job_args or [],
                fileUris=args.files or [],
                archiveUris=args.archives or [],
                mainRFileUri=args.r_file,
            ),
        ),
    )
    # Submission (sync/async handling) is delegated to the shared base command.
    super().Submit(args, application_ref, create_req)

View File

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud dataproc-gdc instances create` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc_gdc.spark_applications import basecreate as baseSparkApplication
from googlecloudsdk.command_lib.util.args import labels_util
DATAPROCGDC_API_NAME = 'dataprocgdc'
VERSION_MAP = {
base.ReleaseTrack.ALPHA: 'v1alpha1',
base.ReleaseTrack.GA: 'v1',
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class SparkSql(baseSparkApplication.BaseGDCSparkApplicationCommand):
  """Create a Dataproc GDC Spark SQL application.

  A Spark SQL application that runs locally on the Dataproc
  GDC cluster.
  """

  detailed_help = {'EXAMPLES': """\
          To create a Dataproc GDC spark sql application with name
          `my-application` in location `us-central1` running on a service
          instance `my-instance`, run:

          $ {command} my-application --service-instance=my-instance
          --location=us-central1 --project=test-project
          """}

  @staticmethod
  def Args(parser):
    """Registers Spark SQL-specific flags on top of the shared base flags.

    Args:
      parser: argparse-style parser supplied by calliope.
    """
    baseSparkApplication.BaseGDCSparkApplicationCommand.Args(parser)
    parser.add_argument(
        '--file',
        help='The HCFS URI of the script that contains SQL queries',
    )
    parser.add_argument(
        '--params',
        type=arg_parsers.ArgDict(),
        metavar='PROPERTY=VALUE',
        help=(
            'Mapping of query variable names to values (equivalent to the '
            'Spark SQL command: SET `name="value";`) '
        ),
    )
    # NOTE(review): --query-file duplicates --file; kept for compatibility and
    # honored as a fallback in Run (see below).
    parser.add_argument(
        '--query-file',
        help='The HCFS URI of the script that contains SQL queries.',
    )
    parser.add_argument(
        '--jars',
        type=arg_parsers.ArgList(),
        metavar='JAR',
        default=[],
        help=(
            'Comma separated list of jar files to be provided to the '
            'executor and driver classpaths.'
        ),
    )

  def Run(self, args):
    """Builds the SparkApplication create request and submits it.

    Args:
      args: parsed command-line arguments for this invocation.
    """
    # API version is keyed off the release track: v1alpha1 (ALPHA) or v1 (GA).
    api_version = VERSION_MAP.get(self.ReleaseTrack())
    messages = apis.GetMessagesModule(DATAPROCGDC_API_NAME, api_version)
    # Resource references parsed from the declarative concept arguments.
    application_ref = args.CONCEPTS.application.Parse()
    application_environment_ref = args.CONCEPTS.application_environment.Parse()
    service_instance_ref = args.CONCEPTS.service_instance.Parse()
    # Convert the annotations dict flag into the proto map message.
    if args.annotations:
      annotations = encoding.DictToAdditionalPropertyMessage(
          args.annotations,
          messages.SparkApplication.AnnotationsValue,
          sort_items=True,
      )
    else:
      annotations = None
    # Only the short (unqualified) name of the application environment is sent.
    application_environment = None
    if application_environment_ref:
      application_environment = application_environment_ref.Name()
    # Fix: --query-file was previously accepted but silently ignored (only
    # args.file was read). Honor it as a fallback; --file still wins when both
    # are supplied, preserving prior behavior.
    spark_sql_application_config = messages.SparkSqlApplicationConfig(
        jarFileUris=args.jars or [],
        queryFileUri=args.file or args.query_file,
    )
    if args.params:
      spark_sql_application_config.scriptVariables = (
          encoding.DictToAdditionalPropertyMessage(
              args.params,
              messages.SparkSqlApplicationConfig.ScriptVariablesValue,
          )
      )
    spark_app_properties = None
    if args.properties:
      spark_app_properties = encoding.DictToAdditionalPropertyMessage(
          args.properties,
          messages.SparkApplication.PropertiesValue,
      )
    create_req = messages.DataprocgdcProjectsLocationsServiceInstancesSparkApplicationsCreateRequest(
        parent=service_instance_ref.RelativeName(),
        sparkApplication=messages.SparkApplication(
            applicationEnvironment=application_environment,
            displayName=args.display_name,
            labels=labels_util.ParseCreateArgs(
                args, messages.SparkApplication.LabelsValue
            ),
            annotations=annotations,
            namespace=args.namespace,
            properties=spark_app_properties,
            version=args.version,
            dependencyImages=args.dependency_images or [],
            sparkSqlApplicationConfig=spark_sql_application_config,
        ),
    )
    # Submission (sync/async handling) is delegated to the shared base command.
    super().Submit(args, application_ref, create_req)