feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for cloud dataproc batches."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Batches(base.Group):
  """Submit Dataproc batch jobs.

  Submit Dataproc batch jobs.

  Submit a job:

    $ {command} submit

  List all batch jobs:

    $ {command} list

  List job details:

    $ {command} describe JOB_ID

  Delete a batch job:

    $ {command} delete JOB_ID

  Cancel a running batch job without removing the batch resource:

    $ {command} cancel JOB_ID

  View job output:

    $ {command} wait JOB_ID
  """

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Analyze batches command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
@base.ReleaseTracks(base.ReleaseTrack.BETA)
@base.Hidden
class Analyze(base.Command):
  """Analyze a batch job."""

  detailed_help = {'EXAMPLES': """\
      To Analyze a batch job, run:
      $ {command} my-batch-job --region=us-central1
      """}

  @staticmethod
  def Args(parser):
    """Registers the batch resource argument and the --timeout flag."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    flags.AddBatchResourceArg(parser, 'analyze', dataproc.api_version, True)
    flags.AddTimeoutFlag(parser, default='30m')

  def Run(self, args):
    """Issues the Analyze request and waits for the operation to finish."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    batch_ref = args.CONCEPTS.batch.Parse()
    analyze_request = (
        dataproc.messages.DataprocProjectsLocationsBatchesAnalyzeRequest(
            name=batch_ref.RelativeName()))
    analyze_op = dataproc.client.projects_locations_batches.Analyze(
        analyze_request)
    # Block until the long-running analyze operation completes, honoring the
    # user-supplied --timeout.
    return util.WaitForOperation(
        dataproc,
        analyze_op,
        message='Waiting for analyze batch',
        timeout_s=args.timeout,
    )

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Batches cancel command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Cancel(base.Command):
  """Cancel a batch job without removing batch resources."""

  detailed_help = {
      'EXAMPLES':
          """\
          To cancel a batch job "my-batch-job" in the "us-central1" region, run:
          $ {command} my-batch-job --region=us-central1
          """
  }

  @staticmethod
  def Args(parser):
    """Registers the positional batch resource argument."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    flags.AddBatchResourceArg(parser, 'cancel', dataproc.api_version)

  def Run(self, args):
    """Confirms with the user, then cancels the batch's operation."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    batch_ref = args.CONCEPTS.batch.Parse()

    # Abort early if the user declines the confirmation prompt.
    console_io.PromptContinue(
        message='The batch [{0}] will be cancelled.'.format(batch_ref.Name()),
        cancel_on_no=True,
        cancel_string='Cancellation aborted by user.')

    # Cancellation goes through the batch's long-running operation, so fetch
    # the batch workload first to discover its operation name.
    get_request = dataproc.messages.DataprocProjectsLocationsBatchesGetRequest(
        name=batch_ref.RelativeName())
    batch = dataproc.client.projects_locations_batches.Get(get_request)

    log.status.Print('Canceling batch [{}].'.format(batch_ref.Name()))
    cancel_request = (
        dataproc.messages.DataprocProjectsRegionsOperationsCancelRequest(
            name=batch.operation))
    dataproc.client.projects_regions_operations.Cancel(cancel_request)

View File

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Delete batches command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Delete(base.DeleteCommand):
  """Delete a batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To delete a batch job, run:
          $ {command} my-batch-job --region=us-central1
          """
  }

  @staticmethod
  def Args(parser):
    """Registers the --async flag and the batch resource argument."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    flags.AddAsync(parser)
    flags.AddBatchResourceArg(parser, 'delete', dataproc.api_version)

  def Run(self, args):
    """Confirms with the user, then deletes the batch resource."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    batch_ref = args.CONCEPTS.batch.Parse()

    # Abort early if the user declines the confirmation prompt.
    console_io.PromptContinue(
        message="The batch '{}' will be deleted.".format(batch_ref.Name()),
        cancel_on_no=True,
        cancel_string='Deletion aborted by user.')

    delete_request = (
        dataproc.messages.DataprocProjectsLocationsBatchesDeleteRequest(
            name=batch_ref.RelativeName()))
    dataproc.client.projects_locations_batches.Delete(delete_request)

    if args.async_:
      # Fire and forget: report the deletion was requested and return.
      log.status.Print("Deleting batch '{}'.".format(batch_ref.Name()))
      return

    def _FetchBatch(resource_name):
      # Polled by WaitForResourceDeletion until Get reports the resource gone.
      return dataproc.client.projects_locations_batches.Get(
          dataproc.messages.DataprocProjectsLocationsBatchesGetRequest(
              name=resource_name))

    util.WaitForResourceDeletion(
        _FetchBatch,
        batch_ref.RelativeName(),
        message='Waiting for batch deletion')
    log.DeletedResource(batch_ref.RelativeName())

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describe batches command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Describe(base.DescribeCommand):
  """Describe a batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To describe a batch job, run:
          $ {command} EXAMPLE-JOB --region=us-central1
          """
  }

  @staticmethod
  def Args(parser):
    """Registers the positional batch resource argument."""
    # Pin the release track explicitly for consistency with the sibling
    # batches commands (cancel, delete, list, wait), which all pass GA.
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    flags.AddBatchResourceArg(parser, 'describe', dataproc.api_version)

  def Run(self, args):
    """Fetches and returns the requested Batch resource."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    messages = dataproc.messages
    batch_id = args.CONCEPTS.batch.Parse()
    request = messages.DataprocProjectsLocationsBatchesGetRequest(
        name=batch_id.RelativeName())
    return dataproc.client.projects_locations_batches.Get(request)

View File

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List batch job command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import constants
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import display_helper
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class List(base.ListCommand):
  """List batch jobs in a project.

  List batch jobs in a project. Page-size sets the maximum number of jobs
  returned per page, and Page-token retrieves subsequent results.

  ## EXAMPLES

  List batch jobs in the "us-central1" region:

    $ {command} --region="us-central1"
  """

  @staticmethod
  def Args(parser):
    """Registers list flags, replacing the built-in --filter with a stub."""
    base.URI_FLAG.RemoveFromParser(parser)
    base.PAGE_SIZE_FLAG.SetDefault(parser, constants.DEFAULT_PAGE_SIZE)
    # TODO(b/191296541): Use built-in filter arg after it is supported by
    # backend.
    # Filter is not supported yet.
    base.FILTER_FLAG.RemoveFromParser(parser)
    # Temporarily add a fake hidden implementation so that no parsing logic
    # needs to be changed.
    parser.add_argument(
        '--filter',
        hidden=True,
        metavar='EXPRESSION',
        require_coverage_in_tests=False,
        help="""\
        Apply a Boolean filter EXPRESSION to each resource item to be listed
        (the '=' equality operator is the only supported operator).
        If the expression evaluates true for an item, the item is listed.
        This flag interacts with other flags, which are applied in the
        following order: *--flatten*, *--sort-by*, *--filter*, *--limit*.
        For more information, run 'gcloud topic filters'.""")
    flags.AddRegionFlag(parser)
    parser.display_info.AddFormat("""
          table(
            name.basename():label=BATCH_ID,
            batchType.yesno(no="-"):label=JOB_TYPE,
            state:label=STATUS
          )
    """)

  def Run(self, args):
    """Pages through the Batches list API, yielding display-wrapped items."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    request = List.GetRequest(dataproc.messages,
                              util.ParseProjectsLocations(dataproc),
                              args)
    jobs = util.YieldFromListWithUnreachableList(
        'The following batches are unreachable: %s',
        dataproc.client.projects_locations_batches,
        request,
        limit=args.limit,
        field='batches',
        batch_size=args.page_size,
        batch_size_attribute='pageSize',
    )
    # Lazily wrap each batch for display so paging stays streaming.
    return (display_helper.DisplayHelper(job) for job in jobs)

  @staticmethod
  def GetRequest(messages, resource, args):
    """Builds the List request, moving any --filter onto the request.

    NOTE: this deliberately clears args.filter so the client-side display
    layer does not re-apply the expression on top of server-side filtering.
    """
    # Remove args.filter to prevent post-filter behavior.
    backend_filter = None
    if args.filter:
      backend_filter = args.filter
      args.filter = None
    return messages.DataprocProjectsLocationsBatchesListRequest(
        filter=backend_filter,
        pageSize=args.page_size,
        parent=resource.RelativeName())

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for cloud dataproc batches submit."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc.batches import (
batches_create_request_factory)
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Submit(base.Group):
  """Submit a Dataproc batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To submit a PySpark job, run:
          $ {command} pyspark my-pyspark.py --region='us-central1' --deps-bucket=gs://my-bucket --py-files='path/to/my/python/scripts'
          To submit a Spark job, run:
          $ {command} spark --region='us-central1' --deps-bucket=gs://my-bucket --jar='my_jar.jar' -- ARG1 ARG2
          To submit a Spark-R job, run:
          $ {command} spark-r my-main-r.r --region='us-central1' --deps-bucket=gs://my-bucket -- ARG1 ARG2
          To submit a Spark-Sql job, run:
          $ {command} spark-sql 'my-sql-script.sql' --region='us-central1' --deps-bucket=gs://my-bucket --vars='variable=value'
          """
  }

  @staticmethod
  def Args(parser):
    """Registers flags shared by every `batches submit` subcommand."""
    flags.AddAsync(parser)
    # Shared batch-creation flags (labels, runtime config, etc.) come from the
    # create-request factory so each job-type subcommand stays thin.
    batches_create_request_factory.AddArguments(
        parser, dp.Dataproc().api_version)

View File

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit a PySpark batch job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc.batches import batch_submitter
from googlecloudsdk.command_lib.dataproc.batches import pyspark_batch_factory
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class PySpark(base.Command):
  """Submit a PySpark batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To submit a PySpark batch job called "my-batch" that runs "my-pyspark.py", run:
          $ {command} my-pyspark.py --batch=my-batch --deps-bucket=gs://my-bucket --region=us-central1 --py-files='path/to/my/python/script.py'
          """
  }

  @staticmethod
  def Args(parser):
    """Registers PySpark-specific batch arguments."""
    pyspark_batch_factory.AddArguments(parser)

  def Run(self, args):
    """Uploads local dependencies, then submits the PySpark batch."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    factory = pyspark_batch_factory.PySparkBatchFactory(dataproc)
    batch_message = factory.UploadLocalFilesAndGetMessage(args)
    return batch_submitter.Submit(batch_message, dataproc, args)

View File

@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit a Ray batch job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc.batches import batch_submitter
from googlecloudsdk.command_lib.dataproc.batches import ray_batch_factory
@base.Hidden
class Ray(base.Command):
  """Submit a Ray batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To submit a Ray batch job called "my-batch" that runs "my-ray.py", run:
          $ {command} my-ray.py --batch=my-batch --deps-bucket=gs://my-bucket --location=us-central1
          """
  }

  @staticmethod
  def Args(parser):
    """Registers Ray-specific batch arguments and the --location flag."""
    ray_batch_factory.AddArguments(parser)
    flags.AddLocationFlag(parser)

  def Run(self, args):
    """Uploads local dependencies, then submits the Ray batch."""
    dataproc = dp.Dataproc(base.ReleaseTrack.BETA)
    factory = ray_batch_factory.RayBatchFactory(dataproc)
    batch_message = factory.UploadLocalFilesAndGetMessage(args)
    return batch_submitter.Submit(batch_message, dataproc, args)

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit a Spark batch job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc.batches import batch_submitter
from googlecloudsdk.command_lib.dataproc.batches import spark_batch_factory
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Spark(base.Command):
  """Submit a Spark batch job."""

  detailed_help = {
      'DESCRIPTION':
          """\
          Submit a Spark batch job.
          """,
      'EXAMPLES':
          """\
          To submit a Spark job, run:
          $ {command} --region=us-central1 --jar=my_jar.jar --deps-bucket=gs://my-bucket -- ARG1 ARG2
          To submit a Spark job that runs a specific class of a jar, run:
          $ {command} --region=us-central1 --class=org.my.main.Class --jars=my_jar1.jar,my_jar2.jar --deps-bucket=gs://my-bucket -- ARG1 ARG2
          To submit a Spark job that runs a jar installed on the cluster, run:
          $ {command} --region=us-central1 --class=org.apache.spark.examples.SparkPi --deps-bucket=gs://my-bucket --jars=file:///usr/lib/spark/examples/jars/spark-examples.jar -- 15
          """
  }

  @staticmethod
  def Args(parser):
    """Registers Spark-specific batch arguments."""
    spark_batch_factory.AddArguments(parser)

  def Run(self, args):
    """Uploads local dependencies, then submits the Spark batch."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    factory = spark_batch_factory.SparkBatchFactory(dataproc)
    batch_message = factory.UploadLocalFilesAndGetMessage(args)
    return batch_submitter.Submit(batch_message, dataproc, args)

View File

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit a SparkR batch job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc.batches import batch_submitter
from googlecloudsdk.command_lib.dataproc.batches import sparkr_batch_factory
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class SparkR(base.Command):
  """Submit a Spark R batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To submit a Spark R batch job running "my-main-r.r" script and upload it to "gs://my-bucket", run:
          $ {command} my-main-r.r --deps-bucket=gs://my-bucket --region='us-central1' -- ARG1 ARG2
          """
  }

  @staticmethod
  def Args(parser):
    """Registers SparkR-specific batch arguments."""
    sparkr_batch_factory.AddArguments(parser)

  def Run(self, args):
    """Uploads local dependencies, then submits the SparkR batch."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    factory = sparkr_batch_factory.SparkRBatchFactory(dataproc)
    batch_message = factory.UploadLocalFilesAndGetMessage(args)
    return batch_submitter.Submit(batch_message, dataproc, args)

View File

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit a SparkSql batch job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc.batches import batch_submitter
from googlecloudsdk.command_lib.dataproc.batches import sparksql_batch_factory
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class SparkSql(base.Command):
  """Submit a Spark SQL batch job."""

  detailed_help = {
      'EXAMPLES':
          """\
          To submit a Spark SQL job running "my-sql-script.sql" and upload it to "gs://my-bucket", run:
          $ {command} my-sql-script.sql --deps-bucket=gs://my-bucket --region=us-central1 --vars="NAME=VALUE,NAME2=VALUE2"
          """
  }

  @staticmethod
  def Args(parser):
    """Registers Spark SQL-specific batch arguments."""
    sparksql_batch_factory.AddArguments(parser)

  def Run(self, args):
    """Uploads local dependencies, then submits the Spark SQL batch."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    factory = sparksql_batch_factory.SparkSqlBatchFactory(dataproc)
    batch_message = factory.UploadLocalFilesAndGetMessage(args)
    return batch_submitter.Submit(batch_message, dataproc, args)

View File

@@ -0,0 +1,78 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Batches wait command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc.poller import gce_batch_poller
from googlecloudsdk.api_lib.dataproc.poller import rm_batch_poller
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc.batches import batch_version_util
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Wait(base.Command):
  """View the output of a batch as it runs or after it completes."""

  detailed_help = {
      'EXAMPLES':
          """\
          To see a list of all batches, run:
          $ gcloud dataproc batches list
          To view the output of "my-batch-job" in "us-central1" as it runs, run:
          $ {command} my-batch-job --region=us-central1
          """
  }

  @staticmethod
  def Args(parser):
    """Registers the positional batch resource argument."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    flags.AddBatchResourceArg(parser, 'wait', dataproc.api_version)

  def Run(self, args):
    """Polls the batch until it finishes, surfacing output via the poller."""
    dataproc = dp.Dataproc(base.ReleaseTrack.GA)
    batch_id = args.CONCEPTS.batch.Parse()
    # Get the batch workload to obtain the resolved version.
    batch = dataproc.client.projects_locations_batches.Get(
        dataproc.messages.DataprocProjectsLocationsBatchesGetRequest(
            name=batch_id.RelativeName()
        )
    )
    # Pick the poller that matches how this batch is scheduled (RM- vs
    # GCE-backed), as determined from the fetched batch's version info.
    if batch_version_util.is_rm_batch(batch):
      poller = rm_batch_poller.RmBatchPoller(dataproc)
    else:
      poller = gce_batch_poller.GceBatchPoller(dataproc)
    # No overall deadline (max_wait_ms=sys.maxsize): wait as long as the
    # batch runs, polling every 5s (backoff multiplier 1.3 is capped by
    # wait_ceiling_ms=5000, so the interval stays at ~5s).
    waiter.WaitFor(
        poller,
        batch_id.RelativeName(),
        max_wait_ms=sys.maxsize,
        sleep_ms=5000,
        wait_ceiling_ms=5000,
        exponential_sleep_multiplier=1.3,
        custom_tracker=None,
        tracker_update_func=poller.TrackerUpdateFunction)