feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,71 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for cloud dataproc workflow templates."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class WorkflowTemplates(base.Group):
  r"""Create and manage Dataproc workflow templates.

  Create and manage Dataproc workflow templates.

  ## EXAMPLES

  To create a workflow template, run:

    $ {command} create my_template

  To instantiate a workflow template, run:

    $ {command} instantiate my_template

  To instantiate a workflow template from a file, run:

    $ {command} instantiate-from-file --file template.yaml

  To delete a workflow template, run:

    $ {command} delete my_template

  To view the details of a workflow template, run:

    $ {command} describe my_template

  To see the list of all workflow templates, run:

    $ {command} list

  To remove a job from a workflow template, run:

    $ {command} remove-job my_template --step-id id

  To update managed cluster in a workflow template, run:

    $ {command} set-managed-cluster my_template --num-masters 5

  To update cluster selector in a workflow template, run:

    $ {command} set-cluster-selector my_template \
        --cluster-labels environment=prod
  """
  # Pure command group: calliope attaches the subcommands from this package;
  # no group-level flags or logic are needed here.
  pass

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for adding cloud dataproc jobs to workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class AddJob(base.Group):
  r"""Add Dataproc jobs to workflow template.

  ## EXAMPLES

  To add a Hadoop MapReduce job, run:

    $ {command} hadoop --workflow-template my_template --jar my_jar.jar \
        -- arg1 arg2

  To add a Spark Scala or Java job, run:

    $ {command} spark --workflow-template my_template --jar my_jar.jar \
        -- arg1 arg2

  To add a PySpark job, run:

    $ {command} pyspark --workflow-template my_template my_script.py \
        -- arg1 arg2

  To add a Spark SQL job, run:

    $ {command} spark-sql --workflow-template my_template --file my_queries.q

  To add a Pig job, run:

    $ {command} pig --workflow-template my_template --file my_script.pig

  To add a Hive job, run:

    $ {command} hive --workflow-template my_template --file my_queries.q
  """

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a hadoop job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import hadoop
# Help examples surfaced through `detailed_help` on the Hadoop command below.
# NOTE: --class and --jar are registered as a *mutually exclusive* required
# group in Hadoop.Args, so the example must pass only one of them.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Hadoop job that executes the main class 'my-class' to the
      workflow template 'my-workflow-template' in region 'us-central1' with
      step-id 'my-step-id', run:

        $ {command} --step-id=my-step-id --class=my-class --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Hadoop(hadoop.HadoopBase, base.Command):
  """Add a hadoop job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Hadoop job flags, template step flags, and JVM driver flags."""
    hadoop.HadoopBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)
    # Exactly one of the JVM driver flags (--class / --jar) must be given.
    util.AddJvmDriverFlags(parser.add_mutually_exclusive_group(required=True))

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Hadoop job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    hadoop.HadoopBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a Hive job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import hive
# Help examples surfaced through `detailed_help` on the Hive command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Hive job that executes the query 'QUERY' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id -e=QUERY --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Hive(hive.HiveBase, base.Command):
  """Add a Hive job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Hive job flags plus the workflow-template step flags."""
    hive.HiveBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Hive job config and the ordered-job (step) fields."""
    hive.HiveBase.ConfigureJob(messages, job, files_by_type, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a Pig job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import pig
# Help examples surfaced through `detailed_help` on the Pig command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Pig job that executes the query 'QUERY' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id -e=QUERY --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Pig(pig.PigBase, base.Command):
  """Add a Pig job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Pig job flags plus the workflow-template step flags."""
    pig.PigBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Pig job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    pig.PigBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a Presto job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import presto
# Help examples surfaced through `detailed_help` on the Presto command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Presto job that executes the query 'QUERY' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id -e=QUERY --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Presto(presto.PrestoBase, base.Command):
  """Add a Presto job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Presto job flags plus the workflow-template step flags."""
    presto.PrestoBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Presto job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    presto.PrestoBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a PySpark job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import pyspark
# Help examples surfaced through `detailed_help` on the PySpark command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a PySpark job with archives 'archive1.tgz' and 'archive2.zip'
      to the workflow template 'my-workflow-template' in region 'us-central1'
      with step-id 'my-step-id', run:

        $ {command} --step-id=my-step-id --archives="archive1.tgz,archive2.zip" --workflow-template=my-workflow-template --region=us-central1
      """,
}
class PySpark(pyspark.PySparkBase, base.Command):
  """Add a PySpark job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register PySpark job flags plus the workflow-template step flags."""
    pyspark.PySparkBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the PySpark job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    pyspark.PySparkBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a Spark job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import spark
# Help examples surfaced through `detailed_help` on the Spark command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Spark job with files 'file1' and 'file2' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id --files="file1,file2" --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Spark(spark.SparkBase, base.Command):
  """Add a Spark job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Spark job flags, template step flags, and JVM driver flags."""
    spark.SparkBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)
    # Unlike the Hadoop command, the driver flags are grouped but NOT
    # mutually exclusive here; keep the plain argument group.
    util.AddJvmDriverFlags(parser.add_argument_group())

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Spark job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    spark.SparkBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a SparkR job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import spark_r
# Help examples surfaced through `detailed_help` on the SparkR command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a SparkR job executing the file 'test.r' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} test.r --step-id=my-step-id --workflow-template=my-workflow-template --region=us-central1
      """,
}
class SparkR(spark_r.SparkRBase, base.Command):
  """Add a SparkR job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register SparkR job flags plus the workflow-template step flags."""
    spark_r.SparkRBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the SparkR job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    spark_r.SparkRBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a SparkSql job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import spark_sql
# Help examples surfaced through `detailed_help` on the SparkSql command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a SparkSql job that executes the query 'QUERY' to the workflow
      template 'my-workflow-template' in region 'us-central1' with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id -e=QUERY --workflow-template=my-workflow-template --region=us-central1
      """,
}
class SparkSql(spark_sql.SparkSqlBase, base.Command):
  """Add a SparkSql job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register SparkSql job flags plus the workflow-template step flags."""
    spark_sql.SparkSqlBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the SparkSql job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    spark_sql.SparkSqlBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add a Trino job to the workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.dataproc.jobs import trino
# Help examples surfaced through `detailed_help` on the Trino command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To add a Trino job that executes 'QUERY' to the workflow template
      'my-workflow-template' in the 'us-central1' region with step-id
      'my-step-id', run:

        $ {command} --step-id=my-step-id -e=QUERY --workflow-template=my-workflow-template --region=us-central1
      """,
}
class Trino(trino.TrinoBase, base.Command):
  """Add a Trino job to the workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register Trino job flags plus the workflow-template step flags."""
    trino.TrinoBase.Args(parser)
    api_version = dp.Dataproc(cls.ReleaseTrack()).api_version
    workflow_templates.AddWorkflowTemplatesArgs(parser, api_version)

  def ConfigureJob(self, messages, job, files_by_type, args):
    """Populate the Trino job config and the ordered-job (step) fields."""
    logging_config = self.BuildLoggingConfig(messages, args.driver_log_levels)
    trino.TrinoBase.ConfigureJob(
        messages, job, files_by_type, logging_config, args)
    workflow_templates.ConfigureOrderedJob(messages, job, args)

  def Run(self, args):
    """Build the ordered job from args and attach it to the template."""
    self.PopulateFilesByType(args)
    client = dp.Dataproc(self.ReleaseTrack())
    job = workflow_templates.CreateWorkflowTemplateOrderedJob(args, client)
    self.ConfigureJob(client.messages, job, self.files_by_type, args)
    return workflow_templates.AddJobToWorkflowTemplate(args, client, job)

View File

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.command_lib.util.args import labels_util
import six
DETAILED_HELP = {
'EXAMPLES':
"""\
To create a workflow template named ``my-workflow-template'' in region
``us-central1'' with label params 'key1'='value1' and 'key2'='value2', run:
$ {command} my-workflow-template --region=us-central1 --labels="key1=value1,key2=value2"
""",
}
class Create(base.CreateCommand):
  """Create a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register create flags: labels, DAG timeout, KMS key, template arg."""
    dataproc = dp.Dataproc(cls.ReleaseTrack())
    labels_util.AddCreateLabelsFlags(parser)
    workflow_templates.AddDagTimeoutFlag(parser, False)
    workflow_templates.AddKmsKeyFlag(parser, False)
    flags.AddTemplateResourceArg(parser, 'create', dataproc.api_version)

  def Run(self, args):
    """Issue the Create request and return the created template."""
    client = dp.Dataproc(self.ReleaseTrack())
    msgs = client.messages
    ref = args.CONCEPTS.template.Parse()

    # TODO(b/109837200) make the dataproc discovery doc parameters consistent
    # Parent() fails for the collection because of projectId/projectsId and
    # regionId/regionsId inconsistencies.
    # parent = ref.Parent().RelativePath()
    parent = '/'.join(ref.RelativeName().split('/')[:4])

    template = msgs.WorkflowTemplate(
        id=ref.Name(),
        name=ref.RelativeName(),
        labels=labels_util.ParseCreateArgs(
            args, msgs.WorkflowTemplate.LabelsValue))
    if args.dag_timeout:
      # The API expects a duration string in seconds, e.g. '1800s'.
      template.dagTimeout = six.text_type(args.dag_timeout) + 's'
    if args.kms_key:
      template.encryptionConfig = workflow_templates.GenerateEncryptionConfig(
          args.kms_key, client)

    request = msgs.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
        parent=parent, workflowTemplate=template)
    return client.client.projects_regions_workflowTemplates.Create(request)

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Delete workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
'EXAMPLES':
"""\
To delete a workflow template 'my-workflow-template', run:
$ {command} my-workflow-template --region=us-central1
""",
}
class Delete(base.DeleteCommand):
  """Delete a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register the workflow-template resource argument."""
    dataproc = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(parser, 'delete', dataproc.api_version)

  def Run(self, args):
    """Confirm with the user, then delete the named template."""
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    # Deletion is irreversible; require explicit confirmation first.
    console_io.PromptContinue(
        message="The workflow template '[{0}]' will be deleted.".format(
            ref.Name()),
        cancel_on_no=True)
    request = client.messages.DataprocProjectsRegionsWorkflowTemplatesDeleteRequest(
        name=ref.RelativeName())
    client.client.projects_regions_workflowTemplates.Delete(request)

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describe workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
# Help examples surfaced through `detailed_help` on the Describe command
# below. The example invocation must use the same template name ('my-template')
# that the prose introduces.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To describe a workflow template 'my-template' in region 'us-central1', run:

        $ {command} my-template --region=us-central1
      """,
}
class Describe(base.DescribeCommand):
  """Describe a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register the template resource argument and the --version flag."""
    dataproc = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(parser, 'describe', dataproc.api_version)
    flags.AddVersionFlag(parser)

  def Run(self, args):
    """Fetch and return the requested template (specific or latest version)."""
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    return client.GetRegionsWorkflowTemplate(ref, args.version)

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Export workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.export import util as export_util
from googlecloudsdk.core.util import files
# Help examples surfaced through `detailed_help` on the export command below.
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To export version 1.0 of the workflow template 'my-workflow-template' in
      region 'us-central1' to template.yaml, run:

        $ {command} my-workflow-template --region=us-central1 --destination=path/to/template.yaml --version=1.0
      """,
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class Describe(base.DescribeCommand):
  """Export a workflow template.

  Exports a workflow template's configuration to a file.
  This configuration can be imported at a later time.
  """

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    """Register the template resource arg, export flags, and --version."""
    dataproc = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(parser, 'export', dataproc.api_version)
    export_util.AddExportFlags(parser)
    flags.AddVersionFlag(parser)

  def Run(self, args):
    """Fetch the template, scrub server-owned fields, and export it."""
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    # Get specified version, or most recent version if no version arg provided.
    template = client.GetRegionsWorkflowTemplate(ref, args.version)

    # Filter out OUTPUT_ONLY fields and resource identifying fields so the
    # export can be re-imported later. Note this needs to be kept in sync with
    # v1 workflow_templates.proto.
    for field in ('id', 'name', 'version', 'createTime', 'updateTime'):
      setattr(template, field, None)
    # We do not need to clear any fields from template.placement:
    # 1) Managed cluster:
    #    a) cluster_name is really a name prefix, so it's okay that multiple
    #       templates have the same value.
    #    b) The server does not resolve OUTPUT_ONLY fields when storing a
    #       workflow template, so cluster_config is fine as is.
    # 2) Cluster selector: there are no OUTPUT_ONLY or directly resource
    #    identifying fields here.

    if args.destination:
      with files.FileWriter(args.destination) as stream:
        export_util.Export(message=template, stream=stream)
    else:
      export_util.Export(message=template, stream=sys.stdout)

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Get IAM workflow template policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import iam_helpers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
@base.DefaultUniverseOnly
class GetIamPolicy(base.Command):
  """Get IAM policy for a workflow template.

  Gets the IAM policy for a workflow template, given a template ID.

  ## EXAMPLES

  The following command prints the IAM policy for a workflow template with the
  ID `example-workflow`:

    $ {command} example-workflow
  """

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(
        parser, 'retrieve the policy for', api_version=client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    messages = client.messages
    ref = args.CONCEPTS.template.Parse()
    # Request the highest policy version the library understands.
    options = messages.GetPolicyOptions(
        requestedPolicyVersion=iam_helpers.MAX_LIBRARY_IAM_SUPPORTED_VERSION)
    request = messages.DataprocProjectsRegionsWorkflowTemplatesGetIamPolicyRequest(
        resource=ref.RelativeName(),
        getIamPolicyRequest=messages.GetIamPolicyRequest(options=options))
    return client.client.projects_regions_workflowTemplates.GetIamPolicy(
        request)

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Import workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.export import util as export_util
from googlecloudsdk.core.console import console_io
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class Import(base.UpdateCommand):
  """Import a workflow template.

  If the specified template resource already exists, it will be overwritten.
  Otherwise, a new template will be created.

  To edit an existing template, you can export the template to a file, edit its
  configuration, and then import the new configuration.
  """

  @classmethod
  def GetApiVersion(cls):
    """Returns the API version based on the release track."""
    return dp.Dataproc(cls.ReleaseTrack()).api_version

  @classmethod
  def Args(cls, parser):
    # Registers the template resource arg plus --source for the import file.
    flags.AddTemplateResourceArg(
        parser, 'import', api_version=cls.GetApiVersion())
    export_util.AddImportFlags(parser)

  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages
    template_ref = args.CONCEPTS.template.Parse()
    # TODO(b/109837200) make the dataproc discovery doc parameters consistent
    # Parent() fails for the collection because of projectId/projectsId and
    # regionId/regionsId inconsistencies.
    # parent = template_ref.Parent().RelativePath()
    parent = '/'.join(template_ref.RelativeName().split('/')[0:4])
    # --source defaults to stdin ('-') when not given.
    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    template = export_util.Import(
        message_type=msgs.WorkflowTemplate, stream=data)
    # Populate id field.
    template.id = template_ref.Name()
    try:
      old_template = dataproc.GetRegionsWorkflowTemplate(template_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Template does not exist. Create a new one.
      request = msgs.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
          parent=parent, workflowTemplate=template)
      return dataproc.client.projects_regions_workflowTemplates.Create(request)
    # Update the existing template; PromptContinue aborts on "no".
    console_io.PromptContinue(
        message=('Workflow template [{0}] will be overwritten.').format(
            template.id),
        cancel_on_no=True)
    # Populate version field and name field.
    template.version = old_template.version
    template.name = template_ref.RelativeName()
    return dataproc.client.projects_regions_workflowTemplates.Update(template)

View File

@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Instantiate a workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import uuid
from apitools.base.py import encoding
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'EXAMPLES':
"""\
To instantiate a workflow template 'my-template' in region 'us-central1'
with parameter set 'param1'='value1' and 'param2'='value2', run:
$ {command} my-template --region=us-central1 --parameters="param1=value1,param2=value2"
""",
}
class Instantiate(base.CreateCommand):
  """Instantiate a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTimeoutFlag(parser, default='24h')
    base.ASYNC_FLAG.AddToParser(parser)
    flags.AddParametersFlag(parser)
    flags.AddTemplateResourceArg(parser, 'run', client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    msgs = client.messages
    ref = args.CONCEPTS.template.Parse()
    # Fresh UUID serves as the idempotency token for the request.
    body = msgs.InstantiateWorkflowTemplateRequest(requestId=uuid.uuid4().hex)
    if args.parameters:
      body.parameters = encoding.DictToAdditionalPropertyMessage(
          args.parameters,
          msgs.InstantiateWorkflowTemplateRequest.ParametersValue)
    operation = client.client.projects_regions_workflowTemplates.Instantiate(
        msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateRequest(
            instantiateWorkflowTemplateRequest=body,
            name=ref.RelativeName()))
    if args.async_:
      log.status.Print('Instantiating [{0}] with operation [{1}].'.format(
          ref.Name(), operation.name))
      return
    # Block until the workflow finishes (or --timeout expires).
    return util.WaitForWorkflowTemplateOperation(
        client, operation, timeout_s=args.timeout)

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Instantiate a workflow template from a file."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import uuid
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import storage_helpers
from googlecloudsdk.api_lib.dataproc import util as dp_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.export import util as export_util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
'EXAMPLES':
"""\
To instantiate a workflow template from a yaml file 'template.yaml' in
region 'us-central1', run:
$ {command} --file=template.yaml --region=us-central1
""",
}
class InstantiateFromFile(base.CreateCommand):
  """Instantiate a workflow template from a file."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    flags.AddRegionFlag(parser)
    flags.AddFileFlag(parser, 'workflow template', 'run')
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    msgs = client.messages
    region_ref = dp_util.ParseRegion(client)
    # The template may come from GCS, a local file, or stdin ('-').
    if args.file.startswith('gs://'):
      raw = storage_helpers.ReadObject(args.file)
    else:
      raw = console_io.ReadFromFileOrStdin(args.file, binary=False)
    template = export_util.Import(
        message_type=msgs.WorkflowTemplate, stream=raw)
    # Instantiate inline: the template is sent in the request body rather
    # than referenced as a stored resource.
    request = (
        msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
            parent=region_ref.RelativeName(),
            workflowTemplate=template))
    # Fresh UUID serves as the idempotency token.
    request.requestId = uuid.uuid4().hex
    operation = (
        client.client.projects_regions_workflowTemplates.InstantiateInline(
            request))
    if args.async_:
      log.status.Print('Instantiating with operation [{0}].'.format(
          operation.name))
      return
    return dp_util.WaitForWorkflowTemplateOperation(client, operation)

View File

@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import constants
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
DETAILED_HELP = {
'EXAMPLES':
"""\
To list all workflow-templates from region 'us-central1' run:
$ {command} --region=us-central1
""",
}
class List(base.ListCommand):
  """List workflow templates."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    # TODO(b/65634121): Implement URI listing for dataproc
    base.URI_FLAG.RemoveFromParser(parser)
    base.PAGE_SIZE_FLAG.SetDefault(parser, constants.DEFAULT_PAGE_SIZE)
    flags.AddRegionFlag(parser)
    parser.display_info.AddFormat("""
          table(
            id:label=ID,
            jobs.len():label=JOBS,
            updateTime:label=UPDATE_TIME,
            version:label=VERSION
          )
    """)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    region_ref = util.ParseRegion(client)
    list_request = (
        client.messages.DataprocProjectsRegionsWorkflowTemplatesListRequest(
            parent=region_ref.RelativeName()))
    # Pages through results, surfacing any unreachable templates as a warning.
    return util.YieldFromListWithUnreachableList(
        'The following workflow templates are unreachable: %s',
        client.client.projects_regions_workflowTemplates,
        list_request,
        limit=args.limit,
        field='templates',
        batch_size=args.page_size,
        batch_size_attribute='pageSize')

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove DAG timeout from workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
# Command help text; only the EXAMPLES section is supplied here.
# Fix: the example previously ended with a stray '"' after
# --region=us-central1, which would corrupt the rendered help.
DETAILED_HELP = {
    'EXAMPLES':
        """\
To remove a DAG timeout from a workflow template named
``my-workflow-template'' in region ``us-central1'', run:

  $ {command} my-workflow-template --region=us-central1
""",
}
class RemoveDagTimeout(base.CreateCommand):
  """Remove DAG timeout from a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(parser, 'remove the DAG timeout from',
                                 client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    # NOTE(review): args.version is read here but no version flag is added in
    # Args above -- confirm the flag is registered elsewhere.
    template = client.GetRegionsWorkflowTemplate(ref, args.version)
    # Clearing dagTimeout and writing the template back removes the timeout.
    template.dagTimeout = None
    response = client.client.projects_regions_workflowTemplates.Update(
        template)
    log.status.Print('Removed DAG timeout from {0}.'.format(ref.Name()))
    return response

View File

@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove Job from workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
'EXAMPLES':
"""\
To remove a job with step ID 'step-id' from a workflow template
'workflow-template' in region 'us-central1', run:
$ {command} workflow-template --region=us-central1 --step-id=step-id
""",
}
class RemoveJob(base.UpdateCommand):
  """Remove a job from workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    parser.add_argument(
        '--step-id',
        metavar='STEP_ID',
        type=str,
        help='The step ID of the job in the workflow template to remove.')
    flags.AddTemplateResourceArg(
        parser, 'remove job', api_version=client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    template = client.GetRegionsWorkflowTemplate(ref, args.version)
    kept_jobs = []
    removed_any = False
    for job in template.jobs:
      if job.stepId == args.step_id:
        # PromptContinue raises (aborting the command) if the user says no.
        console_io.PromptContinue(
            message=('The job [{0}] will be removed from workflow template '
                     '[{1}].').format(args.step_id, template.id),
            cancel_on_no=True)
        removed_any = True
      else:
        kept_jobs.append(job)
    if not removed_any:
      log.error('Step id [{0}] is not found in workflow template [{1}].'.format(
          args.step_id, template.id))
      return  # do not update workflow template if job is not removed.
    template.jobs = kept_jobs
    return client.client.projects_regions_workflowTemplates.Update(template)

View File

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run a workflow template."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import uuid
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'EXAMPLES':
"""\
To run a workflow template 'my-workflow-template' in region 'us-central1'
, run:
$ {command} my-workflow-template --region=us-central1
""",
}
@base.Deprecate(is_removed=False,
                warning='Workflow template run command is deprecated, please '
                'use instantiate command: "gcloud beta dataproc '
                'workflow-templates instantiate"')
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA)
class Run(base.CreateCommand):
  """Run a workflow template."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    flags.AddTemplateResourceArg(parser, 'run', api_version='v1')
    flags.AddTimeoutFlag(parser, default='24h')
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    # TODO (b/68774667): deprecate Run command in favor of Instantiate command.
    client = dp.Dataproc(self.ReleaseTrack())
    msgs = client.messages
    ref = args.CONCEPTS.template.Parse()
    # Fresh UUID serves as the idempotency token for the request.
    body = msgs.InstantiateWorkflowTemplateRequest(requestId=uuid.uuid4().hex)
    operation = client.client.projects_regions_workflowTemplates.Instantiate(
        msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateRequest(
            instantiateWorkflowTemplateRequest=body,
            name=ref.RelativeName()))
    if args.async_:
      log.status.Print('Running [{0}].'.format(ref.Name()))
      return
    # Block until the workflow finishes (or --timeout expires).
    return util.WaitForWorkflowTemplateOperation(
        client, operation, timeout_s=args.timeout)

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set cluster selector for workflow-template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.util.args import labels_util
class SetClusterSelector(base.UpdateCommand):
  """Set cluster selector for the workflow template."""

  detailed_help = {
      'EXAMPLES': """
To set placement cluster selector labels on a workflow template, run:

  $ {command} my_template --region=us-central1 --cluster-labels=environment=production
"""
  }

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    parser.add_argument(
        '--cluster-labels',
        metavar='KEY=VALUE',
        type=arg_parsers.ArgDict(
            key_type=labels_util.KEY_FORMAT_VALIDATOR,
            value_type=labels_util.VALUE_FORMAT_VALIDATOR,
            min_length=1),
        action=arg_parsers.UpdateAction,
        help='A list of label KEY=VALUE pairs to add.')
    flags.AddTemplateResourceArg(parser, 'set cluster selector',
                                 client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    template = client.GetRegionsWorkflowTemplate(ref, args.version)
    # Build the selector's label map from the --cluster-labels additions.
    label_values = labels_util.Diff(additions=args.cluster_labels).Apply(
        client.messages.ClusterSelector.ClusterLabelsValue).GetOrNone()
    selector = client.messages.ClusterSelector(clusterLabels=label_values)
    # Replaces any existing placement on the template.
    template.placement = client.messages.WorkflowTemplatePlacement(
        clusterSelector=selector)
    return client.client.projects_regions_workflowTemplates.Update(template)

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set DAG timeout on workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc import workflow_templates
from googlecloudsdk.core import log
import six
# Command help text; only the EXAMPLES section is supplied here.
# Fix: the example previously ended with a stray '"' after --dag-timeout=2h,
# which would corrupt the rendered help.
DETAILED_HELP = {
    'EXAMPLES':
        """\
To add a DAG timeout of 2h (or update an existing one to 2h) on a workflow
template named ``my-workflow-template'' in region ``us-central1'', run:

  $ {command} my-workflow-template --region=us-central1 --dag-timeout=2h
""",
}
class SetDagTimeout(base.CreateCommand):
  """Set DAG timeout on a workflow template."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    workflow_templates.AddDagTimeoutFlag(parser, True)
    flags.AddTemplateResourceArg(parser, 'set the DAG timeout on',
                                 client.api_version)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.template.Parse()
    template = client.GetRegionsWorkflowTemplate(ref, args.version)
    # The API expects a seconds-suffixed duration string, e.g. '7200s'.
    template.dagTimeout = six.text_type(args.dag_timeout) + 's'
    response = client.client.projects_regions_workflowTemplates.Update(
        template)
    log.status.Print('Set a DAG timeout of {0} on {1}.'.format(
        template.dagTimeout, ref.Name()))
    return response

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set IAM workflow template policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import iam_helpers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.iam import iam_util
@base.DefaultUniverseOnly
class SetIamPolicy(base.Command):
  """Set IAM policy for a workflow template.

  Sets the IAM policy for a workflow template, given a template ID and the
  policy.
  """

  detailed_help = iam_util.GetDetailedHelpForSetIamPolicy('template')

  @classmethod
  def Args(cls, parser):
    client = dp.Dataproc(cls.ReleaseTrack())
    flags.AddTemplateResourceArg(parser, 'set the policy on',
                                 client.api_version)
    iam_util.AddArgForPolicyFile(parser)

  def Run(self, args):
    client = dp.Dataproc(self.ReleaseTrack())
    messages = client.messages
    # Parse the user-supplied policy file and pin the policy version to the
    # highest one the library supports.
    new_policy = iam_util.ParsePolicyFile(args.policy_file, messages.Policy)
    new_policy.version = iam_helpers.MAX_LIBRARY_IAM_SUPPORTED_VERSION
    ref = args.CONCEPTS.template.Parse()
    request = messages.DataprocProjectsRegionsWorkflowTemplatesSetIamPolicyRequest(
        resource=ref.RelativeName(),
        setIamPolicyRequest=messages.SetIamPolicyRequest(policy=new_policy))
    return client.client.projects_regions_workflowTemplates.SetIamPolicy(
        request)

View File

@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set managed cluster for workflow template command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataproc import compute_helpers
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import clusters
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.util.args import labels_util
@base.DefaultUniverseOnly
class SetManagedCluster(base.UpdateCommand):
  """Set a managed cluster for the workflow template."""

  detailed_help = {
      'EXAMPLES': """
To update managed cluster in a workflow template, run:

  $ {command} my_template --region=us-central1 --no-address --num-workers=10 \
    --worker-machine-type=custom-6-23040
"""
  }

  @classmethod
  def Args(cls, parser):
    dataproc = dp.Dataproc(cls.ReleaseTrack())
    parser.add_argument(
        '--cluster-name',
        help="""\
          The name of the managed dataproc cluster.
          If unspecified, the workflow template ID will be used.""")
    # Reuses the standard cluster-creation flags so the managed cluster can
    # be configured like a regular cluster.
    clusters.ArgsForClusterRef(
        parser,
        dataproc,
        cls.Beta(),
        cls.Alpha(),
        include_deprecated=cls.Beta(),
        include_gke_platform_args=False)
    flags.AddTemplateResourceArg(parser, 'set managed cluster',
                                 dataproc.api_version)
    if cls.Beta():
      clusters.BetaArgsForClusterRef(parser)

  @classmethod
  def Beta(cls):
    # True for every non-GA track (ALPHA and BETA).
    return cls.ReleaseTrack() != base.ReleaseTrack.GA

  @classmethod
  def Alpha(cls):
    return cls.ReleaseTrack() == base.ReleaseTrack.ALPHA

  @classmethod
  def GetComputeReleaseTrack(cls):
    # Compute helpers only distinguish BETA vs GA.
    if cls.Beta():
      return base.ReleaseTrack.BETA
    return base.ReleaseTrack.GA

  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    template_ref = args.CONCEPTS.template.Parse()
    # NOTE(review): args.version is read here but no version flag is added in
    # Args above -- confirm the flag is registered elsewhere.
    workflow_template = dataproc.GetRegionsWorkflowTemplate(
        template_ref, args.version)
    # The cluster name defaults to the workflow template ID.
    if args.cluster_name:
      cluster_name = args.cluster_name
    else:
      cluster_name = template_ref.workflowTemplatesId
    compute_resources = compute_helpers.GetComputeResources(
        self.GetComputeReleaseTrack(), cluster_name, template_ref.regionsId)
    cluster_config = clusters.GetClusterConfig(
        args,
        dataproc,
        template_ref.projectsId,
        compute_resources,
        self.Beta(),
        self.Alpha(),
        include_deprecated=self.Beta())
    labels = labels_util.ParseCreateArgs(
        args, dataproc.messages.ManagedCluster.LabelsValue)
    managed_cluster = dataproc.messages.ManagedCluster(
        clusterName=cluster_name, config=cluster_config, labels=labels)
    # Replaces any existing placement (managed cluster or cluster selector).
    workflow_template.placement = dataproc.messages.WorkflowTemplatePlacement(
        managedCluster=managed_cluster)
    response = dataproc.client.projects_regions_workflowTemplates.Update(
        workflow_template)
    return response