feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for managing Cloud Composer environments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Environments(base.Group):
  """Create and manage Cloud Composer environments.

  The {command} command group lets you create Cloud Composer environments
  containing an Apache Airflow setup. Additionally, the command group supports
  environment updates including varying number of machines used to run Airflow,
  setting Airflow configs, or installing Python dependencies used in Airflow
  DAGs. The command group can also be used to delete Composer environments.
  """


# Place this command group under the Composer category in `gcloud` help
# listings.
Environments.category = base.COMPOSER_CATEGORY

View File

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command which checks that upgrading a Cloud Composer environment does not result in PyPI module conflicts."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.api_lib.composer import util as api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
'EXAMPLES':
"""\
To check that upgrading to the 'composer-1.16.5-airflow-1.10.15' image
in a Cloud Composer environment named 'env-1' does not cause
PyPI package conflicts,
run:
$ {command} env-1 --image-version=composer-1.16.5-airflow-1.10.15
"""
}
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class CheckUpgrade(base.Command):
  """Check that upgrading a Cloud Composer environment does not result in PyPI module conflicts."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    # Environment positional arg, --async, and the image-version /
    # airflow-version upgrade flag group.
    resource_args.AddEnvironmentResourceArg(parser, 'to check upgrade for')
    base.ASYNC_FLAG.AddToParser(parser)
    flags.AddEnvUpgradeFlagsToGroup(parser)

  def Run(self, args):
    """Starts a check-upgrade operation; waits for it unless --async.

    Returns:
      The completed operation's response in synchronous mode; None in
      asynchronous mode.

    Raises:
      command_util.InvalidUserInputError: if the requested image version is
        not a valid upgrade target for the environment's current version.
    """
    env_resource = args.CONCEPTS.environment.Parse()
    env_details = environments_api_util.Get(env_resource, self.ReleaseTrack())
    # Warn when the chosen version resolves to a default alias rather than an
    # explicitly pinned version.
    if (
        args.airflow_version or args.image_version
    ) and image_versions_command_util.IsDefaultImageVersion(args.image_version):
      message = image_versions_command_util.BuildDefaultComposerVersionWarning(
          args.image_version, args.airflow_version
      )
      log.warning(message)
    if args.airflow_version:
      # Converts airflow_version arg to image_version arg
      args.image_version = (
          image_versions_command_util.ImageVersionFromAirflowVersion(
              args.airflow_version,
              env_details.config.softwareConfig.imageVersion,
          )
      )
    # Checks validity of image_version upgrade request.
    if args.image_version:
      upgrade_validation = (
          image_versions_command_util.IsValidImageVersionUpgrade(
              env_details.config.softwareConfig.imageVersion, args.image_version
          )
      )
      if not upgrade_validation.upgrade_valid:
        raise command_util.InvalidUserInputError(upgrade_validation.error)
    operation = environments_api_util.CheckUpgrade(
        env_resource, args.image_version, release_track=self.ReleaseTrack())
    if args.async_:
      return self._AsynchronousExecution(env_resource, operation,
                                         args.image_version)
    else:
      return self._SynchronousExecution(env_resource, operation,
                                        args.image_version)

  def _AsynchronousExecution(self, env_resource, operation, image_version):
    """Logs the started operation and how to retrieve its result later."""
    details = 'to image {0} with operation [{1}]'.format(
        image_version, operation.name)
    # pylint: disable=protected-access
    # none of the log.CreatedResource, log.DeletedResource etc. matched
    log._PrintResourceChange(
        'check',
        env_resource.RelativeName(),
        kind='environment',
        is_async=True,
        details=details,
        failed=None)
    # pylint: enable=protected-access
    log.Print('If you want to see the result, run:')
    log.Print('gcloud composer operations describe ' + operation.name)

  def _SynchronousExecution(self, env_resource, operation, image_version):
    """Waits for the check-upgrade operation and returns its response.

    Raises:
      command_util.Error: if waiting for (or re-fetching) the operation fails.
    """
    try:
      operations_api_util.WaitForOperation(
          operation,
          ('Waiting for [{}] to be checked for PyPI package conflicts when'
           ' upgrading to {}. Operation [{}]').format(
               env_resource.RelativeName(), image_version, operation.name),
          release_track=self.ReleaseTrack())
      # Re-fetch the finished operation to obtain its response payload; the
      # waiter does not return it.
      completed_operation = operations_api_util.GetService(
          self.ReleaseTrack()).Get(
              api_util.GetMessagesModule(self.ReleaseTrack())
              .ComposerProjectsLocationsOperationsGetRequest(
                  name=operation.name))
      log.Print('\nIf you want to see the result once more, run:')
      log.Print('gcloud composer operations describe ' + operation.name + '\n')
      log.Print('If you want to see history of all operations to be able'
                ' to display results of previous check-upgrade runs, run:')
      log.Print('gcloud composer operations list\n')
      log.Print('Response: ')
      return completed_operation.response
    except command_util.Error as e:
      raise command_util.Error(
          ('Error while checking for PyPI package conflicts'
           ' [{}]: {}').format(env_resource.RelativeName(), six.text_type(e)))

View File

@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to trigger a database failover."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.api_lib.composer import util as api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
'EXAMPLES': """\
To run a manual database failover on the environment named ``environment-1'', run:
$ {command} environment-1
"""
}
class DatabaseFailover(base.Command):
  """Run a database failover operation."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to trigger a database failover'
    )
    base.ASYNC_FLAG.AddToParser(parser)

  @staticmethod
  def _ValidateEnvironment(env_obj, release_track):
    """Rejects environments that do not have high resilience mode enabled.

    Manual database failover only applies to highly resilient environments,
    so an unset or RESILIENCE_MODE_UNSPECIFIED resilience mode is an error.

    Raises:
      command_util.InvalidUserInputError: if high resilience mode is not
        enabled on the environment.
    """
    messages = api_util.GetMessagesModule(release_track=release_track)
    if (
        env_obj.config.resilienceMode is None
        or env_obj.config.resilienceMode
        == messages.EnvironmentConfig.ResilienceModeValueValuesEnum.RESILIENCE_MODE_UNSPECIFIED
    ):
      raise command_util.InvalidUserInputError(
          'Cannot trigger a database failover'
          ' for environments without enabled high resilience mode.'
      )

  def Run(self, args):
    """Validates the environment, then triggers the failover operation."""
    env_ref = args.CONCEPTS.environment.Parse()
    release_track = self.ReleaseTrack()
    env_obj = environments_api_util.Get(env_ref, release_track=release_track)
    self._ValidateEnvironment(env_obj, release_track)
    operation = environments_api_util.DatabaseFailover(
        env_ref, release_track=release_track
    )
    if args.async_:
      return self._AsynchronousExecution(env_ref, operation)
    else:
      return self._SynchronousExecution(env_ref, operation)

  def _AsynchronousExecution(self, env_resource, operation):
    """Logs the pending operation and returns it without waiting."""
    details = 'with operation [{0}]'.format(operation.name)
    log.UpdatedResource(
        env_resource.RelativeName(),
        kind='environment',
        is_async=True,
        details=details,
    )
    return operation

  def _SynchronousExecution(self, env_resource, operation):
    """Waits for the failover operation, wrapping failures in a new Error."""
    try:
      operations_api_util.WaitForOperation(
          operation,
          'Waiting for [{}] to be updated with [{}]'.format(
              env_resource.RelativeName(), operation.name
          ),
          release_track=self.ReleaseTrack(),
      )
    except command_util.Error as e:
      # Bug fix: corrected typo in the user-facing message
      # ('triggerering' -> 'triggering').
      raise command_util.Error(
          'Error triggering a database failover [{}]: {}'.format(
              env_resource.RelativeName(), six.text_type(e)
          )
      )

View File

@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete an environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.util import exceptions
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import delete_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
'EXAMPLES':
"""\
To delete the environment named ``environment-1'', run:
$ {command} environment-1
"""
}
class Delete(base.DeleteCommand):
  """Delete one or more Cloud Composer environments.

  Environments cannot be deleted unless they are in one of the RUNNING or
  ERROR states. If run asynchronously with `--async`, exits after printing
  one or more operation names that can be used to poll the status of the
  deletion(s) via:

    {top_command} composer operations describe

  If any of the environments are already in the process of being deleted,
  the original deletion operations are waited on (default) or printed
  (`--async`).
  """

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'to delete', plural=True, required=True, positional=True)
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    """Requests deletion of each environment, then optionally waits on them.

    Raises:
      command_util.EnvironmentDeleteError: if any deletion request failed or
        any awaited deletion operation failed.
    """
    env_refs = args.CONCEPTS.environments.Parse()
    # Interactive confirmation before issuing any destructive calls.
    console_io.PromptContinue(
        message=command_util.ConstructList(
            'Deleting the following environments: ', [
                '[{}] in [{}]'.format(env_ref.environmentsId,
                                      env_ref.locationsId)
                for env_ref in env_refs
            ]),
        cancel_on_no=True,
        cancel_string='Deletion aborted by user.',
        throw_if_unattended=True)
    waiter = delete_util.EnvironmentDeletionWaiter(
        release_track=self.ReleaseTrack())
    encountered_errors = False
    for env_ref in env_refs:
      operation = None
      failed = None
      details = None
      try:
        operation = environments_api_util.Delete(
            env_ref, release_track=self.ReleaseTrack())
      except apitools_exceptions.HttpError as e:
        # Record the failure but keep deleting the remaining environments.
        failed = exceptions.HttpException(e).payload.status_message
        encountered_errors = True
      else:
        details = 'with operation [{0}]'.format(operation.name)
        waiter.AddPendingDelete(
            environment_name=env_ref.RelativeName(), operation=operation)
      finally:
        # Always emit a per-environment status line, whether or not the
        # delete request succeeded.
        log.DeletedResource(
            env_ref.RelativeName(),
            kind='environment',
            is_async=True,
            details=details,
            failed=failed)
    if not args.async_:
      encountered_errors = waiter.Wait() or encountered_errors
    if encountered_errors:
      raise command_util.EnvironmentDeleteError(
          'Some requested deletions did not succeed. '
          'Please, refer to '
          'https://cloud.google.com/composer/docs/how-to/managing/updating '
          'and Composer Delete Troubleshooting pages to resolve this issue.')

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to show metadata for an environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
DETAILED_HELP = {
'EXAMPLES':
"""\
To get details about the Cloud Composer environment ``env-1'', run:
$ {command} env-1
"""
}
class Describe(base.DescribeCommand):
  """Get details about a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    # Positional environment argument identifying the environment to show.
    resource_args.AddEnvironmentResourceArg(parser, 'to describe')

  def Run(self, args):
    # Resolve the fully-qualified environment resource and fetch it from the
    # Composer API at this command's release track.
    environment_ref = args.CONCEPTS.environment.Parse()
    return environments_api_util.Get(
        environment_ref, release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to trigger a database failover."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
DETAILED_HELP = {
'EXAMPLES': """\
To fetch database properties for the environment named ``environment-1'', run:
$ {command} environment-1
"""
}
class FetchDatabaseProperties(base.Command):
  """Fetch database properties."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to fetch database properties'
    )

  def Run(self, args):
    # Resolve the environment resource, then query its database properties at
    # this command's release track.
    environment_ref = args.CONCEPTS.environment.Parse()
    return environments_api_util.FetchDatabaseProperties(
        environment_ref, release_track=self.ReleaseTrack()
    )

View File

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list environments in a project and location."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import resources
DETAILED_HELP = {
'EXAMPLES':
"""\
To list the Cloud Composer environments under the project 'project-1'
and in location 'us-central1', run:
$ {command} --project=project-1 --locations=us-central1
"""
}
@base.UniverseCompatible
class List(base.ListCommand):
  """List the Cloud Composer environments under a project and location.

  List environments that have not been successfully deleted. Prints a table
  with the following columns:
  * name
  * location
  * status
  * creation timestamp
  """

  detailed_help = DETAILED_HELP

  @staticmethod
  def _GetUri(environment):
    # Builds the environment's self-link URI from its relative resource name;
    # registered below as the --uri output function.
    r = resources.REGISTRY.ParseRelativeName(
        environment.name,
        collection='composer.projects.locations.environments',
        api_version='v1',
    )
    return r.SelfLink()

  @staticmethod
  def Args(parser):
    # --locations is required only in non-default universes; otherwise it can
    # fall back to the [composer/location] property (see Run below).
    resource_args.AddLocationResourceArg(
        parser,
        'in which to list environments',
        positional=False,
        required=arg_parsers.ArgRequiredInUniverse(
            default_universe=False, non_default_universe=True
        ),
        plural=True,
        help_supplement=(
            'If not specified, the location stored in the property '
            ' [composer/location] will be used.'
        ),
    )
    # NAME and LOCATION are extracted as segments of the relative resource
    # name projects/*/locations/*/environments/*.
    parser.display_info.AddFormat('table[box]('
                                  'name.segment(5):label=NAME,'
                                  'name.segment(3):label=LOCATION,'
                                  'state:label=STATE,'
                                  'createTime:reverse'
                                  ')')
    parser.display_info.AddUriFunc(List._GetUri)

  def Run(self, args):
    """Lists environments across the requested (or defaulted) locations."""
    location_refs = flags.FallthroughToLocationProperty(
        args.CONCEPTS.locations.Parse(),
        '--locations',
        'One or more locations in which to list environments must be provided.')
    return environments_api_util.List(
        location_refs,
        args.page_size,
        limit=args.limit,
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,228 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list all PyPI modules installed in an Airflow worker."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import random
import sys
import time
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
'EXAMPLES':
"""\
The following command:
$ {command} myenv
runs the "python -m pip list" command on a worker and returns the output.
The following command:
$ {command} myenv --tree
runs the "python -m pipdeptree --warn" command on a worker and returns the
output.
"""
}
WORKER_POD_SUBSTR = 'worker'
WORKER_CONTAINER = 'airflow-worker'
DEFAULT_POLL_TIME_SECONDS = 2
MAX_CONSECUTIVE_POLL_ERRORS = 10
MAX_POLL_TIME_SECONDS = 30
EXP_BACKOFF_MULTIPLIER = 1.75
POLL_JITTER_SECONDS = 0.5
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class Run(base.Command):
  """List all PyPI modules installed in an Airflow worker."""

  detailed_help = DETAILED_HELP

  @classmethod
  def Args(cls, parser):
    resource_args.AddEnvironmentResourceArg(parser,
                                            'in which to list PyPI modules')
    parser.add_argument(
        '--tree',
        default=None,
        action='store_true',
        help="""\
        List PyPI packages, their versions and a dependency tree, as displayed by the "python -m pipdeptree --warn" command.
        """)

  def ConvertKubectlError(self, error, env_obj):
    """Hook for subclasses to translate kubectl errors; GA passes through."""
    del env_obj  # Unused argument.
    return error

  def _RunKubectl(self, args, env_obj):
    """Lists packages by exec-ing pip/pipdeptree in a worker pod via kubectl.

    Raises:
      command_util.Error (or subclass): via ConvertKubectlError when the
        kubectl invocation fails.
    """
    cluster_id = env_obj.config.gkeCluster
    cluster_location_id = command_util.ExtractGkeClusterLocationId(env_obj)
    # NOTE(review): membership test on the args namespace; presumably detects
    # an injected 'no-tty' key rather than a declared flag -- confirm.
    tty = 'no-tty' not in args
    with command_util.TemporaryKubeconfig(
        cluster_location_id, cluster_id, None
    ):
      try:
        image_version = env_obj.config.softwareConfig.imageVersion
        kubectl_ns = command_util.FetchKubectlNamespace(image_version)
        pod = command_util.GetGkePod(
            pod_substr=WORKER_POD_SUBSTR, kubectl_namespace=kubectl_ns)
        log.status.Print(
            'Executing within the following Kubernetes cluster namespace: '
            '{}'.format(kubectl_ns))
        kubectl_args = ['exec', pod, '--stdin']
        if tty:
          kubectl_args.append('--tty')
        kubectl_args.extend(['--container', WORKER_CONTAINER, '--'])
        if args.tree:
          kubectl_args.extend(['python', '-m', 'pipdeptree', '--warn'])
        else:
          kubectl_args.extend(['python', '-m', 'pip', 'list'])
        command_util.RunKubectlCommand(
            command_util.AddKubectlNamespace(kubectl_ns, kubectl_args),
            out_func=log.out.Print)
      except command_util.KubectlError as e:
        raise self.ConvertKubectlError(e, env_obj)

  def _RunApi(self, args, env_ref):
    """Lists packages via the Airflow-commands API, streaming remote output.

    Polls with jittered exponential backoff until the remote command reports
    end of output; terminates the process with the remote exit code when the
    remote command failed.

    Raises:
      command_util.Error: on an empty execution id or after too many
        consecutive poll failures.
    """
    if args.tree:
      subcommand = 'pipdeptree'
    else:
      subcommand = 'pip list'
    # NOTE(review): '--warn' is not forwarded here (parameters=[]); removed
    # the dead cmd_params local that built it. Presumably the flag is applied
    # server-side for pipdeptree -- confirm.
    execute_result = environments_api_util.ExecuteAirflowCommand(
        command='list-packages',
        subcommand=subcommand,
        parameters=[],
        environment_ref=env_ref,
        release_track=self.ReleaseTrack(),
    )
    if not execute_result.executionId:
      raise command_util.Error(
          'Cannot execute subcommand for environment. Got empty execution Id.'
      )
    output_end = False
    next_line = 1
    wait_time_seconds = DEFAULT_POLL_TIME_SECONDS
    poll_result = None
    # Renamed from the misspelled 'cur_consequetive_poll_errors'.
    consecutive_poll_errors = 0
    while not output_end:
      lines = None
      try:
        # Jitter de-synchronizes polling across concurrent clients.
        time.sleep(
            wait_time_seconds
            + random.uniform(-POLL_JITTER_SECONDS, POLL_JITTER_SECONDS)
        )
        poll_result = environments_api_util.PollAirflowCommand(
            execution_id=execute_result.executionId,
            pod_name=execute_result.pod,
            pod_namespace=execute_result.podNamespace,
            next_line_number=next_line,
            environment_ref=env_ref,
            release_track=self.ReleaseTrack(),
        )
        consecutive_poll_errors = 0
        output_end = poll_result.outputEnd
        lines = poll_result.output
        lines.sort(key=lambda line: line.lineNumber)
      except:  # pylint:disable=bare-except
        # Tolerate transient poll failures up to a fixed budget.
        consecutive_poll_errors += 1
        if consecutive_poll_errors == MAX_CONSECUTIVE_POLL_ERRORS:
          raise command_util.Error('Cannot fetch list-packages command status.')
      if not lines:
        # Back off while there is no new output.
        wait_time_seconds = min(
            wait_time_seconds * EXP_BACKOFF_MULTIPLIER, MAX_POLL_TIME_SECONDS
        )
      else:
        wait_time_seconds = DEFAULT_POLL_TIME_SECONDS
        for line in lines:
          log.Print(line.content if line.content else '')
        next_line = lines[-1].lineNumber + 1
    if poll_result and poll_result.exitInfo and poll_result.exitInfo.exitCode:
      # NOTE(review): message says 'exit code' but formats exitInfo.error;
      # left as-is -- confirm which field is intended.
      log.error('Command exit code: {}'.format(poll_result.exitInfo.error))
      # Bug fix: use sys.exit instead of the site-module builtin exit(), which
      # is intended only for interactive sessions and may be absent under -S.
      sys.exit(poll_result.exitInfo.exitCode)

  def Run(self, args):
    """Dispatches to the API-based or kubectl-based listing implementation."""
    env_ref = args.CONCEPTS.environment.Parse()
    env_obj = environments_api_util.Get(
        env_ref, release_track=self.ReleaseTrack())
    # Prefer the Airflow-commands API when the image version supports it;
    # otherwise fall back to exec-ing into the worker pod directly.
    if image_versions_command_util.IsVersionAirflowCommandsApiCompatible(
        image_version=env_obj.config.softwareConfig.imageVersion
    ):
      self._RunApi(args, env_ref)
    else:
      self._RunKubectl(args, env_obj)
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.ALPHA)
@base.DefaultUniverseOnly
class RunBeta(Run):
  """List all PyPI modules installed in an Airflow worker.

  ## EXAMPLES

  The following command:

  {command} myenv

  runs the "python -m pip list" command on a worker and returns the output.

  The following command:

  {command} myenv --tree

  runs the "python -m pipdeptree --warn" command on a worker and returns the
  output.
  """

  def ConvertKubectlError(self, error, env_obj):
    # On private-IP environments a kubectl failure is most often a
    # connectivity problem, so append a pointer to the access guide.
    private_config = env_obj.config.privateEnvironmentConfig
    if not (private_config and private_config.enablePrivateEnvironment):
      return error
    return command_util.Error(
        six.text_type(error) +
        ' Make sure you have followed https://cloud.google.com/composer/docs/how-to/accessing/airflow-cli#running_commands_on_a_private_ip_environment '
        'to enable access to your private Cloud Composer environment from '
        'your machine.')

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list suggested environment upgrades."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class ListUpgrades(base.ListCommand):
  """List the Cloud Composer image version upgrades for a specific environment.

  {command} prints a table listing the suggested image-version upgrades with the
  following columns:
  * Image Version ID
  * Composer 'default' flag
  * List of supported python versions
  """

  # Table layout for the suggested-upgrades listing.
  _UPGRADES_TABLE_FORMAT = (
      'table[box,title="SUGGESTED UPGRADES"]('
      'imageVersionId:label="IMAGE VERSION",'
      'isDefault:label="COMPOSER DEFAULT",'
      'supportedPythonVersions.list():label="SUPPORTED PYTHON VERSIONS")')

  @staticmethod
  def Args(parser):
    # Register the environment resource and drop the inherited URI flag,
    # which does not apply to image-version rows.
    resource_args.AddEnvironmentResourceArg(parser, 'to list upgrades')
    base.URI_FLAG.RemoveFromParser(parser)
    parser.display_info.AddFormat(ListUpgrades._UPGRADES_TABLE_FORMAT)

  def Run(self, args):
    environment_ref = args.CONCEPTS.environment.Parse()
    log.status.Print('Fetching list of suggested upgrades...')
    return image_versions_command_util.ListImageVersionUpgrades(
        environment_ref, release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Composer workloads."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import environments_workloads_util as workloads_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
DETAILED_HELP = {'EXAMPLES': """\
To display Composer workloads for the environment named ``environment-1'', run:
$ {command} environment-1
"""}
COMPOSER3_IS_REQUIRED_MSG = """\
The operation is not supported for given environment. Composer version {composer_version} or greater is required.
"""
@base.DefaultUniverseOnly
class ListWorkloads(base.Command):
  """List Composer workloads, supported in Composer 3 environments or greater."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to display workloads'
    )

  def Run(self, args):
    """Returns the workloads of a Composer 3 (or newer) environment.

    Raises:
      command_util.InvalidUserInputError: if the environment runs a Composer
        version older than Composer 3.
    """
    env_ref = args.CONCEPTS.environment.Parse()
    release_track = self.ReleaseTrack()
    # Consistency fix: reuse the release_track computed above instead of
    # calling self.ReleaseTrack() a second time.
    env_obj = environments_api_util.Get(env_ref, release_track=release_track)
    if not image_versions_command_util.IsVersionComposer3Compatible(
        image_version=env_obj.config.softwareConfig.imageVersion
    ):
      raise command_util.InvalidUserInputError(
          COMPOSER3_IS_REQUIRED_MSG.format(
              composer_version=flags.MIN_COMPOSER3_VERSION,
          )
      )
    workloads_service = workloads_util.EnvironmentsWorkloadsService(
        release_track
    )
    return workloads_service.List(env_ref)

View File

@@ -0,0 +1,348 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to migrate an environment."""
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.api_lib.composer import util as api_util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
@base.DefaultUniverseOnly
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA)
class Migrate(base.Command):
  """Migrates an environment from Composer 2 to Composer 3 in-place.

  If run asynchronously with `--async`, exits after printing an operation
  that can be used to poll the status of the creation operation via:
  {top_command} composer operations describe
  """

  @classmethod
  def Args(cls, parser):
    """Registers the flags for the migrate command.

    Args:
      parser: argparse.ArgumentParser, the parser to add flags to.
    """
    resource_args.AddEnvironmentResourceArg(parser, 'to migrate')
    base.ASYNC_FLAG.AddToParser(parser)
    # Only Composer 3 image versions are accepted as a migration target.
    target_version_type = arg_parsers.RegexpValidator(
        r'^composer-3-airflow-(\d+(?:\.\d+(?:\.\d+(?:-build\.\d+)?)?)?)',
        "must be in the form 'composer-3-airflow-X[.Y[.Z]]' For example:"
        " 'composer-3-airflow-2.3.4-build.5'. Only migrations to Composer 3 are"
        " supported.",
    )
    parser.add_argument(
        '--image-version',
        type=target_version_type,
        metavar='IMAGE_VERSION',
        required=True,
        help="""\
        Migrate the Composer 2 environment to this Composer 3 version in-place.

        The image version encapsulates the versions of both Cloud Composer and
        Apache Airflow. Must be of the form
        `composer-3-airflow-X[.Y[.Z[-build.N]]]`, where `[]` denotes optional
        fragments.

        Examples: `composer-3-airflow-2`, `composer-3-airflow-2.2`,
        `composer-3-airflow-2.3.4`, `composer-3-airflow-2.3.4-build.5`.

        The Cloud Composer portion of the image version must be a Composer 3
        version. The Apache Airflow portion of the image version can be a
        semantic version or an alias in the form of major or major.minor
        version numbers, resolved to the latest matching Apache Airflow version
        supported in the given Cloud Composer version. The resolved versions are
        stored in the migrated environment.
        """,
    )
    # Exactly one retention choice must be made explicitly; retaining the
    # cluster keeps billing for it until it is deleted manually.
    gke_cluster_retention_policy_group = parser.add_mutually_exclusive_group(
        help=(
            'Specify what should happen to the Composer 2 GKE cluster after'
            ' migration. If cluster is retained, until deleted manually it will'
            ' contribute to environment\'s cost.'
        ),
        required=True,
    )
    gke_cluster_retention_policy_group.add_argument(
        '--retain-gke-cluster',
        action='store_true',
        help='Retain Composer 2 GKE cluster after migration.',
    )
    gke_cluster_retention_policy_group.add_argument(
        '--delete-gke-cluster',
        action='store_true',
        help='Delete Composer 2 GKE cluster after migration.',
    )
    maintenance_window_group = parser.add_argument_group(
        help=(
            'Specify the maintenance window for the migrated environment. It'
            ' will override the current maintenance window. If not specified,'
            ' and the environment uses Composer 2 default maintenance window,'
            ' the migrated environment will use the Composer 3 default'
            ' maintenance window.'
        )
    )
    flags.MAINTENANCE_WINDOW_START_FLAG.AddToParser(maintenance_window_group)
    flags.MAINTENANCE_WINDOW_END_FLAG.AddToParser(maintenance_window_group)
    flags.MAINTENANCE_WINDOW_RECURRENCE_FLAG.AddToParser(
        maintenance_window_group
    )
    # DAG processor configuration is mandatory: Composer 3 runs DAG
    # processors as a separate workload that has no Composer 2 counterpart.
    dag_processor_group = parser.add_argument_group(
        required=True,
        help=(
            'Specify the configuration of DAG processor for the'
            ' Composer 3 environment.'
        )
    )
    dag_processor_group.add_argument(
        '--dag-processor-cpu',
        type=float,
        required=True,
        help='CPU allocated to Airflow dag processor'
    )
    dag_processor_group.add_argument(
        '--dag-processor-memory',
        type=arg_parsers.BinarySize(
            lower_bound='1GB',
            upper_bound='128GB',
            suggested_binary_size_scales=['MB', 'GB'],
            default_unit='G',
        ),
        required=True,
        help=(
            'Memory allocated to Airflow dag processor, ex. 1GB, 3GB, 2. If'
            ' units are not provided, defaults to GB.'
        ),
    )
    dag_processor_group.add_argument(
        '--dag-processor-storage',
        type=arg_parsers.BinarySize(
            default_unit='G',
        ),
        required=True,
        help=(
            'Storage allocated to Airflow dag processor, ex. 600MB, 3GB, 2. If'
            ' units are not provided, defaults to GB.'
        ),
    )
    dag_processor_group.add_argument(
        '--dag-processor-count',
        type=int,
        required=True,
        help='Number of dag processors',
    )
    scheduler_group = parser.add_argument_group(
        help=(
            'Group of arguments for setting scheduler configuration in migrated'
            ' Composer environment. If not specified, the current scheduler'
            ' configuration will be preserved.'
        )
    )
    flags.SCHEDULER_CPU.AddToParser(scheduler_group)
    flags.SCHEDULER_MEMORY.AddToParser(scheduler_group)
    flags.SCHEDULER_STORAGE.AddToParser(scheduler_group)
    flags.NUM_SCHEDULERS.AddToParser(scheduler_group)
    worker_group = parser.add_argument_group(
        help=(
            'Group of arguments for setting worker configuration in migrated'
            ' Composer environment. If not specified, the current worker'
            ' configuration will be preserved.'
        )
    )
    flags.WORKER_CPU.AddToParser(worker_group)
    flags.WORKER_MEMORY.AddToParser(worker_group)
    flags.WORKER_STORAGE.AddToParser(worker_group)
    flags.MIN_WORKERS.AddToParser(worker_group)
    flags.MAX_WORKERS.AddToParser(worker_group)
    web_server_group = parser.add_argument_group(
        help=(
            'Group of arguments for setting web server configuration in'
            ' migrated Composer environment. If not specified, the current web'
            ' server configuration will be preserved.'
        )
    )
    flags.WEB_SERVER_CPU.AddToParser(web_server_group)
    flags.WEB_SERVER_MEMORY.AddToParser(web_server_group)
    flags.WEB_SERVER_STORAGE.AddToParser(web_server_group)
    triggerer_group = parser.add_argument_group(
        help=(
            'Group of arguments for setting triggerer configuration in migrated'
            ' Composer environment. If not specified, the current triggerer'
            ' configuration will be preserved.'
        )
    )
    flags.TRIGGERER_CPU.AddToParser(triggerer_group)
    flags.TRIGGERER_MEMORY.AddToParser(triggerer_group)
    flags.TRIGGERER_COUNT.AddToParser(triggerer_group)

  def _Validate(self, env_obj, args):
    """Checks that the migration is Composer 2 -> Composer 3.

    Args:
      env_obj: the Environment resource fetched from the API.
      args: argparse.Namespace, parsed command-line arguments.

    Raises:
      command_util.InvalidUserInputError: if the source environment is not
        Composer 2 or the target image version is not Composer 3.
    """
    if not image_versions_command_util.IsImageVersionStringComposerV2(
        env_obj.config.softwareConfig.imageVersion
    ) or not image_versions_command_util.IsImageVersionStringComposerV3(
        args.image_version
    ):
      raise command_util.InvalidUserInputError(
          'Migration is only supported from Composer 2 to Composer 3.'
      )

  def _ConstructGkeClusterRetentionPolicy(self, args, release_track):
    """Maps the retention flags onto the API enum value.

    Args:
      args: argparse.Namespace, parsed command-line arguments.
      release_track: base.ReleaseTrack, used to select the messages module.

    Returns:
      The GkeClusterRetentionPolicy enum value for the migrate request.

    Raises:
      command_util.InvalidUserInputError: if neither flag was provided
        (unreachable in practice since the flag group is required).
    """
    messages = api_util.GetMessagesModule(release_track=release_track)
    if args.retain_gke_cluster:
      return (
          messages.MigrateEnvironmentRequest.GkeClusterRetentionPolicyValueValuesEnum.RETAIN_GKE_CLUSTER
      )
    elif args.delete_gke_cluster:
      return (
          messages.MigrateEnvironmentRequest.GkeClusterRetentionPolicyValueValuesEnum.DELETE_GKE_CLUSTER
      )
    else:
      raise command_util.InvalidUserInputError(
          'One of --retain-gke-cluster or --delete-gke-cluster must be'
          ' specified.'
      )

  def _ConstructMigrateEnvironmentRequest(self, args, release_track):
    """Builds the MigrateEnvironmentRequest message from parsed flags.

    Workload sections (scheduler, worker, web server, triggerer) are only
    included when at least one of their flags was set; otherwise the current
    configuration is preserved server-side. The DAG processor section is
    always present because its flags are required.

    Args:
      args: argparse.Namespace, parsed command-line arguments.
      release_track: base.ReleaseTrack, used to select the messages module.

    Returns:
      A MigrateEnvironmentRequest message ready to be sent to the API.
    """
    messages = api_util.GetMessagesModule(release_track=release_track)
    workloads_config = dict(
        dagProcessor=messages.DagProcessorResource(
            cpu=args.dag_processor_cpu,
            memoryGb=environments_api_util.MemorySizeBytesToGB(
                args.dag_processor_memory
            ),
            storageGb=environments_api_util.MemorySizeBytesToGB(
                args.dag_processor_storage
            ),
            count=args.dag_processor_count,
        ),
    )
    if (
        args.scheduler_cpu
        or args.scheduler_memory
        or args.scheduler_storage
        or args.scheduler_count
    ):
      workloads_config['scheduler'] = messages.SchedulerResource(
          cpu=args.scheduler_cpu,
          memoryGb=environments_api_util.MemorySizeBytesToGB(
              args.scheduler_memory
          ),
          storageGb=environments_api_util.MemorySizeBytesToGB(
              args.scheduler_storage
          ),
          count=args.scheduler_count,
      )
    if (
        args.worker_cpu
        or args.worker_memory
        or args.worker_storage
        or args.min_workers
        or args.max_workers
    ):
      workloads_config['worker'] = messages.WorkerResource(
          cpu=args.worker_cpu,
          memoryGb=environments_api_util.MemorySizeBytesToGB(
              args.worker_memory
          ),
          storageGb=environments_api_util.MemorySizeBytesToGB(
              args.worker_storage
          ),
          minCount=args.min_workers,
          maxCount=args.max_workers,
      )
    if args.web_server_cpu or args.web_server_memory or args.web_server_storage:
      workloads_config['webServer'] = messages.WebServerResource(
          cpu=args.web_server_cpu,
          memoryGb=environments_api_util.MemorySizeBytesToGB(
              args.web_server_memory
          ),
          storageGb=environments_api_util.MemorySizeBytesToGB(
              args.web_server_storage
          ),
      )
    if args.triggerer_cpu or args.triggerer_memory or args.triggerer_count:
      workloads_config['triggerer'] = messages.TriggererResource(
          cpu=args.triggerer_cpu,
          memoryGb=environments_api_util.MemorySizeBytesToGB(
              args.triggerer_memory
          ),
          count=args.triggerer_count,
      )
    migrate_request = dict(
        imageVersion=args.image_version,
        workloadsConfig=messages.WorkloadsConfig(**workloads_config),
        gkeClusterRetentionPolicy=self._ConstructGkeClusterRetentionPolicy(
            args, release_track
        ),
    )
    # Override the maintenance window only if all three parts were provided;
    # the flags live in one argument group so calliope enforces co-presence.
    if (
        args.maintenance_window_start
        and args.maintenance_window_end
        and args.maintenance_window_recurrence
    ):
      migrate_request['maintenanceWindow'] = messages.MaintenanceWindow(
          startTime=args.maintenance_window_start.isoformat(),
          endTime=args.maintenance_window_end.isoformat(),
          recurrence=args.maintenance_window_recurrence,
      )
    return messages.MigrateEnvironmentRequest(**migrate_request)

  def Run(self, args):
    """Validates input, starts the migration, and optionally waits for it.

    Args:
      args: argparse.Namespace, parsed command-line arguments.

    Returns:
      The long-running operation when --async is set; otherwise None after
      the operation completes.

    Raises:
      command_util.Error: if the synchronous wait fails.
    """
    env_ref = args.CONCEPTS.environment.Parse()
    env_obj = environments_api_util.Get(
        env_ref, release_track=self.ReleaseTrack())
    self._Validate(env_obj, args)
    request = self._ConstructMigrateEnvironmentRequest(
        args, self.ReleaseTrack()
    )
    operation = environments_api_util.Migrate(
        environment_ref=env_ref,
        request=request,
        release_track=self.ReleaseTrack(),
    )
    if args.async_:
      log.UpdatedResource(
          env_ref.RelativeName(),
          kind='environment',
          is_async=True,
          details='with operation [{0}]'.format(operation.name),
      )
      return operation
    try:
      operations_api_util.WaitForOperation(
          operation,
          'Waiting for [{}] to be updated with [{}]'.format(
              env_ref.RelativeName(), operation.name),
          release_track=self.ReleaseTrack())
    except command_util.Error as e:
      raise command_util.Error('Error updating [{}]: {}'.format(
          env_ref.RelativeName(), six.text_type(e)))

View File

@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that restarts web server for an environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
'EXAMPLES':
"""\
To restart the Cloud Composer web server in an environment named
``env-1'', run:
$ {command} env-1
"""
}
@base.ReleaseTracks(
    base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA, base.ReleaseTrack.GA
)
# TODO(b/371178112): Reconsider this annotation once the command is ready for
# other universes.
@base.DefaultUniverseOnly
class RestartWebServer(base.Command):
  """Restart web server for a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment positional argument and the --async flag."""
    resource_args.AddEnvironmentResourceArg(parser, 'to restart web server for')
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    """Starts the restart operation and waits for it unless --async is set."""
    env_resource = args.CONCEPTS.environment.Parse()
    operation = environments_api_util.RestartWebServer(
        env_resource, release_track=self.ReleaseTrack())
    execute = (
        self._AsynchronousExecution
        if args.async_
        else self._SynchronousExecution
    )
    return execute(env_resource, operation)

  def _AsynchronousExecution(self, env_resource, operation):
    """Logs the pending operation and returns immediately."""
    log.UpdatedResource(
        env_resource.RelativeName(),
        kind='environment',
        is_async=True,
        details='with operation [{0}]'.format(operation.name))
    return operation

  def _SynchronousExecution(self, env_resource, operation):
    """Blocks until the restart operation finishes; wraps failures."""
    try:
      operations_api_util.WaitForOperation(
          operation,
          'Waiting for [{}] to be updated with [{}]'.format(
              env_resource.RelativeName(), operation.name),
          release_track=self.ReleaseTrack())
    except command_util.Error as e:
      raise command_util.Error('Error restarting web server [{}]: {}'.format(
          env_resource.RelativeName(), six.text_type(e)))

View File

@@ -0,0 +1,508 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to run an Airflow CLI sub-command in an environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import argparse
import random
import re
import sys
import time

from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import util as api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import execution_utils
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
# Substring used to locate an Airflow worker pod in the environment's cluster.
WORKER_POD_SUBSTR = 'airflow-worker'
# Name of the container, inside the worker pod, where the command executes.
WORKER_CONTAINER = 'airflow-worker'
# Warning shown before running subcommands that mutate the Airflow metadata
# database; `{}` is filled with the subcommand name.
DEPRECATION_WARNING = ('Because Cloud Composer manages the Airflow metadata '
                       'database for your environment, support for the Airflow '
                       '`{}` subcommand is being deprecated. '
                       'To avoid issues related to Airflow metadata, we '
                       'recommend that you do not use this subcommand unless '
                       'you understand the outcome.')
# Output-polling tuning knobs: start at DEFAULT_POLL_TIME_SECONDS between
# polls, back off by EXP_BACKOFF_MULTIPLIER while no new output arrives (up
# to MAX_POLL_TIME_SECONDS), add +/- POLL_JITTER_SECONDS of random jitter,
# and give up after MAX_CONSECUTIVE_POLL_ERRORS consecutive failures.
DEFAULT_POLL_TIME_SECONDS = 2
MAX_CONSECUTIVE_POLL_ERRORS = 10
MAX_POLL_TIME_SECONDS = 30
EXP_BACKOFF_MULTIPLIER = 1.75
POLL_JITTER_SECONDS = 0.5
@base.DefaultUniverseOnly
class Run(base.Command):
  """Run an Airflow sub-command remotely in a Cloud Composer environment.

  Executes an Airflow CLI sub-command remotely in an environment. If the
  sub-command takes flags, separate the environment name from the sub-command
  and its flags with ``--''. This command waits for the sub-command to
  complete; its exit code will match the sub-command's exit code.

  Note: Airflow CLI sub-command syntax differs between Airflow 1 and Airflow 2.
  Refer to the Airflow CLI reference documentation for more details.

  ## EXAMPLES

    The following command in environments with Airflow 2:

      {command} myenv dags trigger -- some_dag --run_id=foo

    is equivalent to running the following command from a shell inside the
    *my-environment* environment:

      airflow dags trigger --run_id=foo some_dag

    The same command, but for environments with Airflow 1.10.14+:

      {command} myenv trigger_dag -- some_dag --run_id=foo

    is equivalent to running the following command from a shell inside the
    *my-environment* environment:

      airflow trigger_dag some_dag --run_id=foo

    The following command (for environments with Airflow 1.10.14+):

      {command} myenv dags list

    is equivalent to running the following command from a shell inside the
    *my-environment* environment:

      airflow dags list
  """

  # Allowed Airflow subcommands together with the Airflow version ranges
  # (and, where restricted, nested subcommands) they are supported for.
  SUBCOMMAND_ALLOWLIST = command_util.SUBCOMMAND_ALLOWLIST

  @classmethod
  def Args(cls, parser):
    """Registers the positional arguments of the command.

    Args:
      parser: argparse.ArgumentParser, the parser to add arguments to.
    """
    resource_args.AddEnvironmentResourceArg(
        parser, 'in which to run an Airflow command')
    doc_url = 'https://airflow.apache.org/docs/apache-airflow/stable/cli-and-env-variables-ref.html'
    parser.add_argument(
        'subcommand',
        metavar='SUBCOMMAND',
        choices=list(cls.SUBCOMMAND_ALLOWLIST.keys()),
        help=('The Airflow CLI subcommand to run. Available subcommands '
              'include (listed with Airflow versions that support): {} '
              '(see {} for more info).').format(
                  ', '.join(
                      sorted([
                          '{} [{}, {})'.format(cmd, r.from_version or '**',
                                               r.to_version or '**')
                          for cmd, r in cls.SUBCOMMAND_ALLOWLIST.items()
                      ])), doc_url))
    # Add information about restricted nested subcommands.
    # Some subcommands only allow certain nested subcommands.
    # Written with for loops to reduce complexity. [g-complex-comprehension]
    allowed_nested_subcommands_help = []
    for sub_cmd, r in cls.SUBCOMMAND_ALLOWLIST.items():
      # Skip sub-commands which don't have a list of allowed_nested_subcommands
      # Meaning, all nested subcommands are allowed for this subcommand
      if not r.allowed_nested_subcommands:
        continue
      allowed_nested_subcommands_help.append(
          '- {}: {}'.format(
              sub_cmd,
              ', '.join(sorted(r.allowed_nested_subcommands.keys()))
          ))
    # Add an additional element stating that all other subcommands are allowed
    allowed_nested_subcommands_help.append(
        '- all other subcommands: all nested subcommands are allowed'
    )
    parser.add_argument(
        'subcommand_nested',
        metavar='SUBCOMMAND_NESTED',
        nargs=argparse.OPTIONAL,
        help=(
            'Additional subcommand in case it is nested. '
            'The following is a list of allowed nested subcommands:\n'
            '{}'
        ).format('\n'.join(allowed_nested_subcommands_help)),
    )
    parser.add_argument(
        'cmd_args',
        metavar='CMD_ARGS',
        nargs=argparse.REMAINDER,
        help='Command line arguments to the subcommand.',
        example='{command} myenv trigger_dag -- some_dag --run_id=foo')

  def BypassConfirmationPrompt(self, args, airflow_version):
    """Bypasses confirmations with "yes" responses.

    Prevents certain Airflow CLI subcommands from presenting a confirmation
    prompting (which can make the gcloud CLI stop responding). When necessary,
    bypass confirmations with a "yes" response.

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.
      airflow_version: String, an Airflow semantic version.
    """
    # Value is the lowest Airflow version for which this command needs to bypass
    # the confirmation prompt.
    prompting_subcommands = {
        'backfill': '1.10.6',
        'delete_dag': None,
        ('dags', 'backfill'): None,
        ('dags', 'delete'): None,
        ('tasks', 'clear'): None,
        ('db', 'clean'): None,
    }
    # Handle nested commands like "dags list". There are two ways to execute
    # nested Airflow subcommands via gcloud:
    # 1. {command} myenv dags delete -- dag_id
    # 2. {command} myenv dags -- delete dag_id
    subcommand_two_level = self._GetSubcommandTwoLevel(args)

    def _IsPromptingSubcommand(s):
      # Keys are either (command, nested) tuples or bare command strings.
      if s in prompting_subcommands:
        pass
      elif s[0] in prompting_subcommands:
        s = s[0]
      else:
        return False
      return (prompting_subcommands[s] is None or
              image_versions_command_util.CompareVersions(
                  airflow_version, prompting_subcommands[s]) >= 0)

    # Append --yes only when the user did not already pass a yes flag.
    if (_IsPromptingSubcommand(subcommand_two_level) and
        set(args.cmd_args or []).isdisjoint({'-y', '--yes'})):
      args.cmd_args = args.cmd_args or []
      args.cmd_args.append('--yes')

  def CheckForRequiredCmdArgs(self, args):
    """Prevents running Airflow CLI commands without required arguments.

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.

    Raises:
      command_util.Error: if a known subcommand is missing one of its
        required command-line arguments.
    """
    # Dict values are lists of tuples, each tuple represents set of arguments,
    # where at least one argument from tuple will be required.
    # E.g. for "users create" subcommand, one of the "-p", "--password" or
    # "--use-random-password" will be required.
    required_cmd_args = {
        ('users', 'create'): [['-p', '--password', '--use-random-password']],
    }

    def _StringifyRequiredCmdArgs(cmd_args):
      quoted_args = ['"{}"'.format(a) for a in cmd_args]
      return '[{}]'.format(', '.join(quoted_args))

    subcommand_two_level = self._GetSubcommandTwoLevel(args)
    # For now `required_cmd_args` contains only two-level Airflow commands,
    # but potentially in the future it could be extended for one-level
    # commands as well, and this code will have to be updated appropriately.
    for subcommand_required_cmd_args in required_cmd_args.get(
        subcommand_two_level, []):
      if set(subcommand_required_cmd_args).isdisjoint(set(args.cmd_args or [])):
        raise command_util.Error(
            'The subcommand "{}" requires one of the following command line '
            'arguments: {}.'.format(
                ' '.join(subcommand_two_level),
                _StringifyRequiredCmdArgs(subcommand_required_cmd_args)))

  def DeprecationWarningPrompt(self, args):
    """Prompts for confirmation before running a deprecated subcommand.

    Args:
      args: argparse.Namespace, parsed command-line arguments.

    Returns:
      True when no prompt was needed or the user confirmed; PromptContinue
      cancels the command on a "no" answer.
    """
    response = True
    if args.subcommand in command_util.SUBCOMMAND_DEPRECATION:
      response = console_io.PromptContinue(
          message=DEPRECATION_WARNING.format(args.subcommand),
          default=False,
          cancel_on_no=True)
    return response

  def _GetSubcommandTwoLevel(self, args):
    """Extract and return two level nested Airflow subcommand in unified shape.

    There are two ways to execute nested Airflow subcommands via gcloud, e.g.:
    1. {command} myenv users create -- -u User
    2. {command} myenv users -- create -u User
    The method returns here (users, create) in both cases.

    It is possible that first element of args.cmd_args will not be a nested
    subcommand, but that is ok as it will not break entire logic.
    So, essentially there can be subcommand_two_level = ['info', '--anonymize'].

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.

    Returns:
      subcommand_two_level: two level subcommand in unified format
    """
    subcommand_two_level = (args.subcommand, None)
    if args.subcommand_nested:
      subcommand_two_level = (args.subcommand, args.subcommand_nested)
    elif args.cmd_args:
      subcommand_two_level = (args.subcommand, args.cmd_args[0])
    return subcommand_two_level

  def CheckSubcommandAirflowSupport(self, args, airflow_version):
    """Verifies the (possibly nested) subcommand is supported by this Airflow.

    Args:
      args: argparse.Namespace, parsed command-line arguments.
      airflow_version: String, an Airflow semantic version.

    Raises:
      command_util.Error: if the subcommand is outside its supported
        Airflow version range.
    """

    def _CheckIsSupportedSubcommand(command, airflow_version, from_version,
                                    to_version):
      if not image_versions_command_util.IsVersionInRange(
          airflow_version, from_version, to_version):
        _RaiseLackOfSupportError(command, airflow_version)

    def _RaiseLackOfSupportError(command, airflow_version):
      raise command_util.Error(
          'The subcommand "{}" is not supported for Composer environments'
          ' with Airflow version {}.'.format(command, airflow_version),)

    subcommand, subcommand_nested = self._GetSubcommandTwoLevel(args)
    _CheckIsSupportedSubcommand(
        subcommand, airflow_version,
        self.SUBCOMMAND_ALLOWLIST[args.subcommand].from_version,
        self.SUBCOMMAND_ALLOWLIST[args.subcommand].to_version)
    # An empty allowed_nested_subcommands means every nested subcommand is
    # permitted; nothing further to check.
    if not self.SUBCOMMAND_ALLOWLIST[
        args.subcommand].allowed_nested_subcommands:
      return
    two_level_subcommand_string = '{} {}'.format(subcommand, subcommand_nested)
    if subcommand_nested in self.SUBCOMMAND_ALLOWLIST[
        args.subcommand].allowed_nested_subcommands:
      _CheckIsSupportedSubcommand(
          two_level_subcommand_string, airflow_version,
          self.SUBCOMMAND_ALLOWLIST[args.subcommand]
          .allowed_nested_subcommands[subcommand_nested].from_version,
          self.SUBCOMMAND_ALLOWLIST[args.subcommand]
          .allowed_nested_subcommands[subcommand_nested].to_version)
    else:
      _RaiseLackOfSupportError(two_level_subcommand_string, airflow_version)

  def CheckSubcommandNestedAirflowSupport(self, args, airflow_version):
    """Rejects nested subcommands on Airflow versions older than 1.10.14.

    Args:
      args: argparse.Namespace, parsed command-line arguments.
      airflow_version: String, an Airflow semantic version.

    Raises:
      command_util.Error: if a nested subcommand was given for an
        unsupported Airflow version.
    """
    if (args.subcommand_nested and
        not image_versions_command_util.IsVersionInRange(
            airflow_version, '1.10.14', None)):
      raise command_util.Error(
          'Nested subcommands are supported only for Composer environments '
          'with Airflow version 1.10.14 or higher.')

  def ConvertKubectlError(self, error, env_obj):
    """Augments kubectl errors with a hint for private-IP environments.

    Args:
      error: the original command_util.KubectlError.
      env_obj: the Environment resource the command ran against.

    Returns:
      The original error, or a command_util.Error with private-IP guidance.
    """
    is_private = (
        env_obj.config.privateEnvironmentConfig and
        env_obj.config.privateEnvironmentConfig.enablePrivateEnvironment)
    if is_private:
      return command_util.Error(
          str(error)
          + ' Make sure you have followed'
          ' https://cloud.google.com/composer/docs/how-to/accessing/airflow-cli#private-ip'
          ' to enable access to your private Cloud Composer environment from'
          ' your machine.'
      )
    return error

  def _ExtractAirflowVersion(self, image_version):
    """Returns the Airflow version embedded in a Composer image version."""
    return re.findall(r'-airflow-([\d\.]+)', image_version)[0]

  def _RunKubectl(self, args, env_obj):
    """Runs Airflow command using kubectl on the GKE Cluster.

    This mode the command is executed by connecting to the cluster and
    running `kubectl pod exec` command.
    It requires access to GKE Control plane.

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.
      env_obj: Cloud Composer Environment object.
    """
    cluster_id = env_obj.config.gkeCluster
    cluster_location_id = command_util.ExtractGkeClusterLocationId(env_obj)
    tty = 'no-tty' not in args
    with command_util.TemporaryKubeconfig(
        cluster_location_id, cluster_id, None
    ):
      try:
        image_version = env_obj.config.softwareConfig.imageVersion
        airflow_version = self._ExtractAirflowVersion(image_version)
        self.CheckSubcommandAirflowSupport(args, airflow_version)
        self.CheckSubcommandNestedAirflowSupport(args, airflow_version)
        kubectl_ns = command_util.FetchKubectlNamespace(image_version)
        pod = command_util.GetGkePod(
            pod_substr=WORKER_POD_SUBSTR, kubectl_namespace=kubectl_ns)
        log.status.Print(
            'Executing within the following Kubernetes cluster namespace: '
            '{}'.format(kubectl_ns))
        self.BypassConfirmationPrompt(args, airflow_version)
        kubectl_args = ['exec', pod, '--stdin']
        if tty:
          kubectl_args.append('--tty')
        kubectl_args.extend(
            ['--container', WORKER_CONTAINER, '--', 'airflow', args.subcommand])
        if args.subcommand_nested:
          kubectl_args.append(args.subcommand_nested)
        if args.cmd_args:
          kubectl_args.extend(args.cmd_args)
        command_util.RunKubectlCommand(
            command_util.AddKubectlNamespace(kubectl_ns, kubectl_args),
            out_func=log.out.Print)
      except command_util.KubectlError as e:
        raise self.ConvertKubectlError(e, env_obj)

  def _RunApi(self, args, env_obj):
    """Runs the Airflow command through the Composer Airflow-commands API.

    Starts the command, then polls its output with jittered exponential
    backoff, printing lines as they arrive. A first Ctrl-C requests a
    graceful stop; a second forces termination. Exits the process with the
    remote command's exit code when it is non-zero.

    Args:
      args: argparse.Namespace, parsed command-line arguments.
      env_obj: Cloud Composer Environment object.

    Raises:
      command_util.Error: if the execution could not be started or its
        status could not be fetched repeatedly.
    """
    image_version = env_obj.config.softwareConfig.imageVersion
    airflow_version = self._ExtractAirflowVersion(image_version)
    env_ref = args.CONCEPTS.environment.Parse()
    self.CheckSubcommandAirflowSupport(args, airflow_version)
    self.CheckSubcommandNestedAirflowSupport(args, airflow_version)
    self.BypassConfirmationPrompt(args, airflow_version)
    cmd = [args.subcommand]
    if args.subcommand_nested:
      cmd.append(args.subcommand_nested)
    if args.cmd_args:
      cmd.extend(args.cmd_args)
    log.status.Print(
        'Executing the command: [ airflow {} ]...'.format(' '.join(cmd))
    )
    execute_result = environments_api_util.ExecuteAirflowCommand(
        command=args.subcommand,
        subcommand=args.subcommand_nested or '',
        parameters=args.cmd_args or [],
        environment_ref=env_ref,
        release_track=self.ReleaseTrack(),
    )
    if execute_result and execute_result.executionId:
      log.status.Print(
          'Command has been started. execution_id={}'.format(
              execute_result.executionId
          )
      )
    if not execute_result.executionId:
      raise command_util.Error(
          'Cannot execute subcommand for environment. Got empty execution Id.'
      )
    log.status.Print('Use ctrl-c to interrupt the command')
    output_end = False
    next_line = 1
    cur_consecutive_poll_errors = 0
    wait_time_seconds = DEFAULT_POLL_TIME_SECONDS
    poll_result = None
    interrupted = False
    force_stop = False
    while not output_end and not force_stop:
      lines = None
      try:
        with execution_utils.RaisesKeyboardInterrupt():
          # Jitter the wait to avoid synchronized polling from many clients.
          time.sleep(
              wait_time_seconds
              + random.uniform(-POLL_JITTER_SECONDS, POLL_JITTER_SECONDS)
          )
          poll_result = environments_api_util.PollAirflowCommand(
              execution_id=execute_result.executionId,
              pod_name=execute_result.pod,
              pod_namespace=execute_result.podNamespace,
              next_line_number=next_line,
              environment_ref=env_ref,
              release_track=self.ReleaseTrack(),
          )
          cur_consecutive_poll_errors = 0
          output_end = poll_result.outputEnd
          lines = poll_result.output
          lines.sort(key=lambda line: line.lineNumber)
      except KeyboardInterrupt:
        log.status.Print('Interrupting the command...')
        try:
          log.debug('Stopping the airflow command...')
          # A second Ctrl-C (interrupted already True) forces the stop.
          stop_result = environments_api_util.StopAirflowCommand(
              execution_id=execute_result.executionId,
              pod_name=execute_result.pod,
              force=interrupted,
              pod_namespace=execute_result.podNamespace,
              environment_ref=env_ref,
              release_track=self.ReleaseTrack(),
          )
          log.debug('Stop airflow command result...'+str(stop_result))
          if stop_result and stop_result.output:
            for line in stop_result.output:
              log.Print(line)
          if interrupted:
            force_stop = True
          interrupted = True
        except:  # pylint:disable=bare-except
          # Best-effort stop: treat a failed stop as one more poll error and
          # keep polling so the remote command's state is still reported.
          log.debug('Error during stopping airflow command. Retrying polling')
          cur_consecutive_poll_errors += 1
      except:  # pylint:disable=bare-except
        cur_consecutive_poll_errors += 1
      if cur_consecutive_poll_errors == MAX_CONSECUTIVE_POLL_ERRORS:
        raise command_util.Error('Cannot fetch airflow command status.')
      if not lines:
        # No new output: back off, capped at MAX_POLL_TIME_SECONDS.
        wait_time_seconds = min(
            wait_time_seconds * EXP_BACKOFF_MULTIPLIER, MAX_POLL_TIME_SECONDS
        )
      else:
        wait_time_seconds = DEFAULT_POLL_TIME_SECONDS
        for line in lines:
          log.Print(line.content if line.content else '')
        next_line = lines[-1].lineNumber + 1
    if poll_result and poll_result.exitInfo and poll_result.exitInfo.exitCode:
      if poll_result.exitInfo.error:
        log.error('Error message: {}'.format(poll_result.exitInfo.error))
      log.error('Command exit code: {}'.format(poll_result.exitInfo.exitCode))
      # Mirror the remote command's exit code. Use sys.exit rather than the
      # site-injected exit() builtin, which is absent under -S or frozen
      # interpreters.
      sys.exit(poll_result.exitInfo.exitCode)

  def Run(self, args):
    """Validates the environment state and dispatches to API or kubectl mode.

    Args:
      args: argparse.Namespace, parsed command-line arguments.

    Raises:
      command_util.Error: if the environment is not in the RUNNING state.
    """
    self.DeprecationWarningPrompt(args)
    self.CheckForRequiredCmdArgs(args)
    running_state = api_util.GetMessagesModule(
        release_track=self.ReleaseTrack()
    ).Environment.StateValueValuesEnum.RUNNING
    env_ref = args.CONCEPTS.environment.Parse()
    env_obj = environments_api_util.Get(
        env_ref, release_track=self.ReleaseTrack()
    )
    if env_obj.state != running_state:
      raise command_util.Error(
          'Cannot execute subcommand for environment in state {}. '
          'Must be RUNNING.'.format(env_obj.state)
      )
    # Newer image versions expose the Airflow-commands API; older ones fall
    # back to exec-ing into a worker pod over kubectl.
    if image_versions_command_util.IsVersionAirflowCommandsApiCompatible(
        image_version=env_obj.config.softwareConfig.imageVersion
    ):
      self._RunApi(args, env_obj)
    else:
      self._RunKubectl(args, env_obj)

View File

@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for Cloud Composer Snapshots."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Snapshots(base.Group):
  """Save and load snapshots of an environment.

  The {command} command group saves and loads snapshots of Cloud Composer
  environments.
  """


# Place the group under the Composer category in the `gcloud` reference docs.
Snapshots.category = base.COMPOSER_CATEGORY

View File

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that loads environment snapshots."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
'EXAMPLES':
"""\
To load a snapshot into the environment named env-1, run:
$ {command} env-1 \
--snapshot-path=gs://my-bucket/path-to-the-specific-snapshot
"""
}
class LoadSnapshot(base.Command):
  """Load a snapshot into the environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the command's positional argument and flags."""
    resource_args.AddEnvironmentResourceArg(parser, 'where to load a snapshot')
    base.ASYNC_FLAG.AddToParser(parser)
    parser.add_argument(
        '--snapshot-path',
        type=str,
        required=True,
        help='The Cloud Storage path to load the snapshot from. It must '
        'start with prefix gs:// and one needs to specify a single snapshot '
        'that should be loaded.')
    for skip_flag in (flags.SKIP_PYPI_PACKAGES_INSTALLATION,
                      flags.SKIP_ENVIRONMENT_VARIABLES_SETTING,
                      flags.SKIP_AIRFLOW_OVERRIDES_SETTING,
                      flags.SKIP_COPYING_GCS_DATA):
      skip_flag.AddToParser(parser)

  def Run(self, args):
    """Starts the snapshot load and waits for it unless --async is set."""
    env_resource = args.CONCEPTS.environment.Parse()
    operation = environments_api_util.LoadSnapshot(
        env_resource,
        args.skip_pypi_packages_installation,
        args.skip_environment_variables_setting,
        args.skip_airflow_overrides_setting,
        args.skip_gcs_data_copying,
        args.snapshot_path,
        release_track=self.ReleaseTrack())
    if not args.async_:
      return self._SynchronousExecution(env_resource, operation)
    return self._AsynchronousExecution(env_resource, operation)

  def _AsynchronousExecution(self, env_resource, operation):
    """Logs the pending operation and returns immediately."""
    log.UpdatedResource(
        env_resource.RelativeName(),
        kind='environment',
        is_async=True,
        details='with operation [{0}]'.format(operation.name))
    return operation

  def _SynchronousExecution(self, env_resource, operation):
    """Blocks until the load-snapshot operation finishes; wraps failures."""
    try:
      operations_api_util.WaitForOperation(
          operation,
          'Waiting for [{}] to be updated with [{}]'.format(
              env_resource.RelativeName(), operation.name),
          release_track=self.ReleaseTrack())
    except command_util.Error as e:
      raise command_util.Error(
          'Failed to load the snapshot of the environment [{}]: {}'.format(
              env_resource.RelativeName(), six.text_type(e)))

View File

@@ -0,0 +1,109 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that saves environment snapshots."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.api_lib.composer import operations_util as operations_api_util
from googlecloudsdk.api_lib.composer import util as api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
import six
DETAILED_HELP = {
    'EXAMPLES':
        textwrap.dedent("""\
          To save a snapshot of the environment named env-1, run:

            $ {command} env-1
          """)
}


class SaveSnapshot(base.Command):
  """Save a snapshot of the environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment resource and save-snapshot flags."""
    resource_args.AddEnvironmentResourceArg(parser,
                                            'where the snapshot must be saved')
    base.ASYNC_FLAG.AddToParser(parser)
    parser.add_argument(
        '--snapshot-location',
        type=str,
        help='The Cloud Storage location where to save the snapshot. It must '
        'start with the prefix gs://. Default value is /snapshots directory in '
        'the Cloud Storage bucket of the environment.')

  def Run(self, args):
    env_resource = args.CONCEPTS.environment.Parse()
    operation = environments_api_util.SaveSnapshot(
        env_resource, args.snapshot_location, release_track=self.ReleaseTrack())
    if args.async_:
      return self._AsynchronousExecution(env_resource, operation)
    else:
      return self._SynchronousExecution(env_resource, operation)

  def _AsynchronousExecution(self, env_resource, operation):
    """Logs the pending operation and returns it without waiting."""
    log.UpdatedResource(
        env_resource.RelativeName(),
        kind='environment',
        is_async=True,
        details='with operation [{}]'.format(operation.name))
    log.status.Print('If you want to see the result, run:')
    log.status.Print('gcloud composer operations describe ' + operation.name)
    return operation

  def _SynchronousExecution(self, env_resource, operation):
    """Waits for the operation and returns its response.

    Returns:
      The response of the completed save-snapshot operation.

    Raises:
      command_util.Error: if waiting for the operation fails.
    """
    try:
      operations_api_util.WaitForOperation(
          operation,
          'Waiting for [{}] to be updated with [{}]'.format(
              env_resource.RelativeName(), operation.name),
          release_track=self.ReleaseTrack())
      # Re-fetch the operation to obtain its response payload, since
      # WaitForOperation does not return the completed operation.
      completed_operation = operations_api_util.GetService(
          self.ReleaseTrack()).Get(
              api_util.GetMessagesModule(self.ReleaseTrack())
              .ComposerProjectsLocationsOperationsGetRequest(
                  name=operation.name))
      log.status.Print('\nIf you want to see the result once more, run:')
      log.status.Print('gcloud composer operations describe ' + operation.name +
                       '\n')
      # Fixed copy-paste from the check-upgrade command: this is the
      # save-snapshot command, so reference save-snapshot runs here.
      log.status.Print(
          'If you want to see history of all operations to be able'
          ' to display results of previous save-snapshot runs, run:')
      log.status.Print('gcloud composer operations list\n')
      log.status.Print('Response: ')
      return completed_operation.response
    except command_util.Error as e:
      raise command_util.Error(
          'Failed to save the snapshot of the environment [{}]: {}'.format(
              env_resource.RelativeName(), six.text_type(e)))

View File

@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for managing a Cloud Composer environment's storage."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Storage(base.Group):
  """Manage Cloud Storage objects stored as part of Cloud Composer environments.

  The {command} command group lets you manage DAGs, Airflow plugins and data
  used inside your Cloud Composer Environments.
  """
  # Pure command group: no flags or Run logic; subcommands live in this
  # package's sibling modules.


# Place this group in the Composer section of `gcloud` help listings.
Storage.category = base.COMPOSER_CATEGORY

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for managing a Cloud Composer environment's DAGs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Dags(base.Group):
  """Manage DAGs for Cloud Composer environments.

  The {command} command group lets you import, export, list, and delete DAGs for
  your Cloud Composer environment.
  """
  # Pure command group with no flags of its own. Unlike the top-level storage
  # group, no base.COMPOSER_CATEGORY is assigned here.

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete Airflow DAGs for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
    'EXAMPLES':
        '''\
      To delete the dags in the path ``path/to/dags'', for the
      environment named ``environment-1'' in the location ``us-east1'', run:

        $ {command} path/to/dags --environment=environment-1 --location=us-east1
      '''
}


class Delete(base.Command):
  """Delete DAG files from a Cloud Composer environment's Cloud Storage bucket.
  """
  # Grammar fix in the summary above: "an Cloud" -> "a Cloud".

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'whose DAGs to delete', positional=False)
    flags.AddDeleteTargetPositional(parser, 'dags')

  def Run(self, args):
    env_ref = args.CONCEPTS.environment.Parse()
    # Mention the specific target in the confirmation prompt, if one was given.
    subtarget = '[{}] in '.format(args.target) if args.target else ''
    console_io.PromptContinue(
        'Recursively deleting all contents from {}the \'dags/\' '
        'subdirectory of environment [{}]'.format(subtarget,
                                                  env_ref.RelativeName()),
        cancel_on_no=True)
    # No explicit target means everything under the dags/ prefix.
    return storage_util.Delete(
        env_ref, args.target or '*', 'dags', release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to export files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Export(base.Command):
  """Export DAGs from an environment into local storage or Cloud Storage.

  If the SOURCE is a directory, it and its contents are exported
  recursively. If no SOURCE is provided, the entire contents of the
  environment's DAGs directory will be exported. Colliding files in the
  DESTINATION will be overwritten. If a file exists in the DESTINATION but
  there is no corresponding file to overwrite it, it is untouched.

  ## EXAMPLES
  Suppose the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- dags
    |   |
    |   +-- file1.py
    |   +-- file2.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- file4.py

  And the local directory '/foo' has the following
  structure:

    /foo
    |
    +-- file1.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- fileY.py

  The following command:

    {command} myenv --destination=/foo

  would result in the following structure in the local '/foo' directory:

    /foo
    |
    +-- file1.py
    +-- file2.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- file4.py
    |   |   +-- fileY.py

  The local files '/foo/file1.py' and '/foo/subdir1/file3.py' will be
  overwritten with the contents of the corresponding files in the Cloud Storage
  bucket.

  If instead we had run

    {command} myenv --source=subdir1/file3.py --destination=/foo

  the resulting local directory structure would be the following:

    /foo
    |
    +-- file1.py
    +-- file3.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- fileY.py

  No local files would be overwritten since
  'gs://the-bucket/dags/subdir1/file3.py' was written to '/foo/file3.py'
  instead of 'foo/subdir1/file3.py'.
  """
  # Typo fix in the help text above: "are are exported" -> "are exported".

  # Bucket subdirectory this command operates on.
  SUBDIR_BASE = 'dags'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'from whose Cloud Storage bucket to export DAGs',
        positional=False)
    flags.AddExportSourceFlag(parser, Export.SUBDIR_BASE)
    flags.AddExportDestinationFlag(parser)

  def Run(self, args):
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    # Default to everything under dags/ when no --source is given; strip
    # surrounding separators so the bucket-relative path is well formed.
    source_path = posixpath.join(Export.SUBDIR_BASE,
                                 (args.source or '*').strip(posixpath.sep))
    return storage_util.Export(
        env_ref, source_path,
        args.destination,
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to import files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Import(base.Command):
  """Import DAGs from local storage or Cloud Storage into an environment.

  If the SOURCE is a directory, it and its contents are imported recursively.
  Colliding files in the environment's Cloud Storage bucket will be
  overwritten. If a file exists in the bucket but is not present in the SOURCE,
  it is not removed.

  ## EXAMPLES
  Suppose the '/foo' directory in the local filesystem has the following
  structure:

    foo
    |
    +-- subdir1
    |   |
    |   +-- file1.txt
    |   +-- file2.txt
    |
    +-- subdir2
    |   |
    |   +-- file3.txt
    |   +-- file4.txt

  And the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- dags
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt

  The following command:

    {command} myenv --source=/foo

  would result in the following structure in `myenv`'s Cloud Storage bucket:

    gs://the-bucket
    |
    +-- dags
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |   |   +-- file1.txt
    |   |   |   +-- file2.txt
    |   |   |
    |   |   +-- subdir2
    |   |   |   |
    |   |   |   +-- file3.txt
    |   |   |   +-- file4.txt

  If instead we had run

    {command} myenv --source=/foo --destination=bar

  the resulting bucket structure would be the following:

    gs://the-bucket
    |
    +-- dags
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |
    |   +-- bar
    |   |   |
    |   |   +-- foo
    |   |   |   |
    |   |   |   +-- subdir1
    |   |   |   |   |
    |   |   |   |   +-- file1.txt
    |   |   |   |   +-- file2.txt
    |   |   |   |
    |   |   |   +-- subdir2
    |   |   |   |   |
    |   |   |   |   +-- file3.txt
    |   |   |   |   +-- file4.txt
  """

  # Bucket subdirectory this command operates on.
  SUBDIR_BASE = 'dags'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'into whose Cloud Storage bucket to import DAGs',
        positional=False)
    flags.AddImportSourceFlag(parser, Import.SUBDIR_BASE)
    flags.AddImportDestinationFlag(parser, Import.SUBDIR_BASE)

  def Run(self, args):
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    # Build the target prefix dags[/<destination>]/ — the trailing empty
    # component gives it a trailing slash.
    prefix_parts = [Import.SUBDIR_BASE]
    if args.destination:
      prefix_parts.append(args.destination.strip(posixpath.sep))
    prefix_parts.append('')
    gcs_subdir = posixpath.join(*prefix_parts)
    return storage_util.Import(
        env_ref, args.source, gcs_subdir, release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Airflow DAGs for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To list the dags for the Cloud Composer environment
      ``environment-1'' and location ``us-central1'', run:

        $ {command} --environment=environment-1 --location=us-central1
      """
}


class List(base.Command):
  """List the DAG files for a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to list DAGs', positional=False)
    # Render one object name per row.
    parser.display_info.AddFormat('table(name)')

  def Run(self, args):
    # Everything under the dags/ prefix of the environment's bucket.
    return storage_util.List(
        args.CONCEPTS.environment.Parse(),
        'dags',
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for managing a Cloud Composer environment's data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Data(base.Group):
  """Manage data for Cloud Composer environments.

  The {command} command group lets you import, export, list, and delete data for
  your Cloud Composer environment.
  """
  # Pure command group with no flags of its own. Unlike the top-level storage
  # group, no base.COMPOSER_CATEGORY is assigned here.

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete Airflow data for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
    'EXAMPLES':
        '''\
      To delete the data from the path ``path/to/data'', for the
      environment named ``environment-1'' in the location ``us-east1'', run:

        $ {command} path/to/data --environment=environment-1 --location=us-east1
      '''
}


class Delete(base.Command):
  """Delete data from a Cloud Composer environment's Cloud Storage bucket.
  """
  # Grammar fix in the summary above: "an Cloud" -> "a Cloud".

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'whose data to delete.', positional=False)
    flags.AddDeleteTargetPositional(parser, 'data')

  def Run(self, args):
    env_ref = args.CONCEPTS.environment.Parse()
    # Mention the specific target in the confirmation prompt, if one was given.
    subtarget = '[{}] in '.format(args.target) if args.target else ''
    console_io.PromptContinue(
        'Recursively deleting all contents from {}the \'data/\' '
        'subdirectory of environment [{}]'.format(subtarget,
                                                  env_ref.RelativeName()),
        cancel_on_no=True)
    # No explicit target means everything under the data/ prefix.
    return storage_util.Delete(
        env_ref, args.target or '*', 'data', release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to export files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Export(base.Command):
  """Export data from an environment into local storage or Cloud Storage.

  If the SOURCE is a directory, it and its contents are exported
  recursively. If no SOURCE is provided, the entire contents of the
  environment's data directory will be exported. Colliding files in the
  DESTINATION will be overwritten. If a file exists in the DESTINATION but
  there is no corresponding file to overwrite it, it is untouched.

  ## EXAMPLES
  Suppose the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- data
    |   |
    |   +-- file1.txt
    |   +-- file2.txt
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.txt
    |   |   +-- file4.txt

  And the local directory '/foo' has the following
  structure:

    /foo
    |
    +-- file1.txt
    +-- fileX.txt
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.txt
    |   |   +-- fileY.txt

  The following command:

    {command} myenv --destination=/foo

  would result in the following structure in the local '/foo' directory:

    /foo
    |
    +-- file1.txt
    +-- file2.txt
    +-- fileX.txt
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.txt
    |   |   +-- file4.txt
    |   |   +-- fileY.txt

  The local files '/foo/file1.txt' and '/foo/subdir1/file3.txt' will be
  overwritten with the contents of the corresponding files in the Cloud Storage
  bucket.

  If instead we had run

    {command} myenv --source=subdir1/file3.txt --destination=/foo

  the resulting local directory structure would be the following:

    /foo
    |
    +-- file1.txt
    +-- file3.txt
    +-- fileX.txt
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.txt
    |   |   +-- fileY.txt

  No local files would be overwritten since
  'gs://the-bucket/data/subdir1/file3.txt' was written to '/foo/file3.txt'
  instead of 'foo/subdir1/file3.txt'.
  """
  # Fixes in the help text above: "are are exported" -> "are exported", and
  # the final example path read 'gs://the-bucket/dags/...' although this
  # command exports from the data/ subdirectory (see SUBDIR_BASE below).

  # Bucket subdirectory this command operates on.
  SUBDIR_BASE = 'data'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'from whose Cloud Storage bucket to export data.',
        positional=False)
    flags.AddExportSourceFlag(parser, Export.SUBDIR_BASE)
    flags.AddExportDestinationFlag(parser)

  def Run(self, args):
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    # Default to everything under data/ when no --source is given; strip
    # surrounding separators so the bucket-relative path is well formed.
    source_path = posixpath.join(Export.SUBDIR_BASE,
                                 (args.source or '*').strip(posixpath.sep))
    return storage_util.Export(
        env_ref, source_path,
        args.destination,
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to import files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Import(base.Command):
  """Import data from local storage or Cloud Storage into an environment.

  If the SOURCE is a directory, it and its contents are imported recursively.
  Colliding files in the environment's Cloud Storage bucket will be
  overwritten. If a file exists in the bucket but is not present in the SOURCE,
  it is not removed.

  ## EXAMPLES
  Suppose the '/foo' directory in the local filesystem has the following
  structure:

    foo
    |
    +-- subdir1
    |   |
    |   +-- file1.txt
    |   +-- file2.txt
    |
    +-- subdir2
    |   |
    |   +-- file3.txt
    |   +-- file4.txt

  And the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- data
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt

  The following command:

    {command} myenv --source=/foo

  would result in the following structure in `myenv`'s Cloud Storage bucket:

    gs://the-bucket
    |
    +-- data
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |   |   +-- file1.txt
    |   |   |   +-- file2.txt
    |   |   |
    |   |   +-- subdir2
    |   |   |   |
    |   |   |   +-- file3.txt
    |   |   |   +-- file4.txt

  If instead we had run

    {command} myenv --source=/foo --destination=bar

  the resulting bucket structure would be the following:

    gs://the-bucket
    |
    +-- data
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |
    |   +-- bar
    |   |   |
    |   |   +-- foo
    |   |   |   |
    |   |   |   +-- subdir1
    |   |   |   |   |
    |   |   |   |   +-- file1.txt
    |   |   |   |   +-- file2.txt
    |   |   |   |
    |   |   |   +-- subdir2
    |   |   |   |   |
    |   |   |   |   +-- file3.txt
    |   |   |   |   +-- file4.txt
  """

  # Bucket subdirectory this command operates on.
  SUBDIR_BASE = 'data'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'into whose Cloud Storage bucket to import data.',
        positional=False)
    flags.AddImportSourceFlag(parser, Import.SUBDIR_BASE)
    flags.AddImportDestinationFlag(parser, Import.SUBDIR_BASE)

  def Run(self, args):
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    # Build the target prefix data[/<destination>]/ — the trailing empty
    # component gives it a trailing slash.
    prefix_parts = [Import.SUBDIR_BASE]
    if args.destination:
      prefix_parts.append(args.destination.strip(posixpath.sep))
    prefix_parts.append('')
    gcs_subdir = posixpath.join(*prefix_parts)
    return storage_util.Import(
        env_ref, args.source, gcs_subdir, release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list data for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
DETAILED_HELP = {
    'EXAMPLES':
        """\
      To list the data from the Cloud Composer environment
      ``environment-1'' and location ``us-central1'', run:

        $ {command} --environment=environment-1 --location=us-central1
      """
}


class List(base.Command):
  """List the data for a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to list data.', positional=False)
    # Render one object name per row.
    parser.display_info.AddFormat('table(name)')

  def Run(self, args):
    # Everything under the data/ prefix of the environment's bucket.
    return storage_util.List(
        args.CONCEPTS.environment.Parse(),
        'data',
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for managing a Cloud Composer environment's plugins."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Plugins(base.Group):
  """Manage plugins for Cloud Composer environments.

  The {command} command group lets you import, export, list, and delete plugins
  for your Cloud Composer environment.
  """
  # Pure command group with no flags of its own. Unlike the top-level storage
  # group, no base.COMPOSER_CATEGORY is assigned here.

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete Airflow plugins for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
from googlecloudsdk.core.console import console_io
DETAILED_HELP = {
    'EXAMPLES':
        '''\
      To delete the plugins in the path ``path/to/plugins'', for the
      environment named ``environment-1'' in the location ``us-east1'', run:

        $ {command} path/to/plugins --environment=environment-1 --location=us-east1
      '''
}


class Delete(base.Command):
  """Delete plugins from a Cloud Composer environment's Cloud Storage bucket.
  """
  # Grammar fix in the summary above: "an Cloud" -> "a Cloud".

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'whose plugins to delete.', positional=False)
    flags.AddDeleteTargetPositional(parser, 'plugins')

  def Run(self, args):
    env_ref = args.CONCEPTS.environment.Parse()
    # Mention the specific target in the confirmation prompt, if one was given.
    subtarget = '[{}] in '.format(args.target) if args.target else ''
    console_io.PromptContinue(
        'Recursively deleting all contents from {}the \'plugins/\' '
        'subdirectory of environment [{}]'.format(subtarget,
                                                  env_ref.RelativeName()),
        cancel_on_no=True)
    # No explicit target means everything under the plugins/ prefix.
    return storage_util.Delete(
        env_ref,
        args.target or '*',
        'plugins',
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to export files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Export(base.Command):
  """Export plugins from an environment into local storage or Cloud Storage.

  If the SOURCE is a directory, it and its contents are exported
  recursively. If no SOURCE is provided, the entire contents of the
  environment's plugins directory will be exported. Colliding files in the
  DESTINATION will be overwritten. If a file exists in the DESTINATION but
  there is no corresponding file to overwrite it, it is untouched.

  ## EXAMPLES
  Suppose the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- plugins
    |   |
    |   +-- file1.py
    |   +-- file2.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- file4.py

  And the local directory '/foo' has the following
  structure:

    /foo
    |
    +-- file1.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- fileY.py

  The following command:

    {command} myenv --destination=/foo

  would result in the following structure in the local '/foo' directory:

    /foo
    |
    +-- file1.py
    +-- file2.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- file4.py
    |   |   +-- fileY.py

  The local files '/foo/file1.py' and '/foo/subdir1/file3.py' will be
  overwritten with the contents of the corresponding files in the Cloud Storage
  bucket.

  If instead we had run

    {command} myenv file2.py subdir1/file3.py --destination=/foo

  the resulting local directory structure would be the following:

    /foo
    |
    +-- file1.py
    +-- file3.py
    +-- fileX.py
    |   |
    |   +-- subdir1
    |   |   |
    |   |   +-- file3.py
    |   |   +-- fileY.py

  No local files would be overwritten since
  'gs://the-bucket/plugins/subdir1/file3.py' was written to '/foo/file3.py'
  instead of 'foo/subdir1/file3.py'.
  """

  # GCS subdirectory of the environment bucket this command operates on.
  SUBDIR_BASE = 'plugins'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'from whose Cloud Storage bucket to export plugins.',
        positional=False)
    flags.AddExportSourceFlag(parser, Export.SUBDIR_BASE)
    flags.AddExportDestinationFlag(parser)

  def Run(self, args):
    # Warn (without failing) if the source path contains a wildcard.
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    # Default to everything under 'plugins/' ('*'); strip surrounding '/'
    # from the user path before joining so the GCS path stays well-formed.
    source_path = posixpath.join(Export.SUBDIR_BASE,
                                 (args.source or '*').strip(posixpath.sep))
    return storage_util.Export(
        env_ref, source_path,
        args.destination,
        release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to import files into a Cloud Composer environment's bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import posixpath
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
class Import(base.Command):
  """Import plugins from local storage or Cloud Storage into an environment.

  If the SOURCE is a directory, it and its contents are imported recursively.
  Colliding files in the environment's Cloud Storage bucket will be
  overwritten. If a file exists in the bucket but is not present in the SOURCE,
  it is not removed.

  ## EXAMPLES
  Suppose the '/foo' directory in the local filesystem has the following
  structure:

    foo
    |
    +-- subdir1
    |   |
    |   +-- file1.txt
    |   +-- file2.txt
    |
    +-- subdir2
    |   |
    |   +-- file3.txt
    |   +-- file4.txt

  And the environment `myenv`'s Cloud Storage bucket has the following
  structure:

    gs://the-bucket
    |
    +-- plugins
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt

  The following command:

    {command} myenv --source=/foo

  would result in the following structure in `myenv`'s Cloud Storage bucket:

    gs://the-bucket
    |
    +-- plugins
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |   |   +-- file1.txt
    |   |   |   +-- file2.txt
    |   |   |
    |   |   +-- subdir2
    |   |   |   |
    |   |   |   +-- file3.txt
    |   |   |   +-- file4.txt

  If instead we had run

    {command} myenv --source=/foo --destination=bar

  the resulting bucket structure would be the following:

    gs://the-bucket
    |
    +-- plugins
    |   |
    |   +-- foo
    |   |   |
    |   |   +-- subdir1
    |   |   |   |
    |   |   |   +-- bar.txt
    |   |
    |   +-- bar
    |   |   |
    |   |   +-- foo
    |   |   |   |
    |   |   |   +-- subdir1
    |   |   |   |   |
    |   |   |   |   +-- file1.txt
    |   |   |   |   +-- file2.txt
    |   |   |   |
    |   |   |   +-- subdir2
    |   |   |   |   |
    |   |   |   |   +-- file3.txt
    |   |   |   |   +-- file4.txt
  """

  # GCS subdirectory of the environment bucket this command operates on.
  SUBDIR_BASE = 'plugins'

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser, 'into whose Cloud Storage bucket to import plugins.',
        positional=False)
    flags.AddImportSourceFlag(parser, Import.SUBDIR_BASE)
    flags.AddImportDestinationFlag(parser, Import.SUBDIR_BASE)

  def Run(self, args):
    # Warn (without failing) if the source path contains a wildcard.
    storage_util.WarnIfWildcardIsPresent(args.source, '--source')
    env_ref = args.CONCEPTS.environment.Parse()
    gcs_subdir = Import.SUBDIR_BASE
    if args.destination:
      # Optional extra path under 'plugins/'; strip surrounding '/'.
      gcs_subdir = posixpath.join(gcs_subdir,
                                  args.destination.strip(posixpath.sep))
    # Joining with '' appends a trailing '/' so the destination is always
    # treated as a directory prefix.
    gcs_subdir = posixpath.join(gcs_subdir, '')
    return storage_util.Import(
        env_ref, args.source, gcs_subdir, release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Airflow plugins for a Cloud Composer environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import storage_util
DETAILED_HELP = {
'EXAMPLES':
"""\
To list the plugins for the Cloud Composer environment
``environment-1'' and location ``us-central1'', run:
$ {command} --environment=environment-1 --location=us-central1
"""
}
class List(base.Command):
  """List the plugins for a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  @staticmethod
  def Args(parser):
    # Render one row per object name in the environment's plugins directory.
    parser.display_info.AddFormat('table(name)')
    resource_args.AddEnvironmentResourceArg(
        parser, 'for which to list plugins.', positional=False)

  def Run(self, args):
    environment_ref = args.CONCEPTS.environment.Parse()
    return storage_util.List(
        environment_ref, 'plugins', release_track=self.ReleaseTrack())

View File

@@ -0,0 +1,595 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that updates scalar properties of an environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.composer import environments_util as environments_api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import environment_patch_util as patch_util
from googlecloudsdk.command_lib.composer import flags
from googlecloudsdk.command_lib.composer import image_versions_util as image_versions_command_util
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.core import log
DETAILED_HELP = {
'EXAMPLES':
"""\
To update the Cloud Composer environment named ``env-1'' to have 8
Airflow workers, and not have the ``production'' label, run:
$ {command} env-1 --node-count=8 --remove-labels=production
"""
}
_INVALID_OPTION_FOR_V2_ERROR_MSG = """\
Cannot specify --{opt} with Composer 2.X or greater.
"""
_INVALID_OPTION_FOR_V1_ERROR_MSG = """\
Cannot specify --{opt} with Composer 1.X.
"""
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Update(base.Command):
  """Update properties of a Cloud Composer environment."""

  detailed_help = DETAILED_HELP

  # Feature gates consulted in _ConstructPatch; subclasses for other release
  # tracks re-declare them (all currently True in every track).
  _support_autoscaling = True
  _support_maintenance_window = True
  _support_environment_size = True

  @staticmethod
  def Args(parser, release_track=base.ReleaseTrack.GA):
    """Registers the update command's flags.

    Args:
      parser: the argparse parser to register flags on.
      release_track: base.ReleaseTrack, selects track-specific flag variants
        (autoscaling flags differ per track).
    """
    resource_args.AddEnvironmentResourceArg(parser, 'to update')
    base.ASYNC_FLAG.AddToParser(parser)
    # Exactly one update type may be requested per invocation.
    # NOTE(review): the group is stored as a class attribute and shared by
    # subclasses via Update.Args — presumably intentional; confirm.
    Update.update_type_group = parser.add_mutually_exclusive_group(
        required=True, help='The update type.')
    flags.AddNodeCountUpdateFlagToGroup(Update.update_type_group)
    flags.AddPypiUpdateFlagsToGroup(Update.update_type_group)
    flags.AddEnvVariableUpdateFlagsToGroup(Update.update_type_group)
    flags.AddAirflowConfigUpdateFlagsToGroup(Update.update_type_group)
    flags.AddLabelsUpdateFlagsToGroup(Update.update_type_group)

    # Web server access control choices are mutually exclusive among
    # themselves, nested inside the main update-type group.
    web_server_group = Update.update_type_group.add_mutually_exclusive_group()
    flags.UPDATE_WEB_SERVER_ALLOW_IP.AddToParser(web_server_group)
    flags.WEB_SERVER_ALLOW_ALL.AddToParser(web_server_group)
    flags.WEB_SERVER_DENY_ALL.AddToParser(web_server_group)

    flags.ENABLE_HIGH_RESILIENCE.AddToParser(Update.update_type_group)
    flags.DISABLE_HIGH_RESILIENCE.AddToParser(Update.update_type_group)
    flags.ENABLE_LOGS_IN_CLOUD_LOGGING_ONLY.AddToParser(
        Update.update_type_group
    )
    flags.DISABLE_LOGS_IN_CLOUD_LOGGING_ONLY.AddToParser(
        Update.update_type_group
    )
    flags.CLOUD_SQL_MACHINE_TYPE.AddToParser(Update.update_type_group)
    flags.WEB_SERVER_MACHINE_TYPE.AddToParser(Update.update_type_group)
    flags.AddAutoscalingUpdateFlagsToGroup(Update.update_type_group,
                                           release_track)
    flags.AddMasterAuthorizedNetworksUpdateFlagsToGroup(
        Update.update_type_group)
    flags.AIRFLOW_DATABASE_RETENTION_DAYS.AddToParser(
        Update.update_type_group.add_argument_group()
    )
    flags.AddScheduledSnapshotFlagsToGroup(Update.update_type_group)
    flags.AddMaintenanceWindowFlagsUpdateGroup(Update.update_type_group)
    flags.AddCloudDataLineageIntegrationUpdateFlagsToGroup(
        Update.update_type_group)
    flags.AddEnvUpgradeFlagsToGroup(Update.update_type_group)
    flags.AddComposer3FlagsToGroup(Update.update_type_group)

  def _ConstructPatch(self, env_ref, args):
    """Translates parsed flags into patch_util.ConstructPatch arguments.

    Args:
      env_ref: resource reference of the environment being updated.
      args: parsed command-line arguments.

    Returns:
      The result of patch_util.ConstructPatch — unpacked by Run as a
      (field mask, patch environment) pair.

    Raises:
      command_util.InvalidUserInputError: if a flag is incompatible with the
        environment's Composer major version or with another flag.
    """
    # Fetch the live environment: version checks and workload defaults below
    # depend on its current configuration.
    env_obj = environments_api_util.Get(
        env_ref, release_track=self.ReleaseTrack())
    is_composer_v1 = image_versions_command_util.IsImageVersionStringComposerV1(
        env_obj.config.softwareConfig.imageVersion)
    params = dict(
        is_composer_v1=is_composer_v1,
        env_ref=env_ref,
        node_count=args.node_count,
        update_pypi_packages_from_file=args.update_pypi_packages_from_file,
        clear_pypi_packages=args.clear_pypi_packages,
        remove_pypi_packages=args.remove_pypi_packages,
        update_pypi_packages=dict(
            command_util.SplitRequirementSpecifier(r)
            for r in args.update_pypi_package),
        clear_labels=args.clear_labels,
        remove_labels=args.remove_labels,
        update_labels=args.update_labels,
        clear_airflow_configs=args.clear_airflow_configs,
        remove_airflow_configs=args.remove_airflow_configs,
        update_airflow_configs=args.update_airflow_configs,
        clear_env_variables=args.clear_env_variables,
        remove_env_variables=args.remove_env_variables,
        update_env_variables=args.update_env_variables,
        release_track=self.ReleaseTrack(),
    )
    params['update_image_version'] = self._getImageVersion(
        args, env_ref, env_obj, self.ReleaseTrack()
    )
    params['update_web_server_access_control'] = (
        environments_api_util.BuildWebServerAllowedIps(
            args.update_web_server_allow_ip,
            args.web_server_allow_all,
            args.web_server_deny_all,
        )
    )
    # Dedicated Cloud SQL / web server machine types exist only in Composer 1.
    if args.cloud_sql_machine_type and not is_composer_v1:
      raise command_util.InvalidUserInputError(
          _INVALID_OPTION_FOR_V2_ERROR_MSG.format(opt='cloud-sql-machine-type')
      )
    if args.web_server_machine_type and not is_composer_v1:
      raise command_util.InvalidUserInputError(
          _INVALID_OPTION_FOR_V2_ERROR_MSG.format(opt='web-server-machine-type')
      )
    params['cloud_sql_machine_type'] = args.cloud_sql_machine_type
    params['web_server_machine_type'] = args.web_server_machine_type
    if self._support_environment_size:
      # Environment size presets are a Composer 2+ concept.
      if args.environment_size and is_composer_v1:
        raise command_util.InvalidUserInputError(
            _INVALID_OPTION_FOR_V1_ERROR_MSG.format(opt='environment-size')
        )
      # Each track declares its own choice-to-enum mapping.
      if self.ReleaseTrack() == base.ReleaseTrack.GA:
        params['environment_size'] = flags.ENVIRONMENT_SIZE_GA.GetEnumForChoice(
            args.environment_size
        )
      elif self.ReleaseTrack() == base.ReleaseTrack.BETA:
        params['environment_size'] = (
            flags.ENVIRONMENT_SIZE_BETA.GetEnumForChoice(args.environment_size)
        )
      elif self.ReleaseTrack() == base.ReleaseTrack.ALPHA:
        params['environment_size'] = (
            flags.ENVIRONMENT_SIZE_ALPHA.GetEnumForChoice(args.environment_size)
        )
    if self._support_autoscaling:
      # Any workload-resource flag marks the workloads config as updated and
      # is rejected outright on Composer 1 environments.
      if (
          args.scheduler_cpu
          or args.worker_cpu
          or args.web_server_cpu
          or args.scheduler_memory
          or args.worker_memory
          or args.web_server_memory
          or args.scheduler_storage
          or args.worker_storage
          or args.web_server_storage
          or args.min_workers
          or args.max_workers
          or (
              args.enable_triggerer
              or args.disable_triggerer
              or args.triggerer_count is not None
              or args.triggerer_cpu
              or args.triggerer_memory
          )
      ):
        params['workload_updated'] = True
        if is_composer_v1:
          raise command_util.InvalidUserInputError(
              'Workloads Config flags introduced in Composer 2.X'
              ' cannot be used when updating Composer 1.X environments.'
          )
      # Seed params with the environment's current workload values so that
      # updating one field does not clobber the others in the patch.
      if env_obj.config.workloadsConfig:
        if env_obj.config.workloadsConfig.scheduler:
          params['scheduler_cpu'] = env_obj.config.workloadsConfig.scheduler.cpu
          params['scheduler_memory_gb'] = (
              env_obj.config.workloadsConfig.scheduler.memoryGb
          )
          params['scheduler_storage_gb'] = (
              env_obj.config.workloadsConfig.scheduler.storageGb
          )
          params['scheduler_count'] = (
              env_obj.config.workloadsConfig.scheduler.count
          )
        if env_obj.config.workloadsConfig.worker:
          params['worker_cpu'] = env_obj.config.workloadsConfig.worker.cpu
          params['worker_memory_gb'] = (
              env_obj.config.workloadsConfig.worker.memoryGb
          )
          params['worker_storage_gb'] = (
              env_obj.config.workloadsConfig.worker.storageGb
          )
          params['min_workers'] = env_obj.config.workloadsConfig.worker.minCount
          params['max_workers'] = env_obj.config.workloadsConfig.worker.maxCount
        if env_obj.config.workloadsConfig.webServer:
          params['web_server_cpu'] = (
              env_obj.config.workloadsConfig.webServer.cpu
          )
          params['web_server_memory_gb'] = (
              env_obj.config.workloadsConfig.webServer.memoryGb
          )
          params['web_server_storage_gb'] = (
              env_obj.config.workloadsConfig.webServer.storageGb
          )
      # --scheduler-count is valid on both major versions, so it only counts
      # as a workloads-config update outside Composer 1.
      if args.scheduler_count is not None:
        params['scheduler_count'] = args.scheduler_count
        if not is_composer_v1:
          params['workload_updated'] = True
      # Explicit flags override the seeded current values.
      if args.scheduler_cpu is not None:
        params['scheduler_cpu'] = args.scheduler_cpu
      if args.worker_cpu is not None:
        params['worker_cpu'] = args.worker_cpu
      if args.web_server_cpu is not None:
        params['web_server_cpu'] = args.web_server_cpu
      if args.scheduler_memory is not None:
        params['scheduler_memory_gb'] = (
            environments_api_util.MemorySizeBytesToGB(args.scheduler_memory)
        )
      if args.worker_memory is not None:
        params['worker_memory_gb'] = environments_api_util.MemorySizeBytesToGB(
            args.worker_memory
        )
      if args.web_server_memory is not None:
        params['web_server_memory_gb'] = (
            environments_api_util.MemorySizeBytesToGB(args.web_server_memory)
        )
      if args.scheduler_storage is not None:
        params['scheduler_storage_gb'] = (
            environments_api_util.MemorySizeBytesToGB(args.scheduler_storage)
        )
      if args.worker_storage is not None:
        params['worker_storage_gb'] = environments_api_util.MemorySizeBytesToGB(
            args.worker_storage
        )
      if args.web_server_storage is not None:
        params['web_server_storage_gb'] = (
            environments_api_util.MemorySizeBytesToGB(args.web_server_storage)
        )
      if args.min_workers:
        params['min_workers'] = args.min_workers
      if args.max_workers:
        params['max_workers'] = args.max_workers
      self._addScheduledSnapshotFields(params, args, is_composer_v1)
      self._addTriggererFields(params, args, env_obj)
    if self._support_maintenance_window:
      params['clear_maintenance_window'] = args.clear_maintenance_window
      params['maintenance_window_start'] = args.maintenance_window_start
      params['maintenance_window_end'] = args.maintenance_window_end
      params[
          'maintenance_window_recurrence'] = args.maintenance_window_recurrence
    params['airflow_database_retention_days'] = (
        args.airflow_database_retention_days
    )
    # Master authorized networks: enable/disable are contradictory together,
    # and a networks list only makes sense when enabling.
    if args.enable_master_authorized_networks and args.disable_master_authorized_networks:
      raise command_util.InvalidUserInputError(
          'Cannot specify --enable-master-authorized-networks with --disable-master-authorized-networks'
      )
    if args.disable_master_authorized_networks and args.master_authorized_networks:
      raise command_util.InvalidUserInputError(
          'Cannot specify --disable-master-authorized-networks with --master-authorized-networks'
      )
    if args.enable_master_authorized_networks is None and args.master_authorized_networks:
      raise command_util.InvalidUserInputError(
          'Cannot specify --master-authorized-networks without --enable-master-authorized-networks'
      )
    if args.enable_master_authorized_networks or args.disable_master_authorized_networks:
      params[
          'master_authorized_networks_enabled'] = True if args.enable_master_authorized_networks else False
      command_util.ValidateMasterAuthorizedNetworks(
          args.master_authorized_networks)
      params['master_authorized_networks'] = args.master_authorized_networks
    # High resilience, Cloud-Logging-only logs, and Data Lineage integration
    # are all Composer 2+ features; each collapses its enable/disable pair
    # into a single boolean param.
    if args.enable_high_resilience or args.disable_high_resilience:
      if is_composer_v1:
        raise command_util.InvalidUserInputError(
            _INVALID_OPTION_FOR_V1_ERROR_MSG.format(
                opt='enable_high_resilience'
                if args.enable_high_resilience
                else 'disable_high_resilience'
            )
        )
      params['enable_high_resilience'] = bool(args.enable_high_resilience)
    if (
        args.enable_logs_in_cloud_logging_only
        or args.disable_logs_in_cloud_logging_only
    ):
      if is_composer_v1:
        raise command_util.InvalidUserInputError(
            _INVALID_OPTION_FOR_V1_ERROR_MSG.format(
                opt='enable_logs_in_cloud_logging_only'
                if args.enable_logs_in_cloud_logging_only
                else 'disable_logs_in_cloud_logging_only'
            )
        )
      params['enable_logs_in_cloud_logging_only'] = bool(
          args.enable_logs_in_cloud_logging_only
      )
    if (
        args.enable_cloud_data_lineage_integration
        or args.disable_cloud_data_lineage_integration
    ):
      if is_composer_v1:
        raise command_util.InvalidUserInputError(
            _INVALID_OPTION_FOR_V1_ERROR_MSG.format(
                opt='enable-cloud-data-lineage-integration'
                if args.enable_cloud_data_lineage_integration
                else 'disable-cloud-data-lineage-integration'
            )
        )
      params['cloud_data_lineage_integration_enabled'] = bool(
          args.enable_cloud_data_lineage_integration
      )
    self._addComposer3Fields(params, args, env_obj)
    return patch_util.ConstructPatch(**params)

  def _getImageVersion(self, args, env_ref, env_obj, release_track):
    """Resolves and validates the target image version for an upgrade.

    May rewrite args.image_version from --airflow-version. Returns
    args.image_version (possibly None when no upgrade was requested).
    NOTE(review): env_ref and release_track are currently unused here.
    """
    is_composer_3 = image_versions_command_util.IsVersionComposer3Compatible(
        env_obj.config.softwareConfig.imageVersion
    )
    # Warn when the user relies on a defaulted (unpinned) Composer version.
    if (
        (args.image_version or (args.airflow_version and not is_composer_3))
        and (
            image_versions_command_util.IsDefaultImageVersion(
                args.image_version
            )
        )
    ):
      message = (
          image_versions_command_util.BuildDefaultComposerVersionWarning(
              args.image_version, args.airflow_version
          )
      )
      log.warning(message)
    if args.airflow_version:
      # Converts airflow_version arg to image_version arg.
      args.image_version = (
          image_versions_command_util.ImageVersionFromAirflowVersion(
              args.airflow_version, env_obj.config.softwareConfig.imageVersion
          )
      )
    # Checks validity of image_version upgrade request.
    if args.image_version:
      upgrade_validation = (
          image_versions_command_util.IsValidImageVersionUpgrade(
              env_obj.config.softwareConfig.imageVersion, args.image_version
          )
      )
      if not upgrade_validation.upgrade_valid:
        raise command_util.InvalidUserInputError(upgrade_validation.error)
    return args.image_version

  def _addComposer3Fields(self, params, args, env_obj):
    """Validates Composer-3-only flags and folds them into params."""
    is_composer3 = image_versions_command_util.IsVersionComposer3Compatible(
        env_obj.config.softwareConfig.imageVersion
    )
    # Flags in this map may only be used against Composer 3 environments.
    possible_args = {
        'support-web-server-plugins': args.support_web_server_plugins,
        'enable-private-builds-only': args.enable_private_builds_only,
        'disable-private-builds-only': args.disable_private_builds_only,
        'dag-processor-cpu': args.dag_processor_cpu,
        'dag-processor-memory': args.dag_processor_memory,
        'dag-processor-count': args.dag_processor_count,
        'dag-processor-storage': args.dag_processor_storage,
        'disable-vpc-connectivity': args.disable_vpc_connectivity,
        'enable-private-environment': args.enable_private_environment,
        'disable-private-environment': args.disable_private_environment,
        'network': args.network,
        'subnetwork': args.subnetwork,
        'clear-maintenance-window': args.clear_maintenance_window,
    }
    for k, v in possible_args.items():
      if v is not None and not is_composer3:
        raise command_util.InvalidUserInputError(
            flags.COMPOSER3_IS_REQUIRED_MSG.format(
                opt=k,
                composer_version=flags.MIN_COMPOSER3_VERSION,
            )
        )
    # Any DAG-processor resource flag marks the workloads config as updated.
    if (
        args.dag_processor_count is not None
        or args.dag_processor_cpu
        or args.dag_processor_memory
        or args.dag_processor_storage
    ):
      params['workload_updated'] = True
    # Start from the currently configured DAG processor, then apply overrides.
    dag_processor_count = None
    dag_processor_cpu = None
    dag_processor_memory_gb = None
    dag_processor_storage_gb = None
    if (
        env_obj.config.workloadsConfig
        and env_obj.config.workloadsConfig.dagProcessor
    ):
      dag_processor_resource = env_obj.config.workloadsConfig.dagProcessor
      dag_processor_count = dag_processor_resource.count
      dag_processor_cpu = dag_processor_resource.cpu
      dag_processor_memory_gb = dag_processor_resource.memoryGb
      dag_processor_storage_gb = dag_processor_resource.storageGb
    if args.dag_processor_count is not None:
      dag_processor_count = args.dag_processor_count
    if args.dag_processor_cpu:
      dag_processor_cpu = args.dag_processor_cpu
    if args.dag_processor_memory:
      dag_processor_memory_gb = environments_api_util.MemorySizeBytesToGB(
          args.dag_processor_memory
      )
    if args.dag_processor_storage:
      dag_processor_storage_gb = environments_api_util.MemorySizeBytesToGB(
          args.dag_processor_storage
      )
    if args.support_web_server_plugins is not None:
      params['support_web_server_plugins'] = args.support_web_server_plugins
    if args.enable_private_builds_only or args.disable_private_builds_only:
      params['support_private_builds_only'] = (
          True if args.enable_private_builds_only else False
      )
    if args.enable_private_environment is not None:
      params['enable_private_environment'] = args.enable_private_environment
    if args.disable_private_environment is not None:
      params['disable_private_environment'] = args.disable_private_environment
    params['dag_processor_count'] = dag_processor_count
    params['dag_processor_cpu'] = dag_processor_cpu
    params['dag_processor_memory_gb'] = dag_processor_memory_gb
    params['dag_processor_storage_gb'] = dag_processor_storage_gb
    if args.disable_vpc_connectivity:
      params['disable_vpc_connectivity'] = True
    if args.network_attachment:
      params['network_attachment'] = args.network_attachment
    if args.network:
      params['network'] = args.network
    if args.subnetwork:
      params['subnetwork'] = args.subnetwork
    # TODO(b/245909413): Update Composer version

  def _addScheduledSnapshotFields(self, params, args, is_composer_v1):
    """Validates scheduled-snapshot flags and folds them into params."""
    if (args.disable_scheduled_snapshot_creation or
        args.enable_scheduled_snapshot_creation) and is_composer_v1:
      raise command_util.InvalidUserInputError(
          'Scheduled Snapshots flags introduced in Composer 2.X'
          ' cannot be used when creating Composer 1 environments.')
    if args.disable_scheduled_snapshot_creation:
      params['enable_scheduled_snapshot_creation'] = False
    if args.enable_scheduled_snapshot_creation:
      params['enable_scheduled_snapshot_creation'] = True
      params['snapshot_location'] = args.snapshot_location
      params['snapshot_schedule_timezone'] = args.snapshot_schedule_timezone
      params['snapshot_creation_schedule'] = args.snapshot_creation_schedule

  def _addTriggererFields(self, params, args, env_obj):
    """Validates triggerer flags and folds them into params."""
    triggerer_supported = image_versions_command_util.IsVersionTriggererCompatible(
        env_obj.config.softwareConfig.imageVersion)
    # Start from the currently configured triggerer; a count of 0 means the
    # triggerer is disabled, so its resources are not carried over.
    triggerer_count = None
    triggerer_cpu = None
    triggerer_memory_gb = None
    if (env_obj.config.workloadsConfig and
        env_obj.config.workloadsConfig.triggerer and
        env_obj.config.workloadsConfig.triggerer.count != 0):
      triggerer_count = env_obj.config.workloadsConfig.triggerer.count
      triggerer_memory_gb = env_obj.config.workloadsConfig.triggerer.memoryGb
      triggerer_cpu = env_obj.config.workloadsConfig.triggerer.cpu
    if args.disable_triggerer or args.enable_triggerer:
      triggerer_count = 1 if args.enable_triggerer else 0
    if args.triggerer_count is not None:
      triggerer_count = args.triggerer_count
    if args.triggerer_cpu:
      triggerer_cpu = args.triggerer_cpu
    if args.triggerer_memory:
      triggerer_memory_gb = environments_api_util.MemorySizeBytesToGB(
          args.triggerer_memory)
    # NOTE(review): 'triggerer-count' is keyed to args.enable_triggerer, not
    # args.triggerer_count — confirm whether that is intentional.
    possible_args = {
        'triggerer-count': args.enable_triggerer,
        'triggerer-cpu': args.triggerer_cpu,
        'triggerer-memory': args.triggerer_memory
    }
    for k, v in possible_args.items():
      if v and not triggerer_supported:
        raise command_util.InvalidUserInputError(
            flags.INVALID_OPTION_FOR_MIN_IMAGE_VERSION_ERROR_MSG.format(
                opt=k,
                composer_version=flags.MIN_TRIGGERER_COMPOSER_VERSION,
                airflow_version=flags.MIN_TRIGGERER_AIRFLOW_VERSION))
    # Resource flags are meaningless while the triggerer stays disabled.
    if not triggerer_count:
      if args.triggerer_cpu:
        raise command_util.InvalidUserInputError(
            'Cannot specify --triggerer-cpu without enabled triggerer')
      if args.triggerer_memory:
        raise command_util.InvalidUserInputError(
            'Cannot specify --triggerer-memory without enabled triggerer')
    if triggerer_count == 1 and not (triggerer_memory_gb and triggerer_cpu):
      raise command_util.InvalidUserInputError(
          'Cannot enable triggerer without providing triggerer memory and cpu.')
    params['triggerer_count'] = triggerer_count
    if triggerer_count:
      params['triggerer_cpu'] = triggerer_cpu
      params['triggerer_memory_gb'] = triggerer_memory_gb

  def Run(self, args):
    env_ref = args.CONCEPTS.environment.Parse()
    field_mask, patch = self._ConstructPatch(env_ref, args)
    return patch_util.Patch(
        env_ref,
        field_mask,
        patch,
        args.async_,
        release_track=self.ReleaseTrack())
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class UpdateBeta(Update):
  """Update properties of a Cloud Composer environment."""

  _support_autoscaling = True
  _support_maintenance_window = True
  _support_environment_size = True

  @staticmethod
  def AlphaAndBetaArgs(parser, release_track=base.ReleaseTrack.BETA):
    """Arguments available only in both alpha and beta."""
    Update.Args(parser, release_track=release_track)

  @staticmethod
  def Args(parser):
    """Arguments available only in beta, not in alpha."""
    UpdateBeta.AlphaAndBetaArgs(parser, base.ReleaseTrack.BETA)

  def Run(self, args):
    environment_ref = args.CONCEPTS.environment.Parse()
    # Beta additionally validates the web server allow-list IP ranges before
    # building the patch.
    if self.ReleaseTrack() == base.ReleaseTrack.BETA:
      acl_entries = args.update_web_server_allow_ip
      if acl_entries:
        flags.ValidateIpRanges([entry['ip_range'] for entry in acl_entries])
    field_mask, patch = self._ConstructPatch(environment_ref, args)
    return patch_util.Patch(
        environment_ref,
        field_mask,
        patch,
        args.async_,
        release_track=self.ReleaseTrack())
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(UpdateBeta):
  """Update properties of a Cloud Composer environment."""

  _support_autoscaling = True

  @staticmethod
  def Args(parser):
    # Alpha reuses the shared alpha/beta flag set, tagged with its own track.
    UpdateBeta.AlphaAndBetaArgs(parser, release_track=base.ReleaseTrack.ALPHA)

View File

@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for Cloud Composer user workloads ConfigMaps."""
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class UserWorkloadsConfigMaps(base.Group):
  """Create and manage user workloads ConfigMaps of environment.

  The {command} command group manages user workloads ConfigMaps of Cloud
  Composer environments.
  """


# Surface this group under the Composer category in the `gcloud` help tree.
UserWorkloadsConfigMaps.category = base.COMPOSER_CATEGORY

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that creates a user workloads ConfigMap."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_config_maps_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': textwrap.dedent("""\
To create a user workloads ConfigMap of the environment named env-1, run:
$ {command} --environment=env-1 --config-map-file-path=config_map.yaml
""")})
@base.DefaultUniverseOnly
class CreateUserWorkloadsConfigMap(base.Command):
  """Create a user workloads ConfigMap."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    resource_args.AddEnvironmentResourceArg(
        parser,
        'where the user workloads ConfigMap must be created',
        positional=False,
    )
    parser.add_argument(
        '--config-map-file-path',
        required=True,
        type=str,
        help=(
            'Path to a local file with a single Kubernetes ConfigMap in YAML'
            ' format.'
        ),
    )

  def Run(self, args):
    environment = args.CONCEPTS.environment.Parse()
    # The util reads the YAML file and issues the create RPC; the returned
    # message carries the server-assigned resource name.
    created = (
        environments_user_workloads_config_maps_util
        .CreateUserWorkloadsConfigMap(
            environment,
            args.config_map_file_path,
            release_track=self.ReleaseTrack(),
        )
    )
    log.status.Print(f'ConfigMap {created.name} created')

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that deletes a user workloads ConfigMap."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_config_maps_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To delete a user workloads ConfigMap of the environment named env-1, run:
$ {command} config-map-1 --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class DeleteUserWorkloadsConfigMap(base.Command):
  """Delete a user workloads ConfigMap."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the ConfigMap name argument and the environment arg."""
    base.Argument(
        'config_map_name', nargs='?', help='Name of the ConfigMap.'
    ).AddToParser(parser)
    resource_args.AddEnvironmentResourceArg(
        parser, 'of the config_map', positional=False)

  def Run(self, args):
    """Deletes the named ConfigMap and prints a confirmation."""
    environment_ref = args.CONCEPTS.environment.Parse()
    environments_user_workloads_config_maps_util.DeleteUserWorkloadsConfigMap(
        environment_ref,
        args.config_map_name,
        release_track=self.ReleaseTrack(),
    )
    log.status.Print('ConfigMap deleted')

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that gets details about a user workloads ConfigMap."""
import textwrap
import typing
from typing import Union
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_config_maps_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
if typing.TYPE_CHECKING:
from googlecloudsdk.generated_clients.apis.composer.v1alpha2 import composer_v1alpha2_messages
from googlecloudsdk.generated_clients.apis.composer.v1beta1 import composer_v1beta1_messages
from googlecloudsdk.generated_clients.apis.composer.v1 import composer_v1_messages
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To get details about a user workloads ConfigMap of the environment named env-1, run:
$ {command} config-map-1 --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class DescribeUserWorkloadsConfigMap(base.Command):
  """Get details about a user workloads ConfigMap."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the ConfigMap name argument and the environment arg."""
    base.Argument(
        'config_map_name', nargs='?', help='Name of the ConfigMap.'
    ).AddToParser(parser)
    resource_args.AddEnvironmentResourceArg(
        parser, 'of the config_map', positional=False)

  def Run(self, args) -> Union[
      'composer_v1alpha2_messages.UserWorkloadsConfigMap',
      'composer_v1beta1_messages.UserWorkloadsConfigMap',
      'composer_v1_messages.UserWorkloadsConfigMap',
  ]:
    """Fetches and returns the requested ConfigMap message."""
    environment_ref = args.CONCEPTS.environment.Parse()
    config_map = (
        environments_user_workloads_config_maps_util.GetUserWorkloadsConfigMap(
            environment_ref,
            args.config_map_name,
            release_track=self.ReleaseTrack(),
        )
    )
    return config_map

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that list user workloads ConfigMaps."""
import textwrap
import typing
from typing import Sequence, Union
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_config_maps_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
if typing.TYPE_CHECKING:
from googlecloudsdk.generated_clients.apis.composer.v1alpha2 import composer_v1alpha2_messages
from googlecloudsdk.generated_clients.apis.composer.v1beta1 import composer_v1beta1_messages
from googlecloudsdk.generated_clients.apis.composer.v1 import composer_v1_messages
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To list user workloads ConfigMaps of the environment named env-1, run:
$ {command} --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class ListUserWorkloadsConfigMaps(base.Command):
  """List user workloads ConfigMaps."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment arg and the tabular output format."""
    resource_args.AddEnvironmentResourceArg(
        parser, 'to list user workloads ConfigMaps', positional=False)
    parser.display_info.AddFormat('table[box](name.segment(7),data)')

  def Run(self, args) -> Union[
      Sequence['composer_v1alpha2_messages.UserWorkloadsConfigMap'],
      Sequence['composer_v1beta1_messages.UserWorkloadsConfigMap'],
      Sequence['composer_v1_messages.UserWorkloadsConfigMap'],
  ]:
    """Returns the environment's user workloads ConfigMaps."""
    environment_ref = args.CONCEPTS.environment.Parse()
    config_maps = (
        environments_user_workloads_config_maps_util.ListUserWorkloadsConfigMaps(
            environment_ref,
            release_track=self.ReleaseTrack(),
        )
    )
    return config_maps

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that updates a user workloads ConfigMap."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_config_maps_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To update a user workloads ConfigMap of the environment named env-1, run:
$ {command} --environment=env-1 --config-map-file-path=config_map.yaml
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class UpdateUserWorkloadsConfigMap(base.Command):
  """Update a user workloads ConfigMap."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment resource arg and the ConfigMap file flag."""
    resource_args.AddEnvironmentResourceArg(
        parser, 'where the user workloads ConfigMap must be updated',
        positional=False)
    parser.add_argument(
        '--config-map-file-path',
        required=True,
        type=str,
        help=(
            'Path to a local file with a single Kubernetes ConfigMap in YAML'
            ' format.'
        ),
    )

  def Run(self, args):
    """Updates the ConfigMap and prints a confirmation with its name."""
    environment_ref = args.CONCEPTS.environment.Parse()
    response = (
        environments_user_workloads_config_maps_util.UpdateUserWorkloadsConfigMap(
            environment_ref,
            args.config_map_file_path,
            release_track=self.ReleaseTrack(),
        )
    )
    log.status.Print(f'ConfigMap {response.name} updated')

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for Cloud Composer user workloads Secrets."""
from googlecloudsdk.calliope import base
# Calliope command group: `gcloud composer environments user-workloads-secrets`.
@base.DefaultUniverseOnly
class UserWorkloadsSecrets(base.Group):
  """Create and manage user workloads Secrets of environment.
  The {command} command group manages user workloads Secrets of Cloud Composer
  environments.
  """
# Attach the group to the Composer category so it is listed with the other
# Composer commands in `gcloud` help output.
UserWorkloadsSecrets.category = base.COMPOSER_CATEGORY

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that creates a user workloads Secret."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_secrets_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To create a user workloads Secret of the environment named env-1, run:
$ {command} --environment=env-1 --secret-file-path=secret.yaml
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class CreateUserWorkloadsSecret(base.Command):
  """Create a user workloads Secret."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment resource arg and the Secret file flag."""
    resource_args.AddEnvironmentResourceArg(
        parser, 'where the user workloads Secret must be created',
        positional=False)
    parser.add_argument(
        '--secret-file-path',
        required=True,
        type=str,
        help=(
            'Path to a local file with a single Kubernetes Secret in YAML'
            ' format.'
        ),
    )

  def Run(self, args):
    """Creates the Secret and prints a confirmation with its name."""
    environment_ref = args.CONCEPTS.environment.Parse()
    response = environments_user_workloads_secrets_util.CreateUserWorkloadsSecret(
        environment_ref,
        args.secret_file_path,
        release_track=self.ReleaseTrack(),
    )
    log.status.Print(f'Secret {response.name} created')

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that deletes a user workloads Secret."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_secrets_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To delete a user workloads Secret of the environment named env-1, run:
$ {command} secret-1 --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class DeleteUserWorkloadsSecret(base.Command):
  """Delete a user workloads Secret."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the Secret name argument and the environment arg."""
    base.Argument(
        'secret_name', nargs='?', help='Name of the Secret.'
    ).AddToParser(parser)
    resource_args.AddEnvironmentResourceArg(
        parser, 'of the secret', positional=False)

  def Run(self, args):
    """Deletes the named Secret and prints a confirmation."""
    environment_ref = args.CONCEPTS.environment.Parse()
    environments_user_workloads_secrets_util.DeleteUserWorkloadsSecret(
        environment_ref,
        args.secret_name,
        release_track=self.ReleaseTrack(),
    )
    log.status.Print('Secret deleted')

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that gets details about a user workloads Secret."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_secrets_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To get details about a user workloads Secret of the environment named env-1, run:
$ {command} secret-1 --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class DescribeUserWorkloadsSecret(base.Command):
  """Get details about a user workloads Secret."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the Secret name argument and the environment arg."""
    base.Argument(
        'secret_name', nargs='?', help='Name of the Secret.'
    ).AddToParser(parser)
    resource_args.AddEnvironmentResourceArg(
        parser, 'of the secret', positional=False)

  def Run(self, args):
    """Fetches and returns the requested Secret message."""
    environment_ref = args.CONCEPTS.environment.Parse()
    secret = environments_user_workloads_secrets_util.GetUserWorkloadsSecret(
        environment_ref,
        args.secret_name,
        release_track=self.ReleaseTrack(),
    )
    return secret

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that list user workloads Secrets."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_secrets_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To list user workloads Secrets of the environment named env-1, run:
$ {command} --environment=env-1
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class ListUserWorkloadsSecrets(base.Command):
  """List user workloads Secrets."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment arg and the tabular output format."""
    resource_args.AddEnvironmentResourceArg(
        parser, 'to list user workloads Secrets', positional=False)
    parser.display_info.AddFormat('table[box](name.segment(7),data)')

  def Run(self, args):
    """Returns the environment's user workloads Secrets."""
    environment_ref = args.CONCEPTS.environment.Parse()
    secrets = environments_user_workloads_secrets_util.ListUserWorkloadsSecrets(
        environment_ref,
        release_track=self.ReleaseTrack(),
    )
    return secrets

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command that updates a user workloads Secret."""
import textwrap
import frozendict
from googlecloudsdk.api_lib.composer import environments_user_workloads_secrets_util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.composer import resource_args
from googlecloudsdk.core import log
# Examples section rendered into this command's `--help` output.
_EXAMPLES = textwrap.dedent("""\
To update a user workloads Secret of the environment named env-1, run:
$ {command} --environment=env-1 --secret-file-path=secret.yaml
""")
_DETAILED_HELP = frozendict.frozendict({'EXAMPLES': _EXAMPLES})


@base.DefaultUniverseOnly
class UpdateUserWorkloadsSecret(base.Command):
  """Update a user workloads Secret."""

  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    """Registers the environment resource arg and the Secret file flag."""
    resource_args.AddEnvironmentResourceArg(
        parser, 'where the user workloads Secret must be updated',
        positional=False)
    parser.add_argument(
        '--secret-file-path',
        required=True,
        type=str,
        help=(
            'Path to a local file with a single Kubernetes Secret in YAML'
            ' format.'
        ),
    )

  def Run(self, args):
    """Updates the Secret and prints a confirmation with its name."""
    environment_ref = args.CONCEPTS.environment.Parse()
    response = environments_user_workloads_secrets_util.UpdateUserWorkloadsSecret(
        environment_ref,
        args.secret_file_path,
        release_track=self.ReleaseTrack(),
    )
    log.status.Print(f'Secret {response.name} updated')