feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities related to adding flags for the gcloud meta api commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as c_exc
from googlecloudsdk.calliope import parser_extensions
from googlecloudsdk.command_lib.util.apis import arg_marshalling
from googlecloudsdk.command_lib.util.apis import registry
def APICompleter(**_):
  """Argument completer returning the names of all known APIs."""
  return [api.name for api in registry.GetAllAPIs()]
def CollectionCompleter(**_):
  """Argument completer returning the full names of all API collections."""
  return [collection.full_name for collection in registry.GetAPICollections()]
def MethodCompleter(prefix, parsed_args, **_):
  """Argument completer listing the methods of the parsed collection, if any."""
  del prefix
  collection = getattr(parsed_args, 'collection', None)
  return ([method.name for method in registry.GetMethods(collection)]
          if collection else [])
# Optional flag pinning a specific API version; the API's default version is
# used when omitted.
API_VERSION_FLAG = base.Argument(
    '--api-version',
    help='The version of the given API to use. If not provided, the default '
    'version of the API will be used.')

# Required flag naming the resource collection a method belongs to.
COLLECTION_FLAG = base.Argument(
    '--collection',
    required=True,
    completer=CollectionCompleter,
    help='The name of the collection to specify the method for.')

# When set, list responses are returned verbatim instead of being flattened
# into a list of items.
RAW_FLAG = base.Argument(
    '--raw',
    action='store_true',
    default=False,
    help='For list commands, the response is flattened to return the items as '
    'a list rather than returning the list response verbatim. Use this '
    'flag to disable this behavior and return the raw response.'
)

# Required flag naming the API to operate on.
API_REQUIRED_FLAG = base.Argument(
    '--api',
    required=True,
    completer=APICompleter,
    help='The name of the API to get the attributes for.')
class MethodDynamicPositionalAction(parser_extensions.DynamicPositionalAction):
  """A DynamicPositionalAction that adds flags for a given method to the parser.

  Based on the value given for method, it looks up the valid fields for that
  method call and adds those flags to the parser.
  """

  def __init__(self, *args, **kwargs):
    # Pop the dest so that the superclass doesn't try to automatically save the
    # value of the arg to the namespace. We explicitly save the method ref
    # instead.
    self._dest = kwargs.pop('dest')
    super(MethodDynamicPositionalAction, self).__init__(*args, **kwargs)

  def GenerateArgs(self, namespace, method_name):
    """Generates the flags for the given API method.

    Args:
      namespace: The namespace of the args parsed so far; must already carry
        the collection (and optionally api_version and raw) values.
      method_name: str, The name of the API method to generate args for.

    Raises:
      c_exc.RequiredArgumentException: If --collection was not parsed before
        the method positional.

    Returns:
      The generated arguments for the method's request fields.
    """
    # Get the collection from the existing parsed args.
    full_collection_name = getattr(namespace, 'collection', None)
    api_version = getattr(namespace, 'api_version', None)
    if not full_collection_name:
      raise c_exc.RequiredArgumentException(
          '--collection',
          'The collection name must be specified before the API method.')
    # Look up the method and get all the args for it.
    method = registry.GetMethod(full_collection_name, method_name,
                                api_version=api_version)
    arg_generator = arg_marshalling.AutoArgumentGenerator(method,
                                                          raw=namespace.raw)
    # Save the method ref under the dest we popped in __init__ so the command
    # implementation can retrieve and Call() it.
    method_ref = MethodRef(namespace, method, arg_generator)
    setattr(namespace, self._dest, method_ref)
    return arg_generator.GenerateArgs()

  def Completions(self, prefix, parsed_args, **kwargs):
    """Returns method name completions for the parsed collection."""
    return MethodCompleter(prefix, parsed_args, **kwargs)
class MethodRef(object):
  """A method specified on the command line, bundled with its parsed flags.

  Wraps an ArgumentGenerator so the command implementation never needs to know
  which flags were generated for the method or how to map them back to request
  fields; this class knows the flags and their corresponding method fields.
  """

  def __init__(self, namespace, method, arg_generator):
    """Creates the MethodRef.

    Args:
      namespace: The argparse namespace that holds the parsed args.
      method: APIMethod, The method.
      arg_generator: arg_marshalling.AutoArgumentGenerator, The generator for
        this method.
    """
    self.namespace = namespace
    self.method = method
    self.arg_generator = arg_generator

  def Call(self):
    """Execute the method.

    Returns:
      The result of the method call.
    """
    generator = self.arg_generator
    raw = generator.raw
    return self.method.Call(
        generator.CreateRequest(self.namespace),
        raw=raw,
        limit=generator.Limit(self.namespace),
        page_size=generator.PageSize(self.namespace))

View File

@@ -0,0 +1,226 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The meta cache command library support."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis_util
from googlecloudsdk.calliope import parser_completer
from googlecloudsdk.calliope import walker
from googlecloudsdk.command_lib.util import completers
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import module_util
from googlecloudsdk.core import resources
from googlecloudsdk.core.cache import exceptions as cache_exceptions
from googlecloudsdk.core.cache import file_cache
from googlecloudsdk.core.cache import resource_cache
import six
# Default --cache value: the resource cache with its default backing name.
_CACHE_RI_DEFAULT = 'resource://'
class Error(exceptions.Error):
  """Base exception for all cache errors raised by this module."""
class NoTablesMatched(Error):
  """Raised when no table names matched the requested patterns."""
class GetCache(object):
  """Context manager for opening a cache given a cache identifier name."""

  # Maps a cache name prefix ("<prefix>://") to its cache implementation.
  _TYPES = {
      'file': file_cache.Cache,
      'resource': resource_cache.ResourceCache,
  }

  def __init__(self, name, create=False):
    """Constructor.

    Args:
      name: The cache name to operate on. May be prefixed by "resource://" for
        resource cache names or "file://" for persistent file cache names. If
        only the prefix is specified then the default cache name for that
        prefix is used.
      create: If True, creates the persistent cache if it does not already
        exist.

    Raises:
      CacheNotFound: If the cache does not exist.
    """
    self._name = name
    self._create = create
    self._cache = None

  def _OpenCache(self, cache_class, name):
    # Opens cache_class on name, mapping low-level cache errors to this
    # module's Error so callers need only catch one type.
    try:
      return cache_class(name, create=self._create)
    except cache_exceptions.Error as e:
      raise Error(e)

  def __enter__(self):
    # Each cache_class has a default cache name. None or '' names that default.
    if self._name:
      for cache_id, cache_class in six.iteritems(self._TYPES):
        if self._name.startswith(cache_id + '://'):
          # Strip "<prefix>://"; an empty remainder selects the default name.
          name = self._name[len(cache_id) + 3:]
          if not name:
            name = None
          self._cache = self._OpenCache(cache_class, name)
          return self._cache
    # No recognized prefix: fall back to the resource cache implementation.
    self._cache = self._OpenCache(resource_cache.ResourceCache, self._name)
    return self._cache

  def __exit__(self, typ, value, traceback):
    # Commit only on a clean exit; discard changes if an exception is
    # propagating (typ is the exception type, None on success).
    self._cache.Close(commit=typ is None)
def Delete():
  """Deletes the resource cache regardless of implementation.

  Raises:
    Error: If the underlying cache deletion fails.

  Returns:
    None.
  """
  try:
    resource_cache.Delete()
  except cache_exceptions.Error as e:
    # Re-raise as this module's Error so callers need only catch one type.
    raise Error(e)
  return None
def AddCacheFlag(parser):
  """Adds the persistent cache flag to the parser."""
  help_text = ('The cache name to operate on. May be prefixed by "{}" for '
               'resource cache names. If only the prefix is specified then the '
               'default cache name for that prefix is used.'.format(
                   _CACHE_RI_DEFAULT))
  parser.add_argument(
      '--cache',
      metavar='CACHE_NAME',
      default=_CACHE_RI_DEFAULT,
      help=help_text)
def _GetCompleterType(completer_class):
  """Returns the completer type name given its class.

  Walks the MRO, remembering the name of each class ending in 'Completer'
  until the common completers.ResourceCompleter base is reached, so the last
  (most generic) project completer type name wins. Non-class callables are
  reported as 'function'.

  Args:
    completer_class: The completer class, or a callable completer.

  Returns:
    The completer type name, or None if it could not be determined.
  """
  completer_type = None
  try:
    for t in completer_class.mro():
      if t == completers.ResourceCompleter:
        break
      if t.__name__.endswith('Completer'):
        completer_type = t.__name__
  except AttributeError:
    # Plain functions and other non-classes have no mro() method.
    pass
  if not completer_type and callable(completer_class):
    completer_type = 'function'
  return completer_type
class _CompleterModule(object):
  """Info on one completer module and the commands/arguments that use it."""

  def __init__(self, module_path, collection, api_version, completer_type):
    # The module path of the completer class or function.
    self.module_path = module_path
    # The completed resource collection, None, or 'ERROR: ...' text when the
    # completer could not be instantiated.
    self.collection = collection
    # The API version used by the completer, if known.
    self.api_version = api_version
    # A *Completer class name or 'function' (see _GetCompleterType).
    self.type = completer_type
    # _CompleterAttachment list, one per command using this completer.
    self.attachments = []
    # Maps command path -> _CompleterAttachment for de-duplication.
    self._attachments_dict = {}
class _CompleterAttachment(object):
  """Records the arguments of one command that use a given completer."""

  def __init__(self, command):
    # The space-joined command path string.
    self.command = command
    # Names of the command's arguments (flags or positionals) using the
    # completer.
    self.arguments = []
class _CompleterModuleGenerator(walker.Walker):
  """Walks the CLI tree collecting the completer modules attached to args."""

  def __init__(self, cli):
    super(_CompleterModuleGenerator, self).__init__(cli)
    # Maps completer module path -> _CompleterModule.
    self._modules_dict = {}

  def Visit(self, command, parent, is_group):
    """Visits each command in the CLI command tree to construct the module list.

    Args:
      command: group/command CommandCommon info.
      parent: The parent Visit() return value, None at the top level.
      is_group: True if command is a group, otherwise it is a command.

    Returns:
      The subtree module list.
    """

    def _ActionKey(action):
      # Stable sort key: the argparse action repr.
      return action.__repr__()

    args = command.ai
    for arg in sorted(args.flag_args + args.positional_args, key=_ActionKey):
      try:
        completer_class = arg.completer
      except AttributeError:
        # Argument has no completer attached; nothing to record.
        continue
      collection = None
      api_version = None
      if isinstance(completer_class, parser_completer.ArgumentCompleter):
        # Unwrap to the underlying completer class.
        completer_class = completer_class.completer_class
      module_path = module_util.GetModulePath(completer_class)
      if isinstance(completer_class, type):
        try:
          # Instantiate to probe for collection/api_version attributes; either
          # may be absent depending on the completer type.
          completer = completer_class()
          try:
            collection = completer.collection
          except AttributeError:
            pass
          try:
            api_version = completer.api_version
          except AttributeError:
            pass
        except (apis_util.UnknownAPIError,
                resources.InvalidCollectionException) as e:
          # Record the failure in place of the collection name.
          collection = 'ERROR: {}'.format(e)
      # Flag args are reported by their first option string; positionals by
      # their hyphenated dest.
      if arg.option_strings:
        name = arg.option_strings[0]
      else:
        name = arg.dest.replace('_', '-')
      module = self._modules_dict.get(module_path)
      if not module:
        module = _CompleterModule(
            module_path=module_path,
            collection=collection,
            api_version=api_version,
            completer_type=_GetCompleterType(completer_class),
        )
        self._modules_dict[module_path] = module
      command_path = ' '.join(command.GetPath())
      # pylint: disable=protected-access
      attachment = module._attachments_dict.get(command_path)
      if not attachment:
        attachment = _CompleterAttachment(command_path)
        module._attachments_dict[command_path] = attachment
        module.attachments.append(attachment)
      attachment.arguments.append(name)
    return self._modules_dict
def ListAttachedCompleters(cli):
  """Returns the list of all attached CompleterModule objects in cli."""
  modules_by_path = _CompleterModuleGenerator(cli).Walk()
  return list(modules_by_path.values())

View File

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
- release_tracks: ${release_tracks}
help_text:
brief: Create a ${uppercase_api_name} ${singular_name}.
description: |
Create a new ${uppercase_api_name} ${singular_name}.
examples: |
To create a ${singular_name} called 'test-${singular_name}', run:
$ {command} my-${singular_name}
request:
collection: ${collection_name}
api_version: ${api_version}
arguments:
resource:
help_text: ${uppercase_api_name} ${singular_name} to create.
# The following should point to the resource argument definition under
# your surface's command_lib directory.:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${singular_name}
params:
%for arg in create_args:
- arg_name: ${create_args[arg]}
api_field: ${singular_name}.${arg}
help_text: |
Default ${arg} used by this ${singular_name}.
%endfor

View File

@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
- release_tracks: ${release_tracks}
help_text:
brief: Delete a ${uppercase_api_name} ${singular_name}.
description: |
Delete a ${uppercase_api_name} ${singular_name}.
examples: |
To delete a ${singular_name} called 'test-${singular_name}', run:
$ {command} my-${singular_name}
request:
collection: ${collection_name}
api_version: ${api_version}
arguments:
resource:
help_text: ${uppercase_api_name} ${singular_name} to delete.
# The following should point to the resource argument definition under your
# surface's command_lib directory.:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${singular_name}

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
release_tracks: ${release_tracks}
help_text:
brief: Show details about the ${singular_name}.
description: Show details about the ${singular_name}.
examples: |
To show details about a ${singular_name}, run:
$ {command} my-${singular_name} ${flags}
request:
collection: ${collection_name}
api_version: ${api_version}
arguments:
resource:
help_text: The ${singular_name} you want to describe.
# The following should point to the resource argument definition under your
# surface's command_lib directory.:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${singular_name}

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
release_tracks: ${release_tracks}
help_text:
brief: Get the IAM policy for the ${singular_name}.
description: |
*{command}* displays the IAM policy associated with the ${singular_name}.
If formatted as JSON, the output can be edited and used as a
policy file for set-iam-policy. The output includes an "etag" field
identifying the version emitted and allowing detection of
concurrent policy updates; see
$ {parent} set-iam-policy for additional details.
examples: |
To print the IAM policy for a given ${singular_name}, run:
$ {command} my-${singular_name} ${flags}
request:
collection: ${collection_name}
api_version: ${api_version}
use_relative_name: ${use_relative_name}
iam:
# Whether the command can accept 'condition' as part of IAM policy binding.
enable_condition: true
# IAM Policy version. Valid options are "0", "1", and "3".
# (Version 3 allows conditions.)
policy_version: 3
# Path to the policy_version field, for APIs that use non-standard mapping.
# Also commonly "options_requestedPolicyVersion"
get_iam_policy_version_path: getIamPolicyRequest.options.requestedPolicyVersion
arguments:
resource:
help_text: The ${singular_name} for which to display the IAM policy.
# the following should point to the resource argument definition under your
# surface's command_lib directory:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${singular_name}

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
- release_tracks: ${release_tracks}
help_text:
brief: List ${uppercase_api_name} ${plural_resource_name}.
description: |
List ${uppercase_api_name} ${plural_resource_name}.
examples: |
To list the ${plural_resource_name}, run:
$ {command}
request:
collection: ${collection_name}
api_version: ${api_version}
response:
id_field: name
arguments:
resource:
help_text: Parent ${uppercase_api_name} ${parent} to list all contained ${uppercase_api_name} ${plural_resource_name}.
# The following should point to the parent resource argument definition
# under your surface's command_lib directory.:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${parent}
output:
format: |
table(
name.basename():label=NAME,
% for field in create_args:
${field}:label=${field.upper()}${'' if loop.last else ','}
% endfor
)

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*- #
## Copyright 2020 Google LLC. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
release_tracks: ${release_tracks}
help_text:
brief: Set the IAM policy for the ${singular_name}.
description: |
*{command}* sets the IAM policy associated with the ${singular_name}.
If formatted as JSON, the output can be edited and used as a
policy file for set-iam-policy. The output includes an "etag" field
identifying the version emitted and allowing detection of
concurrent policy updates; see
$ {parent} set-iam-policy for additional details.
examples: |
To set the IAM policy for a given ${singular_name}, run:
$ {command} my-${singular_name} ${flags} policy.json
request:
collection: ${collection_name}
api_version: ${api_version}
use_relative_name: ${use_relative_name}
iam:
# Whether the command can accept 'condition' as part of IAM policy binding.
enable_condition: true
# IAM Policy version. Valid options are "0", "1", and "3".
# (Version 3 allows conditions.)
policy_version: 3
arguments:
resource:
help_text: The ${singular_name} for which to set the IAM policy.
# the following should point to the resource argument definition under your
# surface's command_lib directory:
spec: !REF googlecloudsdk.command_lib.${api_name}.resources:${singular_name}

View File

@@ -0,0 +1,30 @@
# ${utf_encoding}
# Copyright ${current_year} Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for managing ${branded_api_name} ${singular_resource_name_with_spaces} configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(${release_tracks})
class Config(base.Group):
"""Manage ${branded_api_name} ${singular_resource_name_with_spaces} configurations."""
% if group_category:
category = base.${group_category}
% endif

View File

@@ -0,0 +1,38 @@
release_tracks: ${release_tracks}
command_type: CONFIG_EXPORT
help_text:
brief: Export the configuration for ${api_a_or_an} ${branded_api_name} ${singular_name_with_spaces}.
description: |
*{command}* exports the configuration for ${api_a_or_an} ${branded_api_name} ${singular_name_with_spaces}.
${singular_capitalized_name} configurations can be exported in
Kubernetes Resource Model (krm) or Terraform HCL formats. The
default format is `krm`.
Specifying `--all` allows you to export the configurations for all
${plural_resource_name_with_spaces} within the project.
Specifying `--path` allows you to export the configuration(s) to
a local directory.
examples: |
To export the configuration for ${resource_a_or_an} ${singular_name_with_spaces}, run:
$ {command} ${resource_argument_name}
To export the configuration for ${resource_a_or_an} ${singular_name_with_spaces} to a file, run:
$ {command} ${resource_argument_name} --path=/path/to/dir/
To export the configuration for ${resource_a_or_an} ${singular_name_with_spaces} in Terraform
HCL format, run:
$ {command} ${resource_argument_name} --resource-format=terraform
To export the configurations for all ${plural_resource_name_with_spaces} within a
project, run:
$ {command} --all
arguments:
resource:
help_text: ${singular_capitalized_name} to export the configuration for.
spec: !REF googlecloudsdk.command_lib.${resource_spec_path}

View File

@@ -0,0 +1,14 @@
# ${utf_encoding}
# Copyright ${current_year} Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,16 @@
release_tracks: ${release_tracks}
arguments:
- group:
mutex: true
required: true
arguments:
- group:
arguments:
- name: ${surface_spec_resource_arg}
resource_arg: true
positional: true
required: false
- name: all
- name: path
required: false
- name: resource-format

View File

@@ -0,0 +1,42 @@
# ${utf_encoding}
# Copyright ${current_year} Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the ${test_command_string} config export command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.tests.lib import cli_test_base
from googlecloudsdk.tests.lib import sdk_test_base
from googlecloudsdk.tests.lib import test_case
from googlecloudsdk.tests.lib.command_lib.util.declarative import test_base
class ConfigExportTest(test_base.ConfigExportTestMixin,
cli_test_base.CliTestBase, sdk_test_base.WithFakeAuth):
def SetUp(self):
command = '${test_command_string}'
resource_args = ['${test_command_arguments}']${pylint_disable}
self.ConfigureTests(
track=calliope_base.ReleaseTrack.ALPHA,
collection='${full_collection_name}',
command=command,
resource_args=resource_args)
if __name__ == '__main__':
test_case.main()

View File

@@ -0,0 +1,161 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the interactive gcloud debugger.
Contains things like:
- Common imports pre-imported
- Easy utility wrappers
- Pre-initialized API clients
That make interactive debugging with gcloud a dream.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import code
# `site` initializes the interactive mode (defines `exit`/`quit`, sets up
# copyright notice, etc.).
import site # pylint: disable=unused-import
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.api_lib.util import apis_internal
from googlecloudsdk.generated_clients.apis import apis_map
################################################################################
# Consoles: Infrastructure for `gcloud meta debug`
################################################################################
_BANNER = r"""
_ _ _ _ _ _ _
/ \ / \ / \ / \ / \ / \ / \
( W ) ( E ) ( L ) ( C ) ( O ) ( M ) ( E )
\_/ \_/ \_/ \_/ \_/ \_/ \_/
_____ ________)
|_ _|___ (, / /) /)
| | | . | /___, _ (/_ // ___ _
|_| |___| / (_(_/_) (_(_(/_(_)(_(_/_)_
(_/
_ _ _ _
| | | | | | | |
__ _ ___| | ___ _ _ __| | __| | ___| |__ _ _ __ _ __ _ ___ _ __
/ _` |/ __| |/ _ \| | | |/ _` | / _` |/ _ \ '_ \| | | |/ _` |/ _` |/ _ \ '__|
| (_| | (__| | (_) | |_| | (_| | | (_| | __/ |_) | |_| | (_| | (_| | __/ |
\__, |\___|_|\___/ \__,_|\__,_| \__,_|\___|_.__/ \__,_|\__, |\__, |\___|_|
__/ | __/ | __/ |
|___/ |___/ |___/
"""
def _PythonConsole():
  """Run a console based on the built-in code.InteractiveConsole."""
  try:
    # pylint: disable=g-import-not-at-top
    import readline
    import rlcompleter
    # pylint: enable=g-import-not-at-top
  except ImportError:
    # readline is unavailable on some platforms; run without tab completion.
    pass
  else:
    # Wire tab completion over this module's globals (the pre-loaded helpers).
    readline.set_completer(rlcompleter.Completer(globals()).complete)
    readline.parse_and_bind('tab: complete')
  console = code.InteractiveConsole(globals())
  console.interact(_BANNER)
def _PdbConsole():
  """Run a console based on the built-in pdb."""
  # Imported lazily so the debugger module only loads when this mode is used.
  import pdb  # pylint: disable=g-import-not-at-top
  pdb.set_trace()
def _IpdbConsole():
  """Run a console based on IPython's ipdb."""
  try:
    import ipdb  # pylint: disable=g-import-not-at-top
    ipdb.set_trace()
  except ImportError:
    # `log` is imported near the bottom of this module; it is bound by the
    # time any console function is called.
    log.error('Could not start the ipdb debugger. Please ensure that it is '
              'installed, or try the default debugger with `--mode=python`.')
def _PudbConsole():
  """Run a console based on PuDB."""
  try:
    import pudb  # pylint: disable=g-import-not-at-top
    pudb.set_trace()
  except ImportError:
    # `log` is imported near the bottom of this module; it is bound by the
    # time any console function is called.
    log.error('Could not start the PuDB debugger. Please ensure that it is '
              'installed, or try the default debugger with `--mode=python`.')
# Maps the debug console mode name (see the `--mode=...` references in the
# error messages above) to the function that launches it.
CONSOLES = {
    'python': _PythonConsole,
    'pdb': _PdbConsole,
    'ipdb': _IpdbConsole,
    'pudb': _PudbConsole,
}
################################################################################
# Common Cloud SDK imports
################################################################################
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
from googlecloudsdk.core import log # pylint: disable=unused-import
from googlecloudsdk.core import properties # pylint: disable=unused-import
from googlecloudsdk.core.console import console_io # pylint: disable=unused-import
from googlecloudsdk.core.util import files # pylint: disable=unused-import
# pylint: enable=g-import-not-at-top
# pylint: enable=g-bad-import-order
################################################################################
# Pre-initialized API clients
################################################################################
def LoadApis():
  """Populate the global module namespace with API clients.

  Instantiates a client for the default version of every API in the generated
  API map and binds it to a module-level name equal to the API name, replacing
  the placeholder reprs installed at import time.
  """
  for api_name in apis_map.MAP:
    # pylint:disable=protected-access
    globals()[api_name] = apis.GetClientInstance(
        api_name, apis_internal._GetDefaultVersion(api_name))
def _PopulateApiNamesWithLoadMessage():
  """Make API names print instructions for loading the APIs when __repr__'ed.

  For example:

    >>> appengine
    Run `LoadApis()` to load all APIs, including this one.

  APIs are loaded lazily because it takes about a second to load all of them.
  """
  load_apis_message = (
      'Run `{0}()` to load all APIs, including this one.').format(
          LoadApis.__name__)

  class _LoadApisMessage(object):
    # Placeholder whose repr tells the user how to load the real client.

    def __repr__(self):
      return load_apis_message

  for api_name in apis_map.MAP:
    globals()[api_name] = _LoadApisMessage()


# Installed at import time so API names are meaningful immediately.
_PopulateApiNamesWithLoadMessage()

View File

@@ -0,0 +1,138 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for generating argument specifications for current implementations."""
from typing import Any, Dict
import uuid
from googlecloudsdk.calliope import cli_tree
# Keys emitted in the generated argument-specification dictionaries.
MUTEX = 'mutex'
HIDDEN = 'hidden'
NAME = 'name'
NODE_ID = 'node_id'
ARGUMENTS = 'arguments'
REQUIRED = 'required'
GROUP = 'group'
TYPE = 'type'
CHOICES = 'choices'
POSITIONAL = 'positional'
# argparse nargs values under which a positional argument is optional.
OPTIONAL_NARGS = (0, '?', '*', '...')
GLOBAL = 'global'
UNDERSCORE = '_'
HYPHEN = '-'
# Long ("--") and short ("-") flag name prefixes.
FLAG_PREFIX = HYPHEN * 2
SHORT_FLAG_PREFIX = HYPHEN
def GenerateArgumentSpecifications(command_node=None) -> Dict[str, Any]:
  """Generates the argument specifications for the calliope cli command node.

  Args:
    command_node: calliope command node cli object.

  Returns:
    The argument specifications for the command node, or None if the CLI tree
    command could not be built.
  """
  # NOTE(review): the falsiness check is on the cli_tree.Command result, not
  # the raw input -- confirm cli_tree.Command can return a falsy value.
  command_node = cli_tree.Command(command_node, None)
  if not command_node:
    return None
  argument_tree = {}
  args = _AddArgsToGroup(command_node.constraints)
  if args:
    # Omit the key entirely when there are no arguments.
    argument_tree[ARGUMENTS] = args
  return argument_tree
def _AddArgsToGroup(arguments):
  """Builds the spec list for every argument in the given group.

  Args:
    arguments: iterable: calliope objects representing the arguments group.

  Returns:
    The list of argument specs added to the group spec.
  """
  group_spec = []
  for arg in arguments.arguments:
    if arg.is_group:
      nested = {ARGUMENTS: []}
      # Only non-default boolean attributes are emitted.
      for key, present in ((MUTEX, arg.is_mutex),
                           (REQUIRED, arg.is_required),
                           (HIDDEN, arg.is_hidden)):
        if present:
          nested[key] = True
      nested[NODE_ID] = str(uuid.uuid4())
      nested[ARGUMENTS] = _AddArgsToGroup(arg)
      # Only retain non-empty arg groups.
      if nested[ARGUMENTS]:
        group_spec.append({GROUP: nested})
    elif arg.is_positional:
      group_spec.append(_GetPositionalSpec(arg))
    else:
      group_spec.append(_GetFlagSpec(arg))
  return group_spec
def _GetFlagSpec(flag):
  """Builds the spec dict describing a single flag.

  Args:
    flag: The calliope object representing the flag.

  Returns:
    The flag spec for the given flag.
  """
  # Strip the leading '--' (or '-' for short flags) before normalizing the
  # name to hyphen-delimited form.
  raw_name = flag.name
  for prefix in (FLAG_PREFIX, SHORT_FLAG_PREFIX):
    if raw_name.startswith(prefix):
      raw_name = raw_name[len(prefix):]
      break
  spec = {
      NAME: raw_name.replace(UNDERSCORE, HYPHEN),
      TYPE: flag.type,
      REQUIRED: flag.is_required,
  }
  if flag.is_global:
    spec[GLOBAL] = True
  if flag.choices:
    spec[CHOICES] = list(flag.choices)
  spec[NODE_ID] = str(uuid.uuid4())
  return spec
def _GetPositionalSpec(positional):
  """Builds the spec dict describing a single positional argument.

  Args:
    positional: The calliope object representing the positional.

  Returns:
    The positional spec for the given positional.
  """
  spec = {
      NAME: positional.name.upper().replace(HYPHEN, UNDERSCORE),
      POSITIONAL: True,
  }
  # REQUIRED is only emitted when it differs from the default (False).
  if positional.nargs not in OPTIONAL_NARGS:
    spec[REQUIRED] = True
  spec[NODE_ID] = str(uuid.uuid4())
  return spec

View File

@@ -0,0 +1,260 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for meta generate-command.
Contains utilities for file writing and template selection.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os.path
import re
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import resources
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import files
from mako import runtime
from mako import template
# Suffix shared by every command template file name.
TEMPLATE_SUFFIX = '_template.tpl'
# Templates for the CRUD command types that `meta generate-command` supports.
CRUD_TEMPLATES = frozenset({
    'create_template.tpl', 'delete_template.tpl', 'describe_template.tpl',
    'get_iam_policy_template.tpl', 'list_template.tpl',
    'set_iam_policy_template.tpl'
})
class CollectionNotFoundError(core_exceptions.Error):
  """Exception for attempts to generate unsupported commands."""

  def __init__(self, collection):
    super(CollectionNotFoundError, self).__init__(
        '{collection} collection is not found'.format(collection=collection))
def WriteAllYaml(collection_name, output_dir):
  """Writes declarative YAML files for all supported command types.

  Args:
    collection_name: name of collection to generate commands for.
    output_dir: path to the directory where generated YAML files will be
      written.
  """
  collection_dict = _MakeCollectionDict(collection_name)
  api_message_module = apis.GetMessagesModule(collection_dict['api_name'],
                                              collection_dict['api_version'])
  collection_dict.update(_MakeApiDict(api_message_module, collection_dict))
  templates_dir = os.path.join(os.path.dirname(__file__), 'command_templates')
  for command_template in os.listdir(templates_dir):
    # os.listdir yields bare file names, so basename is a defensive no-op
    # here; unlike the previous split('/'), it is correct on all platforms.
    if os.path.basename(command_template) not in CRUD_TEMPLATES:
      continue
    should_write_test = WriteYaml(command_template, collection_dict, output_dir,
                                  api_message_module)
    if should_write_test:
      WriteScenarioTest(command_template, collection_dict, output_dir)
def WriteYaml(command_tpl_name, collection_dict, output_dir,
              api_message_module):
  """Writes command's YAML file; returns True if file written, else False.

  Args:
    command_tpl_name: name of command template file
    collection_dict: a mapping of collection info to feed template
    output_dir: path to directory in which to write YAML file. If command YAML
      file already exists in this location, the user will be prompted to
      choose to override it or not.
    api_message_module: the API's message module, used to check if command
      type is supported by API

  Returns:
    True if declarative file is written, False if user chooses not to
    override an existing file OR API does not support command type, and no
    new file is written.
  """
  command_name = command_tpl_name[:-len(TEMPLATE_SUFFIX)]
  # 'describe' commands correspond to 'Get' request messages.
  if command_name == 'describe':
    command_name_capitalized = 'Get'
  else:
    command_name_capitalized = ''.join(
        word.capitalize() for word in command_name.split('_'))
  collection_prefix = ''.join(
      _GetResourceMessageClassName(word)
      for word in collection_dict['collection_name'].split('.'))
  expected_message_name = collection_prefix + command_name_capitalized + 'Request'
  # Some APIs use 'Insert' instead of 'Create' for the create request message.
  # NOTE(review): as in the original implementation, the Insert fallback marks
  # *any* command type as supported, not just 'create' — confirm intent.
  alt_create_message_name = collection_prefix + 'InsertRequest'
  # Direct attribute checks replace the previous full scan over dir(...).
  # Note: APIs with nonstandard naming may not have all commands created.
  command_supported = (
      hasattr(api_message_module, expected_message_name) or
      hasattr(api_message_module, alt_create_message_name))
  command_yaml_tpl = _TemplateFileForCommandPath(command_tpl_name)
  command_filename = command_name + '.yaml'
  full_command_path = os.path.join(output_dir, command_filename)
  file_already_exists = os.path.exists(full_command_path)
  overwrite = False
  if file_already_exists:
    overwrite = console_io.PromptContinue(
        default=False,
        throw_if_unattended=True,
        message='{command_filename} already exists, and continuing will '
        'overwrite the old file. The scenario test skeleton file for this '
        'command will only be generated if you continue'.format(
            command_filename=command_filename))
  if (not file_already_exists or overwrite) and command_supported:
    with files.FileWriter(full_command_path) as f:
      command_yaml_tpl.render_context(runtime.Context(f, **collection_dict))
    log.status.Print('New file written at ' + full_command_path)
    return True
  log.status.Print('No new file written at ' + full_command_path)
  return False
def WriteScenarioTest(command_tpl_name, collection_dict, test_output_dir):
  """Writes the scenario test skeleton YAML file for a command.

  Args:
    command_tpl_name: name of command template file
    collection_dict: a mapping of collection info to feed template
    test_output_dir: path to directory in which to write YAML test file
  """
  test_tpl = _TemplateFileForCommandPath(
      'scenario_unit_test_template.tpl', test=True)
  base_name = command_tpl_name[:-len(TEMPLATE_SUFFIX)]
  full_test_path = os.path.join(test_output_dir, base_name + '.scenario.yaml')
  with files.FileWriter(full_test_path) as f:
    test_tpl.render_context(runtime.Context(f, **collection_dict))
  log.status.Print('New test written at ' + full_test_path)
def _TemplateFileForCommandPath(command_template_filename, test=False):
  """Returns the Mako template corresponding to command_template_filename.

  Args:
    command_template_filename: name of file containing template (no path).
    test: if the template file should be a test file, defaults to False.
  """
  template_dir = 'test_templates' if test else 'command_templates'
  template_path = os.path.join(
      os.path.dirname(__file__), template_dir, command_template_filename)
  return template.Template(filename=template_path)
def _MakeSingular(plural_noun):
"""Returns singular of plural noun.
Args:
plural_noun: noun, str, to make .
"""
return plural_noun[:-1]
def _GetReleaseTracks(api_version):
"""Returns a string representation of release tracks.
Args:
api_version: API version to generate release tracks for.
"""
if 'alpha' in api_version:
return '[ALPHA]'
elif 'beta' in api_version:
return '[ALPHA, BETA]'
else:
return '[ALPHA, BETA, GA]'
def _MakeCollectionDict(collection_name):
  """Returns a dictionary of collection attributes from the resource Registry.

  Args:
    collection_name: Name of collection to create dictionary about.
  """
  collection_info = resources.REGISTRY.GetCollectionInfo(collection_name)
  collection_dict = {}
  # Fix: 'collection_name' was previously assigned twice; set it once.
  collection_dict['collection_name'] = collection_name
  collection_dict['api_name'] = collection_info.api_name
  collection_dict['uppercase_api_name'] = collection_info.api_name.capitalize()
  # Relative names are only usable when the API exposes flat paths.
  collection_dict['use_relative_name'] = (
      'true' if collection_info.flat_paths else 'false')
  collection_dict['api_version'] = collection_info.api_version
  collection_dict['release_tracks'] = _GetReleaseTracks(
      collection_info.api_version)
  collection_dict['plural_resource_name'] = collection_info.name.split('.')[-1]
  collection_dict['singular_name'] = _MakeSingular(
      collection_dict['plural_resource_name'])
  collection_dict['flags'] = ' '.join(
      '--{0}=my-{0}'.format(param)
      for param in collection_info.params
      if param not in (collection_dict['singular_name'], 'project'))
  # The following is a best guess at desired parent for list command scope.
  collection_dict['parent'] = (
      'location' if 'location' in collection_name else 'project')
  return collection_dict
def _MakeApiDict(message_module, collection_dict):
  """Returns a dictionary of API attributes from its messages module.

  Args:
    message_module: the messages module for the API (default version)
    collection_dict: a dictionary containing collection info from registry
  """
  api_dict = {}
  # Hoisted: the class name was previously computed up to three times.
  message_class_name = _GetResourceMessageClassName(
      collection_dict['singular_name'])
  try:
    resource_message = getattr(message_module, message_class_name)
    # Plain attribute access replaces the previous field.__dict__['name'].
    args = [
        field.name
        for field in resource_message.all_fields()
        if field.name != 'name'
    ]
    # dict is { camelCaseName: camel-case-name }
    api_dict['create_args'] = {
        arg:
        '-'.join(w.lower() for w in re.findall('^[a-z]*|[A-Z][a-z]*', arg))
        for arg in args
    }
  except AttributeError:
    api_dict['create_args'] = {}
    log.status.Print('Cannot find ' + message_class_name +
                     ' in message module.')
  return api_dict
def _GetResourceMessageClassName(singular_name):
"""Returns the properly capitalized resource class name."""
resource_name = singular_name.strip()
if len(resource_name) > 1:
return resource_name[0].upper() + resource_name[1:]
return resource_name.capitalize()

View File

@@ -0,0 +1,402 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for meta generate-config-commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
import os.path
from googlecloudsdk.core import branding
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import name_parsing
from googlecloudsdk.core import resources
from googlecloudsdk.core.util import files
from mako import runtime
from mako import template
# Path components (relative to the output root) under which generated command,
# surface spec, and unit test files are written, respectively.
_COMMAND_PATH_COMPONENTS = ('third_party', 'py', 'googlecloudsdk', 'surface')
_SPEC_PATH_COMPONENTS = ('cloud', 'sdk', 'surface_specs', 'gcloud')
_TEST_PATH_COMPONENTS = ('third_party', 'py', 'googlecloudsdk', 'tests', 'unit',
                         'surface')
class CollectionNotFoundError(core_exceptions.Error):
  """Exception for attempts to generate unsupported commands."""

  def __init__(self, collection):
    super(CollectionNotFoundError, self).__init__(
        '{collection} collection is not found'.format(collection=collection))
def WriteConfigYaml(collection, output_root, resource_data, release_tracks,
                    enable_overwrites):
  """Writes <command|spec|test> declarative command files for collection.

  Args:
    collection: Name of collection to generate commands for.
    output_root: Path to the root of the directory. Should just be $PWD when
      executing the `meta generate-config-commands` command.
    resource_data: Resource map data for the given resource.
    release_tracks: Release tracks to generate files for.
    enable_overwrites: True to enable overwriting of existing config export
      files.
  """
  log.status.Print('[{}]:'.format(collection))
  collection_info = resources.REGISTRY.GetCollectionInfo(collection)
  # Render order: surface specs (GROUP.yaml + export.yaml), the command group
  # __init__.py, the command YAML itself, and finally the unit test files.
  _RenderSurfaceSpecFiles(output_root, resource_data,
                          collection_info, release_tracks, enable_overwrites)
  _RenderCommandGroupInitFile(output_root, resource_data,
                              collection_info, release_tracks,
                              enable_overwrites)
  _RenderCommandFile(output_root, resource_data, collection_info,
                     release_tracks, enable_overwrites)
  _RenderTestFiles(output_root, resource_data, collection_info,
                   enable_overwrites)
def _RenderFile(file_path, file_template, context, enable_overwrites):
  """Renders a file to given path using the provided template and context."""
  already_exists = os.path.exists(file_path)
  if already_exists and not enable_overwrites:
    log.status.Print(' >> Skipped: File: [{}] --'.format(file_path))
    return
  # At this point we either create a new file or overwrite an existing one;
  # `already_exists` therefore doubles as the overwrite indicator.
  log.status.Print(' -- Generating: File: [{}], Overwrite: [{}]'.format(
      file_path, already_exists))
  with files.FileWriter(file_path, create_path=True) as f:
    file_template.render_context(runtime.Context(f, **context))
def _WriteFile(file_path, file_contents, enable_overwrites):
  """Writes contents to file_path unless it exists and overwrites are off."""
  if os.path.exists(file_path) and not enable_overwrites:
    return
  with files.FileWriter(file_path, create_path=True) as f:
    f.write(file_contents)
def _BuildFilePath(output_root, sdk_path, home_directory, *argv):
path_args = (output_root,) + sdk_path + tuple(
home_directory.split('.')) + tuple(
path_component for path_component in argv)
file_path = os.path.join(*path_args)
return file_path
def _BuildTemplate(template_file_name):
  """Loads the named Mako template from config_export_templates."""
  template_path = os.path.join(
      os.path.dirname(__file__), 'config_export_templates', template_file_name)
  return template.Template(filename=template_path)
def _RenderCommandGroupInitFile(output_root, resource_data, collection_info,
                                release_tracks, enable_overwrites):
  """Renders the config command group __init__.py for the resource."""
  context = _BuildCommandGroupInitContext(collection_info, release_tracks,
                                          resource_data)
  file_path = _BuildFilePath(output_root, _COMMAND_PATH_COMPONENTS,
                             resource_data.home_directory, 'config',
                             '__init__.py')
  _RenderFile(file_path, _BuildTemplate('command_group_init_template.tpl'),
              context, enable_overwrites)
def _RenderCommandFile(output_root, resource_data, collection_info,
                       release_tracks, enable_overwrites):
  """Renders the config export.yaml command file for the resource."""
  context = _BuildCommandContext(collection_info, release_tracks, resource_data)
  file_path = _BuildFilePath(output_root, _COMMAND_PATH_COMPONENTS,
                             resource_data.home_directory, 'config',
                             'export.yaml')
  _RenderFile(file_path, _BuildTemplate('command_template.tpl'), context,
              enable_overwrites)
def _RenderSurfaceSpecFiles(output_root, resource_data, collection_info,
                            release_tracks, enable_overwrites):
  """Render surface spec files (both GROUP.yaml and command spec file.)"""
  context = _BuildSurfaceSpecContext(collection_info, release_tracks,
                                     resource_data)
  # GROUP.yaml first, then the export.yaml command spec.
  for tpl_name, file_name in (
      ('surface_spec_group_template.tpl', 'GROUP.yaml'),
      ('surface_spec_template.tpl', 'export.yaml')):
    file_path = _BuildFilePath(output_root, _SPEC_PATH_COMPONENTS,
                               resource_data.home_directory, 'config',
                               file_name)
    _RenderFile(file_path, _BuildTemplate(tpl_name), context,
                enable_overwrites)
def _RenderTestFiles(output_root, resource_data, collection_info,
                     enable_overwrites):
  """Render python test file using template and context."""
  context = _BuildTestContext(collection_info, resource_data)
  # Blank package __init__.py first, then the config export unit test.
  for tpl_name, file_name in (
      ('python_blank_init_template.tpl', '__init__.py'),
      ('unit_test_template.tpl', 'config_export_test.py')):
    file_path = _BuildFilePath(output_root, _TEST_PATH_COMPONENTS,
                               resource_data.home_directory, file_name)
    _RenderFile(file_path, _BuildTemplate(tpl_name), context,
                enable_overwrites)
def _BuildCommandGroupInitContext(collection_info, release_tracks,
                                  resource_data):
  """Makes context dictionary for config init file template rendering.

  Args:
    collection_info: the registry collection info for the resource.
    release_tracks: release tracks to generate files for.
    resource_data: resource map data for the given resource.

  Returns:
    Dict of template context values for the command group __init__ file.
  """
  init_dict = {}
  init_dict['utf_encoding'] = '-*- coding: utf-8 -*- #'
  init_dict['current_year'] = datetime.datetime.now().year
  init_dict['branded_api_name'] = branding.Branding().get(
      collection_info.api_name, collection_info.api_name.capitalize())
  init_dict[
      'singular_resource_name_with_spaces'] = name_parsing.convert_collection_name_to_delimited(
          collection_info.name)
  # e.g. 'base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA'. join() replaces the
  # previous manual loop with its trailing-comma bookkeeping.
  init_dict['release_tracks'] = ', '.join(
      'base.ReleaseTrack.{}'.format(track.upper()) for track in release_tracks)
  if 'group_category' in resource_data:
    init_dict['group_category'] = resource_data.group_category
  return init_dict
def _BuildCommandContext(collection_info, release_tracks, resource_data):
  """Makes context dictionary for config export command template rendering."""
  singular_with_spaces = name_parsing.convert_collection_name_to_delimited(
      collection_info.name)
  plural_with_spaces = name_parsing.convert_collection_name_to_delimited(
      collection_info.name, make_singular=False)
  branded_api = branding.Branding().get(
      collection_info.api_name, collection_info.api_name.capitalize())
  command_dict = {
      # apiname.collectionNames
      'collection_name': collection_info.name,
      # Branded service name
      'branded_api_name': branded_api,
      'plural_resource_name_with_spaces': plural_with_spaces,
      'singular_name_with_spaces': singular_with_spaces,
      'singular_capitalized_name': singular_with_spaces.capitalize(),
  }
  if 'resource_spec_path' in resource_data:
    command_dict['resource_spec_path'] = resource_data.resource_spec_path
  else:
    # Derive the spec path from the home directory's top-level package.
    resource_spec_name = singular_with_spaces.replace(' ', '_')
    resource_spec_dir = resource_data.home_directory.split('.')[0]
    command_dict['resource_spec_path'] = '{}.resources:{}'.format(
        resource_spec_dir, resource_spec_name)
  # my-collection-name
  command_dict['resource_argument_name'] = _MakeResourceArgName(
      collection_info.name)
  command_dict['release_tracks'] = _GetReleaseTracks(release_tracks)
  # Choose "a" or "an" for correct grammar in generated help text.
  command_dict['api_a_or_an'] = 'an' if branded_api[0] in 'aeiou' else 'a'
  command_dict['resource_a_or_an'] = (
      'an' if singular_with_spaces[0] in 'aeiou' else 'a')
  return command_dict
def _BuildSurfaceSpecContext(collection_info, release_tracks, resource_data):
  """Makes context dictionary for surface spec rendering."""
  # Resource arg name precedence: explicit override, then the resource spec
  # path's trailing segment, then a name derived from the collection.
  if 'surface_spec_resource_name' in resource_data:
    resource_arg = resource_data.surface_spec_resource_name
  elif 'resource_spec_path' in resource_data:
    resource_arg = resource_data.resource_spec_path.split(':')[-1].upper()
  else:
    resource_arg = _MakeSurfaceSpecResourceArg(collection_info)
  return {
      'release_tracks': _GetReleaseTracks(release_tracks),
      'surface_spec_resource_arg': resource_arg,
  }
def _BuildTestContext(collection_info, resource_data):
  """Makes context dictionary for config export test files rendering."""
  test_dict = {}
  test_dict['utf_encoding'] = '-*- coding: utf-8 -*- #'
  test_dict['current_year'] = datetime.datetime.now().year
  arg_positional = _MakeResourceArgName(collection_info.name)
  arg_flags = _MakeResourceArgFlags(collection_info, resource_data)
  test_dict['test_command_arguments'] = ' '.join([arg_positional, arg_flags])
  # Long generated argument lines need a pylint suppression in the test file.
  test_dict['pylint_disable'] = (
      ' # pylint:disable=line-too-long'
      if len(test_dict['test_command_arguments']) > 56 else '')
  test_dict['full_collection_name'] = '.'.join(
      [collection_info.api_name, collection_info.name])
  test_dict['test_command_string'] = _MakeTestCommandString(
      resource_data.home_directory)
  return test_dict
def _GetReleaseTracks(release_tracks):
"""Returns a string representation of release tracks.
Args:
release_tracks: API versions to generate release tracks for.
"""
release_tracks_normalized = '[{}]'.format(', '.join(
[track.upper() for track in sorted(release_tracks)]))
return release_tracks_normalized
def _MakeSurfaceSpecResourceArg(collection_info):
  """Makes resource arg name for surface specification context."""
  delimited = name_parsing.convert_collection_name_to_delimited(
      collection_info.name, delimiter='_')
  return delimited.upper()
def _MakeTestCommandString(home_directory):
"""Makes gcloud command string for test execution."""
return '{} config export'.format(
home_directory.replace('_', '-').replace('.', ' '))
def _MakeResourceArgName(collection_name):
  """Returns the positional test arg value, e.g. 'my-resource-name'."""
  delimited = name_parsing.convert_collection_name_to_delimited(
      collection_name, delimiter='-')
  return 'my-{}'.format(delimited)
def _MakeResourceArgFlags(collection_info, resource_data):
  """Makes input resource arg flags for config export test file.

  Args:
    collection_info: registry collection info for the resource; its
      flat_paths or params supply the flag names.
    resource_data: resource map data for the resource; may supply
      `resource_attribute_renames` overrides.

  Returns:
    A single space-delimited string of '--param=my-param' style flags.
  """
  resource_arg_flags = []
  if getattr(collection_info, 'flat_paths'):
    # Path components will generally be stored in the '' key of flat_paths dict.
    if '' in getattr(collection_info, 'flat_paths', None):
      components = collection_info.flat_paths[''].split('/')
      # Remove surrounding brackets and 'Id' suffix from path component
      resource_arg_flag_names = [
          component.replace('{', '').replace('Id}', '')
          for component in components
          if '{' in component
      ]
      # Remove project component as this isn't needed to specify test args.
      filtered_resource_arg_flag_names = [
          resource_arg for resource_arg in resource_arg_flag_names
          if 'project' not in resource_arg
      ]
      # Get parent components, convert from camelcase to dash delimited
      # e.g. fooBar -> foo-bar
      # NOTE: [:-1] drops the final component, presumably the resource's own
      # name, which is supplied positionally rather than as a flag.
      formatted_resource_arg_flag_names = []
      for resource_arg in filtered_resource_arg_flag_names[:-1]:
        formatted_name = name_parsing.split_name_on_capitals(
            name_parsing.singularize(resource_arg),
            delimiter='-').lower()
        formatted_resource_arg_flag_names.append(formatted_name)
      # Override component name using `resource_attribute_renames` field of
      # declarative map if specified.
      if 'resource_attribute_renames' in resource_data:
        for original_attr_name, new_attr_name in resource_data.resource_attribute_renames.items(
        ):
          for x in range(len(formatted_resource_arg_flag_names)):
            if formatted_resource_arg_flag_names[x] == original_attr_name:
              formatted_resource_arg_flag_names[x] = new_attr_name
      # Format components into command string for unit tests.
      resource_arg_flags = [
          '--{param}=my-{param}'.format(param=resource_arg)
          for resource_arg in formatted_resource_arg_flag_names
      ]
  elif getattr(collection_info, 'params', None):
    for param in collection_info.params:
      modified_param_name = param
      # Remove 'Id' suffix.
      if modified_param_name[-2:] == 'Id':
        modified_param_name = modified_param_name[:-2]
      # Convert component name from camelCase to dash delimited
      # e.g. fooBar -> foo-bar
      modified_param_name = name_parsing.convert_collection_name_to_delimited(
          modified_param_name, delimiter='-', make_singular=False)
      # If component name is not positional resource name, `project`, or `name`
      # format for unit test.
      if (modified_param_name
          not in (name_parsing.convert_collection_name_to_delimited(
              collection_info.name, delimiter='-'), 'project', 'name')):
        resource_arg = '--{param}=my-{param}'.format(param=modified_param_name)
        resource_arg_flags.append(resource_arg)
  return ' '.join(resource_arg_flags)

View File

@@ -0,0 +1,395 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for gcloud help document differences."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import contextlib
import os
import shutil
import time
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.util import files as file_utils
from googlecloudsdk.core.util import parallel
from googlecloudsdk.core.util import text
import six
# Max number of test changes to display.
TEST_CHANGES_DISPLAY_MAX = 32
class Error(exceptions.Error):
  """Base exception for all errors raised by this module."""
class HelpUpdateError(Error):
  """Raised when updating the help document directory fails."""
def IsOwnersFile(path):
  """Returns True if path's base name is exactly 'OWNERS'."""
  base_name = os.path.basename(path)
  return base_name == 'OWNERS'
def GetFileContents(file):
  """Returns the file contents and whether or not the file contains binary data.

  Args:
    file: A file path.

  Returns:
    A tuple of the file contents and whether or not the file contains binary
    contents.
  """
  # Try a text read first; fall back to raw bytes when decoding fails.
  try:
    return file_utils.ReadFileContents(file), False
  except UnicodeError:
    return file_utils.ReadBinaryFileContents(file), True
def GetDirFilesRecursive(directory):
  """Generates the set of all files in directory and its children recursively.

  Args:
    directory: The directory path name.

  Returns:
    A set of all files in directory and its children recursively, relative to
    the directory.
  """
  return {
      os.path.relpath(os.path.join(dirpath, name), directory)
      for dirpath, _, names in os.walk(six.text_type(directory))
      for name in names
  }
@contextlib.contextmanager
def TimeIt(message):
  """Context manager to track progress and time blocks of code."""
  with progress_tracker.ProgressTracker(message, autotick=True):
    start_time = time.time()
    yield
    # Only reached (and timed) when the wrapped block exits cleanly.
    log.status.Print(
        '{} took {} seconds'.format(message, time.time() - start_time))
class DiffAccumulator(object):
  """Base accumulator of DirDiff() differences.

  Counts changes by default; subclasses may override the hooks below to
  filter files, record richer change data, or validate file contents.
  """

  def __init__(self):
    self._changes = 0

  # pylint: disable=unused-argument
  def Ignore(self, relative_file):
    """Checks if relative_file should be ignored by DirDiff().

    Args:
      relative_file: A relative file path name to be checked.

    Returns:
      True if path is to be ignored in the directory differences.
    """
    return False

  # pylint: disable=unused-argument
  def AddChange(self, op, relative_file, old_contents=None, new_contents=None):
    """Called for each file difference.

    AddChange() can construct the {'add', 'delete', 'edit'} file operations
    that convert old_dir to match new_dir. Directory differences are ignored.
    This base implementation counts the number of changes.

    Args:
      op: The change operation string:
        'add': relative_file is not in old_dir.
        'delete': relative_file is not in new_dir.
        'edit': relative_file is different in new_dir.
      relative_file: The old_dir and new_dir relative path name of a file that
        changed.
      old_contents: The old file contents.
      new_contents: The new file contents.

    Returns:
      A prune value. If non-zero then DirDiff() returns immediately with that
      value.
    """
    self._changes += 1
    return None

  def GetChanges(self):
    """Returns the accumulated changes."""
    return self._changes

  def Validate(self, relative_file, contents):
    """Called for each file for content validation.

    Args:
      relative_file: The old_dir and new_dir relative path name of an existing
        file.
      contents: The file contents string.
    """
    pass
def DirDiff(old_dir, new_dir, diff):
  """Calls diff.AddChange(op, file) on files that changed from old_dir new_dir.

  diff.AddChange() can construct the {'add', 'delete', 'edit'} file operations
  that convert old_dir to match new_dir. Directory differences are ignored.

  Args:
    old_dir: The old directory path name.
    new_dir: The new directory path name.
    diff: A DiffAccumulator instance.

  Returns:
    The return value of the first diff.AddChange() call that returns non-zero
    or None if all diff.AddChange() calls returned zero.
  """
  with TimeIt('GetDirFilesRecursive new files'):
    new_files = GetDirFilesRecursive(new_dir)
  with TimeIt('GetDirFilesRecursive old files'):
    old_files = GetDirFilesRecursive(old_dir)

  def _FileDiff(file):
    """Diffs a file in new_dir and old_dir.

    Returns an (op, file, old_contents, new_contents) tuple, or None when the
    file is unchanged. Runs on a worker; diff.Validate is invoked here too.
    """
    new_contents, new_binary = GetFileContents(os.path.join(new_dir, file))
    if not new_binary:
      diff.Validate(file, new_contents)
    if file in old_files:
      old_contents, old_binary = GetFileContents(os.path.join(old_dir, file))
      if old_binary == new_binary and old_contents == new_contents:
        return
      return 'edit', file, old_contents, new_contents
    else:
      return 'add', file, None, new_contents

  # Fan per-file diffs out to a worker pool, but collect results and call
  # diff.AddChange() serially in this thread so accumulators need no locking.
  with parallel.GetPool(16) as pool:
    results = []
    for file in new_files:
      if diff.Ignore(file):
        continue
      result = pool.ApplyAsync(_FileDiff, (file,))
      results.append(result)
    for result_future in results:
      result = result_future.Get()
      if result:
        op, file, old_contents, new_contents = result
        prune = diff.AddChange(op, file, old_contents, new_contents)
        if prune:
          return prune
  # Anything present only in old_dir is a deletion; no contents are read.
  for file in old_files:
    if diff.Ignore(file):
      continue
    if file not in new_files:
      prune = diff.AddChange('delete', file)
      if prune:
        return prune
  return None
class HelpAccumulator(DiffAccumulator):
  """Accumulates help document directory differences.

  Attributes:
    _changes: The list of DirDiff() (op, path) difference tuples.
    _restrict: The set of file path prefixes that the accumulator should be
      restricted to.
  """

  def __init__(self, restrict=None):
    super(HelpAccumulator, self).__init__()
    self._changes = []
    if restrict:
      # Dotted restrictions like 'gcloud.foo.bar' become OS paths 'foo/bar'
      # (the leading CLI name segment is dropped).
      self._restrict = {os.sep.join(r.split('.')[1:]) for r in restrict}
    else:
      self._restrict = {}

  def Ignore(self, relative_file):
    """Checks if relative_file should be ignored by DirDiff().

    Args:
      relative_file: A relative file path name to be checked.

    Returns:
      True if path is to be ignored in the directory differences.
    """
    if IsOwnersFile(relative_file):
      return True
    if not self._restrict:
      return False
    # Keep the file only when it matches, or lives under, a restriction.
    return not any(
        relative_file == item or relative_file.startswith(item + os.sep)
        for item in self._restrict)

  def AddChange(self, op, relative_file, old_contents=None, new_contents=None):
    """Adds a DirDiff() difference tuple to the list of changes.

    Args:
      op: The difference operation, one of {'add', 'delete', 'edit'}.
      relative_file: The relative path of a file that has changed.
      old_contents: The old file contents.
      new_contents: The new file contents.

    Returns:
      None which signals DirDiff() to continue.
    """
    self._changes.append((op, relative_file))
    return None
class HelpUpdater(object):
  """Updates the document directory to match the current CLI.

  Attributes:
    _cli: The Current CLI.
    _directory: The help document directory.
    _generator: The document generator.
    _hidden: Boolean indicating whether to update hidden commands.
    _test: Show but do not apply operations if True.
  """

  def __init__(self, cli, directory, generator, test=False, hidden=False):
    """Constructor.

    Args:
      cli: The Current CLI.
      directory: The help document directory.
      generator: An uninstantiated walker_util document generator.
      test: Show but do not apply operations if True.
      hidden: Boolean indicating whether the hidden commands should be used.

    Raises:
      HelpUpdateError: If the destination directory is not absolute.
    """
    if not os.path.isabs(directory):
      raise HelpUpdateError(
          'Destination directory [%s] must be absolute.' % directory)
    self._cli = cli
    self._directory = directory
    self._generator = generator
    self._hidden = hidden
    self._test = test

  def _Update(self, restrict):
    """Update() helper method. Returns the number of changed help doc files."""
    with file_utils.TemporaryDirectory() as temp_dir:
      # Generate the current help docs into a scratch directory, then diff
      # that against the destination directory.
      pb = console_io.ProgressBar(label='Generating Help Document Files')
      with TimeIt('Creating walker'):
        walker = self._generator(
            self._cli, temp_dir, pb.SetProgress, restrict=restrict)
      start = time.time()
      pb.Start()
      # NOTE(review): hidden=True is passed unconditionally; the _hidden
      # attribute set in the constructor is not consulted here — confirm
      # that is intentional.
      walker.Walk(hidden=True)
      pb.Finish()
      elapsed_time = time.time() - start
      log.info(
          'Generating Help Document Files took {} seconds'.format(elapsed_time)
      )

      # Accumulate the add/edit/delete operations needed to bring the
      # destination directory in sync with the freshly generated docs.
      diff = HelpAccumulator(restrict=restrict)
      with TimeIt('Diffing'):
        DirDiff(self._directory, temp_dir, diff)
      ops = collections.defaultdict(list)
      changes = 0
      with TimeIt('Getting diffs'):
        for op, path in sorted(diff.GetChanges()):
          changes += 1
          # In test mode display at most TEST_CHANGES_DISPLAY_MAX changes.
          if not self._test or changes < TEST_CHANGES_DISPLAY_MAX:
            log.status.Print('{0} {1}'.format(op, path))
          ops[op].append(path)

      if self._test:
        # Test mode: report the change count but do not touch any files.
        if changes:
          if changes >= TEST_CHANGES_DISPLAY_MAX:
            log.status.Print('...')
          log.status.Print('{0} help text {1} changed'.format(
              changes, text.Pluralize(changes, 'file')))
        return changes

      with TimeIt('Updating destination files'):
        for op in ('add', 'edit', 'delete'):
          for path in ops[op]:
            dest_path = os.path.join(self._directory, path)
            if op in ('add', 'edit'):
              if op == 'add':
                # Create any missing intermediate directories for new files.
                subdir = os.path.dirname(dest_path)
                if subdir:
                  file_utils.MakeDir(subdir)
              temp_path = os.path.join(temp_dir, path)
              shutil.copyfile(temp_path, dest_path)
            elif op == 'delete':
              try:
                os.remove(dest_path)
              except OSError:
                # Best-effort delete: the file may already be gone.
                pass
      return changes

  def Update(self, restrict=None):
    """Updates the help document directory to match the current CLI.

    Args:
      restrict: Restricts the walk to the command/group dotted paths in this
        list. For example, restrict=['gcloud.alpha.test', 'gcloud.topic']
        restricts the walk to the 'gcloud topic' and 'gcloud alpha test'
        commands/groups.

    Raises:
      HelpUpdateError: If the destination directory does not exist, or if
        the underlying update fails with an IO/OS/System error.

    Returns:
      The number of changed help document files.
    """
    if not os.path.isdir(self._directory):
      raise HelpUpdateError(
          'Destination directory [%s] must exist and be searchable.' %
          self._directory)
    try:
      return self._Update(restrict)
    except (IOError, OSError, SystemError) as e:
      raise HelpUpdateError('Update failed: %s' % six.text_type(e))

  def GetDiffFiles(self, restrict=None):
    """Returns the sorted (op, path) changes for help files distinct from source."""
    # Same generation + diff flow as _Update(), but never modifies the
    # destination directory — it only reports the accumulated differences.
    with file_utils.TemporaryDirectory() as temp_dir:
      walker = self._generator(
          self._cli, temp_dir, None, restrict=restrict)
      walker.Walk(hidden=True)
      diff = HelpAccumulator(restrict=restrict)
      DirDiff(self._directory, temp_dir, diff)
      return sorted(diff.GetChanges())

View File

@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gcloud CLI tree lister module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import os
from googlecloudsdk.calliope import cli_tree
from googlecloudsdk.core import module_util
from googlecloudsdk.core.util import files
import six
def _ParameterizePath(path):
  """Returns path with a leading $HOME component shortened to ~."""
  home_prefix = files.GetHomeDir() + os.path.sep
  if not path.startswith(home_prefix):
    return path
  return '~' + os.path.sep + path[len(home_prefix):]
class CliTreeInfo(object):
  """Metadata describing a single CLI tree found on disk.

  A list of these objects is returned by ListAll().
  """

  def __init__(self, command, path, version, cli_version, command_installed,
               error):
    self.command = command  # The root command name of the CLI.
    self.path = path  # The file path of the tree (possibly ~-parameterized).
    self.version = version  # The tree format version, or 'UNKNOWN'.
    self.cli_version = cli_version  # The generating CLI version, or 'UNKNOWN'.
    self.command_installed = command_installed  # True if command is on PATH.
    self.error = error  # Load error message, '' if the tree loaded cleanly.
def ListAll(directory=None):
  """Returns the CliTreeInfo list of all available CLI trees.

  Args:
    directory: The config directory containing the CLI tree modules, or None
      to search only the default config and installation directories.

  Raises:
    CliTreeVersionError: loaded tree version mismatch
    ImportModuleError: import errors

  Returns:
    The list of CliTreeInfo objects for all CLI trees found.
  """
  # List all CLIs by searching directories in order. .py, .pyc, and .json
  # files are treated as CLI modules/data, where the file base name is the name
  # of the CLI root command.
  directories = [
      directory,  # Explicit caller override dir
      cli_tree.CliTreeConfigDir(),  # Config dir shared across installations
      cli_tree.CliTreeDir(),  # Installation dir controlled by the updater
  ]
  trees = []
  for directory in directories:
    if not directory or not os.path.exists(directory):
      continue
    for (dirpath, _, filenames) in os.walk(six.text_type(directory)):
      for filename in sorted(filenames):  # For stability across runs.
        base, extension = os.path.splitext(filename)
        if base == '__init__' or '.' in base:
          # Ignore Python droppings and names containing more than one dot.
          continue
        path = os.path.join(dirpath, filename)
        error = ''
        tree = None
        if extension in ('.py', '.pyc'):
          # Bug fix: initialize module before the import attempt. The
          # original referenced `module` after a failed import, which raised
          # UnboundLocalError on the first failure and silently reused the
          # previous iteration's module on later failures.
          module = None
          try:
            module = module_util.ImportPath(path)
          except module_util.ImportModuleError as e:
            error = six.text_type(e)
          # getattr(None, 'TREE', None) is None, covering both a failed
          # import and a module without a TREE attribute.
          tree = getattr(module, 'TREE', None)
        elif extension == '.json':
          try:
            tree = json.loads(files.ReadFileContents(path))
          except Exception as e:  # pylint: disable=broad-except, record all errors
            error = six.text_type(e)
        if tree:
          version = tree.get(cli_tree.LOOKUP_VERSION, 'UNKNOWN')
          cli_version = tree.get(cli_tree.LOOKUP_CLI_VERSION, 'UNKNOWN')
          # Release the (potentially large) tree dict as soon as the version
          # info has been extracted.
          del tree
        else:
          version = 'UNKNOWN'
          cli_version = 'UNKNOWN'
        trees.append(CliTreeInfo(
            command=base,
            path=_ParameterizePath(path),
            version=version,
            cli_version=cli_version,
            command_installed=bool(files.FindExecutableOnPath(base)),
            error=error))
      # Don't search subdirectories.
      break
  return trees

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the gcloud meta apis surface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class Error(exceptions.Error):
  """Base exception for errors raised by this module."""
  pass
class UnknownApi(Error):
  """Raised when the named api is not found."""
class ConfigFileError(Error):
  """Raised for errors involving a config file."""
  pass
class DiscoveryDocError(Error):
  """Raised for errors involving an API discovery document."""
  pass

View File

@@ -0,0 +1,132 @@
title: compute images get-iam-policy scenario test
release_tracks: [ALPHA, BETA, GA]
summary:
# This summary is generated automatically on update and should not be edited.
- execute:
- command: compute images get-iam-policy my-image
- stdout: |
etag: etag
- execute:
- command: compute images get-iam-policy my-image
- stdout: |
bindings:
- condition:
description: descr
expression: expr
title: title
members:
- user:oldtest@gmail.com
role: roles/non-primitive
etag: etag
version: 1
- execute:
- command: compute images get-iam-policy my-image --flatten=bindings[].members --filter=bindings.role:non-primitive
--format='value(bindings.members)'
- stdout: |
user:oldtest@gmail.com
actions:
- define_reference:
reference: api-version
track_values:
GA: v1
BETA: beta
ALPHA: alpha
- define_reference:
reference: query-params
value: alt=json&optionsRequestedPolicyVersion=3
- execute_command:
command: compute images get-iam-policy my-image
events:
- api_call:
expect_request:
uri: https://compute.googleapis.com/compute/$$api-version$$/projects/fake-project/global/images/my-image/getIamPolicy?$$query-params$$
method: GET
body: null
return_response:
headers:
status: '200'
body: |-
{
"etag": "etag",
"bindings": []
}
- expect_stdout: |
etag: etag
- expect_exit:
code: 0
- execute_command:
command: compute images get-iam-policy my-image
events:
- api_call:
expect_request:
uri: https://compute.googleapis.com/compute/$$api-version$$/projects/fake-project/global/images/my-image/getIamPolicy?$$query-params$$
method: GET
body: null
return_response:
headers:
status: '200'
body: |-
{
"version": 1,
"etag": "etag",
"bindings": [
{
"role": "roles/non-primitive",
"members": ["user:oldtest@gmail.com"],
"condition": {
"expression": "expr",
"title": "title",
"description": "descr"
}
}
]
}
- expect_stdout: |
bindings:
- condition:
description: descr
expression: expr
title: title
members:
- user:oldtest@gmail.com
role: roles/non-primitive
etag: etag
version: 1
- expect_exit:
code: 0
- execute_command:
command: compute images get-iam-policy my-image --flatten=bindings[].members --filter=bindings.role:non-primitive
--format='value(bindings.members)'
events:
- api_call:
expect_request:
uri: https://compute.googleapis.com/compute/$$api-version$$/projects/fake-project/global/images/my-image/getIamPolicy?$$query-params$$
method: GET
body: null
return_response:
headers:
status: '200'
body: |-
{
"version": 1,
"etag": "etag",
"bindings": [
{
"role": "roles/non-primitive",
"members": ["user:oldtest@gmail.com"],
"condition": {
"expression": "expr",
"title": "title",
"description": "descr"
}
}
]
}
- expect_stdout: |
user:oldtest@gmail.com
- expect_exit:
code: 0