feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for enabling service APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from typing import Optional
from googlecloudsdk.api_lib.services import enable_api
from googlecloudsdk.api_lib.services import exceptions
from googlecloudsdk.api_lib.util import api_enablement
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
def PromptToEnableApiIfDisabled(
    service_name: str, enable_by_default: Optional[bool] = False
):
  """Prompts to enable the API if it's not enabled.

  Args:
    service_name: The name of the service to enable.
    enable_by_default: default choice for the enablement prompt.
  """
  project_id = properties.VALUES.core.project.GetOrFail()
  try:
    if enable_api.IsServiceEnabled(project_id, service_name):
      # Nothing to do: the service is already enabled.
      return
    if not console_io.CanPrompt():
      # Non-interactive invocation: we cannot prompt, so only warn.
      log.warning(
          "Service {} is not enabled. This operation may not succeed.".format(
              service_name
          )
      )
      return
    api_enablement.PromptToEnableApi(
        project_id, service_name, enable_by_default=enable_by_default
    )
  except exceptions.GetServicePermissionDeniedException:
    # Best-effort check: without this permission we cannot verify enablement,
    # so record it at info level and proceed.
    log.info(
        "Could not verify if service {} is enabled: missing permission"
        " 'serviceusage.services.get'.".format(service_name)
    )

View File

@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for the CMEK and user-provided AR use cases."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
from typing import Any, Optional
from apitools.base.py import exceptions as http_exceptions
from googlecloudsdk.api_lib.functions.v1 import exceptions
from googlecloudsdk.calliope import exceptions as base_exceptions
from six.moves import http_client
_KMS_KEY_RE = re.compile(
r'^projects/[^/]+/locations/(?P<location>[^/]+)/keyRings/[a-zA-Z0-9_-]+'
'/cryptoKeys/[a-zA-Z0-9_-]+$'
)
_DOCKER_REPOSITORY_RE = re.compile(
r'^projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)'
'/repositories/[a-z]([a-z0-9-]*[a-z0-9])?$'
)
_DOCKER_REPOSITORY_DOCKER_FORMAT_RE = re.compile(
r'^(?P<location>.*)-docker.pkg.dev\/(?P<project>[^\/]+)\/(?P<repo>[^\/]+)'
)
# TODO: b/349194056 - Switch to alias annotations once allowed.
# Shorthand for the apitools HTTP error type (used by ProcessException below).
_HttpError = http_exceptions.HttpError
# TODO: b/349194056 - Define and use dedicated aliases for the common types.
def ValidateKMSKeyForFunction(kms_key: str, function_ref: Any) -> None:
  """Checks that the KMS key is compatible with the function.

  Args:
    kms_key: Fully qualified KMS key name.
    function_ref: Function resource reference.

  Raises:
    InvalidArgumentException: If the specified KMS key is not compatible with
      the function.
  """
  match = _KMS_KEY_RE.search(kms_key)
  if not match:
    # Names that do not match the expected format are not validated here.
    return
  keyring_location = match.group('location')
  if keyring_location == 'global':
    raise base_exceptions.InvalidArgumentException(
        '--kms-key', 'Global KMS keyrings are not allowed.'
    )
  if keyring_location != function_ref.locationsId:
    raise base_exceptions.InvalidArgumentException(
        '--kms-key',
        'KMS keyrings should be created in the same region as the function.',
    )
def ValidateDockerRepositoryForFunction(
    docker_repository: str, function_ref: Any
) -> None:
  """Checks that the Docker repository is compatible with the function.

  The repository may be given in resource-name format
  (projects/*/locations/*/repositories/*) or in Docker format
  (<location>-docker.pkg.dev/<project>/<repo>); it must be in the same
  project and location as the function.

  Args:
    docker_repository: Fully qualified Docker repository resource name.
    function_ref: Function resource reference.

  Raises:
    InvalidArgumentException: If the specified Docker repository is not
      compatible with the function.
  """
  if docker_repository is None:
    return
  function_project = function_ref.projectsId
  function_location = function_ref.locationsId

  repo_project = None
  repo_location = None
  repo_match = _DOCKER_REPOSITORY_RE.search(docker_repository)
  if repo_match:
    repo_project = repo_match.group('project')
    repo_location = repo_match.group('location')
  else:
    docker_format_match = _DOCKER_REPOSITORY_DOCKER_FORMAT_RE.search(
        docker_repository
    )
    if docker_format_match:
      repo_project = docker_format_match.group('project')
      repo_location = docker_format_match.group('location')
  # Only compare projects when both identifiers are the same kind (both
  # project IDs or both project numbers); a mixed comparison could flag two
  # references to the same project as different.
  if (
      repo_project
      and function_project != repo_project
      and function_project.isdigit() == repo_project.isdigit()
  ):
    # Fixed: the message used to render a stray '$' before the project name.
    raise base_exceptions.InvalidArgumentException(
        '--docker-repository',
        'Cross-project repositories are not supported: the repository should be'
        f' in `{function_project}`.',
    )
  if repo_location and function_location != repo_location:
    raise base_exceptions.InvalidArgumentException(
        '--docker-repository',
        'Cross-location repositories are not supported: the repository should'
        f' be in `{function_location}`.',
    )
def NormalizeDockerRepositoryFormat(
    docker_repository: str,
) -> Optional[str]:
  # Fixed return annotation: the original said `-> None` although the
  # function returns a string (or None for None input).
  """Normalizes the docker repository name to the standard resource format.

  Converts a Docker-format name (<location>-docker.pkg.dev/<project>/<repo>)
  to resource-name format (projects/*/locations/*/repositories/*). Names not
  in Docker format are returned unchanged.

  Args:
    docker_repository: Fully qualified Docker repository name, or None.

  Returns:
    The name in a standard format supported by the API, or None if
    docker_repository is None.
  """
  if docker_repository is None:
    return None
  match = _DOCKER_REPOSITORY_DOCKER_FORMAT_RE.search(docker_repository)
  if not match:
    return docker_repository
  return 'projects/{}/locations/{}/repositories/{}'.format(
      match.group('project'), match.group('location'), match.group('repo')
  )
def ProcessException(
    http_exception: _HttpError, kms_key: Optional[str]
) -> None:
  """Translates an opaque server error into a CMEK-specific hint.

  Args:
    http_exception: HTTP error received from the API.
    kms_key: Fully qualified KMS key name configured for the function, if any.

  Raises:
    FunctionsError: If a KMS key is configured and the server returned a 500.
  """
  is_server_error = (
      http_exception.status_code == http_client.INTERNAL_SERVER_ERROR
  )
  if not kms_key or not is_server_error:
    return
  # TODO(b/268523346): more specific user-friendly error messages for
  # CMEK-related error modes.
  raise exceptions.FunctionsError(
      'An error occurred. Ensure that the KMS key {kms_key} exists and the '
      'Cloud Functions service account has encrypter/decrypter permissions '
      '(roles/cloudkms.cryptoKeyEncrypterDecrypter) on the key. If you '
      'have recently made changes to the IAM config, wait a few minutes '
      'for the config to propagate and try again.'.format(kms_key=kms_key)
  )

View File

@@ -0,0 +1,190 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for working with secret environment variables and volumes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import re
from googlecloudsdk.command_lib.functions import secrets_config
import six
# Matches a fully qualified secret version resource name, e.g.
# projects/p/secrets/s/versions/1, capturing project, secret and version.
_SECRET_VERSION_RESOURCE_PATTERN = re.compile(
    '^projects/(?P<project>[^/]+)/secrets/(?P<secret>[^/]+)'
    '/versions/(?P<version>[^/]+)$'
)
def _GetSecretVersionResource(project, secret, version):
return 'projects/{project}/secrets/{secret}/versions/{version}'.format(
project=project or '*', secret=secret, version=version
)
def _CanonicalizedDict(secrets_dict):
  """Canonicalizes all keys in the dict and returns a new dict.

  Args:
    secrets_dict: Existing secrets configuration dict.

  Returns:
    Canonicalized secrets configuration dict, sorted by key.
  """
  canonicalized = {
      secrets_config.CanonicalizeKey(key): value
      for key, value in six.iteritems(secrets_dict)
  }
  return collections.OrderedDict(sorted(canonicalized.items()))
def GetSecretsAsDict(secret_env_vars, secret_volumes):
  """Converts secrets from message to flattened secrets configuration dict.

  Args:
    secret_env_vars: list of cloudfunctions_v1|v2alpha|v2beta.SecretEnvVars
    secret_volumes: list of cloudfunctions_v1|v2alpha|v2beta.SecretVolumes

  Returns:
    OrderedDict[str, str], Secrets configuration sorted ordered dict.
  """
  flattened = {}
  for env_var in secret_env_vars or []:
    flattened[env_var.key] = _GetSecretVersionResource(
        env_var.projectId, env_var.secret, env_var.version
    )
  for volume in secret_volumes or []:
    if volume.versions:
      # One '<mount_path>:<file_path>' entry per explicitly listed version.
      for version in volume.versions:
        flattened[volume.mountPath + ':' + version.path] = (
            _GetSecretVersionResource(
                volume.projectId, volume.secret, version.version
            )
        )
    else:
      # No explicit versions: map '<mount_path>:/<secret>' to 'latest'.
      flattened[volume.mountPath + ':/' + volume.secret] = (
          _GetSecretVersionResource(volume.projectId, volume.secret, 'latest')
      )
  return _CanonicalizedDict(flattened)
def _ParseSecretRef(secret_ref):
  """Splits a secret version resource into its components.

  Args:
    secret_ref: Secret version resource reference.

  Returns:
    A dict with entries for project, secret and version.
  """
  match = _SECRET_VERSION_RESOURCE_PATTERN.search(secret_ref)
  # NOTE(review): raises AttributeError on a malformed reference; presumably
  # callers pass pre-validated references — confirm before hardening.
  return match.groupdict()
def SecretEnvVarsToMessages(secret_env_vars_dict, messages):
  """Converts secrets from dict to cloud function SecretEnvVar message list.

  Args:
    secret_env_vars_dict: Secret environment variables configuration dict.
      Prefers a sorted ordered dict for consistency.
    messages: The GCF messages module to use.

  Returns:
    A list of cloud function SecretEnvVar message.
  """
  result = []
  for env_var_key, secret_version_ref in six.iteritems(secret_env_vars_dict):
    parsed = _ParseSecretRef(secret_version_ref)
    result.append(
        messages.SecretEnvVar(
            key=env_var_key,
            projectId=parsed['project'],
            secret=parsed['secret'],
            version=parsed['version'],
        )
    )
  return result
def SecretVolumesToMessages(secret_volumes, messages, normalize_for_v2=False):
  # type: (dict[str, str], ) -> (list[messages.SecretVolume])
  """Converts secrets from dict to cloud function SecretVolume message list.

  Args:
    secret_volumes: Secrets volumes configuration dict. Prefers a sorted ordered
      dict for consistency.
    messages: The GCF messages module to use.
    normalize_for_v2: If set, normalizes the SecretVolumes to the format the
      GCFv2 API expects.

  Returns:
    A list of Cloud Function SecretVolume messages.
  """
  secret_volumes_messages = []
  # Group the flat '<mount_path>:<file_path>' keys by mount path, so each
  # mount path yields exactly one SecretVolume message below.
  mount_path_to_secrets = collections.defaultdict(list)
  for secret_volume_key, secret_volume_value in secret_volumes.items():
    mount_path, secret_file_path = secret_volume_key.split(':', 1)
    if normalize_for_v2:
      # GCFv2 API doesn't accept a leading / in the secret file path.
      secret_file_path = re.sub(r'^/', '', secret_file_path)
    secret_ref = _ParseSecretRef(secret_volume_value)
    mount_path_to_secrets[mount_path].append({
        'path': secret_file_path,
        'project': secret_ref['project'],
        'secret': secret_ref['secret'],
        'version': secret_ref['version'],
    })
  # Sort by mount path for deterministic message order.
  for mount_path, secrets in sorted(six.iteritems(mount_path_to_secrets)):
    # NOTE(review): all entries under a mount path are assumed to share one
    # project and secret — only the first entry's values are used. Confirm
    # that mixed-secret mounts cannot occur upstream.
    project = secrets[0]['project']
    secret_value = secrets[0]['secret']
    versions = [
        messages.SecretVersion(path=secret['path'], version=secret['version'])
        for secret in secrets
    ]
    secret_volumes_messages.append(
        messages.SecretVolume(
            mountPath=mount_path,
            projectId=project,
            secret=secret_value,
            versions=versions,
        )
    )
  return secret_volumes_messages

View File

@@ -0,0 +1,250 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions resource transforms and symbols dict.
A resource transform function converts a JSON-serializable resource to a string
value. This module contains built-in transform functions that may be used in
resource projection and filter expressions.
NOTICE: Each TransformFoo() method is the implementation of a foo() transform
function. Even though the implementation here is in Python the usage in resource
projection and filter expressions is language agnostic. This affects the
Pythonicness of the Transform*() methods:
(1) The docstrings are used to generate external user documentation.
(2) The method prototypes are included in the documentation. In particular the
prototype formal parameter names are stylized for the documentation.
(3) The 'r', 'kwargs', and 'projection' args are not included in the external
documentation. Docstring descriptions, other than the Args: line for the
arg itself, should not mention these args. Assume the reader knows the
specific item the transform is being applied to. When in doubt refer to
the output of $ gcloud topic projections.
(4) The types of some args, like r, are not fixed until runtime. Other args
may have either a base type value or string representation of that type.
It is up to the transform implementation to silently do the string=>type
conversions. That's why you may see e.g. int(arg) in some of the methods.
(5) Unless it is documented to do so, a transform function must not raise any
exceptions related to the resource r. The `undefined' arg is used to
handle all unusual conditions, including ones that would raise exceptions.
Exceptions for arguments explicitly under the caller's control are OK.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.eventarc import types as trigger_types
# Human-readable generation labels surfaced in list output.
GEN_1 = '1st gen'
GEN_2 = '2nd gen'
# Inferred message formats: 'CloudFunction' is the v1 message type,
# 'Function' the v2 message type (see _InferFunctionMessageFormat).
CLOUD_FUNCTION = 'CloudFunction'
FUNCTION = 'Function'
def _TransformState(data, undefined=''):
"""Returns textual information about functions state.
Args:
data: JSON-serializable object.
undefined: Returns this value if the resource cannot be formatted.
Returns:
str containing information about the functions state.
"""
if not isinstance(data, dict):
return undefined
if 'status' in data:
return data['status']
if 'state' in data:
return data['state']
return undefined
def _TransformTrigger(data, undefined=''):
  """Returns textual information about functions trigger.

  Args:
    data: JSON-serializable 1st and 2nd gen Functions objects.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    str containing information about functions trigger.
  """
  data_type = _InferFunctionMessageFormat(data)
  if data_type == CLOUD_FUNCTION:
    # CloudFunction message: the trigger kind is a dedicated top-level field.
    if 'httpsTrigger' in data:
      return 'HTTP Trigger'
    if 'gcsTrigger' in data:
      return 'bucket: ' + data['gcsTrigger']
    if 'pubsubTrigger' in data:
      # Show only the short topic name, not the full resource path.
      return 'topic: ' + data['pubsubTrigger'].split('/')[-1]
    if 'eventTrigger' in data:
      return 'Event Trigger'
    return undefined
  elif data_type == FUNCTION:
    if 'eventTrigger' in data:
      event_trigger = data['eventTrigger']
      event_type = event_trigger.get('eventType')
      if trigger_types.IsAuditLogType(event_type):
        return 'Cloud Audit Log'
      elif trigger_types.IsStorageType(event_type):
        # For storage triggers, surface the bucket from the event filters.
        event_filters = event_trigger['eventFilters']
        bucket = next(
            (
                f.get('value')
                for f in event_filters
                if f.get('attribute') == 'bucket'
            ),
            None,
        )
        if bucket:
          return 'bucket: ' + bucket
      if 'pubsubTopic' in event_trigger:
        # Show only the short topic name, not the full resource path.
        return 'topic: ' + event_trigger['pubsubTopic'].split('/')[-1]
      return 'Event Trigger'
    # v2 functions can always be http triggered as backed by a cloud run
    # service, if no trigger is found display 'HTTP trigger'
    return 'HTTP Trigger'
  return undefined
def _InferFunctionMessageFormat(data, undefined='-'):
  """Returns Cloud Functions product version.

  Infers data type by checking whether the object contains particular fields of
  CloudFunction (1st Gen Function message type) or Function (2nd Gen Function
  message type). Notes that Function can be used for both 1st Gen and 2nd Gen
  functions.

  Args:
    data: JSON-serializable 1st and 2nd gen Functions objects.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    str containing inferred product version.
  """
  # Fields characteristic of the v1 CloudFunction message.
  if any(data.get(field) for field in ('entryPoint', 'buildId', 'runtime')):
    return CLOUD_FUNCTION
  # Fields characteristic of the v2 Function message.
  if any(data.get(field) for field in ('buildConfig', 'serviceConfig')):
    return FUNCTION
  return undefined
def _TransformGeneration(data, undefined='-'):
  """Returns Cloud Functions product version.

  Args:
    data: JSON-serializable 1st and 2nd gen Functions objects.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    str containing inferred product version.
  """
  # An explicit 'environment' field takes precedence.
  environment_to_label = {'GEN_1': GEN_1, 'GEN_2': GEN_2}
  environment = data.get('environment')
  if environment in environment_to_label:
    return environment_to_label[environment]
  # Otherwise infer the generation from the message shape.
  inferred_type = _InferFunctionMessageFormat(data, undefined)
  type_to_label = {CLOUD_FUNCTION: GEN_1, FUNCTION: GEN_2}
  return type_to_label.get(inferred_type, undefined)
def _TransformEnvironments(data):
  """Returns the supported environments for a runtime.

  Args:
    data: JSON-serializable Runtimes object.

  Returns:
    str of comma-separated generation labels (possibly empty).
  """
  environment_to_label = {'GEN_1': GEN_1, 'GEN_2': GEN_2}
  # Robustness fix: a missing/None 'environments' entry used to raise
  # TypeError when iterated; treat it as an empty list instead.
  environments = data.get('environments') or []
  return ', '.join(
      environment_to_label[env]
      for env in environments
      if env in environment_to_label
  )
def _TransformUpgradeState(data, undefined=''):
"""Returns Cloud Functions upgrade state.
Upgrade state will only be available for gen1 functions which meet the upgrade
criteria
Args:
data: JSON-serializable 1st and 2nd gen Functions objects in V2 resource
format.
undefined: Returns this value if the resource cannot be formatted.
Returns:
String representing upgrade state.
"""
if 'upgradeInfo' in data and data['upgradeInfo'] is not None:
return data['upgradeInfo'].get('upgradeState', undefined)
return undefined
# Transform symbol tables, keyed by the name used in projection expressions.
_TRANSFORMS = {
    'trigger': _TransformTrigger,
    'state': _TransformState,
    'generation': _TransformGeneration,
    'environments': _TransformEnvironments,
}
# The beta surface additionally exposes the gen1 upgrade state.
_TRANSFORMS_BETA = {
    'trigger': _TransformTrigger,
    'state': _TransformState,
    'generation': _TransformGeneration,
    'environments': _TransformEnvironments,
    'upgradestate': _TransformUpgradeState,
}
def GetTransforms():
  """Returns the functions specific resource transform symbol table."""
  return _TRANSFORMS
def GetTransformsBeta():
  """Returns the beta functions specific resource transform symbol table."""
  return _TRANSFORMS_BETA

View File

@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library for working with environment variables on functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def GetEnvVarsAsDict(env_vars):
  """Flattens an env-vars message into a plain {key: value} dict."""
  if not env_vars:
    return {}
  return {prop.key: prop.value for prop in env_vars.additionalProperties}
def DictToEnvVarsProperty(env_vars_type_class=None, env_vars=None):
  """Sets environment variables.

  Args:
    env_vars_type_class: type class of environment variables
    env_vars: a dict of environment variables

  Returns:
    An message with the environment variables from env_vars
  """
  if not (env_vars_type_class and env_vars):
    return None
  # Sort keys so the message content is deterministic.
  additional_properties = [
      env_vars_type_class.AdditionalProperty(key=key, value=value)
      for key, value in sorted(env_vars.items())
  ]
  return env_vars_type_class(additionalProperties=additional_properties)

View File

@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper for user-visible error exceptions to raise in the CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class FunctionsError(exceptions.Error):
  """Base user-visible exception for Cloud Functions command errors."""

View File

@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library used to interact with Operations objects."""
# TODO(b/73491568) Refactor to use api_lib.util.waiter
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.functions.v1 import exceptions
from googlecloudsdk.core.console import progress_tracker as console_progress_tracker
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import retry
# Polling parameters for _WaitForOperation: give up after 1820s in total,
# sleeping 1s between polls with the per-wait ceiling capped at 2s.
MAX_WAIT_MS = 1820000
WAIT_CEILING_MS = 2000
SLEEP_MS = 1000
def OperationErrorToString(error):
  """Returns a human readable string representation from the operation.

  Args:
    error: A string representing the raw json of the operation error.

  Returns:
    A human readable string representation of the error.
  """
  decoded_message = encoding.Decode(error.message)
  return 'OperationError: code={0}, message={1}'.format(
      error.code, decoded_message
  )
# TODO(b/130604453): Remove try_set_invoker option.
def _GetOperationStatus(
client,
get_request,
progress_tracker=None,
try_set_invoker=None,
on_every_poll=None,
):
"""Helper function for getting the status of an operation.
Args:
client: The client used to make requests.
get_request: A GetOperationRequest message.
progress_tracker: progress_tracker.ProgressTracker, A reference for the
progress tracker to tick, in case this function is used in a Retryer.
try_set_invoker: function to try setting invoker, see above TODO.
on_every_poll: list of functions to execute every time we poll. Functions
should take in Operation as an argument.
Returns:
True if the operation succeeded without error.
False if the operation is not yet done.
Raises:
FunctionsError: If the operation is finished with error.
"""
if try_set_invoker:
try_set_invoker()
if progress_tracker:
progress_tracker.Tick()
op = client.operations.Get(get_request)
if op.error:
raise exceptions.FunctionsError(OperationErrorToString(op.error))
if on_every_poll:
for function in on_every_poll:
function(op)
return op.done
# TODO(b/139026575): Remove try_set_invoker option.
def _WaitForOperation(
    client, get_request, message, try_set_invoker=None, on_every_poll=None
):
  """Wait for an operation to complete.

  No operation is done instantly. Wait for it to finish following this logic:
  * we wait 1s (jitter is also 1s)
  * we query service
  * if the operation is not finished we loop to first point
  * wait limit is 1820s - if we get to that point it means something is wrong
    and we can throw an exception

  Args:
    client: The client used to make requests.
    get_request: A GetOperationRequest message.
    message: str, The string to print while polling.
    try_set_invoker: function to try setting invoker, see above TODO.
    on_every_poll: list of functions to execute every time we poll. Functions
      should take in Operation as an argument.

  Returns:
    True if the operation succeeded without error.

  Raises:
    FunctionsError: If the operation takes more than 1820s.
  """
  with console_progress_tracker.ProgressTracker(message, autotick=False) as pt:
    # This is actually linear retryer.
    retryer = retry.Retryer(
        exponential_sleep_multiplier=1,
        max_wait_ms=MAX_WAIT_MS,
        wait_ceiling_ms=WAIT_CEILING_MS,
    )
    try:
      # Poll _GetOperationStatus until it reports done; each poll ticks the
      # tracker and runs the on_every_poll callbacks.
      retryer.RetryOnResult(
          _GetOperationStatus,
          [client, get_request],
          {
              'progress_tracker': pt,
              'try_set_invoker': try_set_invoker,
              'on_every_poll': on_every_poll,
          },
          should_retry_if=lambda done, _: not done,
          sleep_ms=SLEEP_MS,
      )
    except retry.WaitException:
      # The retryer exhausted MAX_WAIT_MS without the operation completing.
      raise exceptions.FunctionsError(
          'Operation {0} is taking too long'.format(get_request.name)
      )
def Wait(
    operation,
    messages,
    client,
    notice=None,
    try_set_invoker=None,
    on_every_poll=None,
):
  """Initialize waiting for operation to finish.

  Generate get request based on the operation and wait for an operation
  to complete.

  Args:
    operation: The operation which we are waiting for.
    messages: GCF messages module.
    client: GCF client module.
    notice: str, displayed when waiting for the operation to finish.
    try_set_invoker: function to try setting invoker, see above TODO.
    on_every_poll: list of functions to execute every time we poll. Functions
      should take in Operation as an argument.

  Raises:
    FunctionsError: If the operation takes more than 1820s (MAX_WAIT_MS; the
      docstring previously said 620s, which did not match the constant).
  """
  if notice is None:
    notice = 'Waiting for operation to finish'
  request = messages.CloudfunctionsOperationsGetRequest()
  request.name = operation.name
  _WaitForOperation(client, request, notice, try_set_invoker, on_every_poll)

View File

@@ -0,0 +1,217 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library that is used to support Functions commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import itertools
import enum
# Label of the fallback provider returned for events not in the registry.
UNADVERTISED_PROVIDER_LABEL = 'unadvertised'
class Resource(object):
  """Pairs a human-readable resource name with its collection id."""
  def __init__(self, name, collection_id):
    # e.g. name='topic', collection_id='pubsub.projects.topics'.
    self.name = name
    self.collection_id = collection_id
@enum.unique
class Resources(enum.Enum):
  """Resource kinds that trigger events can reference."""
  TOPIC = Resource('topic', 'pubsub.projects.topics')
  BUCKET = Resource('bucket', 'cloudfunctions.projects.buckets')
  FIREBASE_DB = Resource('firebase database', 'google.firebase.database.ref')
  FIRESTORE_DOC = Resource('firestore document', 'google.firestore.document')
  FIREBASE_ANALYTICS_EVENT = Resource(
      'firebase analytics', 'google.firebase.analytics.event'
  )
  PROJECT = Resource('project', 'cloudresourcemanager.projects')
class TriggerProvider(object):
  """Represents --trigger-provider flag value options."""

  def __init__(self, label, events):
    self.label = label
    self.events = events
    # Back-reference so event objects can reach their provider's properties
    # when listing event types.
    for evt in events:
      evt.provider = self

  @property
  def default_event(self):
    # By convention the first listed event is the provider's default.
    return self.events[0]
class TriggerEvent(object):
  """Represents --trigger-event flag value options."""
  # Currently only project resource is optional
  OPTIONAL_RESOURCE_TYPES = [Resources.PROJECT]
  def __init__(self, label, resource_type):
    self.label = label
    self.resource_type = resource_type
  # NOTE: self.provider is assigned externally by TriggerProvider.__init__;
  # reading this property before the event is registered with a provider
  # raises AttributeError.
  @property
  def event_is_optional(self):
    # The provider's default (first) event counts as optional.
    return self.provider.default_event == self
  # TODO(b/33097692) Let TriggerEvent know how to handle optional resources.
  @property
  def resource_is_optional(self):
    return self.resource_type in TriggerEvent.OPTIONAL_RESOURCE_TYPES
# TODO (b/73062780): Event types should not be hard-coded.
# Don't use those structures directly. Use registry object instead.
# By convention, first event type is default.
# Advertised trigger providers and the event types each one offers.
_PROVIDERS = [
    TriggerProvider(
        'cloud.pubsub',
        [
            TriggerEvent('google.pubsub.topic.publish', Resources.TOPIC),
            TriggerEvent(
                'providers/cloud.pubsub/eventTypes/topic.publish',
                Resources.TOPIC,
            ),
        ],
    ),
    TriggerProvider(
        'cloud.storage',
        [
            TriggerEvent('google.storage.object.finalize', Resources.BUCKET),
            TriggerEvent(
                'providers/cloud.storage/eventTypes/object.change',
                Resources.BUCKET,
            ),
            TriggerEvent('google.storage.object.archive', Resources.BUCKET),
            TriggerEvent('google.storage.object.delete', Resources.BUCKET),
            TriggerEvent(
                'google.storage.object.metadataUpdate', Resources.BUCKET
            ),
        ],
    ),
    TriggerProvider(
        'google.firebase.database.ref',
        [
            TriggerEvent(
                'providers/google.firebase.database/eventTypes/ref.create',
                Resources.FIREBASE_DB,
            ),
            TriggerEvent(
                'providers/google.firebase.database/eventTypes/ref.update',
                Resources.FIREBASE_DB,
            ),
            TriggerEvent(
                'providers/google.firebase.database/eventTypes/ref.delete',
                Resources.FIREBASE_DB,
            ),
            TriggerEvent(
                'providers/google.firebase.database/eventTypes/ref.write',
                Resources.FIREBASE_DB,
            ),
        ],
    ),
    TriggerProvider(
        'google.firestore.document',
        [
            TriggerEvent(
                'providers/cloud.firestore/eventTypes/document.create',
                Resources.FIRESTORE_DOC,
            ),
            TriggerEvent(
                'providers/cloud.firestore/eventTypes/document.update',
                Resources.FIRESTORE_DOC,
            ),
            TriggerEvent(
                'providers/cloud.firestore/eventTypes/document.delete',
                Resources.FIRESTORE_DOC,
            ),
            TriggerEvent(
                'providers/cloud.firestore/eventTypes/document.write',
                Resources.FIRESTORE_DOC,
            ),
        ],
    ),
    TriggerProvider(
        'google.firebase.analytics.event',
        [
            TriggerEvent(
                'providers/google.firebase.analytics/eventTypes/event.log',
                Resources.FIREBASE_ANALYTICS_EVENT,
            ),
        ],
    ),
    TriggerProvider(
        'google.firebase.remoteConfig',
        [
            TriggerEvent(
                'google.firebase.remoteconfig.update', Resources.PROJECT
            ),
        ],
    ),
    TriggerProvider(
        'firebase.auth',
        [
            TriggerEvent(
                'providers/firebase.auth/eventTypes/user.create',
                Resources.PROJECT,
            ),
            TriggerEvent(
                'providers/firebase.auth/eventTypes/user.delete',
                Resources.PROJECT,
            ),
        ],
    ),
]
class _TriggerProviderRegistry(object):
  """This class encapsulates all Event Trigger related functionality."""

  def __init__(self, all_providers):
    # Known providers plus a catch-all provider for events that gcloud does
    # not advertise in its registry.
    self.providers = all_providers
    self._unadvertised_provider = TriggerProvider(
        UNADVERTISED_PROVIDER_LABEL, []
    )

  def ProvidersLabels(self):
    """Yields the label of every registered provider."""
    for registered in self.providers:
      yield registered.label

  def Provider(self, provider):
    """Returns the registered provider with the given label."""
    matches = (p for p in self.providers if p.label == provider)
    return next(matches)

  def EventsLabels(self, provider):
    """Yields the label of every event offered by the given provider."""
    for trigger_event in self.Provider(provider).events:
      yield trigger_event.label

  def AllEventLabels(self):
    """Yields the label of every event across all registered providers."""
    per_provider = (self.EventsLabels(p.label) for p in self.providers)
    return itertools.chain.from_iterable(per_provider)

  def Event(self, provider, event):
    """Returns the event with the given label from the given provider."""
    matches = (
        e for e in self.Provider(provider).events if e.label == event
    )
    return next(matches)

  def ProviderForEvent(self, event_label):
    """Returns the provider offering the event, or the unadvertised one."""
    for candidate in self.providers:
      if event_label in self.EventsLabels(candidate.label):
        return candidate
    return self._unadvertised_provider
# Module-level singleton registry over the advertised trigger providers.
TRIGGER_PROVIDER_REGISTRY = _TriggerProviderRegistry(_PROVIDERS)

View File

@@ -0,0 +1,658 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library that is used to support Functions commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import functools
import json
import re
from apitools.base.py import base_api
from apitools.base.py import exceptions as apitools_exceptions
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.functions.v1 import exceptions
from googlecloudsdk.api_lib.functions.v1 import operations
from googlecloudsdk.api_lib.functions.v2 import util as v2_util
from googlecloudsdk.api_lib.storage import storage_api as gcs_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.api_lib.util import exceptions as exceptions_util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.calliope import exceptions as base_exceptions
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.util import encoding
from googlecloudsdk.generated_clients.apis.cloudfunctions.v1 import cloudfunctions_v1_messages
import six.moves.http_client
# Progress notice shown while a deploy operation is being polled.
_DEPLOY_WAIT_NOTICE = 'Deploying function (may take a while - up to 2 minutes)'
# Function names: optional resource-path prefix, then a letter, then up to 62
# more letters/digits/hyphens/underscores not ending in '-' or '_'.
_FUNCTION_NAME_RE = re.compile(
    r'^(.*/)?[A-Za-z](?:[-_A-Za-z0-9]{0,61}[A-Za-z0-9])?$'
)
_FUNCTION_NAME_ERROR = (
    'Function name must contain only Latin letters, digits and a '
    'hyphen (-). It must start with letter, must not end with a hyphen, '
    'and must be at most 63 characters long.'
)
# Pub/Sub topic names: a letter followed by 2-254 allowed characters.
_TOPIC_NAME_RE = re.compile(r'^[a-zA-Z][\-\._~%\+a-zA-Z0-9]{2,254}$')
_TOPIC_NAME_ERROR = (
    'Topic must contain only Latin letters (lower- or upper-case), digits and '
    'the characters - + . _ ~ %. It must start with a letter and be from 3 to '
    '255 characters long.'
)
# Full GCS bucket resource URIs, e.g. projects/_/buckets/my-bucket.
_BUCKET_RESOURCE_URI_RE = re.compile(r'^projects/_/buckets/.{3,222}$')
_API_NAME = 'cloudfunctions'
_API_VERSION = 'v1'
# Regions with GCFv1 autopush/staging deployments; used by ListRegions to
# filter results when targeting those environments.
_V1_AUTOPUSH_REGIONS = ['asia-east1', 'europe-west6']
_V1_STAGING_REGIONS = [
    'southamerica-east1',
    'us-central1',
    'us-east1',
    'us-east4',
    'us-west1',
]
# Enum value meaning the function's images live in Container Registry.
_DOCKER_REGISTRY_GCR = (
    cloudfunctions_v1_messages.CloudFunction.DockerRegistryValueValuesEnum.CONTAINER_REGISTRY
)
def _GetApiVersion(track=calliope_base.ReleaseTrack.GA): # pylint: disable=unused-argument
  """Returns the current cloudfunctions Api Version configured in the sdk.

  NOTE: Currently the value is hard-coded to v1, and surface/functions/deploy.py
  assumes this to parse OperationMetadataV1 from the response.
  Please change the parsing if more versions should be supported.

  Args:
    track: The gcloud track. Currently ignored (see NOTE above).

  Returns:
    str, the current cloudfunctions Api Version.
  """
  return _API_VERSION
def GetApiClientInstance(track=calliope_base.ReleaseTrack.GA):
  # type: (calliope_base.ReleaseTrack) -> base_api.BaseApiClient
  """Returns the GCFv1 client instance.

  When the configured cloudfunctions endpoint override points at the autopush
  environment, the override is temporarily swapped to the staging endpoint
  (GCFv1 autopush is served behind the staging API endpoint) and restored
  afterwards.

  Args:
    track: The gcloud release track used to resolve the API version.

  Returns:
    The GCFv1 API client.
  """
  endpoint_override = v2_util.GetApiEndpointOverride()
  if (
      not endpoint_override
      or 'autopush-cloudfunctions' not in endpoint_override
  ):
    return apis.GetClientInstance(_API_NAME, _GetApiVersion(track))
  # GCFv1 autopush is actually behind the staging API endpoint so temporarily
  # override the endpoint so that a staging API client is returned.
  # The GCFv1 mixer routes to the appropriate autopush or staging manager job
  # based on region.
  # GFEs route autopush-cloudfunctions.sandbox.googleapis.com to the GCFv2
  # frontend.
  log.info(
      'Temporarily overriding cloudfunctions endpoint to'
      ' staging-cloudfunctions.sandbox.googleapis.com so that GCFv1 autopush'
      ' resources can be accessed.'
  )
  override_property = properties.VALUES.api_endpoint_overrides.Property(
      'cloudfunctions'
  )
  override_property.Set(
      'https://staging-cloudfunctions.sandbox.googleapis.com/'
  )
  try:
    return apis.GetClientInstance(_API_NAME, _GetApiVersion(track))
  finally:
    # Restore the caller's original override (previously a hard-coded autopush
    # URL was written back, clobbering variants and leaking the staging
    # override if client creation raised) in case a GCFv2 autopush client is
    # created later.
    override_property.Set(endpoint_override)
def GetResourceManagerApiClientInstance():
  """Returns a client for the Cloud Resource Manager v1 API."""
  return apis.GetClientInstance('cloudresourcemanager', 'v1')
def GetApiMessagesModule(track=calliope_base.ReleaseTrack.GA):
  """Returns the generated messages module for the GCFv1 API."""
  return apis.GetMessagesModule(_API_NAME, _GetApiVersion(track))
def GetFunctionRef(name):
  """Parses a function name into a cloudfunctions function resource reference.

  Args:
    name: str, the function name or full resource path.

  Returns:
    The parsed resource reference. The project defaults to the core/project
    property and the location to the functions/region property.
  """
  return resources.REGISTRY.Parse(
      name,
      params={
          'projectsId': properties.VALUES.core.project.Get(required=True),
          'locationsId': properties.VALUES.functions.region.Get(),
      },
      collection='cloudfunctions.projects.locations.functions',
  )
# Building blocks of the _PATH grammar used by ValidatePathOrRaise.
_ID_CHAR = '[a-zA-Z0-9_]'
_P_CHAR = "[][~@#$%&.,?:;+*='()-]"
# capture: '{' ID_CHAR+ ('=' '*''*'?)? '}'
# Named wildcards may be written in curly brackets (e.g. {variable}). The
# value that matched this parameter will be included in the event
# parameters.
# The '+' quantifier after _ID_CHAR is required so multi-character capture
# names such as {userId} (the very example in _PATH_RE_ERROR) are accepted;
# without it only single-character names matched.
_CAPTURE = r'(\{' + _ID_CHAR + r'+(=\*\*?)?})'
# segment: (ID_CHAR | P_CHAR)+
_SEGMENT = '((' + _ID_CHAR + '|' + _P_CHAR + ')+)'
# part: '/' segment | capture
_PART = '(/(' + _SEGMENT + '|' + _CAPTURE + '))'
# path: part+ (but first / is optional)
_PATH = '(/?(' + _SEGMENT + '|' + _CAPTURE + ')' + _PART + '*)'
_PATH_RE_ERROR = (
    'Path must be a slash-separated list of segments and '
    'captures. For example, [users/{userId}/profilePic].'
)
def GetHttpErrorMessage(error):
  # type: (apitools_exceptions.HttpError) -> str
  """Returns a human readable string representation from the http response.

  Args:
    error: HttpException representing the error response.

  Returns:
    A human readable string representation of the error.
  """
  status = error.response.get('status', '')
  code = error.response.get('reason', '')
  message = ''
  try:
    data = json.loads(error.content)
    if 'error' in data:
      error_info = data['error']
      if 'message' in error_info:
        message = error_info['message']
      violations = _GetViolationsFromError(error)
      if violations:
        message += '\nProblems:\n' + violations
      # The http response dict stores 'status' as a string (e.g. '403'), so
      # the previous `status == 403` int comparison never matched and
      # permission details were never surfaced. Accept both forms.
      if status in ('403', 403):
        permission_issues = _GetPermissionErrorDetails(error_info)
        if permission_issues:
          message += '\nPermission Details:\n' + permission_issues
  except (ValueError, TypeError):
    # Non-JSON payload: fall back to the raw response body.
    message = error.content
  return 'ResponseError: status=[{0}], code=[{1}], message=[{2}]'.format(
      status, code, encoding.Decode(message)
  )
def _ValidateArgumentByRegexOrRaise(argument, regex, error_message):
  """Returns the argument when it matches the regex; raises otherwise.

  Args:
    argument: str, the user-supplied value to validate.
    regex: str or compiled pattern the argument must match.
    error_message: str, description included in the raised error.

  Returns:
    The argument, unchanged, when it matches.

  Raises:
    arg_parsers.ArgumentTypeError: If the argument does not match.
  """
  pattern = re.compile(regex) if isinstance(regex, str) else regex
  if pattern.match(argument) is None:
    raise arg_parsers.ArgumentTypeError(
        "Invalid value '{0}': {1}".format(argument, error_message)
    )
  return argument
def ValidateFunctionNameOrRaise(name):
  """Checks if a function name provided by user is valid.

  Validation is against _FUNCTION_NAME_RE (letters, digits, hyphens;
  at most 63 characters).

  Args:
    name: Function name provided by user.

  Returns:
    Function name.

  Raises:
    ArgumentTypeError: If the name provided by user is not valid.
  """
  return _ValidateArgumentByRegexOrRaise(
      name, _FUNCTION_NAME_RE, _FUNCTION_NAME_ERROR
  )
def ValidateAndStandarizeBucketUriOrRaise(bucket):
  """Checks if a bucket uri provided by user is valid.

  If the Bucket uri is valid, converts it to a standard form: a gs:// URL
  with exactly one trailing slash.

  Args:
    bucket: Bucket uri provided by user (either a full
      projects/_/buckets/... resource URI or a plain bucket name/URL).

  Returns:
    Sanitized bucket uri.

  Raises:
    ArgumentTypeError: If the name provided by user is not valid.
  """
  if _BUCKET_RESOURCE_URI_RE.match(bucket):
    bucket_ref = storage_util.BucketReference.FromUrl(bucket)
  else:
    try:
      bucket_ref = storage_util.BucketReference.FromArgument(
          bucket, require_prefix=False
      )
    except argparse.ArgumentTypeError as e:
      raise arg_parsers.ArgumentTypeError(
          "Invalid value '{}': {}".format(bucket, e)
      )
  # Strip any extraneous '/' and append a single trailing '/'.
  bucket = bucket_ref.ToUrl().rstrip('/') + '/'
  return bucket
def ValidatePubsubTopicNameOrRaise(topic):
  """Checks if a Pub/Sub topic name provided by user is valid.

  Args:
    topic: Pub/Sub topic name provided by user.

  Returns:
    Topic name.

  Raises:
    ArgumentTypeError: If the name provided by user is not valid.
  """
  return _ValidateArgumentByRegexOrRaise(
      topic, _TOPIC_NAME_RE, _TOPIC_NAME_ERROR
  )
def ValidateRuntimeOrRaise(client, runtime, region):
  """Checks if runtime is supported.

  Does not raise if the runtime list cannot be retrieved.

  Args:
    client: v2 GCF client that supports ListRuntimes()
    runtime: str, the runtime.
    region: str, region code.

  Returns:
    warning: None|str, the warning if deprecated
  """
  query = 'name={} AND environment={}'.format(
      runtime, client.messages.Runtime.EnvironmentValueValuesEnum.GEN_1
  )
  listing = client.ListRuntimes(region, query_filter=query)
  # A missing listing means runtimes could not be retrieved; skip validation
  # rather than blocking the operation.
  if not listing or listing.runtimes is None:
    return None
  if not listing.runtimes:
    raise exceptions.FunctionsError(
        'argument `--runtime`: {} is not a supported runtime on'
        ' GCF 1st gen. Use `gcloud functions runtimes list` to get a list'
        ' of available runtimes'.format(runtime)
    )
  matched = listing.runtimes[0]
  if matched and matched.warnings:
    return matched.warnings[0]
  return None
def ValidatePathOrRaise(path):
  """Check if path provided by user is valid.

  Args:
    path: A string: resource path

  Returns:
    The argument provided, if found valid.

  Raises:
    ArgumentTypeError: If the user provided a path which is not valid
  """
  return _ValidateArgumentByRegexOrRaise(path, _PATH, _PATH_RE_ERROR)
def _GetViolationsFromError(error):
  """Looks for violations descriptions in error message.

  Args:
    error: HttpError containing error information.

  Returns:
    String of newline-separated violations descriptions.
  """
  payload = exceptions_util.HttpErrorPayload(error)
  descriptions = [
      '{}:\n{}'.format(field, description)
      for field, description in payload.violations.items()
  ]
  descriptions += [
      '{}:\n{}'.format(field, description)
      for field, description in payload.field_violations.items()
  ]
  if not descriptions:
    return ''
  return '\n'.join(descriptions) + '\n'
def _GetPermissionErrorDetails(error_info):
  """Looks for permission denied details in error message.

  Args:
    error_info: json containing error information.

  Returns:
    String containing details on the permission issue and suggestions to
    correct it, or None when no details are present.
  """
  try:
    details = error_info.get('details')
    # Guard against an empty or missing list: the previous
    # error_info['details'][0] raised an uncaught IndexError on [].
    if details:
      first_detail = details[0]
      if 'detail' in first_detail:
        return first_detail['detail']
  except (ValueError, TypeError, KeyError, IndexError, AttributeError):
    # Malformed payloads yield no details rather than crashing error
    # reporting itself.
    pass
  return None
def CatchHTTPErrorRaiseHTTPException(func):
  """Decorator that catches HttpError and raises corresponding exception.

  Args:
    func: The function to wrap.

  Returns:
    A wrapper that re-raises apitools HttpErrors as calliope HttpExceptions
    carrying a human-readable message built by GetHttpErrorMessage.
  """
  @functools.wraps(func)
  def CatchHTTPErrorRaiseHTTPExceptionFn(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except apitools_exceptions.HttpError as error:
      # reraise preserves the original traceback while swapping the type.
      core_exceptions.reraise(
          base_exceptions.HttpException(GetHttpErrorMessage(error))
      )
  return CatchHTTPErrorRaiseHTTPExceptionFn
@CatchHTTPErrorRaiseHTTPException
def GetFunction(function_name):
  """Returns the Get method on function response, None if it doesn't exist.

  Args:
    function_name: str, fully-qualified function resource name.

  Returns:
    The CloudFunction message when the function exists, None on NOT_FOUND.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  try:
    # We got response for a get request so a function exists.
    return client.projects_locations_functions.Get(
        messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
            name=function_name
        )
    )
  except apitools_exceptions.HttpError as error:
    if error.status_code == six.moves.http_client.NOT_FOUND:
      # The function has not been found.
      return None
    raise
@CatchHTTPErrorRaiseHTTPException
def ListRegions():
  """Returns the list of regions where GCF 1st Gen is supported.

  Returns:
    Iterable of location messages, filtered to the known v1 regions when
    running against the autopush or staging environments.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  results = list_pager.YieldFromList(
      service=client.projects_locations,
      request=messages.CloudfunctionsProjectsLocationsListRequest(
          name='projects/' + properties.VALUES.core.project.Get(required=True)
      ),
      field='locations',
      batch_size_attribute='pageSize',
  )
  # We filter out v1 autopush and staging regions because they lie behind the
  # same staging API endpoint but they're not distinguishable by environment.
  # Look the environment up once instead of once per branch.
  api_env = v2_util.GetCloudFunctionsApiEnv()
  if api_env is v2_util.ApiEnv.AUTOPUSH:
    log.info(
        'ListRegions: Autopush env detected. Filtering for v1 autopush regions.'
    )
    return [r for r in results if r.locationId in _V1_AUTOPUSH_REGIONS]
  if api_env is v2_util.ApiEnv.STAGING:
    log.info(
        'ListRegions: Staging env detected. Filtering for v1 staging regions.'
    )
    return [r for r in results if r.locationId in _V1_STAGING_REGIONS]
  return results
# TODO(b/130604453): Remove try_set_invoker option
@CatchHTTPErrorRaiseHTTPException
def WaitForFunctionUpdateOperation(
    op, try_set_invoker=None, on_every_poll=None
):
  """Wait for the specified function update to complete.

  Args:
    op: Cloud operation to wait on.
    try_set_invoker: function to try setting invoker, see above TODO.
    on_every_poll: list of functions to execute every time we poll. Functions
      should take in Operation as an argument.
  """
  client = GetApiClientInstance()
  operations.Wait(
      op,
      client.MESSAGES_MODULE,
      client,
      _DEPLOY_WAIT_NOTICE,
      try_set_invoker=try_set_invoker,
      on_every_poll=on_every_poll,
  )
@CatchHTTPErrorRaiseHTTPException
def PatchFunction(function, fields_to_patch):
  """Call the api to patch a function based on updated fields.

  Args:
    function: the function to patch
    fields_to_patch: the fields to patch on the function

  Returns:
    The cloud operation for the Patch.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  # Sorted so the update mask is deterministic regardless of input order.
  fields_to_patch_str = ','.join(sorted(fields_to_patch))
  return client.projects_locations_functions.Patch(
      messages.CloudfunctionsProjectsLocationsFunctionsPatchRequest(
          cloudFunction=function,
          name=function.name,
          updateMask=fields_to_patch_str,
      )
  )
@CatchHTTPErrorRaiseHTTPException
def CreateFunction(function, location):
  """Call the api to create a function.

  Args:
    function: the CloudFunction message to create
    location: str, the location (parent resource) for the function

  Returns:
    Cloud operation for the create.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  return client.projects_locations_functions.Create(
      messages.CloudfunctionsProjectsLocationsFunctionsCreateRequest(
          location=location, cloudFunction=function
      )
  )
@CatchHTTPErrorRaiseHTTPException
def GetFunctionIamPolicy(function_resource_name):
  """Fetches the IAM policy for the given function resource."""
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  return client.projects_locations_functions.GetIamPolicy(
      messages.CloudfunctionsProjectsLocationsFunctionsGetIamPolicyRequest(
          resource=function_resource_name
      )
  )
@CatchHTTPErrorRaiseHTTPException
def AddFunctionIamPolicyBinding(
    function_resource_name,
    member='allUsers',
    role='roles/cloudfunctions.invoker',
):
  """Adds the (member, role) binding to the function's IAM policy.

  Args:
    function_resource_name: str, the function resource to modify.
    member: str, the IAM member to grant the role to.
    role: str, the role to grant.

  Returns:
    The response of the SetIamPolicy call.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  policy = GetFunctionIamPolicy(function_resource_name)
  iam_util.AddBindingToIamPolicy(messages.Binding, policy, member, role)
  return client.projects_locations_functions.SetIamPolicy(
      messages.CloudfunctionsProjectsLocationsFunctionsSetIamPolicyRequest(
          resource=function_resource_name,
          setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy),
      )
  )
@CatchHTTPErrorRaiseHTTPException
def RemoveFunctionIamPolicyBindingIfFound(
    function_resource_name,
    member='allUsers',
    role='roles/cloudfunctions.invoker',
):
  """Removes the specified policy binding if it is found.

  Args:
    function_resource_name: str, the function resource to modify.
    member: str, the IAM member whose binding should be removed.
    role: str, the role of the binding to remove.

  Returns:
    The unchanged policy when the binding was absent, otherwise the response
    of the SetIamPolicy call.
  """
  client = GetApiClientInstance()
  messages = client.MESSAGES_MODULE
  policy = GetFunctionIamPolicy(function_resource_name)
  if not iam_util.BindingInPolicy(policy, member, role):
    # Nothing to remove; skip the SetIamPolicy round trip.
    return policy
  iam_util.RemoveBindingFromIamPolicy(policy, member, role)
  return client.projects_locations_functions.SetIamPolicy(
      messages.CloudfunctionsProjectsLocationsFunctionsSetIamPolicyRequest(
          resource=function_resource_name,
          setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy),
      )
  )
@CatchHTTPErrorRaiseHTTPException
def CanAddFunctionIamPolicyBinding(project):
  """Returns True iff the caller can add policy bindings for project.

  Args:
    project: str, the project to test permissions against.

  Returns:
    bool, True when the caller holds both the getIamPolicy and setIamPolicy
    resourcemanager permissions on the project.
  """
  client = GetResourceManagerApiClientInstance()
  messages = client.MESSAGES_MODULE
  needed_permissions = [
      'resourcemanager.projects.getIamPolicy',
      'resourcemanager.projects.setIamPolicy',
  ]
  iam_request = messages.CloudresourcemanagerProjectsTestIamPermissionsRequest(
      resource=project,
      testIamPermissionsRequest=messages.TestIamPermissionsRequest(
          permissions=needed_permissions
      ),
  )
  iam_response = client.projects.TestIamPermissions(iam_request)
  # all() replaces the manual flag loop and short-circuits on the first miss.
  return all(
      permission in iam_response.permissions
      for permission in needed_permissions
  )
def ValidateSecureImageRepositoryOrWarn(region_name, project_id):
  """Validates image repository. Yields security and deprecation warnings.

  Args:
    region_name: String name of the region to which the function is deployed.
    project_id: String ID of the Cloud project.
  """
  _AddGcrDeprecationWarning()
  gcr_bucket_url = GetStorageBucketForGcrRepository(region_name, project_id)
  try:
    gcr_host_policy = gcs_api.StorageClient().GetIamPolicy(
        storage_util.BucketReference.FromUrl(gcr_bucket_url)
    )
    if gcr_host_policy and iam_util.BindingInPolicy(
        gcr_host_policy, 'allUsers', 'roles/storage.objectViewer'
    ):
      log.warning(
          "The Container Registry repository that stores this function's "
          'image is public. This could pose the risk of disclosing '
          'sensitive data. To mitigate this, either use Artifact Registry '
          "('--docker-registry=artifact-registry' flag) or change this "
          'setting in Google Container Registry.\n'
      )
  except apitools_exceptions.HttpError:
    # Best-effort check: inability to read the backing bucket's IAM policy
    # must not fail the deploy.
    log.warning(
        # Fixed user-facing typo: 'Secuirty' -> 'Security'.
        'Security check for Container Registry repository that stores this '
        "function's image has not succeeded. To mitigate risks of disclosing "
        'sensitive data, it is recommended to keep your repositories '
        'private. This setting can be verified in Google Container Registry.\n'
    )
def GetStorageBucketForGcrRepository(region_name, project_id):
  """Retrieves the GCS bucket that backs the GCR repository in specified region.

  Args:
    region_name: String name of the region to which the function is deployed.
    project_id: String ID of the Cloud project.

  Returns:
    String representing the URL of the GCS bucket that backs the GCR repo.
  """
  multiregion = _GetGcrMultiregion(region_name)
  return 'gs://{multiregion}.artifacts.{project_id}.appspot.com'.format(
      multiregion=multiregion, project_id=project_id
  )
def _GetGcrMultiregion(region_name):
  """Returns String name of the GCR multiregion for the given region."""
  # Corresponds to the mapping outlined in go/gcf-regions-to-gcr-domains-map.
  if region_name.startswith(('asia', 'australia')):
    return 'asia'
  if region_name.startswith('europe'):
    return 'eu'
  return 'us'
def IsGcrRepository(function):
  """Returns True when the function's docker registry is Container Registry."""
  return function.dockerRegistry == _DOCKER_REGISTRY_GCR
def _AddGcrDeprecationWarning():
  """Logs a warning about the deprecation of Container Registry."""
  log.warning(
      'Due to the general transition from Container Registry to Artifact'
      ' Registry, `--docker-registry=container-registry` will no longer be'
      ' available as an option when deploying a function.'
      ' All container image storage and management will automatically'
      ' transition to Artifact Registry.'
      ' For more information, please visit:'
      ' https://cloud.google.com/artifact-registry/docs/transition/transition-from-gcr'
  )

View File

@@ -0,0 +1,208 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Functions (2nd gen) API Client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from typing import Generator, Optional
from apitools.base.py import exceptions as apitools_exceptions
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.functions.v1 import util as util_v1
from googlecloudsdk.api_lib.functions.v2 import types
from googlecloudsdk.api_lib.functions.v2 import util
from googlecloudsdk.core import properties
import six
class FunctionsClient(object):
  """Client for Cloud Functions (2nd gen) API."""

  def __init__(self, release_track):
    # Resolve the versioned client/messages pair for the given release track.
    self.client = util.GetClientInstance(release_track)
    self.messages = util.GetMessagesModule(release_track)

  def ListRegions(self) -> Generator[types.Location, None, None]:
    """Lists GCF gen2 regions.

    Returns:
      Iterable[cloudfunctions_v2alpha.Location], Generator of available GCF
      gen2 regions.
    """
    list_request = self.messages.CloudfunctionsProjectsLocationsListRequest(
        name='projects/' + properties.VALUES.core.project.GetOrFail()
    )
    return list_pager.YieldFromList(
        service=self.client.projects_locations,
        request=list_request,
        field='locations',
        batch_size_attribute='pageSize',
    )

  def ListRuntimes(self, region: str, query_filter: Optional[str] = None):
    """Lists available GCF Gen 2 Runtimes in a region.

    Args:
      region: str, The region targeted to list runtimes in.
      query_filter: str, Filters to apply to the list runtimes request.

    Returns:
      v2alpha|v2beta.ListRuntimesResponse, The list runtimes response.
    """
    parent = 'projects/{project}/locations/{region}'.format(
        project=properties.VALUES.core.project.GetOrFail(), region=region
    )
    # v2alpha|v2beta.CloudfunctionsProjectsLocationsRuntimesListRequest
    list_request = (
        self.messages.CloudfunctionsProjectsLocationsRuntimesListRequest(
            parent=parent, filter=query_filter
        )
    )
    return self.client.projects_locations_runtimes.List(list_request)

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def GetFunction(
      self, name: str, raise_if_not_found: bool = False
  ) -> Optional[types.Function]:
    """Gets the function with the given name or None if not found.

    Args:
      name: GCFv2 function resource relative name.
      raise_if_not_found: If set, raises NOT_FOUND http errors instead of
        returning None.

    Returns:
      cloudfunctions_v2_messages.Function, the fetched GCFv2 function or None.
    """
    get_request = (
        self.messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
            name=name
        )
    )
    try:
      return self.client.projects_locations_functions.Get(get_request)
    except apitools_exceptions.HttpError as error:
      not_found = error.status_code == six.moves.http_client.NOT_FOUND
      if not_found and not raise_if_not_found:
        return None
      raise

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def AbortFunctionUpgrade(self, name: str) -> types.Operation:
    """Aborts the function upgrade for the given function.

    Args:
      name: str, GCFv2 function resource relative name.

    Returns:
      A long-running operation.
    """
    abort_request = self.messages.CloudfunctionsProjectsLocationsFunctionsAbortFunctionUpgradeRequest(
        name=name
    )
    return self.client.projects_locations_functions.AbortFunctionUpgrade(
        abort_request
    )

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def CommitFunctionUpgrade(self, name: str) -> types.Operation:
    """Commits the function upgrade for the given function.

    Args:
      name: str, GCFv2 function resource relative name.

    Returns:
      A long-running operation.
    """
    commit_request = self.messages.CloudfunctionsProjectsLocationsFunctionsCommitFunctionUpgradeRequest(
        name=name
    )
    return self.client.projects_locations_functions.CommitFunctionUpgrade(
        commit_request
    )

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def RedirectFunctionUpgradeTraffic(self, name: str) -> types.Operation:
    """Redirects function upgrade traffic for the given function.

    Args:
      name: str, GCFv2 function resource relative name.

    Returns:
      A long-running operation.
    """
    redirect_request = self.messages.CloudfunctionsProjectsLocationsFunctionsRedirectFunctionUpgradeTrafficRequest(
        name=name
    )
    return self.client.projects_locations_functions.RedirectFunctionUpgradeTraffic(
        redirect_request
    )

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def RollbackFunctionUpgradeTraffic(self, name: str) -> types.Operation:
    """Rolls back function upgrade traffic for the given function.

    Args:
      name: str, GCFv2 function resource relative name.

    Returns:
      A long-running operation.
    """
    rollback_request = self.messages.CloudfunctionsProjectsLocationsFunctionsRollbackFunctionUpgradeTrafficRequest(
        name=name
    )
    return self.client.projects_locations_functions.RollbackFunctionUpgradeTraffic(
        rollback_request
    )

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def SetupFunctionUpgradeConfig(
      self, name: str, trigger_service_account: str
  ) -> types.Operation:
    """Sets up the function upgrade config for the given function.

    Args:
      name: str, GCFv2 function resource relative name.
      trigger_service_account: str, The service account to use for the trigger.

    Returns:
      A long-running operation.
    """
    upgrade_config = self.messages.SetupFunctionUpgradeConfigRequest(
        triggerServiceAccount=trigger_service_account
    )
    setup_request = self.messages.CloudfunctionsProjectsLocationsFunctionsSetupFunctionUpgradeConfigRequest(
        name=name,
        setupFunctionUpgradeConfigRequest=upgrade_config,
    )
    return self.client.projects_locations_functions.SetupFunctionUpgradeConfig(
        setup_request
    )

  @util_v1.CatchHTTPErrorRaiseHTTPException
  def DetachFunction(self, name: str) -> types.Operation:
    """Detach a GCF 2nd gen function from GCF and make it a native Cloud Run function.

    Args:
      name: str, GCFv2 function resource relative name which follows the format
        of `projects/{project}/locations/{region}/functions/{function}`.

    Returns:
      A long-running operation.
    """
    detach_request = self.messages.CloudfunctionsProjectsLocationsFunctionsDetachFunctionRequest(
        name=name
    )
    return self.client.projects_locations_functions.DetachFunction(
        detach_request
    )

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper for user-visible error exceptions to raise in the CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class FunctionsError(exceptions.Error):
  """Base user-visible exception for Cloud Functions errors."""
class InvalidArgumentException(exceptions.Error):
  """InvalidArgumentException is for malformed arguments."""

  def __init__(self, parameter_name, message):
    """Creates InvalidArgumentException.

    Args:
      parameter_name: str, the parameter flag or argument name
      message: str, the exception message
    """
    formatted = 'Invalid value for [{0}]: {1}'.format(parameter_name, message)
    super(InvalidArgumentException, self).__init__(formatted)
    self.parameter_name = parameter_name
class RequiredArgumentException(exceptions.Error):
  """An exception for when a usually optional argument is required in this case."""

  def __init__(self, parameter_name, message):
    """Creates RequiredArgumentException.

    Args:
      parameter_name: str, the parameter flag or argument name
      message: str, explanation of why the argument is required
    """
    super(RequiredArgumentException, self).__init__(
        'Missing required argument [{0}]: {1}'.format(parameter_name, message)
    )
    self.parameter_name = parameter_name
def StatusToFunctionsError(status, error_message=None):
  """Convert a google.rpc.Status (used for LRO errors) into a FunctionsError."""
  # An explicitly supplied error message takes precedence over the status's.
  return FunctionsError(error_message if error_message else status.message)

View File

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Type aliases for Cloud Functions v2 API."""
from typing import Union
from googlecloudsdk.generated_clients.apis.cloudfunctions.v2 import cloudfunctions_v2_messages as v2_messages
from googlecloudsdk.generated_clients.apis.cloudfunctions.v2alpha import cloudfunctions_v2alpha_messages as v2alpha_messages
from googlecloudsdk.generated_clients.apis.cloudfunctions.v2beta import cloudfunctions_v2beta_messages as v2beta_messages
# Message-type aliases: each name unions the GA, alpha, and beta variants of
# the same generated message so shared code can accept any track's client.
BuildConfig = Union[
    v2_messages.BuildConfig,
    v2alpha_messages.BuildConfig,
    v2beta_messages.BuildConfig,
]
EventTrigger = Union[
    v2_messages.EventTrigger,
    v2alpha_messages.EventTrigger,
    v2beta_messages.EventTrigger,
]
Function = Union[
    v2_messages.Function, v2alpha_messages.Function, v2beta_messages.Function
]
Location = Union[
    v2_messages.Location, v2alpha_messages.Location, v2beta_messages.Location
]
Operation = Union[
    v2_messages.Operation, v2alpha_messages.Operation, v2beta_messages.Operation
]
ServiceConfig = Union[
    v2_messages.ServiceConfig,
    v2alpha_messages.ServiceConfig,
    v2beta_messages.ServiceConfig,
]
Source = Union[
    v2_messages.Source, v2alpha_messages.Source, v2beta_messages.Source
]
DirectVpcNetworkInterface = Union[
    v2_messages.DirectVpcNetworkInterface,
    v2alpha_messages.DirectVpcNetworkInterface,
    v2beta_messages.DirectVpcNetworkInterface,
]
# Enum types (these unfortunately can't be resolved from the type aliases above)
IngressSettings = Union[
    v2_messages.ServiceConfig.IngressSettingsValueValuesEnum,
    v2beta_messages.ServiceConfig.IngressSettingsValueValuesEnum,
    v2alpha_messages.ServiceConfig.IngressSettingsValueValuesEnum,
]
LabelsValue = Union[
    v2_messages.Function.LabelsValue,
    v2alpha_messages.Function.LabelsValue,
    v2beta_messages.Function.LabelsValue,
]
RetryPolicy = Union[
    v2_messages.EventTrigger.RetryPolicyValueValuesEnum,
    v2alpha_messages.EventTrigger.RetryPolicyValueValuesEnum,
    v2beta_messages.EventTrigger.RetryPolicyValueValuesEnum,
]
VpcConnectorEgressSettings = Union[
    v2_messages.ServiceConfig.VpcConnectorEgressSettingsValueValuesEnum,
    v2alpha_messages.ServiceConfig.VpcConnectorEgressSettingsValueValuesEnum,
    v2beta_messages.ServiceConfig.VpcConnectorEgressSettingsValueValuesEnum,
]
DirectVpcEgress = Union[
    v2_messages.ServiceConfig.DirectVpcEgressValueValuesEnum,
    v2alpha_messages.ServiceConfig.DirectVpcEgressValueValuesEnum,
    v2beta_messages.ServiceConfig.DirectVpcEgressValueValuesEnum,
]

View File

@@ -0,0 +1,568 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functionality related to Cloud Functions v2 API clients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import enum
from apitools.base.py import encoding
from apitools.base.py import exceptions as apitools_exceptions
import frozendict
from googlecloudsdk.api_lib.cloudresourcemanager import projects_api
from googlecloudsdk.api_lib.cloudresourcemanager import projects_util as projects_api_util
from googlecloudsdk.api_lib.functions.v2 import exceptions
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.command_lib.projects import util as projects_util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.util import encoding as encoder
from googlecloudsdk.core.util import retry
import six
# API discovery name for Cloud Functions.
_API_NAME = 'cloudfunctions'

# Cloud Functions v2 API versions.
_V2_ALPHA = 'v2alpha'
_V2_BETA = 'v2beta'
_V2_GA = 'v2'

_DEFAULT_ABORTED_MESSAGE = (
    'Aborted by user (background API operations may still be in progress).'
)

# Maps each gcloud release track to the API version it talks to. Uses the
# _V2_* constants above so the version strings are defined in one place.
RELEASE_TRACK_TO_API_VERSION = {
    calliope_base.ReleaseTrack.ALPHA: _V2_ALPHA,
    calliope_base.ReleaseTrack.BETA: _V2_BETA,
    calliope_base.ReleaseTrack.GA: _V2_GA,
}

# Long-running operation polling bounds, in milliseconds.
MAX_WAIT_MS = 1820000
SLEEP_MS = 1000

# EventArc types
EA_PUBSUB_MESSAGE_PUBLISHED = 'google.cloud.pubsub.topic.v1.messagePublished'
EA_STORAGE_ARCHIVE = 'google.cloud.storage.object.v1.archived'
EA_STORAGE_DELETE = 'google.cloud.storage.object.v1.deleted'
EA_STORAGE_FINALIZE = 'google.cloud.storage.object.v1.finalized'
EA_STORAGE_UPDATE = 'google.cloud.storage.object.v1.metadataUpdated'

EVENTARC_STORAGE_TYPES = (
    EA_STORAGE_ARCHIVE,
    EA_STORAGE_DELETE,
    EA_STORAGE_FINALIZE,
    EA_STORAGE_UPDATE,
)

# EventFlow types
EF_PUBSUB_MESSAGE_PUBLISH = 'google.pubsub.topic.publish'
EF_STORAGE_ARCHIVE = 'google.storage.object.archive'
EF_STORAGE_DELETE = 'google.storage.object.delete'
EF_STORAGE_FINALIZE = 'google.storage.object.finalize'
EF_STORAGE_METADATA_UPDATE = 'google.storage.object.metadataUpdate'

# Translates legacy EventFlow storage event types to EventArc equivalents.
EVENTFLOW_TO_EVENTARC_STORAGE_MAP = frozendict.frozendict({
    EF_STORAGE_ARCHIVE: EA_STORAGE_ARCHIVE,
    EF_STORAGE_DELETE: EA_STORAGE_DELETE,
    EF_STORAGE_FINALIZE: EA_STORAGE_FINALIZE,
    EF_STORAGE_METADATA_UPDATE: EA_STORAGE_UPDATE,
})

# Legacy types
LEGACY_PUBSUB_MESSAGE_PUBLISH = (
    'providers/cloud.pubsub/eventTypes/topic.publish'
)

# Every event type that denotes a Pub/Sub message publish, across generations.
PUBSUB_MESSAGE_PUBLISH_TYPES = (
    EA_PUBSUB_MESSAGE_PUBLISHED,
    EF_PUBSUB_MESSAGE_PUBLISH,
    LEGACY_PUBSUB_MESSAGE_PUBLISH,
)
class ApiEnv(enum.Enum):
  """Cloud Functions API serving environments a command may be pointed at."""

  TEST = 1
  AUTOPUSH = 2
  STAGING = 3
  PROD = 4
def GetProject():
  # type: () -> str
  """Returns the value of the core/project config property.

  Config properties can be overridden with command line flags. If the --project
  flag was provided, this will return the value provided with the flag.

  Raises:
    RequiredPropertyError: If no project is set.
  """
  return properties.VALUES.core.project.Get(required=True)
def GetMessagesModule(release_track):
  """Returns the API messages module for GCFv2."""
  return apis.GetMessagesModule(
      _API_NAME, RELEASE_TRACK_TO_API_VERSION.get(release_track)
  )
def GetStage(messages):
  """Returns corresponding GoogleCloudFunctionsV2(alpha|beta|ga)Stage."""
  alpha_messages = apis.GetMessagesModule(_API_NAME, _V2_ALPHA)
  beta_messages = apis.GetMessagesModule(_API_NAME, _V2_BETA)
  # Identity comparison is safe: apis.GetMessagesModule caches modules.
  if messages is alpha_messages:
    return messages.GoogleCloudFunctionsV2alphaStage
  if messages is beta_messages:
    return messages.GoogleCloudFunctionsV2betaStage
  return messages.GoogleCloudFunctionsV2Stage
def GetStateMessage(messages):
  """Returns corresponding GoogleCloudFunctionsV2(alpha|beta|ga)stateMessage."""
  alpha_messages = apis.GetMessagesModule(_API_NAME, _V2_ALPHA)
  beta_messages = apis.GetMessagesModule(_API_NAME, _V2_BETA)
  # Identity comparison is safe: apis.GetMessagesModule caches modules.
  if messages is alpha_messages:
    return messages.GoogleCloudFunctionsV2alphaStateMessage
  if messages is beta_messages:
    return messages.GoogleCloudFunctionsV2betaStateMessage
  return messages.GoogleCloudFunctionsV2StateMessage
def GetApiEndpointOverride():
  # type: () -> str | None
  """Returns the API endpoint override property value for GCF."""
  override = None
  try:
    # The Property() lookup itself may raise, so it stays inside the try.
    override = properties.VALUES.api_endpoint_overrides.Property(
        'cloudfunctions'
    ).Get()
  except properties.NoSuchPropertyError:
    pass
  return override
def GetClientInstance(release_track):
  """Returns an API client for GCFv2."""
  return apis.GetClientInstance(
      _API_NAME, RELEASE_TRACK_TO_API_VERSION.get(release_track)
  )
def GetStateMessagesStrings(state_messages):
  """Returns the list of string representations of the state messages.

  Args:
    state_messages: Iterable of state-message objects, each carrying a
      `severity` value and a `message` string.

  Returns:
    list of str, one '[SEVERITY] message' entry per state message. A real
    list (rather than the lazy, single-use `map` object returned before) so
    the result matches the documented contract and can be iterated more than
    once.
  """
  return [
      '[{}] {}'.format(str(sm.severity), sm.message) for sm in state_messages
  ]
def GetOperationMetadata(messages):
  """Returns corresponding GoogleCloudFunctionsV2(alpha|beta|ga)OperationMetadata."""
  # Ordered (api_version, metadata class name) dispatch table; module identity
  # comparison is safe because apis.GetMessagesModule caches modules.
  dispatch = (
      (_V2_ALPHA, 'GoogleCloudFunctionsV2alphaOperationMetadata'),
      (_V2_BETA, 'GoogleCloudFunctionsV2betaOperationMetadata'),
      (_V2_GA, 'GoogleCloudFunctionsV2OperationMetadata'),
  )
  for api_version, class_name in dispatch:
    if messages is apis.GetMessagesModule(_API_NAME, api_version):
      return getattr(messages, class_name)
  raise NotImplementedError('Invalid messages module.')
def _GetOperationMetadata(messages, operation):
  """Decodes operation.metadata into the track-specific metadata message."""
  metadata_type = GetOperationMetadata(messages)
  metadata_value = encoding.MessageToPyValue(operation.metadata)
  return encoding.PyValueToMessage(metadata_type, metadata_value)
def _GetStageHeader(name_enum):
  """Converts NameValueValuesEnum into the header to use in progress stages."""
  # e.g. ARTIFACT_REGISTRY -> 'Artifact Registry' -> '[Artifact Registry]'
  title = six.text_type(name_enum).replace('_', ' ').title()
  return '[{}]'.format(title)
def _GetOperation(client, request):
  """Fetches the operation, returning None if it does not exist yet."""
  try:
    return client.projects_locations_operations.Get(request)
  except apitools_exceptions.HttpError as error:
    # Only a 404 means "not created yet"; everything else propagates.
    if error.status_code != six.moves.http_client.NOT_FOUND:
      raise
    return None
def _GetOperationAndStages(client, request, messages):
  """Returns the operation and the progress-tracker stages derived from it.

  Args:
    client: The GCFv2 API client.
    request: CloudfunctionsProjectsLocationsOperationsGetRequest naming the
      operation to fetch.
    messages: The GCFv2 message stubs matching the client.

  Returns:
    A (operation, stages) pair where stages is a list of
    progress_tracker.Stage, one per stage in the operation metadata (empty
    when the operation carries no metadata yet).

  Raises:
    FunctionsError: If the operation reports an error.
  """
  operation = _GetOperation(client, request)
  # NOTE(review): _GetOperation returns None on HTTP 404, in which case the
  # attribute access below raises AttributeError. Presumably the operation
  # always exists by the time this poller runs — TODO confirm.
  if operation.error:
    raise exceptions.StatusToFunctionsError(operation.error)
  stages = []
  if operation.metadata:
    operation_metadata = _GetOperationMetadata(messages, operation)
    for stage in operation_metadata.stages:
      stages.append(
          progress_tracker.Stage(
              _GetStageHeader(stage.name), key=six.text_type(stage.name)
          )
      )
  return operation, stages
def _GetOperationAndLogProgress(client, request, tracker, messages):
  """Polls the operation once and mirrors its stage states into the tracker.

  Args:
    client: The GCFv2 API client.
    request: CloudfunctionsProjectsLocationsOperationsGetRequest naming the
      operation to fetch.
    tracker: progress_tracker.StagedProgressTracker to update in place.
    messages: The GCFv2 message stubs matching the client.

  Returns:
    The freshly fetched operation (callers retry until operation.done).

  Raises:
    FunctionsError: If the operation reports an error.
  """
  operation = client.projects_locations_operations.Get(request)
  if operation.error:
    raise exceptions.StatusToFunctionsError(
        operation.error, error_message=OperationErrorToString(operation.error)
    )
  operation_metadata = _GetOperationMetadata(messages, operation)
  # cs/symbol:google.cloud.functions.v2main.OperationMetadata.Stage
  for stage in operation_metadata.stages:
    stage_in_progress = (
        stage.state is GetStage(messages).StateValueValuesEnum.IN_PROGRESS
    )
    stage_complete = (
        stage.state is GetStage(messages).StateValueValuesEnum.COMPLETE
    )
    # Stages that have not started yet are left untouched in the tracker.
    if not stage_in_progress and not stage_complete:
      continue
    stage_key = str(stage.name)
    if tracker.IsComplete(stage_key):
      # Cannot update a completed stage in the tracker
      continue
    # Start running a stage
    if tracker.IsWaiting(stage_key):
      tracker.StartStage(stage_key)
    # Update stage message, including Build logs URL if applicable
    stage_message = stage.message or ''
    if stage_in_progress:
      stage_message = (stage_message or 'In progress') + '... '
    else:
      stage_message = ''
    if stage.resourceUri and stage_key == 'BUILD':
      stage_message += 'Logs are available at [{}]'.format(stage.resourceUri)
    tracker.UpdateStage(stage_key, stage_message)
    # Complete a finished stage
    if stage_complete:
      if stage.stateMessages:
        tracker.CompleteStageWithWarnings(
            stage_key, GetStateMessagesStrings(stage.stateMessages)
        )
      else:
        tracker.CompleteStage(stage_key)
  # We try to detect custom IAM roles in migration setup config.
  if operation.done and operation_metadata.customIamRoleDetected:
    # TODO(b/327636194): update the link once the doc is ready.
    log.warning(
        'A custom IAM role was detected. If this role is used to manage or'
        ' access your function, you must manually add the equivalent Cloud Run'
        ' permissions and add the binding to your Cloud Run function. Refer to'
        ' https://cloud.google.com/run/docs/reference/iam/permissions for'
        ' details.'
    )
  return operation
def WaitForOperation(
    client, messages, operation, description, extra_stages=None
):
  """Wait for a long-running operation (LRO) to complete.

  Args:
    client: The GCFv2 API client.
    messages: The GCFv2 message stubs.
    operation: The operation message response.
    description: str, the description of the waited operation.
    extra_stages: List[progress_tracker.Stage]|None, list of optional stages for
      the progress tracker to watch. The GCF 2nd api returns unexpected stages
      in the case of rollbacks.

  Returns:
    cloudfunctions_v2_messages.Operation, the finished operation.

  Raises:
    FunctionsError: If polling exceeds MAX_WAIT_MS.
  """

  def IsNotDoneAndIsMissingStages(res, _):
    # Retry predicate: keep polling while the operation has neither finished
    # nor published its stage list.
    op, stages = res
    return not stages and not op.done

  request = messages.CloudfunctionsProjectsLocationsOperationsGetRequest(
      name=operation.name
  )
  # Phase 1: wait for the operation's stages to be loaded. The tracker here is
  # a plain spinner, so the `as tracker` binding the original carried was
  # unused and has been dropped.
  with progress_tracker.ProgressTracker(
      'Preparing function', aborted_message=_DEFAULT_ABORTED_MESSAGE
  ):
    retryer = retry.Retryer(max_wait_ms=MAX_WAIT_MS)
    try:
      # operation: latest fetch; stages: List[progress_tracker.Stage]
      operation, stages = retryer.RetryOnResult(
          _GetOperationAndStages,
          args=[client, request, messages],
          should_retry_if=IsNotDoneAndIsMissingStages,
          sleep_ms=SLEEP_MS,
      )
    except retry.WaitException:
      raise exceptions.FunctionsError(
          'Operation {0} is taking too long'.format(operation.name)
      )

  if extra_stages is not None:
    stages += extra_stages

  # Phase 2: wait for the LRO to complete, mirroring stage progress.
  description += '...'
  with progress_tracker.StagedProgressTracker(
      description, stages, aborted_message=_DEFAULT_ABORTED_MESSAGE
  ) as tracker:
    if operation.done and not stages:
      # No stages to show in the progress tracker so just return the operation.
      return operation

    retryer = retry.Retryer(max_wait_ms=MAX_WAIT_MS)
    try:
      operation = retryer.RetryOnResult(
          _GetOperationAndLogProgress,
          args=[client, request, tracker, messages],
          should_retry_if=lambda op, _: not op.done,
          sleep_ms=SLEEP_MS,
      )
    except retry.WaitException:
      raise exceptions.FunctionsError(
          'Operation {0} is taking too long'.format(request.name)
      )
  return operation
def OperationErrorToString(error):
  """Returns a human readable string representation from the operation.

  Recurses into error.details to include nested sub-errors, one per line.

  Args:
    error: A string representing the raw json of the operation error.

  Returns:
    A human readable string representation of the error.
  """
  error_message = 'OperationError: code={0}, message={1}'.format(
      error.code, encoder.Decode(error.message)
  )
  # Use the module constant instead of repeating the 'cloudfunctions' literal.
  messages = apis.GetMessagesModule(_API_NAME, _V2_ALPHA)
  if error.details:
    for detail in error.details:
      sub_error = encoding.PyValueToMessage(
          messages.Status, encoding.MessageToPyValue(detail)
      )
      if sub_error.code is not None or sub_error.message is not None:
        error_message += '\n' + OperationErrorToString(sub_error)
  return error_message
def HasRoleBinding(iam_policy, sa_email, role):
  # type(Policy, str, str) -> bool
  """Returns whether the given SA has the given role bound in given policy.

  Args:
    iam_policy: The IAM policy to check.
    sa_email: The service account to check.
    role: The role to check for.
  """
  # iam_policy.bindings structure:
  # list[<Binding
  #  members=['serviceAccount:member@thing.iam.gserviceaccount.com', ...],
  #  role='roles/somerole'>...]
  member = 'serviceAccount:{}'.format(sa_email)
  for binding in iam_policy.bindings:
    if binding.role == role and member in binding.members:
      return True
  return False
def PromptToBindRoleIfMissing(sa_email, role, alt_roles=None, reason=''):
  # type: (str, str, tuple[str] | None, str) -> None
  """Prompts to bind the role to the service account in project level if missing.

  If the console cannot prompt, a warning is logged instead.

  Args:
    sa_email: The service account email to bind the role to.
    role: The role to bind if missing.
    alt_roles: Alternative roles to check that dismiss the need to bind the
      specified role.
    reason: Extra information to print explaining why the binding is necessary.
  """
  alt_roles = alt_roles or []
  project_ref = projects_util.ParseProject(GetProject())
  member = 'serviceAccount:{}'.format(sa_email)
  try:
    iam_policy = projects_api.GetIamPolicy(project_ref)
    # Nothing to do if the SA already holds the role or any accepted
    # alternative.
    if any(HasRoleBinding(iam_policy, sa_email, r) for r in [role, *alt_roles]):
      return
    log.status.Print(
        'Service account [{}] is missing the role [{}].\n{}'.format(
            sa_email, role, reason
        )
    )
    # Only bind when an interactive console is available AND the user agrees.
    bind = console_io.CanPrompt() and console_io.PromptContinue(
        prompt_string='\nBind the role [{}] to service account [{}]?'.format(
            role, sa_email
        )
    )
    if not bind:
      log.warning('Manual binding of above role may be necessary.\n')
      return
    projects_api.AddIamPolicyBinding(project_ref, member, role)
    log.status.Print('Role successfully bound.\n')
  except apitools_exceptions.HttpForbiddenError:
    # Lacking permission to read or modify the policy is non-fatal: warn and
    # let the calling operation proceed (it may still succeed).
    log.warning(
        (
            'Your account does not have permission to check or bind IAM'
            ' policies to project [%s]. If the deployment fails, ensure [%s]'
            ' has the role [%s] before retrying.'
        ),
        project_ref,
        sa_email,
        role,
    )
# Cloud Resource Manager message module, used to build audit-config messages.
_rm_messages = projects_api_util.GetMessages()

# The full set of Data Access audit log types; all of them must be enabled for
# a service's audit logging to be considered fully on.
_LOG_TYPES = frozenset([
    _rm_messages.AuditLogConfig.LogTypeValueValuesEnum.ADMIN_READ,
    _rm_messages.AuditLogConfig.LogTypeValueValuesEnum.DATA_READ,
    _rm_messages.AuditLogConfig.LogTypeValueValuesEnum.DATA_WRITE,
])
def _LookupAuditConfig(iam_policy, service):
  # type: (Policy, str) -> AuditConfig
  """Looks up the audit config for the given service.

  If no audit config is found, a new one is created and attached to the given
  policy.

  Args:
    iam_policy: The IAM policy to look through.
    service: The service to find the audit config for.

  Returns:
    The audit config for the given service or a blank new one if not found.
  """
  # iam_policy.auditConfigs structure:
  # list[<AuditConfig
  #  auditLogConfigs=[<AuditLogConfig<logType=...>, ...],
  #  service='foo.googleapis.com'>...]
  existing = next(
      (ac for ac in iam_policy.auditConfigs if ac.service == service), None
  )
  if existing is not None:
    return existing
  # Not found: attach a fresh empty config so the caller can populate it.
  new_config = _rm_messages.AuditConfig(service=service, auditLogConfigs=[])
  iam_policy.auditConfigs.append(new_config)
  return new_config
def PromptToEnableDataAccessAuditLogs(service):
  # type: (str) -> None
  """Prompts to enable Data Access audit logs for the given service.

  If the console cannot prompt, a warning is logged instead.

  Args:
    service: The service to enable Data Access audit logs for.
  """
  project = GetProject()
  project_ref = projects_util.ParseProject(project)
  warning_msg = (
      'If audit logs are not fully enabled for [{}], your function may'
      ' fail to receive some events.'.format(service)
  )
  try:
    policy = projects_api.GetIamPolicy(project_ref)
  except apitools_exceptions.HttpForbiddenError:
    # Cannot even read the policy: warn and bail out without prompting.
    log.warning(
        'You do not have permission to retrieve the IAM policy and check'
        ' whether Data Access audit logs are enabled for [{}]. {}'.format(
            service, warning_msg
        )
    )
    return
  # _LookupAuditConfig attaches a new empty config to `policy` when missing.
  audit_config = _LookupAuditConfig(policy, service)
  enabled_log_types = set(lc.logType for lc in audit_config.auditLogConfigs)
  # All log types already enabled: nothing to do.
  if enabled_log_types == _LOG_TYPES:
    return
  log.status.Print(
      'Some Data Access audit logs are disabled for [{}]: '
      'https://console.cloud.google.com/iam-admin/audit?project={}'.format(
          service, project
      )
  )
  if not console_io.CanPrompt():
    log.warning(warning_msg)
    return
  log.status.Print(warning_msg)
  if not console_io.PromptContinue(
      prompt_string='\nEnable all Data Access audit logs for [{}]?'.format(
          service
      )
  ):
    return
  # Create log configs for any missing log types.
  log_types_to_enable = [lt for lt in _LOG_TYPES if lt not in enabled_log_types]
  audit_config.auditLogConfigs.extend(
      [_rm_messages.AuditLogConfig(logType=lt) for lt in log_types_to_enable]
  )
  try:
    projects_api.SetIamPolicy(project_ref, policy, update_mask='auditConfigs')
    log.status.Print('Data Access audit logs successfully enabled.')
  except apitools_exceptions.HttpForbiddenError:
    # Best-effort: the user can still enable audit logs manually.
    log.warning(
        'You do not have permission to update the IAM policy and ensure Data'
        ' Access audit logs are enabled for [{}].'.format(service)
    )
def GetCloudFunctionsApiEnv():
  """Determine the cloudfunctions API env the gcloud cmd is using."""
  endpoint = GetApiEndpointOverride()
  if endpoint is None:
    return ApiEnv.PROD
  # Ordered substring markers; first match wins, default is PROD.
  markers = (
      ('test-cloudfunctions', ApiEnv.TEST),
      ('autopush-cloudfunctions', ApiEnv.AUTOPUSH),
      ('staging-cloudfunctions', ApiEnv.STAGING),
  )
  for marker, env in markers:
    if marker in endpoint:
      return env
  return ApiEnv.PROD