feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A helper library for this command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

View File

@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library that contains common logging commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import properties
def _AssertValidResource(arg, resource_name):
if not any([
resource_name.startswith(t)
for t in ('projects/', 'organizations/', 'folders/', 'billingAccounts/')
]):
raise exceptions.InvalidArgumentException(
arg, 'Invalid resource %s. Resource must be in the form '
'[projects|folders|organizations|billingAccounts]/{{resource_id}}' %
resource_name)
def FetchLogs(log_filter=None,
              order_by='DESC',
              limit=None,
              parent=None,
              resource_names=None):
  """Fetches log entries.

  This method uses Cloud Logging V2 api.
  https://cloud.google.com/logging/docs/api/introduction_v2

  Entries are sorted on the timestamp field, and afterwards filter is applied.
  If limit is passed, returns only up to that many matching entries.

  If neither log_filter nor log_ids are passed, no filtering is done.

  FetchLogs will query the combined resource set from "parent" and
  "resource_names".

  Args:
    log_filter: filter expression used in the request.
    order_by: the sort order, either DESC or ASC.
    limit: how many entries to return.
    parent: the name of the log's parent resource, e.g. "projects/foo" or
      "organizations/123" or "folders/123". Defaults to the current project if
      no `resource_names` are provided.
    resource_names: if present, resource names to query.

  Returns:
    A generator that returns matching log entries.
    Callers are responsible for handling any http exceptions.
  """
  # Copy so a caller-supplied list is never mutated by the appends below.
  resource_names = list(resource_names or [])
  for name in resource_names:
    _AssertValidResource('resource_names', name)
  if parent:
    _AssertValidResource('parent', parent)
    resource_names.append(parent)
  elif not resource_names:
    resource_names.append('projects/%s' %
                          properties.VALUES.core.project.Get(required=True))
  # The backend has an upper limit of 1000 for page_size.
  # However, there is no need to retrieve more entries if limit is specified.
  page_size = min(limit or 1000, 1000)
  order_by = ('timestamp desc' if order_by.upper() == 'DESC'
              else 'timestamp asc')
  client = util.GetClient()
  request = client.MESSAGES_MODULE.ListLogEntriesRequest(
      resourceNames=resource_names, filter=log_filter, orderBy=order_by)
  return list_pager.YieldFromList(
      client.entries, request, field='entries', limit=limit,
      batch_size=page_size, batch_size_attribute='pageSize')

View File

@@ -0,0 +1,76 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatter to parse logs into single lines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
from cloudsdk.google.protobuf import timestamp_pb2
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.privateca import text_utils
def FormatLog(log):
  """Renders a single log record as a one-line summary string.

  Returns '' for records without a recognizable log name or type.
  """
  is_log_entry = isinstance(
      log, apis.GetMessagesModule('logging', 'v2').LogEntry)
  log_name = GetAttributeFieldFromLog('log_name', is_log_entry, log)
  if not log_name:
    return ''
  # Log names embed the escaped type after '%2F', e.g. '.../logs%2Fstdout'.
  name_parts = log_name.split('%2F')
  if len(name_parts) < 2:
    return ''
  log_type = name_parts[1]
  pieces = [GetTimestampFromLogFormat(is_log_entry, log)]
  if log_type == 'requests':
    request = GetAttributeFieldFromLog('http_request', is_log_entry, log)
    pieces.append(
        GetAttributeFieldFromLog('request_method', is_log_entry, request))
    pieces.append(
        str(GetAttributeFieldFromLog('status', is_log_entry, request)))
    pieces.append(
        GetAttributeFieldFromLog('request_url', is_log_entry, request))
  elif log_type in ('stderr', 'stdout'):
    pieces.append(GetAttributeFieldFromLog('text_payload', is_log_entry, log))
  else:
    return ''
  return ' '.join(pieces)
def GetTimestampFromLogFormat(is_log_entry, log):
  """Returns the log timestamp as a 'YYYY-MM-DD HH:MM:SS' string."""
  out_format = '%Y-%m-%d %H:%M:%S'
  timestamp = GetAttributeFieldFromLog('timestamp', is_log_entry, log)
  if not is_log_entry:
    # GAPIC-client records already carry a datetime object.
    return datetime.datetime.strftime(timestamp, out_format)
  # LogEntry messages carry the timestamp as a JSON string; parse via proto.
  parsed = timestamp_pb2.Timestamp()
  parsed.FromJsonString(timestamp)
  return datetime.datetime.strftime(parsed.ToDatetime(), out_format)
def GetAttributeFieldFromLog(field_name, is_log_entry, log_obj):
  """Looks up field_name on log_obj, defaulting to '' when absent."""
  attr_name = GetProperField(field_name, is_log_entry)
  return getattr(log_obj, attr_name, '')
def GetProperField(field_name, is_log_entry):
  """Maps a snake_case field name to the attribute name for the log format.

  LogEntry messages from the messages module expose camelCase attributes,
  while GAPIC-client objects keep snake_case names unchanged.
  """
  if is_log_entry:
    return text_utils.SnakeCaseToCamelCase(field_name)
  return field_name

View File

@@ -0,0 +1,223 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library for logs tailing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import datetime
# pylint: disable=unused-import, type imports needed for gRPC
import google.appengine.logging.v1.request_log_pb2
import google.appengine.v1.audit_data_pb2
import google.appengine.v1beta.audit_data_pb2
import google.cloud.appengine_v1alpha.proto.audit_data_pb2
import google.cloud.audit.audit_log_pb2
import google.cloud.bigquery.logging.v1.audit_data_pb2
import google.iam.admin.v1.audit_data_pb2
import google.iam.v1.logging.audit_data_pb2
import google.type.money_pb2
# pylint: enable=unused-import
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import gapic_util
from googlecloudsdk.core import log
import grpc
_SUPPRESSION_INFO_FLUSH_PERIOD_SECONDS = 2
_HELP_PAGE_LINK = 'https://cloud.google.com/logging/docs/reference/tools/gcloud-logging#tailing.'
def _HandleGrpcRendezvous(rendezvous, output_debug, output_warning):
  """Handles _MultiThreadedRendezvous errors from a tail session."""
  status_code = rendezvous.code()
  # Calls cancelled by the application itself should not produce a warning.
  if status_code == grpc.StatusCode.CANCELLED:
    return
  messages_by_code = {
      grpc.StatusCode.INVALID_ARGUMENT:
          'Invalid argument.',
      grpc.StatusCode.RESOURCE_EXHAUSTED:
          'There are too many tail sessions open.',
      grpc.StatusCode.INTERNAL:
          'Internal error.',
      grpc.StatusCode.PERMISSION_DENIED:
          'Access is denied or has changed for resource.',
      grpc.StatusCode.OUT_OF_RANGE:
          ('The maximum duration for tail has been met. '
           'The command may be repeated to continue.')
  }
  message = messages_by_code.get(status_code, 'Unknown error encountered.')
  output_debug(rendezvous)
  output_warning('{} ({})'.format(message, rendezvous.details()))
def _HandleSuppressionCounts(counts_by_reason, handler):
  """Invokes handler with a human-readable string for each suppression count."""
  client_class = apis.GetGapicClientClass('logging', 'v2')
  suppression_info = client_class.types.TailLogEntriesResponse.SuppressionInfo
  reason_strings = {
      suppression_info.Reason.RATE_LIMIT:
          'Logging API backend rate limit',
      suppression_info.Reason.NOT_CONSUMED:
          'client not consuming messages quickly enough',
  }
  unknown_template = 'UNKNOWN REASON: {}'
  for reason, count in counts_by_reason.items():
    handler(reason_strings.get(reason, unknown_template.format(reason)), count)
class _SuppressionInfoAccumulator(object):
  """Accumulates and outputs information about suppression for the tail session."""

  def __init__(self, get_now, output_warning, output_error):
    self._get_now = get_now
    self._warning = output_warning
    self._error = output_error
    # Counts since the last flush, and over the whole session.
    self._count_by_reason_delta = collections.Counter()
    self._count_by_reason_cumulative = collections.Counter()
    self._last_flush = get_now()

  def _OutputSuppressionHelpMessage(self):
    help_message = 'Find guidance for suppression at {}.'.format(
        _HELP_PAGE_LINK)
    self._warning(help_message)

  def _ShouldFlush(self):
    elapsed = (self._get_now() - self._last_flush).total_seconds()
    return elapsed > _SUPPRESSION_INFO_FLUSH_PERIOD_SECONDS

  def _OutputSuppressionDeltaMessage(self, reason_string, count):
    self._error('Suppressed {} entries due to {}.'.format(count, reason_string))

  def _OutputSuppressionCumulativeMessage(self, reason_string, count):
    self._warning('In total, suppressed {} messages due to {}.'.format(
        count, reason_string))

  def _Flush(self):
    # Report the per-interval counts, then fold them into the session totals.
    self._last_flush = self._get_now()
    _HandleSuppressionCounts(self._count_by_reason_delta,
                             self._OutputSuppressionDeltaMessage)
    self._count_by_reason_cumulative += self._count_by_reason_delta
    self._count_by_reason_delta.clear()

  def Finish(self):
    """Flushes pending counts and reports session totals plus help text."""
    self._Flush()
    _HandleSuppressionCounts(self._count_by_reason_cumulative,
                             self._OutputSuppressionCumulativeMessage)
    if self._count_by_reason_cumulative:
      self._OutputSuppressionHelpMessage()

  def Add(self, suppression_info):
    """Accumulates new suppression info, flushing periodically."""
    new_counts = collections.Counter(
        {info.reason: info.suppressed_count for info in suppression_info})
    self._count_by_reason_delta += new_counts
    if self._ShouldFlush():
      self._Flush()
def _StreamEntries(get_now, output_warning, output_error, output_debug,
                   tail_stub):
  """Streams entries back from the Logging API.

  Args:
    get_now: A callable that returns the current time.
    output_warning: A callable that outputs the argument as a warning.
    output_error: A callable that outputs the argument as an error.
    output_debug: A callable that outputs the argument as debug info.
    tail_stub: The `BidiRpc` stub to use.

  Yields:
    Entries included in the tail session.
  """
  tail_stub.open()
  accumulator = _SuppressionInfoAccumulator(get_now, output_warning,
                                            output_error)
  rendezvous_error = None
  while tail_stub.is_active:
    try:
      response = tail_stub.recv()
    except grpc.RpcError as rpc_error:
      rendezvous_error = rpc_error
      break
    accumulator.Add(response.suppression_info)
    for entry in response.entries:
      yield entry
  if rendezvous_error is not None:
    # `grpc.RpcError`s raised by `recv()` are actually gRPC
    # `_MultiThreadedRendezvous` objects.
    _HandleGrpcRendezvous(rendezvous_error, output_debug, output_warning)
  accumulator.Finish()
  tail_stub.close()
class LogTailer(object):
  """Streams logs using gRPC."""

  def __init__(self):
    self.client = apis.GetGapicClientInstance('logging', 'v2')
    self.tail_stub = None

  def TailLogs(self,
               resource_names,
               logs_filter,
               buffer_window_seconds=None,
               output_warning=log.err.Print,
               output_error=log.error,
               output_debug=log.debug,
               get_now=datetime.datetime.now):
    """Tails log entries from the Cloud Logging API.

    Args:
      resource_names: The resource names to tail.
      logs_filter: The Cloud Logging filter identifying entries to include in
        the session.
      buffer_window_seconds: The amount of time that Cloud Logging should buffer
        entries to get correct ordering, or None if the backend should use its
        default.
      output_warning: A callable that outputs the argument as a warning.
      output_error: A callable that outputs the argument as an error.
      output_debug: A callable that outputs the argument as debug.
      get_now: A callable that returns the current time.

    Yields:
      Entries for the tail session.
    """
    request = self.client.types.TailLogEntriesRequest()
    request.resource_names.extend(resource_names)
    request.filter = logs_filter
    # Fully populate the request before handing it to the stub. The previous
    # ordering set buffer_window only after MakeBidiRpc already held the
    # request, relying on shared-object mutation to take effect.
    if buffer_window_seconds:
      request.buffer_window = datetime.timedelta(seconds=buffer_window_seconds)
    self.tail_stub = gapic_util.MakeBidiRpc(
        self.client, self.client.logging.transport.tail_log_entries,
        initial_request=request)
    for entry in _StreamEntries(get_now, output_warning, output_error,
                                output_debug, self.tail_stub):
      yield entry

  def Stop(self):
    """Closes the tail stream if one was opened."""
    if self.tail_stub:
      self.tail_stub.close()

View File

@@ -0,0 +1,526 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library that is used to support logging commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from apitools.base.py import extra_types
from googlecloudsdk.api_lib.resource_manager import folders
from googlecloudsdk.api_lib.util import apis as core_apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.resource_manager import completers
from googlecloudsdk.command_lib.util.apis import arg_utils
from googlecloudsdk.command_lib.util.args import common_args
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log as sdk_log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core import yaml
# All helpers in this module target this Cloud Logging API version.
DEFAULT_API_VERSION = 'v2'
class Error(exceptions.Error):
  """Base error for this module."""
class InvalidJSONValueError(Error):
  """Raised when a string cannot be decoded as JSON."""
def GetClient():
  """Returns the apitools client instance for the Cloud Logging API."""
  client = core_apis.GetClientInstance('logging', DEFAULT_API_VERSION)
  return client
def GetMessages():
  """Returns the generated messages module for the Cloud Logging API."""
  messages = core_apis.GetMessagesModule('logging', DEFAULT_API_VERSION)
  return messages
def GetCurrentProjectParent():
  """Returns the relative resource name of the currently active project."""
  project_id = properties.VALUES.core.project.Get(required=True)
  project_ref = resources.REGISTRY.Parse(
      project_id, collection='cloudresourcemanager.projects')
  return project_ref.RelativeName()
def GetSinkReference(sink_name, args):
  """Returns the sink resource reference derived from command line args."""
  parent_params = {GetIdFromArgs(args): GetParentResourceFromArgs(args).Name()}
  return resources.REGISTRY.Parse(
      sink_name,
      params=parent_params,
      collection=GetCollectionFromArgs(args, 'sinks'),
  )
def GetOperationReference(operation_name, args):
  """Returns the operation resource reference derived from command line args."""
  parent_params = {
      GetIdFromArgs(args): GetParentResourceFromArgs(args).Name(),
      'locationsId': args.location,
  }
  return resources.REGISTRY.Parse(
      operation_name,
      params=parent_params,
      collection=GetCollectionFromArgs(args, 'locations.operations'),
  )
def FormatTimestamp(timestamp):
  """Formats a datetime as an RFC3339 timestamp string.

  Args:
    timestamp: A datetime.datetime object.

  Returns:
    A timestamp string in a format accepted by Cloud Logging.
  """
  return '{}Z'.format(timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f'))
def ConvertToJsonObject(json_string):
  """Tries to convert the JSON string into a JsonObject.

  Args:
    json_string: the string to decode.

  Returns:
    The decoded JsonObject.

  Raises:
    InvalidJSONValueError: if the string is not valid JSON.
  """
  try:
    return extra_types.JsonProtoDecoder(json_string)
  except Exception as e:  # pylint: disable=broad-except
    raise InvalidJSONValueError('Invalid JSON value: %s' % e)
def AddParentArgs(parser, help_string, exclude_billing_account=False):
  """Adds mutually exclusive arguments selecting the parent of the entities.

  Args:
    parser: parser to which arguments are added.
    help_string: text that is appended to help for each argument.
    exclude_billing_account: whether to exclude the billing account argument.
  """
  group = parser.add_mutually_exclusive_group()
  group.add_argument(
      '--organization',
      required=False,
      metavar='ORGANIZATION_ID',
      completer=completers.OrganizationCompleter,
      help='Organization of the {0}.'.format(help_string))
  group.add_argument(
      '--folder',
      required=False,
      metavar='FOLDER_ID',
      help='Folder of the {0}.'.format(help_string))
  if not exclude_billing_account:
    group.add_argument(
        '--billing-account',
        required=False,
        metavar='BILLING_ACCOUNT_ID',
        help='Billing account of the {0}.'.format(help_string))
  project_arg = common_args.ProjectArgument(
      help_text_to_prepend='Project of the {0}.'.format(help_string))
  project_arg.AddToParser(group)
def AddBucketLocationArg(parser, required, help_string):
  """Adds a --location argument.

  Args:
    parser: parser to which to add args.
    required: whether the argument is required.
    help_string: the help string.
  """
  # Only non-emptiness is validated client-side, since an empty location
  # yields a confusing API error; everything else is left to the API.
  parser.add_argument(
      '--location',
      required=required,
      metavar='LOCATION',
      type=arg_parsers.RegexpValidator(r'.+', 'must be non-empty'),
      help=help_string,
  )
def GetProjectResource(project):
  """Returns the resource for the given project, or the current one if None."""
  project_id = project or properties.VALUES.core.project.Get(required=True)
  return resources.REGISTRY.Parse(
      project_id, collection='cloudresourcemanager.projects')
def GetOrganizationResource(organization):
  """Returns the resource for the given organization.

  Args:
    organization: the organization id.

  Returns:
    The organization resource.
  """
  return resources.REGISTRY.Parse(
      organization, collection='cloudresourcemanager.organizations')
def GetFolderResource(folder):
  """Returns the resource for the given folder.

  Args:
    folder: the folder id.

  Returns:
    The folder resource.
  """
  registry = folders.FoldersRegistry()
  return registry.Parse(folder, collection='cloudresourcemanager.folders')
def GetBillingAccountResource(billing_account):
  """Returns the resource for the given billing account.

  Args:
    billing_account: the billing account id.

  Returns:
    The billing account resource.
  """
  return resources.REGISTRY.Parse(
      billing_account, collection='cloudbilling.billingAccounts')
def GetParentResourceFromArgs(args, exclude_billing_account=False):
  """Returns the parent resource derived from the given args.

  Args:
    args: command line args.
    exclude_billing_account: whether to exclude the billing account argument.

  Returns:
    The parent resource.
  """
  # Guard clauses mirror the mutually exclusive parent flags.
  if args.organization:
    return GetOrganizationResource(args.organization)
  if args.folder:
    return GetFolderResource(args.folder)
  if not exclude_billing_account and args.billing_account:
    return GetBillingAccountResource(args.billing_account)
  return GetProjectResource(args.project)
def GetParentFromArgs(args, exclude_billing_account=False):
  """Returns the relative path to the parent resource from args.

  Args:
    args: command line args.
    exclude_billing_account: whether to exclude the billing account argument.

  Returns:
    The relative path, e.g. 'projects/foo' or 'folders/1234'.
  """
  parent = GetParentResourceFromArgs(args, exclude_billing_account)
  return parent.RelativeName()
def GetBucketLocationFromArgs(args):
  """Returns the relative path to the bucket location from args.

  Args:
    args: command line args.

  Returns:
    The relative path, e.g. 'projects/foo/locations/bar'.
  """
  # '-' is the wildcard location used when none was requested.
  location = args.location or '-'
  return CreateResourceName(GetParentFromArgs(args), 'locations', location)
def GetIdFromArgs(args):
  """Returns the id key to be used for constructing resource paths.

  Args:
    args: command line args.

  Returns:
    The id key, e.g. 'projectsId'.
  """
  if args.organization:
    return 'organizationsId'
  if args.folder:
    return 'foldersId'
  if args.billing_account:
    return 'billingAccountsId'
  return 'projectsId'
def GetCollectionFromArgs(args, collection_suffix):
  """Returns the collection derived from args and the suffix.

  Args:
    args: command line args.
    collection_suffix: suffix of collection.

  Returns:
    The collection, e.g. 'logging.projects.sinks'.
  """
  # First matching parent flag wins; project is the fallback.
  for flag_value, prefix in ((args.organization, 'logging.organizations'),
                             (args.folder, 'logging.folders'),
                             (args.billing_account, 'logging.billingAccounts')):
    if flag_value:
      return '{0}.{1}'.format(prefix, collection_suffix)
  return '{0}.{1}'.format('logging.projects', collection_suffix)
def CreateResourceName(parent, collection, resource_id):
  """Creates the full resource name.

  Args:
    parent: The project or organization id as a resource name, e.g.
      'projects/my-project' or 'organizations/123'.
    collection: The resource collection, e.g. 'logs'.
    resource_id: The id within the collection, e.g. 'my-log'.

  Returns:
    The resource name, e.g. projects/my-project/logs/my-log.
  """
  # The Cloud Logging API requires each component of a resource name to be
  # slash-free, so the id is escaped before being embedded.
  escaped_id = resource_id.replace('/', '%2F')
  return '/'.join([parent, collection, escaped_id])
def CreateLogResourceName(parent, log_id):
  """Creates the full log resource name.

  Args:
    parent: The project or organization id as a resource name, e.g.
      'projects/my-project' or 'organizations/123'.
    log_id: The log id, e.g. 'my-log'. This may already be a resource name, in
      which case parent is ignored and log_id is returned directly, e.g.
      CreateLogResourceName('projects/ignored', 'projects/bar/logs/my-log')
      returns 'projects/bar/logs/my-log'.

  Returns:
    Log resource, e.g. projects/my-project/logs/my-log.
  """
  # Already a full resource name? Pass it through untouched.
  return (log_id if '/logs/' in log_id
          else CreateResourceName(parent, 'logs', log_id))
def ExtractLogId(log_resource):
  """Extracts only the log id and restores the original slashes.

  Args:
    log_resource: The full log uri, e.g. projects/my-projects/logs/my-log.

  Returns:
    A log id that can be used in other commands.
  """
  return log_resource.split('/logs/', 1)[1].replace('%2F', '/')
def IndexTypeToEnum(index_type):
  """Converts an index type string literal to the corresponding enum value.

  Args:
    index_type: The index type, e.g. INDEX_TYPE_STRING.

  Returns:
    The matching IndexConfig.TypeValueValuesEnum value, e.g.
    TypeValueValuesEnum(INDEX_TYPE_INTEGER, 2). A parser error is produced
    for any value outside the supported choices.
  """
  enum_type = GetMessages().IndexConfig.TypeValueValuesEnum
  supported = ['INDEX_TYPE_STRING', 'INDEX_TYPE_INTEGER']
  return arg_utils.ChoiceToEnum(index_type, enum_type, valid_choices=supported)
def PrintPermissionInstructions(destination, writer_identity):
  """Prints a message reminding the user to set up permissions for a sink.

  Args:
    destination: the sink destination (bigquery, storage or pubsub).
    writer_identity: identity to which to grant write access.
  """
  if writer_identity:
    grantee = '`{0}`'.format(writer_identity)
  else:
    grantee = 'the group `cloud-logs@google.com`'
  # Only the first matching destination prefix produces a role reminder.
  reminders = (
      ('bigquery', 'Please remember to grant {0} the BigQuery Data '
                   'Editor role on the dataset.'),
      ('storage', 'Please remember to grant {0} the Storage Object '
                  'Creator role on the bucket.'),
      ('pubsub', 'Please remember to grant {0} the Pub/Sub Publisher '
                 'role on the topic.'),
  )
  for prefix, template in reminders:
    if destination.startswith(prefix):
      sdk_log.status.Print(template.format(grantee))
      break
  sdk_log.status.Print(
      'More information about sinks can be found at https://'
      'cloud.google.com/logging/docs/export/configure_export')
def CreateLogMetric(
    metric_name, description=None, log_filter=None, bucket_name=None, data=None
):
  """Returns a LogMetric message based on a data stream or a description/filter.

  Args:
    metric_name: str, the name of the metric.
    description: str, a description.
    log_filter: str, the filter for the metric's filter field.
    bucket_name: str, the bucket name which owns the metric.
    data: str, a stream of data read from a config file.

  Returns:
    LogMetric, the message representing the new metric.
  """
  messages = GetMessages()
  if not data:
    return messages.LogMetric(
        name=metric_name,
        description=description,
        filter=log_filter,
        bucketName=bucket_name,
    )
  # A config file fully describes the metric; only the name is overridden.
  contents = yaml.load(data)
  metric_msg = encoding.DictToMessage(contents, messages.LogMetric)
  metric_msg.name = metric_name
  return metric_msg
def UpdateLogMetric(
    metric, description=None, log_filter=None, bucket_name=None, data=None
):
  """Updates a LogMetric message given description, filter, and/or data.

  Args:
    metric: LogMetric, the original metric.
    description: str, updated description if any.
    log_filter: str, updated filter for the metric's filter field if any.
    bucket_name: str, the bucket name which owns the metric.
    data: str, a stream of data read from a config file if any.

  Returns:
    LogMetric, the message representing the updated metric.
  """
  messages = GetMessages()
  for field_name, new_value in (('description', description),
                                ('filter', log_filter),
                                ('bucketName', bucket_name)):
    if new_value:
      setattr(metric, field_name, new_value)
  if data:
    # Replace only the top-level fields present in the config data.
    update_data = yaml.load(data)
    metric_diff = encoding.DictToMessage(update_data, messages.LogMetric)
    for field_name in update_data:
      setattr(metric, field_name, getattr(metric_diff, field_name))
  return metric
def GetIamPolicy(view):
  """Gets the IAM policy for a given view."""
  request = GetMessages().LoggingProjectsLocationsBucketsViewsGetIamPolicyRequest(
      resource=view)
  service = GetClient().projects_locations_buckets_views
  return service.GetIamPolicy(request)
def SetIamPolicy(view, policy):
  """Sets the IAM policy for a given view."""
  messages = GetMessages()
  request = messages.LoggingProjectsLocationsBucketsViewsSetIamPolicyRequest(
      resource=view,
      setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy))
  service = GetClient().projects_locations_buckets_views
  return service.SetIamPolicy(request)
def GetTagsArg():
  """Makes the base.Argument for the --tags flag."""
  help_text = '\n'.join([
      'List of tags KEY=VALUE pairs to bind.',
      'Each item must be expressed as',
      '`<tag-key-namespaced-name>=<tag-value-short-name>`.\n',
      'Example: `123/environment=production,123/costCenter=marketing`\n',
  ])
  return base.Argument(
      '--tags',
      metavar='KEY=VALUE',
      type=arg_parsers.ArgDict(),
      action=arg_parsers.UpdateAction,
      help=help_text,
      hidden=True,
  )
def GetTagsFromArgs(args, tags_message, tags_arg_name='tags'):
  """Builds the tags message object from parsed args, or None if unset."""
  tag_pairs = getattr(args, tags_arg_name)
  if not tag_pairs:
    return None
  # Sorting keeps the emitted order deterministic (test stability).
  additional_properties = [
      tags_message.AdditionalProperty(key=key, value=value)
      for key, value in sorted(tag_pairs.items())
  ]
  return tags_message(additionalProperties=additional_properties)