feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,241 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""High-level client for interacting with the Cloud Build API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import time
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.cloudbuild import logs as cloudbuild_logs
from googlecloudsdk.api_lib.util import requests
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from six.moves import range # pylint: disable=redefined-builtin
# Format string used when rendering error responses.
# NOTE(review): appears to use the gcloud resource-format conditional syntax
# '{field? ...{?}}' where each group prints only if the field is present —
# confirm against the consumer of this constant.
_ERROR_FORMAT_STRING = ('Error Response:{status_code? [{?}]}'
                        '{status_message? {?}}{url?\n{?}}'
                        '{details?\n\nDetails:\n{?}}')
def GetBuildProp(build_op, prop_key, required=False):
  """Extract the value keyed by prop_key from a build operation's metadata.

  Args:
    build_op: A Google Cloud Builder build operation.
    prop_key: str, The property name.
    required: If True, raise an OperationError if prop_key isn't present.

  Returns:
    The corresponding build operation value indexed by prop_key, or None when
    the property is absent and not required.

  Raises:
    OperationError: The required prop_key was not found.
  """
  metadata = build_op.metadata
  if metadata is not None:
    # Only the 'build' entry of the metadata carries build properties.
    build_entries = [
        entry for entry in metadata.additionalProperties
        if entry.key == 'build'
    ]
    for entry in build_entries:
      for candidate in entry.value.object_value.properties:
        if candidate.key != prop_key:
          continue
        # Prefer the plain string form; fall back to the raw value object
        # (e.g. for non-string properties or empty strings).
        return candidate.value.string_value or candidate.value
  if required:
    raise OperationError('Build operation does not contain required '
                         'property [{}]'.format(prop_key))
def _GetStatusFromOp(op):
  """Get the Cloud Build status string from an Operation object.

  The op.response field is supposed to hold a copy of the build object, but
  the wire JSON from the server is not deserialized into an actual build
  message; it arrives as a generic ResponseValue, so the status must be dug
  out of its additionalProperties.

  Args:
    op: the Operation object from a CloudBuild build request.

  Returns:
    string status, likely "SUCCESS" or "ERROR"; "UNKNOWN" when absent.
  """
  response = op.response
  if response and response.additionalProperties:
    matches = [
        entry.value.string_value
        for entry in response.additionalProperties
        if entry.key == 'status'
    ]
    if matches:
      return matches[0]
  return 'UNKNOWN'
class BuildFailedError(exceptions.Error):
  """Raised when a Google Cloud Builder build fails or times out."""
class OperationTimeoutError(exceptions.Error):
  """Raised when polling an operation exceeds the retry budget."""
  pass
class OperationError(exceptions.Error):
  """Raised when an operation's metadata is missing a required property."""
  pass
class CloudBuildClient(object):
  """High-level client for interacting with the Cloud Build API."""

  # Seconds to sleep between successive polls of an in-progress operation.
  _RETRY_INTERVAL = 1
  # With a 1s interval, 3600 polls give the build roughly one hour to finish.
  _MAX_RETRIES = 60 * 60

  # Status string reported by a successfully completed build.
  CLOUDBUILD_SUCCESS = 'SUCCESS'
  # Name of a build's log object within its GCS logs bucket.
  CLOUDBUILD_LOGFILE_FMT_STRING = 'log-{build_id}.txt'

  def __init__(self, client=None, messages=None):
    """Initializes the client.

    Args:
      client: The Cloud Build API client to use, or None to construct the
        default client instance.
      messages: The Cloud Build messages module to use, or None to use the
        default messages module.
    """
    self.client = client or cloudbuild_util.GetClientInstance()
    self.messages = messages or cloudbuild_util.GetMessagesModule()

  def ExecuteCloudBuildAsync(self, build, project=None):
    """Execute a call to CloudBuild service and return the build operation.

    Args:
      build: Build object. The Build to execute.
      project: The project to execute, or None to use the current project
        property.

    Raises:
      BuildFailedError: when the build fails.

    Returns:
      build_op, an in-progress build operation.
    """
    if project is None:
      project = properties.VALUES.core.project.Get(required=True)
    build_op = self.client.projects_builds.Create(
        self.messages.CloudbuildProjectsBuildsCreateRequest(
            projectId=project,
            build=build,))
    return build_op

  def ExecuteCloudBuild(self, build, project=None):
    """Execute a call to CloudBuild service and wait for it to finish.

    Args:
      build: Build object. The Build to execute.
      project: The project to execute, or None to use the current project
        property.

    Raises:
      BuildFailedError: when the build fails.
    """
    build_op = self.ExecuteCloudBuildAsync(build, project)
    self.WaitAndStreamLogs(build_op)

  def WaitAndStreamLogs(self, build_op):
    """Wait for a Cloud Build to finish, streaming logs if possible.

    Args:
      build_op: An in-progress build operation, as returned by
        ExecuteCloudBuildAsync.

    Raises:
      OperationError: if the operation metadata has no build id.
      BuildFailedError: when the build times out or does not finish with
        SUCCESS status.
    """
    build_id = GetBuildProp(build_op, 'id', required=True)
    logs_uri = GetBuildProp(build_op, 'logUrl')
    logs_bucket = GetBuildProp(build_op, 'logsBucket')
    log.status.Print(
        'Started cloud build [{build_id}].'.format(build_id=build_id))
    log_loc = 'in the Cloud Console.'
    log_tailer = None
    if logs_bucket:
      # Logs are written to GCS, so they can be tailed while the build runs.
      log_object = self.CLOUDBUILD_LOGFILE_FMT_STRING.format(build_id=build_id)
      log_tailer = cloudbuild_logs.GCSLogTailer(
          bucket=logs_bucket,
          obj=log_object)
    if logs_uri:
      log.status.Print('To see logs in the Cloud Console: ' + logs_uri)
      log_loc = 'at ' + logs_uri
    else:
      log.status.Print('Logs can be found in the Cloud Console.')
    callback = None
    if log_tailer:
      # Poll the tailer on every operation-poll retry so log output streams
      # while the build is in progress.
      callback = log_tailer.Poll
    try:
      op = self.WaitForOperation(operation=build_op, retry_callback=callback)
    except OperationTimeoutError:
      log.debug('', exc_info=True)
      raise BuildFailedError('Cloud build timed out. Check logs ' + log_loc)
    # Poll the logs one final time to ensure we have everything. We know this
    # final poll will get the full log contents because GCS is strongly
    # consistent and Cloud Build waits for logs to finish pushing before
    # marking the build complete.
    if log_tailer:
      log_tailer.Poll(is_last=True)
    final_status = _GetStatusFromOp(op)
    if final_status != self.CLOUDBUILD_SUCCESS:
      message = requests.ExtractErrorMessage(
          encoding.MessageToPyValue(op.error))
      raise BuildFailedError('Cloud build failed. Check logs ' + log_loc
                             + ' Failure status: ' + final_status + ': '
                             + message)

  def WaitForOperation(self, operation, retry_callback=None):
    """Wait until the operation is complete or times out.

    This does not use the core api_lib.util.waiter because the cloud build logs
    serve as a progress tracker.

    Args:
      operation: The operation resource to wait on.
      retry_callback: A callback to be executed before each retry, if desired.

    Returns:
      The operation resource when it has completed.

    Raises:
      OperationTimeoutError: when the operation polling times out.
    """
    completed_operation = self._PollUntilDone(operation, retry_callback)
    if not completed_operation:
      raise OperationTimeoutError(('Operation [{0}] timed out. This operation '
                                   'may still be underway.').format(
                                       operation.name))
    return completed_operation

  def _PollUntilDone(self, operation, retry_callback):
    """Polls the operation resource until it is complete or times out.

    Args:
      operation: The operation resource to poll.
      retry_callback: A callable invoked after each unsuccessful poll, or None.

    Returns:
      The completed operation resource, or None if polling exhausted
      _MAX_RETRIES without the operation finishing.
    """
    if operation.done:
      return operation
    request_type = self.client.operations.GetRequestType('Get')
    request = request_type(name=operation.name)
    for _ in range(self._MAX_RETRIES):
      operation = self.client.operations.Get(request)
      if operation.done:
        log.debug('Operation [{0}] complete. Result: {1}'.format(
            operation.name,
            json.dumps(encoding.MessageToDict(operation), indent=4)))
        return operation
      log.debug('Operation [{0}] not complete. Waiting {1}s.'.format(
          operation.name, self._RETRY_INTERVAL))
      time.sleep(self._RETRY_INTERVAL)
      if retry_callback is not None:
        retry_callback()
    return None

View File

@@ -0,0 +1,78 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions for the cloudbuild API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class ParserError(exceptions.Error):
  """Error parsing YAML into a dictionary."""

  def __init__(self, path, msg):
    """Builds the error message from the offending path and inner message."""
    detail = 'parsing {path}: {msg}'.format(path=path, msg=msg)
    super(ParserError, self).__init__(detail)
class ParseProtoException(exceptions.Error):
  """Error interpreting a dictionary as a specific proto message."""

  def __init__(self, path, proto_name, msg):
    """Builds the error message naming the file and the target proto type."""
    detail = 'interpreting {path} as {proto_name}: {msg}'.format(
        path=path, proto_name=proto_name, msg=msg)
    super(ParseProtoException, self).__init__(detail)
class HybridNonAlphaConfigError(exceptions.Error):
  """Hybrid Configs are currently only supported in the alpha release track."""

  def __init__(self):
    super(HybridNonAlphaConfigError, self).__init__('invalid config file.')
class WorkerConfigButNoWorkerpoolError(exceptions.Error):
  """The user has not supplied a worker pool even though a workerconfig has been specified."""

  def __init__(self):
    detail = ('Detected a worker pool config but no worker pool. '
              'Please specify a worker pool.')
    super(WorkerConfigButNoWorkerpoolError, self).__init__(detail)
class TektonVersionError(exceptions.Error):
  """The Tekton version user supplied is not supported."""

  def __init__(self):
    detail = ('Tekton version is not supported. '
              'Only tekton.dev/v1beta1 is supported at the moment.')
    super(TektonVersionError, self).__init__(detail)
class InvalidYamlError(exceptions.Error):
  """The Tekton Yaml user supplied is invalid."""

  def __init__(self, msg):
    detail = 'Invalid yaml: {msg}'.format(msg=msg)
    super(InvalidYamlError, self).__init__(detail)

View File

@@ -0,0 +1,737 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the cloudbuild API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
import re
from apitools.base.protorpclite import messages as proto_messages
from apitools.base.py import encoding as apitools_encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as c_exceptions
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import resource_property
from googlecloudsdk.core.util import files
import six
_API_NAME = 'cloudbuild'
_GA_API_VERSION = 'v1'
_BETA_API_VERSION = 'v1beta1'

# All release tracks currently resolve to the GA API surface.
RELEASE_TRACK_TO_API_VERSION = {
    base.ReleaseTrack.GA: _GA_API_VERSION,
    base.ReleaseTrack.BETA: _GA_API_VERSION,
    base.ReleaseTrack.ALPHA: _GA_API_VERSION,
}

# Full worker pool resource names look like
# projects/<project>/locations/<region>/workerPools/<name>. The two selector
# patterns capture the short name and the region, respectively.
WORKERPOOL_NAME_MATCHER = r'projects/.*/locations/.*/workerPools/.*'
WORKERPOOL_NAME_SELECTOR = r'projects/.*/locations/.*/workerPools/(.*)'
WORKERPOOL_REGION_SELECTOR = r'projects/.*/locations/(.*)/workerPools/.*'

# Default for optionally-regional requests when the user does not specify.
DEFAULT_REGION = 'global'

# 1 GiB expressed in bytes.
BYTES_IN_ONE_GB = 2**30
class WorkerpoolTypes(enum.Enum):
  """Kinds of worker pools distinguished by this module."""
  UNKNOWN = 0
  PRIVATE = 1
  HYBRID = 2
def GetMessagesModule(release_track=base.ReleaseTrack.GA):
  """Returns the messages module for Cloud Build.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.

  Returns:
    Module containing the definitions of messages for Cloud Build.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetMessagesModule(_API_NAME, api_version)
def GetClientClass(release_track=base.ReleaseTrack.GA):
  """Returns the client class for Cloud Build.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.

  Returns:
    base_api.BaseApiClient, Client class for Cloud Build.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientClass(_API_NAME, api_version)
def GetClientInstance(
    release_track=base.ReleaseTrack.GA,
    use_http=True,
    skip_activation_prompt=False,
):
  """Returns an instance of the Cloud Build client.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.
    use_http: bool, True to create an http object for this client.
    skip_activation_prompt: bool, True to skip prompting for service
      activation. Should be used only if service activation was checked
      earlier in the command.

  Returns:
    base_api.BaseApiClient, An instance of the Cloud Build client.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientInstance(
      _API_NAME,
      api_version,
      no_http=not use_http,
      skip_activation_prompt=skip_activation_prompt,
  )
def EncodeSubstitutions(substitutions, messages):
  """Encodes a dict of substitutions into a Build.SubstitutionsValue message.

  Args:
    substitutions: dict of substitution key/value pairs, or a falsy value.
    messages: A Cloud Build messages module.

  Returns:
    A Build.SubstitutionsValue message, or None when there is nothing to
    encode.
  """
  if not substitutions:
    return None
  value_type = messages.Build.SubstitutionsValue
  # Sorted so the generated message is deterministic (helps tests).
  return apitools_encoding.DictToAdditionalPropertyMessage(
      substitutions, value_type, sort_items=True)
def EncodeTriggerSubstitutions(substitutions, value_type):
  """Encodes a dict of trigger substitutions into a value_type message.

  Args:
    substitutions: dict of substitution key/value pairs, or a falsy value.
    value_type: The SubstitutionsValue message class to populate.

  Returns:
    A populated value_type message, or None when there is nothing to encode.
  """
  if not substitutions:
    return None
  # Sorted so the resulting message is deterministic (helps tests).
  props = [
      value_type.AdditionalProperty(key=key, value=value)
      for key, value in sorted(substitutions.items())
  ]
  return value_type(additionalProperties=props)
def EncodeUpdatedTriggerSubstitutions(old_substitutions, substitutions,
                                      messages):
  """Encodes the trigger substitutions for the update command.

  Args:
    old_substitutions: The existing substitutions to be updated.
    substitutions: The substitutions to be added to the existing substitutions.
    messages: A Cloud Build messages module.

  Returns:
    The updated trigger substitutions; the unmodified old_substitutions when
    there is nothing to merge.
  """
  if not substitutions:
    return old_substitutions
  # Merge into a plain dict; new values override existing keys.
  merged = {}
  if old_substitutions:
    for entry in old_substitutions.additionalProperties:
      merged[entry.key] = entry.value
  merged.update(substitutions)
  value_type = messages.BuildTrigger.SubstitutionsValue
  # Sort for tests.
  props = [
      value_type.AdditionalProperty(key=key, value=value)
      for key, value in sorted(merged.items())
  ]
  return value_type(additionalProperties=props)
def RemoveTriggerSubstitutions(
    old_substitutions, substitutions_to_be_removed, messages
):
  """Removes existing substitutions for the update command.

  Args:
    old_substitutions: The existing substitutions.
    substitutions_to_be_removed: The substitutions to be removed if exist.
    messages: A Cloud Build messages module.

  Returns:
    The updated trigger substitutions, or None when there is nothing to
    remove.
  """
  if not substitutions_to_be_removed:
    return None
  value_type = messages.BuildTrigger.SubstitutionsValue
  existing = old_substitutions.additionalProperties if old_substitutions else []
  kept = [
      value_type.AdditionalProperty(key=entry.key, value=entry.value)
      for entry in existing
      if entry.key not in substitutions_to_be_removed
  ]
  if not kept:
    # An empty AdditionalProperty is emitted when everything was removed —
    # presumably to distinguish "cleared" from "unset"; kept from the
    # original behavior.
    kept = [value_type.AdditionalProperty()]
  return value_type(additionalProperties=kept)
def EncodeEmptyTriggerSubstitutions(messages):
  """Returns a SubstitutionsValue containing one empty AdditionalProperty."""
  value_type = messages.BuildTrigger.SubstitutionsValue
  return value_type(additionalProperties=[value_type.AdditionalProperty()])
def SnakeToCamelString(snake):
  """Change a snake_case string into a camelCase string.

  Args:
    snake: str, the string to be transformed.

  Returns:
    str, the transformed string.
  """
  parts = snake.split('_')
  if not parts:
    return snake
  # Handle a leading run of '_'s by collapsing them into the first non-empty
  # part. Legit field names never look like this, but completeness of the
  # function is important.
  blanks = 0
  while blanks < len(parts) and not parts[blanks]:
    blanks += 1
  if blanks:
    parts = parts[blanks:]
    if not parts:
      # The whole string was underscores; split() over-counts by one.
      return '_' * (blanks - 1)
    parts[0] = '_' * blanks + parts[0]
  head, tail = parts[0], parts[1:]
  return head + ''.join(word.capitalize() for word in tail)
def SnakeToCamel(msg, skip=None):
  """Recursively transform all keys and values from snake_case to camelCase.

  If a key is in skip, then its value is left alone.

  Args:
    msg: dict, list, or other. If 'other', the function returns immediately.
    skip: contains dict keys whose values should not have camel case applied.

  Returns:
    Same type as msg, except all strings that were snake_case are now
    camelCase, except for the values of dict keys contained in skip.
  """
  if skip is None:
    skip = []
  if isinstance(msg, list):
    return [SnakeToCamel(item, skip) for item in msg]
  if not isinstance(msg, dict):
    return msg
  transformed = {}
  for key, value in msg.items():
    # Keys are always converted; values are converted unless the original
    # (pre-conversion) key is in the skip list.
    transformed[SnakeToCamelString(key)] = (
        value if key in skip else SnakeToCamel(value, skip))
  return transformed
def MessageToFieldPaths(msg):
  """Produce field paths from a message object.

  The result is used to create a FieldMask proto message that contains all
  field paths presented in the object.
  https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto

  Args:
    msg: A user defined message object that extends the messages.Message class.
    https://github.com/google/apitools/blob/master/apitools/base/protorpclite/messages.py

  Returns:
    The list of field paths.
  """
  paths = []
  for field in msg.all_fields():
    value = msg.get_assigned_value(field.name)
    if field.repeated and not value:
      # Repeated fields are initialized as empty lists; leave them out of the
      # mask.
      continue
    if value is None:
      continue
    # ConvertToSnakeCase would produce private_poolv1_config or
    # hybrid_pool_config for these two fields, so they are special-cased.
    if field.name == 'privatePoolV1Config':
      name = 'private_pool_v1_config'
    elif field.name == 'hybridPoolConfig':
      name = 'hybrid_pool_config'
    else:
      name = resource_property.ConvertToSnakeCase(field.name)
    if hasattr(value, 'all_fields'):
      # Sub-message: recurse and emit dotted sub-paths.
      for sub_path in MessageToFieldPaths(value):
        paths.append('{}.{}'.format(name, sub_path))
    else:
      paths.append(name)
  return paths
def _UnpackCheckUnused(obj, msg_type):
  """Stuff a dict into a proto message, and fail if there are unused values.

  Args:
    obj: dict(), The structured data to be reflected into the message type.
    msg_type: type, The proto message type.

  Raises:
    ValueError: If there is an unused value in obj. The message is a dotted
      path to the offending entry, e.g. '.foo.bar[2].x: unused'.

  Returns:
    Proto message, The message that was created from obj.
  """
  msg = apitools_encoding.DictToMessage(obj, msg_type)

  def _CheckForUnusedFields(obj):
    """Check for any unused fields in nested messages or lists."""
    if isinstance(obj, proto_messages.Message):
      unused_fields = obj.all_unrecognized_fields()
      if unused_fields:
        if len(unused_fields) > 1:
          # Because this message shows up in a dotted path, use braces.
          # eg .foo.bar.{x,y,z}
          unused_msg = '{%s}' % ','.join(sorted(unused_fields))
        else:
          # For single items, omit the braces.
          # eg .foo.bar.x
          unused_msg = unused_fields[0]
        raise ValueError('.%s: unused' % unused_msg)
      for used_field in obj.all_fields():
        try:
          field = getattr(obj, used_field.name)
          _CheckForUnusedFields(field)
        except ValueError as e:
          # Prepend this field's name so the error reports the full dotted
          # path from the root message.
          raise ValueError('.%s%s' % (used_field.name, e))
    if isinstance(obj, list):
      for i, item in enumerate(obj):
        try:
          _CheckForUnusedFields(item)
        except ValueError as e:
          # Prepend the list index, e.g. [3].foo: unused.
          raise ValueError('[%d]%s' % (i, e))

  _CheckForUnusedFields(msg)
  return msg
def LoadMessageFromStream(stream,
                          msg_type,
                          msg_friendly_name,
                          skip_camel_case=None,
                          path=None):
  """Load a proto message from a stream of JSON or YAML text.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream as a dict.
    ParseProtoException: If there was a problem interpreting the stream as the
      given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  skip = [] if skip_camel_case is None else skip_camel_case
  # First decode the raw text into a Python structure.
  try:
    structured_data = yaml.load(stream, file_hint=path)
  except yaml.Error as e:
    raise cloudbuild_exceptions.ParserError(path, e.inner_error)
  if not isinstance(structured_data, dict):
    raise cloudbuild_exceptions.ParserError(path,
                                            'Could not parse as a dictionary.')
  return _YamlToMessage(structured_data, msg_type, msg_friendly_name, skip,
                        path)
def LoadMessagesFromStream(stream,
                           msg_type,
                           msg_friendly_name,
                           skip_camel_case=None,
                           path=None):
  """Load multiple proto messages from a stream of JSON or YAML text.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream.
    ParseProtoException: If there was a problem interpreting the stream as the
      given message type.

  Returns:
    Proto message list of the messages that got decoded.
  """
  skip = [] if skip_camel_case is None else skip_camel_case
  # Decode every YAML document in the stream.
  try:
    documents = yaml.load_all(stream, file_hint=path)
  except yaml.Error as e:
    raise cloudbuild_exceptions.ParserError(path, e.inner_error)
  return [
      _YamlToMessage(doc, msg_type, msg_friendly_name, skip, path)
      for doc in documents
  ]
def _YamlToMessage(structured_data,
                   msg_type,
                   msg_friendly_name,
                   skip_camel_case=None,
                   path=None):
  """Interpret decoded YAML data as a proto message.

  Args:
    structured_data: Dict containing the decoded YAML data.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParseProtoException: If there was a problem interpreting the data as the
      given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  # Normalize snake_case keys and values to camelCase before decoding.
  camel_data = SnakeToCamel(structured_data, skip_camel_case)
  try:
    return _UnpackCheckUnused(camel_data, msg_type)
  except Exception as e:
    # Catch all exceptions here because a valid YAML can sometimes not be a
    # valid message, so we need to catch all errors in the dict to message
    # conversion.
    raise cloudbuild_exceptions.ParseProtoException(path, msg_friendly_name,
                                                    '%s' % e)
def LoadMessageFromPath(path,
                        msg_type,
                        msg_friendly_name,
                        skip_camel_case=None):
  """Load a proto message from a file containing JSON or YAML text.

  Args:
    path: The path to a file containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParserError: If there was a problem parsing the file as a dict.
    ParseProtoException: If there was a problem interpreting the file as the
      given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  # files.FileReader produces user-friendly error messages on failure.
  with files.FileReader(path) as stream:
    return LoadMessageFromStream(stream, msg_type, msg_friendly_name,
                                 skip_camel_case, path)
def LoadMessagesFromPath(path,
                         msg_type,
                         msg_friendly_name,
                         skip_camel_case=None):
  """Load multiple proto messages from a file containing JSON or YAML text.

  Args:
    path: The path to a file containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParseProtoException: If there was a problem interpreting the file as the
      given message type.

  Returns:
    Proto message list of the messages that got decoded.
  """
  # files.FileReader produces user-friendly error messages on failure.
  with files.FileReader(path) as stream:
    return LoadMessagesFromStream(stream, msg_type, msg_friendly_name,
                                  skip_camel_case, path)
def IsWorkerPool(resource_name):
  """Determine if the provided full resource name is a worker pool.

  Args:
    resource_name: str, The string to test.

  Returns:
    bool, True if the string is a worker pool's full resource name.
  """
  return re.match(WORKERPOOL_NAME_MATCHER, resource_name) is not None
def WorkerPoolShortName(resource_name):
  """Get the name part of a worker pool's full resource name.

  For example, "projects/abc/locations/def/workerPools/ghi" returns "ghi".

  Args:
    resource_name: A worker pool's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The worker pool's short name.
  """
  match = re.search(WORKERPOOL_NAME_SELECTOR, resource_name)
  if not match:
    raise ValueError('The worker pool resource name must match "%s"' %
                     (WORKERPOOL_NAME_MATCHER,))
  return match.group(1)
def WorkerPoolRegion(resource_name):
  """Get the region part of a worker pool's full resource name.

  For example, "projects/abc/locations/def/workerPools/ghi" returns "def".

  Args:
    resource_name: str, A worker pool's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    str, The worker pool's region string.
  """
  match = re.search(WORKERPOOL_REGION_SELECTOR, resource_name)
  if not match:
    raise ValueError('The worker pool resource name must match "%s"' %
                     (WORKERPOOL_NAME_MATCHER,))
  return match.group(1)
def GitHubEnterpriseConfigFromArgs(args, update=False):
  """Construct the GitHubEnterpriseConfig resource from the command line args.

  Args:
    args: An argparse namespace. All the arguments that were provided to this
      command invocation.
    update: bool, if the args are for an update.

  Returns:
    A populated GitHubEnterpriseConfig message.
  """
  messages = GetMessagesModule()
  config = messages.GitHubEnterpriseConfig()
  config.hostUrl = args.host_uri
  config.appId = args.app_id
  if args.webhook_key is not None:
    config.webhookKey = args.webhook_key
  # The peered network is only applied when creating, not when updating.
  if not update and args.peered_network is not None:
    config.peeredNetwork = args.peered_network
  if args.gcs_bucket is not None:
    # The app config is referenced as a GCS object.
    app_config = messages.GCSLocation()
    app_config.bucket = args.gcs_bucket
    app_config.object = args.gcs_object
    if args.generation is not None:
      app_config.generation = args.generation
    config.appConfigJson = app_config
  else:
    # Otherwise the config is referenced through Secret Manager secret names.
    secrets = messages.GitHubEnterpriseSecrets()
    secrets.privateKeyName = args.private_key_name
    secrets.webhookSecretName = args.webhook_secret_name
    secrets.oauthSecretName = args.oauth_secret_name
    secrets.oauthClientIdName = args.oauth_client_id_name
    config.secrets = secrets
  return config
def BitbucketServerConfigFromArgs(args, update=False):
  """Construct the BitbucketServer resource from the command line args.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
    update: bool, if the args are for an update.

  Raises:
    RequiredArgumentException: if --peered-network-ip-range is supplied
      without --peered-network when creating.

  Returns:
    A populated BitbucketServerConfig message.
  """
  messages = GetMessagesModule()
  config = messages.BitbucketServerConfig()
  config.hostUri = args.host_uri
  config.username = args.user_name
  config.apiKey = args.api_key
  secrets = messages.BitbucketServerSecrets()
  secrets.adminAccessTokenVersionName = args.admin_access_token_secret_version
  secrets.readAccessTokenVersionName = args.read_access_token_secret_version
  secrets.webhookSecretVersionName = args.webhook_secret_secret_version
  # NOTE(review): this condition is always true — `secrets` was just
  # constructed and cannot be None. Preserved verbatim to keep behavior
  # identical; likely intended to test the secret *fields* instead.
  if update or secrets is not None:
    config.secrets = secrets
  if not update:
    if args.peered_network is None and args.peered_network_ip_range is not None:
      raise c_exceptions.RequiredArgumentException(
          'peered-network-ip-range',
          (
              '--peered-network is required when specifying'
              ' --peered-network-ip-range.'
          ),
      )
    if args.peered_network is not None:
      config.peeredNetwork = args.peered_network
      config.peeredNetworkIpRange = args.peered_network_ip_range
  if args.IsSpecified('ssl_ca_file'):
    config.sslCa = args.ssl_ca_file
  return config
def GitLabConfigFromArgs(args):
  """Construct the GitLabConfig resource from the command line args.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    A populated GitLabConfig message.
  """
  messages = GetMessagesModule()
  config = messages.GitLabConfig()
  config.username = args.user_name
  secrets = messages.GitLabSecrets()
  secrets.apiAccessTokenVersion = args.api_access_token_secret_version
  secrets.readAccessTokenVersion = args.read_access_token_secret_version
  secrets.webhookSecretVersion = args.webhook_secret_secret_version
  secrets.apiKeyVersion = args.api_key_secret_version
  # Leave secrets unset when no secret fields were provided.
  if not _IsEmptyMessage(secrets):
    config.secrets = secrets
  enterprise = messages.GitLabEnterpriseConfig()
  enterprise.hostUri = args.host_uri
  sd_config = messages.ServiceDirectoryConfig()
  sd_config.service = args.service_directory_service
  enterprise.serviceDirectoryConfig = sd_config
  if args.IsSpecified('ssl_ca_file'):
    enterprise.sslCa = args.ssl_ca_file
  # Leave the enterprise config unset when no enterprise fields were provided.
  if not _IsEmptyMessage(enterprise):
    config.enterpriseConfig = enterprise
  return config
def _IsEmptyMessage(message):
  """Returns True if message is None or carries no truthy field values."""
  if message is None:
    return True
  as_dict = apitools_encoding.MessageToDict(message)
  return not any(as_dict.values())
def WorkerPoolIsSpecified(build_config):
  """Returns whether build_config names a worker pool in its options."""
  if build_config is None or build_config.options is None:
    return False
  pool = build_config.options.pool
  return pool is not None and pool.name is not None
def WorkerPoolConfigIsSpecified(build_config):
  """Returns whether build_config carries a worker config in its pool options."""
  if build_config is None or build_config.options is None:
    return False
  pool = build_config.options.pool
  return pool is not None and pool.workerConfig is not None
def BytesToGb(size):
  """Converts a size in bytes to whole GB (floor division).

  Unlike utils.BytesToGb from googlecloudsdk.api_lib.compute, this does not
  require size to be a multiple of 1 GB.

  Args:
    size: int, a size in bytes, or a falsy value.

  Returns:
    int, the size in GB rounded down, or None when size is falsy.
  """
  if not size:
    return None
  return size // BYTES_IN_ONE_GB

View File

@@ -0,0 +1,172 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parse cloudbuild config files.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.core import exceptions
# Don't apply camel case to keys for dict or list values with these field names.
# These correspond to map fields in our proto message, where we expect keys to
# be sent exactly as the user typed them, without transformation to camelCase.
_SKIP_CAMEL_CASE = [
    'secretEnv', 'secret_env', 'substitutions', 'envMap', 'env_map'
]
# Regex for a valid user-defined substitution variable: a leading underscore
# followed by uppercase letters, digits, and underscores only.
# (Despite the name, this matches user-defined keys, not built-in ones.)
_BUILTIN_SUBSTITUTION_REGEX = re.compile('^_[A-Z0-9_]+$')
# What we call cloudbuild.yaml for error messages that try to parse it.
_BUILD_CONFIG_FRIENDLY_NAME = 'build config'
class InvalidBuildConfigException(exceptions.Error):
  """Raised when a build config message fails validation."""

  def __init__(self, path, msg):
    """Wrap msg with the offending config path for user-facing output."""
    detail = 'validating {path} as build config: {msg}'.format(
        path=path, msg=msg)
    super(InvalidBuildConfigException, self).__init__(detail)
def FinalizeCloudbuildConfig(build, path, params=None, no_source=None):
  """Validate the given build message, and merge substitutions.

  Args:
    build: The build message to finalize.
    path: The path of the original build config, for error messages.
    params: Any additional substitution parameters as a dict. These override
      same-named keys already present in the config.
    no_source: CLI flag value for --no-source. If set, the build config can
      provide a remote build source.

  Raises:
    InvalidBuildConfigException: If the build config is invalid.

  Returns:
    The valid build message with substitutions complete.
  """
  subst_value = build.substitutions
  if subst_value is None:
    subst_value = build.SubstitutionsValue()
  if params is None:
    params = {}
  # Convert substitutions value to dict temporarily.
  subst_dict = {}
  for kv in subst_value.additionalProperties:
    subst_dict[kv.key] = kv.value
  # Validate the substitution keys in the message.
  # NOTE(review): keys supplied via params are merged below without being
  # re-validated here; only keys already present in the config are checked.
  for key in subst_dict:
    if not _BUILTIN_SUBSTITUTION_REGEX.match(key):
      raise InvalidBuildConfigException(
          path,
          'substitution key {} does not respect format {}'.format(
              key, _BUILTIN_SUBSTITUTION_REGEX.pattern
          ),
      )
  # Merge the substitutions passed in the flag.
  subst_dict.update(params)
  # Convert substitutions dict back into value, and store it.
  # Sort so that tests work.
  subst_value = build.SubstitutionsValue()
  for key, value in sorted(subst_dict.items()):
    ap = build.SubstitutionsValue.AdditionalProperty()
    ap.key = key
    ap.value = value
    subst_value.additionalProperties.append(ap)
  if subst_value.additionalProperties:
    build.substitutions = subst_value
  # Some problems can be caught before talking to the cloudbuild service.
  if not no_source and build.source:
    raise InvalidBuildConfigException(
        path, 'config cannot specify source without the flag --no-source'
    )
  if not build.remoteConfig and not build.steps:
    raise InvalidBuildConfigException(
        path,
        'config must list at least one step or specify remote_config',
    )
  return build
def LoadCloudbuildConfigFromStream(
    stream,
    messages,
    params=None,
    path=None,
    no_source=None,
):
  """Load a cloudbuild config file into a Build message.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    messages: module, The messages module that has a Build type.
    params: dict, parameters to substitute into a templated Build spec.
    path: str or None. Optional path to be used in error messages.
    no_source: CLI flag value for --no-source. If set, the build config can
      provide a remote build source. Defaults to None, preserving the previous
      behavior for existing callers.

  Raises:
    ParserError: If there was a problem parsing the stream as a dict.
    ParseProtoException: If there was a problem interpreting the stream as the
      given message type.
    InvalidBuildConfigException: If the build config has illegal values.

  Returns:
    Build message, The build that got decoded.
  """
  build = cloudbuild_util.LoadMessageFromStream(stream, messages.Build,
                                                _BUILD_CONFIG_FRIENDLY_NAME,
                                                _SKIP_CAMEL_CASE, path)
  # Pass no_source through so stream-based loading honors --no-source the same
  # way LoadCloudbuildConfigFromPath does; previously it was silently dropped,
  # so any config with a `source` section raised regardless of the flag.
  return FinalizeCloudbuildConfig(build, path, params, no_source)
def LoadCloudbuildConfigFromPath(path, messages, params=None, no_source=None):
  """Load a cloudbuild config file into a Build message.

  Args:
    path: str. Path to the JSON or YAML data to be decoded.
    messages: module, The messages module that has a Build type.
    params: dict, parameters to substitute into a templated Build spec.
    no_source: CLI flag value for --no-source. If set, the build config can
      provide a remote build source.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParserError: If there was a problem parsing the file as a dict.
    ParseProtoException: If there was a problem interpreting the file as the
      given message type.
    InvalidBuildConfigException: If the build config has illegal values.

  Returns:
    Build message, The build that got decoded.
  """
  raw_build = cloudbuild_util.LoadMessageFromPath(
      path, messages.Build, _BUILD_CONFIG_FRIENDLY_NAME, _SKIP_CAMEL_CASE)
  return FinalizeCloudbuildConfig(raw_build, path, params, no_source)

View File

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Build resource filter expression rewrite backend."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core.resource import resource_expr_rewrite
from googlecloudsdk.core.resource import resource_property
from googlecloudsdk.core.util import times
import six
# If _STRING_FIELDS and _TIME_FIELDS are out of sync with the API then --filter
# expressions will still work, but parts may be done client side, degrading
# performance.
# Snake-case Build fields whose terms can be rewritten as server-side string
# comparisons (see Backend._RewriteStrings).
_STRING_FIELDS = {
    'build_id',
    'images',
    'options.requested_verify_option',
    'project_id',
    'results.images.digest',
    'results.images.name',
    'source_provenance.resolved_repo_source.commit_sha',
    'source.repo_source.branch_name',
    'source.repo_source.commit_sha',
    'source.repo_source.repo_name',
    'source.repo_source.tag_name',
    'source.storage_source.bucket',
    'source.storage_source.object',
    'status',
    'tags',
    'trigger_id',
}
# Snake-case Build fields holding timestamps; operands are parsed and
# normalized to UTC before being sent server side (Backend._RewriteTimes).
_TIME_FIELDS = {
    'create_time',
    'finish_time',
    'start_time',
}
class Backend(resource_expr_rewrite.Backend):
  """Cloud Build resource filter expression rewrite backend."""

  # Comparison operators the server accepts verbatim.
  _COMPARATORS = ('<', '<=', '=', '!=', '>=', '>')

  def __init__(self, ongoing=False, **kwargs):
    """ongoing=True restricts server results to in-progress builds."""
    super(Backend, self).__init__(**kwargs)
    self._ongoing = ongoing

  def _RewriteStrings(self, key, op, operand):
    """Rewrites <key op operand> for one or more string operands."""
    operands = operand if isinstance(operand, list) else [operand]
    terms = [
        '{key}{op}{arg}'.format(
            key=key, op=op, arg=self.Quote(arg, always=True))
        for arg in operands
    ]
    # Multiple operands become an OR of per-operand terms.
    return ' OR '.join(terms) if len(terms) > 1 else terms[0]

  def _RewriteTimes(self, key, op, operand):
    """Rewrites <*Time op operand>, normalizing the operand to UTC."""
    try:
      parsed = times.ParseDateTime(operand)
    except ValueError as e:
      raise ValueError(
          '{operand}: date-time value expected for {key}: {error}'
          .format(operand=operand, key=key, error=six.text_type(e)))
    formatted = times.FormatDateTime(
        parsed, '%Y-%m-%dT%H:%M:%S.%3f%Ez', times.UTC)
    return '{key}{op}{dt_string}'.format(
        key=key, op=op, dt_string=self.Quote(formatted, always=True))

  def Rewrite(self, expression, **kwargs):
    """Returns (client_expression, server_expression) for expression."""
    client_expression, server_expression = super(Backend, self).Rewrite(
        expression, **kwargs)
    if self._ongoing:
      # Restrict the server side to builds that are still running.
      ongoing = 'status="WORKING" OR status="QUEUED"'
      server_expression = (
          self.RewriteAND(server_expression, ongoing)
          if server_expression else ongoing)
    return client_expression, server_expression

  def RewriteTerm(self, key, op, operand, key_type):
    """Rewrites <key op operand>; returns None if it must stay client side."""
    del key_type  # unused in RewriteTerm
    if op == ':':
      op = '='
    elif op not in self._COMPARATORS:
      return None
    name = resource_property.ConvertToSnakeCase(key)
    if name in _STRING_FIELDS:
      return self._RewriteStrings(name, op, operand)
    if name in _TIME_FIELDS:
      return self._RewriteTimes(name, op, operand)
    return None

View File

@@ -0,0 +1,587 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manage and stream build logs from Cloud Builds."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import re
import threading
import time
from apitools.base.py import exceptions as api_exceptions
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.logging import common
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.console import console_attr_os
from googlecloudsdk.core.credentials import requests as creds_requests
from googlecloudsdk.core.util import encoding
import requests
# Printed by GetGCLLogTailer when the grpc-based tailing deps are unavailable.
LOG_STREAM_HELP_TEXT = """
To live stream log output for this build, please ensure the grpc module is installed. Run:
pip install grpcio
and set:
export CLOUDSDK_PYTHON_SITEPACKAGES=1
"""
# Printed when streaming from the default logs bucket fails with a 403, which
# happens when VPC-SC blocks access (see ThreadInterceptor.run).
DEFAULT_LOGS_BUCKET_IS_OUTSIDE_SECURITY_PERIMETER_TEXT = """
The build is running, and logs are being written to the default logs bucket.
This tool can only stream logs if you are Viewer/Owner of the project and, if applicable, allowed by your VPC-SC security policy.
The default logs bucket is always outside any VPC-SC security perimeter.
If you want your logs saved inside your VPC-SC perimeter, use your own bucket.
See https://cloud.google.com/build/docs/securing-builds/store-manage-build-logs.
"""
class NoLogsBucketException(exceptions.Error):
  """Raised when a build has no logsBucket to stream or print logs from."""

  def __init__(self):
    super(NoLogsBucketException, self).__init__(
        'Build does not specify logsBucket, unable to stream logs')
class DefaultLogsBucketIsOutsideSecurityPerimeterException(exceptions.Error):
  """Raised when VPC-SC blocks reading the default logs bucket."""

  def __init__(self):
    message = DEFAULT_LOGS_BUCKET_IS_OUTSIDE_SECURITY_PERIMETER_TEXT
    super(DefaultLogsBucketIsOutsideSecurityPerimeterException,
          self).__init__(message)
# Minimal HTTP response triple returned by RequestsLogTailer.Request.
Response = collections.namedtuple('Response', ['status', 'headers', 'body'])
class RequestsLogTailer(object):
  """LogTailer transport that issues HTTP requests via a credentialed session."""

  def __init__(self):
    self.session = creds_requests.GetSession()

  def Request(self, url, cursor):
    """GET url starting at byte offset cursor; returns a Response triple."""
    range_header = {'Range': 'bytes={0}-'.format(cursor)}
    try:
      resp = self.session.request('GET', url, headers=range_header)
    except requests.exceptions.RequestException as e:
      # Normalize transport failures so callers only handle apitools errors.
      raise api_exceptions.CommunicationError('Failed to connect: %s' % e)
    return Response(resp.status_code, resp.headers, resp.content)
def GetGCLLogTailer():
  """Return a GCL LogTailer, or None if the tailing deps are unavailable."""
  try:
    # The tailing module transitively needs grpc, which may not be installed
    # in this environment; fail soft with installation instructions.
    # pylint: disable=g-import-not-at-top
    from googlecloudsdk.api_lib.logging import tailing
    # pylint: enable=g-import-not-at-top
  except ImportError:
    log.out.Print(LOG_STREAM_HELP_TEXT)
    return None
  return tailing.LogTailer()
def IsCB4A(build):
  """Return True when the build runs on a cluster (CB4A) worker.

  Used to choose a different GCL log filter for CB4A builds.

  Args:
    build: Build resource (or any object with a matching .options shape).

  Returns:
    bool, whether a GKE cluster name or Anthos membership is set.
  """
  options = build.options
  if not options:
    return False
  if options.cluster:
    return bool(options.cluster.name)
  if options.anthosCluster:
    return bool(options.anthosCluster.membership)
  return False
class TailerBase(object):
  """Shared output helpers for the log tailer implementations."""

  LOG_OUTPUT_BEGIN = ' REMOTE BUILD OUTPUT '
  OUTPUT_LINE_CHAR = '-'

  def _ValidateScreenReader(self, text):
    """Strip decorative '---> ' arrows when a screen reader is active."""
    if properties.VALUES.accessibility.screen_reader.GetBool():
      return re.sub('---> ', '', text)
    return text

  def _PrintLogLine(self, text):
    """Testing Hook: This method enables better verification of output."""
    if self.out and text:
      self.out.Print(text.rstrip())

  def _PrintFirstLine(self, msg=LOG_OUTPUT_BEGIN):
    """Print a banner line marking the start of build output logs."""
    width, _ = console_attr_os.GetTermSize()
    self._PrintLogLine(msg.center(width, self.OUTPUT_LINE_CHAR))

  def _PrintLastLine(self, msg=''):
    """Print a banner line marking the end of build output logs."""
    width, _ = console_attr_os.GetTermSize()
    # The trailing newline visually separates the log from other output.
    self._PrintLogLine(msg.center(width, self.OUTPUT_LINE_CHAR) + '\n')
class GCLLogTailer(TailerBase):
  """Helper class to tail logs from GCL, printing content as available."""

  def __init__(self,
               buildId,
               projectId,
               timestamp,
               logUrl=None,
               out=log.status,
               is_cb4a=False):
    # May be None when the tailing deps (grpc) are missing; Tail() then no-ops.
    self.tailer = GetGCLLogTailer()
    self.build_id = buildId
    self.project_id = projectId
    # Build create time; used to narrow the GCL query in Print().
    self.timestamp = timestamp
    self.out = out
    # Seconds the tailing client buffers entries before flushing them.
    self.buffer_window_seconds = 2
    self.log_url = logUrl
    # Set by Stop(); checked by Tail() before starting to stream.
    self.stop = False
    self.is_cb4a = is_cb4a

  @classmethod
  def FromBuild(cls, build, out=log.out):
    """Build a GCLLogTailer from a build resource.

    Args:
      build: Build resource, The build whose logs shall be streamed.
      out: The output stream to write the logs to.

    Returns:
      GCLLogTailer, the tailer of this build's logs.
    """
    return cls(
        buildId=build.id,
        projectId=build.projectId,
        timestamp=build.createTime,
        logUrl=build.logUrl,
        out=out,
        is_cb4a=IsCB4A(build))

  def Tail(self):
    """Tail the GCL logs and print any new bytes to the console."""
    if not self.tailer:
      return
    if self.stop:
      return
    parent = 'projects/{project_id}'.format(project_id=self.project_id)
    log_filter = ('logName="projects/{project_id}/logs/cloudbuild" AND '
                  'resource.type="build" AND '
                  'resource.labels.build_id="{build_id}"').format(
                      project_id=self.project_id, build_id=self.build_id)
    if self.is_cb4a:
      # The labels starting with 'k8s-pod/' in the log entries from GKE-on-GCP
      # clusters are different from other labels. The dots '.' in the labels are
      # converted to '_'. For example, 'k8s-pod/tekton.dev/taskRun' is
      # converted to 'k8s-pod/tekton_dev/taskRun'.
      log_filter = ('labels."k8s-pod/tekton.dev/taskRun"="{build_id}" OR '
                    'labels."k8s-pod/tekton_dev/taskRun"="{build_id}"').format(
                        build_id=self.build_id)
    # Blocks until the tailing stream is closed (e.g. by Stop()).
    output_logs = self.tailer.TailLogs(
        [parent], log_filter, buffer_window_seconds=self.buffer_window_seconds)
    self._PrintFirstLine()
    for output in output_logs:
      text = self._ValidateScreenReader(output.text_payload)
      self._PrintLogLine(text)
    self._PrintLastLine(' BUILD FINISHED; TRUNCATING OUTPUT LOGS ')
    if self.log_url:
      self._PrintLogLine(
          'Logs are available at [{log_url}].'.format(log_url=self.log_url))
    return

  def Stop(self):
    """Stop log tailing."""
    self.stop = True
    # Sleep to allow the Tailing API to send the last logs it buffered up
    time.sleep(self.buffer_window_seconds)
    if self.tailer:
      self.tailer.Stop()

  def Print(self):
    """Print GCL logs to the console (one-shot fetch, no streaming)."""
    parent = 'projects/{project_id}'.format(project_id=self.project_id)
    log_filter = (
        'logName="projects/{project_id}/logs/cloudbuild" AND '
        'resource.type="build" AND '
        # timestamp needed for faster querying in GCL
        'timestamp>="{timestamp}" AND '
        'resource.labels.build_id="{build_id}"').format(
            project_id=self.project_id,
            timestamp=self.timestamp,
            build_id=self.build_id)
    if self.is_cb4a:
      # The labels starting with 'k8s-pod/' in the log entries from GKE-on-GCP
      # clusters are different from other labels. The dots '.' in the labels are
      # converted to '_'. For example, 'k8s-pod/tekton.dev/taskRun' is
      # converted to 'k8s-pod/tekton_dev/taskRun'.
      log_filter = ('(labels."k8s-pod/tekton.dev/taskRun"="{build_id}" OR '
                    'labels."k8s-pod/tekton_dev/taskRun"="{build_id}") AND '
                    'timestamp>="{timestamp}"').format(
                        build_id=self.build_id, timestamp=self.timestamp)
    output_logs = common.FetchLogs(
        log_filter=log_filter, order_by='asc', parent=parent)
    self._PrintFirstLine()
    for output in output_logs:
      text = self._ValidateScreenReader(output.textPayload)
      self._PrintLogLine(text)
    self._PrintLastLine()
class GCSLogTailer(TailerBase):
  """Helper class to tail a GCS logfile, printing content as available."""

  LOG_OUTPUT_INCOMPLETE = ' (possibly incomplete) '

  def __init__(self, bucket, obj, out=log.status):
    self.transport = RequestsLogTailer()
    self.url = self._StorageUrl(bucket, obj)
    log.debug('GCS logfile url is ' + self.url)
    # position in the file being read
    self.cursor = 0
    self.out = out
    # Set by Stop(); Tail() exits its polling loop once True.
    self.stop = False

  def _StorageUrl(self, bucket, obj):
    """Build the object's storage URL, honoring universe domain and mTLS."""
    universe_domain = properties.VALUES.core.universe_domain.Get()
    url_pattern = 'https://storage.{universe_domain}/{bucket}/{obj}'
    if properties.VALUES.context_aware.use_client_certificate.GetBool():
      # mTLS is enabled.
      url_pattern = 'https://storage.mtls.{universe_domain}/{bucket}/{obj}'
    return url_pattern.format(
        universe_domain=universe_domain, bucket=bucket, obj=obj)

  @classmethod
  def FromBuild(cls, build, out=log.out):
    """Build a GCSLogTailer from a build resource.

    Args:
      build: Build resource, The build whose logs shall be streamed.
      out: The output stream to write the logs to.

    Raises:
      NoLogsBucketException: If the build does not specify a logsBucket.

    Returns:
      GCSLogTailer, the tailer of this build's logs.
    """
    if not build.logsBucket:
      raise NoLogsBucketException()
    # remove gs:// prefix from bucket
    log_stripped = build.logsBucket
    gcs_prefix = 'gs://'
    if log_stripped.startswith(gcs_prefix):
      log_stripped = log_stripped[len(gcs_prefix):]
    # logsBucket may include a directory prefix after the bucket name.
    if '/' not in log_stripped:
      log_bucket = log_stripped
      log_object_dir = ''
    else:
      [log_bucket, log_object_dir] = log_stripped.split('/', 1)
      log_object_dir += '/'
    log_object = '{object}log-{id}.txt'.format(
        object=log_object_dir,
        id=build.id,
    )
    return cls(
        bucket=log_bucket,
        obj=log_object,
        out=out)

  def Poll(self, is_last=False):
    """Poll the GCS object and print any new bytes to the console.

    Args:
      is_last: True if this is the final poll operation.

    Raises:
      api_exceptions.HttpError: if there is trouble connecting to GCS.
      api_exceptions.CommunicationError: if there is trouble reaching the server
        and is_last=True.
    """
    try:
      res = self.transport.Request(self.url, self.cursor)
    except api_exceptions.CommunicationError:
      # Sometimes this request fails due to read timeouts (b/121307719). When
      # this happens we should just proceed and rely on the next poll to pick
      # up any missed logs. If this is the last request, there won't be another
      # request, and we can just fail.
      if is_last:
        raise
      return
    if res.status == 404:  # Not Found
      # Logfile hasn't been written yet (ie, build hasn't started).
      log.debug('Reading GCS logfile: 404 (no log yet; keep polling)')
      return
    if res.status == 416:  # Requested Range Not Satisfiable
      # We have consumed all available data. We'll get this a lot as we poll.
      log.debug('Reading GCS logfile: 416 (no new content; keep polling)')
      if is_last:
        self._PrintLastLine()
      return
    if res.status == 206 or res.status == 200:  # Partial Content
      # New content available. Print it!
      log.debug('Reading GCS logfile: {code} (read {count} bytes)'.format(
          code=res.status, count=len(res.body)))
      if self.cursor == 0:
        self._PrintFirstLine()
      # Advance by raw byte count (the Range header is byte-based), not by
      # decoded character count.
      self.cursor += len(res.body)
      decoded = encoding.Decode(res.body)
      if decoded is not None:
        decoded = self._ValidateScreenReader(decoded)
        self._PrintLogLine(decoded.rstrip('\n'))
      if is_last:
        self._PrintLastLine()
      return
    # For 429/503, there isn't much to do other than retry on the next poll.
    # If we get a 429 after the build has completed, the user may get incomplete
    # logs. This is expected to be rare enough to not justify building a complex
    # exponential retry system.
    if res.status == 429:  # Too Many Requests
      log.warning('Reading GCS logfile: 429 (server is throttling us)')
      if is_last:
        self._PrintLastLine(self.LOG_OUTPUT_INCOMPLETE)
      return
    if res.status >= 500 and res.status < 600:  # Server Error
      log.warning('Reading GCS logfile: got {0}, retrying'.format(res.status))
      if is_last:
        self._PrintLastLine(self.LOG_OUTPUT_INCOMPLETE)
      return
    # Default: any other codes are treated as errors.
    headers = dict(res.headers)
    headers['status'] = res.status
    raise api_exceptions.HttpError(headers, res.body, self.url)

  def Tail(self):
    """Tail the GCS object and print any new bytes to the console."""
    while not self.stop:
      self.Poll()
      time.sleep(1)
    # Poll the logs one final time to ensure we have everything. We know this
    # final poll will get the full log contents because GCS is strongly
    # consistent and Cloud Build waits for logs to finish pushing before
    # marking the build complete.
    self.Poll(is_last=True)

  def Stop(self):
    """Stop log tailing."""
    self.stop = True

  def Print(self):
    """Print GCS logs to the console."""
    self.Poll(is_last=True)
class ThreadInterceptor(threading.Thread):
  """Wrapper to intercept thread exceptions.

  Exceptions raised by the target are stored on self.exception instead of
  killing the thread silently; the joiner re-raises them (see
  CloudBuildClient.Stream).
  """

  def __init__(self, target):
    super(ThreadInterceptor, self).__init__()
    # Callable executed on the thread; typically a tailer's Tail method.
    self.target = target
    # Captured exception, or None if the target finished cleanly.
    self.exception = None

  def run(self):
    try:
      self.target()
    except api_exceptions.HttpError as e:
      if e.status_code == 403:
        # The only way to successfully create a build and then be unable to read
        # the logs bucket is if you are using the default logs bucket and
        # VPC-SC.
        self.exception = DefaultLogsBucketIsOutsideSecurityPerimeterException()
      else:
        self.exception = e
    except api_exceptions.CommunicationError as e:
      self.exception = e
class CloudBuildClient(object):
  """Client for interacting with the Cloud Build API (and Cloud Build logs)."""

  def __init__(self,
               client=None,
               messages=None,
               support_gcl=False,
               polling_interval=1):
    # support_gcl: whether Stream() may tail from Google Cloud Logging when the
    # build's logging mode is STACKDRIVER_ONLY / CLOUD_LOGGING_ONLY.
    # polling_interval: seconds between build status polls.
    self.client = client or cloudbuild_util.GetClientInstance()
    self.messages = messages or cloudbuild_util.GetMessagesModule()
    self.support_gcl = support_gcl
    self.polling_interval = polling_interval

  def GetBuild(self, build_ref):
    """Get a Build message.

    Args:
      build_ref: Build reference. Expects a cloudbuild.projects.locations.builds
        but also supports cloudbuild.projects.builds.

    Returns:
      Build resource
    """
    # Legacy build_refs (for cloudbuild.projects.builds) don't have a location
    # attached. Convert to the expected type and add the default location.
    if build_ref.Collection() == 'cloudbuild.projects.builds':
      build_ref = resources.REGISTRY.Create(
          collection='cloudbuild.projects.locations.builds',
          projectsId=build_ref.projectId,
          locationsId=cloudbuild_util.DEFAULT_REGION,
          buildsId=build_ref.id)
    return self.client.projects_locations_builds.Get(
        self.messages.CloudbuildProjectsLocationsBuildsGetRequest(
            name=build_ref.RelativeName()))

  def ShouldStopTailer(self, build, build_ref, log_tailer, working_statuses):
    """Checks whether a log tailer should be stopped.

    Polls the build until its status leaves working_statuses, then stops the
    tailer (if any).

    Args:
      build: Build object, containing build status
      build_ref: Build reference, The build whose logs shall be streamed.
      log_tailer: Specific log tailer object
      working_statuses: Valid working statuses that define we should continue
        tailing

    Returns:
      Build message, the completed or terminated build.
    """
    log.status.Print('Waiting for build to complete. Polling interval: ' +
                     str(self.polling_interval) + ' second(s).')
    while build.status in working_statuses:
      build = self.GetBuild(build_ref)
      time.sleep(self.polling_interval)
    if log_tailer:
      log_tailer.Stop()
    return build

  def Stream(self, build_ref, out=log.out):
    """Streams the logs for a build if available.

    Regardless of whether logs are available for streaming, awaits build
    completion before returning.

    Args:
      build_ref: Build reference, The build whose logs shall be streamed.
      out: The output stream to write the logs to.

    Raises:
      NoLogsBucketException: If the build is expected to specify a logsBucket
        but does not.

    Returns:
      Build message, the completed or terminated build.
    """
    build = self.GetBuild(build_ref)
    # Default (options unset, or any GCS-backed logging mode): tail from GCS.
    if not build.options or build.options.logging not in [
        self.messages.BuildOptions.LoggingValueValuesEnum.NONE,
        self.messages.BuildOptions.LoggingValueValuesEnum.STACKDRIVER_ONLY,
        self.messages.BuildOptions.LoggingValueValuesEnum.CLOUD_LOGGING_ONLY,
    ]:
      log_tailer = GCSLogTailer.FromBuild(build, out=out)
    elif build.options.logging in [
        self.messages.BuildOptions.LoggingValueValuesEnum.STACKDRIVER_ONLY,
        self.messages.BuildOptions.LoggingValueValuesEnum.CLOUD_LOGGING_ONLY,
    ] and self.support_gcl:
      log.info('Streaming logs from GCL: requested logging mode is {0}.'.format(
          build.options.logging))
      log_tailer = GCLLogTailer.FromBuild(build, out=out)
    else:
      # Logging is NONE, or GCL-only without support_gcl: nothing to stream.
      log.info('Not streaming logs: requested logging mode is {0}.'.format(
          build.options.logging))
      log_tailer = None
    statuses = self.messages.Build.StatusValueValuesEnum
    working_statuses = [
        statuses.QUEUED,
        statuses.WORKING,
    ]
    t = None
    if log_tailer:
      # Tail on a side thread while the main thread polls build status.
      t = ThreadInterceptor(target=log_tailer.Tail)
      t.start()
    build = self.ShouldStopTailer(build, build_ref, log_tailer,
                                  working_statuses)
    if t:
      t.join()
      # Surface any exception the tailer thread captured.
      if t.exception is not None:
        raise t.exception
    return build

  def PrintLog(self, build_ref):
    """Print the logs for a build.

    Args:
      build_ref: Build reference, The build whose logs shall be streamed.

    Raises:
      NoLogsBucketException: If the build does not specify a logsBucket.
    """
    build = self.GetBuild(build_ref)
    if not build.options or build.options.logging not in [
        self.messages.BuildOptions.LoggingValueValuesEnum.NONE,
        self.messages.BuildOptions.LoggingValueValuesEnum.STACKDRIVER_ONLY,
        self.messages.BuildOptions.LoggingValueValuesEnum.CLOUD_LOGGING_ONLY,
    ]:
      log_tailer = GCSLogTailer.FromBuild(build)
    elif build.options.logging in [
        self.messages.BuildOptions.LoggingValueValuesEnum.STACKDRIVER_ONLY,
        self.messages.BuildOptions.LoggingValueValuesEnum.CLOUD_LOGGING_ONLY,
    ]:
      log.info('Printing logs from GCL: requested logging mode is {0}.'.format(
          build.options.logging))
      log_tailer = GCLLogTailer.FromBuild(build)
    else:
      log.info('Logs not available: build logging mode is {0}.'.format(
          build.options.logging))
      log_tailer = None
    if log_tailer:
      log_tailer.Print()

View File

@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Build CSI metric names."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# Reserved CSI metric prefix for cloudbuild
_CLOUDBUILD_PREFIX = 'cloudbuild_'
# Duration metric recorded while uploading a build's source snapshot to GCS.
# (The previous comment, "Time to create a configuration", did not match the
# metric name or its usage.)
UPLOAD_SOURCE = _CLOUDBUILD_PREFIX + 'upload_source'

View File

@@ -0,0 +1,138 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Move local source snapshots to GCP."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import os.path
import tarfile
import zipfile
from googlecloudsdk.api_lib.cloudbuild import metric_names
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.command_lib.util import gcloudignore
from googlecloudsdk.core import log
from googlecloudsdk.core import metrics
from googlecloudsdk.core.util import files
# Status message shown when gcloudignore caused files to be skipped from the
# uploaded source; {log_file} is filled with the gcloud log path.
_IGNORED_FILE_MESSAGE = """\
Some files were not included in the source upload.
Check the gcloud log [{log_file}] to see which files and the contents of the
default gcloudignore file used (see `$ gcloud topic gcloudignore` to learn
more).
"""
def _ResetOwnership(tarinfo):
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = 'root'
return tarinfo
class Snapshot(storage_util.Snapshot):
  """Snapshot is a manifest of the source in a directory.

  Provides archiving (tar.gz or zip) and upload of the snapshot's files and
  directories (self.files / self.dirs, populated by the base class) to GCS.
  """

  def _MakeTarball(self, archive_path):
    """Constructs a tarball of snapshot contents.

    Args:
      archive_path: Path to place tar file.

    Returns:
      tarfile.TarFile, The constructed tar file.
    """
    tf = tarfile.open(archive_path, mode='w:gz')
    for dpath in self.dirs:
      t = tf.gettarinfo(dpath)
      if os.path.islink(dpath):
        t.type = tarfile.SYMTYPE
        t.linkname = os.readlink(dpath)
      elif os.path.isdir(dpath):
        t.type = tarfile.DIRTYPE
      else:
        # Fall back to treating the entry as a directory so the archive stays
        # consistent with self.dirs.
        log.debug(
            'Adding [%s] as dir; os.path says is neither a dir nor a link.',
            dpath)
        t.type = tarfile.DIRTYPE
      t.mode = os.stat(dpath).st_mode
      # Ownership is reset to root for reproducible archives.
      tf.addfile(_ResetOwnership(t))
      log.debug('Added dir [%s]', dpath)
    for path in self.files:
      tf.add(path, filter=_ResetOwnership)
      log.debug('Added [%s]', path)
    return tf

  def _MakeZipFile(self, archive_path):
    """Constructs a ZIP archive (deflate-compressed) of snapshot contents.

    Args:
      archive_path: Path to place the zip file.
    """
    zip_file = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
    try:
      for dpath in self.dirs:
        zip_file.write(dpath)
      for path in self.files:
        zip_file.write(path)
    finally:
      zip_file.close()

  def CopyArchiveToGCS(
      self, storage_client, gcs_object, ignore_file=None, hide_logs=False
  ):
    """Copy an archive of the snapshot to GCS.

    Args:
      storage_client: storage_api.StorageClient, The storage client to use for
        uploading.
      gcs_object: storage.objects Resource, The GCS object to write. A '.zip'
        object name selects zip format; anything else produces a gzipped tar.
      ignore_file: Override .gcloudignore file to specify skip files.
      hide_logs: boolean, not print the status message if the flag is true.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
    with metrics.RecordDuration(metric_names.UPLOAD_SOURCE):
      with files.ChDir(self.src_dir):
        with files.TemporaryDirectory() as tmp:
          if gcs_object.Name().endswith('.zip'):
            archive_path = os.path.join(tmp, 'file.zip')
            self._MakeZipFile(archive_path)
          else:
            archive_path = os.path.join(tmp, 'file.tgz')
            tf = self._MakeTarball(archive_path)
            tf.close()
          ignore_file_path = os.path.join(
              self.src_dir, ignore_file or gcloudignore.IGNORE_FILE_NAME)
          if self.any_files_ignored:
            if os.path.exists(ignore_file_path):
              log.info('Using ignore file [{}]'.format(ignore_file_path))
            elif not hide_logs:
              log.status.Print(
                  _IGNORED_FILE_MESSAGE.format(log_file=log.GetLogFilePath()))
          if not hide_logs:
            file_type = (
                'zipfile' if gcs_object.Name().endswith('.zip') else 'tarball'
            )
            log.status.write(
                'Uploading {file_type} of [{src_dir}] to '
                '[gs://{bucket}/{object}]\n'.format(
                    file_type=file_type,
                    src_dir=self.src_dir,
                    bucket=gcs_object.bucket,
                    object=gcs_object.object,
                ),
            )
          return storage_client.CopyFileToGCS(archive_path, gcs_object)

View File

@@ -0,0 +1,251 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""CloudBuild resource transforms and symbols dict.
A resource transform function converts a JSON-serializable resource to a string
value. This module contains built-in transform functions that may be used in
resource projection and filter expressions.
NOTICE: Each TransformFoo() method is the implementation of a foo() transform
function. Even though the implementation here is in Python the usage in resource
projection and filter expressions is language agnostic. This affects the
Pythonicness of the Transform*() methods:
(1) The docstrings are used to generate external user documentation.
(2) The method prototypes are included in the documentation. In particular the
prototype formal parameter names are stylized for the documentation.
(3) The 'r', 'kwargs', and 'projection' args are not included in the external
documentation. Docstring descriptions, other than the Args: line for the
arg itself, should not mention these args. Assume the reader knows the
specific item the transform is being applied to. When in doubt refer to
the output of $ gcloud topic projections.
(4) The types of some args, like r, are not fixed until runtime. Other args
may have either a base type value or string representation of that type.
It is up to the transform implementation to silently do the string=>type
conversions. That's why you may see e.g. int(arg) in some of the methods.
(5) Unless it is documented to do so, a transform function must not raise any
exceptions related to the resource r. The `undefined' arg is used to
handle all unusual conditions, including ones that would raise exceptions.
Exceptions for arguments explicitly under the caller's control are OK.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding as apitools_encoding
from googlecloudsdk.api_lib.container.fleet import client as hub_client
from googlecloudsdk.api_lib.util import apis as core_apis
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.resource import resource_transform
def TransformBuildImages(r, undefined=''):
  """Returns the formatted build results images.

  Args:
    r: JSON-serializable object.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    The formatted build results images.
  """
  messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
  build = apitools_encoding.DictToMessage(r, messages.Build)
  if build.results is None or not build.results.images:
    return undefined
  # Substitute `undefined` for any image that has no name.
  names = [
      image.name if image.name is not None else undefined
      for image in build.results.images
  ]
  extra_count = len(names) - 1
  if extra_count:
    # Show only the first image and summarize the rest.
    return names[0] + ' (+{0} more)'.format(extra_count)
  return names[0]
def TransformBuildSource(r, undefined=''):
  """Returns the formatted build source.

  Args:
    r: JSON-serializable object.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    The formatted build source.
  """
  messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
  b = apitools_encoding.DictToMessage(r, messages.Build)
  if b.source is None:
    return undefined
  # Source kinds are checked in a fixed priority order; the first populated
  # kind determines the formatted output.
  storage_source = b.source.storageSource
  if storage_source is not None:
    bucket = storage_source.bucket
    obj = storage_source.object
    if bucket is None or obj is None:
      return undefined
    return 'gs://{0}/{1}'.format(bucket, obj)
  repo_source = b.source.repoSource
  if repo_source is not None:
    # A repo source has exactly one of branch/tag/commit as its revision.
    repo_name = repo_source.repoName or 'default'
    branch_name = repo_source.branchName
    if branch_name is not None:
      return '{0}@{1}'.format(repo_name, branch_name)
    tag_name = repo_source.tagName
    if tag_name is not None:
      return '{0}@{1}'.format(repo_name, tag_name)
    commit_sha = repo_source.commitSha
    if commit_sha is not None:
      return '{0}@{1}'.format(repo_name, commit_sha)
  git_source = b.source.gitSource
  if git_source is not None:
    url = git_source.url
    revision = git_source.revision
    # NOTE(review): only `url` is None-checked here, so an unset revision
    # renders as "url@None" — confirm whether that is intended.
    if url is not None:
      return '{0}@{1}'.format(url, revision)
  storage_source_manifest = b.source.storageSourceManifest
  if storage_source_manifest is not None:
    bucket = storage_source_manifest.bucket
    obj = storage_source_manifest.object
    if bucket is None or obj is None:
      return undefined
    return 'gs://{0}/{1}'.format(bucket, obj)
  connected_repository = b.source.connectedRepository
  if connected_repository is not None:
    repository = connected_repository.repository
    revision = connected_repository.revision
    if repository is not None:
      return '{0}@{1}'.format(repository, revision)
  developer_connect_config = b.source.developerConnectConfig
  if developer_connect_config is not None:
    git_repository_link = developer_connect_config.gitRepositoryLink
    revision = developer_connect_config.revision
    if git_repository_link is not None:
      return '{0}@{1}'.format(git_repository_link, revision)
  return undefined
def TransformResultDuration(resource, undefined=''):
  """Returns the formatted result duration.

  Args:
    resource: JSON-serializable object.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    The formatted result duration.
  """
  messages = core_apis.GetMessagesModule('cloudbuild', 'v2')
  result = apitools_encoding.DictToMessage(resource, messages.Result)
  # Only the first record summary is inspected.
  record_data = hub_client.HubClient.ToPyDict(
      result.recordSummaries[0].recordData)
  # The record data exposes either 'completion_time' or 'finish_time' as the
  # end timestamp — presumably depending on the run type; TODO confirm.
  # The trailing positional args are forwarded to
  # resource_transform.TransformDuration — verify their meaning (parts,
  # precision, calendar, unit, undefined) against that function's signature.
  if 'completion_time' in record_data:
    return resource_transform.TransformDuration(record_data, 'start_time',
                                                'completion_time', 3, 0, False,
                                                1, '-')
  if 'finish_time' in record_data:
    return resource_transform.TransformDuration(record_data, 'start_time',
                                                'finish_time', 3, 0, False, 1,
                                                '-')
  return undefined
def TransformResultStatus(resource, undefined=''):
  """Returns the formatted result status.

  Args:
    resource: JSON-serializable object.
    undefined: Returns this value if the resource cannot be formatted.

  Returns:
    The formatted result status.
  """
  messages = core_apis.GetMessagesModule('cloudbuild', 'v2')
  result = apitools_encoding.DictToMessage(resource, messages.Result)
  # Only the first record summary is inspected.
  record_summary = result.recordSummaries[0]
  record_data = hub_client.HubClient.ToPyDict(record_summary.recordData)
  # An explicitly-set summary status always wins.
  if record_summary.status is not None:
    return record_summary.status
  # NOTE(review): presence of a *_run_status key with no summary status is
  # treated as CANCELLED — confirm this invariant with the Results API.
  if 'pipeline_run_status' in record_data or 'task_run_status' in record_data:
    return 'CANCELLED'
  # Get the status from conditions if record_summary.status is not set.
  succeeded_status = ''
  dequeued_status = ''
  # There are two types of conditions, 'Succeeded' and 'Dequeued'.
  for index in (0, 1):
    condition_type = record_data.get(f'conditions[{index}].type')
    if condition_type == 'Succeeded':
      succeeded_status = record_data.get(f'conditions[{index}].status')
    elif condition_type == 'Dequeued':
      dequeued_status = record_data.get(f'conditions[{index}].status')
  # 'Succeeded' takes precedence over 'Dequeued' when both are present.
  if succeeded_status == 'TRUE':
    return 'SUCCESS'
  if succeeded_status == 'FALSE':
    return 'FAILURE'
  if dequeued_status == 'TRUE':
    return 'IN_PROGRESS'
  if dequeued_status == 'FALSE':
    return 'QUEUED'
  return undefined
def _GetUri(resource, undefined=None):
  """Returns the self link for a Build or BuildTrigger, else `undefined`."""
  messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
  if isinstance(resource, messages.Build):
    ref = resources.REGISTRY.Parse(
        None,
        params={'projectId': resource.projectId, 'id': resource.id},
        collection='cloudbuild.projects.builds')
  elif isinstance(resource, messages.BuildTrigger):
    # Triggers do not carry a project, so fall back to the active project.
    project = properties.VALUES.core.project.Get(required=True)
    ref = resources.REGISTRY.Parse(
        None,
        params={'projectId': project, 'triggerId': resource.id},
        collection='cloudbuild.projects.triggers')
  else:
    return undefined
  return ref.SelfLink() or undefined
# Symbol table for the resource projection/filter engine: maps transform
# names usable in --format/--filter expressions to their implementations.
_TRANSFORMS = {
    'build_images': TransformBuildImages,
    'build_source': TransformBuildSource,
    'result_duration': TransformResultDuration,
    'result_status': TransformResultStatus,
    'uri': _GetUri,
}
def GetTransforms():
  """Returns the cloudbuild specific resource transform symbol table."""
  return _TRANSFORMS

View File

@@ -0,0 +1,201 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the cloudbuild v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.core import resources
from googlecloudsdk.core.resource import resource_property
_API_NAME = 'cloudbuild'
GA_API_VERSION = 'v2'
RELEASE_TRACK_TO_API_VERSION = {
base.ReleaseTrack.GA: GA_API_VERSION,
base.ReleaseTrack.BETA: GA_API_VERSION,
base.ReleaseTrack.ALPHA: GA_API_VERSION,
}
CLUSTER_NAME_SELECTOR = r'projects/.*/locations/.*/memberships/(.*)'
WORKERPOOL_SECOND_GEN_NAME_MATCHER = (
r'projects/.*/locations/.*/workerPoolSecondGen/.*'
)
WORKERPOOL_SECOND_GEN_NAME_SELECTOR = (
r'projects/.*/locations/.*/workerPoolSecondGen/(.*)'
)
def GetMessagesModule(release_track=base.ReleaseTrack.GA):
  """Returns the messages module for Cloud Build.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.

  Returns:
    Module containing the definitions of messages for Cloud Build.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetMessagesModule(_API_NAME, api_version)
def GetClientInstance(release_track=base.ReleaseTrack.GA, use_http=True):
  """Returns an instance of the Cloud Build client.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.
    use_http: bool, True to create an http object for this client.

  Returns:
    base_api.BaseApiClient, An instance of the Cloud Build client.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientInstance(_API_NAME, api_version, no_http=not use_http)
def GetRun(project, region, run_id, run_type):
  """Get a PipelineRun/TaskRun.

  Args:
    project: The id of the project owning the run.
    region: The location of the run.
    run_id: The id of the run to fetch.
    run_type: Either 'pipelinerun' or 'taskrun'. Any other value silently
      returns None.

  Returns:
    The fetched PipelineRun or TaskRun message, or None for an unknown
    run_type.
  """
  client = GetClientInstance()
  messages = GetMessagesModule()
  if run_type == 'pipelinerun':
    pipeline_run_resource = resources.REGISTRY.Parse(
        run_id,
        collection='cloudbuild.projects.locations.pipelineRuns',
        api_version='v2',
        params={
            'projectsId': project,
            'locationsId': region,
            'pipelineRunsId': run_id,
        })
    pipeline_run = client.projects_locations_pipelineRuns.Get(
        messages.CloudbuildProjectsLocationsPipelineRunsGetRequest(
            name=pipeline_run_resource.RelativeName(),))
    return pipeline_run
  elif run_type == 'taskrun':
    task_run_resource = resources.REGISTRY.Parse(
        run_id,
        collection='cloudbuild.projects.locations.taskRuns',
        api_version='v2',
        params={
            'projectsId': project,
            'locationsId': region,
            'taskRunsId': run_id,
        })
    task_run = client.projects_locations_taskRuns.Get(
        messages.CloudbuildProjectsLocationsTaskRunsGetRequest(
            name=task_run_resource.RelativeName(),))
    return task_run
def ClusterShortName(resource_name):
  """Get the name part of a cluster membership's full resource name.

  For example, "projects/123/locations/global/memberships/cluster2" returns
  "cluster2".

  Args:
    resource_name: A cluster's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The cluster's short name.
  """
  match = re.search(CLUSTER_NAME_SELECTOR, resource_name)
  if not match:
    raise ValueError('The cluster membership resource name must match "%s"' %
                     (CLUSTER_NAME_SELECTOR,))
  return match.group(1)
def ListLocations(project):
  """Get the list of supported Cloud Build locations.

  Args:
    project: The project to search.

  Returns:
    The response of the projects.locations List call.
  """
  client = GetClientInstance()
  messages = GetMessagesModule()
  request = messages.CloudbuildProjectsLocationsListRequest(
      name='projects/{}'.format(project))
  return client.projects_locations.List(request)
def WorkerPoolSecondGenShortName(resource_name):
  """Get the name part of a worker pool second gen's full resource name.

  E.g. "projects/abc/locations/def/workerPoolSecondGen/ghi" returns "ghi".

  Args:
    resource_name: A worker pool second gen's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The worker pool's short name.
  """
  match = re.search(WORKERPOOL_SECOND_GEN_NAME_SELECTOR, resource_name)
  if not match:
    raise ValueError('The worker pool second gen resource name must match "%s"'
                     % (WORKERPOOL_SECOND_GEN_NAME_MATCHER,))
  return match.group(1)
def MessageToFieldPaths(msg):
  """Produce field paths from a message object.

  The result is used to create a FieldMask proto message that contains all
  field paths presented in the object.
  https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto

  Args:
    msg: A user defined message object that extends the messages.Message
      class.
      https://github.com/google/apitools/blob/master/apitools/base/protorpclite/messages.py

  Returns:
    The list of field paths.
  """
  paths = []
  for field in msg.all_fields():
    value = msg.get_assigned_value(field.name)
    # Skip unassigned fields and repeated fields initialized to empty lists.
    if value is None or (field.repeated and not value):
      continue
    snake_name = resource_property.ConvertToSnakeCase(field.name)
    if hasattr(value, 'all_fields'):
      # Sub-message: recurse and prefix each nested path with this field.
      paths.extend(
          '{}.{}'.format(snake_name, sub_path)
          for sub_path in MessageToFieldPaths(value))
    else:
      paths.append(snake_name)
  return paths

View File

@@ -0,0 +1,220 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from typing import MutableMapping
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.core import yaml
_DC_GIT_REPO_LINK_PAT = re.compile("^projects/[^/]+/locations/[^/]+/connections"
"/[^/]+/gitRepositoryLinks/[^/]+$")
_PUB_SUB_TOPIC_PAT = re.compile("^projects/[^/]+/topics/[^/]+$")
def SetDictDottedKeyUpperCase(input_dict, dotted_key):
  """Upper-cases the string value at a dot-separated path in a nested dict.

  Walks input_dict along the segments of dotted_key (e.g. "a.b.c") and, if
  the final key exists, replaces its value with value.upper(). A missing
  segment is a no-op. Previously a non-dict intermediate value raised an
  AttributeError; it is now also treated as a no-op.

  Args:
    input_dict: Nested dictionary to mutate in place.
    dotted_key: Dot-separated path of the value to upper-case.
  """
  *parents, last = dotted_key.split(".")
  for segment in parents:
    if not isinstance(input_dict, dict) or segment not in input_dict:
      return
    input_dict = input_dict[segment]
  if isinstance(input_dict, dict) and last in input_dict:
    input_dict[last] = input_dict[last].upper()
def LoadYamlFromPath(path):
  """Loads a yaml file, requiring its top level to be a mapping.

  Round-trip mode preserves ordering and quoting so the data can be
  re-serialized faithfully later.

  Args:
    path: Path of the yaml file to load.

  Returns:
    The parsed yaml data as a dict-like object.

  Raises:
    ParserError: The file could not be parsed or is not a dictionary.
  """
  try:
    data = yaml.load_path(path, round_trip=True, preserve_quotes=True)
  except yaml.Error as e:
    raise cloudbuild_exceptions.ParserError(path, e.inner_error)
  if not yaml.dict_like(data):
    raise cloudbuild_exceptions.ParserError(path,
                                            "Could not parse as a dictionary.")
  return data
def CamelToSnake(data):
  """Converts camelCase to snake_case; runs of capitals stay grouped."""
  underscored = re.sub(r"([A-Z]+)", lambda m: "_" + m.group(1), data)
  return underscored.lower().lstrip("_")
def UnrecognizedFields(message):
  """Raises InvalidYamlError if the message has unrecognized yaml fields."""
  unknown_fields = message.all_unrecognized_fields()
  if not unknown_fields:
    return
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unrecognized fields in yaml: {f}".format(
          f=", ".join(unknown_fields)))
def WorkflowTriggerTransform(trigger):
  """Transform workflow trigger according to the proto.

  Refer to:
  * go/gcb-v2-filters
  * go/re-scope-workflow-resources-to-triggers-only
  to understand more details.

  Args:
    trigger: the trigger defined in the workflow YAML; mutated in place.

  Raises:
    InvalidYamlError: The eventType was unsupported.
  """
  trigger["id"] = trigger.pop("name")
  # NOTE(review): the inner pop is evaluated first (it is the default
  # argument), so "eventSource" is ALWAYS removed from the trigger even when
  # "source" is present and wins — confirm that this is intentional.
  eventsource = trigger.pop("source", trigger.pop("eventSource", ""))
  if not eventsource:
    raise cloudbuild_exceptions.InvalidYamlError("Empty event source")
  # Classify the event source by its shape: Pub/Sub topic, Developer Connect
  # git repository link, plain URL, or webhook.
  if re.match(_PUB_SUB_TOPIC_PAT, eventsource):
    trigger["source"] = {"topic": eventsource}
  elif re.match(_DC_GIT_REPO_LINK_PAT, eventsource):
    trigger["source"] = {"gitRepoLink": eventsource}
  elif eventsource.startswith("https://"):
    trigger["source"] = {"url": eventsource}
  elif eventsource == "webhook":
    # Webhook triggers keep no "source" entry; they only require a secret.
    if not trigger.get("webhookValidationSecret", ""):
      raise cloudbuild_exceptions.InvalidYamlError(
          "Webhook trigger requires a webhookValidationSecret")
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "Unsupported event source: {eventsource}".format(
            eventsource=eventsource
        )
    )
  # Map the YAML event type spellings onto the proto enum values.
  event_type_mapping = {
      "branch-push": "PUSH_BRANCH",
      "tag-push": "PUSH_TAG",
      "pull-request": "PULL_REQUEST",
      "any": "ALL",
  }
  if "eventType" in trigger:
    event_type = trigger.pop("eventType")
    mapped_event_type = event_type_mapping.get(event_type)
    if mapped_event_type is not None:
      trigger["eventType"] = mapped_event_type
    else:
      raise cloudbuild_exceptions.InvalidYamlError(
          ("Unsupported event type: {event_type}. "
           "Supported: {event_types}").format(
               event_type=event_type,
               event_types=",".join(event_type_mapping.keys())))
  # Hoist filter entries to the top level of the trigger.
  for key, value in trigger.pop("filters", {}).items():
    trigger[key] = value
  if "gitRef" in trigger and "regex" in trigger["gitRef"]:
    trigger["gitRef"]["nameRegex"] = trigger["gitRef"].pop("regex")
  ParamDictTransform(trigger.get("params", []))
def _ConvertToUpperCase(input_map: MutableMapping[str, str], key: str):
if key in input_map:
input_map[key] = input_map[key].upper()
def ParamSpecTransform(param_spec):
  """Normalizes a param spec in place: type case and default value form."""
  _ConvertToUpperCase(param_spec, "type")
  if "default" in param_spec:
    param_spec["default"] = ParamValueTransform(param_spec["default"])
def PipelineResultTransform(pipeline_result):
  """Rewrites a pipeline result's value into GCB ResultValue form, in place."""
  if "value" not in pipeline_result:
    return
  pipeline_result["value"] = ResultValueTransform(pipeline_result["value"])
def TaskStepTransform(task_step):
  """Normalizes a Tekton task step in place: ref, params, and onError."""
  if "ref" in task_step:
    RefTransform(task_step["ref"])
  ParamDictTransform(task_step.get("params", []))
  if "onError" in task_step:
    OnErrorTransform(task_step)
def OnErrorTransform(data):
  """Maps a Tekton onError value onto the GCB enum spelling, in place.

  Raises:
    InvalidYamlError: data["onError"] is neither "continue" nor
      "stopAndFail".
  """
  enum_by_value = {"continue": "CONTINUE", "stopAndFail": "STOP_AND_FAIL"}
  mapped = enum_by_value.get(data["onError"])
  if mapped is None:
    raise cloudbuild_exceptions.InvalidYamlError(
        "Unsupported onError value: {value}. Supported: continue, stopAndFail"
        .format(value=data["onError"])
    )
  data["onError"] = mapped
def TaskResultTransform(task_result):
  """Normalizes a task result in place: type case, properties, and value."""
  _ConvertToUpperCase(task_result, "type")
  result_properties = task_result.get("properties", [])
  for property_name in result_properties:
    PropertySpecTransform(result_properties[property_name])
  if "value" in task_result:
    task_result["value"] = ParamValueTransform(task_result["value"])
def PropertySpecTransform(property_spec):
  """Mutates the given property spec from Tekton to GCB format.

  Upper-cases the "type" entry in place; no-op when the key is absent.

  Args:
    property_spec: A Tekton-compliant property spec.
  """
  if "type" in property_spec:
    property_spec["type"] = property_spec["type"].upper()
def ParamDictTransform(params):
  """Rewrites every param's value into the GCB ParamValue form, in place."""
  for entry in params:
    entry["value"] = ParamValueTransform(entry["value"])
def ParamValueTransform(param_value):
  """Wraps a scalar or list param value in the GCB ParamValue dict form.

  Args:
    param_value: A str/float/int scalar or a list.

  Returns:
    A dict with "type" STRING or ARRAY and the matching value entry.

  Raises:
    InvalidYamlError: The value is neither a supported scalar nor a list.
  """
  if isinstance(param_value, (str, float, int)):
    return {"type": "STRING", "stringVal": str(param_value)}
  if isinstance(param_value, list):
    return {"type": "ARRAY", "arrayVal": param_value}
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unsupported param value type. {msg_type}".format(
          msg_type=type(param_value)))
def ResultValueTransform(result_value):
  """Transforms the string result value from Tekton to GCB resultValue struct.

  Args:
    result_value: A str/float/int scalar, a list, or a dict.

  Returns:
    A dict with "type" STRING, ARRAY, or OBJECT and the matching value entry.

  Raises:
    InvalidYamlError: The value is none of the supported kinds.
  """
  if isinstance(result_value, (str, float, int)):
    return {"type": "STRING", "stringVal": str(result_value)}
  if isinstance(result_value, list):
    return {"type": "ARRAY", "arrayVal": result_value}
  # The previous check here was `isinstance(result_value, object)`, which is
  # true for every Python value and made the error branch below unreachable
  # (e.g. None was silently classified as OBJECT). Tekton object results are
  # mappings, so test for dict explicitly.
  if isinstance(result_value, dict):
    return {"type": "OBJECT", "objectVal": result_value}
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unsupported param value type. {msg_type}".format(
          msg_type=type(result_value)
      )
  )
def RefTransform(ref):
  """Upper-cases a ref's resolver and normalizes its params, in place."""
  if "resolver" in ref:
    resolver_name = ref.pop("resolver")
    ref["resolver"] = resolver_name.upper()
  ParamDictTransform(ref.get("params", []))

View File

@@ -0,0 +1,214 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manage and stream logs in-progress or completed PipelineRun/TaskRun."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import time
from googlecloudsdk.api_lib.cloudbuild import logs as v1_logs_util
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util as v2_client_util
from googlecloudsdk.api_lib.logging import common
from googlecloudsdk.core import log
class GCLLogTailer(v1_logs_util.TailerBase):
  """Helper class to tail logs from GCL, printing content as available."""
  # Log bucket used by runs on a TiPP worker pool.
  CLOUDBUILD_BUCKET = 'cloudbuild'
  # Log view exposing all entries in a bucket.
  ALL_LOGS_VIEW = '_AllLogs'
  def __init__(
      self, project, location, log_filter, has_tipp_pool, out=log.status
  ):
    """Initializes the tailer; see FromFilter for the usual entry point."""
    # Live tailing client for the Logging API.
    self.tailer = v1_logs_util.GetGCLLogTailer()
    self.log_filter = log_filter
    self.project = project
    self.location = location
    # Runs without a TiPP pool log to the project's global _Default bucket.
    self.default_log_view = (
        'projects/{project_id}/locations/global/buckets/_Default/views/{view}'
    ).format(project_id=self.project, view=self.ALL_LOGS_VIEW)
    # Runs on a TiPP pool log to the regional 'cloudbuild' bucket instead.
    self.workerpool_log_view = 'projects/{project_id}/locations/{location}/buckets/{bucket}/views/{view}'.format(
        project_id=self.project,
        location=self.location,
        bucket=self.CLOUDBUILD_BUCKET,
        view=self.ALL_LOGS_VIEW)
    self.has_tipp_pool = has_tipp_pool
    self.out = out
    # Seconds the tailing API may buffer entries; Stop() waits this long so
    # buffered lines get flushed before the stream is closed.
    self.buffer_window_seconds = 2
  @classmethod
  def FromFilter(
      cls, project, location, log_filter, has_tipp_pool, out=log.out
  ):
    """Build a GCLLogTailer from a log filter."""
    # NOTE(review): this default is log.out while __init__ defaults to
    # log.status — confirm the asymmetry is intentional.
    return cls(
        project=project,
        log_filter=log_filter,
        location=location,
        has_tipp_pool=has_tipp_pool,
        out=out,
    )
  def Tail(self):
    """Tail the GCL logs and print any new bytes to the console."""
    if not self.tailer:
      return
    # Pick the log view matching where this run writes its logs.
    if self.has_tipp_pool:
      resource_names = [self.workerpool_log_view]
    else:
      resource_names = [self.default_log_view]
    output_logs = self.tailer.TailLogs(
        resource_names,
        self.log_filter,
        buffer_window_seconds=self.buffer_window_seconds,
    )
    self._PrintFirstLine(' REMOTE RUN OUTPUT ')
    for output in output_logs:
      text = self._ValidateScreenReader(output.text_payload)
      self._PrintLogLine(text)
    self._PrintLastLine(' RUN FINISHED; TRUNCATING OUTPUT LOGS ')
    return
  def Stop(self):
    """Stop log tailing."""
    # Sleep to allow the Tailing API to send the last logs it buffered up
    time.sleep(self.buffer_window_seconds)
    if self.tailer:
      self.tailer.Stop()
  def Print(self):
    """Print GCL logs to the console."""
    if self.has_tipp_pool:
      resource_names = [self.workerpool_log_view]
    else:
      resource_names = [self.default_log_view]
    output_logs = common.FetchLogs(
        log_filter=self.log_filter,
        order_by='asc',
        resource_names=resource_names,
    )
    self._PrintFirstLine(' REMOTE RUN OUTPUT ')
    for output in output_logs:
      # FetchLogs yields LogEntry messages (textPayload attribute), unlike
      # the tailing path above which yields objects with text_payload.
      text = self._ValidateScreenReader(output.textPayload)
      self._PrintLogLine(text)
    self._PrintLastLine()
class CloudBuildLogClient(object):
  """Client for interacting with the Cloud Build API (and Cloud Build logs)."""
  def __init__(self, sleep_time=60):
    """Initializes the client.

    Args:
      sleep_time: Seconds to keep tailing after the run completes, since log
        entries can lag behind the run's completion.
    """
    self.v2_client = v2_client_util.GetClientInstance()
    self.sleep_time = sleep_time
  def _GetLogFilter(self, region, run_id, run_type, has_tipp_pool, create_time):
    # Worker-pool (TiPP) runs and regular runs use different log labels.
    if has_tipp_pool:
      return self._GetWorkerPoolLogFilter(create_time, run_id, run_type, region)
    else:
      return self._GetNonWorkerPoolLogFilter(create_time, run_id, region)
  def _GetNonWorkerPoolLogFilter(self, create_time, run_id, region):
    # Restrict to entries at/after run creation, in this region, for this run.
    return (
        'timestamp>="{timestamp}" AND labels.location="{region}" AND'
        ' labels.run_name={run_id}'
    ).format(timestamp=create_time, region=region, run_id=run_id)
  def _GetWorkerPoolLogFilter(self, create_time, run_id, run_type, region):
    run_label = 'taskRun' if run_type == 'taskrun' else 'pipelineRun'
    # Both '.' and '_' label spellings are matched — presumably to cover
    # different Tekton label sanitization behaviors; confirm if needed.
    return (
        '(labels."k8s-pod/tekton.dev/{run_label}"="{run_id}" OR '
        'labels."k8s-pod/tekton_dev/{run_label}"="{run_id}") AND '
        'timestamp>="{timestamp}" AND resource.labels.location="{region}"'
    ).format(
        run_label=run_label, run_id=run_id, timestamp=create_time, region=region
    )
  def ShouldStopTailer(self, log_tailer, run, project, region, run_id,
                       run_type):
    """Checks whether a log tailer should be stopped."""
    # Poll the run once per second until it reports a completion time.
    while run.completionTime is None:
      run = v2_client_util.GetRun(project, region, run_id, run_type)
      time.sleep(1)
    if log_tailer:
      # wait for some time since logs can still be coming in after run
      # is completed
      time.sleep(self.sleep_time)
      log_tailer.Stop()
    return run
  def Stream(self, project, region, run_id, run_type, out=log.out):
    """Streams the logs for a run if available."""
    run = v2_client_util.GetRun(project, region, run_id, run_type)
    # TODO: b/327446875 - Remove this check once the TiPP pool is removed.
    has_tipp_pool = (
        bool(run.workerPool) and 'workerPoolSecondGen' not in run.workerPool
    )
    log_filter = self._GetLogFilter(
        region, run_id, run_type, has_tipp_pool, run.createTime
    )
    log_tailer = GCLLogTailer.FromFilter(
        project, region, log_filter, has_tipp_pool, out=out
    )
    t = None
    if log_tailer:
      # Tail in a background thread so we can poll run status concurrently.
      t = v1_logs_util.ThreadInterceptor(target=log_tailer.Tail)
      t.start()
    run = self.ShouldStopTailer(log_tailer, run, project, region, run_id,
                                run_type)
    if t:
      t.join()
      # Surface any exception captured inside the tailing thread.
      if t.exception is not None:
        raise t.exception
    return run
  def PrintLog(
      self,
      project,
      region,
      run_id,
      run_type,
  ):
    """Print the logs for a run."""
    run = v2_client_util.GetRun(project, region, run_id, run_type)
    has_tipp_pool = (
        bool(run.workerPool) and 'workerPoolSecondGen' not in run.workerPool
    )
    log_filter = self._GetLogFilter(
        region, run_id, run_type, has_tipp_pool, run.createTime
    )
    log_tailer = GCLLogTailer.FromFilter(
        project, region, log_filter, has_tipp_pool
    )
    if log_tailer:
      log_tailer.Print()

View File

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing output for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
def ParseName(pattern, primitive_type):
  """Parses the name of a pipelineRun/taskRun.

  Args:
    pattern:
      "projects/{project}/locations/{location}/pipelineRuns/{pipeline_run}"
      "projects/{project}/locations/{location}/taskRuns/{task_run}"
    primitive_type: string, either "pipelinerun" or "taskrun".

  Returns:
    name: string, the trailing run name; None when the resource name does
      not match or the primitive type is unknown.
  """
  collections = {"pipelinerun": "pipelineRuns", "taskrun": "taskRuns"}
  collection = collections.get(primitive_type)
  if collection is None:
    return None
  match = re.match(
      r"projects/([^/]+)/locations/([^/]+)/" + collection + r"/([^/]+)",
      pattern,
  )
  if match:
    return match.group(3)
  return None

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util
from googlecloudsdk.api_lib.cloudbuild.v2 import input_util
from googlecloudsdk.core import log
_WORKER_POOL_ANNOTATION = "cloudbuild.googleapis.com/worker-pool"
_MANAGED_SIDECARS_ANNOTATION = "cloudbuild.googleapis.com/managed-sidecars"
_MACHINE_TYPE = "cloudbuild.googleapis.com/worker/machine-type"
_PROVENANCE_ENABLED = "cloudbuild.googleapis.com/provenance/enabled"
_PROVENANCE_STORAGE = "cloudbuild.googleapis.com/provenance/storage"
_PROVENANCE_REGION = "cloudbuild.googleapis.com/provenance/region"
def TektonYamlDataToPipelineRun(data):
  """Convert Tekton yaml file into PipelineRun message.

  Args:
    data: The parsed Tekton PipelineRun yaml dict; mutated in place.

  Returns:
    The populated PipelineRun proto message.

  Raises:
    InvalidYamlError: Neither pipelineSpec nor pipelineRef is present, the
      service account is missing, spec keys collide with top-level keys, or
      the yaml contains fields the proto does not recognize.
  """
  _VersionCheck(data)
  _MetadataTransform(data)
  spec = data["spec"]
  if "pipelineSpec" in spec:
    _PipelineSpecTransform(spec["pipelineSpec"])
  elif "pipelineRef" in spec:
    input_util.RefTransform(spec["pipelineRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "PipelineSpec or PipelineRef is required.")
  if "resources" in spec:
    # PipelineResources are not supported; drop them with a warning.
    spec.pop("resources")
    log.warning(
        "PipelineResources are dropped because they are deprecated: "
        "https://github.com/tektoncd/pipeline/blob/main/docs/resources.md")
  _ServiceAccountTransformPipelineSpec(spec)
  input_util.ParamDictTransform(spec.get("params", []))
  messages = client_util.GetMessagesModule()
  # Flatten: spec keys are hoisted to the top level before proto conversion.
  _CheckSpecKeys(data, spec)
  data.update(spec)
  data.pop("spec")
  data.pop("kind")
  schema_message = encoding.DictToMessage(data, messages.PipelineRun)
  input_util.UnrecognizedFields(schema_message)
  return schema_message
def _CheckSpecKeys(data, spec):
for key in spec.keys():
if key in data:
raise cloudbuild_exceptions.InvalidYamlError(
"{0} only needs to be defined in spec".format(key)
)
def TektonYamlDataToTaskRun(data):
  """Convert Tekton yaml file into TaskRun message.

  Args:
    data: The parsed Tekton TaskRun yaml dict; mutated in place.

  Returns:
    The populated TaskRun proto message.

  Raises:
    InvalidYamlError: Neither taskSpec nor taskRef is present, spec keys
      collide with top-level keys, or the yaml contains fields the proto
      does not recognize.
  """
  _VersionCheck(data)
  metadata = _MetadataTransform(data)
  spec = data["spec"]
  if "taskSpec" in spec:
    _TaskSpecTransform(spec["taskSpec"])
    # Managed sidecars come from the run's metadata annotations.
    managed_sidecars = _MetadataToSidecar(metadata)
    if managed_sidecars:
      spec["taskSpec"]["managedSidecars"] = managed_sidecars
  elif "taskRef" in spec:
    input_util.RefTransform(spec["taskRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "TaskSpec or TaskRef is required.")
  _ServiceAccountTransformTaskSpec(spec)
  input_util.ParamDictTransform(spec.get("params", []))
  messages = client_util.GetMessagesModule()
  # Flatten: spec keys are hoisted to the top level before proto conversion.
  _CheckSpecKeys(data, spec)
  data.update(spec)
  data.pop("spec")
  data.pop("kind")
  schema_message = encoding.DictToMessage(data, messages.TaskRun)
  input_util.UnrecognizedFields(schema_message)
  return schema_message
def _VersionCheck(data):
api_version = data.pop("apiVersion")
if api_version != "tekton.dev/v1" and api_version != "tekton.dev/v1beta1":
raise cloudbuild_exceptions.TektonVersionError()
def _MetadataTransform(data):
  """Folds yaml metadata annotations into the run spec.

  Pops "metadata" from data and copies the recognized Cloud Build
  annotations (worker pool, machine type, provenance settings) into
  data["spec"].

  Args:
    data: The parsed PipelineRun/TaskRun yaml dict; mutated in place.

  Returns:
    The popped metadata dict.

  Raises:
    InvalidYamlError: The spec is empty or metadata is missing.
  """
  spec = data["spec"]
  if not spec:
    raise cloudbuild_exceptions.InvalidYamlError("spec is empty.")
  metadata = data.pop("metadata")
  if not metadata:
    raise cloudbuild_exceptions.InvalidYamlError("Metadata is missing in yaml.")
  annotations = metadata.get("annotations", {})
  if _WORKER_POOL_ANNOTATION in annotations:
    spec["workerPool"] = annotations[_WORKER_POOL_ANNOTATION]
    # Annotations are propagated onto the spec only when a worker pool is set.
    spec["annotations"] = annotations
  if _MACHINE_TYPE in annotations:
    spec["worker"] = {"machineType": annotations[_MACHINE_TYPE]}
  # A dead `security = {}` / `if security:` block used to live here; it could
  # never populate spec["security"] and has been removed.
  provenance = {}
  if _PROVENANCE_ENABLED in annotations:
    provenance["enabled"] = annotations[_PROVENANCE_ENABLED].upper()
  if _PROVENANCE_STORAGE in annotations:
    provenance["storage"] = annotations[_PROVENANCE_STORAGE].upper()
  if _PROVENANCE_REGION in annotations:
    provenance["region"] = annotations[_PROVENANCE_REGION].upper()
  if provenance:
    spec["provenance"] = provenance
  return metadata
def _MetadataToSidecar(metadata):
  """Returns the managed-sidecars annotation value, or None when absent."""
  return metadata.get("annotations", {}).get(_MANAGED_SIDECARS_ANNOTATION)
def _PipelineSpecTransform(spec):
  """Rewrites a Tekton pipelineSpec into GCB form, in place."""
  for spec_param in spec.get("params", []):
    input_util.ParamSpecTransform(spec_param)
  # "tasks" is required; a missing key should fail loudly here.
  for pipeline_task in spec["tasks"]:
    _TaskTransform(pipeline_task)
  if "finally" in spec:
    # Tekton's "finally" list becomes the proto's "finallyTasks".
    finally_tasks = spec.pop("finally")
    for finally_task in finally_tasks:
      _TaskTransform(finally_task)
    spec["finallyTasks"] = finally_tasks
  for pipeline_result in spec.get("results", []):
    input_util.PipelineResultTransform(pipeline_result)
def _TaskSpecTransform(spec):
  """Rewrites a Tekton taskSpec's params, results, and steps in place."""
  for spec_param in spec.get("params", []):
    input_util.ParamSpecTransform(spec_param)
  for spec_result in spec.get("results", []):
    input_util.TaskResultTransform(spec_result)
  for spec_step in spec.get("steps", []):
    input_util.TaskStepTransform(spec_step)
def _TaskTransform(task):
  """Transform task message.

  Rewrites an embedded taskSpec (hoisting managed sidecars out of its
  metadata and wrapping the spec one level deeper), normalizes taskRef,
  converts Tekton 'when' clauses into whenExpressions, and normalizes
  params — all in place.
  """
  if "taskSpec" in task:
    task_spec = task.pop("taskSpec")
    _TaskSpecTransform(task_spec)
    managed_sidecars = _MetadataToSidecar(
        task_spec.pop("metadata")) if "metadata" in task_spec else []
    if managed_sidecars:
      task_spec["managedSidecars"] = managed_sidecars
    # The proto nests the embedded spec one level deeper than Tekton does.
    task["taskSpec"] = {"taskSpec": task_spec}
  if "taskRef" in task:
    input_util.RefTransform(task["taskRef"])
  whens = task.pop("when", [])
  for when in whens:
    if "operator" in when:
      when["expressionOperator"] = input_util.CamelToSnake(
          when.pop("operator")).upper()
  # Always assigned, even when empty — presumably harmless; TODO confirm.
  task["whenExpressions"] = whens
  input_util.ParamDictTransform(task.get("params", []))
def _ServiceAccountTransformPipelineSpec(spec):
if "taskRunTemplate" in spec:
if "serviceAccountName" in spec["taskRunTemplate"]:
sa = spec.pop("taskRunTemplate").pop("serviceAccountName")
security = spec.setdefault("security", {})
security["serviceAccount"] = sa
return
raise cloudbuild_exceptions.InvalidYamlError(
"spec.taskRunTemplate.serviceAccountName is required."
)
def _ServiceAccountTransformTaskSpec(spec):
if "serviceAccountName" in spec:
sa = spec.pop("serviceAccountName")
spec["serviceAccount"] = sa
security = spec.setdefault("security", {})
security["serviceAccount"] = sa

View File

@@ -0,0 +1,397 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing ouput for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.cloudbuild.v2 import output_util
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import custom_printer_base
PRINTER_FORMAT = "tekton"
class TektonPrinter(custom_printer_base.CustomPrinterBase):
  """Print a PipelineRun or TaskRun in Tekton YAML format."""

  def Transform(self, internal_proto):
    """Route the proto to the matching converter and render it as YAML."""
    proto = encoding.MessageToDict(internal_proto)
    pipeline_markers = ("pipelineSpec", "pipelineRef", "pipelineSpecYaml")
    if any(marker in proto for marker in pipeline_markers):
      return yaml.dump(self.PublicPRToTektonPR(proto), round_trip=True)
    if "taskSpec" in proto or "taskRef" in proto:
      return yaml.dump(self.PublicTRToTektonPR(proto), round_trip=True)

  def PublicPRToTektonPR(self, internal):
    """Convert a PipelineRun message into Tekton yaml."""
    metadata = {}
    spec = {}
    status = {}
    # METADATA
    if "name" in internal:
      metadata["name"] = output_util.ParseName(
          internal.pop("name"), "pipelinerun"
      )
    if "annotations" in internal:
      metadata["annotations"] = internal.pop("annotations")
    # SPEC
    if "params" in internal:
      spec["params"] = _TransformParams(internal.pop("params"))
    if "pipelineSpec" in internal:
      spec["pipelineSpec"] = _TransformPipelineSpec(
          internal.pop("pipelineSpec")
      )
    elif "pipelineRef" in internal:
      spec["pipelineRef"] = TransformRef(internal.pop("pipelineRef"))
    elif "pipelineSpecYaml" in internal:
      # Re-parse the stored YAML so it round-trips with formatting intact.
      spec["pipelineSpecYaml"] = yaml.load(
          internal.pop("pipelineSpecYaml"), round_trip=True
      )
    for passthrough in ("timeout", "workspaces"):
      if passthrough in internal:
        spec[passthrough] = internal.pop(passthrough)
    # STATUS
    if "conditions" in internal:
      status["conditions"] = _TransformConditions(internal.pop("conditions"))
    for passthrough in ("startTime", "completionTime"):
      if passthrough in internal:
        status[passthrough] = internal.pop(passthrough)
    # Tekton reports the resolved spec under status.
    if "resolvedPipelineSpec" in internal:
      status["pipelineSpec"] = _TransformPipelineSpec(
          internal.pop("resolvedPipelineSpec")
      )
    # PipelineRunResults
    if "results" in internal:
      status["results"] = _TransformPipelineRunResults(internal.pop("results"))
    if "childReferences" in internal:
      # NOTE(review): child references are copied verbatim even though
      # _TransformChildRefs exists in this module — confirm that the raw
      # form is intended here.
      status["childReferences"] = internal.pop("childReferences")
    pr = {"metadata": metadata, "spec": spec, "status": status}
    # TASKRUNTEMPLATE
    if "serviceAccount" in internal:
      pr["taskRunTemplate"] = {
          "serviceAccountName": internal.pop("serviceAccount"),
      }
    return pr

  def PublicTRToTektonPR(self, internal):
    """Convert a TaskRun message into Tekton yaml."""
    metadata = {}
    spec = {}
    status = {}
    # METADATA
    if "name" in internal:
      metadata["name"] = output_util.ParseName(internal.pop("name"), "taskrun")
    # SPEC
    if "params" in internal:
      spec["params"] = _TransformParams(internal.pop("params"))
    if "taskSpec" in internal:
      spec["taskSpec"] = _TransformTaskSpec(internal.pop("taskSpec"))
    elif "taskRef" in internal:
      spec["taskRef"] = TransformRef(internal.pop("taskRef"))
    for passthrough in ("timeout", "workspaces", "serviceAccountName"):
      if passthrough in internal:
        spec[passthrough] = internal.pop(passthrough)
    # STATUS
    if "conditions" in internal:
      status["conditions"] = _TransformConditions(internal.pop("conditions"))
    for passthrough in ("startTime", "completionTime"):
      if passthrough in internal:
        status[passthrough] = internal.pop(passthrough)
    # Tekton reports the resolved spec under status.
    if "resolvedTaskSpec" in internal:
      status["taskSpec"] = _TransformTaskSpec(internal.pop("resolvedTaskSpec"))
    # StepState
    if "steps" in internal:
      status["steps"] = _TransformStepStates(internal.pop("steps"))
    # TaskRunResults
    if "results" in internal:
      status["results"] = _TransformTaskRunResults(internal.pop("results"))
    # SidecarState is passed through untouched.
    if "sidecars" in internal:
      status["sidecars"] = internal.pop("sidecars")
    return {"metadata": metadata, "spec": spec, "status": status}
def _TransformPipelineSpec(ps):
"""Convert PipelineSpec into Tekton yaml."""
pipeline_spec = {}
if "params" in ps:
pipeline_spec["params"] = TransformParamsSpec(ps.pop("params"))
if "tasks" in ps:
pipeline_spec["tasks"] = _TransformPipelineTasks(ps.pop("tasks"))
if "results" in ps:
pipeline_spec["results"] = _TransformPipelineResults(ps.pop("results"))
if "finallyTasks" in ps:
pipeline_spec["finally"] = _TransformPipelineTasks(ps.pop("finallyTasks"))
if "workspaces" in ps:
pipeline_spec["workspaces"] = ps.pop("workspaces")
return pipeline_spec
def TransformParamsSpec(ps):
  """Convert a list of ParamSpecs into Tekton yaml."""
  converted = []
  for source in ps:
    entry = {}
    for field in ("name", "description"):
      if field in source:
        entry[field] = source.pop(field)
    # Tekton uses lowercase type names (string/array/object).
    if "type" in source:
      entry["type"] = source.pop("type").lower()
    if "default" in source:
      entry["default"] = _TransformParamValue(source.pop("default"))
    if "properties" in source:
      entry["properties"] = source.pop("properties")
    converted.append(entry)
  return converted
def _TransformTaskSpec(ts):
"""Convert TaskSpecs into Tekton yaml."""
task_spec = {}
if "params" in ts:
task_spec["params"] = TransformParamsSpec(ts.pop("params"))
if "steps" in ts:
task_spec["steps"] = _TransformSteps(ts.pop("steps"))
if "stepTemplate" in ts:
task_spec["stepTemplate"] = ts.pop("stepTemplate")
if "results" in ts:
task_spec["results"] = _TransformTaskResults(ts.pop("results"))
if "sidecars" in ts:
task_spec["sidecars"] = ts.pop("sidecars")
if "workspaces" in ts:
task_spec["workspaces"] = ts.pop("workspaces")
return task_spec
def _TransformOnError(oe):
  """Convert an onError enum value (e.g. "STOP_AND_FAIL") to camelCase."""
  lowered = oe.lower()
  return cloudbuild_util.SnakeToCamelString(lowered)
def _TransformSteps(steps):
"""Convert Steps into Tekton yaml."""
results = []
for step in steps:
if "ref" in step:
step["ref"] = TransformRef(step.pop("ref"))
if "params" in step:
step["params"] = _TransformParams(step.pop("params"))
results.append(step)
if "onError" in step:
step["onError"] = _TransformOnError(step.pop("onError"))
return results
def _TransformPipelineTasks(ts):
"""Convert PipelineTasks into Tekton yaml."""
tasks = []
for task in ts:
t = {"name": task.get("name", None)}
if "params" in task:
t["params"] = _TransformParams(task.pop("params"))
if "taskSpec" in task:
task_spec = task.pop("taskSpec").pop("taskSpec")
t["taskSpec"] = _TransformTaskSpec(task_spec)
elif "taskRef" in task:
t["taskRef"] = task.pop("taskRef")
if "workspaces" in task:
t["workspaces"] = task.pop("workspaces")
if "runAfter" in task:
t["runAfter"] = task.pop("runAfter")
if "timeout" in task:
t["timeout"] = task.pop("timeout")
tasks.append(t)
return tasks
def _TransformPipelineResults(rs):
"""Convert PipelineResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "description" in r:
result["description"] = r.pop("description")
if "type" in r:
result["type"] = r.pop("type").lower()
if "value" in r:
result["value"] = _TransformResultValue(r.pop("value"))
results.append(result)
return results
def _TransformTaskResults(rs):
"""Convert TaskResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "description" in r:
result["description"] = r.pop("description")
if "type" in r:
result["type"] = r.pop("type").lower()
if "properties" in r:
result["properties"] = r.pop("properties")
if "value" in r:
result["value"] = _TransformParamValue(r.pop("value"))
results.append(result)
return results
def _TransformPipelineRunResults(rs):
"""Convert PipelineRunResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "value" in r:
result["value"] = _TransformResultValue(r.pop("value"))
results.append(result)
return results
def _TransformStepStates(steps):
"""Convert StepState into Tekton yaml."""
step_states = []
for s in steps:
if "results" in s:
s["results"] = _TransformTaskRunResults(s.pop("results"))
step_states.append(s)
return step_states
def _TransformTaskRunResults(rs):
"""Convert TaskRunResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "resultValue" in r:
result["value"] = _TransformResultValue(r.pop("resultValue"))
results.append(result)
return results
def _TransformResultValue(v):
"""Convert ResultValue into Tekton yaml."""
if "stringVal" in v:
return v.pop("stringVal")
if "arrayVal" in v:
return v.pop("arrayVal")
if "objectVal" in v:
return v.pop("objectVal")
return v
def _TransformParamValue(v):
"""Convert ParamValue into Tekton yaml."""
if "stringVal" in v:
return v.pop("stringVal")
if "arrayVal" in v:
return v.pop("arrayVal")
return v
def _TransformParams(ps):
"""Convert Params into Tekton yaml."""
params = []
for p in ps:
param = {}
if "name" in p:
param["name"] = p.pop("name")
if "value" in p:
param["value"] = _TransformParamValue(p.pop("value"))
params.append(param)
return params
def _TransformConditions(cs):
"""Convert Conditions into Tekton yaml."""
conditions = []
for c in cs:
condition = {}
# Only append the condition if it has a message
# which indicates the final condition
if "message" in c:
condition["message"] = c.pop("message")
if "lastTransitionTime" in c:
condition["lastTransitionTime"] = c.pop("lastTransitionTime")
if "status" in c:
condition["status"] = c.pop("status").capitalize()
if "type" in c:
condition["type"] = c.pop("type").capitalize()
if "reason" in c:
condition["reason"] = c.pop("reason")
conditions.append(condition)
return conditions
def _TransformChildRefs(crs):
"""Convert ChildReferences into Tekton yaml."""
child_refs = []
for cr in crs:
child_ref = {}
if "name" in cr:
child_ref["name"] = cr.pop("name")
if "pipelineTask" in cr:
child_ref["pipelineTask"] = cr.pop("pipelineTask")
child_refs.append(child_ref)
return child_refs
def TransformRef(ref):
  """Convert a generic reference (step, task, or pipeline) into Tekton yaml."""
  converted = {}
  for field in ("name", "resolver"):
    if field in ref:
      converted[field] = ref.pop(field)
  if "params" in ref:
    converted["params"] = _TransformParams(ref.pop("params"))
  return converted

View File

@@ -0,0 +1,171 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util
from googlecloudsdk.api_lib.cloudbuild.v2 import input_util
from googlecloudsdk.core import yaml
# Dotted option paths whose string values are uppercased into API enum
# spellings by _WorkflowTransform.
_WORKFLOW_OPTIONS_ENUMS = [
    "options.provenance.enabled",
    "options.provenance.storage",
    "options.provenance.region",
]
def CloudBuildYamlDataToWorkflow(workflow):
  """Convert cloudbuild.yaml file into Workflow message.

  Args:
    workflow: dict parsed from the user's cloudbuild.yaml; mutated in place.

  Returns:
    A Workflow proto message built from the transformed dict.
  """
  _WorkflowTransform(workflow)
  _WorkflowValidate(workflow)
  messages = client_util.GetMessagesModule()
  workflow_message = encoding.DictToMessage(workflow, messages.Workflow)
  # Surface any YAML fields the proto schema does not recognize.
  input_util.UnrecognizedFields(workflow_message)
  return workflow_message
def _WorkflowValidate(workflow):
"""Check that the given workflow has all required fields.
Args:
workflow: The user-supplied Cloud Build Workflow YAML.
Raises:
InvalidYamlError: If the workflow is invalid.
"""
if (
"options" not in workflow
or "security" not in workflow["options"]
or "serviceAccount" not in workflow["options"]["security"]
):
raise cloudbuild_exceptions.InvalidYamlError(
"A service account is required. Specify your user-managed service"
" account using the options.security.serviceAccount field"
)
def _WorkflowTransform(workflow):
  """Transform a workflow dict in place into the v2 API message shape."""
  # The external "triggers" field maps to the API field "workflowTriggers".
  if "triggers" in workflow:
    workflow["workflowTriggers"] = workflow.pop("triggers")
  for workflow_trigger in workflow.get("workflowTriggers", []):
    input_util.WorkflowTriggerTransform(workflow_trigger)
  for param_spec in workflow.get("params", []):
    input_util.ParamSpecTransform(param_spec)
    if not param_spec.get("name", ""):
      raise cloudbuild_exceptions.InvalidYamlError(
          "Workflow parameter name is required"
      )
    # Workflows accept only string params: both the declared type and the
    # default value's type must denote STRING; absent fields count as
    # string. NOTE(review): the declared type is compared lowercase while
    # the default's type is compared uppercase — presumably this reflects
    # how ParamSpecTransform normalizes each field; confirm.
    if (
        param_spec.get("type", "string") != "string"
        or param_spec.get("default", {"type": "STRING"}).get("type") != "STRING"
    ):
      raise cloudbuild_exceptions.InvalidYamlError(
          "Only string are supported for workflow parameters, error at "
          "parameter with name: {}".format(param_spec.get("name"))
      )
  # An inline pipeline spec is stored on the message as YAML text, not as a
  # structured field; otherwise a pipelineRef must be supplied.
  if "pipelineSpec" in workflow:
    workflow["pipelineSpecYaml"] = yaml.dump(
        workflow.pop("pipelineSpec"), round_trip=True
    )
  elif "pipelineRef" in workflow:
    input_util.RefTransform(workflow["pipelineRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "PipelineSpec or PipelineRef is required.")
  for workspace_binding in workflow.get("workspaces", []):
    _WorkspaceBindingTransform(workspace_binding)
  # The external "options.status" field maps to
  # "options.statusUpdateOptions".
  if "options" in workflow and "status" in workflow["options"]:
    popped_status = workflow["options"].pop("status")
    workflow["options"]["statusUpdateOptions"] = popped_status
  # Enum-valued option strings are uppercased to match API enum spellings.
  for option in _WORKFLOW_OPTIONS_ENUMS:
    input_util.SetDictDottedKeyUpperCase(workflow, option)
def _PipelineSpecTransform(pipeline_spec):
"""Transform pipeline spec message."""
for pipeline_task in pipeline_spec.get("tasks", []):
_PipelineTaskTransform(pipeline_task)
for param_spec in pipeline_spec.get("params", []):
input_util.ParamSpecTransform(param_spec)
if "finally" in pipeline_spec:
finally_tasks = pipeline_spec.pop("finally")
for task in finally_tasks:
_PipelineTaskTransform(task)
pipeline_spec["finallyTasks"] = finally_tasks
def _PipelineTaskTransform(pipeline_task):
  """Transform a pipeline task dict in place into the v2 API shape."""
  if "taskSpec" in pipeline_task:
    task_spec = pipeline_task.pop("taskSpec")
    for param_spec in task_spec.get("params", []):
      input_util.ParamSpecTransform(param_spec)
    # The API double-nests the inline task spec.
    pipeline_task["taskSpec"] = {"taskSpec": task_spec}
  elif "taskRef" in pipeline_task:
    input_util.RefTransform(pipeline_task["taskRef"])
    # Pop-and-reassign to preserve the original's move of the key to the
    # end of the dict (affects serialized key order).
    pipeline_task["taskRef"] = pipeline_task.pop("taskRef")
  # The external "when" list maps to the API field "whenExpressions".
  if "when" in pipeline_task:
    when_expressions = pipeline_task.pop("when")
    for when_expression in when_expressions:
      _WhenExpressionTransform(when_expression)
    pipeline_task["whenExpressions"] = when_expressions
  input_util.ParamDictTransform(pipeline_task.get("params", []))
def _WhenExpressionTransform(when_expression):
if "operator" in when_expression:
when_expression["expressionOperator"] = input_util.CamelToSnake(
when_expression.pop("operator")).upper()
def _WorkspaceBindingTransform(workspace_binding):
"""Transform workspace binding message."""
if "secretName" in workspace_binding:
popped_secret = workspace_binding.pop("secretName")
workspace_binding["secret"] = {}
workspace_binding["secret"]["secretName"] = popped_secret
elif "volume" in workspace_binding:
popped_volume = workspace_binding.pop("volume")
# Volume Claim Template.
workspace_binding["volumeClaim"] = {}
if "storage" in popped_volume:
storage = popped_volume.pop("storage")
workspace_binding["volumeClaim"]["storage"] = storage
else:
return

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for displaying workflows for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudbuild.v2 import pipeline_output_util
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import custom_printer_base
PRINTER_FORMAT = "workflow"
class WorkflowPrinter(custom_printer_base.CustomPrinterBase):
  """Print a Workflow in YAML with comments."""

  def _WorkflowDisplayLines(self, workflow):
    """Apply formatting to the workflow for the describe command."""
    if "pipelineSpecYaml" in workflow:
      workflow = self._updateWorkflowSpec(
          workflow, workflow.pop("pipelineSpecYaml")
      )
    elif (
        "pipelineSpec" in workflow
        and "generatedYaml" in workflow["pipelineSpec"]
    ):
      # Replace the structured pipelineSpec with its generated YAML form.
      generated = workflow["pipelineSpec"].pop("generatedYaml")
      del workflow["pipelineSpec"]
      workflow = self._updateWorkflowSpec(workflow, generated)
    elif "pipelineRef" in workflow:
      workflow["pipelineRef"] = pipeline_output_util.TransformRef(
          workflow.pop("pipelineRef")
      )
    declared_params = workflow.get("params", {})
    if declared_params:
      workflow["params"] = pipeline_output_util.TransformParamsSpec(
          declared_params
      )
    rendered = yaml.dump(workflow, round_trip=True)
    return custom_printer_base.Lines(rendered.split("\n"))

  def _updateWorkflowSpec(self, workflow, yaml_str):
    # Re-parse the stored YAML so it round-trips with formatting intact.
    workflow["pipelineSpec"] = yaml.load(yaml_str, round_trip=True)
    return workflow

  def Transform(self, record):
    """Transform ApplicationStatus into the output structure of marker classes.

    Args:
      record: a dict object

    Returns:
      lines formatted for output
    """
    return self._WorkflowDisplayLines(record)

View File

@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parse workerpool config files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
# What we call workerpool.yaml for error messages that try to parse it.
_WORKERPOOL_CONFIG_FRIENDLY_NAME = 'workerpool config'
def LoadWorkerpoolConfigFromStream(stream, messages, path=None):
  """Load a workerpool config file into a WorkerPool message.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    messages: module, The messages module that has a WorkerPool type.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream as a dict.
    ParseProtoException: If there was a problem interpreting the stream as
      the given message type.

  Returns:
    WorkerPool message, The worker pool that got decoded.
  """
  return cloudbuild_util.LoadMessageFromStream(
      stream,
      messages.WorkerPool,
      _WORKERPOOL_CONFIG_FRIENDLY_NAME,
      [],
      path,
  )
def LoadWorkerpoolConfigFromPath(path, msg_type):
  """Load a workerpool config file into a WorkerPool message.

  Args:
    path: str. Path to the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParserError: If there was a problem parsing the file as a dict.
    ParseProtoException: If there was a problem interpreting the file as
      the given message type.

  Returns:
    WorkerPool message, The worker pool that got decoded.
  """
  return cloudbuild_util.LoadMessageFromPath(
      path, msg_type, _WORKERPOOL_CONFIG_FRIENDLY_NAME
  )