feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,201 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the cloudbuild v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.core import resources
from googlecloudsdk.core.resource import resource_property
_API_NAME = 'cloudbuild'
GA_API_VERSION = 'v2'
RELEASE_TRACK_TO_API_VERSION = {
base.ReleaseTrack.GA: GA_API_VERSION,
base.ReleaseTrack.BETA: GA_API_VERSION,
base.ReleaseTrack.ALPHA: GA_API_VERSION,
}
CLUSTER_NAME_SELECTOR = r'projects/.*/locations/.*/memberships/(.*)'
WORKERPOOL_SECOND_GEN_NAME_MATCHER = (
r'projects/.*/locations/.*/workerPoolSecondGen/.*'
)
WORKERPOOL_SECOND_GEN_NAME_SELECTOR = (
r'projects/.*/locations/.*/workerPoolSecondGen/(.*)'
)
def GetMessagesModule(release_track=base.ReleaseTrack.GA):
  """Returns the generated messages module for the Cloud Build v2 API.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.

  Returns:
    Module containing the definitions of messages for Cloud Build.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetMessagesModule(_API_NAME, api_version)
def GetClientInstance(release_track=base.ReleaseTrack.GA, use_http=True):
  """Returns an instance of the Cloud Build v2 API client.

  Args:
    release_track: The desired value of the enum
      googlecloudsdk.calliope.base.ReleaseTrack.
    use_http: bool, True to create an http object for this client.

  Returns:
    base_api.BaseApiClient, An instance of the Cloud Build client.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientInstance(_API_NAME, api_version, no_http=not use_http)
def GetRun(project, region, run_id, run_type):
  """Gets a PipelineRun or TaskRun from the Cloud Build v2 API.

  Args:
    project: The project ID or number that owns the run.
    region: The Cloud Build location of the run.
    run_id: The short ID of the run.
    run_type: Either 'pipelinerun' or 'taskrun'.

  Returns:
    The PipelineRun or TaskRun message returned by the API.

  Raises:
    ValueError: If run_type is not 'pipelinerun' or 'taskrun'. (Previously an
      unrecognized type silently returned None, which hid caller bugs.)
  """
  client = GetClientInstance()
  messages = GetMessagesModule()
  if run_type == 'pipelinerun':
    pipeline_run_resource = resources.REGISTRY.Parse(
        run_id,
        collection='cloudbuild.projects.locations.pipelineRuns',
        api_version='v2',
        params={
            'projectsId': project,
            'locationsId': region,
            'pipelineRunsId': run_id,
        })
    return client.projects_locations_pipelineRuns.Get(
        messages.CloudbuildProjectsLocationsPipelineRunsGetRequest(
            name=pipeline_run_resource.RelativeName()))
  elif run_type == 'taskrun':
    task_run_resource = resources.REGISTRY.Parse(
        run_id,
        collection='cloudbuild.projects.locations.taskRuns',
        api_version='v2',
        params={
            'projectsId': project,
            'locationsId': region,
            'taskRunsId': run_id,
        })
    return client.projects_locations_taskRuns.Get(
        messages.CloudbuildProjectsLocationsTaskRunsGetRequest(
            name=task_run_resource.RelativeName()))
  raise ValueError(
      "run_type must be 'pipelinerun' or 'taskrun', got: %r" % (run_type,))
def ClusterShortName(resource_name):
  """Get the name part of a cluster membership's full resource name.

  For example, "projects/123/locations/global/memberships/cluster2" returns
  "cluster2".

  Args:
    resource_name: A cluster's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The cluster's short name.
  """
  matched = re.search(CLUSTER_NAME_SELECTOR, resource_name)
  if not matched:
    raise ValueError('The cluster membership resource name must match "%s"' %
                     (CLUSTER_NAME_SELECTOR,))
  return matched.group(1)
def ListLocations(project):
  """Get the list of supported Cloud Build locations.

  Args:
    project: The project to search.

  Returns:
    The response of the Locations.List API call.
  """
  client = GetClientInstance()
  request = GetMessagesModule().CloudbuildProjectsLocationsListRequest(
      name='projects/{}'.format(project))
  return client.projects_locations.List(request)
def WorkerPoolSecondGenShortName(resource_name):
  """Get the name part of a worker pool second gen's full resource name.

  E.g. "projects/abc/locations/def/workerPoolSecondGen/ghi" returns "ghi".

  Args:
    resource_name: A worker pool second gen's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The worker pool's short name.
  """
  matched = re.search(WORKERPOOL_SECOND_GEN_NAME_SELECTOR, resource_name)
  if not matched:
    raise ValueError('The worker pool second gen resource name must match "%s"' %
                     (WORKERPOOL_SECOND_GEN_NAME_MATCHER,))
  return matched.group(1)
def MessageToFieldPaths(msg):
  """Produce field paths from a message object.

  The result is used to create a FieldMask proto message that contains all
  field paths presented in the object.
  https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto

  Args:
    msg: A user defined message object that extends the messages.Message class.
    https://github.com/google/apitools/blob/master/apitools/base/protorpclite/messages.py

  Returns:
    The list of field paths.
  """
  paths = []
  for field in msg.all_fields():
    value = msg.get_assigned_value(field.name)
    # Repeated fields are initialized to empty lists; an empty repeated field
    # was never assigned, so it is excluded from the mask.
    if field.repeated and not value:
      continue
    if value is None:
      continue
    snake_name = resource_property.ConvertToSnakeCase(field.name)
    if hasattr(value, 'all_fields'):
      # A nested message: recurse and qualify each sub-path with this field.
      paths.extend(
          '{}.{}'.format(snake_name, sub) for sub in MessageToFieldPaths(value))
    else:
      paths.append(snake_name)
  return paths

View File

@@ -0,0 +1,220 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from typing import MutableMapping
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.core import yaml
_DC_GIT_REPO_LINK_PAT = re.compile("^projects/[^/]+/locations/[^/]+/connections"
"/[^/]+/gitRepositoryLinks/[^/]+$")
_PUB_SUB_TOPIC_PAT = re.compile("^projects/[^/]+/topics/[^/]+$")
def SetDictDottedKeyUpperCase(input_dict, dotted_key):
  """Upper-cases the string stored at a dotted path, in place.

  A no-op when any intermediate key (or the final key) is absent.

  Args:
    input_dict: The (possibly nested) dict to mutate.
    dotted_key: A "a.b.c"-style path into input_dict.
  """
  parts = dotted_key.split(".")
  current = input_dict
  for part in parts[:-1]:
    if part not in current:
      return
    current = current.get(part)
  leaf = parts[-1]
  if leaf in current:
    current[leaf] = current[leaf].upper()
def LoadYamlFromPath(path):
  """Loads a YAML file and requires the top level to be a mapping.

  Args:
    path: Path of the YAML file to read.

  Returns:
    The parsed (round-trippable) mapping.

  Raises:
    ParserError: If the file fails to parse or is not a mapping.
  """
  try:
    parsed = yaml.load_path(path, round_trip=True, preserve_quotes=True)
  except yaml.Error as e:
    raise cloudbuild_exceptions.ParserError(path, e.inner_error)
  if yaml.dict_like(parsed):
    return parsed
  raise cloudbuild_exceptions.ParserError(path,
                                          "Could not parse as a dictionary.")
def CamelToSnake(data):
  """Converts camelCase to snake_case (runs of capitals stay together)."""
  underscored = re.sub(r"([A-Z]+)", r"_\1", data)
  return underscored.lower().lstrip("_")
def UnrecognizedFields(message):
  """Raises InvalidYamlError if the message carries unrecognized YAML fields."""
  unknown = message.all_unrecognized_fields()
  if not unknown:
    return
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unrecognized fields in yaml: {f}".format(f=", ".join(unknown)))
def WorkflowTriggerTransform(trigger):
  """Transform workflow trigger according to the proto.

  Refer to:
  * go/gcb-v2-filters
  * go/re-scope-workflow-resources-to-triggers-only
  to understand more details.

  Args:
    trigger: the trigger defined in the workflow YAML; mutated in place.

  Raises:
    InvalidYamlError: The event source or eventType was missing/unsupported.
  """
  # The proto calls the trigger's name "id".
  trigger["id"] = trigger.pop("name")
  # NOTE: both "source" and "eventSource" are always popped here — the default
  # argument is evaluated eagerly — and "source" wins when both are present.
  eventsource = trigger.pop("source", trigger.pop("eventSource", ""))
  if not eventsource:
    raise cloudbuild_exceptions.InvalidYamlError("Empty event source")
  # Classify the source by its shape: Pub/Sub topic, gitRepositoryLink,
  # plain URL, or the literal "webhook".
  if re.match(_PUB_SUB_TOPIC_PAT, eventsource):
    trigger["source"] = {"topic": eventsource}
  elif re.match(_DC_GIT_REPO_LINK_PAT, eventsource):
    trigger["source"] = {"gitRepoLink": eventsource}
  elif eventsource.startswith("https://"):
    trigger["source"] = {"url": eventsource}
  elif eventsource == "webhook":
    # Webhook triggers set no "source" field but must carry a secret.
    if not trigger.get("webhookValidationSecret", ""):
      raise cloudbuild_exceptions.InvalidYamlError(
          "Webhook trigger requires a webhookValidationSecret")
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "Unsupported event source: {eventsource}".format(
            eventsource=eventsource
        )
    )
  # Map user-facing event type names onto the proto enum values.
  event_type_mapping = {
      "branch-push": "PUSH_BRANCH",
      "tag-push": "PUSH_TAG",
      "pull-request": "PULL_REQUEST",
      "any": "ALL",
  }
  if "eventType" in trigger:
    event_type = trigger.pop("eventType")
    mapped_event_type = event_type_mapping.get(event_type)
    if mapped_event_type is not None:
      trigger["eventType"] = mapped_event_type
    else:
      raise cloudbuild_exceptions.InvalidYamlError(
          ("Unsupported event type: {event_type}. "
           "Supported: {event_types}").format(
               event_type=event_type,
               event_types=",".join(event_type_mapping.keys())))
  # The "filters" mapping is flattened onto the trigger itself.
  for key, value in trigger.pop("filters", {}).items():
    trigger[key] = value
  # A gitRef "regex" filter is renamed to the proto's "nameRegex".
  if "gitRef" in trigger and "regex" in trigger["gitRef"]:
    trigger["gitRef"]["nameRegex"] = trigger["gitRef"].pop("regex")
  ParamDictTransform(trigger.get("params", []))
def _ConvertToUpperCase(input_map: MutableMapping[str, str], key: str):
if key in input_map:
input_map[key] = input_map[key].upper()
def ParamSpecTransform(param_spec):
  """Normalizes a param spec in place: type casing and default value form."""
  # The two keys are independent, so transform order does not matter.
  _ConvertToUpperCase(param_spec, "type")
  if "default" in param_spec:
    param_spec["default"] = ParamValueTransform(param_spec["default"])
def PipelineResultTransform(pipeline_result):
  """Rewrites a pipeline result's value into the GCB resultValue struct."""
  if "value" not in pipeline_result:
    return
  pipeline_result["value"] = ResultValueTransform(pipeline_result["value"])
def TaskStepTransform(task_step):
  """Normalizes a single task step in place (ref, params, onError)."""
  step_ref = task_step.get("ref")
  if "ref" in task_step:
    RefTransform(step_ref)
  ParamDictTransform(task_step.get("params", []))
  if "onError" in task_step:
    OnErrorTransform(task_step)
def OnErrorTransform(data):
  """Maps a Tekton onError value onto the GCB enum form, in place."""
  # Equivalent to CamelToSnake(value).upper() for the two legal values.
  canonical = {"continue": "CONTINUE", "stopAndFail": "STOP_AND_FAIL"}
  on_error = data["onError"]
  if on_error not in canonical:
    raise cloudbuild_exceptions.InvalidYamlError(
        "Unsupported onError value: {value}. Supported: continue, stopAndFail"
        .format(value=data["onError"])
    )
  data["onError"] = canonical[on_error]
def TaskResultTransform(task_result):
  """Normalizes a task result in place: type, nested properties, and value."""
  _ConvertToUpperCase(task_result, "type")
  properties = task_result.get("properties", [])
  for prop_name in properties:
    PropertySpecTransform(properties[prop_name])
  if "value" in task_result:
    task_result["value"] = ParamValueTransform(task_result["value"])
def PropertySpecTransform(property_spec):
  """Mutates the given property spec from Tekton to GCB format.

  Upper-cases the "type" field in place when present.

  Args:
    property_spec: A Tekton-compliant property spec.
  """
  if "type" in property_spec:
    property_spec["type"] = property_spec["type"].upper()
def ParamDictTransform(params):
  """Transforms each param's value into the GCB paramValue struct, in place."""
  for entry in params:
    entry["value"] = ParamValueTransform(entry["value"])
def ParamValueTransform(param_value):
  """Converts a YAML param value into the GCB {type, ...Val} struct.

  Strings and numbers become STRING values; lists become ARRAY values.

  Args:
    param_value: The raw value from the YAML.

  Returns:
    A dict with a "type" discriminator and the matching *Val field.

  Raises:
    InvalidYamlError: For any other value type.
  """
  if isinstance(param_value, (str, float, int)):
    return {"type": "STRING", "stringVal": str(param_value)}
  if isinstance(param_value, list):
    return {"type": "ARRAY", "arrayVal": param_value}
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unsupported param value type. {msg_type}".format(
          msg_type=type(param_value)))
def ResultValueTransform(result_value):
  """Transforms the string result value from Tekton to GCB resultValue struct.

  Strings and numbers become STRING values, lists become ARRAY values, and
  mappings become OBJECT values.

  Args:
    result_value: The raw result value from the YAML.

  Returns:
    A dict with a "type" discriminator and the matching *Val field.

  Raises:
    InvalidYamlError: For any other value type. (Previously the OBJECT branch
      tested `isinstance(result_value, object)`, which is True for every
      Python value, so the raise was unreachable and e.g. None was silently
      emitted as an OBJECT.)
  """
  if isinstance(result_value, (str, float, int)):
    return {"type": "STRING", "stringVal": str(result_value)}
  if isinstance(result_value, list):
    return {"type": "ARRAY", "arrayVal": result_value}
  if isinstance(result_value, dict):
    return {"type": "OBJECT", "objectVal": result_value}
  raise cloudbuild_exceptions.InvalidYamlError(
      "Unsupported param value type. {msg_type}".format(
          msg_type=type(result_value)
      )
  )
def RefTransform(ref):
  """Upper-cases a ref's resolver (if any) and transforms its params in place."""
  if "resolver" in ref:
    # pop-then-assign preserves the original's key reordering behavior.
    resolver_value = ref.pop("resolver")
    ref["resolver"] = resolver_value.upper()
  ParamDictTransform(ref.get("params", []))

View File

@@ -0,0 +1,214 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manage and stream logs in-progress or completed PipelineRun/TaskRun."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import time
from googlecloudsdk.api_lib.cloudbuild import logs as v1_logs_util
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util as v2_client_util
from googlecloudsdk.api_lib.logging import common
from googlecloudsdk.core import log
class GCLLogTailer(v1_logs_util.TailerBase):
  """Helper class to tail logs from GCL, printing content as available."""

  # Bucket/view names used to scope log reads.
  CLOUDBUILD_BUCKET = 'cloudbuild'
  ALL_LOGS_VIEW = '_AllLogs'

  def __init__(
      self, project, location, log_filter, has_tipp_pool, out=log.status
  ):
    """Initializes the tailer.

    Args:
      project: str, project of the run whose logs are tailed.
      location: str, Cloud Build location of the run.
      log_filter: str, Cloud Logging filter selecting the run's entries.
      has_tipp_pool: bool, True when the run executed on a legacy (TiPP)
        worker pool; selects the regional "cloudbuild" bucket view instead of
        the project-global _Default bucket view.
      out: stream to print log lines to.
    """
    self.tailer = v1_logs_util.GetGCLLogTailer()
    self.log_filter = log_filter
    self.project = project
    self.location = location
    # View over the project-wide _Default bucket (non-worker-pool runs).
    self.default_log_view = (
        'projects/{project_id}/locations/global/buckets/_Default/views/{view}'
    ).format(project_id=self.project, view=self.ALL_LOGS_VIEW)
    # View over the regional "cloudbuild" bucket (TiPP worker-pool runs).
    self.workerpool_log_view = 'projects/{project_id}/locations/{location}/buckets/{bucket}/views/{view}'.format(
        project_id=self.project,
        location=self.location,
        bucket=self.CLOUDBUILD_BUCKET,
        view=self.ALL_LOGS_VIEW)
    self.has_tipp_pool = has_tipp_pool
    self.out = out
    # Seconds the Tailing API may buffer entries before delivering them; also
    # how long Stop() waits before shutting the tailer down.
    self.buffer_window_seconds = 2

  @classmethod
  def FromFilter(
      cls, project, location, log_filter, has_tipp_pool, out=log.out
  ):
    """Build a GCLLogTailer from a log filter."""
    # NOTE(review): this default (log.out) differs from __init__'s default
    # (log.status) — confirm the difference is intentional.
    return cls(
        project=project,
        log_filter=log_filter,
        location=location,
        has_tipp_pool=has_tipp_pool,
        out=out,
    )

  def Tail(self):
    """Tail the GCL logs and print any new bytes to the console."""
    if not self.tailer:
      return
    if self.has_tipp_pool:
      resource_names = [self.workerpool_log_view]
    else:
      resource_names = [self.default_log_view]
    output_logs = self.tailer.TailLogs(
        resource_names,
        self.log_filter,
        buffer_window_seconds=self.buffer_window_seconds,
    )
    self._PrintFirstLine(' REMOTE RUN OUTPUT ')
    for output in output_logs:
      # text_payload (snake_case) is the streaming API's field name.
      text = self._ValidateScreenReader(output.text_payload)
      self._PrintLogLine(text)
    self._PrintLastLine(' RUN FINISHED; TRUNCATING OUTPUT LOGS ')
    return

  def Stop(self):
    """Stop log tailing."""
    # Sleep to allow the Tailing API to send the last logs it buffered up.
    time.sleep(self.buffer_window_seconds)
    if self.tailer:
      self.tailer.Stop()

  def Print(self):
    """Print GCL logs to the console."""
    if self.has_tipp_pool:
      resource_names = [self.workerpool_log_view]
    else:
      resource_names = [self.default_log_view]
    output_logs = common.FetchLogs(
        log_filter=self.log_filter,
        order_by='asc',
        resource_names=resource_names,
    )
    self._PrintFirstLine(' REMOTE RUN OUTPUT ')
    for output in output_logs:
      # textPayload (camelCase) is the fetch API's message field name.
      text = self._ValidateScreenReader(output.textPayload)
      self._PrintLogLine(text)
    self._PrintLastLine()
class CloudBuildLogClient(object):
  """Client for interacting with the Cloud Build API (and Cloud Build logs)."""

  def __init__(self, sleep_time=60):
    """Initializes the log client.

    Args:
      sleep_time: int, seconds to keep tailing after a run completes, since
        log entries can still arrive after completion.
    """
    self.v2_client = v2_client_util.GetClientInstance()
    self.sleep_time = sleep_time

  def _GetLogFilter(self, region, run_id, run_type, has_tipp_pool, create_time):
    """Builds the Cloud Logging filter that selects the run's log entries."""
    if has_tipp_pool:
      return self._GetWorkerPoolLogFilter(create_time, run_id, run_type, region)
    else:
      return self._GetNonWorkerPoolLogFilter(create_time, run_id, region)

  def _GetNonWorkerPoolLogFilter(self, create_time, run_id, region):
    """Filter for runs that did not execute on a (TiPP) worker pool."""
    return (
        'timestamp>="{timestamp}" AND labels.location="{region}" AND'
        ' labels.run_name={run_id}'
    ).format(timestamp=create_time, region=region, run_id=run_id)

  def _GetWorkerPoolLogFilter(self, create_time, run_id, run_type, region):
    """Filter for runs that executed on a (TiPP) worker pool."""
    run_label = 'taskRun' if run_type == 'taskrun' else 'pipelineRun'
    # Entries may carry either the dotted or underscored form of the Tekton
    # pod label, so both are matched.
    return (
        '(labels."k8s-pod/tekton.dev/{run_label}"="{run_id}" OR '
        'labels."k8s-pod/tekton_dev/{run_label}"="{run_id}") AND '
        'timestamp>="{timestamp}" AND resource.labels.location="{region}"'
    ).format(
        run_label=run_label, run_id=run_id, timestamp=create_time, region=region
    )

  def ShouldStopTailer(self, log_tailer, run, project, region, run_id,
                       run_type):
    """Checks whether a log tailer should be stopped.

    Blocks, polling the run once a second, until it reports a completionTime;
    then stops the tailer (after a grace period) and returns the final run.
    """
    while run.completionTime is None:
      run = v2_client_util.GetRun(project, region, run_id, run_type)
      time.sleep(1)
    if log_tailer:
      # wait for some time since logs can still be coming in after run
      # is completed
      time.sleep(self.sleep_time)
      log_tailer.Stop()
    return run

  def Stream(self, project, region, run_id, run_type, out=log.out):
    """Streams the logs for a run if available."""
    run = v2_client_util.GetRun(project, region, run_id, run_type)
    # TODO: b/327446875 - Remove this check once the TiPP pool is removed.
    has_tipp_pool = (
        bool(run.workerPool) and 'workerPoolSecondGen' not in run.workerPool
    )
    log_filter = self._GetLogFilter(
        region, run_id, run_type, has_tipp_pool, run.createTime
    )
    log_tailer = GCLLogTailer.FromFilter(
        project, region, log_filter, has_tipp_pool, out=out
    )
    t = None
    if log_tailer:
      # Tail on a worker thread so this thread can poll the run's status.
      t = v1_logs_util.ThreadInterceptor(target=log_tailer.Tail)
      t.start()
    run = self.ShouldStopTailer(log_tailer, run, project, region, run_id,
                                run_type)
    if t:
      t.join()
      # Re-raise any exception the tailer thread captured.
      if t.exception is not None:
        raise t.exception
    return run

  def PrintLog(
      self,
      project,
      region,
      run_id,
      run_type,
  ):
    """Print the logs for a run."""
    run = v2_client_util.GetRun(project, region, run_id, run_type)
    has_tipp_pool = (
        bool(run.workerPool) and 'workerPoolSecondGen' not in run.workerPool
    )
    log_filter = self._GetLogFilter(
        region, run_id, run_type, has_tipp_pool, run.createTime
    )
    log_tailer = GCLLogTailer.FromFilter(
        project, region, log_filter, has_tipp_pool
    )
    if log_tailer:
      log_tailer.Print()

View File

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing output for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
def ParseName(pattern, primitive_type):
  """Parses the name of a pipelineRun/taskRun.

  Args:
    pattern: The full resource name, e.g.
      "projects/{project}/locations/{location}/pipelineRuns/{pipeline_run}" or
      "projects/{project}/locations/{location}/taskRuns/{task_run}".
    primitive_type: Either "pipelinerun" or "taskrun".

  Returns:
    The trailing run ID, or None when the name or type does not match.
  """
  regexes = {
      "pipelinerun": r"projects/([^/]+)/locations/([^/]+)/pipelineRuns/([^/]+)",
      "taskrun": r"projects/([^/]+)/locations/([^/]+)/taskRuns/([^/]+)",
  }
  regex = regexes.get(primitive_type)
  if regex is None:
    return None
  match = re.match(regex, pattern)
  if match:
    return match.group(3)

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util
from googlecloudsdk.api_lib.cloudbuild.v2 import input_util
from googlecloudsdk.core import log
_WORKER_POOL_ANNOTATION = "cloudbuild.googleapis.com/worker-pool"
_MANAGED_SIDECARS_ANNOTATION = "cloudbuild.googleapis.com/managed-sidecars"
_MACHINE_TYPE = "cloudbuild.googleapis.com/worker/machine-type"
_PROVENANCE_ENABLED = "cloudbuild.googleapis.com/provenance/enabled"
_PROVENANCE_STORAGE = "cloudbuild.googleapis.com/provenance/storage"
_PROVENANCE_REGION = "cloudbuild.googleapis.com/provenance/region"
def TektonYamlDataToPipelineRun(data):
  """Convert Tekton yaml file into PipelineRun message."""
  _VersionCheck(data)
  _MetadataTransform(data)
  spec = data["spec"]
  # Exactly one of pipelineSpec / pipelineRef must be provided.
  if "pipelineSpec" in spec:
    _PipelineSpecTransform(spec["pipelineSpec"])
  elif "pipelineRef" in spec:
    input_util.RefTransform(spec["pipelineRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "PipelineSpec or PipelineRef is required.")
  if "resources" in spec:
    spec.pop("resources")
    log.warning(
        "PipelineResources are dropped because they are deprecated: "
        "https://github.com/tektoncd/pipeline/blob/main/docs/resources.md")
  _ServiceAccountTransformPipelineSpec(spec)
  input_util.ParamDictTransform(spec.get("params", []))
  messages = client_util.GetMessagesModule()
  # The spec is flattened into the top-level mapping; keys present at both
  # levels are rejected first.
  _CheckSpecKeys(data, spec)
  data.update(spec)
  data.pop("spec")
  data.pop("kind")
  schema_message = encoding.DictToMessage(data, messages.PipelineRun)
  # Reject any YAML fields the PipelineRun proto does not define.
  input_util.UnrecognizedFields(schema_message)
  return schema_message
def _CheckSpecKeys(data, spec):
for key in spec.keys():
if key in data:
raise cloudbuild_exceptions.InvalidYamlError(
"{0} only needs to be defined in spec".format(key)
)
def TektonYamlDataToTaskRun(data):
  """Convert Tekton yaml file into TaskRun message."""
  _VersionCheck(data)
  metadata = _MetadataTransform(data)
  spec = data["spec"]
  # Exactly one of taskSpec / taskRef must be provided.
  if "taskSpec" in spec:
    _TaskSpecTransform(spec["taskSpec"])
    # Managed sidecars are declared via a metadata annotation.
    managed_sidecars = _MetadataToSidecar(metadata)
    if managed_sidecars:
      spec["taskSpec"]["managedSidecars"] = managed_sidecars
  elif "taskRef" in spec:
    input_util.RefTransform(spec["taskRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "TaskSpec or TaskRef is required.")
  _ServiceAccountTransformTaskSpec(spec)
  input_util.ParamDictTransform(spec.get("params", []))
  messages = client_util.GetMessagesModule()
  # The spec is flattened into the top-level mapping; keys present at both
  # levels are rejected first.
  _CheckSpecKeys(data, spec)
  data.update(spec)
  data.pop("spec")
  data.pop("kind")
  schema_message = encoding.DictToMessage(data, messages.TaskRun)
  # Reject any YAML fields the TaskRun proto does not define.
  input_util.UnrecognizedFields(schema_message)
  return schema_message
def _VersionCheck(data):
api_version = data.pop("apiVersion")
if api_version != "tekton.dev/v1" and api_version != "tekton.dev/v1beta1":
raise cloudbuild_exceptions.TektonVersionError()
def _MetadataTransform(data):
  """Helper function to transform the metadata.

  Moves worker-pool, machine-type, and provenance annotations from metadata
  into the spec, and attaches all annotations to the spec.

  Args:
    data: The parsed Tekton YAML mapping; mutated in place.

  Returns:
    The metadata mapping that was removed from data.

  Raises:
    InvalidYamlError: If spec is empty or metadata is missing.
  """
  spec = data["spec"]
  if not spec:
    raise cloudbuild_exceptions.InvalidYamlError("spec is empty.")
  # Default to None so an absent "metadata" key raises InvalidYamlError below
  # instead of an opaque KeyError.
  metadata = data.pop("metadata", None)
  if not metadata:
    raise cloudbuild_exceptions.InvalidYamlError("Metadata is missing in yaml.")
  annotations = metadata.get("annotations", {})
  if _WORKER_POOL_ANNOTATION in annotations:
    spec["workerPool"] = annotations[_WORKER_POOL_ANNOTATION]
  spec["annotations"] = annotations
  if _MACHINE_TYPE in annotations:
    spec["worker"] = {"machineType": annotations[_MACHINE_TYPE]}
  # NOTE: a dead `security = {}` / `if security:` block was removed here — it
  # could never populate spec["security"].
  provenance = {}
  if _PROVENANCE_ENABLED in annotations:
    provenance["enabled"] = annotations[_PROVENANCE_ENABLED].upper()
  if _PROVENANCE_STORAGE in annotations:
    provenance["storage"] = annotations[_PROVENANCE_STORAGE].upper()
  if _PROVENANCE_REGION in annotations:
    provenance["region"] = annotations[_PROVENANCE_REGION].upper()
  if provenance:
    spec["provenance"] = provenance
  return metadata
def _MetadataToSidecar(metadata):
  """Returns the managed-sidecars annotation value, or None if absent."""
  if "annotations" not in metadata:
    return None
  annotations = metadata["annotations"]
  if _MANAGED_SIDECARS_ANNOTATION not in annotations:
    return None
  return annotations[_MANAGED_SIDECARS_ANNOTATION]
def _PipelineSpecTransform(spec):
for param_spec in spec.get("params", []):
input_util.ParamSpecTransform(param_spec)
for task in spec["tasks"]:
_TaskTransform(task)
if "finally" in spec:
finally_tasks = spec.pop("finally")
for task in finally_tasks:
_TaskTransform(task)
spec["finallyTasks"] = finally_tasks
for pipeline_result in spec.get("results", []):
input_util.PipelineResultTransform(pipeline_result)
def _TaskSpecTransform(spec):
for param_spec in spec.get("params", []):
input_util.ParamSpecTransform(param_spec)
for task_result in spec.get("results", []):
input_util.TaskResultTransform(task_result)
for task_step in spec.get("steps", []):
input_util.TaskStepTransform(task_step)
def _TaskTransform(task):
  """Transform task message."""
  if "taskSpec" in task:
    task_spec = task.pop("taskSpec")
    _TaskSpecTransform(task_spec)
    # Sidecars may be declared via the embedded spec's metadata annotation.
    managed_sidecars = _MetadataToSidecar(
        task_spec.pop("metadata")) if "metadata" in task_spec else []
    if managed_sidecars:
      task_spec["managedSidecars"] = managed_sidecars
    # The spec is double-wrapped: the outer "taskSpec" field holds a message
    # whose own "taskSpec" field carries the spec (presumably mirroring the
    # v2 PipelineTask proto's embedded-spec shape — confirm against the
    # generated messages module).
    task["taskSpec"] = {"taskSpec": task_spec}
  if "taskRef" in task:
    input_util.RefTransform(task["taskRef"])
  # Tekton "when" clauses become "whenExpressions"; each camelCase operator
  # becomes an upper snake-case enum value.
  whens = task.pop("when", [])
  for when in whens:
    if "operator" in when:
      when["expressionOperator"] = input_util.CamelToSnake(
          when.pop("operator")).upper()
  task["whenExpressions"] = whens
  input_util.ParamDictTransform(task.get("params", []))
def _ServiceAccountTransformPipelineSpec(spec):
if "taskRunTemplate" in spec:
if "serviceAccountName" in spec["taskRunTemplate"]:
sa = spec.pop("taskRunTemplate").pop("serviceAccountName")
security = spec.setdefault("security", {})
security["serviceAccount"] = sa
return
raise cloudbuild_exceptions.InvalidYamlError(
"spec.taskRunTemplate.serviceAccountName is required."
)
def _ServiceAccountTransformTaskSpec(spec):
if "serviceAccountName" in spec:
sa = spec.pop("serviceAccountName")
spec["serviceAccount"] = sa
security = spec.setdefault("security", {})
security["serviceAccount"] = sa

View File

@@ -0,0 +1,397 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing ouput for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.cloudbuild.v2 import output_util
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import custom_printer_base
PRINTER_FORMAT = "tekton"
class TektonPrinter(custom_printer_base.CustomPrinterBase):
  """Print a PipelineRun or TaskRun in Tekton YAML format."""

  def Transform(self, internal_proto):
    """Renders a PipelineRun/TaskRun message as a Tekton YAML string.

    Returns None when the message carries neither pipeline nor task fields.
    """
    proto = encoding.MessageToDict(internal_proto)
    if (
        "pipelineSpec" in proto
        or "pipelineRef" in proto
        or "pipelineSpecYaml" in proto
    ):
      yaml_str = self.PublicPRToTektonPR(proto)
      return yaml.dump(yaml_str, round_trip=True)
    elif "taskSpec" in proto or "taskRef" in proto:
      yaml_str = self.PublicTRToTektonPR(proto)
      return yaml.dump(yaml_str, round_trip=True)

  def PublicPRToTektonPR(self, internal):
    """Convert a PipelineRun message into Tekton yaml."""
    pr = {
        "metadata": {},
        "spec": {},
        "status": {},
    }
    # METADATA
    if "name" in internal:
      pr["metadata"]["name"] = output_util.ParseName(
          internal.pop("name"), "pipelinerun"
      )
    if "annotations" in internal:
      pr["metadata"]["annotations"] = internal.pop("annotations")
    # SPEC
    if "params" in internal:
      pr["spec"]["params"] = _TransformParams(internal.pop("params"))
    if "pipelineSpec" in internal:
      pr["spec"]["pipelineSpec"] = _TransformPipelineSpec(
          internal.pop("pipelineSpec")
      )
    elif "pipelineRef" in internal:
      pr["spec"]["pipelineRef"] = TransformRef(internal.pop("pipelineRef"))
    elif "pipelineSpecYaml" in internal:
      # Re-parse the raw YAML string so it nests as structured YAML rather
      # than as an escaped string literal.
      yaml_string = internal.pop("pipelineSpecYaml")
      formatted_yaml = yaml.load(yaml_string, round_trip=True)
      pr["spec"]["pipelineSpecYaml"] = formatted_yaml
    if "timeout" in internal:
      pr["spec"]["timeout"] = internal.pop("timeout")
    if "workspaces" in internal:
      pr["spec"]["workspaces"] = internal.pop("workspaces")
    # STATUS
    if "conditions" in internal:
      conditions = internal.pop("conditions")
      pr["status"]["conditions"] = _TransformConditions(conditions)
    if "startTime" in internal:
      pr["status"]["startTime"] = internal.pop("startTime")
    if "completionTime" in internal:
      pr["status"]["completionTime"] = internal.pop("completionTime")
    # We set the resolvedPipelineSpec as status in Tekton
    if "resolvedPipelineSpec" in internal:
      rps = internal.pop("resolvedPipelineSpec")
      pr["status"]["pipelineSpec"] = _TransformPipelineSpec(rps)
    # PipelineRunResults
    if "results" in internal:
      pr["status"]["results"] = _TransformPipelineRunResults(
          internal.pop("results")
      )
    if "childReferences" in internal:
      crs = internal.pop("childReferences")
      pr["status"]["childReferences"] = crs
    # TASKRUNTEMPLATE
    if "serviceAccount" in internal:
      pr["taskRunTemplate"] = {
          "serviceAccountName": internal.pop("serviceAccount"),
      }
    return pr

  def PublicTRToTektonPR(self, internal):
    """Convert a TaskRun message into Tekton yaml."""
    tr = {
        "metadata": {},
        "spec": {},
        "status": {},
    }
    # METADATA
    if "name" in internal:
      tr["metadata"]["name"] = output_util.ParseName(
          internal.pop("name"), "taskrun"
      )
    # SPEC
    if "params" in internal:
      tr["spec"]["params"] = _TransformParams(internal.pop("params"))
    if "taskSpec" in internal:
      tr["spec"]["taskSpec"] = _TransformTaskSpec(internal.pop("taskSpec"))
    elif "taskRef" in internal:
      tr["spec"]["taskRef"] = TransformRef(internal.pop("taskRef"))
    if "timeout" in internal:
      tr["spec"]["timeout"] = internal.pop("timeout")
    if "workspaces" in internal:
      tr["spec"]["workspaces"] = internal.pop("workspaces")
    if "serviceAccountName" in internal:
      tr["spec"]["serviceAccountName"] = internal.pop("serviceAccountName")
    # STATUS
    if "conditions" in internal:
      tr["status"]["conditions"] = _TransformConditions(
          internal.pop("conditions")
      )
    if "startTime" in internal:
      tr["status"]["startTime"] = internal.pop("startTime")
    if "completionTime" in internal:
      tr["status"]["completionTime"] = internal.pop("completionTime")
    # We set the resolvedTaskSpec as the Status field in Tekton
    if "resolvedTaskSpec" in internal:
      rts = internal.pop("resolvedTaskSpec")
      tr["status"]["taskSpec"] = _TransformTaskSpec(rts)
    # StepState
    if "steps" in internal:
      tr["status"]["steps"] = _TransformStepStates(internal.pop("steps"))
    # TaskRunResults
    if "results" in internal:
      tr["status"]["results"] = _TransformTaskRunResults(
          internal.pop("results")
      )
    # SidecarState
    if "sidecars" in internal:
      tr["status"]["sidecars"] = internal.pop("sidecars")
    return tr
def _TransformPipelineSpec(ps):
"""Convert PipelineSpec into Tekton yaml."""
pipeline_spec = {}
if "params" in ps:
pipeline_spec["params"] = TransformParamsSpec(ps.pop("params"))
if "tasks" in ps:
pipeline_spec["tasks"] = _TransformPipelineTasks(ps.pop("tasks"))
if "results" in ps:
pipeline_spec["results"] = _TransformPipelineResults(ps.pop("results"))
if "finallyTasks" in ps:
pipeline_spec["finally"] = _TransformPipelineTasks(ps.pop("finallyTasks"))
if "workspaces" in ps:
pipeline_spec["workspaces"] = ps.pop("workspaces")
return pipeline_spec
def TransformParamsSpec(ps):
  """Convert a list of ParamSpec message dicts into Tekton yaml form."""
  specs = []
  for src in ps:
    entry = {}
    for key in ("name", "description"):
      if key in src:
        entry[key] = src.pop(key)
    # The API stores the type enum upper-cased; Tekton wants lowercase.
    if "type" in src:
      entry["type"] = src.pop("type").lower()
    if "default" in src:
      entry["default"] = _TransformParamValue(src.pop("default"))
    if "properties" in src:
      entry["properties"] = src.pop("properties")
    specs.append(entry)
  return specs
def _TransformTaskSpec(ts):
"""Convert TaskSpecs into Tekton yaml."""
task_spec = {}
if "params" in ts:
task_spec["params"] = TransformParamsSpec(ts.pop("params"))
if "steps" in ts:
task_spec["steps"] = _TransformSteps(ts.pop("steps"))
if "stepTemplate" in ts:
task_spec["stepTemplate"] = ts.pop("stepTemplate")
if "results" in ts:
task_spec["results"] = _TransformTaskResults(ts.pop("results"))
if "sidecars" in ts:
task_spec["sidecars"] = ts.pop("sidecars")
if "workspaces" in ts:
task_spec["workspaces"] = ts.pop("workspaces")
return task_spec
def _TransformOnError(oe):
  """Convert an OnError enum value into Tekton's camelCase spelling."""
  lowered = oe.lower()
  return cloudbuild_util.SnakeToCamelString(lowered)
def _TransformSteps(steps):
"""Convert Steps into Tekton yaml."""
results = []
for step in steps:
if "ref" in step:
step["ref"] = TransformRef(step.pop("ref"))
if "params" in step:
step["params"] = _TransformParams(step.pop("params"))
results.append(step)
if "onError" in step:
step["onError"] = _TransformOnError(step.pop("onError"))
return results
def _TransformPipelineTasks(ts):
"""Convert PipelineTasks into Tekton yaml."""
tasks = []
for task in ts:
t = {"name": task.get("name", None)}
if "params" in task:
t["params"] = _TransformParams(task.pop("params"))
if "taskSpec" in task:
task_spec = task.pop("taskSpec").pop("taskSpec")
t["taskSpec"] = _TransformTaskSpec(task_spec)
elif "taskRef" in task:
t["taskRef"] = task.pop("taskRef")
if "workspaces" in task:
t["workspaces"] = task.pop("workspaces")
if "runAfter" in task:
t["runAfter"] = task.pop("runAfter")
if "timeout" in task:
t["timeout"] = task.pop("timeout")
tasks.append(t)
return tasks
def _TransformPipelineResults(rs):
"""Convert PipelineResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "description" in r:
result["description"] = r.pop("description")
if "type" in r:
result["type"] = r.pop("type").lower()
if "value" in r:
result["value"] = _TransformResultValue(r.pop("value"))
results.append(result)
return results
def _TransformTaskResults(rs):
"""Convert TaskResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "description" in r:
result["description"] = r.pop("description")
if "type" in r:
result["type"] = r.pop("type").lower()
if "properties" in r:
result["properties"] = r.pop("properties")
if "value" in r:
result["value"] = _TransformParamValue(r.pop("value"))
results.append(result)
return results
def _TransformPipelineRunResults(rs):
"""Convert PipelineRunResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "value" in r:
result["value"] = _TransformResultValue(r.pop("value"))
results.append(result)
return results
def _TransformStepStates(steps):
"""Convert StepState into Tekton yaml."""
step_states = []
for s in steps:
if "results" in s:
s["results"] = _TransformTaskRunResults(s.pop("results"))
step_states.append(s)
return step_states
def _TransformTaskRunResults(rs):
"""Convert TaskRunResults into Tekton yaml."""
results = []
for r in rs:
result = {}
if "name" in r:
result["name"] = r.pop("name")
if "resultValue" in r:
result["value"] = _TransformResultValue(r.pop("resultValue"))
results.append(result)
return results
def _TransformResultValue(v):
"""Convert ResultValue into Tekton yaml."""
if "stringVal" in v:
return v.pop("stringVal")
if "arrayVal" in v:
return v.pop("arrayVal")
if "objectVal" in v:
return v.pop("objectVal")
return v
def _TransformParamValue(v):
"""Convert ParamValue into Tekton yaml."""
if "stringVal" in v:
return v.pop("stringVal")
if "arrayVal" in v:
return v.pop("arrayVal")
return v
def _TransformParams(ps):
"""Convert Params into Tekton yaml."""
params = []
for p in ps:
param = {}
if "name" in p:
param["name"] = p.pop("name")
if "value" in p:
param["value"] = _TransformParamValue(p.pop("value"))
params.append(param)
return params
def _TransformConditions(cs):
"""Convert Conditions into Tekton yaml."""
conditions = []
for c in cs:
condition = {}
# Only append the condition if it has a message
# which indicates the final condition
if "message" in c:
condition["message"] = c.pop("message")
if "lastTransitionTime" in c:
condition["lastTransitionTime"] = c.pop("lastTransitionTime")
if "status" in c:
condition["status"] = c.pop("status").capitalize()
if "type" in c:
condition["type"] = c.pop("type").capitalize()
if "reason" in c:
condition["reason"] = c.pop("reason")
conditions.append(condition)
return conditions
def _TransformChildRefs(crs):
"""Convert ChildReferences into Tekton yaml."""
child_refs = []
for cr in crs:
child_ref = {}
if "name" in cr:
child_ref["name"] = cr.pop("name")
if "pipelineTask" in cr:
child_ref["pipelineTask"] = cr.pop("pipelineTask")
child_refs.append(child_ref)
return child_refs
def TransformRef(ref):
  """Convert a generic reference (step, task, or pipeline) into Tekton yaml."""
  converted = {}
  for key in ("name", "resolver"):
    if key in ref:
      converted[key] = ref.pop(key)
  if "params" in ref:
    converted["params"] = _TransformParams(ref.pop("params"))
  return converted

View File

@@ -0,0 +1,171 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the parsing input for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.cloudbuild.v2 import client_util
from googlecloudsdk.api_lib.cloudbuild.v2 import input_util
from googlecloudsdk.core import yaml
# Dotted key paths of Workflow options whose values are enums; their values
# are upper-cased during _WorkflowTransform via SetDictDottedKeyUpperCase.
_WORKFLOW_OPTIONS_ENUMS = [
    "options.provenance.enabled",
    "options.provenance.storage",
    "options.provenance.region",
]
def CloudBuildYamlDataToWorkflow(workflow):
  """Convert cloudbuild.yaml file into Workflow message.

  Args:
    workflow: dict parsed from the user's cloudbuild.yaml; mutated in place
      during the transform.

  Returns:
    The Workflow proto message built from the transformed dict.
  """
  _WorkflowTransform(workflow)
  _WorkflowValidate(workflow)
  workflow_message = encoding.DictToMessage(
      workflow, client_util.GetMessagesModule().Workflow
  )
  # Surface any yaml fields that did not map onto the proto.
  input_util.UnrecognizedFields(workflow_message)
  return workflow_message
def _WorkflowValidate(workflow):
"""Check that the given workflow has all required fields.
Args:
workflow: The user-supplied Cloud Build Workflow YAML.
Raises:
InvalidYamlError: If the workflow is invalid.
"""
if (
"options" not in workflow
or "security" not in workflow["options"]
or "serviceAccount" not in workflow["options"]["security"]
):
raise cloudbuild_exceptions.InvalidYamlError(
"A service account is required. Specify your user-managed service"
" account using the options.security.serviceAccount field"
)
def _WorkflowTransform(workflow):
  """Transform workflow message."""
  # Accept the short "triggers" spelling and canonicalize it first.
  if "triggers" in workflow:
    workflow["workflowTriggers"] = workflow.pop("triggers")
  for trigger in workflow.get("workflowTriggers", []):
    input_util.WorkflowTriggerTransform(trigger)
  for spec in workflow.get("params", []):
    input_util.ParamSpecTransform(spec)
    if not spec.get("name", ""):
      raise cloudbuild_exceptions.InvalidYamlError(
          "Workflow parameter name is required"
      )
    # Workflow params must be plain strings, both in declared type and in
    # the type of any declared default value.
    default_type = spec.get("default", {"type": "STRING"}).get("type")
    if spec.get("type", "string") != "string" or default_type != "STRING":
      raise cloudbuild_exceptions.InvalidYamlError(
          "Only string are supported for workflow parameters, error at "
          "parameter with name: {}".format(spec.get("name"))
      )
  # An inline spec is stored as a yaml blob; a ref is transformed in place.
  if "pipelineSpec" in workflow:
    workflow["pipelineSpecYaml"] = yaml.dump(
        workflow.pop("pipelineSpec"), round_trip=True
    )
  elif "pipelineRef" in workflow:
    input_util.RefTransform(workflow["pipelineRef"])
  else:
    raise cloudbuild_exceptions.InvalidYamlError(
        "PipelineSpec or PipelineRef is required.")
  for binding in workflow.get("workspaces", []):
    _WorkspaceBindingTransform(binding)
  if "options" in workflow and "status" in workflow["options"]:
    workflow["options"]["statusUpdateOptions"] = workflow["options"].pop(
        "status"
    )
  for enum_key in _WORKFLOW_OPTIONS_ENUMS:
    input_util.SetDictDottedKeyUpperCase(workflow, enum_key)
def _PipelineSpecTransform(pipeline_spec):
"""Transform pipeline spec message."""
for pipeline_task in pipeline_spec.get("tasks", []):
_PipelineTaskTransform(pipeline_task)
for param_spec in pipeline_spec.get("params", []):
input_util.ParamSpecTransform(param_spec)
if "finally" in pipeline_spec:
finally_tasks = pipeline_spec.pop("finally")
for task in finally_tasks:
_PipelineTaskTransform(task)
pipeline_spec["finallyTasks"] = finally_tasks
def _PipelineTaskTransform(pipeline_task):
  """Transform pipeline task message.

  Rewrites the task dict in place: double-nests an embedded task spec the
  way the API expects, transforms refs, and renames "when" to
  "whenExpressions".

  Args:
    pipeline_task: dict for a single pipeline task; mutated in place.
  """
  if "taskSpec" in pipeline_task:
    popped_task_spec = pipeline_task.pop("taskSpec")
    for param_spec in popped_task_spec.get("params", []):
      input_util.ParamSpecTransform(param_spec)
    # The API expects the embedded spec nested one level deeper.
    pipeline_task["taskSpec"] = {"taskSpec": popped_task_spec}
  elif "taskRef" in pipeline_task:
    # The ref is transformed in place. (The original also re-assigned the
    # key to itself via pop, a no-op dropped here.)
    input_util.RefTransform(pipeline_task["taskRef"])
  if "when" in pipeline_task:
    for when_expression in pipeline_task.get("when", []):
      _WhenExpressionTransform(when_expression)
    pipeline_task["whenExpressions"] = pipeline_task.pop("when")
  input_util.ParamDictTransform(pipeline_task.get("params", []))
def _WhenExpressionTransform(when_expression):
if "operator" in when_expression:
when_expression["expressionOperator"] = input_util.CamelToSnake(
when_expression.pop("operator")).upper()
def _WorkspaceBindingTransform(workspace_binding):
"""Transform workspace binding message."""
if "secretName" in workspace_binding:
popped_secret = workspace_binding.pop("secretName")
workspace_binding["secret"] = {}
workspace_binding["secret"]["secretName"] = popped_secret
elif "volume" in workspace_binding:
popped_volume = workspace_binding.pop("volume")
# Volume Claim Template.
workspace_binding["volumeClaim"] = {}
if "storage" in popped_volume:
storage = popped_volume.pop("storage")
workspace_binding["volumeClaim"]["storage"] = storage
else:
return

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for displaying workflows for cloud build v2 API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudbuild.v2 import pipeline_output_util
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import custom_printer_base
# Format name used to select this custom printer.
PRINTER_FORMAT = "workflow"
class WorkflowPrinter(custom_printer_base.CustomPrinterBase):
  """Print a Workflow in YAML with comments."""

  def _WorkflowDisplayLines(self, workflow):
    """Apply formatting to the workflow for the describe command.

    Inlines the stored pipeline-spec yaml back into the record, converts
    refs and params into Tekton display form, and re-dumps the whole
    record as yaml lines.
    """
    if "pipelineSpecYaml" in workflow:
      spec_yaml = workflow.pop("pipelineSpecYaml")
      workflow = self._updateWorkflowSpec(workflow, spec_yaml)
    elif "generatedYaml" in workflow.get("pipelineSpec", {}):
      spec_yaml = workflow["pipelineSpec"].pop("generatedYaml")
      del workflow["pipelineSpec"]
      workflow = self._updateWorkflowSpec(workflow, spec_yaml)
    elif "pipelineRef" in workflow:
      workflow["pipelineRef"] = pipeline_output_util.TransformRef(
          workflow.pop("pipelineRef")
      )
    params = workflow.get("params", {})
    if params:
      workflow["params"] = pipeline_output_util.TransformParamsSpec(params)
    rendered = yaml.dump(workflow, round_trip=True)
    return custom_printer_base.Lines(rendered.split("\n"))

  def _updateWorkflowSpec(self, workflow, yaml_str):
    """Parse yaml_str and attach it as the workflow's pipelineSpec."""
    workflow["pipelineSpec"] = yaml.load(yaml_str, round_trip=True)
    return workflow

  def Transform(self, record):
    """Transform a Workflow record into the printer's output structure.

    Args:
      record: a dict object

    Returns:
      lines formatted for output
    """
    return self._WorkflowDisplayLines(record)