feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A helper library for this command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions for interacting with the Cloud Dataflow API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class Error(exceptions.Error):
  """Base class for dataflow exceptions."""


class ServiceException(Error):
  """Generic exception related to calling the Dataflow service APIs."""


class UnsupportedNameException(Error):
  """Raised if a name is incompatible with Graphviz ID escaping.

  For example, a step name ending in a backslash cannot be represented as a
  quoted Graphviz ID.
  """


class InvalidExclusionException(Error):
  """Raised if a user tries to exclude incompatible metrics."""

View File

@@ -0,0 +1,107 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Facility for displaying information about a Job message to a user.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataflow import apis
from googlecloudsdk.core.util import times
def FormatDateTime(string):
  """Returns a yyyy-mm-dd hh:mm:ss formatted date/time for string.

  Args:
    string: A date/time string parseable by times.ParseDateTime.

  Returns:
    The formatted date/time, or None when the parsed timestamp is falsy
    (presumably an unset/epoch value — confirm against times helpers).
  """
  parsed = times.ParseDateTime(string)
  if times.GetTimeStampFromDateTime(parsed):
    return times.FormatDateTime(parsed, '%Y-%m-%d %H:%M:%S')
  return None
class DisplayInfo(object):
  """Information about a job displayed in command output.

  Fields:
    id: the job ID
    name: the job name
    type: one of 'batch', 'streaming'
    state: string representing the current job status
    creationTime: in the form yyyy-mm-dd hh:mm:ss
    stateTime: in the form yyyy-mm-dd hh:mm:ss
    location: the job's regional endpoint
  """

  def __init__(self, job):
    self.id = job.id
    self.name = job.name
    self.type = DisplayInfo._JobTypeForJob(job.type)
    self.state = DisplayInfo._StatusForJob(job.currentState)
    self.location = job.location
    # The camel-case attribute names below intentionally mirror the raw API
    # response so --filter expressions behave consistently across commands;
    # this class exists only for formatting jobs for display.
    # pylint: disable=invalid-name
    self.stateTime = FormatDateTime(job.currentStateTime)
    self.creationTime = FormatDateTime(job.createTime)
    # pylint: enable=invalid-name

  @staticmethod
  def _JobTypeForJob(job_type):
    """Return a string describing the job type.

    Args:
      job_type: The job type enum

    Returns:
      string describing the job type
    """
    job_type_enum = apis.GetMessagesModule().Job.TypeValueValuesEnum
    if job_type == job_type_enum.JOB_TYPE_BATCH:
      return 'Batch'
    if job_type == job_type_enum.JOB_TYPE_STREAMING:
      return 'Streaming'
    return 'Unknown'

  @staticmethod
  def _StatusForJob(job_state):
    """Return a string describing the job state.

    Args:
      job_state: The job state enum

    Returns:
      string describing the job state
    """
    state_enum = apis.GetMessagesModule().Job.CurrentStateValueValuesEnum
    state_names = {
        state_enum.JOB_STATE_CANCELLED: 'Cancelled',
        state_enum.JOB_STATE_CANCELLING: 'Cancelling',
        state_enum.JOB_STATE_DONE: 'Done',
        state_enum.JOB_STATE_DRAINED: 'Drained',
        state_enum.JOB_STATE_DRAINING: 'Draining',
        state_enum.JOB_STATE_FAILED: 'Failed',
        state_enum.JOB_STATE_PENDING: 'Pending',
        state_enum.JOB_STATE_QUEUED: 'Queued',
        state_enum.JOB_STATE_RUNNING: 'Running',
        state_enum.JOB_STATE_STOPPED: 'Stopped',
        state_enum.JOB_STATE_UPDATED: 'Updated',
    }
    return state_names.get(job_state, 'Unknown')

View File

@@ -0,0 +1,179 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for parsing SQL query parameters from the command line."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import json
from googlecloudsdk.api_lib.dataflow import exceptions
from googlecloudsdk.core.util import files
import six
def ParseParametersFile(path):
  """Reads a JSON parameters file and returns its contents as a JSON string.

  Args:
    path: Path to a JSON file containing a list of parameter objects.

  Returns:
    A JSON string with the parameter list, each parameter's keys sorted.
  """
  with files.FileReader(path) as reader:
    raw_parameters = json.load(reader)
  # Sort each parameter dict's keys; dict order only matters for predictable
  # test output.
  normalized = [
      collections.OrderedDict(sorted(parameter.items()))
      for parameter in raw_parameters
  ]
  return json.dumps(normalized)
def ParseParametersList(parameters):
  """Parses a list of parameters.

  Args:
    parameters: A list of parameter strings with the format name:type:value,
      for example min_word_count:INT64:250.

  Returns:
    A JSON string containing the parameters.
  """
  return json.dumps([_ParseParameter(parameter) for parameter in parameters])
def _SplitParam(param_string):
split = param_string.split(':', 1)
if len(split) != 2:
raise exceptions.Error(
'Query parameters must be of the form: '
'"name:type:value", ":type:value", or "name::value". '
'An empty name produces a positional parameter. '
'An empty type produces a STRING parameter.')
return split
def _ParseParameter(param_string):
  """Parses one name:type:value string into a parameter dict.

  Args:
    param_string: A single command-line parameter string.

  Returns:
    An OrderedDict with an optional 'name' entry plus 'parameterType' and
    'parameterValue' entries.
  """
  name, remainder = _SplitParam(param_string)
  param_type, param_value = _ParseParameterTypeAndValue(remainder)
  parsed = collections.OrderedDict()
  if name:
    parsed['name'] = name
  parsed['parameterType'] = param_type
  parsed['parameterValue'] = param_value
  return parsed
def _ParseParameterTypeAndValue(param_string):
  """Parse a string of the form <recursive_type>:<value> into each part."""
  type_part, value_part = _SplitParam(param_string)
  # An empty type defaults to STRING.
  parsed_type = _ParseParameterType(type_part or 'STRING')
  return parsed_type, _ParseParameterValue(parsed_type, value_part)
def _ParseParameterType(type_string):
"""Parse a parameter type string into a JSON dict for the DF SQL launcher."""
type_dict = {'type': type_string.upper()}
if type_string.upper().startswith('ARRAY<') and type_string.endswith('>'):
type_dict = collections.OrderedDict([
('arrayType', _ParseParameterType(type_string[6:-1])), ('type', 'ARRAY')
])
if type_string.startswith('STRUCT<') and type_string.endswith('>'):
type_dict = collections.OrderedDict([('structTypes',
_ParseStructType(type_string[7:-1])),
('type', 'STRUCT')])
if not type_string:
raise exceptions.Error('Query parameter missing type')
return type_dict
def _ParseStructType(type_string):
  """Parse a Struct QueryParameter type into a JSON dict form."""
  return [
      collections.OrderedDict([('name', field_name),
                               ('type', _ParseParameterType(field_type))])
      for field_name, field_type in _StructTypeSplit(type_string)
  ]
def _StructTypeSplit(type_string):
"""Yields single field-name, sub-types tuple from a StructType string."""
while type_string:
next_span = type_string.split(',', 1)[0]
if '<' in next_span:
angle_count = 0
i = 0
for i in range(next_span.find('<'), len(type_string)):
if type_string[i] == '<':
angle_count += 1
if type_string[i] == '>':
angle_count -= 1
if angle_count == 0:
break
if angle_count != 0:
raise exceptions.Error('Malformatted struct type')
next_span = type_string[:i + 1]
type_string = type_string[len(next_span) + 1:]
splits = next_span.split(None, 1)
if len(splits) != 2:
raise exceptions.Error('Struct parameter missing name for field')
yield splits
def _IsString(val):
try:
# Python 2
return isinstance(val, unicode)
except NameError:
return isinstance(val, str)
def _ParseParameterValue(type_dict, value_input):
"""Parse a parameter value of type `type_dict` from value_input.
Arguments:
type_dict: The JSON-dict type as which to parse `value_input`.
value_input: Either a string representing the value, or a JSON dict for
array and value types.
Returns:
A dict with one of value, arrayValues, or structValues populated depending
on the type.
"""
if 'structTypes' in type_dict:
if _IsString(value_input):
if value_input == 'NULL':
return {'structValues': None}
value_input = json.loads(value_input)
value_input = collections.OrderedDict(sorted(value_input.items()))
type_map = collections.OrderedDict([
(x['name'], x['type']) for x in type_dict['structTypes']
])
values = collections.OrderedDict()
for (field_name, value) in six.iteritems(value_input):
values[field_name] = _ParseParameterValue(type_map[field_name], value)
return {'structValues': values}
if 'arrayType' in type_dict:
if _IsString(value_input):
if value_input == 'NULL':
return {'arrayValues': None}
value_input = json.loads(value_input)
values = [
_ParseParameterValue(type_dict['arrayType'], x) for x in value_input
]
return {'arrayValues': values}
return {'value': value_input if value_input != 'NULL' else None}

View File

@@ -0,0 +1,301 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code to transform the (cleaned-up) description of a dataflow into Graphviz.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataflow import exceptions
import six
class _Cluster(object):
"""Encapsulation of a single cluster in the final Step-Graph.
The cluster hierarchy represents pieces of the user_name. A given cluster is
either a leaf (it contains a single step and no sub-clusters) or a transform
(it contains no step and one or more sub-clusters).
"""
def __init__(self, parent, name_in_parent):
self.__children = {}
self.__parent = parent
self.__name_in_parent = name_in_parent
self.__step = None
def IsLeaf(self):
"""A leaf cluster contains no sub-clusters.
Returns:
True iff this is a leaf cluster.
"""
return not self.__children
def IsSingleton(self):
"""A singleton is any cluster that contains a single child.
Returns:
True iff this is a singleton cluster.
"""
return len(self.__children) == 1
def IsRoot(self):
"""Determine if this cluster is the root.
Returns:
True iff this is the root cluster.
"""
return not self.__parent
def GetStep(self):
"""Return the step for this cluster.
Returns:
The step for this cluster. May be None if this is not a leaf.
"""
return self.__step
def SetStep(self, step):
"""Set the step for this cluster.
Can only be called on leaf nodes that have not yet had their step set.
Args:
step: The step that this cluster represents.
"""
assert not self.__children
assert not self.__step
self.__step = step
def Name(self, relative_to=None):
"""Return the name of this sub-cluster relative to the given ancestor.
Args:
relative_to: The ancestor to output the name relative to.
Returns:
The string representing the user_name component for this cluster.
"""
if (not self.__parent) or (self.__parent == relative_to):
return self.__name_in_parent
parent_name = self.__parent.Name(relative_to)
if parent_name:
return parent_name + '/' + self.__name_in_parent
else:
return self.__name_in_parent
def GetOrAddChild(self, piece_name):
"""Return the cluster representing the given piece_name within this cluster.
Args:
piece_name: String representing the piece name of the desired child.
Returns:
Cluster representing the child.
"""
assert not self.__step # Leaves cannot have steps.
if piece_name not in self.__children:
self.__children[piece_name] = _Cluster(self, piece_name)
return self.__children[piece_name]
def Children(self):
"""Return the sub-clusters.
Returns:
Sorted list of pairs for the children in this cluster.
"""
return sorted(six.iteritems(self.__children))
def _SplitStep(user_name):
"""Given a user name for a step, split it into the individual pieces.
Examples:
_SplitStep('Transform/Step') = ['Transform', 'Step']
_SplitStep('Read(gs://Foo)/Bar') = ['Read(gs://Foo)', 'Bar']
Args:
user_name: The full user_name of the step.
Returns:
A list representing the individual pieces of the step name.
"""
parens = 0
accum = []
step_parts = []
for piece in user_name.split('/'):
parens += piece.count('(') - piece.count(')')
accum.append(piece)
if parens == 0:
step_parts.append(''.join(accum))
del accum[:]
else:
accum.append('/')
# If the name contained mismatched parentheses, treat everything after the
# previous slash as the last step-part.
if accum:
step_parts.append(accum)
return step_parts
def _UnflattenStepsToClusters(steps):
  """Extract a hierarchy from the steps provided.

  The `step graph' is constructed as follows:
    1. Every node has a `name'. This is flat, something like "s1", "s100".
    2. Each node can depend on others. These edges are specified by "name".
    3. Each node can also have a user_name, like "Foo/Bar". This name
       creates a hierarchy of subgraphs (eg., Foo/Bar and Foo/Baz are in
       the same cluster).

  Args:
    steps: A list of steps from the Job message.

  Returns:
    A Cluster representing the root of the step hierarchy.
  """
  root = _Cluster(None, '')
  for step in steps:
    # Fall back to the flat name when there is no user_name property.
    display_name = step['properties'].get('user_name', step['name'])
    cursor = root
    for piece in _SplitStep(display_name):
      cursor = cursor.GetOrAddChild(piece)
    cursor.SetStep(step)
  return root
def _EscapeGraphvizId(name):
"""Escape a string for use as in Graphviz.
Args:
name: The string to escape.
Returns:
The `name', with double-quotes escaped, and quotes around it.
Raises:
exceptions.UnsupportedNameException: If the name is incompatible with
Graphviz ID escaping.
"""
if name.endswith('\\'):
raise exceptions.UnsupportedNameException(
'Unsupported name for Graphviz ID escaping: {0!r}'.format(name))
return '"{0}"'.format(name.replace('"', '\\"'))
# Graphviz node line: the flat step name is the node ID, the short
# (relative) user_name is the visible label, and the full hierarchical name
# is the hover tooltip.
_NODE_FORMAT = (
    '{name} [label={user_name}, tooltip={full_name}'
    ', style=filled, fillcolor=white];')
def _YieldGraphvizClusters(cluster, parent=None):
  """Recursively yields Graphviz lines for a cluster and its descendants.

  Leaves become nodes; root and singleton clusters are flattened into their
  parent; every other cluster becomes a subgraph.

  Args:
    cluster: The _Cluster to emit.
    parent: The ancestor cluster that node labels are written relative to.

  Yields:
    Graphviz DOT lines as strings.
  """
  if cluster.IsLeaf():
    # A leaf holds exactly one step: emit a single node for it.
    leaf_step = cluster.GetStep()
    yield _NODE_FORMAT.format(
        name=_EscapeGraphvizId(leaf_step['name']),
        full_name=_EscapeGraphvizId(cluster.Name()),
        user_name=_EscapeGraphvizId(cluster.Name(relative_to=parent)))
  elif cluster.IsRoot() or cluster.IsSingleton():
    # Trivial grouping: recurse without introducing a subgraph, keeping the
    # same relative parent for labels.
    for _, child in cluster.Children():
      for dot_line in _YieldGraphvizClusters(child, parent=parent):
        yield dot_line
  else:
    qualified_name = cluster.Name()
    yield 'subgraph {0} {{'.format(
        _EscapeGraphvizId('cluster ' + qualified_name))
    yield 'style=filled;'
    yield 'bgcolor=white;'
    yield 'labeljust=left;'
    yield 'tooltip={0};'.format(_EscapeGraphvizId(qualified_name))
    yield 'label={0};'.format(_EscapeGraphvizId(cluster.Name(parent)))
    for _, child in cluster.Children():
      for dot_line in _YieldGraphvizClusters(child, parent=cluster):
        yield dot_line
    yield '}'
# Graphviz edge line: drawn from a producing step's output to the consuming
# step, with the output name as the tail label.
_EDGE_FORMAT = ('{edge_source} -> {edge_dest} '
                '[taillabel={edge_output}, style={style}];')
def _GraphvizEdge(step_name, output_ref, style='solid'):
  """Returns an edge from the output referred to by output_ref to step_name.

  Args:
    step_name: String identifying the step with the dependency.
    output_ref: Output-reference dictionary to start the edge at.
    style: The style for the edge.

  Returns:
    A string representing the edge in Graphviz format.
  """
  source = _EscapeGraphvizId(output_ref['step_name'])
  dest = _EscapeGraphvizId(step_name)
  tail_label = _EscapeGraphvizId(output_ref['output_name'])
  return _EDGE_FORMAT.format(
      edge_source=source,
      edge_dest=dest,
      edge_output=tail_label,
      style=style)
def _YieldGraphvizEdges(step):
"""Output Graphviz edges for the given step.
Args:
step: Step to write edges for.
Yields:
The Graphviz edge lines for the given step.
"""
step_name = step['name']
par_input = step['properties'].get('parallel_input', None)
if par_input:
yield _GraphvizEdge(step_name, par_input)
for other_input in step['properties'].get('inputs', []):
yield _GraphvizEdge(step_name, other_input)
for side_input in step['properties'].get('non_parallel_inputs', {}).values():
yield _GraphvizEdge(step_name, side_input, style='dashed')
def YieldGraphviz(steps, graph_name=None):
  """Yields the Graphviz DOT representation of a job's step graph.

  No attempt is made to produce `pretty' output.

  Args:
    steps: A list of steps from the Job message.
    graph_name: The name of the graph to output; defaults to 'G'.

  Yields:
    The lines representing the step-graph in Graphviz format.
  """
  yield 'strict digraph {graph_name} {{'.format(
      graph_name=_EscapeGraphvizId(graph_name or 'G'))
  # Emit the step nodes, grouped into their clusters.
  step_hierarchy = _UnflattenStepsToClusters(steps)
  for cluster_line in _YieldGraphvizClusters(step_hierarchy):
    yield cluster_line
  # Emit the edges between steps.
  yield ''
  for step in steps:
    for edge_line in _YieldGraphvizEdges(step):
      yield edge_line
  yield '}'

View File

@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code to clean-up transform the JSON description of a dataflow.
Example clean-ups:
1. Dictionaries representing primitives with a schema will be converted to the
primitive:
Ex: { '@type': "http://schema.org/Text", 'value': "Hello" } becomes "Hello"
2. Fields that are unlikely to be human consumable may be hidden.
Ex: The serialized_fn field will be hidden, since humans are unlikely to try
to read the serialized Java object.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import six
# Step properties hidden from output because they are unlikely to be human
# consumable (e.g. serialized_fn holds a serialized Java object).
_EXCLUDED_PROPERTIES = set(['serialized_fn'])

# Maps an '@type' URL to a function extracting the primitive payload from a
# schema-decorated value proto.
_VALUE_RETRIEVERS = {
    'http://schema.org/Boolean': lambda value: value.boolean_value,
    'http://schema.org/Text': lambda value: value.string_value,
}
def _ExtractStep(step_msg):
"""Converts a Step message into a dict with more sensible structure.
Args:
step_msg: A Step message.
Returns:
A dict with the cleaned up information.
"""
properties = {}
if step_msg.properties:
for prop in step_msg.properties.additionalProperties:
if prop.key not in _EXCLUDED_PROPERTIES:
properties[prop.key] = _ExtractValue(prop.value)
return {
'kind': step_msg.kind,
'name': step_msg.name,
'properties': properties,
}
def _ExtractDecoratedObject(proto):
  """Extracts an object from the proto representation of the JSON object.

  Args:
    proto: A protocol representation of a JSON object.

  Returns:
    A clean representation of the JSON object. If it was an object
    representing a primitive, then that primitive.
  """
  props = {}
  for prop in proto.object_value.properties:
    props[prop.key] = prop.value
  type_prop = props.get('@type', None)
  retriever = type_prop and _VALUE_RETRIEVERS.get(type_prop.string_value, None)
  if not type_prop or not retriever:
    # No @type means this wasn't an object-wrapped leaf. No retriever means
    # it was created "by us", so just output the cleaned properties; @type is
    # kept since it has semantic value.
    return dict(
        (key, _ExtractValue(value)) for key, value in six.iteritems(props))
  # With a retriever, everything except the value can be discarded and the
  # value converted to a more reasonable type, which significantly cleans up
  # the printed representation.
  try:
    return retriever(props['value'])
  except KeyError:
    return 'Missing value for type [{0}] in proto [{1}]'.format(
        type_prop.string_value, proto)
def _ExtractValue(proto):
# Values are weird, because we actually wrap JSON objects around real
# JSON values.
if proto.object_value:
return _ExtractDecoratedObject(proto)
if proto.array_value:
return [_ExtractValue(v) for v in proto.array_value.entries]
if proto.string_value:
return proto.string_value
return 'No decoding provided for: {0}'.format(proto)
def ExtractSteps(job):
  """Extract the cleaned up step dictionary for all the steps in the job.

  Args:
    job: A Job message.

  Returns:
    A list of cleaned up step dictionaries.
  """
  return [_ExtractStep(step_msg) for step_msg in job.steps]