feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,14 @@
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,62 @@
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A script for converting between legacy YAML and public JSON representation.
Example invocation:
convert_yaml.py app.yaml > app.json
"""
from __future__ import absolute_import
import argparse
import json
import sys
from googlecloudsdk.appengine.admin.tools.conversion import yaml_schema_v1
from googlecloudsdk.appengine.admin.tools.conversion import yaml_schema_v1beta
import ruamel.yaml as yaml
# Maps each supported public API version to the schema module that converts
# legacy YAML into that version's JSON representation. Note that 'v1alpha'
# has no dedicated schema module and reuses the v1beta schema.
API_VERSION_SCHEMAS = {
    'v1beta': yaml_schema_v1beta,
    'v1alpha': yaml_schema_v1beta,
    'v1': yaml_schema_v1,
}
def main():
  """Read the YAML file named on the command line and print it as JSON."""
  arg_parser = argparse.ArgumentParser(
      description=('Convert between legacy YAML and public JSON '
                   'representations of App Engine versions'))
  arg_parser.add_argument('input_file')
  arg_parser.add_argument('--api_version', dest='api_version', default='v1',
                          choices=sorted(API_VERSION_SCHEMAS.keys()))
  args = arg_parser.parse_args()
  # Parse the legacy YAML representation from disk.
  with open(args.input_file) as input_file:
    parsed_yaml = yaml.safe_load(input_file)
  schema_module = API_VERSION_SCHEMAS[args.api_version]
  converted = schema_module.SCHEMA.ConvertValue(parsed_yaml)
  json.dump(converted, sys.stdout, indent=2, sort_keys=True)
def GetSchemaParser(api_version=None):
  """Return the conversion schema for api_version (defaults to the v1 schema).

  Args:
    api_version: Optional API version string; unknown or None values fall
      back to the v1 schema.

  Returns:
    The SCHEMA object of the matching schema module.
  """
  schema_module = API_VERSION_SCHEMAS.get(api_version, yaml_schema_v1)
  return schema_module.SCHEMA


if __name__ == '__main__':
  main()

View File

@@ -0,0 +1,590 @@
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Conversions to translate between legacy YAML and OnePlatform protos."""
from __future__ import absolute_import
import re
# pylint:disable=g-import-not-at-top
try:
from googlecloudsdk.appengine.api import dispatchinfo
except ImportError:
from google.appengine.api import dispatchinfo
try:
from googlecloudsdk.appengine.api import appinfo
except ImportError:
from google.appengine.api import appinfo
# pylint:enable=g-import-not-at-top
# Unit-conversion factors used when translating YAML time values into
# proto Duration strings.
_SECONDS_PER_MINUTE = 60
_MILLISECONDS_PER_SECOND = 1000
_NANOSECONDS_PER_SECOND = 1000000000
# Fields shared by every URL handler type; these stay at the top level of
# the converted handler message (see ConvertUrlHandler).
_COMMON_HANDLER_FIELDS = (
    'urlRegex',
    'login',
    'authFailAction',
    'securityLevel',
    'redirectHttpResponseCode',
)
# Fields specific to script-style handlers ('script' and 'apiEndpoint').
_SCRIPT_FIELDS = (
    'scriptPath',
)
# Per-handler-type fields that get nested under a submessage keyed by the
# handler type in the converted output.
_HANDLER_FIELDS = {
    'staticFiles': (
        'path',
        'uploadPathRegex',
        'httpHeaders',
        'expiration',
        'applicationReadable',
        'mimeType',
        'requireMatchingFile',
    ),
    'script': _SCRIPT_FIELDS,
    'apiEndpoint': _SCRIPT_FIELDS,
}
# The three groups below list automatic-scaling fields that
# ConvertAutomaticScaling moves into dedicated utilization submessages.
# Both the legacy '...PerSec' and newer '...PerSecond' spellings appear.
_REQUEST_UTILIZATION_SCALING_FIELDS = (
    'targetRequestCountPerSec',
    'targetConcurrentRequests',
    'targetRequestCountPerSecond',
)
_DISK_UTILIZATION_SCALING_FIELDS = (
    'targetWriteBytesPerSec',
    'targetWriteOpsPerSec',
    'targetReadBytesPerSec',
    'targetReadOpsPerSec',
    'targetWriteBytesPerSecond',
    'targetWriteOpsPerSecond',
    'targetReadBytesPerSecond',
    'targetReadOpsPerSecond',
)
_NETWORK_UTILIZATION_SCALING_FIELDS = (
    'targetSentBytesPerSec',
    'targetSentPacketsPerSec',
    'targetReceivedBytesPerSec',
    'targetReceivedPacketsPerSec',
    'targetSentBytesPerSecond',
    'targetSentPacketsPerSecond',
    'targetReceivedBytesPerSecond',
    'targetReceivedPacketsPerSecond',
)
# Valid YAML spellings of the Endpoints rollout strategy; converted to
# upper-case enum values by ConvertEndpointsRolloutStrategyToEnum.
_ENDPOINTS_ROLLOUT_STRATEGY_VALUES = (
    'fixed',
    'managed',
)
# Enum value used when no rollout strategy is specified in the YAML.
(_ENDPOINTS_UNSPECIFIED_ROLLOUT_STRATEGY_ENUM_VALUE
) = 'UNSPECIFIED_ROLLOUT_STRATEGY'
# Standard-environment scheduler fields moved into the
# 'standardSchedulerSettings' submessage by ConvertAutomaticScaling.
_STANDARD_SCHEDULER_SETTINGS = (
    'maxInstances',
    'minInstances',
    'targetCpuUtilization',
    'targetThroughputUtilization',
)
# Fields nested under 'subnetworkKey' by ConvertVpcEgressSubnetworkKey.
_SUBNETWORK_KEY_FIELDS = ('hostProjectId', 'subnet')
# Maps VPC egress setting as specified in app.yaml to their proto enum values.
_VPC_EGRESS_SETTING_MAP = {
    'all-traffic': 'ALL_TRAFFIC',
    'private-ranges-only': 'PRIVATE_IP_RANGES',
}
# Maps bundled service type as specified in app.yaml to their proto enum values.
_BUNDLED_SERVICE_TYPE_ENUM = {
    'app_identity_service': 'BUNDLED_SERVICE_TYPE_APP_IDENTITY_SERVICE',
    'blobstore': 'BUNDLED_SERVICE_TYPE_BLOBSTORE',
    'capability_service': 'BUNDLED_SERVICE_TYPE_CAPABILITY_SERVICE',
    'datastore_v3': 'BUNDLED_SERVICE_TYPE_DATASTORE_V3',
    'deferred': 'BUNDLED_SERVICE_TYPE_DEFERRED',
    'images': 'BUNDLED_SERVICE_TYPE_IMAGES',
    'mail': 'BUNDLED_SERVICE_TYPE_MAIL',
    'memcache': 'BUNDLED_SERVICE_TYPE_MEMCACHE',
    'modules': 'BUNDLED_SERVICE_TYPE_MODULES',
    'namespaces': 'BUNDLED_SERVICE_TYPE_NAMESPACES',
    'ndb': 'BUNDLED_SERVICE_TYPE_NDB',
    'search': 'BUNDLED_SERVICE_TYPE_SEARCH',
    'taskqueue': 'BUNDLED_SERVICE_TYPE_TASKQUEUES',
    'urlfetch': 'BUNDLED_SERVICE_TYPE_URLFETCH',
    'user': 'BUNDLED_SERVICE_TYPE_USERS',
}
def ToBundledServiceTypeEnum(value):
  """Converts a string to a bundled service type.

  Args:
    value: The bundled service name (string).

  Returns:
    The corresponding enum value (string).

  Raises:
    ValueError: If the provided value is not a valid bundled service name.
  """
  key = str(value)
  if key not in _BUNDLED_SERVICE_TYPE_ENUM:
    # Render the valid names as a plain sorted list; interpolating
    # dict.keys() directly would leak the "dict_keys([...])" repr into the
    # user-facing error message.
    raise ValueError(
        f'Value "{value}" is not a valid bundled service name. '
        f'Expected one of: {", ".join(sorted(_BUNDLED_SERVICE_TYPE_ENUM))}'
    )
  return _BUNDLED_SERVICE_TYPE_ENUM[key]
def ToVpcEgressSettingEnum(value):
  """Converts a string to a VPC egress setting."""
  setting = str(value)
  if setting in _VPC_EGRESS_SETTING_MAP:
    return _VPC_EGRESS_SETTING_MAP[setting]
  raise ValueError(
      'egress_setting must be one of: [%s]'
      % ','.join(_VPC_EGRESS_SETTING_MAP.keys())
  )
def EnumConverter(prefix):
  """Create conversion function which translates from string to enum value.

  Args:
    prefix: Prefix for enum value. Expected to be an upper-cased value.

  Returns:
    A conversion function which translates from string to enum value.

  Raises:
    ValueError: If an invalid prefix (empty, non-upper-cased, etc.) prefix was
      provided.
  """
  if not prefix:
    raise ValueError('A prefix must be provided')
  if prefix.upper() != prefix:
    raise ValueError('Upper-cased prefix must be provided')
  if prefix.endswith('_'):
    raise ValueError(
        'Prefix should not contain a trailing underscore: "%s"' % prefix)

  def Convert(value):
    # E.g. prefix 'LOGIN' + value 'admin' -> 'LOGIN_ADMIN'.
    return '%s_%s' % (prefix, str(value).upper())

  return Convert
def Not(value):
  """Convert the given boolean value to the opposite value."""
  if isinstance(value, bool):
    return not value
  # Reject truthy/falsy non-booleans rather than silently coercing them.
  raise ValueError('Expected a boolean value. Got "%s"' % value)
def ToJsonString(value):
  """Coerces a primitive value into a JSON-compatible string.

  Special handling for boolean values, since the Python version (True/False) is
  incompatible with the JSON version (true/false).

  Args:
    value: value to convert.

  Returns:
    Value as a string.

  Raises:
    ValueError: when a non-primitive value is provided.
  """
  if isinstance(value, (list, dict)):
    raise ValueError('Expected a primitive value. Got "%s"' % value)
  # JSON booleans are lower-case, unlike Python's str(True) == 'True'.
  return str(value).lower() if isinstance(value, bool) else str(value)
def ToUpperCaseJsonString(value):
  """Coerces a primitive value into a upper-case JSON-compatible string.

  Special handling for values whose JSON version is in upper-case.

  Args:
    value: value to convert.

  Returns:
    Value as a string.

  Raises:
    ValueError: when a non-primitive value is provided.
  """
  # The docstring always promised ValueError for non-primitives, but the
  # check was missing; validate here the same way ToJsonString does.
  if isinstance(value, (list, dict)):
    raise ValueError('Expected a primitive value. Got "%s"' % value)
  return str(value).upper()
def StringToInt(handle_automatic=False):
  """Create conversion function which converts from a string to an integer.

  Args:
    handle_automatic: Boolean indicating whether a value of "automatic" should
      be converted to 0.

  Returns:
    A conversion function which converts a string to an integer.
  """
  def Convert(value):
    # 'automatic' is legacy shorthand for "let the system decide" (0).
    if handle_automatic and value == 'automatic':
      return 0
    return int(value)

  return Convert
def SecondsToDuration(value):
  """Convert seconds expressed as integer to a Duration value."""
  return '{}s'.format(int(value))
def LatencyToDuration(value):
  """Convert valid pending latency argument to a Duration value of seconds.

  Args:
    value: A string in the form X.Xs or XXms.

  Returns:
    Duration value of the given argument.

  Raises:
    ValueError: if the given value isn't parseable.
  """
  # Validate against App Engine's own pending-latency grammar first.
  # NOTE(review): the 'automatic' branch below is reachable only if
  # _PENDING_LATENCY_REGEX also admits the literal 'automatic' -- confirm
  # against appinfo.
  if not re.compile(appinfo._PENDING_LATENCY_REGEX).match(value):  # pylint: disable=protected-access
    raise ValueError('Unrecognized latency: %s' % value)
  if value == 'automatic':
    return None
  if value.endswith('ms'):
    # Milliseconds -> fractional seconds, e.g. '250ms' -> '0.25s'.
    return '%ss' % (float(value[:-2]) / _MILLISECONDS_PER_SECOND)
  else:
    # Already in seconds form ('X.Xs'); pass through unchanged.
    return value
def IdleTimeoutToDuration(value):
  """Convert valid idle timeout argument to a Duration value of seconds.

  Args:
    value: A string in the form Xm or Xs

  Returns:
    Duration value of the given argument.

  Raises:
    ValueError: if the given value isn't parseable.
  """
  if re.compile(appinfo._IDLE_TIMEOUT_REGEX).match(value) is None:  # pylint: disable=protected-access
    raise ValueError('Unrecognized idle timeout: %s' % value)
  if not value.endswith('m'):
    # Already expressed in seconds ('Xs'); nothing to convert.
    return value
  minutes = int(value[:-1])
  return '%ss' % (minutes * _SECONDS_PER_MINUTE)
def ExpirationToDuration(value):
  """Convert valid expiration argument to a Duration value of seconds.

  Args:
    value: String that matches _DELTA_REGEX.

  Returns:
    Time delta expressed as a Duration.

  Raises:
    ValueError: if the given value isn't parseable.
  """
  if re.compile(appinfo._EXPIRATION_REGEX).match(value) is None:  # pylint: disable=protected-access
    raise ValueError('Unrecognized expiration: %s' % value)
  # appinfo.ParseExpiration yields the total number of seconds.
  seconds = appinfo.ParseExpiration(value)
  return '%ss' % seconds
def ConvertAutomaticScaling(automatic_scaling):
  """Moves several VM-specific automatic scaling parameters to submessages.

  For example:
    Input {
      "targetSentPacketsPerSec": 10,
      "targetReadOpsPerSec": 2,
      "targetRequestCountPerSec": 3
    }
    Output {
      "networkUtilization": {
        "targetSentPacketsPerSec": 10
      },
      "diskUtilization": {
        "targetReadOpsPerSec": 2
      },
      "requestUtilization": {
        "targetRequestCountPerSec": 3
      }
    }

  Args:
    automatic_scaling: Result of converting automatic_scaling according to
      schema.

  Returns:
    AutomaticScaling which has moved network/disk utilization related fields to
    submessage.
  """
  def _Regroup(source_fields, submessage_name):
    # Pop each present field into a new submessage dict; only attach the
    # submessage when at least one field was moved.
    moved = {}
    for name in source_fields:
      if name in automatic_scaling:
        moved[name] = automatic_scaling.pop(name)
    if moved:
      automatic_scaling[submessage_name] = moved

  _Regroup(_REQUEST_UTILIZATION_SCALING_FIELDS, 'requestUtilization')
  _Regroup(_DISK_UTILIZATION_SCALING_FIELDS, 'diskUtilization')
  _Regroup(_NETWORK_UTILIZATION_SCALING_FIELDS, 'networkUtilization')
  _Regroup(_STANDARD_SCHEDULER_SETTINGS, 'standardSchedulerSettings')
  return automatic_scaling
def ConvertUrlHandler(handler):
  """Rejiggers the structure of the url handler based on its type.

  An extra level of message nesting occurs here, based on the handler type.
  Fields common to all handler types occur at the top-level, while
  handler-specific fields will go into a submessage based on handler type.

  For example, a static files handler is transformed as follows:
    Input {
      "urlRegex": "foo/bar.html",
      "path": "static_files/foo/bar.html"
    }
    Output {
      "urlRegex": "foo/bar.html",
      "staticFiles": {
        "path": "static_files/foo/bar.html"
      }
    }

  Args:
    handler: Result of converting handler according to schema. NOTE: mutated
      in place when it is a static_dir handler (see below).

  Returns:
    Handler which has moved fields specific to the handler's type to a
    submessage.
  """
  def AppendRegexToPath(path, regex):
    """Equivalent to os.path.join(), except uses forward slashes always."""
    return path.rstrip('/') + '/' + regex
  handler_type = _GetHandlerType(handler)
  # static_dir is syntactic sugar for static_files, so we "demote" any
  # static_dir directives we see to a static_files directive before
  # continuing.
  if handler_type == 'staticDirectory':
    # Groups are disallowed in URLs for static directory handlers.
    # We check for them using the Python re module. App Engine uses Posix
    # extended regular expressions, but it's overkill to start packaging a
    # library that officially supports Posix extended regular expressions for
    # this simple validation. We just let compile errors slide; Python regular
    # expressions are mostly a superset.
    try:
      compiled = re.compile(handler['urlRegex'])
    except re.error:
      pass  # We'll let the API handle this.
    else:
      if compiled.groups:  # `groups` is the number of groups in the RE
        raise ValueError(
            'Groups are not allowed in URLs for static directory handlers: ' +
            handler['urlRegex'])
    # Rewrite the handler in static_files form: the URL regex captures the
    # file path suffix ('(.*)'), and the serving path substitutes it ('\1').
    tmp = {
        'path': AppendRegexToPath(handler['staticDir'], r'\1'),
        'uploadPathRegex': AppendRegexToPath(handler['staticDir'], '.*'),
        'urlRegex': AppendRegexToPath(handler['urlRegex'], '(.*)'),
    }
    del handler['staticDir']
    handler.update(tmp)
    handler_type = 'staticFiles'
  # Nest the type-specific fields under a key named after the handler type.
  new_handler = {}
  new_handler[handler_type] = {}
  for field in _HANDLER_FIELDS[handler_type]:
    if field in handler:
      new_handler[handler_type][field] = handler[field]
  # Copy the common fields
  for common_field in _COMMON_HANDLER_FIELDS:
    if common_field in handler:
      new_handler[common_field] = handler[common_field]
  return new_handler
def ConvertDispatchHandler(handler):
  """Create conversion function which handles dispatch rules.

  Extract domain and path from dispatch url,
  set service value from service or module info.

  Args:
    handler: Result of converting handler according to schema.

  Returns:
    Handler which has 'domain', 'path' and 'service' fields.
  """
  parsed_url = dispatchinfo.ParsedURL(handler['url'])
  domain = parsed_url.host
  if not parsed_url.host_exact:
    # Inexact hosts act as a wildcard over subdomains.
    domain = '*' + domain
  path = parsed_url.path
  if not parsed_url.path_exact:
    # Inexact paths act as a wildcard over path suffixes.
    path = path.rstrip('/') + '/*'
  return {
      'domain': domain,
      'path': path,
      'service': handler['service'],
  }
def _GetHandlerType(handler):
"""Get handler type of mapping.
Args:
handler: Original handler.
Returns:
Handler type determined by which handler id attribute is set.
Raises:
ValueError: when none of the handler id attributes are set.
"""
if 'apiEndpoint' in handler:
return 'apiEndpoint'
elif 'staticDir' in handler:
return 'staticDirectory'
elif 'path' in handler:
return 'staticFiles'
elif 'scriptPath' in handler:
return 'script'
raise ValueError('Unrecognized handler type: %s' % handler)
def ConvertEndpointsRolloutStrategyToEnum(value):
  """Converts the rollout strategy to an enum.

  In the YAML file, the user does not use the enum values directly. Therefore we
  must convert these to their corresponding enum values in version.proto.

  Args:
    value: A string that is a valid rollout strategy ("fixed" or "managed")

  Returns:
    Value converted to the proper enum value. Defaults to
    "UNSPECIFIED_ROLLOUT_STRATEGY"

  Raises:
    ValueError: When the value is set and is not one of "fixed" or "managed".
  """
  if value is None:
    return _ENDPOINTS_UNSPECIFIED_ROLLOUT_STRATEGY_ENUM_VALUE
  if value not in _ENDPOINTS_ROLLOUT_STRATEGY_VALUES:
    raise ValueError('Unrecognized rollout strategy: %s' % value)
  return value.upper()
def ConvertEntrypoint(entrypoint):
  """Converts the raw entrypoint to a nested shell value.

  In the YAML file, the user specifies an entrypoint value. However, the version
  resource expects it to be nested under a 'shell' key. In addition, Zeus
  always prepends 'exec' to the value provided, so we remove it here as it is
  sometimes added client-side by the validation library.

  Args:
    entrypoint: string, entrypoint value.

  Returns:
    Dict containing entrypoint.
  """
  shell_command = '' if entrypoint is None else entrypoint
  if shell_command.startswith('exec '):
    # Drop the client-side 'exec ' prefix; the server re-adds it anyway.
    shell_command = shell_command[5:]
  return {'shell': shell_command}
def ConvertVpcEgressSubnetworkKey(vpc_egress):
  """Converts the subnetwork key to a nested value.

  For example:
    Input {
      hostProjectId: "my-project",
      subnet: "my-subnet"
    }
    Output {
      subnetworkKey: {
        hostProjectId: "my-project",
        subnet: "my-subnet"
      }
    }

  Args:
    vpc_egress: Result of converting vpc_egress according to schema.

  Returns:
    VpcEgress which has moved subnetwork key fields to a submessage.
  """
  # Pop the subnetwork fields into a nested dict; only attach the submessage
  # when at least one of them was present.
  nested = {}
  for field in _SUBNETWORK_KEY_FIELDS:
    if field in vpc_egress:
      nested[field] = vpc_egress.pop(field)
  if nested:
    vpc_egress['subnetworkKey'] = nested
  return vpc_egress
def ToVpcNetworkTags(network_tags_str):
  """Converts a comma-separated string of network tags to VpcNetworkTag dicts.

  Args:
    network_tags_str: A string containing one or more network tags,
      separated by commas. May be empty or None, yielding no tags.

  Returns:
    A list of dictionaries, where each dictionary has a 'value' key
    representing a network tag.

  Raises:
    ValueError: If any comma-separated entry is empty or whitespace-only.
      (This was previously undocumented.)
  """
  if not network_tags_str:
    return []
  vpc_network_tags = []
  for raw_tag in network_tags_str.split(','):
    # Remove any whitespace from the tag.
    tag = raw_tag.strip()
    if not tag:
      raise ValueError('Network tags cannot be empty.')
    vpc_network_tags.append({'value': tag})
  return vpc_network_tags

View File

@@ -0,0 +1,327 @@
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Collection of classes for converting and transforming an input dictionary.
Conversions are defined statically using subclasses of SchemaField (Message,
Value, RepeatedField) which transform a source dictionary input to the target
schema. The source dictionary is expected to be parsed from a JSON
representation.
Only fields listed in the schema will be converted (i.e. an allowlist).
A SchemaField is a recursive structure and employs the visitor pattern to
convert an input structure.
# Schema to use for transformation
SAMPLE_SCHEMA = Message(
foo=Value(target_name='bar'),
list_of_things=RepeatedField(target_name='bar_list_of_things',
element=Value()))
# Input dictionary:
input_dict = {
'foo': '1234',
'list_of_things': [1, 4, 5],
'some_other_field': "hello"
}
# To convert:
result = SAMPLE_SCHEMA.ConvertValue(input_dict)
# The resulting dictionary will be:
{
'bar': '1234',
'bar_list_of_things': [1, 4, 5]
}
Note that both fields were renamed according to the rules in the schema. Fields
not listed in the schema will not be copied. In this example, "some_other_field"
was not copied.
If further transformation is required on the value itself, a converter can be
specified, which is simply a function which takes an input value and transforms
it according to whatever logic it wants.
For example, to convert a string value to an integer value, one could construct
a schema as follows:
CONVERTER_SCHEMA = Message(
foo=Value(target_name='bar', converter=int))
Using the above input dictionary, the result would be:
{
'bar': 1234
}
"""
from __future__ import absolute_import
import logging
from googlecloudsdk.appengine.admin.tools.conversion import converters
# TODO(user) Better error handling patterns.
def UnderscoreToLowerCamelCase(text):
  """Convert underscores to lower camel case (e.g. 'foo_bar' --> 'fooBar')."""
  # Lower-case everything first so 'FOO_BAR' also becomes 'fooBar'.
  words = text.lower().split('_')
  camel = [words[0]]
  camel.extend(word.capitalize() for word in words[1:])
  return ''.join(camel)
def ValidateType(source_value, expected_type):
  """Raise ValueError unless source_value is an instance of expected_type."""
  if isinstance(source_value, expected_type):
    return
  raise ValueError(
      'Expected a %s, but got %s for value %s' % (expected_type,
                                                  type(source_value),
                                                  source_value))
def ValidateNotType(source_value, non_expected_type):
  """Raise ValueError if source_value is an instance of non_expected_type."""
  if not isinstance(source_value, non_expected_type):
    return
  raise ValueError(
      'Did not expect %s for value %s' % (non_expected_type, source_value))
def MergeDictionaryValues(old_dict, new_dict):
  """Attempts to merge the given dictionaries.

  Warns if a key exists with different values in both dictionaries. In this
  case, the new_dict value trumps the previous value.

  Args:
    old_dict: Existing dictionary.
    new_dict: New dictionary.

  Returns:
    Result of merging the two dictionaries.
  """
  shared_keys = set(old_dict).intersection(new_dict)
  # Only warn for keys whose values actually differ; identical duplicates
  # are harmless.
  conflicts = [key for key in shared_keys if old_dict[key] != new_dict[key]]
  for key in conflicts:
    logging.warning(
        ('\'{key}\' has conflicting values \'{old}\' and \'{new}\'. '
         'Using \'{new}\'.').format(key=key,
                                    old=old_dict[key],
                                    new=new_dict[key]))
  merged = old_dict.copy()
  merged.update(new_dict)
  return merged
class SchemaField(object):
  """Transformation strategy from input dictionary to an output dictionary.

  Each subclass defines a different strategy for how an input value is converted
  to an output value. ConvertValue() makes a copy of the input with the proper
  transformations applied. Additionally, constraints about the input structure
  are validated while doing the transformation.
  """

  def __init__(self, target_name=None, converter=None):
    """Constructor.

    Args:
      target_name: New field name to use when creating an output dictionary. If
        None is specified, then the original name is used.
      converter: A function which performs a transformation on the value of the
        field.
    """
    self.target_name = target_name
    self.converter = converter

  def ConvertValue(self, value):
    """Convert an input value using the given schema and converter.

    This method is not meant to be overwritten. Update _VisitInternal to change
    the behavior.

    Args:
      value: Input value.

    Returns:
      Output which has been transformed using the given schema for renaming and
      converter, if specified.
    """
    restructured = self._VisitInternal(value)
    return self._PerformConversion(restructured)

  def _VisitInternal(self, value):
    """Shuffles the input value using the renames specified in the schema.

    Only structural changes are made (e.g. renaming keys, copying lists, etc.).
    Subclasses are expected to override this.

    Args:
      value: Input value.

    Returns:
      Output which has been transformed using the given schema.
    """
    raise NotImplementedError()

  def _PerformConversion(self, result):
    """Transforms the result value if a converter is specified."""
    if self.converter:
      return self.converter(result)
    return result
class Message(SchemaField):
  """A message has a collection of fields which should be converted.

  Expected input type: Dictionary
  Output type: Dictionary
  """

  def __init__(self, target_name=None, converter=None, **kwargs):
    """Constructor.

    Args:
      target_name: New field name to use when creating an output dictionary. If
        None is specified, then the original name is used.
      converter: A function which performs a transformation on the value of the
        field.
      **kwargs: Kwargs where the keys are names of the fields and values are
        FieldSchemas for each child field.

    Raises:
      ValueError: If the message has no child fields specified.
    """
    super(Message, self).__init__(target_name, converter)
    self.fields = kwargs
    if not self.fields:
      raise ValueError('Message must contain fields')

  def _VisitInternal(self, value):
    """Convert each child field and put the result in a new dictionary."""
    ValidateType(value, dict)
    converted = {}
    for source_key, field_schema in self.fields.items():
      if source_key not in value:
        # Absent source fields are simply skipped.
        continue
      target_key = UnderscoreToLowerCamelCase(
          field_schema.target_name or source_key)
      converted_value = field_schema.ConvertValue(value[source_key])
      if target_key not in converted:
        converted[target_key] = converted_value
        continue
      existing_value = converted[target_key]
      # Two source fields may map to the same target key; only dictionary
      # results know how to merge.
      if isinstance(existing_value, dict) and isinstance(converted_value,
                                                         dict):
        converted[target_key] = MergeDictionaryValues(existing_value,
                                                      converted_value)
      else:
        raise ValueError('Target key "%s" already exists.' % target_key)
    return converted
class Value(SchemaField):
  """Represents a leaf node. Only the value itself is copied.

  A primitive value corresponds to any non-string, non-dictionary value which
  can be represented in JSON.

  Expected input type: Primitive value type (int, string, boolean, etc.).
  Output type: Same primitive value type.
  """

  def _VisitInternal(self, value):
    # Leaves must be scalars; reject containers outright.
    for container_type in (list, dict):
      ValidateNotType(value, container_type)
    return value
class Map(SchemaField):
  """Represents a leaf node where the value itself is a map.

  Expected input type: Dictionary
  Output type: Dictionary
  """

  def __init__(self, target_name=None, converter=None,
               key_converter=converters.ToJsonString,
               value_converter=converters.ToJsonString):
    """Constructor.

    Args:
      target_name: New field name to use when creating an output dictionary. If
        None is specified, then the original name is used.
      converter: A function which performs a transformation on the value of the
        field.
      key_converter: A function which performs a transformation on the keys.
      value_converter: A function which performs a transformation on the values.
    """
    super(Map, self).__init__(target_name, converter)
    self.key_converter = key_converter
    self.value_converter = value_converter

  def _VisitInternal(self, value):
    ValidateType(value, dict)
    converted = {}
    for raw_key, raw_value in value.items():
      # A converter of None leaves the key/value untouched.
      new_key = self.key_converter(raw_key) if self.key_converter else raw_key
      new_value = (self.value_converter(raw_value)
                   if self.value_converter else raw_value)
      converted[new_key] = new_value
    return converted
class RepeatedField(SchemaField):
  """Represents a list of nested elements. Each item in the list is copied.

  The type of each element in the list is specified in the constructor.

  Expected input type: List
  Output type: List
  """

  def __init__(self, target_name=None, converter=None, element=None):
    """Constructor.

    Args:
      target_name: New field name to use when creating an output dictionary. If
        None is specified, then the original name is used.
      converter: A function which performs a transformation on the value of the
        field.
      element: A SchemaField element defining the type of every element in the
        list. The input structure is expected to be homogenous.

    Raises:
      ValueError: If an element has not been specified or if the element type is
        incompatible with a repeated field.
    """
    super(RepeatedField, self).__init__(target_name, converter)
    self.element = element
    if not self.element:
      raise ValueError('Element required for a repeated field')
    if isinstance(self.element, Map):
      raise ValueError('Repeated maps are not supported')

  def _VisitInternal(self, value):
    ValidateType(value, list)
    # Convert every item with the shared element schema.
    return [self.element.ConvertValue(item) for item in value]

View File

@@ -0,0 +1,182 @@
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition for conversion between legacy YAML and the API JSON formats."""
from __future__ import absolute_import
from googlecloudsdk.appengine.admin.tools.conversion import converters as c
from googlecloudsdk.appengine.admin.tools.conversion import schema as s
# Declarative mapping from legacy app.yaml field names to the Admin API JSON
# field names. Each s.Value optionally renames a field (first positional arg)
# and/or converts its value; s.Message handles nested structures, s.Map copies
# dictionary fields, and s.RepeatedField applies its element schema per item.
SCHEMA = s.Message(
    api_config=s.Message(
        url=s.Value(converter=c.ToJsonString),
        login=s.Value(converter=c.EnumConverter('LOGIN')),
        secure=s.Value('security_level', converter=c.EnumConverter('SECURE')),
        auth_fail_action=s.Value(converter=c.EnumConverter('AUTH_FAIL_ACTION')),
        script=s.Value(converter=c.ToJsonString)),
    api_version=s.Value('runtime_api_version', converter=c.ToJsonString),
    app_engine_apis=s.Value(),
    # auto_id_policy is wrapped into the beta_settings dict rather than
    # renamed one-to-one.
    auto_id_policy=s.Value('beta_settings',
                           lambda val: {'auto_id_policy': val}),
    # The whole automatic_scaling message is post-processed by
    # c.ConvertAutomaticScaling after its individual fields are converted.
    automatic_scaling=s.Message(
        converter=c.ConvertAutomaticScaling,
        cool_down_period_sec=s.Value(
            'cool_down_period', converter=c.SecondsToDuration),
        cpu_utilization=s.Message(
            target_utilization=s.Value(),
            aggregation_window_length_sec=s.Value(
                'aggregation_window_length', converter=c.SecondsToDuration)),
        max_instances=s.Value('max_instances'),
        min_instances=s.Value('min_instances'),
        target_cpu_utilization=s.Value('target_cpu_utilization'),
        target_throughput_utilization=s.Value('target_throughput_utilization'),
        max_num_instances=s.Value('max_total_instances'),
        min_pending_latency=s.Value(converter=c.LatencyToDuration),
        # 'automatic' is an accepted YAML value for idle instance counts.
        min_idle_instances=s.Value(
            converter=c.StringToInt(handle_automatic=True)),
        max_idle_instances=s.Value(
            converter=c.StringToInt(handle_automatic=True)),
        max_pending_latency=s.Value(converter=c.LatencyToDuration),
        max_concurrent_requests=s.Value(converter=c.StringToInt()),
        min_num_instances=s.Value('min_total_instances'),
        # NOTE: this schema renames the *_per_sec YAML fields to
        # *_per_second API names.
        target_network_sent_bytes_per_sec=s.Value(
            'target_sent_bytes_per_second'),
        target_network_sent_packets_per_sec=s.Value(
            'target_sent_packets_per_second'),
        target_network_received_bytes_per_sec=s.Value(
            'target_received_bytes_per_second'),
        target_network_received_packets_per_sec=s.Value(
            'target_received_packets_per_second'),
        target_disk_write_bytes_per_sec=s.Value(
            'target_write_bytes_per_second'),
        target_disk_write_ops_per_sec=s.Value('target_write_ops_per_second'),
        target_disk_read_bytes_per_sec=s.Value('target_read_bytes_per_second'),
        target_disk_read_ops_per_sec=s.Value('target_read_ops_per_second'),
        target_request_count_per_sec=s.Value('target_request_count_per_second'),
        target_concurrent_requests=s.Value()),
    basic_scaling=s.Message(
        idle_timeout=s.Value(converter=c.IdleTimeoutToDuration),
        max_instances=s.Value(converter=c.StringToInt())),
    beta_settings=s.Map(),
    default_expiration=s.Value(converter=c.ExpirationToDuration),
    endpoints_api_service=s.Message(
        name=s.Value(),
        rollout_strategy=s.Value(
            converter=c.ConvertEndpointsRolloutStrategyToEnum),
        config_id=s.Value(),
        # Sense is inverted: YAML enables sampling, API disables it.
        trace_sampling=s.Value('disable_trace_sampling', converter=c.Not),
    ),
    entrypoint=s.Value(converter=c.ConvertEntrypoint),
    env=s.Value(),
    env_variables=s.Map(),
    build_env_variables=s.Map(),
    error_handlers=s.RepeatedField(
        element=s.Message(
            error_code=s.Value(converter=c.EnumConverter('ERROR_CODE')),
            file=s.Value('static_file', converter=c.ToJsonString),
            mime_type=s.Value(converter=c.ToJsonString))),
    runtime_config=s.Message('flexible_runtime_settings',
                             operating_system=s.Value(converter=c.ToJsonString),
                             runtime_version=s.Value(converter=c.ToJsonString),
                             ),
    # Restructure the handler after it's complete, since this is more
    # complicated than a simple rename.
    handlers=s.RepeatedField(
        element=s.Message(
            converter=c.ConvertUrlHandler,
            auth_fail_action=s.Value(
                converter=c.EnumConverter('AUTH_FAIL_ACTION')),
            static_dir=s.Value(converter=c.ToJsonString),
            secure=s.Value(
                'security_level', converter=c.EnumConverter('SECURE')),
            redirect_http_response_code=s.Value(
                converter=c.EnumConverter('REDIRECT_HTTP_RESPONSE_CODE')),
            http_headers=s.Map(),
            url=s.Value('url_regex'),
            expiration=s.Value(converter=c.ExpirationToDuration),
            static_files=s.Value('path', converter=c.ToJsonString),
            script=s.Value('script_path', converter=c.ToJsonString),
            upload=s.Value('upload_path_regex', converter=c.ToJsonString),
            api_endpoint=s.Value(),
            application_readable=s.Value(),
            position=s.Value(),
            login=s.Value(converter=c.EnumConverter('LOGIN')),
            mime_type=s.Value(converter=c.ToJsonString),
            require_matching_file=s.Value())),
    # The three *_check messages share the same interval/timeout structure.
    health_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        healthy_threshold=s.Value(),
        # Inverted: enable_health_check -> disable_health_check via c.Not.
        enable_health_check=s.Value('disable_health_check', converter=c.Not),
        unhealthy_threshold=s.Value(),
        host=s.Value(converter=c.ToJsonString),
        restart_threshold=s.Value()),
    liveness_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        initial_delay_sec=s.Value(
            'initial_delay', converter=c.SecondsToDuration),
        success_threshold=s.Value(),
        failure_threshold=s.Value(),
        path=s.Value(),
        host=s.Value(converter=c.ToJsonString)),
    readiness_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        app_start_timeout_sec=s.Value(
            'app_start_timeout', converter=c.SecondsToDuration),
        success_threshold=s.Value(),
        failure_threshold=s.Value(),
        path=s.Value(),
        host=s.Value(converter=c.ToJsonString)),
    inbound_services=s.RepeatedField(
        element=s.Value(converter=c.EnumConverter('INBOUND_SERVICE'))),
    instance_class=s.Value(converter=c.ToJsonString),
    libraries=s.RepeatedField(
        element=s.Message(
            version=s.Value(converter=c.ToJsonString),
            name=s.Value(converter=c.ToJsonString))),
    main=s.Value('runtime_main_executable_path', converter=c.ToJsonString),
    manual_scaling=s.Message(instances=s.Value(converter=c.StringToInt())),
    network=s.Message(
        instance_tag=s.Value(converter=c.ToJsonString),
        name=s.Value(converter=c.ToJsonString),
        subnetwork_name=s.Value(converter=c.ToJsonString),
        forwarded_ports=s.RepeatedField(
            element=s.Value(converter=c.ToJsonString)),
        session_affinity=s.Value(),
        instance_ip_mode=s.Value(converter=c.ToJsonString)),
    nobuild_files=s.Value('nobuild_files_regex', converter=c.ToJsonString),
    resources=s.Message(
        memory_gb=s.Value(),
        disk_size_gb=s.Value('disk_gb'),
        cpu=s.Value(),
        volumes=s.RepeatedField(
            element=s.Message(
                name=s.Value(converter=c.ToJsonString),
                volume_type=s.Value(converter=c.ToJsonString),
                size_gb=s.Value()))),
    runtime=s.Value(converter=c.ToJsonString),
    runtime_channel=s.Value(converter=c.ToJsonString),
    service_account=s.Value(converter=c.ToJsonString),
    standard_websocket=s.Value('enable_standard_websocket'),
    threadsafe=s.Value(),
    version=s.Value('id', converter=c.ToJsonString),
    vm=s.Value(),
    # Legacy vm_settings map is folded into beta_settings.
    vm_settings=s.Map('beta_settings'),
    vpc_access_connector=s.Message(
        name=s.Value(converter=c.ToJsonString),
        egress_setting=s.Value(converter=c.ToVpcEgressSettingEnum)))

View File

@@ -0,0 +1,205 @@
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition for conversion between legacy YAML and the API JSON formats."""
from __future__ import absolute_import
from googlecloudsdk.appengine.admin.tools.conversion import converters as c
from googlecloudsdk.appengine.admin.tools.conversion import schema as s
# Declarative mapping from legacy app.yaml field names to the Admin API JSON
# field names. Each s.Value optionally renames a field (first positional arg)
# and/or converts its value; s.Message handles nested structures, s.Map copies
# dictionary fields, and s.RepeatedField applies its element schema per item.
SCHEMA = s.Message(
    api_config=s.Message(
        url=s.Value(converter=c.ToJsonString),
        login=s.Value(converter=c.EnumConverter('LOGIN')),
        secure=s.Value('security_level', converter=c.EnumConverter('SECURE')),
        auth_fail_action=s.Value(converter=c.EnumConverter('AUTH_FAIL_ACTION')),
        script=s.Value(converter=c.ToJsonString)),
    api_version=s.Value('runtime_api_version', converter=c.ToJsonString),
    app_engine_apis=s.Value(),
    # auto_id_policy is wrapped into the beta_settings dict rather than
    # renamed one-to-one.
    auto_id_policy=s.Value('beta_settings',
                           lambda val: {'auto_id_policy': val}),
    # The whole automatic_scaling message is post-processed by
    # c.ConvertAutomaticScaling after its individual fields are converted.
    automatic_scaling=s.Message(
        converter=c.ConvertAutomaticScaling,
        cool_down_period_sec=s.Value(
            'cool_down_period', converter=c.SecondsToDuration),
        cpu_utilization=s.Message(
            target_utilization=s.Value(),
            aggregation_window_length_sec=s.Value(
                'aggregation_window_length', converter=c.SecondsToDuration)),
        max_instances=s.Value('max_instances'),
        min_instances=s.Value('min_instances'),
        target_cpu_utilization=s.Value('target_cpu_utilization'),
        target_throughput_utilization=s.Value('target_throughput_utilization'),
        max_num_instances=s.Value('max_total_instances'),
        min_pending_latency=s.Value(converter=c.LatencyToDuration),
        # 'automatic' is an accepted YAML value for idle instance counts.
        min_idle_instances=s.Value(
            converter=c.StringToInt(handle_automatic=True)),
        max_idle_instances=s.Value(
            converter=c.StringToInt(handle_automatic=True)),
        max_pending_latency=s.Value(converter=c.LatencyToDuration),
        max_concurrent_requests=s.Value(converter=c.StringToInt()),
        min_num_instances=s.Value('min_total_instances'),
        # NOTE: unlike the v1beta schema, the target API names here keep the
        # *_per_sec spelling.
        target_network_sent_bytes_per_sec=s.Value('target_sent_bytes_per_sec'),
        target_network_sent_packets_per_sec=s.Value(
            'target_sent_packets_per_sec'),
        target_network_received_bytes_per_sec=s.Value(
            'target_received_bytes_per_sec'),
        target_network_received_packets_per_sec=s.Value(
            'target_received_packets_per_sec'),
        target_disk_write_bytes_per_sec=s.Value('target_write_bytes_per_sec'),
        target_disk_write_ops_per_sec=s.Value('target_write_ops_per_sec'),
        target_disk_read_bytes_per_sec=s.Value('target_read_bytes_per_sec'),
        target_disk_read_ops_per_sec=s.Value('target_read_ops_per_sec'),
        target_request_count_per_sec=s.Value(),
        target_concurrent_requests=s.Value(),
        custom_metrics=s.RepeatedField(
            element=s.Message(
                metric_name=s.Value(converter=c.ToJsonString),
                target_type=s.Value(converter=c.ToJsonString),
                target_utilization=s.Value('target_utilization'),
                single_instance_assignment=s.Value(
                    'single_instance_assignment'),
                filter=s.Value(converter=c.ToJsonString))),
    ),
    basic_scaling=s.Message(
        idle_timeout=s.Value(converter=c.IdleTimeoutToDuration),
        max_instances=s.Value(converter=c.StringToInt())),
    beta_settings=s.Map(),
    default_expiration=s.Value(converter=c.ExpirationToDuration),
    endpoints_api_service=s.Message(
        name=s.Value(),
        rollout_strategy=s.Value(
            converter=c.ConvertEndpointsRolloutStrategyToEnum),
        config_id=s.Value(),
        # Sense is inverted: YAML enables sampling, API disables it.
        trace_sampling=s.Value('disable_trace_sampling', converter=c.Not),
    ),
    entrypoint=s.Value(converter=c.ConvertEntrypoint),
    env=s.Value(),
    env_variables=s.Map(),
    build_env_variables=s.Map(),
    error_handlers=s.RepeatedField(
        element=s.Message(
            error_code=s.Value(converter=c.EnumConverter('ERROR_CODE')),
            file=s.Value('static_file', converter=c.ToJsonString),
            mime_type=s.Value(converter=c.ToJsonString),
        )
    ),
    runtime_config=s.Message(
        'flexible_runtime_settings',
        operating_system=s.Value(converter=c.ToJsonString),
        runtime_version=s.Value(converter=c.ToJsonString),
    ),
    # Restructure the handler after it's complete, since this is more
    # complicated than a simple rename.
    handlers=s.RepeatedField(
        element=s.Message(
            converter=c.ConvertUrlHandler,
            auth_fail_action=s.Value(
                converter=c.EnumConverter('AUTH_FAIL_ACTION')),
            static_dir=s.Value(converter=c.ToJsonString),
            secure=s.Value(
                'security_level', converter=c.EnumConverter('SECURE')),
            redirect_http_response_code=s.Value(
                converter=c.EnumConverter('REDIRECT_HTTP_RESPONSE_CODE')),
            http_headers=s.Map(),
            url=s.Value('url_regex'),
            expiration=s.Value(converter=c.ExpirationToDuration),
            static_files=s.Value('path', converter=c.ToJsonString),
            script=s.Value('script_path', converter=c.ToJsonString),
            upload=s.Value('upload_path_regex', converter=c.ToJsonString),
            api_endpoint=s.Value(),
            application_readable=s.Value(),
            position=s.Value(),
            login=s.Value(converter=c.EnumConverter('LOGIN')),
            mime_type=s.Value(converter=c.ToJsonString),
            require_matching_file=s.Value())),
    # The three *_check messages share the same interval/timeout structure.
    health_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        healthy_threshold=s.Value(),
        # Inverted: enable_health_check -> disable_health_check via c.Not.
        enable_health_check=s.Value('disable_health_check', converter=c.Not),
        unhealthy_threshold=s.Value(),
        host=s.Value(converter=c.ToJsonString),
        restart_threshold=s.Value()),
    liveness_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        initial_delay_sec=s.Value(
            'initial_delay', converter=c.SecondsToDuration),
        success_threshold=s.Value(),
        failure_threshold=s.Value(),
        path=s.Value(),
        host=s.Value(converter=c.ToJsonString)),
    readiness_check=s.Message(
        check_interval_sec=s.Value(
            'check_interval', converter=c.SecondsToDuration),
        timeout_sec=s.Value('timeout', converter=c.SecondsToDuration),
        app_start_timeout_sec=s.Value(
            'app_start_timeout', converter=c.SecondsToDuration),
        success_threshold=s.Value(),
        failure_threshold=s.Value(),
        path=s.Value(),
        host=s.Value(converter=c.ToJsonString)),
    inbound_services=s.RepeatedField(
        element=s.Value(converter=c.EnumConverter('INBOUND_SERVICE'))),
    instance_class=s.Value(converter=c.ToJsonString),
    libraries=s.RepeatedField(
        element=s.Message(
            version=s.Value(converter=c.ToJsonString),
            name=s.Value(converter=c.ToJsonString))),
    main=s.Value('runtime_main_executable_path', converter=c.ToJsonString),
    manual_scaling=s.Message(instances=s.Value(converter=c.StringToInt())),
    network=s.Message(
        instance_tag=s.Value(converter=c.ToJsonString),
        name=s.Value(converter=c.ToJsonString),
        subnetwork_name=s.Value(converter=c.ToJsonString),
        forwarded_ports=s.RepeatedField(
            element=s.Value(converter=c.ToJsonString)),
        session_affinity=s.Value(),
        instance_ip_mode=s.Value(
            'instance_ip_mode', converter=c.ToUpperCaseJsonString)),
    nobuild_files=s.Value('nobuild_files_regex', converter=c.ToJsonString),
    resources=s.Message(
        memory_gb=s.Value(),
        disk_size_gb=s.Value('disk_gb'),
        cpu=s.Value(),
        volumes=s.RepeatedField(
            element=s.Message(
                name=s.Value(converter=c.ToJsonString),
                volume_type=s.Value(converter=c.ToJsonString),
                size_gb=s.Value()))),
    runtime=s.Value(converter=c.ToJsonString),
    runtime_channel=s.Value(converter=c.ToJsonString),
    service_account=s.Value(converter=c.ToJsonString),
    standard_websocket=s.Value('enable_standard_websocket'),
    threadsafe=s.Value(),
    version=s.Value('id', converter=c.ToJsonString),
    vm=s.Value(),
    # Legacy vm_settings map is folded into beta_settings.
    vm_settings=s.Map('beta_settings'),
    vpc_access_connector=s.Message(
        name=s.Value(converter=c.ToJsonString),
        egress_setting=s.Value(converter=c.ToVpcEgressSettingEnum)),
    # Whole message is restructured by c.ConvertVpcEgressSubnetworkKey after
    # field-level conversion.
    vpc_egress=s.Message(
        converter=c.ConvertVpcEgressSubnetworkKey,
        host_project_id=s.Value(converter=c.ToJsonString),
        subnet=s.Value(converter=c.ToJsonString),
        egress_setting=s.Value(converter=c.ToVpcEgressSettingEnum),
        network_tags=s.Value(converter=c.ToVpcNetworkTags),
    ),
    zones=s.RepeatedField(element=s.Value(converter=c.ToJsonString)),
    app_engine_bundled_services=s.RepeatedField(
        element=s.Value(converter=c.ToBundledServiceTypeEnum)
    ))