feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities for deleting resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.metastore import operations_util as operations_api_util
from googlecloudsdk.api_lib.metastore import util as api_util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
import six
class ServiceDeletionWaiter(object):
  """Tracks one or more in-flight service deletions and waits on them."""

  def __init__(self, release_track=base.ReleaseTrack.GA):
    # Deletions are polled in the order they were registered.
    self.pending_deletes = []
    self.release_track = release_track

  def AddPendingDelete(self, service_name, operation):
    """Adds a service whose deletion to track.

    Args:
      service_name: str, the relative resource name of the service being
        deleted
      operation: Operation, the longrunning operation object returned by the
        API when the deletion was initiated
    """
    record = _PendingServiceDelete(service_name, operation)
    self.pending_deletes.append(record)

  def Wait(self):
    """Polls pending deletions and returns when they are complete.

    Returns:
      bool, True if at least one deletion failed with an OperationError.
    """
    encountered_errors = False
    for pending in self.pending_deletes:
      message = 'Waiting for [{}] to be deleted'.format(pending.service_name)
      try:
        operations_api_util.WaitForOperation(
            pending.operation, message, release_track=self.release_track)
      except api_util.OperationError as e:
        encountered_errors = True
        # Surface the failure on the resource rather than aborting the loop,
        # so remaining deletions are still waited on.
        log.DeletedResource(
            pending.service_name,
            kind='service',
            is_async=False,
            failed=six.text_type(e))
    return encountered_errors
class _PendingServiceDelete(object):
"""Data class holding information about a pending service deletion."""
def __init__(self, service_name, operation):
self.service_name = service_name
self.operation = operation
class FederationDeletionWaiter(object):
  """Tracks one or more in-flight federation deletions and waits on them."""

  def __init__(self, release_track=base.ReleaseTrack.GA):
    # Deletions are polled in the order they were registered.
    self.pending_deletes = []
    self.release_track = release_track

  def AddPendingDelete(self, federation_name, operation):
    """Adds a federation whose deletion to track.

    Args:
      federation_name: str, the relative resource name of the federation being
        deleted
      operation: Operation, the longrunning operation object returned by the
        API when the deletion was initiated
    """
    record = _PendingFederationDelete(federation_name, operation)
    self.pending_deletes.append(record)

  def Wait(self):
    """Polls pending deletions and returns when they are complete.

    Returns:
      bool, True if at least one deletion failed with an OperationError.
    """
    encountered_errors = False
    for pending in self.pending_deletes:
      message = 'Waiting for [{}] to be deleted'.format(
          pending.federation_name)
      try:
        operations_api_util.WaitForOperation(
            pending.operation, message, release_track=self.release_track)
      except api_util.OperationError as e:
        encountered_errors = True
        # Record the failure but keep waiting on the remaining deletions.
        log.DeletedResource(
            pending.federation_name,
            kind='federation',
            is_async=False,
            failed=six.text_type(e))
    return encountered_errors
class _PendingFederationDelete(object):
"""Data class holding information about a pending federation deletion."""
def __init__(self, federation_name, operation):
self.federation_name = federation_name
self.operation = operation

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for "gcloud metastore federations" commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.metastore import validators as validator
import six
def GenerateCreateBackends(job_ref, args, create_federation_req):
  """Construct the backend names for create requests of Dataproc Metastore federations.

  Args:
    job_ref: A resource ref to the parsed Federation resource.
    args: The parsed args namespace from CLI. Unused; present to satisfy the
      request-modification hook signature.
    create_federation_req: Create federation request for the API call.

  Returns:
    Modified request for the API call.
  """
  del args  # Unused.
  return validator.ParseBackendsIntoRequest(job_ref, create_federation_req)
def GenerateUpdateBackends(job_ref, args, update_federation_req):
  """Construct the long name for backends and updateMask for update requests of Dataproc Metastore federations.

  Args:
    job_ref: A resource ref to the parsed Federation resource.
    args: The parsed args namespace from CLI.
    update_federation_req: Update federation request for the API call.

  Returns:
    Modified request for the API call.
  """
  specified = set(args.GetSpecifiedArgNames())
  removing_only = (
      '--remove-backends' in specified
      and '--update-backends' not in specified)
  if removing_only:
    # Start from an empty map so only the surviving backends remain.
    update_federation_req.federation.backendMetastores = {}
  if '--update-backends' in specified:
    validator.ParseBackendsIntoRequest(job_ref, update_federation_req)
  update_federation_req.updateMask = _GenerateUpdateMask(args)
  return update_federation_req
def _AppendKeysToUpdateMask(prefix, key):
return prefix + '.' + key
def _GenerateUpdateMask(args):
"""Constructs updateMask for federation patch requests.
Args:
args: The parsed args namespace from CLI.
Returns:
String containing update mask for patch request.
"""
arg_name_to_field = {
'--clear-backends': 'backend_metastores',
'--clear-labels': 'labels'
}
update_mask = set()
input_args = set(args.GetSpecifiedArgNames())
for arg_name in input_args.intersection(arg_name_to_field):
update_mask.add(arg_name_to_field[arg_name])
for arg in input_args:
if 'backend_metastores' not in update_mask:
if '--update-backends' == arg:
update_backends_value = args.update_backends
backends_list = update_backends_value.split(',')
for backend in backends_list:
update_mask.add(
_AppendKeysToUpdateMask('backend_metastores',
backend.split('=')[0]))
if '--remove-backends' == arg:
remove_backends_value = args.remove_backends
backend_keys_list = remove_backends_value.split(',')
for backend in backend_keys_list:
update_mask.add(
_AppendKeysToUpdateMask('backend_metastores', backend))
if 'labels' not in update_mask:
if '--update-labels' == arg:
for key in args.update_labels:
update_mask.add(_AppendKeysToUpdateMask('labels', key))
if '--remove-labels' == arg:
for key in args.remove_labels:
update_mask.add(_AppendKeysToUpdateMask('labels', key))
return ','.join(sorted(update_mask))

View File

@@ -0,0 +1,99 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource parsing helpers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
def GetProject():
  """Returns the value of the core/project config property.

  Config properties can be overridden with command line flags. If the --project
  flag was provided, this will return the value provided with the flag.
  """
  project_prop = properties.VALUES.core.project
  return project_prop.Get(required=True)
def ParseNetwork(network):
  """Parses a network name using configuration properties for fallback.

  Args:
    network: str, the network's ID, fully-qualified URL, or relative name

  Returns:
    str: the relative name of the network resource
  """
  # GetProject is passed as a callable so the project is only resolved when
  # it cannot be derived from `network` itself.
  ref = resources.REGISTRY.Parse(
      network,
      params={'project': GetProject},
      collection='compute.networks')
  return ref.RelativeName()
def ParseSubnetwork(subnetwork, location=None):
  """Parses a subnetwork name using configuration properties for fallback.

  Args:
    subnetwork: str, the subnetwork's ID, fully-qualified URL, or relative name
    location: str, the location ID

  Returns:
    str: the relative name of the network resource
  """
  # Fall back to the metastore/location property (as a lazy callable) only
  # when no explicit location was given.
  region = _GetConfigLocationProperty if location is None else location
  ref = resources.REGISTRY.Parse(
      subnetwork,
      params={'project': GetProject, 'region': region},
      collection='compute.subnetworks')
  return ref.RelativeName()
def ParseSecretManagerSecretVersion(secret_manager_version):
  """Parses a secret manager secret version name using configuration properties for fallback.

  Args:
    secret_manager_version: str, fully-qualified URL, or relative name

  Returns:
    str: the relative name of the secret version resource
  """
  ref = resources.REGISTRY.Parse(
      secret_manager_version,
      collection='secretmanager.projects.secrets.versions')
  return ref.RelativeName()
def ParseCloudKmsKey(cloud_kms_key):
  """Parses a Cloud KMS key using configuration properties for fallback.

  Args:
    cloud_kms_key: str, fully-qualified URL, or relative name

  Returns:
    str: the relative name of the Cloud KMS key resource
  """
  ref = resources.REGISTRY.Parse(
      cloud_kms_key,
      collection='cloudkms.projects.locations.keyRings.cryptoKeys')
  return ref.RelativeName()
def _GetConfigLocationProperty():
  """Returns the value of the metastore/location config property."""
  location_prop = properties.VALUES.metastore.location
  return location_prop.GetOrFail()

View File

@@ -0,0 +1,163 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource flags for Dataproc Metastore commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import properties
def AddServiceResourceArg(parser,
                          verb,
                          positional=True,
                          required=True,
                          plural=False):
  """Add a resource argument for a Dataproc Metastore Service.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser: the parser for the command
    verb: str, the verb to describe the resource, for example, 'to update'.
    positional: boolean, if True, means that the resource is a positional
      rather than a flag.
    required: boolean, if True, the arg is required
    plural: boolean, if True, expects a list of resources
  """
  noun = 'services' if plural else 'service'
  arg_name = _BuildArgName(noun, positional)
  help_text = 'The {} {}.'.format(noun, verb)
  parser_for_resource = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetServiceResourceSpec(),
      help_text,
      required=required,
      plural=plural)
  parser_for_resource.AddToParser(parser)
def AddOperationResourceArg(parser,
                            verb,
                            positional=True,
                            required=True,
                            plural=False):
  """Add a resource argument for a Dataproc Metastore long-running operation.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser: the parser for the command
    verb: str, the verb to describe the resource, for example, 'to update'.
    positional: boolean, if True, means that the resource is a positional
      rather than a flag.
    required: boolean, if True, the arg is required
    plural: boolean, if True, expects a list of resources
  """
  noun = 'operations' if plural else 'operation'
  arg_name = _BuildArgName(noun, positional)
  help_text = 'The {} {}.'.format(noun, verb)
  parser_for_resource = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetOperationResourceSpec(),
      help_text,
      required=required,
      plural=plural)
  parser_for_resource.AddToParser(parser)
def AddFederationResourceArg(parser,
                             verb,
                             positional=True,
                             required=True,
                             plural=False):
  """Add a resource argument for a Dataproc Metastore Federation.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser: the parser for the command
    verb: str, the verb to describe the resource, for example, 'to update'.
    positional: boolean, if True, means that the resource is a positional
      rather than a flag.
    required: boolean, if True, the arg is required
    plural: boolean, if True, expects a list of resources
  """
  noun = 'federations' if plural else 'federation'
  arg_name = _BuildArgName(noun, positional)
  help_text = 'The {} {}.'.format(noun, verb)
  parser_for_resource = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetFederationResourceSpec(),
      help_text,
      required=required,
      plural=plural)
  parser_for_resource.AddToParser(parser)
def GetServiceResourceSpec():
  """Builds the concept spec for a Dataproc Metastore service resource."""
  collection = 'metastore.projects.locations.services'
  return concepts.ResourceSpec(
      collection,
      resource_name='service',
      servicesId=ServiceAttributeConfig(),
      locationsId=LocationAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
def GetOperationResourceSpec():
  """Builds the concept spec for a Dataproc Metastore operation resource."""
  collection = 'metastore.projects.locations.operations'
  return concepts.ResourceSpec(
      collection,
      resource_name='operation',
      operationsId=OperationAttributeConfig(),
      locationsId=LocationAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
def GetFederationResourceSpec():
  """Builds the concept spec for a Dataproc Metastore federation resource."""
  collection = 'metastore.projects.locations.federations'
  return concepts.ResourceSpec(
      collection,
      resource_name='federation',
      federationsId=FederationAttributeConfig(),
      locationsId=LocationAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
def ServiceAttributeConfig():
  """Builds the attribute config for the service ID."""
  return concepts.ResourceParameterAttributeConfig(
      help_text='Dataproc Metastore service for the {resource}.',
      name='service')
def FederationAttributeConfig():
  """Builds the attribute config for the federation ID."""
  return concepts.ResourceParameterAttributeConfig(
      help_text='Dataproc Metastore federation for the {resource}.',
      name='federation')
def OperationAttributeConfig():
  """Builds the attribute config for the operation ID."""
  return concepts.ResourceParameterAttributeConfig(
      help_text='Dataproc Metastore operation for the {resource}.',
      name='operation')
def LocationAttributeConfig(fallthroughs_enabled=True):
  """Builds the attribute config for the location.

  Args:
    fallthroughs_enabled: bool, if True the metastore/location property is
      used as a fallback when the location is not otherwise specified.

  Returns:
    A ResourceParameterAttributeConfig for the location attribute.
  """
  fallthroughs = []
  if fallthroughs_enabled:
    fallthroughs.append(
        deps.PropertyFallthrough(properties.VALUES.metastore.location))
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      help_text='Location to which the {resource} belongs.',
      fallthroughs=fallthroughs)
def _BuildArgName(name, positional):
return '{}{}'.format('' if positional else '--', name)

View File

@@ -0,0 +1,136 @@
project:
name: project
collection: metastore.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: The project ID.
property: core/project
location:
name: location
collection: metastore.projects.locations
disable_auto_completers: false
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: |
The location of the Dataproc Metastore service.
If not specified, will use `default` metastore/location.
property: metastore/location
federation:
name: federation
collection: metastore.projects.locations.federations
request_id_field: federationId
attributes:
- *project
- *location
- &federation
parameter_name: federationsId
attribute_name: federation
    help: The identifier of the Dataproc Metastore federation.
parent_service:
name: service
collection: metastore.projects.locations.services
attributes:
- *project
- *location
- &parent_service
parameter_name: servicesId
attribute_name: service
    help: The identifier of the Dataproc Metastore service.
service:
name: service
collection: metastore.projects.locations.services
request_id_field: serviceId
attributes:
- *project
- *location
- &service
parameter_name: servicesId
attribute_name: service
    help: The identifier of the Dataproc Metastore service.
import:
name: import
collection: metastore.projects.locations.services.metadataImports
request_id_field: metadataImportId
attributes:
- *project
- *location
- *service
- &import
parameter_name: metadataImportsId
attribute_name: import
help: The identifier of the metadata import under Dataproc Metastore services.
backup:
name: backup
collection: metastore.projects.locations.services.backups
request_id_field: backupId
attributes:
- *project
- *location
- *service
- &backup
parameter_name: backupsId
attribute_name: backup
help: The identifier of the backup under Dataproc Metastore services.
operation:
name: operation
collection: metastore.projects.locations.operations
attributes:
- *project
- *location
- &operation
parameter_name: operationsId
attribute_name: operation
help: The ID of the Dataproc Metastore operation.
database:
name: database
collection: metastore.projects.locations.services.databases
request_id_field: databaseId
attributes:
- *project
- *location
- *service
- &database
parameter_name: databasesId
attribute_name: database
help: The identifier of the database under Dataproc Metastore services.
table:
name: table
collection: metastore.projects.locations.services.databases.tables
request_id_field: tableId
attributes:
- *project
- *location
- *service
- *database
- &table
parameter_name: tablesId
attribute_name: table
help: The identifier of the table under Database.
migration:
name: migration
collection: metastore.projects.locations.services.migrationExecutions
request_id_field: migrationExecutionId
attributes:
- *project
- *location
- *service
- &migration
parameter_name: migrationExecutionsId
attribute_name: migration
help: The identifier of the migration for a Dataproc Metastore service.

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for "gcloud metastore services backups" commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import resources
def UpdateBackupV1Alpha(ref, args, request):
  """Normalizes the restore request's `backup` field for the v1alpha API."""
  return _UpdateBackup(ref, args, request, api_version='v1alpha')
def UpdateBackupV1Beta(ref, args, request):
  """Normalizes the restore request's `backup` field for the v1beta API."""
  return _UpdateBackup(ref, args, request, api_version='v1beta')
def UpdateBackupV1(ref, args, request):
  """Normalizes the restore request's `backup` field for the v1 API."""
  return _UpdateBackup(ref, args, request, api_version='v1')
def _UpdateBackup(ref, args, request, api_version):
  """Returns a modified create request with the `backup` field updated if the `backup` is passed.

  If the user passes in a single resource like `my-backup`, convert it to a
  relative resource name. If the user passes in a relative resource name, parse
  to make sure it's valid. This will always overwrite the `backup` field in the
  request, however, it might be overwritten with the same value.

  Args:
    ref: A resource ref to the parsed Service resource.
    args: The parsed args namespace from CLI.
    request: The framework generated request to modify.
    api_version: The API version of the backup.

  Returns:
    The (possibly modified) request for the API call.
  """
  if args.backup is None:
    return request
  # The parent service's IDs act as fallbacks when `backup` is a bare ID.
  parent_params = {
      'projectsId': ref.projectsId,
      'locationsId': ref.locationsId,
      'servicesId': ref.servicesId,
  }
  backup_ref = resources.REGISTRY.Parse(
      args.backup,
      params=parent_params,
      api_version=api_version,
      collection='metastore.projects.locations.services.backups')
  request.restoreServiceRequest.backup = backup_ref.RelativeName()
  return request

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for "gcloud metastore services imports" commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis
def Messages(api_version):
  """Returns the metastore messages module for the given API version."""
  service_name = 'metastore'
  return apis.GetMessagesModule(service_name, api_version)
def UpdateDescription(unused_ref, args, update_import_req):
  """Copies the CLI description onto the metadata import update request.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The parsed args namespace from CLI.
    update_import_req: Created Update request for the API call.

  Returns:
    Modified request for the API call.
  """
  del unused_ref
  new_description = args.description
  update_import_req.metadataImport.description = new_description
  return update_import_req

View File

@@ -0,0 +1,363 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for "gcloud metastore services" commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import xml.etree.cElementTree as element_tree
from googlecloudsdk.command_lib.metastore import parsers
from googlecloudsdk.core import properties
def GetTier():
  """Returns the value of the metastore/tier config property.

  Config properties can be overridden with command line flags. If the --tier
  flag was provided, this will return the value provided with the flag.
  """
  tier_prop = properties.VALUES.metastore.tier
  return tier_prop.Get(required=True)
def LoadHiveMetatsoreConfigsFromXmlFile(file_arg):
  """Converts an input XML document into a Hive metastore configurations dict.

  Args:
    file_arg: str, the XML contents of a Hive configuration file whose
      `<property>` elements each contain a `<name>` and a `<value>` child.

  Returns:
    dict, mapping each property name to its text value.
  """
  # Use xml.etree.ElementTree directly rather than the module-level
  # `xml.etree.cElementTree` alias: cElementTree has been deprecated since
  # Python 3.3 and was removed entirely in Python 3.9.
  import xml.etree.ElementTree as etree  # pylint: disable=g-import-not-at-top
  hive_metastore_configs = {}
  root = etree.fromstring(file_arg)
  for prop in root.iter('property'):
    hive_metastore_configs[prop.find('name').text] = prop.find('value').text
  return hive_metastore_configs
def UpdateScalingConfig(unused_ref, args, req):
  """Updates the Service scaling config.

  Sets autoscalingEnabled to true if the service specified a min scaling
  factor, max scaling factor, or both. Sets the scaling config to None if tier
  is specified. If no scaling configs are specified, the scaling config is set
  to None.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The request arguments.
    req: A request with `service` field.

  Returns:
    The same request object with a modified scaling config.
    1. Scaling config with autoscaling enabled if min or max scaling factors
       are specified.
    2. Scaling config is set to None if tier is specified. Do note if tier and
       scaling configs are specified, the SDK will throw a 400. The scaling
       config is set to an empty dictionary by default that fails on the
       server side.
  """
  del unused_ref
  wants_autoscaling = args.min_scaling_factor or args.max_scaling_factor
  if wants_autoscaling:
    req.service.scalingConfig.autoscalingConfig.autoscalingEnabled = True
  elif args.tier:
    req.service.scalingConfig = None
  return req
def UpdateScalingConfigForCreate(unused_ref, args, req):
  """Updates the Service scaling config for create requests.

  Sets autoscalingEnabled to true if the service specified a min scaling
  factor, max scaling factor, or both. Sets the scaling config to None if tier
  is specified. If no scaling configs are specified, the scaling config is set
  to None.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The request arguments.
    req: A request with `service` field.

  Returns:
    A request with a modified scaling config.
  """
  req = UpdateScalingConfig(unused_ref, args, req)
  any_scaling_arg = (
      args.min_scaling_factor
      or args.max_scaling_factor
      or args.instance_size
      or args.scaling_factor)
  if not any_scaling_arg:
    # Nothing scaling-related was specified: drop the (empty) scaling config
    # entirely instead of sending a default that the server rejects.
    req.service.scalingConfig = None
  return req
def GenerateNetworkConfigFromSubnetList(unused_ref, args, req):
  """Generates the NetworkConfig message from the list of subnetworks.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The request arguments.
    req: A request with `service` field.

  Returns:
    A request with network configuration field if `consumer-subnetworks` is
    present in the arguments.
  """
  del unused_ref
  if not args.consumer_subnetworks:
    return req
  consumers = []
  for subnet in args.consumer_subnetworks:
    # Each subnet is normalized to its relative resource name.
    consumers.append(
        {'subnetwork': parsers.ParseSubnetwork(subnet, args.location)})
  req.service.networkConfig = {'consumers': consumers}
  return req
def GenerateAuxiliaryVersionsConfigFromList(unused_ref, args, req):
  """Generates the auxiliary versions map from the list of auxiliary versions.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The request arguments.
    req: A request with `service` field.

  Returns:
    If `auxiliary-versions` is present in the arguments, a request with hive
    metastore config's auxiliary versions map field is returned.
    Otherwise the original request is returned.
  """
  del unused_ref
  if not args.auxiliary_versions:
    return req
  if req.service.hiveMetastoreConfig is None:
    req.service.hiveMetastoreConfig = {}
  versions = _GenerateAuxiliaryVersionsVersionList(args.auxiliary_versions)
  req.service.hiveMetastoreConfig.auxiliaryVersions = versions
  return req
def LoadAuxiliaryVersionsConfigsFromYamlFile(file_contents):
  """Convert Input YAML file into auxiliary versions configurations map.

  Args:
    file_contents: The YAML file contents of the file containing the auxiliary
      versions configurations.

  Returns:
    The auxiliary versions configuration mapping with service name as the key
    and config as the value.
  """
  aux_versions = {}
  for entry in file_contents:
    config = {'version': entry['version']}
    # `config_overrides` is optional per auxiliary version.
    if 'config_overrides' in entry:
      config['configOverrides'] = entry['config_overrides']
    aux_versions[entry['name']] = config
  return aux_versions
def LoadScheduledBackupConfigsFromJsonFile(file_contents):
  """Convert Input JSON file into scheduled backup configurations map.

  Args:
    file_contents: The JSON file contents of the file containing the scheduled
      backup configurations.

  Returns:
    The scheduled backup configuration mapping with key and value.

  Raises:
    ValueError: If the contents are not valid JSON, or if `enabled` is true
      but `cron_schedule` or `backup_location` is missing.
  """
  try:
    scheduled_backup_configs = json.loads(file_contents)
    config = {}
    if 'enabled' in scheduled_backup_configs:
      config['enabled'] = scheduled_backup_configs.pop('enabled')
    if config.get('enabled', False):
      # A schedule and a destination are mandatory for an enabled backup.
      if 'cron_schedule' not in scheduled_backup_configs:
        raise ValueError('Missing required field: cron_schedule')
      if 'backup_location' not in scheduled_backup_configs:
        raise ValueError('Missing required field: backup_location')
    config['cron_schedule'] = scheduled_backup_configs.get('cron_schedule')
    config['backup_location'] = scheduled_backup_configs.get('backup_location')
    config['time_zone'] = scheduled_backup_configs.get('time_zone', 'UTC')
    return config
  except (json.JSONDecodeError, KeyError) as e:
    # Chain the original error so the root cause is preserved in tracebacks.
    raise ValueError(
        f'Invalid scheduled backup configuration JSON data: {e}') from e
def _GenerateAdditionalProperties(values_dict):
"""Format values_dict into additionalProperties-style dict."""
props = [{'key': k, 'value': v} for k, v in sorted(values_dict.items())]
return {'additionalProperties': props}
def _GenerateUpdateMask(args):
  """Constructs updateMask for patch requests.

  The mask is derived from which flags were explicitly specified on the
  command line: most flags map 1:1 onto a request field path, while per-key
  Hive config and label edits expand into one mask entry per touched key.

  Args:
    args: The parsed args namespace from CLI.

  Returns:
    String containing update mask for patch request (comma-separated,
    sorted field paths).
  """
  hive_metastore_configs = 'hive_metastore_config.config_overrides'
  labels = 'labels'
  # Flag name -> request field path contributed to the update mask when the
  # flag is specified. Several flags intentionally share one field path
  # (e.g. both maintenance-window flags update the whole maintenance_window).
  arg_name_to_field = {
      '--port': 'port',
      '--tier': 'tier',
      '--instance-size': 'scaling_config.instance_size',
      '--scaling-factor': 'scaling_config.scaling_factor',
      '--autoscaling-enabled': (
          'scaling_config.autoscaling_config.autoscaling_enabled'
      ),
      '--min-scaling-factor': (
          'scaling_config.autoscaling_config.limit_config.min_scaling_factor'
      ),
      '--max-scaling-factor': (
          'scaling_config.autoscaling_config.limit_config.max_scaling_factor'
      ),
      '--update-hive-metastore-configs-from-file': (
          'hive_metastore_config.config_overrides'
      ),
      '--clear-hive-metastore-configs': hive_metastore_configs,
      '--clear-labels': labels,
      '--kerberos-principal': 'hive_metastore_config.kerberos_config.principal',
      '--keytab': 'hive_metastore_config.kerberos_config.keytab',
      '--krb5-config': (
          'hive_metastore_config.kerberos_config.krb5_config_gcs_uri'
      ),
      '--maintenance-window-day': 'maintenance_window',
      '--maintenance-window-hour': 'maintenance_window',
      '--data-catalog-sync': 'metadataIntegration.dataCatalogConfig.enabled',
      '--no-data-catalog-sync': 'metadataIntegration.dataCatalogConfig.enabled',
      '--deletion-protection': 'deletion_protection',
      '--no-deletion-protection': 'deletion_protection',
      '--endpoint-protocol': 'hive_metastore_config.endpoint_protocol',
      '--add-auxiliary-versions': 'hive_metastore_config.auxiliary_versions',
      '--update-auxiliary-versions-from-file': (
          'hive_metastore_config.auxiliary_versions'
      ),
      '--clear-auxiliary-versions': 'hive_metastore_config.auxiliary_versions',
      '--scheduled-backup-configs-from-file': 'scheduled_backup',
      '--enable-scheduled-backup': 'scheduled_backup',
      '--no-enable-scheduled-backup': 'scheduled_backup.enabled',
      '--scheduled-backup-cron': 'scheduled_backup',
      '--scheduled-backup-location': 'scheduled_backup',
      '--clear-bigquery-metastore-migration-config': (
          'bigquery_metastore_migration_config'
      ),
      '--bigquery-project-id': (
          'bigquery_metastore_migration_config.bigquery_project_id'
      ),
      '--bigquery-dataset-location': (
          'bigquery_metastore_migration_config.bigquery_dataset_location'
      ),
      '--desired-migration-state': (
          'bigquery_metastore_migration_config.desired_migration_state'
      ),
  }
  update_mask = set()
  # Only flags the user actually specified contribute to the mask.
  for arg_name in set(args.GetSpecifiedArgNames()).intersection(
      arg_name_to_field
  ):
    update_mask.add(arg_name_to_field[arg_name])
  hive_metastore_configs_update_mask_prefix = hive_metastore_configs + '.'
  # If the whole config_overrides field is not already being replaced or
  # cleared, add one mask entry per individual key updated or removed.
  if hive_metastore_configs not in update_mask:
    if args.update_hive_metastore_configs:
      for key in args.update_hive_metastore_configs:
        update_mask.add(hive_metastore_configs_update_mask_prefix + key)
    if args.remove_hive_metastore_configs:
      for key in args.remove_hive_metastore_configs:
        update_mask.add(hive_metastore_configs_update_mask_prefix + key)
  labels_update_mask_prefix = labels + '.'
  # Same per-key expansion for label updates and removals.
  if labels not in update_mask:
    if args.update_labels:
      for key in args.update_labels:
        update_mask.add(labels_update_mask_prefix + key)
    if args.remove_labels:
      for key in args.remove_labels:
        update_mask.add(labels_update_mask_prefix + key)
  return ','.join(sorted(update_mask))
def SetServiceRequestUpdateHiveMetastoreConfigs(
    unused_job_ref, args, update_service_req
):
  """Modify the Service update request to update, remove, or clear Hive metastore configurations.

  Args:
    unused_job_ref: A resource ref to the parsed Service resource.
    args: The parsed args namespace from CLI.
    update_service_req: Created Update request for the API call.

  Returns:
    Modified request for the API call.
  """
  # The file-based flag, when present, takes precedence over per-key updates.
  configs = args.update_hive_metastore_configs or {}
  if args.update_hive_metastore_configs_from_file:
    configs = LoadHiveMetatsoreConfigsFromXmlFile(
        args.update_hive_metastore_configs_from_file
    )
  update_service_req.service.hiveMetastoreConfig.configOverrides = (
      _GenerateAdditionalProperties(configs)
  )
  return update_service_req
def GenerateUpdateAuxiliaryVersionsConfigs(
    unused_job_ref, args, update_service_req
):
  """Modify the Service update request to add or clear auxiliary versions.

  Args:
    unused_job_ref: A resource ref to the parsed Service resource.
    args: The parsed args namespace from CLI.
    update_service_req: Created Update request for the API call.

  Returns:
    Modified request for the API call containing auxiliary version updates if
    specified else the original request.
  """
  service = update_service_req.service
  if service.hiveMetastoreConfig is None:
    service.hiveMetastoreConfig = {}
  if args.clear_auxiliary_versions:
    service.hiveMetastoreConfig.auxiliaryVersions = {}
  if args.add_auxiliary_versions:
    service.hiveMetastoreConfig.auxiliaryVersions = (
        _GenerateAuxiliaryVersionsVersionList(args.add_auxiliary_versions)
    )
  return update_service_req
def _GenerateAuxiliaryVersionsVersionList(aux_versions):
  """Keys each version as 'aux-<version with dots dashed>' and wraps as additionalProperties."""
  keyed_versions = {}
  for version in aux_versions:
    keyed_versions['aux-' + version.replace('.', '-')] = {'version': version}
  return _GenerateAdditionalProperties(keyed_versions)
def UpdateServiceMaskHook(unused_ref, args, update_service_req):
  """Constructs updateMask for update requests of Dataproc Metastore services.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    args: The parsed args namespace from CLI.
    update_service_req: Created Update request for the API call.

  Returns:
    Modified request for the API call.
  """
  mask = _GenerateUpdateMask(args)
  update_service_req.updateMask = mask
  return update_service_req

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities for Metastore commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import io
from googlecloudsdk.core.resource import resource_printer
def ConstructList(title, items):
  """Constructs text output listing the elements of items and a title.

  Args:
    title: string, the listing title
    items: iterable, the iterable whose elements to list

  Returns:
    string, text representing list title and elements.
  """
  output = io.StringIO()
  list_format = 'list[title="{0}"]'.format(title)
  resource_printer.Print(items, list_format, out=output)
  return output.getvalue()

View File

@@ -0,0 +1,583 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource parsing helpers."""
# TODO(b/331622809): Review and fix any violations of naming conventions.
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import ipaddress
import re
from typing import Any
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.metastore import parsers
# Upper bound on user-supplied string flag values; see ValidateStringField.
STRING_MAX_LENGTH = 1000
# Maps the CLI short metastore type names to the API enum values.
METASTORE_TYPE_DICT = {
    'dpms': 'DATAPROC_METASTORE',
    'dataplex': 'DATAPLEX',
    'bigquery': 'BIGQUERY',
}
# Maps the CLI short metastore type names to their resource collection path
# segments (BigQuery backends are addressed by project only, so no entry).
METASTORE_RESOURCE_PATH_DICT = {'dpms': 'services', 'dataplex': 'lakes'}
def ValidatePort(port):
  """Python hook to validate that the port is between 1024 and 65535, inclusive.

  Args:
    port: int, the port number to validate.

  Returns:
    The validated port.

  Raises:
    exceptions.BadArgumentException: if the port is outside [1024, 65535].
  """
  if port < 1024 or port > 65535:
    # The check accepts 1024, so quote the actual lower bound; the message
    # previously claimed the range started at 1025.
    raise exceptions.BadArgumentException(
        '--port', 'Port ({0}) is not in the range [1024, 65535].'.format(port)
    )
  return port
def ValidateScalingFactor(scaling_factor):
  """Python hook to validate the scaling factor."""
  flag_name = '--scaling-factor'
  return ValidateScalingFactorFloat(scaling_factor, flag_name)
def ValidateMinScalingFactor(scaling_factor):
  """Python hook to validate the min scaling factor."""
  flag_name = '--min-scaling-factor'
  return ValidateScalingFactorFloat(scaling_factor, flag_name)
def ValidateMaxScalingFactor(scaling_factor):
  """Python hook to validate the max scaling factor."""
  flag_name = '--max-scaling-factor'
  return ValidateScalingFactorFloat(scaling_factor, flag_name)
def ValidateScalingFactorFloat(scaling_factor, flag_name):
  """Validate that a scaling factor value is an allowed increment.

  Values must lie inside [0.1, 6.0]; values below 1.0 must be multiples of
  0.1 and values of 1.0 or more must be whole numbers.

  Args:
    scaling_factor: float, the value to validate.
    flag_name: the flag name to report in error messages.

  Returns:
    The validated scaling factor.

  Raises:
    exceptions.BadArgumentException: when the value is out of range or not an
      allowed increment.
  """
  if scaling_factor < 0.1 or scaling_factor > 6:
    raise exceptions.BadArgumentException(
        flag_name,
        'Scaling factor ({0}) is not in the range [0.1, 6.0].'.format(
            scaling_factor
        ),
    )
  if scaling_factor < 1:
    # NOTE(review): this float-modulo check assumes values like 0.1..0.9
    # survive the *10 multiplication exactly — true for IEEE doubles parsed
    # from one-decimal literals.
    if scaling_factor * 10 % 1 != 0:
      raise exceptions.BadArgumentException(
          flag_name,
          'Scaling factor less than 1.0 ({0}) should be a'
          ' multiple of 0.1 (e.g. (0.1, 0.2, 0.3))'.format(scaling_factor),
      )
  elif scaling_factor % 1.0 != 0:
    raise exceptions.BadArgumentException(
        flag_name,
        'Scaling greater than 1.0 ({0}) should be a multiple'
        ' of 1.0 (e.g. (1.0, 2.0, 3.0))'.format(scaling_factor),
    )
  return scaling_factor
def ValidateGcsUri(arg_name):
  """Returns a validator that checks a value is a `gs://` URI."""

  def Process(gcs_uri):
    if gcs_uri.startswith('gs://'):
      return gcs_uri
    raise exceptions.BadArgumentException(
        arg_name, 'Expected URI {0} to start with `gs://`.'.format(gcs_uri)
    )

  return Process
def ValidateKerberosPrincipal(kerberos_principal):
  """Validates a Kerberos principal of the form primary/instance@REALM."""
  principal_pattern = re.compile(r'^(.+)/(.+)@(.+)$')
  if principal_pattern.match(kerberos_principal):
    return kerberos_principal
  raise exceptions.BadArgumentException(
      '--kerberos-principal',
      'Kerberos Principal {0} does not match ReGeX {1}.'.format(
          kerberos_principal, principal_pattern
      ),
  )
def ValidateHourOfDay(hour):
  """Validates that the hour falls between 0 and 23, inclusive."""
  if 0 <= hour <= 23:
    return hour
  raise exceptions.BadArgumentException(
      '--maintenance-window-hour-of-day',
      'Hour of day ({0}) is not in [0, 23].'.format(hour),
  )
def ValidateStringField(arg_name):
  """Validates that the string field is not longer than STRING_MAX_LENGTH, to avoid abuse issues.

  NOTE(review): despite its name, `arg_name` here is the user-supplied string
  value being validated; it is also echoed as the argument name in the
  exception — confirm against callers.
  """
  if len(arg_name) <= STRING_MAX_LENGTH:
    return arg_name
  raise exceptions.BadArgumentException(
      arg_name,
      'The string field can not be longer than {0} characters.'.format(
          STRING_MAX_LENGTH
      ),
  )
def ValidateCloudSqlInstanceConnectionName(connection_name):
  """Validates the connection name of a CloudSQL instance.

  The name must be in the form '{project_id}:{region}:{instance_id}'.

  Args:
    connection_name: The CloudSQL instance connection name string.

  Returns:
    The connection name string.

  Raises:
    BadArgumentException: when the input string does not match the pattern.
  """
  if re.match(r'^([^:]+:){2}[^:]+$', connection_name):
    return connection_name
  raise exceptions.BadArgumentException(
      '--instance-connection-name',
      'The instance connection name should be in the format'
      ' project_id:region:instance_id',
  )
def ValidateNetworkResourceName(arg_name):
  """Returns a validator for compute network resource names.

  A valid name is 'projects/{project_id}/global/networks/{network_id}'.
  """

  def Process(resource_name):
    if re.match(r'^projects/[^/]+/global/networks/[^/]+$', resource_name):
      return resource_name
    raise exceptions.BadArgumentException(
        arg_name,
        'The network resource name should be in the format'
        ' projects/<project_id>/global/networks/<network_id>',
    )

  return Process
def ValidateSubnetworkResourceName(arg_name):
  """Returns a validator for compute subnetwork resource names.

  A valid name is
  'projects/{project_id}/regions/{region_id}/subnetworks/{subnetwork_id}'.
  """

  def Process(resource_name):
    if re.match(
        r'^projects/[^/]+/regions/[^/]+/subnetworks/[^/]+$', resource_name
    ):
      return resource_name
    raise exceptions.BadArgumentException(
        arg_name,
        'The subnetwork resource name should be in the format'
        ' projects/{project_id}/regions/{region_id}/subnetworks/{subnetwork_id}',
    )

  return Process
def ValidateHiveDatabaseName(db_name):
  """Validates the hive database name.

  Args:
    db_name: the hive database name.

  Returns:
    the hive database name.

  Raises:
    BadArgumentException: when the database name doesn't conform to the
      pattern or is longer than 64 characters.
  """
  # NOTE(review): the regex also accepts '$', '_' and '-' as the first
  # character, which is broader than the error message suggests.
  if not re.match(r'^[0-9a-zA-Z$_-]+$', db_name):
    raise exceptions.BadArgumentException(
        '--hive-database-name',
        'hive database name must start with an alphanumeric character, and'
        ' contain only the following characters: letters, numbers, dashes (-),'
        ' and underscores (_).',
    )
  if len(db_name) > 64:
    raise exceptions.BadArgumentException(
        '--hive-database-name',
        'hive database name must be less than 64 characters.',
    )
  return db_name
def ValidateCloudSqlIpAddress(ip_address):
  """Validates the Cloud SQL IP address.

  Args:
    ip_address: the Cloud SQL IP address.

  Returns:
    the IP address.

  Raises:
    BadArgumentException: when the IP address is invalid.
  """
  try:
    # Parsing is only for validation; the original string is returned.
    ipaddress.IPv4Address(ip_address)
  except ValueError:
    raise exceptions.BadArgumentException(
        '--ip-address',
        'Invalid IP address.',
    )
  return ip_address
def ValidateSubnetIpRange(cidr):
  """Validates the subnet IP range.

  Args:
    cidr: the CIDR range for the subnet.

  Returns:
    the CIDR range.

  Raises:
    BadArgumentException: when the CIDR range is invalid.
  """

  def IsCidrWithinValidRanges(candidate):
    """Checks whether candidate lies inside one of the allowed CIDR ranges."""
    valid_cidr_ranges = (
        '10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16',  # RFC 1918
        '100.64.0.0/10',  # RFC 6598
        '192.0.0.0/24',  # RFC 6890
        '192.0.2.0/24', '198.51.100.0/24', '203.0.113.0/24',  # RFC 5737
        '192.88.99.0/24',  # RFC 7526
        '198.18.0.0/15',  # RFC 2544
    )
    cidr_block = ipaddress.IPv4Network(candidate)
    return any(
        cidr_block.subnet_of(ipaddress.IPv4Network(valid_range))
        for valid_range in valid_cidr_ranges
    )

  try:
    if not IsCidrWithinValidRanges(cidr):
      raise exceptions.BadArgumentException(
          '--subnet-ip-range',
          'The subnet IP range is invalid, see'
          ' https://cloud.google.com/vpc/docs/subnets.md#valid-ranges',
      )
  except ValueError:
    # Raised by IPv4Network on a malformed CIDR string.
    raise exceptions.BadArgumentException(
        '--subnet-ip-range',
        'Invalid CIDR address block.',
    )
  return cidr
def ValidateMigrationBucketName(bucket_name):
  """Validates the Cloud Storage bucket name used for CDC during migration.

  The name must not carry a 'gs://' scheme prefix and may contain only
  lowercase letters, digits, dots, underscores, and dashes.

  Args:
    bucket_name: the Cloud Storage bucket name.

  Returns:
    the Cloud Storage bucket name.

  Raises:
    BadArgumentException: when the Cloud Storage bucket name doesn't conform
      to the pattern.
  """
  if re.match(r'^(?!gs://)([a-z0-9\._-]+)$', bucket_name):
    return bucket_name
  raise exceptions.BadArgumentException(
      '--bucket',
      'Invalid bucket name',
  )
def ValidateMigrationRootPath(root_path):
  """Validates the root path inside the CDC Cloud Storage bucket.

  The path must begin with a forward slash ('/') and contain no newline
  or carriage-return characters.

  Args:
    root_path: the root path inside the Cloud Storage bucket.

  Returns:
    the root path.

  Raises:
    BadArgumentException: when the root path is invalid.
  """
  if re.match(r'^/([^\n\r]*)$', root_path):
    return root_path
  raise exceptions.BadArgumentException(
      '--root-path',
      'Invalid root path',
  )
def ValidateServiceMutexConfig(unused_ref, unused_args, req):
  """Validates that mutually exclusive service configurations are not combined.

  Delegates to the V1 validation, currently the only rule set.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    unused_args: The parsed args namespace from CLI.
    req: A request with `service` field.

  Returns:
    A request without service mutex configuration conflicts.

  Raises:
    BadArgumentException: when mutually exclusive configurations of service
      are set at the same time.
  """
  validated_req = ValidateServiceMutexConfigForV1(unused_ref, unused_args, req)
  return validated_req
def ValidateServiceMutexConfigForV1(unused_ref, unused_args, req):
  """Validates V1 mutually exclusive Dataproc Metastore service configurations.

  Kerberos configuration may not be combined with a consumer network
  configuration.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    unused_args: The parsed args namespace from CLI.
    req: A request with `service` field.

  Returns:
    A request without service mutex configuration conflicts.

  Raises:
    BadArgumentException: when mutually exclusive configurations of service
      are set at the same time.
  """
  principal = None
  hive_config = req.service.hiveMetastoreConfig
  if hive_config and hive_config.kerberosConfig:
    principal = hive_config.kerberosConfig.principal
  if principal and _IsNetworkConfigPresentInService(req.service):
    raise exceptions.BadArgumentException(
        '--kerberos-principal',
        'Kerberos configuration cannot be used in conjunction with'
        ' --network-config-from-file or --consumer-subnetworks.',
    )
  return req
def ValidateScheduledBackupConfigs(unused_ref, args, req):
  """Validates that cron_schedule and backup_location accompany enabled backups.

  Args:
    unused_ref: A resource ref to the parsed metastore service resource.
    args: The parsed args namespace from CLI.
    req: A request with `service` field.

  Returns:
    A request with service scheduled backups configurations required.

  Raises:
    BadArgumentException: when cron_schedule and backup_location are not set
      while the scheduled backup is enabled.
  """
  specified = set(args.GetSpecifiedArgNames())
  if req.service.scheduledBackup.enabled:
    # Both flags are mandatory once backups are turned on.
    for flag in ('--scheduled-backup-cron', '--scheduled-backup-location'):
      if flag not in specified:
        raise exceptions.BadArgumentException(
            flag,
            '{0} must be set when the scheduled backup is'
            ' enabled.'.format(flag),
        )
  return req
def _IsNetworkConfigPresentInService(service):
return service.networkConfig and service.networkConfig.consumers
def ValidateClearBackends(unused_ref, args, update_federation_req):
  """Validate that --clear-backends is only used together with --update-backends.

  Args:
    unused_ref: A resource ref to the parsed Federation resource.
    args: The parsed args namespace from CLI.
    update_federation_req: The request for the API call.

  Returns:
    The unmodified request.

  Raises:
    BadArgumentException: When the update federation command is run with
      --clear-backends but without --update-backends.
  """
  specified = set(args.GetSpecifiedArgNames())
  clear_requested = '--clear-backends' in specified
  update_requested = '--update-backends' in specified
  if clear_requested and not update_requested:
    raise exceptions.BadArgumentException(
        '--clear-backends',
        '--clear-backends must be used with --update-backends',
    )
  return update_federation_req
def _IsZeroOrPositiveNumber(string):
if string.isdigit():
return int(string) >= 0
return False
def _GetMetastoreTypeFromDict(dictionary):
return '|'.join(value for key, value in dictionary.items())
def _GenerateShortOrLongBackendNames(metastore_type_and_name):
"""Validate and process the format of short and long names for backends.
Args:
metastore_type_and_name: Metastore type and name.
Returns:
String backend name.
Raises:
BadArgumentException: When the input backend(s) are invalid
"""
if metastore_type_and_name[0].lower() == 'bigquery':
long_name_regex = r'^projects\/.*[^\/]'
else:
long_name_regex = (
r'^projects\/.*[^\/]\/locations\/.[^\/]*\/('
+ _GetMetastoreTypeFromDict(METASTORE_RESOURCE_PATH_DICT)
+ r')\/.[^\/]*$'
)
if '/' in metastore_type_and_name[1]:
if re.search(long_name_regex, metastore_type_and_name[1]):
return metastore_type_and_name[1]
else:
raise exceptions.BadArgumentException(
'--backends', 'Invalid backends format'
)
else:
if metastore_type_and_name[0].lower() == 'bigquery':
return 'projects/' + metastore_type_and_name[1]
else:
return (
'{0}/'
+ METASTORE_RESOURCE_PATH_DICT[metastore_type_and_name[0]]
+ '/'
+ metastore_type_and_name[1]
)
def ValidateBackendsAndReturnMetastoreDict(backends):
  """Validate the --backends value and parse it into a backend metastore dict.

  Checks that the argument has the correct format, that each metastore type
  is known, and that the keys are non-negative integers without duplicates.

  Args:
    backends: A string passed by the user in the format
      <key>=<metastore_type>:<name>,... For example:
      1=dpms:dpms1,2=dataplex:lake1

  Returns:
    Backend metastore dict keyed by the user-supplied rank.

  Raises:
    BadArgumentException: When the input backends are invalid or keys are
      duplicated.
  """
  if not backends:
    raise exceptions.BadArgumentException('--backends', 'Cannot be empty')
  backend_dict = {}
  for entry in backends.split(','):
    key_and_value = entry.split('=')
    if len(key_and_value) != 2:
      raise exceptions.BadArgumentException(
          '--backends', 'Invalid backends format'
      )
    key = key_and_value[0]
    if not _IsZeroOrPositiveNumber(key):
      raise exceptions.BadArgumentException(
          '--backends',
          'Invalid backends format or key of backend is less than 0',
      )
    metastore_type_and_name = key_and_value[1].split(':')
    if len(metastore_type_and_name) != 2:
      raise exceptions.BadArgumentException(
          '--backends', 'Invalid backends format'
      )
    if key in backend_dict:
      raise exceptions.BadArgumentException(
          '--backends', 'Duplicated keys of backends'
      )
    if metastore_type_and_name[0] not in METASTORE_TYPE_DICT:
      raise exceptions.BadArgumentException(
          '--backends', 'Invalid backends type'
      )
    backend_dict[key] = {
        'name': _GenerateShortOrLongBackendNames(metastore_type_and_name),
        'metastoreType': METASTORE_TYPE_DICT[metastore_type_and_name[0]],
    }
  return backend_dict
def ParseBackendsIntoRequest(job_ref, request):
  """Generate the long backend name of Dataproc Metastore federation requests.

  Fills the '{0}' placeholder in each backend name template with the parent
  resource's relative name.

  Args:
    job_ref: A resource ref to the parsed Federation resource.
    request: The request for the API call.

  Returns:
    Modified request for the API call.
  """
  for backend in request.federation.backendMetastores.additionalProperties:
    backend.value.name = backend.value.name.format(
        job_ref.Parent().RelativeName()
    )
  return request
def ValidateKmsKeys(unused_ref, unused_args, req):
  """Validates that the kms keys are valid.

  Args:
    unused_ref: A resource ref to the parsed Service resource.
    unused_args: The parsed args namespace from CLI.
    req: A request with `service` field.

  Returns:
    The unchanged request.

  Raises:
    InvalidResourceException: If the line is invalid.
    RequiredFieldOmittedException: If resource is underspecified.
    UnknownCollectionException: If no collection is provided or can be
      inferred.
    WrongResourceCollectionException: If the provided URL points into a
      collection other than the one specified.
  """
  encryption_config = req.service.encryptionConfig
  if encryption_config is None or encryption_config.kmsKeys is None:
    return req
  for kms_key in encryption_config.kmsKeys:
    # Parsing raises if the key is not a valid Cloud KMS key resource.
    parsers.ParseCloudKmsKey(kms_key)
  return req