# -*- coding: utf-8 -*-
#
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for interacting with the Compute API."""

from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from googlecloudsdk.api_lib.compute import base_classes as compute_base
from googlecloudsdk.api_lib.compute import constants as compute_constants
from googlecloudsdk.api_lib.compute import utils as compute_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags
from googlecloudsdk.command_lib.compute import scope as compute_scope
from googlecloudsdk.command_lib.compute import scope_prompter
from googlecloudsdk.core import log
from googlecloudsdk.core import properties

# Copy into dataproc for cleaner separation from the compute command surface.
SCOPE_ALIASES = compute_constants.SCOPES
SCOPES_HELP = compute_constants.ScopesHelp()


def ExpandScopeAliases(scopes):
  """Replace known aliases in the list of scopes provided by the user.

  Args:
    scopes: list of str or None, scope names and/or aliases supplied by the
      user. Each entry that is a key of SCOPE_ALIASES is expanded to the full
      scope URI(s) it stands for; all other entries are passed through as-is.

  Returns:
    A sorted list of fully-expanded scope strings (empty if scopes is falsy).
  """
  scopes = scopes or []
  expanded_scopes = []
  for scope in scopes:
    if scope in SCOPE_ALIASES:
      expanded_scopes += SCOPE_ALIASES[scope]
    else:
      # Unrecognized entries are kept verbatim; scopes are validated
      # server side.
      expanded_scopes.append(scope)
  return sorted(expanded_scopes)


def GetComputeResources(release_track, cluster_name, dataproc_region):
  """Returns a resources object with resolved GCE zone and region.

  Resolves the compute zone (prompting the user if necessary for the legacy
  'global' Dataproc region) and records the resulting zone/region in the
  compute properties as a side effect.

  Args:
    release_track: base.ReleaseTrack, the release track of the calling command
      (selects the Compute API version via ComputeApiHolder).
    cluster_name: str, name of the cluster, used only in the zone prompt.
    dataproc_region: str, the Dataproc region; for non-'global' regions an
      unset zone is allowed (the server picks a zone).

  Returns:
    The resource registry from the ComputeApiHolder, with the compute
    zone/region properties set.
  """
  holder = compute_base.ComputeApiHolder(release_track)
  region_prop = properties.VALUES.compute.region
  zone_prop = properties.VALUES.compute.zone
  resources = holder.resources

  # Prompt for scope if necessary. If the Dataproc regional stack is used,
  # omitting the zone allows the server to pick a zone.
  zone = zone_prop.Get()
  if not zone and dataproc_region == 'global':
    _, zone = scope_prompter.PromptForScope(
        resource_name='cluster',
        underspecified_names=[cluster_name],
        scopes=[compute_scope.ScopeEnum.ZONE],
        default_scope=None,
        scope_lister=flags.GetDefaultScopeLister(holder.client))
    if not zone:
      # Still no zone, just raise the error generated by this property.
      zone = zone_prop.GetOrFail()
  if zone:
    zone_ref = resources.Parse(
        zone,
        params={
            'project': properties.VALUES.core.project.GetOrFail,
        },
        collection='compute.zones')
    zone_name = zone_ref.Name()
    zone_prop.Set(zone_name)
    # Derive the GCE region from the resolved zone name.
    region_name = compute_utils.ZoneNameToRegionName(zone_name)
    region_prop.Set(region_name)
  else:
    # Auto zone: leave the zone unset so the server picks one.
    zone_prop.Set('')
    # Set GCE region to the Dataproc region (which is a 1:1 mapping).
    region_prop.Set(dataproc_region)
  return resources


def GetDefaultServiceAccount(project_id):
  """Call Compute.Projects.Get to find project_id's default Service Account.

  Args:
    project_id: str, the project whose default compute service account is
      looked up.

  Returns:
    The defaultServiceAccount value from the Compute API project resource
    (presumably the service-account email; confirm against the API schema).
  """
  holder = compute_base.ComputeApiHolder(base.ReleaseTrack.GA)
  client = holder.client
  # Future optimization: Limit the size with "fields='defaultServiceAccount'".
  request = client.messages.ComputeProjectsGetRequest(project=project_id)
  default_service_account = client.apitools_client.projects.Get(
      request=request).defaultServiceAccount
  log.debug('Default compute Service Account is %s.', default_service_account)
  return default_service_account