feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,91 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis as core_apis
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
class AppengineApiClientBase(object):
  """Shared plumbing for App Engine Admin API clients.

  Holds the apitools client plus the active Cloud project, and provides
  helpers that turn bare ids into fully qualified resource names.
  """

  def __init__(self, client):
    self.client = client
    # Get(required=True) raises if no project is configured.
    self.project = properties.VALUES.core.project.Get(required=True)

  @property
  def messages(self):
    """The generated message module bundled with the apitools client."""
    return self.client.MESSAGES_MODULE

  @classmethod
  def ApiVersion(cls):
    """Default Admin API version used when no override is supplied."""
    return 'v1'

  @classmethod
  def GetApiClient(cls, api_version=None):
    """Initializes an AppengineApiClient using the specified API version.

    Uses the api_client_overrides/appengine property to determine which client
    version to use if api_version is not set. Additionally uses the
    api_endpoint_overrides/appengine property to determine the server endpoint
    for the App Engine API.

    Args:
      api_version: The api version override.

    Returns:
      An AppengineApiClient used by gcloud to communicate with the App Engine
      API.

    Raises:
      ValueError: If default_version does not correspond to a supported version
        of the API.
    """
    version = cls.ApiVersion() if api_version is None else api_version
    return cls(core_apis.GetClientInstance('appengine', version))

  def _FormatApp(self):
    """Returns the relative name of the current project's app resource."""
    app_ref = resources.REGISTRY.Parse(
        self.project, params={}, collection='appengine.apps')
    return app_ref.RelativeName()

  def _GetServiceRelativeName(self, service_name):
    """Returns the relative name for a service in the current app."""
    service_ref = resources.REGISTRY.Parse(
        service_name,
        params={'appsId': self.project},
        collection='appengine.apps.services')
    return service_ref.RelativeName()

  def _FormatVersion(self, service_name, version_id):
    """Returns the relative name for a version of the given service."""
    version_ref = resources.REGISTRY.Parse(
        version_id,
        params={'appsId': self.project, 'servicesId': service_name},
        collection='appengine.apps.services.versions')
    return version_ref.RelativeName()

  def _FormatOperation(self, op_id):
    """Returns the relative name for an operation in the current app."""
    op_ref = resources.REGISTRY.Parse(
        op_id,
        params={'appsId': self.project},
        collection='appengine.apps.operations')
    return op_ref.RelativeName()

View File

@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin API."""
from googlecloudsdk.api_lib.app import operations_util
from googlecloudsdk.api_lib.app.api import appengine_api_client_base as base
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.core import log
from googlecloudsdk.core import resources
# Admin API version used for 'app update' calls on every release track.
DEFAULT_VERSION = 'v1beta'
# 'app update' is currently only exposed in beta.
UPDATE_VERSIONS_MAP = {
    calliope_base.ReleaseTrack.GA: DEFAULT_VERSION,
    calliope_base.ReleaseTrack.ALPHA: DEFAULT_VERSION,
    calliope_base.ReleaseTrack.BETA: DEFAULT_VERSION
}
def GetApiClientForTrack(release_track):
  """Returns an AppengineAppUpdateApiClient for the given release track."""
  api_version = UPDATE_VERSIONS_MAP[release_track]
  return AppengineAppUpdateApiClient.GetApiClient(api_version)
class AppengineAppUpdateApiClient(base.AppengineApiClientBase):
  """Client used by gcloud to communicate with the App Engine API."""

  def __init__(self, client):
    base.AppengineApiClientBase.__init__(self, client)
    self._registry = resources.REGISTRY.Clone()
    # pylint: disable=protected-access
    self._registry.RegisterApiByName('appengine', client._VERSION)

  def PatchApplication(
      self, split_health_checks=None, service_account=None, ssl_policy=None
  ):
    """Updates an application.

    Args:
      split_health_checks: Boolean, whether to enable split health checks by
        default.
      service_account: str, the app-level default service account to update for
        this App Engine app.
      ssl_policy: enum, the app-level SSL policy to update for this App Engine
        app. Can be DEFAULT or MODERN.

    Returns:
      Long running operation.
    """
    # Each comma-terminated fragment names one field to patch; only fields
    # listed in the mask are touched by the server.
    mask_parts = []
    if split_health_checks is not None:
      mask_parts.append('featureSettings.splitHealthChecks,')
    if service_account is not None:
      mask_parts.append('serviceAccount,')
    if ssl_policy is not None:
      mask_parts.append('sslPolicy,')
    update_mask = ''.join(mask_parts)

    app_update = self.messages.Application()
    app_update.featureSettings = self.messages.FeatureSettings(
        splitHealthChecks=split_health_checks)
    app_update.serviceAccount = service_account
    app_update.sslPolicy = ssl_policy

    update_request = self.messages.AppengineAppsPatchRequest(
        name=self._FormatApp(),
        application=app_update,
        updateMask=update_mask)
    operation = self.client.apps.Patch(update_request)
    log.debug('Received operation: [{operation}] with mask [{mask}]'.format(
        operation=operation.name,
        mask=update_mask))
    # Block until the patch operation finishes.
    return operations_util.WaitForOperation(self.client.apps_operations,
                                            operation)

View File

@@ -0,0 +1,177 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.app import operations_util
from googlecloudsdk.api_lib.app.api import appengine_api_client_base as base
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import resources
# Admin API version used for domain-mapping calls, keyed by release track.
DOMAINS_VERSION_MAP = {
    calliope_base.ReleaseTrack.GA: 'v1',
    calliope_base.ReleaseTrack.ALPHA: 'v1alpha',
    calliope_base.ReleaseTrack.BETA: 'v1beta'
}
def GetApiClientForTrack(release_track):
  """Returns an AppengineDomainsApiClient for the given release track."""
  api_version = DOMAINS_VERSION_MAP[release_track]
  return AppengineDomainsApiClient.GetApiClient(api_version)
class AppengineDomainsApiClient(base.AppengineApiClientBase):
  """Client used by gcloud to communicate with the App Engine API."""

  def __init__(self, client):
    base.AppengineApiClientBase.__init__(self, client)
    self._registry = resources.REGISTRY.Clone()
    # pylint: disable=protected-access
    self._registry.RegisterApiByName('appengine', client._VERSION)

  def DeleteDomainMapping(self, domain):
    """Deletes a domain mapping for the given application.

    Blocks until the delete operation completes.

    Args:
      domain: str, the domain to delete.
    """
    delete_request = self.messages.AppengineAppsDomainMappingsDeleteRequest(
        name=self._FormatDomainMapping(domain))
    operation = self.client.apps_domainMappings.Delete(delete_request)
    operations_util.WaitForOperation(self.client.apps_operations, operation)

  def GetDomainMapping(self, domain):
    """Gets a domain mapping for the given application.

    Args:
      domain: str, the domain to retrieve.

    Returns:
      The retrieved DomainMapping object.
    """
    get_request = self.messages.AppengineAppsDomainMappingsGetRequest(
        name=self._FormatDomainMapping(domain))
    return self.client.apps_domainMappings.Get(get_request)

  def ListDomainMappings(self):
    """Lists all domain mappings for the given application.

    Returns:
      A list of DomainMapping objects.
    """
    list_request = self.messages.AppengineAppsDomainMappingsListRequest(
        parent=self._FormatApp())
    return self.client.apps_domainMappings.List(list_request).domainMappings

  def ListVerifiedDomains(self):
    """Lists all domains verified by the current user.

    Returns:
      A list of AuthorizedDomain objects.
    """
    list_request = self.messages.AppengineAppsAuthorizedDomainsListRequest(
        parent=self._FormatApp())
    return self.client.apps_authorizedDomains.List(list_request).domains

  def CreateDomainMapping(self, domain, certificate_id, management_type):
    """Creates a domain mapping for the given application.

    Args:
      domain: str, the custom domain string.
      certificate_id: str, a certificate id for the new domain.
      management_type: SslSettings.SslManagementTypeValueValuesEnum,
        AUTOMATIC or MANUAL certificate provisioning.

    Returns:
      The created DomainMapping object.
    """
    ssl_settings = self.messages.SslSettings(
        certificateId=certificate_id, sslManagementType=management_type)
    create_request = self.messages.AppengineAppsDomainMappingsCreateRequest(
        parent=self._FormatApp(),
        domainMapping=self.messages.DomainMapping(
            id=domain, sslSettings=ssl_settings))
    operation = self.client.apps_domainMappings.Create(create_request)
    # The finished operation carries the created mapping in its response.
    return operations_util.WaitForOperation(self.client.apps_operations,
                                            operation).response

  def UpdateDomainMapping(self,
                          domain,
                          certificate_id,
                          no_certificate_id,
                          management_type):
    """Updates a domain mapping for the given application.

    Args:
      domain: str, the custom domain string.
      certificate_id: str, a certificate id for the domain.
      no_certificate_id: bool, remove the certificate id from the domain.
      management_type: SslSettings.SslManagementTypeValueValuesEnum,
        AUTOMATIC or MANUAL certificate provisioning.

    Returns:
      The updated DomainMapping object.

    Raises:
      exceptions.MinimumArgumentException: if no updatable attribute was given.
    """
    # Build the update mask; only masked fields are modified server-side.
    mask_fields = []
    if certificate_id or no_certificate_id:
      mask_fields.append('sslSettings.certificateId')
    if management_type:
      mask_fields.append('sslSettings.sslManagementType')
    if not mask_fields:
      raise exceptions.MinimumArgumentException(
          ['--[no-]certificate-id', '--no_managed_certificate'],
          'Please specify at least one attribute to the domain-mapping update.')
    ssl_settings = self.messages.SslSettings(
        certificateId=certificate_id, sslManagementType=management_type)
    patch_request = self.messages.AppengineAppsDomainMappingsPatchRequest(
        name=self._FormatDomainMapping(domain),
        domainMapping=self.messages.DomainMapping(
            id=domain, sslSettings=ssl_settings),
        updateMask=','.join(mask_fields))
    operation = self.client.apps_domainMappings.Patch(patch_request)
    return operations_util.WaitForOperation(self.client.apps_operations,
                                            operation).response

  def _FormatDomainMapping(self, domain):
    """Returns the relative resource name for a domain mapping."""
    mapping_ref = self._registry.Parse(
        domain,
        params={'appsId': self.project},
        collection='appengine.apps.domainMappings')
    return mapping_ref.RelativeName()

View File

@@ -0,0 +1,162 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app.api import appengine_api_client_base as base
from googlecloudsdk.calliope import base as calliope_base
# Admin API version used for firewall-rule calls, keyed by release track.
VERSION_MAP = {
    calliope_base.ReleaseTrack.GA: 'v1',
    calliope_base.ReleaseTrack.ALPHA: 'v1alpha',
    calliope_base.ReleaseTrack.BETA: 'v1beta'
}
def GetApiClientForTrack(release_track):
  """Returns an AppengineFirewallApiClient for the given release track."""
  return AppengineFirewallApiClient.GetApiClient(VERSION_MAP[release_track])
class AppengineFirewallApiClient(base.AppengineApiClientBase):
  """Client used by gcloud to communicate with the App Engine API."""

  def __init__(self, client):
    base.AppengineApiClientBase.__init__(self, client)

  def Create(self, priority, source_range, action, description):
    """Creates a firewall rule for the given application.

    Args:
      priority: int, the priority of the rule between [1, 2^31-1].
        The default rule may not be created, only updated.
      source_range: str, the ip address or range to take action on.
      action: firewall_rules_util.Action, optional action to take on matched
        addresses.
      description: str, an optional string description of the rule.

    Returns:
      The new firewall rule.
    """
    new_rule = self.messages.FirewallRule(
        priority=priority,
        action=action,
        description=description,
        sourceRange=source_range)
    create_request = (
        self.messages.AppengineAppsFirewallIngressRulesCreateRequest(
            parent=self._FormatApp(), firewallRule=new_rule))
    return self.client.apps_firewall_ingressRules.Create(create_request)

  def Delete(self, resource):
    """Deletes a firewall rule for the given application.

    Args:
      resource: the resource reference of the firewall rule to delete.
    """
    delete_request = (
        self.messages.AppengineAppsFirewallIngressRulesDeleteRequest(
            name=resource.RelativeName()))
    self.client.apps_firewall_ingressRules.Delete(delete_request)

  def List(self, matching_address=None):
    """Lists all ingress firewall rules for the given application.

    Args:
      matching_address: str, an optional ip address to filter matching rules.

    Returns:
      A lazily-paged iterable of FirewallRule objects.
    """
    list_request = self.messages.AppengineAppsFirewallIngressRulesListRequest(
        parent=self._FormatApp(), matchingAddress=matching_address)
    return list_pager.YieldFromList(
        self.client.apps_firewall_ingressRules,
        list_request,
        field='ingressRules',
        batch_size_attribute='pageSize')

  def Get(self, resource):
    """Gets a firewall rule for the given application.

    Args:
      resource: the resource reference of the firewall rule to fetch.

    Returns:
      A FirewallRule object.
    """
    get_request = self.messages.AppengineAppsFirewallIngressRulesGetRequest(
        name=resource.RelativeName())
    return self.client.apps_firewall_ingressRules.Get(get_request)

  def Update(self,
             resource,
             priority,
             source_range=None,
             action=None,
             description=None):
    """Updates a firewall rule for the given application.

    Args:
      resource: the resource reference of the firewall rule to update.
      priority: int, the priority of the rule.
      source_range: str, optional ip address or range to take action on.
      action: firewall_rules_util.Action, optional action to take on matched
        addresses.
      description: str, optional string description of the rule.

    Returns:
      The updated firewall rule.

    Raises:
      NoFieldsSpecifiedError: when no fields have been specified for the update.
    """
    # Only fields listed in the mask are modified server-side.
    mask_fields = []
    if action:
      mask_fields.append('action')
    if source_range:
      mask_fields.append('sourceRange')
    if description:
      mask_fields.append('description')
    if not mask_fields:
      raise util.NoFieldsSpecifiedError()
    updated_rule = self.messages.FirewallRule(
        priority=priority,
        action=action,
        description=description,
        sourceRange=source_range)
    patch_request = (
        self.messages.AppengineAppsFirewallIngressRulesPatchRequest(
            name=resource.RelativeName(),
            firewallRule=updated_rule,
            updateMask=','.join(mask_fields)))
    return self.client.apps_firewall_ingressRules.Patch(patch_request)

View File

@@ -0,0 +1,191 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin SSL APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.app.api import appengine_api_client_base as base
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import resources
from googlecloudsdk.core.util import files
# Admin API version used for ssl-certificate calls, keyed by release track.
SSL_VERSIONS_MAP = {
    calliope_base.ReleaseTrack.GA: 'v1',
    calliope_base.ReleaseTrack.ALPHA: 'v1alpha',
    calliope_base.ReleaseTrack.BETA: 'v1beta'
}
def GetApiClientForTrack(release_track):
  """Retrieves a client based on the release track.

  The API clients override the base class for each track so that methods with
  functional differences can be overridden. The ssl-certificates api does not
  have API changes for alpha, but output is formatted differently, so the alpha
  override simply calls the new API.

  Args:
    release_track: calliope_base.ReleaseTrack, the release track of the command

  Returns:
    A client that calls appengine using the v1beta or v1alpha API.
  """
  return AppengineSslApiClient.GetApiClient(SSL_VERSIONS_MAP[release_track])
class AppengineSslApiClient(base.AppengineApiClientBase):
  """Client used by gcloud to communicate with the App Engine SSL APIs."""

  def __init__(self, client):
    base.AppengineApiClientBase.__init__(self, client)
    self._registry = resources.REGISTRY.Clone()
    # pylint: disable=protected-access
    self._registry.RegisterApiByName('appengine', client._VERSION)

  def CreateSslCertificate(self, display_name, cert_path, private_key_path):
    """Creates a certificate for the given application.

    Args:
      display_name: str, the display name for the new certificate.
      cert_path: str, location on disk to a certificate file.
      private_key_path: str, location on disk to a private key file.

    Returns:
      The created AuthorizedCertificate object.

    Raises:
      Error if the file does not exist or can't be opened/read.
    """
    # Read the certificate first, then the key, so a missing certificate
    # file is reported before a missing key file.
    certificate_data = files.ReadFileContents(cert_path)
    private_key_data = files.ReadFileContents(private_key_path)
    raw_data = self.messages.CertificateRawData(
        privateKey=private_key_data, publicCertificate=certificate_data)
    create_request = (
        self.messages.AppengineAppsAuthorizedCertificatesCreateRequest(
            parent=self._FormatApp(),
            authorizedCertificate=self.messages.AuthorizedCertificate(
                displayName=display_name, certificateRawData=raw_data)))
    return self.client.apps_authorizedCertificates.Create(create_request)

  def DeleteSslCertificate(self, cert_id):
    """Deletes an authorized certificate for the given application.

    Args:
      cert_id: str, the id of the certificate to delete.
    """
    delete_request = (
        self.messages.AppengineAppsAuthorizedCertificatesDeleteRequest(
            name=self._FormatSslCert(cert_id)))
    self.client.apps_authorizedCertificates.Delete(delete_request)

  def GetSslCertificate(self, cert_id):
    """Gets a certificate for the given application.

    Args:
      cert_id: str, the id of the certificate to retrieve.

    Returns:
      The retrieved AuthorizedCertificate object.
    """
    # Ask for the FULL_CERTIFICATE view rather than the default one.
    full_view = (self.messages.AppengineAppsAuthorizedCertificatesGetRequest
                 .ViewValueValuesEnum.FULL_CERTIFICATE)
    get_request = (
        self.messages.AppengineAppsAuthorizedCertificatesGetRequest(
            name=self._FormatSslCert(cert_id), view=full_view))
    return self.client.apps_authorizedCertificates.Get(get_request)

  def ListSslCertificates(self):
    """Lists all authorized certificates for the given application.

    Returns:
      A list of AuthorizedCertificate objects.
    """
    list_request = (
        self.messages.AppengineAppsAuthorizedCertificatesListRequest(
            parent=self._FormatApp()))
    return self.client.apps_authorizedCertificates.List(
        list_request).certificates

  def UpdateSslCertificate(self,
                           cert_id,
                           display_name=None,
                           cert_path=None,
                           private_key_path=None):
    """Updates a certificate for the given application.

    One of display_name, cert_path, or private_key_path should be set. Omitted
    fields will not be updated from their current value. Any invalid arguments
    will fail the entire command.

    Args:
      cert_id: str, the id of the certificate to update.
      display_name: str, the display name for a new certificate.
      cert_path: str, location on disk to a certificate file.
      private_key_path: str, location on disk to a private key file.

    Returns:
      The updated AuthorizedCertificate object.

    Raises:
      exceptions.RequiredArgumentException: if only one of certificate and
        private key was provided.
      exceptions.MinimumArgumentException: if no updatable field was provided.
    """
    # The certificate and key are a matched pair; exactly one is an error.
    if bool(cert_path) ^ bool(private_key_path):
      missing_arg = '--certificate' if not cert_path else '--private-key'
      raise exceptions.RequiredArgumentException(
          missing_arg,
          'The certificate and the private key must both be updated together.')

    mask_fields = []
    if display_name:
      mask_fields.append('displayName')
    cert_data = None
    if cert_path and private_key_path:
      certificate = files.ReadFileContents(cert_path)
      private_key = files.ReadFileContents(private_key_path)
      cert_data = self.messages.CertificateRawData(
          privateKey=private_key, publicCertificate=certificate)
      mask_fields.append('certificateRawData')
    if not mask_fields:
      raise exceptions.MinimumArgumentException([
          '--certificate', '--private-key', '--display-name'
      ], 'Please specify at least one attribute to the certificate update.')

    patch_request = (
        self.messages.AppengineAppsAuthorizedCertificatesPatchRequest(
            name=self._FormatSslCert(cert_id),
            authorizedCertificate=self.messages.AuthorizedCertificate(
                displayName=display_name, certificateRawData=cert_data),
            updateMask=','.join(mask_fields)))
    return self.client.apps_authorizedCertificates.Patch(patch_request)

  def _FormatSslCert(self, cert_id):
    """Returns the relative resource name for an authorized certificate."""
    cert_ref = self._registry.Parse(
        cert_id,
        params={'appsId': self.project},
        collection='appengine.apps.authorizedCertificates')
    return cert_ref.RelativeName()

View File

@@ -0,0 +1,983 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating a client to talk to the App Engine Admin API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import json
import operator
from apitools.base.py import encoding
from apitools.base.py import exceptions as apitools_exceptions
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.app import build as app_cloud_build
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import exceptions
from googlecloudsdk.api_lib.app import instances_util
from googlecloudsdk.api_lib.app import operations_util
from googlecloudsdk.api_lib.app import region_util
from googlecloudsdk.api_lib.app import service_util
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app import version_util
from googlecloudsdk.api_lib.app.api import appengine_api_client_base
from googlecloudsdk.api_lib.cloudbuild import logs as cloudbuild_logs
from googlecloudsdk.appengine.admin.tools.conversion import convert_yaml
from googlecloudsdk.appengine.api import appinfo
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
import six
from six.moves import filter # pylint: disable=redefined-builtin
from six.moves import map # pylint: disable=redefined-builtin
# Admin API version used by the main App Engine client, keyed by release track.
APPENGINE_VERSIONS_MAP = {
    calliope_base.ReleaseTrack.GA: 'v1',
    calliope_base.ReleaseTrack.ALPHA: 'v1alpha',
    calliope_base.ReleaseTrack.BETA: 'v1beta'
}
def GetApiClientForTrack(release_track):
  """Returns an AppengineApiClient for the given release track."""
  return AppengineApiClient.GetApiClient(
      APPENGINE_VERSIONS_MAP[release_track])
# Runtime ids treated as first-generation (gen1) runtimes.
gen1_runtimes = ['python27']
class AppengineApiClient(appengine_api_client_base.AppengineApiClientBase):
"""Client used by gcloud to communicate with the App Engine API."""
def GetApplication(self):
"""Retrieves the application resource.
Returns:
An app resource representing the project's app.
Raises:
apitools_exceptions.HttpNotFoundError if app doesn't exist
"""
request = self.messages.AppengineAppsGetRequest(name=self._FormatApp())
return self.client.apps.Get(request)
def ListRuntimes(self, environment):
"""Lists the available runtimes for the given App Engine environment.
Args:
environment: The environment for the application, either Standard or
Flexible.
Returns:
v1beta|v1.ListRuntimesResponse, the list of Runtimes.
Raises:
apitools_exceptions.HttpNotFoundError if app doesn't exist
"""
request = self.messages.AppengineAppsListRuntimesRequest(
parent=self._FormatApp(), environment=environment
)
return self.client.apps.ListRuntimes(request)
def IsStopped(self, app):
"""Checks application resource to get serving status.
Args:
app: appengine_v1_messages.Application, the application to check.
Returns:
bool, whether the application is currently disabled. If serving or not
set, returns False.
"""
stopped = app.servingStatus in [
self.messages.Application.ServingStatusValueValuesEnum.USER_DISABLED,
self.messages.Application.ServingStatusValueValuesEnum.SYSTEM_DISABLED]
return stopped
def RepairApplication(self, progress_message=None):
"""Creates missing app resources.
In particular, the Application.code_bucket GCS reference.
Args:
progress_message: str, the message to use while the operation is polled,
if not the default.
Returns:
A long running operation.
"""
request = self.messages.AppengineAppsRepairRequest(
name=self._FormatApp(),
repairApplicationRequest=self.messages.RepairApplicationRequest())
operation = self.client.apps.Repair(request)
log.debug('Received operation: [{operation}]'.format(
operation=operation.name))
return operations_util.WaitForOperation(
self.client.apps_operations, operation, message=progress_message)
def CreateApp(self, location, service_account=None, ssl_policy=None):
"""Creates an App Engine app within the current cloud project.
Creates a new singleton app within the currently selected Cloud Project.
The action is one-time and irreversible.
Args:
location: str, The location (region) of the app, i.e. "us-central"
service_account: str, The app level service account of the app, i.e.
"123@test-app.iam.gserviceaccount.com"
ssl_policy: enum, the app-level SSL policy to update for this App Engine
app. Can be DEFAULT or MODERN.
Raises:
apitools_exceptions.HttpConflictError if app already exists
Returns:
A long running operation.
"""
create_request = None
if service_account:
create_request = self.messages.Application(
id=self.project, locationId=location, serviceAccount=service_account)
else:
create_request = self.messages.Application(
id=self.project, locationId=location)
if ssl_policy:
create_request.sslPolicy = ssl_policy
operation = self.client.apps.Create(create_request)
log.debug('Received operation: [{operation}]'.format(
operation=operation.name))
message = ('Creating App Engine application in project [{project}] and '
'region [{region}].'.format(project=self.project,
region=location))
return operations_util.WaitForOperation(self.client.apps_operations,
operation, message=message)
  def DeployService(self,
                    service_name,
                    version_id,
                    service_config,
                    manifest,
                    build,
                    extra_config_settings=None,
                    service_account_email=None):
    """Updates and deploys new app versions.

    Args:
      service_name: str, The service to deploy.
      version_id: str, The version of the service to deploy.
      service_config: AppInfoExternal, Service info parsed from a service yaml
        file.
      manifest: Dictionary mapping source files to Google Cloud Storage
        locations.
      build: BuildArtifact, a wrapper which contains either the build
        ID for an in-progress parallel build, the name of the container image
        for a serial build, or the options for creating a build elsewhere. Not
        present during standard deploys.
      extra_config_settings: dict, client config settings to pass to the server
        as beta settings.
      service_account_email: Identity of this deployed version. If not set, the
        Admin API will fall back to use the App Engine default appspot service
        account.

    Returns:
      The Admin API Operation, unfinished.

    Raises:
      apitools_exceptions.HttpNotFoundError if build ID doesn't exist
    """
    # Kick off version creation; the returned long-running operation is
    # polled in stages below.
    operation = self._CreateVersion(service_name, version_id, service_config,
                                    manifest, build, extra_config_settings,
                                    service_account_email)
    message = 'Updating service [{service}]'.format(service=service_name)
    if service_config.env in [env.FLEX, env.MANAGED_VMS]:
      message += ' (this may take several minutes)'
    # Message type used to extract build info from the operation metadata.
    operation_metadata_type = self._ResolveMetadataType()
    # This indicates that a server-side build should be created.
    if build and build.IsBuildOptions():
      if not operation_metadata_type:
        log.warning('Unable to determine build from Operation metadata. '
                    'Skipping log streaming')
      else:
        # Poll the operation until the build is present.
        poller = operations_util.AppEngineOperationBuildPoller(
            self.client.apps_operations, operation_metadata_type)
        operation = operations_util.WaitForOperation(
            self.client.apps_operations, operation, message=message,
            poller=poller)
        build_id = operations_util.GetBuildFromOperation(
            operation, operation_metadata_type)
        if build_id:
          # Switch to a build-id artifact so logs are streamed below.
          build = app_cloud_build.BuildArtifact.MakeBuildIdArtifact(build_id)
    if build and build.IsBuildId():
      try:
        # First try the global builds collection.
        build_ref = resources.REGISTRY.Parse(
            build.identifier,
            params={'projectId': properties.VALUES.core.project.GetOrFail},
            collection='cloudbuild.projects.builds')
        cloudbuild_logs.CloudBuildClient().Stream(build_ref, out=log.status)
      except apitools_exceptions.HttpNotFoundError:
        # Not found globally: retry against the regional builds collection,
        # using the region derived from the app's location.
        region = util.ConvertToCloudRegion(self.GetApplication().locationId)
        build_ref = resources.REGISTRY.Create(
            collection='cloudbuild.projects.locations.builds',
            projectsId=properties.VALUES.core.project.GetOrFail,
            locationsId=region,
            buildsId=build.identifier)
        cloudbuild_logs.CloudBuildClient().Stream(build_ref, out=log.status)
    # Finally, wait for the overall deployment operation to complete.
    done_poller = operations_util.AppEngineOperationPoller(
        self.client.apps_operations, operation_metadata_type)
    return operations_util.WaitForOperation(
        self.client.apps_operations,
        operation,
        poller=done_poller)
def _ResolveMetadataType(self):
"""Attempts to resolve the expected type for the operation metadata."""
# pylint: disable=protected-access
# TODO(b/74075874): Update ApiVersion method to accurately reflect client.
metadata_type_name = 'OperationMetadata' + self.client._VERSION.title()
# pylint: enable=protected-access
return getattr(self.messages, metadata_type_name)
def _CreateVersion(self,
service_name,
version_id,
service_config,
manifest,
build,
extra_config_settings=None,
service_account_email=None):
"""Begins the updates and deployment of new app versions.
Args:
service_name: str, The service to deploy.
version_id: str, The version of the service to deploy.
service_config: AppInfoExternal, Service info parsed from a service yaml
file.
manifest: Dictionary mapping source files to Google Cloud Storage
locations.
build: BuildArtifact, a wrapper which contains either the build ID for an
in-progress parallel build, the name of the container image for a serial
build, or the options to pass to Appengine for a server-side build.
extra_config_settings: dict, client config settings to pass to the server
as beta settings.
service_account_email: Identity of this deployed version. If not set, the
Admin API will fall back to use the App Engine default appspot service
account.
Returns:
The Admin API Operation, unfinished.
"""
version_resource = self._CreateVersionResource(service_config, manifest,
version_id, build,
extra_config_settings,
service_account_email)
create_request = self.messages.AppengineAppsServicesVersionsCreateRequest(
parent=self._GetServiceRelativeName(service_name=service_name),
version=version_resource)
return self.client.apps_services_versions.Create(create_request)
def GetServiceResource(self, service):
  """Fetches the resource describing a single service.

  Args:
    service: str, the ID of the service.

  Returns:
    Service resource object from the API.
  """
  get_request = self.messages.AppengineAppsServicesGetRequest(
      name=self._GetServiceRelativeName(service))
  return self.client.apps_services.Get(get_request)
def SetDefaultVersion(self, service_name, version_id):
  """Makes the given version the default serving version of the service.

  Args:
    service_name: str, The service name.
    version_id: str, The version to set as default.

  Returns:
    Long running operation.
  """
  # Routing 100% of traffic to a single version is equivalent to making it
  # the default serving version.
  return self.SetTrafficSplit(service_name, {version_id: 1.0})
def SetTrafficSplit(self, service_name, allocations,
                    shard_by='UNSPECIFIED', migrate=False):
  """Replaces the traffic split for the given service.

  Args:
    service_name: str, The service name.
    allocations: A dict mapping version ID to traffic split fraction.
    shard_by: A ShardByValuesEnum value specifying how to shard the traffic.
    migrate: Whether or not to migrate traffic.

  Returns:
    Long running operation.
  """
  split_message = encoding.PyValueToMessage(
      self.messages.TrafficSplit,
      {'allocations': allocations, 'shardBy': shard_by})
  patch_request = self.messages.AppengineAppsServicesPatchRequest(
      name=self._GetServiceRelativeName(service_name=service_name),
      service=self.messages.Service(split=split_message),
      migrateTraffic=migrate,
      updateMask='split')
  operation = self.client.apps_services.Patch(patch_request)
  return operations_util.WaitForOperation(
      self.client.apps_operations, operation,
      message='Setting traffic split for service [{service}]'.format(
          service=service_name))
def SetIngressTrafficAllowed(self, service_name, ingress_traffic_allowed):
  """Sets which ingress traffic is allowed to reach a service.

  Args:
    service_name: str, The service name.
    ingress_traffic_allowed: An IngressTrafficAllowed enum.

  Returns:
    The completed Operation. The Operation will contain a Service resource.
  """
  settings = self.messages.NetworkSettings(
      ingressTrafficAllowed=ingress_traffic_allowed)
  patch_request = self.messages.AppengineAppsServicesPatchRequest(
      name=self._GetServiceRelativeName(service_name=service_name),
      service=self.messages.Service(networkSettings=settings),
      updateMask='networkSettings')
  operation = self.client.apps_services.Patch(patch_request)
  return operations_util.WaitForOperation(
      self.client.apps_operations, operation,
      message='Setting ingress settings for service [{service}]'.format(
          service=service_name))
def DeleteVersion(self, service_name, version_id):
  """Deletes the specified version of the given service.

  Args:
    service_name: str, The service name.
    version_id: str, The version to delete.

  Returns:
    The completed Operation.
  """
  request = self.messages.AppengineAppsServicesVersionsDeleteRequest(
      name=self._FormatVersion(service_name=service_name,
                               version_id=version_id))
  operation = self.client.apps_services_versions.Delete(request)
  return operations_util.WaitForOperation(
      self.client.apps_operations, operation,
      message='Deleting [{0}/{1}]'.format(service_name, version_id))
def SetServingStatus(self, service_name, version_id, serving_status,
                     block=True):
  """Sets the serving status of the specified version.

  Args:
    service_name: str, The service name.
    version_id: str, The version whose status to change.
    serving_status: The serving status to set.
    block: bool, whether to block on the completion of the operation.

  Returns:
    The completed Operation if block is True, or the Operation to wait on
    otherwise.
  """
  patch_request = self.messages.AppengineAppsServicesVersionsPatchRequest(
      name=self._FormatVersion(service_name=service_name,
                               version_id=version_id),
      version=self.messages.Version(servingStatus=serving_status),
      updateMask='servingStatus')
  operation = self.client.apps_services_versions.Patch(patch_request)
  if not block:
    return operation
  return operations_util.WaitForOperation(self.client.apps_operations,
                                          operation)
def ListInstances(self, versions):
  """Collects all instances for the given versions.

  Versions deleted after they were enumerated are silently dropped.

  Args:
    versions: list of version_util.Version.

  Returns:
    A list of instances_util.Instance objects for the given versions.
  """
  result = []
  for version in versions:
    list_request = (
        self.messages.AppengineAppsServicesVersionsInstancesListRequest(
            parent=self._FormatVersion(version.service, version.id)))
    try:
      for instance in list_pager.YieldFromList(
          self.client.apps_services_versions_instances,
          list_request,
          field='instances',
          batch_size=100,  # Set batch size so tests can expect it.
          batch_size_attribute='pageSize'):
        result.append(instances_util.Instance.FromInstanceResource(instance))
    except apitools_exceptions.HttpNotFoundError:
      # The version was presumably deleted since initial enumeration.
      pass
  return result
def GetAllInstances(self, service=None, version=None, version_filter=None):
  """Lists all instances, optionally filtering by service or version.

  Args:
    service: str, the ID of the service to filter by.
    version: str, the ID of the version to filter by.
    version_filter: filter function accepting version_util.Version.

  Returns:
    generator of instance_util.Instance.
  """
  all_services = self.ListServices()
  log.debug('All services: {0}'.format(all_services))
  matching_services = service_util.GetMatchingServices(
      all_services, [service] if service else None)
  all_versions = self.ListVersions(matching_services)
  log.debug('Versions: {0}'.format(list(map(str, all_versions))))
  matching_versions = version_util.GetMatchingVersions(
      all_versions, [version] if version else None, service)
  matching_versions = list(filter(version_filter, matching_versions))
  return self.ListInstances(matching_versions)
def DebugInstance(self, res, ssh_key=None):
  """Enables debugging of a Flexible instance.

  Args:
    res: A googlecloudsdk.core.Resource object.
    ssh_key: str, Public SSH key to add to the instance. Examples:
      `[USERNAME]:ssh-rsa [KEY_VALUE] [USERNAME]` ,
      `[USERNAME]:ssh-rsa [KEY_VALUE] google-ssh {"userName":"[USERNAME]",`
      `"expireOn":"[EXPIRE_TIME]"}`
      For more information, see Adding and Removing SSH Keys
      (https://cloud.google.com/compute/docs/instances/adding-removing-ssh-
      keys).

  Returns:
    The completed Operation.
  """
  debug_request = (
      self.messages.AppengineAppsServicesVersionsInstancesDebugRequest(
          name=res.RelativeName(),
          debugInstanceRequest=self.messages.DebugInstanceRequest(
              sshKey=ssh_key)))
  operation = self.client.apps_services_versions_instances.Debug(
      debug_request)
  return operations_util.WaitForOperation(self.client.apps_operations,
                                          operation)
def DeleteInstance(self, res):
  """Deletes a Flexible instance.

  Args:
    res: A googlecloudsdk.core.Resource object.

  Returns:
    The completed Operation.
  """
  delete_request = (
      self.messages.AppengineAppsServicesVersionsInstancesDeleteRequest(
          name=res.RelativeName()))
  operation = self.client.apps_services_versions_instances.Delete(
      delete_request)
  return operations_util.WaitForOperation(self.client.apps_operations,
                                          operation)
def GetInstanceResource(self, res):
  """Describes the given instance of the given version of the given service.

  Args:
    res: A googlecloudsdk.core.Resource object.

  Raises:
    apitools_exceptions.HttpNotFoundError: If the instance does not exist.

  Returns:
    Instance resource object from the API.
  """
  get_request = (
      self.messages.AppengineAppsServicesVersionsInstancesGetRequest(
          name=res.RelativeName()))
  return self.client.apps_services_versions_instances.Get(get_request)
def StopVersion(self, service_name, version_id, block=True):
  """Stops the specified version by setting its serving status to STOPPED.

  Args:
    service_name: str, The service name.
    version_id: str, The version to stop.
    block: bool, whether to block on the completion of the operation.

  Returns:
    The completed Operation if block is True, or the Operation to wait on
    otherwise.
  """
  stopped = self.messages.Version.ServingStatusValueValuesEnum.STOPPED
  return self.SetServingStatus(service_name, version_id, stopped, block)
def StartVersion(self, service_name, version_id, block=True):
  """Starts the specified version by setting its serving status to SERVING.

  Args:
    service_name: str, The service name.
    version_id: str, The version to start.
    block: bool, whether to block on the completion of the operation.

  Returns:
    The completed Operation if block is True, or the Operation to wait on
    otherwise.
  """
  serving = self.messages.Version.ServingStatusValueValuesEnum.SERVING
  return self.SetServingStatus(service_name, version_id, serving, block)
def ListServices(self):
  """Lists all services for the current application.

  Returns:
    A list of service_util.Service objects.
  """
  list_request = self.messages.AppengineAppsServicesListRequest(
      parent=self._FormatApp())
  result = []
  for service in list_pager.YieldFromList(
      self.client.apps_services, list_request, field='services',
      batch_size=100, batch_size_attribute='pageSize'):
    # Flatten the traffic split message into a {version_id: fraction} dict.
    split = {}
    if service.split:
      for allocation in service.split.allocations.additionalProperties:
        split[allocation.key] = allocation.value
    result.append(service_util.Service(self.project, service.id, split))
  return result
def GetVersionResource(self, service, version):
  """Describes the given version of the given service.

  Args:
    service: str, the ID of the service for the version to describe.
    version: str, the ID of the version to describe.

  Returns:
    Version resource object from the API, with the FULL view.
  """
  full_view = (self.messages.AppengineAppsServicesVersionsGetRequest
               .ViewValueValuesEnum.FULL)
  get_request = self.messages.AppengineAppsServicesVersionsGetRequest(
      name=self._FormatVersion(service, version),
      view=full_view)
  return self.client.apps_services_versions.Get(get_request)
def ListVersions(self, services):
  """Lists all versions for the specified services.

  Services deleted after they were enumerated are silently dropped.

  Args:
    services: A list of service_util.Service objects.

  Returns:
    A list of version_util.Version objects.
  """
  result = []
  for service in services:
    list_request = self.messages.AppengineAppsServicesVersionsListRequest(
        parent=self._GetServiceRelativeName(service.id))
    try:
      for version in list_pager.YieldFromList(
          self.client.apps_services_versions,
          list_request,
          field='versions',
          batch_size=100,
          batch_size_attribute='pageSize'):
        result.append(
            version_util.Version.FromVersionResource(version, service))
    except apitools_exceptions.HttpNotFoundError:
      # The service was presumably deleted since initial enumeration.
      continue
  return result
def ListRegions(self):
  """Lists all regions for the project, with standard and flexible support.

  Returns:
    List of region_util.Region instances for the project.
  """
  list_request = self.messages.AppengineAppsLocationsListRequest(
      name='apps/{0}'.format(self.project))
  locations = list_pager.YieldFromList(
      self.client.apps_locations, list_request, field='locations',
      batch_size=100, batch_size_attribute='pageSize')
  return [region_util.Region.FromRegionResource(loc) for loc in locations]
def DeleteService(self, service_name):
  """Deletes the specified service.

  Args:
    service_name: str, Name of the service to delete.

  Returns:
    The completed Operation.
  """
  request = self.messages.AppengineAppsServicesDeleteRequest(
      name=self._GetServiceRelativeName(service_name=service_name))
  operation = self.client.apps_services.Delete(request)
  return operations_util.WaitForOperation(
      self.client.apps_operations, operation,
      message='Deleting [{}]'.format(service_name))
def GetOperation(self, op_id):
  """Fetches details about a single long-running operation.

  Args:
    op_id: str, ID of operation.

  Returns:
    Operation resource object from the API call.
  """
  get_request = self.messages.AppengineAppsOperationsGetRequest(
      name=self._FormatOperation(op_id))
  return self.client.apps_operations.Get(get_request)
def ListOperations(self, op_filter=None):
  """Lists all operations for the current application.

  Args:
    op_filter: str, filter expression selecting which operations to return.

  Returns:
    A list of operations_util.Operation objects.
  """
  list_request = self.messages.AppengineAppsOperationsListRequest(
      name=self._FormatApp(),
      filter=op_filter)
  return [
      operations_util.Operation(op)
      for op in list_pager.YieldFromList(
          self.client.apps_operations, list_request, field='operations',
          batch_size=100, batch_size_attribute='pageSize')
  ]
def _CreateVersionResource(self,
                           service_config,
                           manifest,
                           version_id,
                           build,
                           extra_config_settings=None,
                           service_account_email=None):
  """Constructs a Version resource for deployment.

  Args:
    service_config: ServiceYamlInfo, Service info parsed from a service yaml
      file.
    manifest: Dictionary mapping source files to Google Cloud Storage
      locations.
    version_id: str, The version of the service.
    build: BuildArtifact, The build ID, image path, or build options.
    extra_config_settings: dict, client config settings to pass to the server
      as beta settings.
    service_account_email: identity of this deployed version. If not set,
      Admin API will fallback to use the App Engine default appspot SA.

  Raises:
    exceptions.ConfigError: if the yaml config is self-contradictory or
      cannot be converted to the App Engine JSON configuration format.
    appinfo.validation.ValidationError: if a bundled service name has no
      corresponding API enum value.

  Returns:
    A Version resource whose Deployment includes either a container pointing
      to a completed image, or a build pointing to an in-progress build.
  """
  # Deep-copy so the mutations below never leak back into service_config.
  config_dict = copy.deepcopy(service_config.parsed.ToDict())
  # We always want to set a value for entrypoint when sending the request
  # to Zeus, even if one wasn't specified in the yaml file
  if 'entrypoint' not in config_dict:
    config_dict['entrypoint'] = ''
  # The two bundled-services spellings are mutually exclusive; reject early.
  if (
      'app_engine_apis' in config_dict
      and 'app_engine_bundled_services' in config_dict
  ):
    raise exceptions.ConfigError(
        'Cannot specify both `app_engine_apis` and '
        '`app_engine_bundled_services` in the same `app.yaml` file.'
    )
  # Convert the parsed yaml dict into the Admin API's JSON representation.
  try:
    # pylint: disable=protected-access
    schema_parser = convert_yaml.GetSchemaParser(self.client._VERSION)
    json_version_resource = schema_parser.ConvertValue(config_dict)
  except ValueError as e:
    raise exceptions.ConfigError(
        '[{f}] could not be converted to the App Engine configuration '
        'format for the following reason: {msg}'.format(
            f=service_config.file, msg=six.text_type(e)))
  log.debug('Converted YAML to JSON: "{0}"'.format(
      json.dumps(json_version_resource, indent=2, sort_keys=True)))
  # Override the 'service_account' in app.yaml if CLI provided this param.
  if service_account_email is not None:
    json_version_resource['serviceAccount'] = service_account_email
  json_version_resource['deployment'] = {}
  # Add the deployment manifest information.
  json_version_resource['deployment']['files'] = manifest
  # Attach the build artifact; the key used depends on the artifact kind.
  if build:
    if build.IsImage():
      json_version_resource['deployment']['container'] = {
          'image': build.identifier
      }
    elif build.IsBuildId():
      json_version_resource['deployment']['build'] = {
          'cloudBuildId': build.identifier
      }
    elif build.IsBuildOptions():
      json_version_resource['deployment']['cloudBuildOptions'] = (
          build.identifier)
  version_resource = encoding.PyValueToMessage(self.messages.Version,
                                               json_version_resource)
  # For consistency in the tests:
  if version_resource.envVariables:
    version_resource.envVariables.additionalProperties.sort(
        key=lambda x: x.key)
  # We need to pipe some settings to the server as beta settings.
  # NOTE: these are merged into the JSON dict *after* version_resource was
  # built; the combined dict is re-read into the message just below.
  if extra_config_settings:
    if 'betaSettings' not in json_version_resource:
      json_version_resource['betaSettings'] = {}
    json_version_resource['betaSettings'].update(extra_config_settings)
  # In the JSON representation, BetaSettings are a dict of key-value pairs.
  # In the Message representation, BetaSettings are an ordered array of
  # key-value pairs. Sort the key-value pairs here, so that unit testing is
  # possible.
  if 'betaSettings' in json_version_resource:
    json_dict = json_version_resource.get('betaSettings')
    attributes = []
    for key, value in sorted(json_dict.items()):
      attributes.append(
          self.messages.Version.BetaSettingsValue.AdditionalProperty(
              key=key, value=value))
    version_resource.betaSettings = self.messages.Version.BetaSettingsValue(
        additionalProperties=attributes)
  # Add the app engine bundled services to the version resource.
  if 'appEngineBundledServices' in json_version_resource:
    bundled_services_enums = []
    for service_name in sorted(
        json_version_resource['appEngineBundledServices']
    ):
      # Bundled service names map to enum values by upper-casing.
      enum_value = service_name.upper()
      log.debug('enum_value: %s', enum_value)
      try:
        bundled_services_enums.append(
            getattr(
                self.messages.Version.AppEngineBundledServicesValueListEntryValuesEnum,
                enum_value,
            )
        )
      except AttributeError:
        raise appinfo.validation.ValidationError(
            f'Invalid bundled service: {service_name}.'
        )
    if bundled_services_enums:
      log.debug(
          'Bundled services enums: %s', bundled_services_enums
      )
      version_resource.appEngineBundledServices = bundled_services_enums
      log.debug(
          'version_resource.appEngineBundledServices: %s',
          version_resource.appEngineBundledServices,
      )
  # The files in the deployment manifest also need to be sorted for unit
  # testing purposes.
  try:
    version_resource.deployment.files.additionalProperties.sort(
        key=operator.attrgetter('key')
    )
  except AttributeError:  # manifest not present, or no files in manifest
    pass
  # Add an ID for the version which is to be created.
  version_resource.id = version_id
  return version_resource
def UpdateDispatchRules(self, dispatch_rules):
  """Replaces the application's dispatch rules.

  Args:
    dispatch_rules: [{'service': str, 'domain': str, 'path': str}],
      dispatch-rules to set-and-replace.

  Returns:
    Long running operation.
  """
  update_mask = 'dispatchRules,'
  rules = [self.messages.UrlDispatchRule(**rule) for rule in dispatch_rules]
  patch_request = self.messages.AppengineAppsPatchRequest(
      name=self._FormatApp(),
      application=self.messages.Application(dispatchRules=rules),
      updateMask=update_mask)
  operation = self.client.apps.Patch(patch_request)
  log.debug('Received operation: [{operation}] with mask [{mask}]'.format(
      operation=operation.name,
      mask=update_mask))
  return operations_util.WaitForOperation(self.client.apps_operations,
                                          operation)
def UpdateDatabaseType(self, database_type):
  """Updates the application's database type.

  Args:
    database_type: New database type to switch to.

  Returns:
    Long running operation.
  """
  update_mask = 'databaseType'
  patch_request = self.messages.AppengineAppsPatchRequest(
      name=self._FormatApp(),
      application=self.messages.Application(databaseType=database_type),
      updateMask=update_mask)
  operation = self.client.apps.Patch(patch_request)
  log.debug('Received operation: [{operation}] with mask [{mask}]'.format(
      operation=operation.name, mask=update_mask))
  return operations_util.WaitForOperation(self.client.apps_operations,
                                          operation)
def CheckGen1AppId(self, service_name, project_id):
  """Checks whether the service contains a Gen1 app.

  Args:
    service_name: str, The service name.
    project_id: str, The project id.

  Returns:
    boolean, True if the service contains a Gen1 app, False otherwise.
  """
  check_request = (
      self.messages.AppengineAppsServicesMigrationCheckGen1appIdRequest(
          name=self._GetServiceRelativeName(service_name),
          checkGen1AppIdRequest=self.messages.CheckGen1AppIdRequest(
              projectId=project_id)))
  return self.client.apps_services_migration.CheckGen1appId(check_request)
def MigrateConfigYaml(
    self, project_id, config_as_string, runtime, service_name
):
  """Migrates the user-provided app.yaml to be Gen2 compatible.

  Args:
    project_id: str, The project id.
    config_as_string: str, The config as a string.
    runtime: str, The runtime.
    service_name: str, The service name.

  Returns:
    str, The migrated config as a string.
  """
  runtime_enums = self.messages.MigrateConfigYamlRequest.RuntimeValueValuesEnum
  if runtime in gen1_runtimes:
    runtime_enum = runtime_enums.GEN1_PYTHON27
  else:
    runtime_enum = runtime_enums.MIGRATION_ASSIST_RUNTIME_UNSPECIFIED
  migrate_request = (
      self.messages.AppengineAppsServicesMigrationMigrateConfigYamlRequest(
          name=self._GetServiceRelativeName(service_name),
          migrateConfigYamlRequest=self.messages.MigrateConfigYamlRequest(
              projectId=project_id,
              configAsString=config_as_string,
              runtime=runtime_enum)))
  return self.client.apps_services_migration.MigrateConfigYaml(
      migrate_request)
def MigrateCodeFile(self, project_id, code_as_string, runtime, service_name):
  """Migrates the user-provided code file to a Gen2 runtime.

  Args:
    project_id: str, The project id.
    code_as_string: str, The code as a string.
    runtime: str, The runtime.
    service_name: str, The service name.

  Returns:
    Long running operation.
  """
  runtime_enums = self.messages.MigrateCodeFileRequest.RuntimeValueValuesEnum
  if runtime in gen1_runtimes:
    runtime_enum = runtime_enums.GEN1_PYTHON27
  else:
    runtime_enum = runtime_enums.MIGRATION_ASSIST_RUNTIME_UNSPECIFIED
  migrate_request = (
      self.messages.AppengineAppsServicesMigrationMigrateCodeFileRequest(
          name=self._GetServiceRelativeName(service_name),
          migrateCodeFileRequest=self.messages.MigrateCodeFileRequest(
              projectId=project_id,
              codeAsString=code_as_string,
              runtime=runtime_enum)))
  operation = self.client.apps_services_migration.MigrateCodeFile(
      migrate_request)
  return operations_util.WaitForOperation(
      self.client.apps_operations, operation
  )

View File

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes for interacting with the Cloud Build API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
from googlecloudsdk.api_lib.cloudbuild import build
class BuildArtifact(object):
  """Represents a build of a flex container, either in-progress or completed.

  A build artifact is either a build_id for an in-progress build, the image
  name for a completed container build, or options for the build to be created
  elsewhere.
  """

  class BuildType(enum.Enum):
    """Discriminator for the kind of identifier this artifact carries."""
    IMAGE = 1
    BUILD_ID = 2
    BUILD_OPTIONS = 3

  def __init__(self, build_type, identifier, build_op=None):
    """Initializes a BuildArtifact.

    Args:
      build_type: BuildType, the kind of artifact.
      identifier: image name, build ID, or build options, per build_type.
      build_op: optional build operation the artifact was derived from.
    """
    self.build_type = build_type
    self.identifier = identifier
    self.build_op = build_op

  def IsImage(self):
    """Returns True if this artifact is a completed container image."""
    return self.build_type == self.BuildType.IMAGE

  def IsBuildId(self):
    """Returns True if this artifact is an in-progress build ID."""
    return self.build_type == self.BuildType.BUILD_ID

  def IsBuildOptions(self):
    """Returns True if this artifact holds server-side build options."""
    return self.build_type == self.BuildType.BUILD_OPTIONS

  @classmethod
  def MakeBuildIdArtifact(cls, build_id):
    """Creates a BuildArtifact from an in-progress build ID."""
    return cls(cls.BuildType.BUILD_ID, build_id)

  @classmethod
  def MakeImageArtifact(cls, image_name):
    """Creates a BuildArtifact from a completed container image name."""
    return cls(cls.BuildType.IMAGE, image_name)

  @classmethod
  def MakeBuildOptionsArtifact(cls, build_options):
    """Creates a BuildArtifact from server-side build options."""
    return cls(cls.BuildType.BUILD_OPTIONS, build_options)

  @classmethod
  def MakeBuildIdArtifactFromOp(cls, build_op):
    """Creates a build-ID BuildArtifact from a build operation."""
    build_id = build.GetBuildProp(build_op, 'id', required=True)
    return cls(cls.BuildType.BUILD_ID, build_id, build_op)

  @classmethod
  def MakeImageArtifactFromOp(cls, build_op):
    """Create Image BuildArtifact from build operation.

    Args:
      build_op: the build operation whose source describes the image.

    Raises:
      build.BuildFailedError: if no image name can be extracted from the
        operation's storageSource.

    Returns:
      BuildArtifact of type IMAGE.
    """
    source = build.GetBuildProp(build_op, 'source')
    # Bug fix: initialize image_name so that a source with no
    # 'storageSource'/'object' property raises BuildFailedError below
    # instead of an UnboundLocalError on the `if image_name is None` check.
    image_name = None
    for prop in source.object_value.properties:
      if prop.key == 'storageSource':
        for storage_prop in prop.value.object_value.properties:
          if storage_prop.key == 'object':
            image_name = storage_prop.value.string_value
    if image_name is None:
      raise build.BuildFailedError('Could not determine image name')
    return cls(cls.BuildType.IMAGE, image_name, build_op)

View File

@@ -0,0 +1,260 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods to upload source to GCS and call Cloud Build service."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import gzip
import io
import operator
import os
import tarfile
from apitools.base.py import encoding
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import times
import six
from six.moves import filter # pylint: disable=redefined-builtin
# Paths that shouldn't be ignored client-side.
# Behavioral parity with github.com/docker/docker-py.
# These entries are always removed from the .dockerignore exclusion set so
# the Dockerfile and .dockerignore themselves are always uploaded.
BLOCKLISTED_DOCKERIGNORE_PATHS = ['Dockerfile', '.dockerignore']
def _CreateTar(upload_dir, gen_files, paths, gz):
"""Create tarfile for upload to GCS.
The third-party code closes the tarfile after creating, which does not
allow us to write generated files after calling docker.utils.tar
since gzipped tarfiles can't be opened in append mode.
Args:
upload_dir: the directory to be archived
gen_files: Generated files to write to the tar
paths: allowed paths in the tarfile
gz: gzipped tarfile object
"""
root = os.path.abspath(upload_dir)
t = tarfile.open(mode='w', fileobj=gz)
for path in sorted(paths):
full_path = os.path.join(root, path)
t.add(full_path, arcname=path, recursive=False)
for name, contents in six.iteritems(gen_files):
genfileobj = io.BytesIO(contents.encode())
tar_info = tarfile.TarInfo(name=name)
tar_info.size = len(genfileobj.getvalue())
t.addfile(tar_info, fileobj=genfileobj)
genfileobj.close()
t.close()
def _GetDockerignoreExclusions(upload_dir, gen_files):
  """Reads .dockerignore exclusions from disk or from generated files.

  A .dockerignore present on disk takes precedence over a generated one.

  Args:
    upload_dir: the path to the root directory.
    gen_files: dict of filename to contents of generated files.

  Returns:
    Set of exclusion expressions from the dockerignore file.
  """
  dockerignore_path = os.path.join(upload_dir, '.dockerignore')
  if os.path.exists(dockerignore_path):
    contents = files.ReadFileContents(dockerignore_path)
  else:
    contents = gen_files.get('.dockerignore')
  if not contents:
    return set()
  # Keep non-blank lines, then drop paths that must never be excluded
  # client-side.
  return set(filter(bool, contents.splitlines())) - set(
      BLOCKLISTED_DOCKERIGNORE_PATHS)
def _GetIncludedPaths(upload_dir, source_files, exclude):
  """Filters paths in root using dockerignore and skip_files.

  We iterate separately to filter on skip_files in order to preserve expected
  behavior (standard deployment skips directories if they contain only files
  ignored by skip_files).

  Args:
    upload_dir: the path to the root directory.
    source_files: [str], relative paths to upload.
    exclude: the .dockerignore file exclusions.

  Returns:
    Set of paths (relative to upload_dir) to include.
  """
  # Import only when necessary, to decrease startup time.
  # pylint: disable=g-import-not-at-top
  import docker
  # Replicate how docker.utils.tar() finds the root and excluded paths.
  root_dir = os.path.abspath(upload_dir)
  # All paths minus the dockerignore exclusions.
  included = docker.utils.exclude_paths(root_dir, list(exclude))
  # Additionally apply the ignore regex from .gcloudignore or skip_files.
  included.intersection_update(source_files)
  return included
def UploadSource(upload_dir, source_files, object_ref, gen_files=None):
  """Uploads a gzipped tarball of the source directory to GCS.

  Note: To provide parity with docker's behavior, we must respect
  .dockerignore.

  Args:
    upload_dir: the directory to be archived.
    source_files: [str], relative paths to upload.
    gen_files: dict of filename to (str) contents of generated config and
      source context files.
    object_ref: storage_util.ObjectReference, the Cloud Storage location to
      upload the source tarball to.
  """
  gen_files = gen_files or {}
  exclusions = _GetDockerignoreExclusions(upload_dir, gen_files)
  included = _GetIncludedPaths(upload_dir, source_files, exclusions)
  # We can't use tempfile.NamedTemporaryFile here because ... Windows.
  # See https://bugs.python.org/issue14243. There are small cleanup races
  # during process termination that will leave artifacts on the filesystem.
  # eg, CTRL-C on windows leaves both the directory and the file. Unavoidable.
  # On Posix, `kill -9` has similar behavior, but CTRL-C allows cleanup.
  with files.TemporaryDirectory() as temp_dir:
    writer = files.BinaryFileWriter(os.path.join(temp_dir, 'src.tgz'))
    with gzip.GzipFile(mode='wb', fileobj=writer) as gz:
      _CreateTar(upload_dir, gen_files, included, gz)
    writer.close()
    storage_api.StorageClient().CopyFileToGCS(writer.name, object_ref)
def GetServiceTimeoutSeconds(timeout_property_str):
  """Returns the service timeout in seconds given the duration string."""
  if timeout_property_str is None:
    return None
  duration = times.ParseDuration(timeout_property_str, default_suffix='s')
  # NOTE(review): total_seconds is accessed without (); times.ParseDuration
  # appears to return a Duration whose total_seconds is a property, unlike
  # datetime.timedelta — confirm if changing the times helper.
  return int(duration.total_seconds)
def GetServiceTimeoutString(timeout_property_str):
  """Returns the service timeout duration string with 's' suffix appended."""
  if timeout_property_str is None:
    return None
  return '{}s'.format(GetServiceTimeoutSeconds(timeout_property_str))
class InvalidBuildError(ValueError):
  """Error indicating that ExecuteCloudBuild was given a bad Build message."""

  def __init__(self, field):
    message = ('Field [{}] was provided, but should not have been. '
               'You may be using an improper Cloud Build pipeline.'
              ).format(field)
    super(InvalidBuildError, self).__init__(message)
def _ValidateBuildFields(build, fields):
"""Validates that a Build message doesn't have fields that we populate."""
for field in fields:
if getattr(build, field, None) is not None:
raise InvalidBuildError(field)
def GetDefaultBuild(output_image):
  """Returns the default Build message for this runtime.

  This build just uses the latest docker builder image (location pulled from
  the app/container_builder_image property) to run a `docker build` with the
  given tag.

  Args:
    output_image: GCR location for the output docker image (e.g.
      `gcr.io/test-gae/hardcoded-output-tag`)

  Returns:
    Build, a CloudBuild Build message with the given steps (ready to be given
    to FixUpBuild).
  """
  messages = cloudbuild_util.GetMessagesModule()
  builder_image = properties.VALUES.app.container_builder_image.Get()
  log.debug('Using builder image: [{0}]'.format(builder_image))
  build_step = messages.BuildStep(
      name=builder_image, args=['build', '-t', output_image, '.'])
  return messages.Build(steps=[build_step], images=[output_image])
def FixUpBuild(build, object_ref):
  """Returns a copy of the Build with run-time values populated.

  Specifically:
  - `source` is pulled from the given object_ref
  - `timeout` comes from the app/cloud_build_timeout property
  - `logsBucket` uses the bucket from object_ref

  Args:
    build: cloudbuild Build message. The Build to modify. Fields 'timeout',
      'source', and 'logsBucket' will be added and may not be given.
    object_ref: storage_util.ObjectReference, the Cloud Storage location of
      the source tarball.

  Returns:
    Build, (copy) of the given Build message with the specified fields
    populated.

  Raises:
    InvalidBuildError: if the Build message had one of the fields this
      function sets pre-populated
  """
  messages = cloudbuild_util.GetMessagesModule()
  # Operate on a copy so the caller's message is left untouched.
  build = encoding.CopyProtoMessage(build)
  # CopyProtoMessage doesn't preserve the order of additionalProperties; sort
  # these so that they're in a consistent order for tests (this *only* matters
  # for tests).
  if build.substitutions:
    build.substitutions.additionalProperties.sort(
        key=operator.attrgetter('key'))
  # Fail fast if anything we're about to fill in was already set.
  _ValidateBuildFields(build, ('source', 'timeout', 'logsBucket'))
  build.timeout = GetServiceTimeoutString(
      properties.VALUES.app.cloud_build_timeout.Get())
  build.logsBucket = object_ref.bucket
  build.source = messages.Source(
      storageSource=messages.StorageSource(
          bucket=object_ref.bucket,
          object=object_ref.name,
      ),
  )
  return build

View File

@@ -0,0 +1,318 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods used by the deploy_app command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
import hashlib
import os
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.app import metric_names
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.appengine.tools import context_util
from googlecloudsdk.command_lib.storage import storage_parallel
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import metrics
from googlecloudsdk.core import properties
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import files as file_utils
from googlecloudsdk.core.util import times
from six.moves import map # pylint: disable=redefined-builtin
# Default worker count for parallel uploads.
# NOTE(review): _UploadFilesThreads below falls back to
# storage_parallel.DEFAULT_NUM_THREADS rather than this constant — confirm
# this module-private default is still referenced anywhere.
_DEFAULT_NUM_THREADS = 8
# TTL expiry margin, to compensate for incorrect local time and timezone,
# as well as deployment time.
_TTL_MARGIN = datetime.timedelta(1)
class LargeFileError(core_exceptions.Error):
  """Raised when a single file exceeds the maximum upload size."""

  def __init__(self, path, size, max_size):
    # Keep the guidance about skip_files in the message so users can act on it.
    message = ('Cannot upload file [{path}], which has size [{size}] (greater than '
               'maximum allowed size of [{max_size}]). Please delete the file or add '
               'to the skip_files entry in your application .yaml file and try '
               'again.'.format(path=path, size=size, max_size=max_size))
    super(LargeFileError, self).__init__(message)
class MultiError(core_exceptions.Error):
  """Aggregates one or more errors raised during a batched operation.

  The message lists every underlying error; the originals remain available
  on the `errors` attribute for programmatic inspection.
  """

  def __init__(self, operation_description, errors):
    if len(errors) > 1:
      msg = 'Multiple errors occurred {0}\n'.format(operation_description)
    else:
      msg = 'An error occurred {0}\n'.format(operation_description)
    errors_string = '\n\n'.join(map(str, errors))
    # Bug fix: the original called super(core_exceptions.Error, self), which
    # starts the MRO lookup *after* core_exceptions.Error and therefore skips
    # core_exceptions.Error.__init__ entirely. super(MultiError, self) is the
    # correct cooperative call.
    super(MultiError, self).__init__(msg + errors_string)
    # Preserve the individual errors for callers that need details.
    self.errors = errors
def _BuildDeploymentManifest(upload_dir, source_files, bucket_ref, tmp_dir):
  """Builds a deployment manifest for use with the App Engine Admin API.

  Args:
    upload_dir: str, path to the service's upload directory
    source_files: [str], relative paths to upload.
    bucket_ref: The reference to the bucket files will be placed in.
    tmp_dir: A temp directory for storing generated files (currently just
      source context files).

  Returns:
    A deployment manifest (dict) for use with the Admin API.
  """
  bucket_url = 'https://storage.googleapis.com/{0}'.format(bucket_ref.bucket)
  manifest = {}

  def _AddEntry(local_path, rel_path):
    # Manifest entries are keyed by relative path and point at the
    # content-addressed (SHA1) object in the bucket.
    digest = file_utils.Checksum.HashSingleFile(local_path,
                                                algorithm=hashlib.sha1)
    manifest[_FormatForManifest(rel_path)] = {
        'sourceUrl': '/'.join([bucket_url, digest]),
        'sha1Sum': digest,
    }

  # Normal application files.
  for rel_path in source_files:
    _AddEntry(os.path.join(upload_dir, rel_path), rel_path)

  # Source context files. These are temporary files which indicate the current
  # state of the source repository (git, cloud repo, etc.)
  for context_file in context_util.CreateContextFiles(
      tmp_dir, None, source_dir=upload_dir):
    rel_path = os.path.basename(context_file)
    if rel_path in manifest:
      # The source context file was explicitly provided by the user.
      log.debug('Source context already exists. Using the existing file.')
    else:
      _AddEntry(context_file, rel_path)
  return manifest
def _GetLifecycleDeletePolicy(storage_client, bucket_ref):
  """Get the TTL of objects in days as specified by the lifecycle policy.

  Only "delete by age" policies are accounted for.

  Args:
    storage_client: storage_api.StorageClient, API client wrapper.
    bucket_ref: The GCS bucket reference.

  Returns:
    datetime.timedelta, TTL of objects in days, or None if no deletion
    policy on the bucket.
  """
  msgs = storage_client.messages
  try:
    # Only the lifecycle field is needed, so restrict the response to it.
    bucket = storage_client.client.buckets.Get(
        request=msgs.StorageBucketsGetRequest(bucket=bucket_ref.bucket),
        global_params=msgs.StandardQueryParameters(fields='lifecycle'))
  except apitools_exceptions.HttpForbiddenError:
    # No permission to read the policy; treat as "no policy".
    return None
  if not bucket.lifecycle:
    return None
  ages = []
  for rule in bucket.lifecycle.rule:
    age = rule.condition.age
    if age is not None and age >= 0 and rule.action.type == 'Delete':
      ages.append(age)
  # The shortest age wins, since it deletes first.
  return datetime.timedelta(min(ages)) if ages else None
def _IsTTLSafe(ttl, obj):
  """Determines whether a GCS object is close to end-of-life.

  In order to reduce false negative rate (objects that are close to deletion
  but aren't marked as such) the check is forward-adjusted with _TTL_MARGIN.

  Args:
    ttl: datetime.timedelta, TTL of objects, or None if no TTL.
    obj: storage object to check.

  Returns:
    True if the object is safe or False if it is approaching end of life.
  """
  if ttl is None:
    # No deletion policy: the object never expires.
    return True
  cutoff = ttl - _TTL_MARGIN
  age = times.Now(times.UTC) - obj.timeCreated
  return age <= cutoff
def _BuildFileUploadMap(manifest, source_dir, bucket_ref, tmp_dir,
                        max_file_size):
  """Builds a map of files to upload, indexed by their hash.

  This skips already-uploaded files.

  Args:
    manifest: A dict containing the deployment manifest for a single service.
    source_dir: The relative source directory of the service.
    bucket_ref: The GCS bucket reference to upload files into.
    tmp_dir: The path to a temporary directory where generated files may be
      stored. If a file in the manifest is not found in the source directory,
      it will be retrieved from this directory instead.
    max_file_size: int, File size limit per individual file or None if no
      limit.

  Raises:
    LargeFileError: if one of the files to upload exceeds the maximum App
      Engine file size.

  Returns:
    A dict mapping hashes to file paths that should be uploaded.
  """
  storage_client = storage_api.StorageClient()
  ttl = _GetLifecycleDeletePolicy(storage_client, bucket_ref)
  # Objects near their lifecycle deletion age are re-uploaded, not reused.
  existing_items = set(o.name for o in storage_client.ListBucket(bucket_ref)
                       if _IsTTLSafe(ttl, o))
  files_to_upload = {}
  skipped_size = 0
  total_size = 0
  for rel_path in manifest:
    candidate = os.path.join(source_dir, rel_path)
    # For generated files, the relative path is based on the tmp_dir rather
    # than source_dir. If the file is not in the source directory, look in
    # tmp_dir instead.
    if not os.path.exists(encoding.Encode(candidate, encoding='utf-8')):
      candidate = os.path.join(tmp_dir, rel_path)
    # Perform this check when creating the upload map, so we catch too-large
    # files that have already been uploaded.
    size = os.path.getsize(encoding.Encode(candidate, encoding='utf-8'))
    if max_file_size and size > max_file_size:
      raise LargeFileError(candidate, size, max_file_size)
    digest = manifest[rel_path]['sha1Sum']
    total_size += size
    if digest in existing_items:
      log.debug('Skipping upload of [{f}]'.format(f=rel_path))
      skipped_size += size
    else:
      files_to_upload[digest] = candidate
  if total_size:
    log.info('Incremental upload skipped {pct}% of data'.format(
        pct=round(100.0 * skipped_size / total_size, 2)))
  return files_to_upload
class FileUploadTask(object):
  """Container describing one pending upload.

  Attributes hold the file's SHA1 checksum, its local path, and the
  destination bucket URL, in that order.
  """

  def __init__(self, sha1_hash, path, bucket_url):
    self.sha1_hash, self.path, self.bucket_url = sha1_hash, path, bucket_url
def _UploadFilesThreads(files_to_upload, bucket_ref):
  """Uploads files to App Engine Cloud Storage bucket using threads.

  Args:
    files_to_upload: dict {str: str}, map of checksum to local path
    bucket_ref: storage_api.BucketReference, the reference to the bucket files
      will be placed in.

  Raises:
    MultiError: if one or more errors occurred during file upload.
  """
  thread_count = (properties.VALUES.app.num_file_upload_threads.GetInt() or
                  storage_parallel.DEFAULT_NUM_THREADS)
  # Sorted order keeps mocked API calls deterministic for the test framework.
  tasks = [
      storage_parallel.FileUploadTask(
          local_path,
          storage_util.ObjectReference.FromBucketRef(bucket_ref, digest))
      for digest, local_path in sorted(files_to_upload.items())
  ]
  storage_parallel.UploadFiles(tasks, num_threads=thread_count,
                               show_progress_bar=True)
def CopyFilesToCodeBucket(upload_dir, source_files,
                          bucket_ref, max_file_size=None):
  """Uploads application files to the Cloud Storage code bucket via threads.

  Files are stored in the bucket keyed by their SHA1 hash. For example, given

    app/
      main.py          (sha1 123)
      tools/foo.py     (sha1 456)

  the bucket ends up containing gs://$BUCKET/123 and gs://$BUCKET/456, and the
  returned manifest maps each relative path to its 'sourceUrl' and 'sha1Sum':

    {
      "app/main.py": {
        "sourceUrl": "https://storage.googleapis.com/staging-bucket/123",
        "sha1Sum": "123"
      },
      ...
    }

  The bucket is listed first, and content whose hash is already present (and
  not near TTL expiry) is not uploaded again.

  Args:
    upload_dir: str, path to the service's upload directory
    source_files: [str], relative paths to upload.
    bucket_ref: The reference to the bucket files will be placed in.
    max_file_size: int, File size limit per individual file or None if no
      limit.

  Returns:
    A dictionary representing the manifest.
  """
  metrics.CustomTimedEvent(metric_names.COPY_APP_FILES_START)
  with file_utils.TemporaryDirectory() as tmp_dir:
    manifest = _BuildDeploymentManifest(
        upload_dir, source_files, bucket_ref, tmp_dir)
    # Index candidate uploads by SHA1 so identical content is sent only once.
    sha_to_path = _BuildFileUploadMap(
        manifest, upload_dir, bucket_ref, tmp_dir, max_file_size)
    _UploadFilesThreads(sha_to_path, bucket_ref)
    log.status.Print('File upload done.')
    log.info('Manifest: [{0}]'.format(manifest))
  metrics.CustomTimedEvent(metric_names.COPY_APP_FILES)
  return manifest
def _FormatForManifest(filename):
"""Reformat a filename for the deployment manifest if it is Windows format."""
if os.path.sep == '\\':
return filename.replace('\\', '/')
return filename

View File

@@ -0,0 +1,678 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods used by the deploy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import os
import re
from apitools.base.py import exceptions as apitools_exceptions
from gae_ext_runtime import ext_runtime
from googlecloudsdk.api_lib.app import appengine_api_client
from googlecloudsdk.api_lib.app import build as app_build
from googlecloudsdk.api_lib.app import cloud_build
from googlecloudsdk.api_lib.app import docker_image
from googlecloudsdk.api_lib.app import metric_names
from googlecloudsdk.api_lib.app import runtime_builders
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.api_lib.app.images import config
from googlecloudsdk.api_lib.app.runtimes import fingerprinter
from googlecloudsdk.api_lib.cloudbuild import build as cloudbuild_build
from googlecloudsdk.api_lib.services import enable_api
from googlecloudsdk.api_lib.services import exceptions as s_exceptions
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.util import exceptions as api_lib_exceptions
from googlecloudsdk.appengine.api import appinfo
from googlecloudsdk.appengine.tools import context_util
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import metrics
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.credentials import creds
from googlecloudsdk.core.credentials import store as c_store
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import platforms
import six
from six.moves import filter # pylint: disable=redefined-builtin
from six.moves import zip # pylint: disable=redefined-builtin
# Name of the implicit service every App Engine application has.
DEFAULT_SERVICE = 'default'
# Hostname separator used in place of '.' between version/service/app parts.
ALT_SEPARATOR = '-dot-'
MAX_DNS_LABEL_LENGTH = 63 # http://tools.ietf.org/html/rfc2181#section-11
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
# Technically, this should be 260 because of the drive, ':\', and a null
# terminator, but any time we're getting close we're in dangerous territory.
_WINDOWS_MAX_PATH = 256
# The admin API has a timeout for individual tasks; if the build is greater
# than 10 minutes, it might trigger that timeout, so it's not a candidate for
# parallelized builds. Value is in seconds.
MAX_PARALLEL_BUILD_TIME = 600
# Warning shown when we lack permission to confirm the Flex API is enabled.
FLEXIBLE_SERVICE_VERIFY_WARNING = (
    'Unable to verify that the Appengine Flexible API is enabled for project '
    '[{}]. You may not have permission to list enabled services on this '
    'project. If it is not enabled, this may cause problems in running your '
    'deployment. Please ask the project owner to ensure that the Appengine '
    'Flexible API has been enabled and that this account has permission to '
    'list enabled APIs.')
# Extra warning appended when deploying with service-account credentials.
FLEXIBLE_SERVICE_VERIFY_WITH_SERVICE_ACCOUNT = (
    'Note: When deploying with a service account, the Service Management API '
    'needs to be enabled in order to verify that the Appengine Flexible API '
    'is enabled. Please ensure the Service Management API has been enabled '
    'on this project by the project owner.')
# Error shown when enabling the Flexible API fails outright.
PREPARE_FAILURE_MSG = (
    'Enabling the Appengine Flexible API failed on project [{}]. You '
    'may not have permission to enable APIs on this project. Please ask '
    'the project owner to enable the Appengine Flexible API on this project.')
class Error(exceptions.Error):
  """Base error for this module."""


class PrepareFailureError(Error):
  """Raised when enabling the App Engine Flexible API on a project fails."""
  pass


class WindowMaxPathError(Error):
  """Raised if a file cannot be read because of the MAX_PATH limitation."""

  # User-facing message template; the links are Node.js-specific but the
  # information is generally applicable.
  _WINDOWS_MAX_PATH_ERROR_TEMPLATE = """\
The following file couldn't be read because its path is too long:
[{0}]
For more information on this issue and possible workarounds, please read the
following (links are specific to Node.js, but the information is generally
applicable):
* https://github.com/Microsoft/nodejstools/issues/69
* https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#max_path-explanation-and-workarounds\
"""

  def __init__(self, filename):
    # Embed the offending path into the canned explanation.
    super(WindowMaxPathError, self).__init__(
        self._WINDOWS_MAX_PATH_ERROR_TEMPLATE.format(filename))


class DockerfileError(exceptions.Error):
  """Raised if a Dockerfile was found along with a non-custom runtime."""


class CloudbuildYamlError(exceptions.Error):
  """Raised if a cloudbuild.yaml was found along with a non-custom runtime."""


class CustomRuntimeFilesError(exceptions.Error):
  """Raised if a custom runtime has both a Dockerfile and a cloudbuild.yaml."""


class NoDockerfileError(exceptions.Error):
  """No Dockerfile found."""


class UnsatisfiedRequirementsError(exceptions.Error):
  """Raised if we are unable to detect the runtime."""
def _NeedsDockerfile(info, source_dir):
  """Returns True if the given directory needs a Dockerfile for this app.

  A Dockerfile is necessary when there is no Dockerfile in source_dir,
  regardless of whether we generate it here on the client-side, or in Cloud
  Container Builder server-side.

  The reason this function is more complicated than that is that it
  additionally verifies the sanity of the provided configuration by raising an
  exception if:

  - The runtime is "custom", but no Dockerfile is present
  - The runtime is not "custom", and a Dockerfile or cloudbuild.yaml is present
  - The runtime is "custom", and has both a cloudbuild.yaml and a Dockerfile.

  (The reason cloudbuild.yaml is tied into this method is that its use should
  be mutually exclusive with the Dockerfile.)

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ServiceYamlInfo). The
      configuration for the service.
    source_dir: str, the path to the service's source directory

  Raises:
    CloudbuildYamlError: if a cloudbuild.yaml is present, but the runtime is
      not "custom".
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    CustomRuntimeFilesError: if a custom runtime had both a Dockerfile and a
      cloudbuild.yaml file.

  Returns:
    bool, whether Dockerfile generation is necessary.
  """
  has_dockerfile = os.path.exists(
      os.path.join(source_dir, config.DOCKERFILE))
  has_cloudbuild = os.path.exists(
      os.path.join(source_dir, runtime_builders.Resolver.CLOUDBUILD_FILE))
  if info.runtime == 'custom':
    # A custom runtime must supply exactly one of Dockerfile / cloudbuild.yaml.
    if has_dockerfile and has_cloudbuild:
      raise CustomRuntimeFilesError(
          ('A custom runtime must have exactly one of [{}] and [{}] in the '
           'source directory; [{}] contains both').format(
               config.DOCKERFILE, runtime_builders.Resolver.CLOUDBUILD_FILE,
               source_dir))
    elif has_dockerfile:
      log.info('Using %s found in %s', config.DOCKERFILE, source_dir)
      return False
    elif has_cloudbuild:
      log.info('Not using %s because cloudbuild.yaml was found instead.',
               config.DOCKERFILE)
      return True
    else:
      raise NoDockerfileError(
          'You must provide your own Dockerfile when using a custom runtime. '
          'Otherwise provide a "runtime" field with one of the supported '
          'runtimes.')
  else:
    # Non-custom runtimes may carry neither file; a Dockerfile is generated
    # (client-side or server-side) instead.
    if has_dockerfile:
      raise DockerfileError(
          'There is a Dockerfile in the current directory, and the runtime '
          'field in {0} is currently set to [runtime: {1}]. To use your '
          'Dockerfile to build a custom runtime, set the runtime field to '
          '[runtime: custom]. To continue using the [{1}] runtime, please '
          'remove the Dockerfile from this directory.'.format(info.file,
                                                              info.runtime))
    elif has_cloudbuild:
      raise CloudbuildYamlError(
          'There is a cloudbuild.yaml in the current directory, and the '
          'runtime field in {0} is currently set to [runtime: {1}]. To use '
          'your cloudbuild.yaml to build a custom runtime, set the runtime '
          'field to [runtime: custom]. To continue using the [{1}] runtime, '
          'please remove the cloudbuild.yaml from this directory.'.format(
              info.file, info.runtime))
    log.info('Need Dockerfile to be generated for runtime %s', info.runtime)
    return True
def ShouldUseRuntimeBuilders(service, strategy, needs_dockerfile):
  """Returns whether runtime builders should be used for this build.

  Runtime builders are only relevant when the service actually requires an
  image to be built; otherwise the answer is always False. When an image is
  required, the decision is delegated to the RuntimeBuilderStrategy, based on
  the service runtime and whether a Dockerfile must still be generated.

  Args:
    service: ServiceYamlInfo, The parsed service config.
    strategy: runtime_builders.RuntimeBuilderStrategy, the strategy for
      determining whether a runtime should use runtime builders.
    needs_dockerfile: bool, whether the Dockerfile in the source directory is
      absent.

  Returns:
    bool, whether to use the runtime builders.

  Raises:
    ValueError: if an unrecognized runtime_builder_strategy is given
  """
  if not service.RequiresImage():
    # No image means there is nothing for runtime builders to do.
    return False
  return strategy.ShouldUseRuntimeBuilders(service.runtime, needs_dockerfile)
def _GetDockerfiles(info, dockerfile_dir):
  """Returns map of in-memory Docker-related files to be packaged.

  Returns the files in-memory, so that we don't have to drop them on disk;
  instead, we include them in the archive sent to App Engine directly.

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ServiceYamlInfo)
      The service config.
    dockerfile_dir: str, path to the directory to fingerprint and generate
      Dockerfiles for.

  Raises:
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.

  Returns:
    A dictionary of filename relative to the archive root (str) to file
    contents (str).
  """
  params = ext_runtime.Params(appinfo=info.parsed, deploy=True)
  configurator = fingerprinter.IdentifyDirectory(dockerfile_dir, params)
  if not configurator:
    # Fingerprinting failed: the directory doesn't match any known runtime.
    raise UnsatisfiedRequirementsError(
        'Your application does not satisfy all of the requirements for a '
        'runtime of type [{0}]. Please correct the errors and try '
        'again.'.format(info.runtime))
  return {gen.filename: gen.contents
          for gen in configurator.GenerateConfigData()}
def _GetSourceContextsForUpload(source_dir):
  """Gets source context file information.

  Best-effort: failures are logged at info level and produce an empty (or
  partial) result rather than an error.

  Args:
    source_dir: str, path to the service's source directory

  Returns:
    A dict of filename to (str) source context file contents.
  """
  failure_msg = ('Could not generate [{name}]: {error}\n'
                 'Stackdriver Debugger may not be configured or enabled on this '
                 'application. See https://cloud.google.com/debugger/ for more '
                 'information.')
  source_contexts = {}
  try:
    contexts = context_util.CalculateExtendedSourceContexts(source_dir)
  except context_util.GenerateSourceContextError as e:
    log.info(failure_msg.format(name=context_util.CONTEXT_FILENAME, error=e))
    return source_contexts
  try:
    best = context_util.BestSourceContext(contexts)
    source_contexts[context_util.CONTEXT_FILENAME] = six.text_type(
        json.dumps(best))
  except KeyError as e:
    log.info(failure_msg.format(name=context_util.CONTEXT_FILENAME, error=e))
  return source_contexts
def _GetDomainAndDisplayId(project_id):
"""Returns tuple (displayed app id, domain)."""
l = project_id.split(':')
if len(l) == 1:
return l[0], None
return l[1], l[0]
def _GetImageName(project, service, version, gcr_domain):
  """Builds the docker image tag following the App Engine convention.

  Domain-scoped projects use a different name format than plain ones.
  """
  display, domain = _GetDomainAndDisplayId(project)
  name_format = (config.DOCKER_IMAGE_NAME_DOMAIN_FORMAT
                 if domain else config.DOCKER_IMAGE_NAME_FORMAT)
  return name_format.format(gcr_domain=gcr_domain,
                            display=display,
                            domain=domain,
                            service=service,
                            version=version)
def _GetYamlPath(source_dir, service_path, skip_files, gen_files):
  """Returns the yaml path, optionally updating gen_files.

  Args:
    source_dir: str, the absolute path to the root of the application
      directory.
    service_path: str, the absolute path to the service YAML file
    skip_files: appengine.api.Validation._RegexStr, the validated regex object
      from the service info file.
    gen_files: dict, the dict of files to generate. May be updated if a file
      needs to be generated.

  Returns:
    str, the relative path to the service YAML file that should be used for
    build.
  """
  if files.IsDirAncestorOf(source_dir, service_path):
    rel_path = os.path.relpath(service_path, start=source_dir)
    if not util.ShouldSkip(skip_files, rel_path):
      return rel_path
  # The YAML is outside the upload set (or skipped); stage a generated copy.
  contents = files.ReadFileContents(service_path)
  # Checksum guarantees a unique generated name; not used for security.
  digest = files.Checksum().AddContents(contents.encode()).HexDigest()
  staged_name = '_app_{}.yaml'.format(digest)
  gen_files[staged_name] = contents
  return staged_name
def BuildAndPushDockerImage(
    project,
    service,
    upload_dir,
    source_files,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER,
    parallel_build=False,
    use_flex_with_buildpacks=False):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    upload_dir: str, path to the service's upload directory
    source_files: [str], relative paths to upload.
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, if True, enable parallel build and deploy.
    use_flex_with_buildpacks: bool, if true, use the build-image and
      run-image built through buildpacks.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress
    build, or None if the service does not require an image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
    ValueError: if an unrecognized runtime_builder_strategy is given
  """
  # _NeedsDockerfile also validates the Dockerfile/cloudbuild.yaml/runtime
  # combination, so it runs before the no-image early return below.
  needs_dockerfile = _NeedsDockerfile(service, upload_dir)
  use_runtime_builders = ShouldUseRuntimeBuilders(service,
                                                  runtime_builder_strategy,
                                                  needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'
      .format(service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(upload_dir))
  if needs_dockerfile and not use_runtime_builders:
    # The runtime builders will generate a Dockerfile in the Cloud, so we only
    # need to generate one client-side when NOT using runtime builders.
    gen_files.update(_GetDockerfiles(service, upload_dir))

  image = docker_image.Image(
      dockerfile_dir=upload_dir,
      repo=_GetImageName(project, service.module, version_id, gcr_domain),
      nocache=False,
      tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference.FromBucketRef(
      code_bucket_ref, image.tagged_repo)

  relative_yaml_path = _GetYamlPath(upload_dir, service.file,
                                    service.parsed.skip_files, gen_files)

  try:
    cloud_build.UploadSource(upload_dir, source_files, object_ref,
                             gen_files=gen_files)
  except (OSError, IOError) as err:
    # On Windows, surface a dedicated error for paths over MAX_PATH.
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(
        service, upload_dir, use_flex_with_buildpacks)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()

    yaml_path = util.ConvertToPosixPath(relative_yaml_path)
    substitute = {
        '_OUTPUT_IMAGE': image.tagged_repo,
        '_GAE_APPLICATION_YAML_PATH': yaml_path,
    }
    if use_flex_with_buildpacks:
      python_version = yaml_parsing.GetRuntimeConfigAttr(
          service.parsed, 'python_version')
      # NOTE(review): GetRuntimeConfigAttr is called twice with identical
      # arguments here; the second call could reuse python_version — confirm
      # and simplify.
      if yaml_parsing.GetRuntimeConfigAttr(service.parsed, 'python_version'):
        substitute['_GOOGLE_RUNTIME_VERSION'] = python_version

    build = builder_reference.LoadCloudBuild(substitute)
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)
    build = cloud_build.FixUpBuild(build, object_ref)
  return _SubmitBuild(build, image, project, parallel_build)
def _SubmitBuild(build, image, project, parallel_build):
  """Submits the build, either synchronously or asynchronously.

  Args:
    build: A fixed up Build object.
    image: docker_image.Image, A docker image.
    project: str, The project being deployed to.
    parallel_build: bool, if True, enable parallel build and deploy.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress
    build.
  """
  build_timeout = cloud_build.GetServiceTimeoutSeconds(
      properties.VALUES.app.cloud_build_timeout.Get())
  # Long builds can hit the Admin API per-task timeout, so force them serial.
  if build_timeout and build_timeout > MAX_PARALLEL_BUILD_TIME:
    parallel_build = False
    log.info(
        'Property cloud_build_timeout configured to [{0}], which exceeds '
        'the maximum build time for parallelized beta deployments of [{1}] '
        'seconds. Performing serial deployment.'.format(
            build_timeout, MAX_PARALLEL_BUILD_TIME))

  if not parallel_build:
    # Synchronous path: block until the build finishes, return the image.
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        build, project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
    return app_build.BuildArtifact.MakeImageArtifact(image.tagged_repo)

  # Asynchronous path: kick off the build and return its operation handle.
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_ASYNC_START)
  build_op = cloudbuild_build.CloudBuildClient().ExecuteCloudBuildAsync(
      build, project=project)
  return app_build.BuildArtifact.MakeBuildIdArtifactFromOp(build_op)
def DoPrepareManagedVms(gae_client):
  """Calls an API to prepare the project for App Engine Flexible.

  Best-effort: an RPC failure is reduced to a warning so that a mis-detected
  unprepared project does not block the deployment.

  Args:
    gae_client: client exposing PrepareVmRuntime() — presumably an
      appengine_client.AppengineClient; TODO confirm at call sites.
  """
  metrics.CustomTimedEvent(metric_names.PREPARE_ENV_START)
  try:
    message = 'If this is your first deployment, this may take a while'
    with progress_tracker.ProgressTracker(message):
      # Note: this doesn't actually boot the VM, it just prepares some stuff
      # for the project via an undocumented Admin API.
      gae_client.PrepareVmRuntime()
    log.status.Print()
  except util.RPCError as err:
    # Any failures later due to an unprepared project will be noisy, so it's
    # okay not to fail here.
    log.warning(
        ("We couldn't validate that your project is ready to deploy to App "
         'Engine Flexible Environment. If deployment fails, please check the '
         'following message and try again:\n') + six.text_type(err))
  metrics.CustomTimedEvent(metric_names.PREPARE_ENV)
def PossiblyEnableFlex(project):
  """Attempts to enable the Flexible Environment API on the project.

  Possible scenarios:
  - If Flexible Environment is already enabled, success.
  - If Flexible Environment API is not yet enabled, attempts to enable it. If
    that succeeds, success.
  - If the account doesn't have permissions to confirm that the Flexible
    Environment API is or isn't enabled on this project, succeeds with a
    warning.
    - If the account is a service account, adds an additional warning that
      the Service Management API may need to be enabled.
  - If the Flexible Environment API is not enabled on the project and the
    attempt to enable it fails, raises PrepareFailureError.

  Args:
    project: str, the project ID.

  Raises:
    PrepareFailureError: if enabling the API fails with a 403 or 404 error
      code.
    googlecloudsdk.api_lib.util.exceptions.HttpException: miscellaneous errors
      returned by server.
  """
  try:
    enable_api.EnableServiceIfDisabled(project,
                                       'appengineflex.googleapis.com')
  except s_exceptions.GetServicePermissionDeniedException:
    # If we can't find out whether the Flexible API is enabled, proceed with
    # a warning.
    warning = FLEXIBLE_SERVICE_VERIFY_WARNING.format(project)
    # If user is using a service account, add more info about what might
    # have gone wrong.
    credential = c_store.LoadIfEnabled(use_google_auth=True)
    if credential and creds.IsServiceAccountCredentials(credential):
      warning += '\n\n{}'.format(FLEXIBLE_SERVICE_VERIFY_WITH_SERVICE_ACCOUNT)
    log.warning(warning)
  except s_exceptions.EnableServiceException:
    # If enabling the Flexible API fails due to a permissions error, the
    # deployment fails.
    raise PrepareFailureError(PREPARE_FAILURE_MSG.format(project))
  except apitools_exceptions.HttpError as err:
    # The deployment should also fail if there are unforeseen errors in
    # enabling the Flexible API. If so, display detailed information.
    raise api_lib_exceptions.HttpException(
        err, error_format=('Error [{status_code}] {status_message}'
                           '{error.details?'
                           '\nDetailed error information:\n{?}}'))
def UseSsl(service_info):
  """Returns whether the root URL for an application is served over HTTPS.

  More specifically, returns the 'secure' setting of the handler that will
  serve the application. This can be 'always', 'optional', or 'never',
  depending on when the URL is served over HTTPS.

  Will miss a small number of cases, but HTTP is always okay (an HTTP URL to
  an HTTPS-only service will result in a redirect).

  Args:
    service_info: ServiceYamlInfo, the service configuration.

  Returns:
    str, the 'secure' setting of the handler for the root URL.
  """
  handlers = service_info.parsed.handlers
  # Ti runtimes with no explicit handlers serve everything with both schemes.
  if service_info.is_ti_runtime and not handlers:
    return appinfo.SECURE_HTTP_OR_HTTPS
  for handler in handlers:
    try:
      root_match = re.match(handler.url + '$', '/')
    except re.error:
      # AppEngine uses POSIX Extended regular expressions, which are not 100%
      # compatible with Python's re module: skip patterns we cannot parse.
      continue
    if root_match:
      return handler.secure
  return appinfo.SECURE_HTTP
def GetAppHostname(app=None, app_id=None, service=None, version=None,
                   use_ssl=appinfo.SECURE_HTTP, deploy=True):
  """Returns the hostname of the given version of the deployed app.

  Args:
    app: Application resource. One of {app, app_id} must be given.
    app_id: str, project ID. One of {app, app_id} must be given. If both are
      provided, the hostname from app is preferred.
    service: str, the (optional) service being deployed
    version: str, the deployed version ID (omit to get the default version URL).
    use_ssl: one of the appinfo.SECURE_* constants (not a bool), e.g.
      appinfo.SECURE_HTTP, SECURE_HTTP_OR_HTTPS, or SECURE_HTTPS, controlling
      whether an HTTP or HTTPS URL is constructed.
    deploy: bool, if this is called during a deployment.

  Returns:
    str. Constructed URL.

  Raises:
    TypeError: if neither an app nor an app_id is provided
  """
  if not app and not app_id:
    raise TypeError('Must provide an application resource or application ID.')
  version = version or ''
  service_name = service or ''
  # The default service is omitted from the hostname entirely.
  if service == DEFAULT_SERVICE:
    service_name = ''
  if not app:
    api_client = appengine_api_client.AppengineApiClient.GetApiClient()
    app = api_client.GetApplication()
  if app:
    # defaultHostname looks like '<app-id>.<domain>'; split off the app id.
    # NOTE(review): if GetApplication() returned a falsy app here, `domain`
    # would be unbound at the final format call — presumably it always
    # returns an application; confirm.
    app_id, domain = app.defaultHostname.split('.', 1)
  # Normally, AppEngine URLs are of the form
  # 'http[s]://version.service.app.appspot.com'. However, the SSL certificate
  # for appspot.com is not valid for subdomains of subdomains of appspot.com
  # (e.g. 'https://app.appspot.com/' is okay; 'https://service.app.appspot.com/'
  # is not). To deal with this, AppEngine recognizes URLs like
  # 'http[s]://version-dot-service-dot-app.appspot.com/'.
  #
  # This works well as long as the domain name part constructed in this fashion
  # is less than 63 characters long, as per the DNS spec. If the domain name
  # part is longer than that, we are forced to use the URL with an invalid
  # certificate.
  #
  # We've tried to do the best possible thing in every case here.
  subdomain_parts = list(filter(bool, [version, service_name, app_id]))
  scheme = 'http'
  if use_ssl == appinfo.SECURE_HTTP:
    # Plain HTTP: ordinary dotted subdomain is fine.
    subdomain = '.'.join(subdomain_parts)
    scheme = 'http'
  else:
    # HTTPS desired: prefer the '-dot-' form, but only if it fits in a single
    # DNS label; otherwise fall back (see the long comment above).
    subdomain = ALT_SEPARATOR.join(subdomain_parts)
    if len(subdomain) <= MAX_DNS_LABEL_LENGTH:
      scheme = 'https'
    else:
      if deploy:
        # Warn at deploy time that the resulting certificate will not match.
        format_parts = ['$VERSION_ID', '$SERVICE_ID', '$APP_ID']
        subdomain_format = ALT_SEPARATOR.join(
            [j for (i, j) in zip([version, service_name, app_id], format_parts)
             if i])
        msg = ('This deployment will result in an invalid SSL certificate for '
               'service [{0}]. The total length of your subdomain in the '
               'format {1} should not exceed {2} characters. Please verify '
               'that the certificate corresponds to the parent domain of your '
               'application when you connect.').format(service,
                                                       subdomain_format,
                                                       MAX_DNS_LABEL_LENGTH)
        log.warning(msg)
      subdomain = '.'.join(subdomain_parts)
      if use_ssl == appinfo.SECURE_HTTP_OR_HTTPS:
        # HTTPS is optional; downgrade to HTTP rather than break the cert.
        scheme = 'http'
      elif use_ssl == appinfo.SECURE_HTTPS:
        if not deploy:
          msg = ('Most browsers will reject the SSL certificate for '
                 'service [{0}].').format(service)
          log.warning(msg)
        scheme = 'https'
  return '{0}://{1}.{2}'.format(scheme, subdomain, domain)
# Default filename of a deployable service configuration.
DEFAULT_DEPLOYABLE = 'app.yaml'

View File

@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encapsulation of a docker image."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
class Image(object):
  """Docker image that requires building and should be removed afterwards."""

  def __init__(self, dockerfile_dir=None, repo=None, tag=None, nocache=False,
               rm=True):
    """Initializer for Image.

    Args:
      dockerfile_dir: str, Path to the directory with the Dockerfile.
      repo: str, Repository name to be applied to the image in case of
        successful build.
      tag: str, Repository tag to be applied to the image in case of successful
        build.
      nocache: boolean, True if cache should not be used when building the
        image.
      rm: boolean, True if intermediate images should be removed after a
        successful build. Default value is set to True because this is the
        default value used by "docker build" command.
    """
    self._dockerfile_dir = dockerfile_dir
    self._repo = repo
    self._tag = tag
    self._nocache = nocache
    self._rm = rm
    # Populated by the Build() method; None until the image has been built.
    self._id = None

  @property
  def dockerfile_dir(self):
    """Returns the directory the image is to be built from."""
    return self._dockerfile_dir

  @property
  def id(self):
    """Returns 64 hexadecimal digit string identifying the image."""
    # Might also be a first 12-characters shortcut.
    return self._id

  @property
  def repo(self):
    """Returns image repo string."""
    return self._repo

  @property
  def tag(self):
    """Returns image tag string."""
    return self._tag

  @property
  def tagged_repo(self):
    """Returns image repo string with tag, if it exists."""
    if self._tag:
      return '{0}:{1}'.format(self._repo, self._tag)
    return self._repo

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Auxiliary environment information about App Engine."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
import enum
from googlecloudsdk.api_lib.app import runtime_registry
# Regexes matching the names of second-generation ("Ti") runtimes, keyed into
# _TI_RUNTIME_REGISTRY below.
NODE_TI_RUNTIME_EXPR = re.compile(r'nodejs\d*')
PHP_TI_RUNTIME_EXPR = re.compile(r'php[789]\d*')
PYTHON_TI_RUNTIME_EXPR = re.compile(r'python3\d*')
# Allow things like go110 and g110beta1
GO_TI_RUNTIME_EXPR = re.compile(r'go1\d\d(\w+\d)?')
# Java 7, 8 still allows handlers
# (the character class [123456] followed by optional digits matches names
# like java11 or java17 while excluding java7 and java8).
JAVA_TI_RUNTIME_EXPR = re.compile(r'java[123456]\d*')
class Environment(enum.Enum):
  """Enum for different application environments.

  STANDARD corresponds to App Engine Standard applications.
  FLEX corresponds to any App Engine `env: flex` applications.
  MANAGED_VMS corresponds to `vm: true` applications.
  """
  STANDARD = 1
  MANAGED_VMS = 2
  FLEX = 3
def GetTiRuntimeRegistry():
  """A simple registry whose `Get()` method answers True if runtime is Ti.

  Returns:
    runtime_registry.Registry backed by the module-level _TI_RUNTIME_REGISTRY
    mapping (defined below); entries that match no pattern fall back to the
    default, False.
  """
  return runtime_registry.Registry(_TI_RUNTIME_REGISTRY, default=False)
# Module-level aliases for the Environment enum members.
STANDARD = Environment.STANDARD
FLEX = Environment.FLEX
MANAGED_VMS = Environment.MANAGED_VMS
# Maps (runtime-name regex, environment set) entries to True for
# second-generation ("Ti") runtimes; consulted via GetTiRuntimeRegistry().
_TI_RUNTIME_REGISTRY = {
    runtime_registry.RegistryEntry(NODE_TI_RUNTIME_EXPR, {STANDARD}): True,
    runtime_registry.RegistryEntry(PHP_TI_RUNTIME_EXPR, {STANDARD}): True,
    runtime_registry.RegistryEntry(PYTHON_TI_RUNTIME_EXPR, {STANDARD}): True,
    runtime_registry.RegistryEntry(GO_TI_RUNTIME_EXPR, {STANDARD}): True,
    runtime_registry.RegistryEntry(JAVA_TI_RUNTIME_EXPR, {STANDARD}): True,
}

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds exceptions raised by api lib."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
# Root of this module's exception hierarchy; callers can catch Error to
# handle any error raised by this api_lib module.
class Error(exceptions.Error):
  """Base error for this module."""
class ConfigError(Error):
  """Raised when unable to parse a config file."""

  def __init__(self, message=None, **kwargs):
    """Initializes the error with a default message when none is given.

    Args:
      message: str or None, the error message; falls back to 'Config Error.'
        when empty or None.
      **kwargs: forwarded to the base exception initializer.
    """
    if not message:
      message = 'Config Error.'
    super(ConfigError, self).__init__(message, **kwargs)

View File

@@ -0,0 +1,124 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adapter to use externalized runtimes loaders from gcloud."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from gae_ext_runtime import ext_runtime
from googlecloudsdk.core import config
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import execution_utils
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
# Raised by _GetRuntimeDefDir() when the app/runtime_root property is unset.
class NoRuntimeRootError(exceptions.Error):
  """Raised when we can't determine where the runtimes are."""
def _GetRuntimeDefDir():
  """Returns the root directory where externalized runtimes are stored.

  Reads the app/runtime_root property, which is settable through the
  CLOUDSDK_APP_RUNTIME_ROOT environment variable.

  Returns:
    str, the runtime root directory.

  Raises:
    NoRuntimeRootError: if the runtime root property is not set.
  """
  runtime_root = properties.VALUES.app.runtime_root.Get()
  if runtime_root:
    return runtime_root
  # Fixed typo in user-facing message: "environmnent" -> "environment".
  raise NoRuntimeRootError('Unable to determine the root directory where '
                           'GAE runtimes are stored. Please define '
                           'the CLOUDSDK_APP_RUNTIME_ROOT environment '
                           'variable.')
class GCloudExecutionEnvironment(ext_runtime.ExecutionEnvironment):
  """ExecutionEnvironment implemented using gcloud's core functions."""

  def GetPythonExecutable(self):
    """Returns the Python executable, as determined by gcloud's core utils."""
    return execution_utils.GetPythonExecutable()

  def CanPrompt(self):
    """Returns True if gcloud's console can prompt the user."""
    return console_io.CanPrompt()

  def PromptResponse(self, message):
    """Prompts via gcloud's console and returns the user's response."""
    return console_io.PromptResponse(message)

  def Print(self, message):
    """Prints the message to gcloud's status stream."""
    return log.status.Print(message)
class CoreRuntimeLoader(object):
  """A loader stub for the core runtimes.

  The externalized core runtimes are currently distributed with the cloud sdk.
  This class encapsulates the name of a core runtime to avoid having to load
  it at module load time. Instead, the wrapped runtime is demand-loaded when
  the Fingerprint() method is called.
  """

  def __init__(self, name, visible_name, allowed_runtime_names):
    # name: str, directory name of the runtime under the runtime root.
    self._name = name
    # Lazily populated ExternalizedRuntime; see Fingerprint().
    self._rep = None
    self._visible_name = visible_name
    self._allowed_runtime_names = allowed_runtime_names

  # These need to be named this way because they're constants in the
  # non-externalized implementation.
  # pylint:disable=invalid-name
  @property
  def ALLOWED_RUNTIME_NAMES(self):
    return self._allowed_runtime_names

  # pylint:disable=invalid-name
  @property
  def NAME(self):
    return self._visible_name

  def Fingerprint(self, path, params):
    """Loads the wrapped runtime on first use and fingerprints the directory.

    Args:
      path: str, directory of the application to fingerprint.
      params: parameters forwarded to the externalized runtime's Fingerprint.

    Returns:
      The result of the wrapped runtime's Fingerprint() call.
    """
    if not self._rep:
      path_to_runtime = os.path.join(_GetRuntimeDefDir(), self._name)
      self._rep = ext_runtime.ExternalizedRuntime.Load(
          path_to_runtime, GCloudExecutionEnvironment())
    return self._rep.Fingerprint(path, params)
# Appended to interactivity-related errors when prompting has been explicitly
# disabled via the core/disable_prompts property; see
# GetNonInteractiveErrorMessage() below.
_PROMPTS_DISABLED_ERROR_MESSAGE = (
    '("disable_prompts" set to true, run "gcloud config set disable_prompts '
    'False" to fix this)')
def GetNonInteractiveErrorMessage():
  """Returns useful instructions when running non-interactive.

  Certain fingerprinting modules require interactive functionality. It isn't
  always obvious why gcloud is running in non-interactive mode (e.g. when
  "disable_prompts" is set) so this returns an appropriate addition to the
  error message in these circumstances.

  Returns:
    (str) The appropriate error message snippet.
  """
  if not properties.VALUES.core.disable_prompts.GetBool():
    # The other case for non-interactivity (running detached from a terminal)
    # should be obvious, so no extra explanation is needed.
    return ''
  # A leading space makes the raw message mesh well with its display context.
  return ' ' + _PROMPTS_DISABLED_ERROR_MESSAGE

View File

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Magic constants for images module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The version of the docker API the docker-py client uses.
# Warning: other versions might have different return values for some functions.
DOCKER_PY_VERSION = 'auto'
# Timeout of HTTP request from docker-py client to docker daemon, in seconds.
DOCKER_D_REQUEST_TIMEOUT = 300
# Image name templates; 'display' is the project's display portion and
# 'gcr_domain' the registry host.
DOCKER_IMAGE_NAME_FORMAT = (
    '{gcr_domain}/{display}/appengine/{service}.{version}')
DOCKER_IMAGE_TAG = 'latest'
DOCKER_IMAGE_NAME_DOMAIN_FORMAT = (
    '{gcr_domain}/{domain}/{display}/appengine/{service}.{version}')
# Name of a Dockerfile.
DOCKERFILE = 'Dockerfile'
# A map of runtimes values if they need to be overwritten to match our
# base Docker images naming rules.
CANONICAL_RUNTIMES = {'java7': 'java'}

View File

@@ -0,0 +1,273 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for manipulating GCE instances running an App Engine project."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from six.moves import filter # pylint: disable=redefined-builtin
from six.moves import map # pylint: disable=redefined-builtin
class InvalidInstanceSpecificationError(exceptions.Error):
  """Raised when an instance specification is ambiguous or inconsistent."""
  pass
class SelectInstanceError(exceptions.Error):
  """Raised when an instance cannot be selected interactively."""
  pass
class Instance(object):
  """Value class for instances running the current App Engine project."""

  # TODO(b/27900246): Once API supports "Get" verb, convert to use resource
  # parser.
  # Named groups pull the project/service/version/instance components out of a
  # full resource name like 'apps/P/services/S/versions/V/instances/I'.
  _INSTANCE_NAME_PATTERN = ('apps/(?P<project>.*)/'
                            'services/(?P<service>.*)/'
                            'versions/(?P<version>.*)/'
                            'instances/(?P<instance>.*)')

  def __init__(self, service, version, id_, instance=None):
    # service: str or None, the service this instance belongs to.
    # version: str or None, the version this instance belongs to.
    # id: str, the instance id.
    self.service = service
    self.version = version
    self.id = id_
    self.instance = instance  # The Client API instance object

  @classmethod
  def FromInstanceResource(cls, instance):
    """Builds an Instance from an API instance resource's full name."""
    match = re.match(cls._INSTANCE_NAME_PATTERN, instance.name)
    service = match.group('service')
    version = match.group('version')
    return cls(service, version, instance.id, instance)

  @classmethod
  def FromResourcePath(cls, path, service=None, version=None):
    """Convert a resource path into an AppEngineInstance.

    A resource path is of the form '<service>/<version>/<instance>'.
    '<service>' and '<version>' can be omitted, in which case they are None in
    the resulting instance.

    >>> (Instance.FromResourcePath('a/b/c') ==
    ...  Instance('a', 'b', 'c'))
    True
    >>> (Instance.FromResourcePath('b/c', service='a') ==
    ...  Instance('a', 'b', 'c'))
    True
    >>> (Instance.FromResourcePath('c', service='a', version='b') ==
    ...  Instance('a', 'b', 'c'))
    True

    Args:
      path: str, the resource path
      service: the service of the instance (replaces the service from the
        resource path)
      version: the version of the instance (replaces the version from the
        resource path)

    Returns:
      Instance, an Instance representing the path

    Raises:
      InvalidInstanceSpecificationError: if the instance is over- or
        under-specified
    """
    parts = path.split('/')
    # Missing leading components default to None; e.g. 'v/i' has no service.
    if len(parts) == 1:
      path_service, path_version, instance = None, None, parts[0]
    elif len(parts) == 2:
      path_service, path_version, instance = None, parts[0], parts[1]
    elif len(parts) == 3:
      path_service, path_version, instance = parts
    else:
      raise InvalidInstanceSpecificationError(
          'Instance resource path is incorrectly specified. '
          'Please provide at most one service, version, and instance id, '
          '.\n\n'
          'You provided:\n' + path)
    # Explicit keyword arguments must agree with components found in the path.
    if path_service and service and path_service != service:
      raise InvalidInstanceSpecificationError(
          'Service [{0}] is inconsistent with specified instance [{1}].'.format(
              service, path))
    service = service or path_service
    if path_version and version and path_version != version:
      raise InvalidInstanceSpecificationError(
          'Version [{0}] is inconsistent with specified instance [{1}].'.format(
              version, path))
    version = version or path_version
    return cls(service, version, instance)

  def __eq__(self, other):
    # Equality ignores self.instance (the raw API object); only the
    # identifying triple matters.
    return (type(self) is type(other) and
            self.service == other.service and
            self.version == other.version and
            self.id == other.id)

  def __ne__(self, other):
    return not self == other

  # needed for set comparisons in tests
  def __hash__(self):
    return hash((self.service, self.version, self.id))

  def __str__(self):
    return '/'.join(filter(bool, [self.service, self.version, self.id]))

  # NOTE(review): __cmp__ (and the cmp builtin) exist only on Python 2; this
  # method is never invoked under Python 3.
  def __cmp__(self, other):
    return cmp((self.service, self.version, self.id),
               (other.service, other.version, other.id))
def FilterInstances(instances, service=None, version=None, instance=None):
  """Filter a list of App Engine instances.

  Args:
    instances: list of AppEngineInstance, all App Engine instances
    service: str, the name of the service to filter by or None to match all
      services
    version: str, the name of the version to filter by or None to match all
      versions
    instance: str, the instance id to filter by or None to match all versions.

  Returns:
    list of instances matching the given filters
  """
  def _Matches(candidate):
    """True if the candidate passes every filter that was provided."""
    if service and candidate.service != service:
      return False
    if version and candidate.version != version:
      return False
    if instance and candidate.id != instance:
      return False
    return True

  return [candidate for candidate in instances if _Matches(candidate)]
def GetMatchingInstance(instances, service=None, version=None, instance=None):
  """Return exactly one matching instance.

  When an instance id is supplied, filters `instances` down by the given
  criteria (service, version, instance) and requires exactly one match.
  Without an instance id, falls back to interactive selection.

  Args:
    instances: list of AppEngineInstance, all instances to select from
    service: str, a service to filter by or None to include all services
    version: str, a version to filter by or None to include all versions
    instance: str, an instance ID to filter by. If not given, the instance will
      be selected interactively.

  Returns:
    AppEngineInstance, an instance from the given list.

  Raises:
    InvalidInstanceSpecificationError: if no matching instances or more than one
      matching instance were found.
  """
  if not instance:
    return SelectInstanceInteractive(instances, service=service,
                                     version=version)
  matching = FilterInstances(instances, service, version, instance)
  if not matching:
    raise InvalidInstanceSpecificationError(
        'No instances match the given specification.\n\n'
        'All instances: {0}'.format(list(sorted(map(str, instances)))))
  if len(matching) > 1:
    raise InvalidInstanceSpecificationError(
        'More than one instance matches the given specification.\n\n'
        'Matching instances: {0}'.format(list(sorted(map(str, matching)))))
  return matching[0]
def SelectInstanceInteractive(all_instances, service=None, version=None):
  """Interactively choose an instance from a provided list.

  Example interaction:

      Which service?
       [1] default
       [2] service1
      Please enter your numeric choice: 1
      Which version?
       [1] v1
       [2] v2
      Please enter your numeric choice: 1
      Which instance?
       [1] i1
       [2] i2
      Please enter your numeric choice: 1

  Skips any prompts with only one option.

  Args:
    all_instances: list of AppEngineInstance, the list of instances to drill
      down on.
    service: str. If provided, skip the service prompt.
    version: str. If provided, skip the version prompt.

  Returns:
    AppEngineInstance, the selected instance from the list.

  Raises:
    SelectInstanceError: if no versions matching the criteria can be found or
      prompts are disabled.
  """
  if properties.VALUES.core.disable_prompts.GetBool():
    raise SelectInstanceError(
        'Cannot interactively select instances with prompts disabled.')

  # Defined here to close over all_instances for the error message
  def _PromptOptions(options, type_):
    """Given an iterable options of type type_, prompt and return one."""
    # De-duplicate and sort for a stable prompt ordering.
    options = sorted(set(options), key=str)
    if len(options) > 1:
      idx = console_io.PromptChoice(options, message='Which {0}?'.format(type_))
    elif len(options) == 1:
      # Single option: choose it automatically but tell the user.
      idx = 0
      log.status.Print('Choosing [{0}] for {1}.\n'.format(options[0], type_))
    else:
      if all_instances:
        msg = ('No instances could be found matching the given criteria.\n\n'
               'All instances:\n' +
               '\n'.join(
                   map('* [{0}]'.format, sorted(all_instances, key=str))))
      else:
        msg = 'No instances were found for the current project [{0}].'.format(
            properties.VALUES.core.project.Get(required=True))
      raise SelectInstanceError(msg)
    return options[idx]

  # Drill down one dimension at a time, narrowing the candidate set after
  # each choice: service, then version, then instance.
  matching_instances = FilterInstances(all_instances, service, version)
  service = _PromptOptions((i.service for i in matching_instances), 'service')
  matching_instances = FilterInstances(matching_instances, service=service)
  version = _PromptOptions((i.version for i in matching_instances), 'version')
  matching_instances = FilterInstances(matching_instances, version=version)
  return _PromptOptions(matching_instances, 'instance')

View File

@@ -0,0 +1,283 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""General formatting utils, App Engine specific formatters."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.core import log
from googlecloudsdk.core import resources
from googlecloudsdk.core.util import times
import six
# Severities accepted by GetFilters(); 'any' disables the severity filter.
LOG_LEVELS = ['critical', 'error', 'warning', 'info', 'debug', 'any']
# Request logs come from different sources if the app is Flex or Standard.
FLEX_REQUEST = 'nginx.request'
STANDARD_REQUEST = 'request_log'
DEFAULT_LOGS = ['stderr', 'stdout', 'crash.log',
                FLEX_REQUEST, STANDARD_REQUEST]
# Fully-qualified log ids recognized by FormatNginxLogEntry().
NGINX_LOGS = [
    'appengine.googleapis.com/nginx.request',
    'appengine.googleapis.com/nginx.health_check']
def GetFilters(project, log_sources, service=None, version=None, level='any'):
  """Builds the Cloud Logging filter strings for App Engine app logs.

  Args:
    project: string name of project ID.
    log_sources: List of streams to fetch logs from.
    service: String name of service to fetch logs from.
    version: String name of version to fetch logs from.
    level: A string representing the severity of logs to fetch.

  Returns:
    A list of filter strings.
  """
  filters = ['resource.type="gae_app"']
  if service:
    filters.append('resource.labels.module_id="{0}"'.format(service))
  if version:
    filters.append('resource.labels.version_id="{0}"'.format(version))
  if level != 'any':
    filters.append('severity>={0}'.format(level.upper()))

  log_ids = []
  for source in sorted(log_sources):
    log_ids.append('appengine.googleapis.com/{0}'.format(source))
    # stderr/stdout streams are also exposed under their bare log ids.
    if source in ('stderr', 'stdout'):
      log_ids.append(source)
  parent = resources.REGISTRY.Parse(
      project, collection='appengine.projects').RelativeName()
  filters.append(_LogFilterForIds(log_ids, parent))
  return filters
def _LogFilterForIds(log_ids, parent):
"""Constructs a log filter expression from the log_ids and parent name."""
if not log_ids:
return None
log_names = ['"{0}"'.format(util.CreateLogResourceName(parent, log_id))
for log_id in log_ids]
log_names = ' OR '.join(log_names)
if len(log_ids) > 1:
log_names = '(%s)' % log_names
return 'logName=%s' % log_names
def FormatAppEntry(entry):
  """App Engine formatter for `LogPrinter`.

  Args:
    entry: A log entry message emitted from the V2 API client.

  Returns:
    A string representing the entry or None if there was no text payload.
  """
  # This formatter only handles gae_app resources.
  if entry.resource.type != 'gae_app':
    return None
  # TODO(b/36056460): Output others than text here too?
  payload = entry.protoPayload or entry.jsonPayload
  text = six.text_type(payload) if payload else entry.textPayload
  service, version = _ExtractServiceAndVersion(entry)
  return '{service}[{version}] {text}'.format(service=service,
                                              version=version,
                                              text=text)
def FormatRequestLogEntry(entry):
  """App Engine request_log formatter for `LogPrinter`.

  Args:
    entry: A log entry message emitted from the V2 API client.

  Returns:
    A string representing the entry if it is a request entry.
  """
  if entry.resource.type != 'gae_app':
    return None
  if util.ExtractLogId(entry.logName) != 'appengine.googleapis.com/request_log':
    return None
  service, version = _ExtractServiceAndVersion(entry)

  def _Field(key, value_attr):
    """Returns the named request-proto field's value, or '-' when absent."""
    for prop in entry.protoPayload.additionalProperties:
      if prop.key == key:
        return getattr(prop.value, value_attr)
    return '-'

  msg = ('"{method} {resource} {http_version}" {status}'
         .format(
             method=_Field('method', 'string_value'),
             resource=_Field('resource', 'string_value'),
             http_version=_Field('httpVersion', 'string_value'),
             status=_Field('status', 'integer_value')))
  return '{service}[{version}] {msg}'.format(service=service,
                                             version=version,
                                             msg=msg)
def FormatNginxLogEntry(entry):
  """App Engine nginx.* formatter for `LogPrinter`.

  Args:
    entry: A log entry message emitted from the V2 API client.

  Returns:
    A string representing the entry if it is a request entry.
  """
  if entry.resource.type != 'gae_app':
    return None
  if util.ExtractLogId(entry.logName) not in NGINX_LOGS:
    return None
  service, version = _ExtractServiceAndVersion(entry)
  http = entry.httpRequest
  # Fall back to '-' for any missing request component.
  msg = ('"{method} {resource}" {status}'
         .format(
             method=http.requestMethod or '-',
             resource=http.requestUrl or '-',
             status=http.status or '-'))
  return '{service}[{version}] {msg}'.format(service=service,
                                             version=version,
                                             msg=msg)
def _ExtractServiceAndVersion(entry):
"""Extract service and version from a App Engine log entry.
Args:
entry: An App Engine log entry.
Returns:
A 2-tuple of the form (service_id, version_id)
"""
# TODO(b/36051034): If possible, extract instance ID too
ad_prop = entry.resource.labels.additionalProperties
service = next(x.value
for x in ad_prop
if x.key == 'module_id')
version = next(x.value
for x in ad_prop
if x.key == 'version_id')
return (service, version)
class LogPrinter(object):
  """Formats V2 API log entries to human readable text on a best effort basis.

  A LogPrinter consists of a collection of formatter functions which attempts
  to format specific log entries in a human readable form. The `Format` method
  safely returns a human readable string representation of a log entry, even if
  the provided formatters fails.

  The output format is `{timestamp} {log_text}`, where `timestamp` has a
  configurable but consistent format within a LogPrinter whereas `log_text` is
  emitted from one of its formatters (and truncated if necessary).

  See https://cloud.google.com/logging/docs/api/introduction_v2

  Attributes:
    api_time_format: str, the output format to print. See datetime.strftime()
    max_length: The maximum length of a formatted log entry after truncation.
  """

  def __init__(self, api_time_format='%Y-%m-%d %H:%M:%S', max_length=None):
    # Formatter callables tried in registration order; see RegisterFormatter.
    self.formatters = []
    self.api_time_format = api_time_format
    self.max_length = max_length

  def Format(self, entry):
    """Safely formats a log entry into human readable text.

    Args:
      entry: A log entry message emitted from the V2 API client.

    Returns:
      A string without line breaks respecting the `max_length` property.
    """
    text = self._LogEntryToText(entry)
    # Flatten multi-line payloads so the output stays one line per entry.
    text = text.strip().replace('\n', '  ')
    try:
      time = times.FormatDateTime(times.ParseDateTime(entry.timestamp),
                                  self.api_time_format)
    except times.Error:
      log.warning('Received timestamp [{0}] does not match expected'
                  ' format.'.format(entry.timestamp))
      # Placeholder keeps column alignment when the timestamp is unparsable.
      time = '????-??-?? ??:??:??'
    out = '{timestamp} {log_text}'.format(
        timestamp=time,
        log_text=text)
    if self.max_length and len(out) > self.max_length:
      out = out[:self.max_length - 3] + '...'
    return out

  def RegisterFormatter(self, formatter):
    """Attach a log entry formatter function to the printer.

    Note that if multiple formatters are attached to the same printer, the first
    added formatter that successfully formats the entry will be used.

    Args:
      formatter: A formatter function which accepts a single argument, a log
        entry. The formatter must either return the formatted log entry as a
        string, or None if it is unable to format the log entry.
        The formatter is allowed to raise exceptions, which will be caught and
        ignored by the printer.
    """
    self.formatters.append(formatter)

  def _LogEntryToText(self, entry):
    """Use the formatters to convert a log entry to unprocessed text."""
    out = None
    # Try registered formatters in order; the fallback always runs last.
    for fn in self.formatters + [self._FallbackFormatter]:
      # pylint:disable=bare-except
      try:
        out = fn(entry)
        if out:
          break
      except KeyboardInterrupt as e:
        raise e
      except:
        # Deliberate: formatter failures must never break log tailing; the
        # fallback formatter (or the placeholder below) covers the entry.
        pass
    if not out:
      log.debug('Could not format log entry: %s %s %s', entry.timestamp,
                entry.logName, entry.insertId)
      out = ('< UNREADABLE LOG ENTRY {0}. OPEN THE DEVELOPER CONSOLE TO '
             'INSPECT. >'.format(entry.insertId))
    return out

  def _FallbackFormatter(self, entry):
    # TODO(b/36057358): Is there better serialization for messages than
    # six.text_type()?
    if entry.protoPayload:
      return six.text_type(entry.protoPayload)
    elif entry.jsonPayload:
      return six.text_type(entry.jsonPayload)
    else:
      return entry.textPayload

View File

@@ -0,0 +1,70 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Appengine CSI metric names."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# Metric names for CSI
# Reserved CSI metric prefix for appengine
_APPENGINE_PREFIX = 'app_deploy_'
# "Start" suffix
# Each interval metric below has a matching *_START event emitted when the
# interval begins; the suffix is appended to the base metric name.
START = '_start'
# Time to upload project source tarball to GCS
CLOUDBUILD_UPLOAD = _APPENGINE_PREFIX + 'cloudbuild_upload'
CLOUDBUILD_UPLOAD_START = CLOUDBUILD_UPLOAD + START
# Time to execute Argo Cloud Build request
CLOUDBUILD_EXECUTE = _APPENGINE_PREFIX + 'cloudbuild_execute'
CLOUDBUILD_EXECUTE_START = CLOUDBUILD_EXECUTE + START
# Async variant: time to *submit* the Cloud Build request without waiting for
# the build itself to finish.
CLOUDBUILD_EXECUTE_ASYNC = CLOUDBUILD_EXECUTE + '_async'
CLOUDBUILD_EXECUTE_ASYNC_START = CLOUDBUILD_EXECUTE_ASYNC + START
# Time to copy application files to the application code bucket
COPY_APP_FILES = _APPENGINE_PREFIX + 'copy_app_files'
COPY_APP_FILES_START = COPY_APP_FILES + START
# Time to copy application files to the application code bucket without gsutil.
# No longer used, but may still come in from old versions.
COPY_APP_FILES_NO_GSUTIL = _APPENGINE_PREFIX + 'copy_app_files_no_gsutil'
# Time for a deploy using appengine API
DEPLOY_API = _APPENGINE_PREFIX + 'deploy_api'
DEPLOY_API_START = DEPLOY_API + START
# Time for API request to get the application code bucket.
GET_CODE_BUCKET = _APPENGINE_PREFIX + 'get_code_bucket'
GET_CODE_BUCKET_START = GET_CODE_BUCKET + START
# Time for setting deployed version to default using appengine API
SET_DEFAULT_VERSION_API = (_APPENGINE_PREFIX + 'set_default_version_api')
SET_DEFAULT_VERSION_API_START = SET_DEFAULT_VERSION_API + START
# Time for API request to prepare environment for VMs.
PREPARE_ENV = _APPENGINE_PREFIX + 'prepare_environment'
PREPARE_ENV_START = PREPARE_ENV + START
# Time to update config files.
UPDATE_CONFIG = _APPENGINE_PREFIX + 'update_config'
UPDATE_CONFIG_START = UPDATE_CONFIG + START
# First service deployment
FIRST_SERVICE_DEPLOY = _APPENGINE_PREFIX + 'first_service_deploy'
FIRST_SERVICE_DEPLOY_START = FIRST_SERVICE_DEPLOY + START

View File

@@ -0,0 +1,319 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for working with long running operations go/long-running-operation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
from apitools.base.py import encoding
from apitools.base.py import exceptions as apitools_exceptions
import enum
from googlecloudsdk.api_lib.app import exceptions as app_exceptions
from googlecloudsdk.api_lib.util import exceptions as api_exceptions
from googlecloudsdk.api_lib.util import requests
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import resources
import six
# Default is to retry every 5 seconds for 1 hour.
# Seconds between successive operation polls.
DEFAULT_OPERATION_RETRY_INTERVAL = 5
# Number of polls performed in one hour at the default interval
# (12 polls per minute * 60 minutes).
DEFAULT_OPERATION_MAX_TRIES = (60 // DEFAULT_OPERATION_RETRY_INTERVAL) * 60
def CallAndCollectOpErrors(method, *args, **kwargs):
  """Invoke method(...) and normalize operation-style failures.

  Args:
    method: Original method to call.
    *args: Positional arguments to method.
    **kwargs: Keyword arguments to method.

  Raises:
    MiscOperationError: Raised in place of any of the exception types listed
      in the except clauses below; other exceptions propagate unchanged. The
      original stack trace is preserved. The message is reused from the
      original error, except for HttpError where a human-friendly string is
      synthesized via HttpException (which itself is neither raised nor left
      in the trace).

  Returns:
    Whatever method(*args, **kwargs) returns.
  """
  try:
    return method(*args, **kwargs)
  except apitools_exceptions.HttpError as http_error:
    # Built locally purely for its human-friendly message text.
    _ReraiseMiscOperationError(api_exceptions.HttpException(http_error))
  except (OperationError, OperationTimeoutError, app_exceptions.Error) as error:
    _ReraiseMiscOperationError(error)
def _ReraiseMiscOperationError(err):
  """Re-raise ``err`` as MiscOperationError, keeping the original traceback."""
  message = six.text_type(err)
  exceptions.reraise(MiscOperationError(message))
class MiscOperationError(exceptions.Error):
  """Wrapper exception for errors treated as operation failures."""


class OperationError(exceptions.Error):
  """Raised when an App Engine operation finishes with an error payload."""
  pass


class OperationTimeoutError(exceptions.Error):
  """Raised when polling an App Engine operation exceeds the retry budget."""
  pass
class Status(enum.Enum):
  """Poll states of an App Engine long-running operation.

  GetStatus() below reports these as their `.name` strings, not enum members.
  """
  PENDING = 1
  COMPLETED = 2
  ERROR = 3
class Operation(object):
  """Wrapper around Operation response objects for console output.

  Attributes:
    project: String, name of the project.
    id: String, ID of operation.
    start_time: String, time the operation started (or None if unavailable).
    status: String, Status enum name: 'PENDING', 'COMPLETED', or 'ERROR'.
    op_resource: messages.Operation, the original Operation resource.
  """

  def __init__(self, op_response):
    """Creates the operation wrapper object.

    Args:
      op_response: messages.Operation, the raw API response to wrap.
    """
    res = resources.REGISTRY.ParseRelativeName(op_response.name,
                                               'appengine.apps.operations')
    self.project = res.appsId
    self.id = res.Name()
    self.start_time = _GetInsertTime(op_response)
    self.status = GetStatus(op_response)
    self.op_resource = op_response

  def __eq__(self, other):
    return (isinstance(other, Operation) and
            self.project == other.project and
            self.id == other.id and
            self.start_time == other.start_time and
            self.status == other.status and
            self.op_resource == other.op_resource)

  def __ne__(self, other):
    # Required for Python 2 support (this file imports six/__future__):
    # under py2, defining __eq__ alone leaves != as identity comparison.
    # Python 3 derives __ne__ automatically, so this is a no-op change there.
    return not self.__eq__(other)
def GetStatus(operation):
  """Map an operation resource onto a Status name string.

  Args:
    operation: A messages.Operation instance.

  Returns:
    str, 'PENDING' while the operation is still running, 'ERROR' when it
    finished with an error payload, and 'COMPLETED' otherwise.
  """
  if not operation.done:
    return Status.PENDING.name
  return Status.ERROR.name if operation.error else Status.COMPLETED.name
def _GetInsertTime(operation):
  """Look up the 'insertTime' metadata property of an operation.

  Args:
    operation: A messages.Operation instance.

  Returns:
    str, the time the operation started, or None when the operation has no
    metadata or no 'insertTime' property.
  """
  metadata = operation.metadata
  if not metadata:
    return None
  for prop in metadata.additionalProperties:
    if prop.key == 'insertTime':
      return prop.value.string_value
  return None
class AppEngineOperationPoller(waiter.OperationPoller):
  """A poller for appengine operations.

  Implements the waiter.OperationPoller interface (IsDone/Poll/GetResult) and
  additionally surfaces operation-metadata warnings to the user as they
  appear, without duplicating warnings across polls.
  """

  def __init__(self, operation_service, operation_metadata_type=None):
    """Sets up poller for appengine operations.

    Args:
      operation_service: apitools.base.py.base_api.BaseApiService, api service
        for retrieving information about ongoing operation.
      operation_metadata_type: Message class for the Operation metadata (for
        instance, OperationMetadataV1, or OperationMetadataV1Beta). When None,
        warning extraction is skipped entirely.
    """
    self.operation_service = operation_service
    self.operation_metadata_type = operation_metadata_type
    # Warnings already shown to the user; prevents re-logging the same
    # warning on each subsequent poll.
    self.warnings_seen = set()

  def IsDone(self, operation):
    """Overrides."""
    # New warnings are surfaced even before the operation completes.
    self._LogNewWarnings(operation)
    if operation.done:
      log.debug('Operation [{0}] complete. Result: {1}'.format(
          operation.name,
          json.dumps(encoding.MessageToDict(operation), indent=4)))
      # A finished operation carrying an error payload is raised as
      # OperationError rather than reported as done.
      if operation.error:
        raise OperationError(requests.ExtractErrorMessage(
            encoding.MessageToPyValue(operation.error)))
      return True
    log.debug('Operation [{0}] not complete. Waiting to retry.'.format(
        operation.name))
    return False

  def Poll(self, operation_ref):
    """Overrides.

    Args:
      operation_ref: googlecloudsdk.core.resources.Resource.

    Returns:
      fetched operation message.
    """
    request_type = self.operation_service.GetRequestType('Get')
    request = request_type(name=operation_ref.RelativeName())
    operation = self.operation_service.Get(request)
    self._LogNewWarnings(operation)
    return operation

  def _LogNewWarnings(self, operation):
    # Warning extraction needs the concrete metadata type; without it this
    # poller silently skips warning reporting.
    if self.operation_metadata_type:
      # Log any new warnings to the end user.
      new_warnings = GetWarningsFromOperation(
          operation, self.operation_metadata_type) - self.warnings_seen
      for warning in new_warnings:
        log.warning(warning + '\n')
        self.warnings_seen.add(warning)

  def GetResult(self, operation):
    """Simply returns the operation.

    Args:
      operation: api_name_messages.Operation.

    Returns:
      the operation resource itself (callers read `.response` off it).
    """
    return operation
class AppEngineOperationBuildPoller(AppEngineOperationPoller):
  """Poller that stops as soon as the operation references a build.

  Considered "done" either when a Cloud Build id shows up in the operation
  metadata, or when the operation itself finishes.
  """

  def __init__(self, operation_service, operation_metadata_type):
    """Sets up poller for appengine operations.

    Args:
      operation_service: apitools.base.py.base_api.BaseApiService, api service
        for retrieving information about ongoing operation.
      operation_metadata_type: Message class for the Operation metadata (for
        instance, OperationMetadataV1, or OperationMetadataV1Beta). Required
        here, unlike in the base poller, since build ids live in metadata.
    """
    super(AppEngineOperationBuildPoller, self).__init__(
        operation_service, operation_metadata_type)

  def IsDone(self, operation):
    build_id = GetBuildFromOperation(operation, self.operation_metadata_type)
    if build_id:
      return True
    return super(AppEngineOperationBuildPoller, self).IsDone(operation)
def GetMetadataFromOperation(operation, operation_metadata_type):
  """Decode operation.metadata into the given metadata message type.

  Returns None when the operation carries no metadata. The round trip through
  JSON converts the generic metadata message into the concrete type.
  """
  raw_metadata = operation.metadata
  if not raw_metadata:
    return None
  return encoding.JsonToMessage(operation_metadata_type,
                                encoding.MessageToJson(raw_metadata))
def GetBuildFromOperation(operation, operation_metadata_type):
  """Return the Cloud Build id recorded on the operation metadata, or None."""
  metadata = GetMetadataFromOperation(operation, operation_metadata_type)
  if metadata and metadata.createVersionMetadata:
    return metadata.createVersionMetadata.cloudBuildId
  return None
def GetWarningsFromOperation(operation, operation_metadata_type):
  """Return the set of warning strings attached to the operation metadata."""
  metadata = GetMetadataFromOperation(operation, operation_metadata_type)
  return set(metadata.warning) if metadata else set()
def WaitForOperation(operation_service, operation,
                     max_retries=None,
                     retry_interval=None,
                     operation_collection='appengine.apps.operations',
                     message=None,
                     poller=None):
  """Block until the operation completes, errors out, or times out.

  Args:
    operation_service: The apitools service type for operations
    operation: The operation resource to wait on
    max_retries: Maximum number of times to poll the operation
    retry_interval: Frequency of polling in seconds
    operation_collection: The resource collection of the operation.
    message: str, the message to display while progress tracker displays.
    poller: AppEngineOperationPoller to poll with, defaulting to done.

  Returns:
    The operation resource when it has completed

  Raises:
    OperationError: if the operation contains an error.
    OperationTimeoutError: when the operation polling times out
  """
  poller = poller or AppEngineOperationPoller(operation_service)
  # Fast path: the operation may already be finished when we get it.
  if poller.IsDone(operation):
    return poller.GetResult(operation)
  operation_ref = resources.REGISTRY.ParseRelativeName(
      operation.name, operation_collection)
  if max_retries is None:
    # One retry budget is consumed by the initial attempt.
    max_retries = DEFAULT_OPERATION_MAX_TRIES - 1
  if retry_interval is None:
    retry_interval = DEFAULT_OPERATION_RETRY_INTERVAL
  if message is None:
    message = 'Waiting for operation [{}] to complete'.format(
        operation_ref.RelativeName())
  # waiter.WaitFor expects its sleep interval in milliseconds.
  sleep_ms = retry_interval * 1000
  try:
    return waiter.WaitFor(
        poller,
        operation_ref,
        message,
        pre_start_sleep_ms=1000,
        max_retrials=max_retries,
        exponential_sleep_multiplier=1.0,
        sleep_ms=sleep_ms)
  except waiter.TimeoutError:
    raise OperationTimeoutError(('Operation [{0}] timed out. This operation '
                                 'may still be underway.').format(
                                     operation.name))

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dealing with region resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
class Region(object):
  """Value class representing an App Engine region resource."""

  def __init__(self, region, standard, flexible, search_api):
    self.region = region
    self.standard = standard
    self.flexible = flexible
    self.search_api = search_api

  @classmethod
  def FromRegionResource(cls, region):
    """Create region from a google.cloud.location.Location message."""
    # Region id is taken from the first label; presumably the API returns
    # exactly one label per location -- TODO confirm against the API.
    region_id = region.labels.additionalProperties[0].value
    metadata_props = region.metadata.additionalProperties

    def _Available(key):
      # True iff some metadata property with this key has a truthy value.
      return any(p.key == key and p.value.boolean_value
                 for p in metadata_props)

    return cls(region_id,
               _Available('standardEnvironmentAvailable'),
               _Available('flexibleEnvironmentAvailable'),
               _Available('searchApiAvailable'))

  def __str__(self):
    supported = [name for flag, name in ((self.standard, 'standard'),
                                         (self.flexible, 'flexible'),
                                         (self.search_api, 'search_api'))
                 if flag]
    header = '{region: <13}'.format(region=self.region)
    return header + ' (supports {envs})'.format(envs=' and '.join(supported))

View File

@@ -0,0 +1,770 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Library code to support App Engine Flex runtime builders.
The App Engine Flex platform runs a user's application that has been packaged
into a docker image. At the lowest level, the user provides us with a source
directory complete with Dockerfile, which we build into an image and deploy.
To make development easier, Google provides blessed language runtimes that the
user can extend in their Dockerfile to get a working base image for their
application. To further make development easier, we do not require users to
author their own Dockerfiles for "canonical" applications for each of the
Silver Languages.
In order for this to be possible, preprocessing must be done prior to the
Docker build to inspect the user's source code and automatically generate a
Dockerfile.
Flex runtime builders are a per-runtime pipeline that covers the full journey
from source directory to docker image. They are stored as templated .yaml files
representing CloudBuild Build messages. These .yaml files contain a series of
CloudBuild build steps. Additionally, the runtime root stores a `runtimes.yaml`
file which contains a list of runtime names and mappings to the corresponding
builder yaml files.
Such a builder will look something like this (note that <angle_brackets> denote
values to be filled in by the builder author, and $DOLLAR_SIGNS denote a
literal part of the template to be substituted at runtime):
steps:
- name: 'gcr.io/google_appengine/python-builder:<version>'
env: ['GAE_APPLICATION_YAML_PATH=${_GAE_APPLICATION_YAML_PATH}']
- name: 'gcr.io/cloud-builders/docker:<docker_image_version>'
args: ['build', '-t', '$_OUTPUT_IMAGE', '.']
images: ['$_OUTPUT_IMAGE']
To test this out in the context of a real deployment, do something like the
following (ls/grep steps just for illustrating where files are):
$ ls /tmp/runtime-root
runtimes.yaml python-v1.yaml
$ cat /tmp/runtime-root/runtimes.yaml
schema_version: 1
runtimes:
python:
target:
file: python-v1.yaml
$ gcloud config set app/use_runtime_builders true
$ gcloud config set app/runtime_builders_root file:///tmp/runtime-root
$ cd $MY_APP_DIR
$ grep 'runtime' app.yaml
runtime: python
$ grep 'env' app.yaml
env: flex
$ gcloud beta app deploy
A (possibly) easier way of achieving the same thing if you don't have a
runtime_builders_root set up for development yet:
$ cd $MY_APP_DIR
$ export _OUTPUT_IMAGE=gcr.io/$PROJECT/appengine/placeholder
$ gcloud container builds submit \
--config=<(envsubst < /path/to/cloudbuild.yaml) .
$ gcloud app deploy --image-url=$_OUTPUT_IMAGE
Or (even easier) use a 'custom' runtime:
$ cd $MY_APP_DIR
$ ls
cloudbuild.yaml app.yaml
$ rm -f Dockerfile
$ grep 'runtime' app.yaml
runtime: custom
$ gcloud beta app deploy
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import contextlib
import enum
import os
import re
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.cloudbuild import config as cloudbuild_config
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.calliope import exceptions as calliope_exceptions
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import yaml
import six
import six.moves.urllib.error
import six.moves.urllib.parse
import six.moves.urllib.request
# "test-{ga,beta}" runtimes are canaries for unit testing
_ALLOWLISTED_RUNTIMES_GA = frozenset(
{'aspnetcore', 'php', 'nodejs', 'ruby', 'java',
re.compile(r'(python|python-.+)$'),
re.compile(r'(go|go1\..+)$'),
re.compile('^gs://'),
'test-ga', re.compile('test-re-[ab]')})
_ALLOWLISTED_RUNTIMES_BETA = frozenset(
_ALLOWLISTED_RUNTIMES_GA |
{'test-beta'})
# Exception hierarchy for runtime-builder resolution and loading. All derive
# from core exceptions.Error so gcloud reports them as user-facing errors.
class FileReadError(exceptions.Error):
  """Error indicating a file read operation failed."""


class ManifestError(exceptions.Error):
  """Error indicating a problem parsing or using the manifest."""


class ExperimentsError(exceptions.Error):
  """Error indicating a problem parsing or using the experiment config."""


class CloudBuildLoadError(exceptions.Error):
  """Error indicating an issue loading the runtime Cloud Build specification."""


class CloudBuildFileNotFound(CloudBuildLoadError):
  """Error indicating a missing Cloud Build file."""


class InvalidRuntimeBuilderURI(CloudBuildLoadError):
  """Error indicating that the runtime builder URI format wasn't recognized."""

  def __init__(self, uri):
    super(InvalidRuntimeBuilderURI, self).__init__(
        '[{}] is not a valid runtime builder URI. '
        'Please set the app/runtime_builders_root property to a URI with '
        'either the Google Cloud Storage (`gs://`) or local file (`file://`) '
        'protocol.'.format(uri))


class BuilderResolveError(exceptions.Error):
  """Error indicating that a build file could not be resolved."""
class RuntimeBuilderStrategy(enum.Enum):
  """Enum indicating when to use runtime builders."""
  NEVER = 1
  ALLOWLIST_BETA = 2  # That is, turned on for an allowed set of runtimes
  ALLOWLIST_GA = 3  # That is, turned on for an allowed set of runtimes
  ALWAYS = 4

  def _GetAllowlist(self):
    """Return the allowlist of runtimes for this strategy.

    The allowlist is kept as a constant within this module.

    Returns:
      list of str, the names of runtimes that are allowed for this strategy.

    Raises:
      ValueError: if this strategy is not allowlist-based.
    """
    if self is self.ALLOWLIST_GA:
      return _ALLOWLISTED_RUNTIMES_GA
    if self is self.ALLOWLIST_BETA:
      return _ALLOWLISTED_RUNTIMES_BETA
    raise ValueError(
        'RuntimeBuilderStrategy {} is not an allowed strategy.'.format(self))

  def _IsAllowed(self, runtime):
    """True if runtime matches an allowlist entry (pattern or exact name)."""
    for entry in self._GetAllowlist():
      try:
        # Compiled regex entries expose .match; string entries do not.
        if entry.match(runtime):
          return True
      except AttributeError:
        if runtime == entry:
          return True
    return False

  def ShouldUseRuntimeBuilders(self, runtime, needs_dockerfile):
    """Returns True if runtime should use runtime builders under this strategy.

    The ALWAYS strategy returns True, the ALLOWLIST_${TRACK} strategies return
    True when the runtime appears in the corresponding allowlist, and NEVER
    returns False.

    The 'custom' runtime is special: for every strategy except NEVER, runtime
    builders are used only when no `Dockerfile` exists in the source directory
    (the caller guarantees there is either a `Dockerfile` or a
    `cloudbuild.yaml`), since one then has to be generated by the Cloud Build.

    Args:
      runtime: str, the runtime being built.
      needs_dockerfile: bool, whether the Dockerfile in the source directory is
        absent.

    Returns:
      bool, whether to use the runtime builders.

    Raises:
      ValueError: if an unrecognized runtime_builder_strategy is given
    """
    builder_capable = (self.ALWAYS, self.ALLOWLIST_BETA, self.ALLOWLIST_GA)
    if runtime == 'custom' and self in builder_capable:
      return needs_dockerfile
    if self is self.ALWAYS:
      return True
    if self is self.ALLOWLIST_BETA or self is self.ALLOWLIST_GA:
      return self._IsAllowed(runtime)
    if self is self.NEVER:
      return False
    raise ValueError('Invalid runtime builder strategy [{}].'.format(self))
def _Join(*args):
  """Join parts of a gs:// Cloud Storage or local file:// path."""
  # URIs always use '/' as separator, regardless of the local platform.
  trimmed = [part.strip('/') for part in args]
  return '/'.join(trimmed)
@contextlib.contextmanager
def _Read(uri):
  """Read a file/object (local file:// or gs:// Cloud Storage path).

  >>> with _Read('gs://builder/object.txt') as f:
  ...   assert f.read() == 'foo'
  >>> with _Read('file:///path/to/object.txt') as f:
  ...   assert f.read() == 'bar'

  Args:
    uri: str, the path to the file/object to read. Must begin with 'file://' or
      'gs://'

  Yields:
    a file-like context manager.

  Raises:
    FileReadError: If opening or reading the file failed.
    InvalidRuntimeBuilderURI: If the URI scheme is neither 'file://' nor
      'gs://'.
  """
  try:
    if uri.startswith('file://'):
      # urlopen handles file:// URIs, including percent-decoding of the path.
      with contextlib.closing(six.moves.urllib.request.urlopen(uri)) as req:
        yield req
    elif uri.startswith('gs://'):
      storage_client = storage_api.StorageClient()
      object_ = storage_util.ObjectReference.FromUrl(uri)
      with contextlib.closing(storage_client.ReadObject(object_)) as f:
        yield f
    else:
      raise InvalidRuntimeBuilderURI(uri)
  except (six.moves.urllib.error.HTTPError, six.moves.urllib.error.URLError,
          calliope_exceptions.BadFileException) as e:
    # Transport-level failures are normalized to FileReadError; the full
    # traceback is preserved at debug verbosity for troubleshooting.
    log.debug('', exc_info=True)
    raise FileReadError(six.text_type(e))
class BuilderReference(object):
  """A reference to a specific cloudbuild.yaml file to use."""

  def __init__(self, runtime, build_file_uri, deprecation_message=None):
    """Constructs a BuilderReference.

    Args:
      runtime: str, The runtime this builder corresponds to.
      build_file_uri: str, The full URI of the build configuration or None if
        this runtime existed but no longer can be built (deprecated).
      deprecation_message: str, A message to print when using this builder or
        None if not deprecated.
    """
    self.runtime = runtime
    self.build_file_uri = build_file_uri
    self.deprecation_message = deprecation_message

  def LoadCloudBuild(self, params):
    """Loads the Cloud Build configuration file for this builder reference.

    Args:
      params: dict, a dictionary of values to be substituted in to the
        Cloud Build configuration template corresponding to this runtime
        version.

    Returns:
      Build message, the parsed and parameterized Cloud Build configuration
      file.

    Raises:
      CloudBuildLoadError: If this reference has no build file URI (i.e. the
        runtime is deprecated and can no longer be built).
      FileReadError: If reading the configuration file fails.
      InvalidRuntimeBuilderURI: If the URI of the configuration file is
        invalid.
    """
    if not self.build_file_uri:
      raise CloudBuildLoadError(
          'There is no build file associated with runtime [{runtime}]'
          .format(runtime=self.runtime))
    messages = cloudbuild_util.GetMessagesModule()
    with _Read(self.build_file_uri) as data:
      build = cloudbuild_config.LoadCloudbuildConfigFromStream(
          data, messages=messages, params=params)
    if build.options is None:
      build.options = messages.BuildOptions()
    # Allow substitution variables that do not appear in the template.
    build.options.substitutionOption = (
        build.options.SubstitutionOptionValueValuesEnum.ALLOW_LOOSE)
    # Ensure every build step receives the application yaml path and (when
    # supplied) the runtime version, unless the step already sets them.
    for step in build.steps:
      has_yaml_path = False
      has_runtime_version = False
      for env in step.env:
        parts = env.split('=')
        log.debug('Env var in build step: ' + str(parts))
        # NOTE(review): this checks membership across all '='-split segments,
        # so an env var *value* equal to one of these names also matches --
        # presumably only the variable name (parts[0]) is intended; confirm
        # before tightening.
        if 'GAE_APPLICATION_YAML_PATH' in parts:
          has_yaml_path = True
        if 'GOOGLE_RUNTIME_VERSION' in parts:
          has_runtime_version = True
      if not has_yaml_path:
        step.env.append(
            'GAE_APPLICATION_YAML_PATH=${_GAE_APPLICATION_YAML_PATH}')
      if not has_runtime_version and '_GOOGLE_RUNTIME_VERSION' in params:
        step.env.append('GOOGLE_RUNTIME_VERSION=${_GOOGLE_RUNTIME_VERSION}')
    return build

  def WarnIfDeprecated(self):
    """Warns that this runtime is deprecated (if it has been marked as such)."""
    if self.deprecation_message:
      log.warning(self.deprecation_message)

  def __eq__(self, other):
    return (self.runtime == other.runtime and
            self.build_file_uri == other.build_file_uri and
            self.deprecation_message == other.deprecation_message)

  def __ne__(self, other):
    # Explicit inverse of __eq__ for Python 2 compatibility.
    return not self.__eq__(other)
class Manifest(object):
  """Loads and parses a runtimes.yaml manifest.

  To resolve a builder configuration file to use, a given runtime name is
  looked up in this manifest. For each runtime, it either points to a
  configuration file directly, or to another runtime. If it points to a runtime,
  resolution continues until a configuration file is reached.

  The following is the proto-ish spec for the yaml schema of the mainfest:

  # Used to determine if this client can parse this manifest. If the number is
  # less than or equal to the version this client knows about, it is compatible.
  int schema_version; # Required

  # The registry of all the runtimes that this manifest defines. The key of the
  # map is the runtime name that appears in app.yaml.
  <string, Runtime> runtimes {

    # Determines which builder this runtime points to.
    Target target {

      oneof {
        # A path relative to the manifest's location of the builder spec to use.
        string file;

        # Another runtime registered in this file that should be resolved and
        # used for this runtime.
        string runtime;
      }
    }

    # Specifies deprecation information about this runtime.
    Deprecation deprecation {

      # A message to be displayed to the user on use of this runtime.
      string message;
    }
  }
  """
  # Highest manifest schema_version this client can parse.
  SCHEMA_VERSION = 1

  @classmethod
  def LoadFromURI(cls, uri):
    """Loads a manifest from a gs:// or file:// path.

    Args:
      uri: str, A gs:// or file:// URI

    Returns:
      Manifest, the loaded manifest.
    """
    log.debug('Loading runtimes manifest from [%s]', uri)
    with _Read(uri) as f:
      data = yaml.load(f, file_hint=uri)
    return cls(uri, data)

  def __init__(self, uri, data):
    """Use LoadFromURI, not this constructor directly."""
    self._uri = uri
    self._data = data
    required_version = self._data.get('schema_version', None)
    if required_version is None:
      raise ManifestError(
          'Unable to parse the runtimes manifest: [{}]'.format(uri))
    if required_version > Manifest.SCHEMA_VERSION:
      raise ManifestError(
          'Unable to parse the runtimes manifest. Your client supports schema '
          'version [{supported}] but requires [{required}]. Please update your '
          'SDK to a later version.'.format(supported=Manifest.SCHEMA_VERSION,
                                          required=required_version))

  def Runtimes(self):
    """Get all registered runtimes in the manifest.

    Returns:
      [str], The runtime names.
    """
    return list(self._data.get('runtimes', {}).keys())

  def GetBuilderReference(self, runtime):
    """Gets the associated reference for the given runtime.

    Follows runtime -> runtime aliases until a build file (or dead end) is
    reached, guarding against alias cycles.

    Args:
      runtime: str, The name of the runtime.

    Returns:
      BuilderReference, The reference pointed to by the manifest, or None if the
      runtime is not registered.

    Raises:
      ManifestError: if a problem occurred parsing the manifest.
    """
    runtimes = self._data.get('runtimes', {})
    current_runtime = runtime
    # Runtimes already visited along the alias chain, for cycle detection.
    seen = {current_runtime}
    while True:
      runtime_def = runtimes.get(current_runtime, None)
      if not runtime_def:
        log.debug('Runtime [%s] not found in manifest [%s]',
                  current_runtime, self._uri)
        return None
      new_runtime = runtime_def.get('target', {}).get('runtime', None)
      if new_runtime:
        # Runtime is an alias for another runtime, resolve the alias.
        log.debug('Runtime [%s] is an alias for [%s]',
                  current_runtime, new_runtime)
        if new_runtime in seen:
          raise ManifestError(
              'A circular dependency was found while resolving the builder for '
              'runtime [{runtime}]'.format(runtime=runtime))
        seen.add(new_runtime)
        current_runtime = new_runtime
        continue
      deprecation_msg = runtime_def.get('deprecation', {}).get('message', None)
      build_file = runtime_def.get('target', {}).get('file', None)
      if build_file:
        # This points to a build configuration file, create the reference.
        # NOTE(review): os.path.dirname is applied to a URI here; this relies
        # on '/'-separated URIs being handled by the platform path module.
        full_build_uri = _Join(os.path.dirname(self._uri), build_file)
        log.debug('Resolved runtime [%s] as build configuration [%s]',
                  current_runtime, full_build_uri)
        return BuilderReference(
            current_runtime, full_build_uri, deprecation_msg)
      # There is no alias or build file. This means the runtime exists, but
      # cannot be used. There might still be a deprecation message we can show
      # to the user.
      log.debug('Resolved runtime [%s] has no build configuration',
                current_runtime)
      return BuilderReference(current_runtime, None, deprecation_msg)
class Experiments(object):
  """Runtime experiment configs as read from a gs:// or a file:// source.

  The experiment config file follows the following protoish schema:

  # Used to determine if this client can parse this manifest. If the number is
  # less than or equal to the version this client knows about, it is compatible.
  int schema_version; # Required

  # Map of experiments and their rollout percentage.
  # The key is the name of the experiment, the value is an integer between 0
  # and 100 representing the rollout percentage
  # In case no experiments are defined, an empty 'experiments:' section needs to
  # be present.
  <String, Number> experiments
  """
  # Highest experiment-config schema_version this client can parse.
  SCHEMA_VERSION = 1
  CONFIG_FILE = 'experiments.yaml'
  TRIGGER_BUILD_SERVER_SIDE = 'trigger_build_server_side'

  @classmethod
  def LoadFromURI(cls, dir_uri):
    """Loads a runtime experiment config from a gs:// or file:// path.

    Args:
      dir_uri: str, A gs:// or file:// URI pointing to a folder that contains
        the file called Experiments.CONFIG_FILE

    Returns:
      Experiments, the loaded runtime experiments config.

    Raises:
      ExperimentsError: if the config could not be read or parsed.
    """
    uri = _Join(dir_uri, cls.CONFIG_FILE)
    log.debug('Loading runtimes experiment config from [%s]', uri)
    try:
      with _Read(uri) as f:
        data = yaml.load(f, file_hint=uri)
      return cls(uri, data)
    except (FileReadError, yaml.YAMLParseError) as e:
      # Both failure modes surface the same way to the caller.
      raise ExperimentsError(
          'Unable to read the runtimes experiment config: [{}], error: {}'
          .format(uri, e))

  def __init__(self, uri, data):
    """Use LoadFromURI, not this constructor directly."""
    self._uri = uri
    self._data = data
    required_version = self._data.get('schema_version', None)
    if required_version is None:
      raise ExperimentsError(
          'Unable to parse the runtimes experiment config due to missing '
          'schema_version field: [{}]'.format(uri))
    if required_version > Experiments.SCHEMA_VERSION:
      # Fixed copy-paste bug: previously reported Manifest.SCHEMA_VERSION as
      # the supported version instead of this class's own constant.
      raise ExperimentsError(
          'Unable to parse the runtimes experiments config. Your client '
          'supports schema version [{supported}] but requires [{required}]. '
          'Please update your SDK to a newer version.'.format(
              supported=Experiments.SCHEMA_VERSION,
              required=required_version))

  def Experiments(self):
    """Get all experiments and their rollout percentage.

    Returns:
      dict[str,int] Experiments and their rollout state, or None when the
      'experiments:' section is present but empty (YAML parses it as None).
    """
    return self._data.get('experiments')

  def GetExperimentPercentWithDefault(self, experiment, default=0):
    """Get the rollout percentage of an experiment or return 'default'.

    Args:
      experiment: the name of the experiment
      default: the value to return if the experiment was not found

    Returns:
      int, the rollout percent of the experiment.
    """
    # An empty 'experiments:' section parses to None; previously this raised
    # TypeError (None is not subscriptable) instead of falling back to the
    # default, since only KeyError was handled.
    experiments = self._data.get('experiments') or {}
    try:
      return experiments[experiment]
    except KeyError:
      return default
class Resolver(object):
  """Resolves the location of a builder configuration for a runtime.

  There are several possible locations that builder configuration can be found
  for a given runtime, and they are checked in order. Check GetBuilderReference
  for the locations checked.
  """
  # The name of the manifest in the builders root that registers the runtimes.
  MANIFEST_NAME = 'runtimes.yaml'
  # Manifest consulted instead of MANIFEST_NAME when use_flex_with_buildpacks
  # is set (build/run images produced through buildpacks).
  BUILDPACKS_MANIFEST_NAME = 'runtimes_buildpacks.yaml'
  # The name of the file in your local source for when you are using custom.
  CLOUDBUILD_FILE = 'cloudbuild.yaml'

  def __init__(self, runtime, source_dir, legacy_runtime_version,
               use_flex_with_buildpacks=False):
    """Instantiates a resolver.

    Args:
      runtime: str, The name of the runtime to be resolved.
      source_dir: str, The local path of the source code being deployed.
      legacy_runtime_version: str, The value from runtime_config.runtime_version
        in app.yaml. This is only used in legacy mode.
      use_flex_with_buildpacks: bool, if true, use the build-image and
        run-image built through buildpacks.

    Returns:
      Resolver, The instantiated resolver.
    """
    self.runtime = runtime
    self.source_dir = os.path.abspath(source_dir)
    self.legacy_runtime_version = legacy_runtime_version
    # Required property: the gs:// or file:// root under which manifests and
    # builder configuration files live.
    self.build_file_root = properties.VALUES.app.runtime_builders_root.Get(
        required=True)
    self.use_flex_with_buildpacks = use_flex_with_buildpacks
    log.debug('Using use_flex_with_buildpacks [%s]',
              self.use_flex_with_buildpacks)
    log.debug('Using runtime builder root [%s]', self.build_file_root)

  def GetBuilderReference(self):
    """Resolve the builder reference.

    Returns:
      BuilderReference, The reference to the builder configuration.

    Raises:
      BuilderResolveError: if this fails to resolve a builder.
    """
    # Try builder resolution in the following order, stopping once one is found.
    builder_def = (
        self._GetReferenceCustom() or
        self._GetReferencePinned() or
        self._GetReferenceFromManifest() or
        self._GetReferenceFromLegacy()
    )
    if not builder_def:
      raise BuilderResolveError(
          'Unable to resolve a builder for runtime: [{runtime}]'
          .format(runtime=self.runtime))
    return builder_def

  def _GetReferenceCustom(self):
    """Tries to resolve the reference for runtime: custom.

    If the user has an app.yaml with runtime: custom we will look in the root
    of their source directory for a custom build pipeline named cloudbuild.yaml.

    This should only be called if there is *not* a Dockerfile in the source
    root since that means they just want to build and deploy that Docker image.

    Returns:
      BuilderReference or None
    """
    if self.runtime == 'custom':
      log.debug('Using local cloud build file [%s] for custom runtime.',
                Resolver.CLOUDBUILD_FILE)
      # Normalize Windows separators and strip slashes so the file:/// URI is
      # well-formed regardless of platform.
      return BuilderReference(
          self.runtime,
          _Join('file:///' + self.source_dir.replace('\\', '/').strip('/'),
                Resolver.CLOUDBUILD_FILE))
    return None

  def _GetReferencePinned(self):
    """Tries to resolve the reference for when a runtime is pinned.

    Usually a runtime is looked up in the manifest and resolved to a
    configuration file. The user does have the option of 'pinning' their build
    to a specific configuration by specifying the absolute path to a builder
    in the runtime field.

    Returns:
      BuilderReference or None
    """
    if self.runtime.startswith('gs://'):
      log.debug('Using pinned cloud build file [%s].', self.runtime)
      return BuilderReference(self.runtime, self.runtime)
    return None

  def _GetReferenceFromManifest(self):
    """Tries to resolve the reference by looking up the runtime in the manifest.

    Calculate the location of the manifest based on the builder root and load
    that data. Then try to resolve a reference based on the contents of the
    manifest.

    Returns:
      BuilderReference or None
    """
    manifest_file_name = (
        Resolver.BUILDPACKS_MANIFEST_NAME
        if self.use_flex_with_buildpacks
        else Resolver.MANIFEST_NAME)
    manifest_uri = _Join(self.build_file_root, manifest_file_name)
    log.debug('Using manifest_uri [%s]', manifest_uri)
    try:
      manifest = Manifest.LoadFromURI(manifest_uri)
      return manifest.GetBuilderReference(self.runtime)
    except FileReadError:
      # A missing/unreadable manifest is not fatal here; fall through to the
      # legacy resolution path.
      log.debug('', exc_info=True)
      return None

  def _GetReferenceFromLegacy(self):
    """Tries to resolve the reference by the legacy resolution process.

    TODO(b/37542861): This can be removed after all runtimes have been migrated
    to publish their builders in the manifest instead of <runtime>.version
    files.

    If the runtime is not found in the manifest, use legacy resolution. If the
    app.yaml contains a runtime_config.runtime_version, this loads the file from
    '<runtime>-<version>.yaml' in the runtime builders root. Otherwise, it
    checks '<runtime>.version' to get the default version, and loads the
    configuration for that version.

    Returns:
      BuilderReference or None
    """
    if self.legacy_runtime_version:
      # We already have a pinned version specified, just use that file.
      return self._GetReferenceFromLegacyWithVersion(
          self.legacy_runtime_version)
    log.debug('Fetching version for runtime [%s] in legacy mode', self.runtime)
    version_file_name = self.runtime + '.version'
    version_file_uri = _Join(self.build_file_root, version_file_name)
    try:
      with _Read(version_file_uri) as f:
        # _Read yields bytes; decode before stripping the trailing newline.
        version = f.read().decode().strip()
    except FileReadError:
      log.debug('', exc_info=True)
      return None
    # Now that we resolved the default version, use that for the file.
    log.debug('Using version [%s] for runtime [%s] in legacy mode',
              version, self.runtime)
    return self._GetReferenceFromLegacyWithVersion(version)

  def _GetReferenceFromLegacyWithVersion(self, version):
    """Gets the name of configuration file to use for legacy mode.

    Args:
      version: str, The pinned version of the configuration file.

    Returns:
      BuilderReference
    """
    file_name = '-'.join([self.runtime, version]) + '.yaml'
    file_uri = _Join(self.build_file_root, file_name)
    log.debug('Calculated builder definition using legacy version [%s]',
              file_uri)
    return BuilderReference(self.runtime, file_uri)
def FromServiceInfo(service, source_dir, use_flex_with_buildpacks=False):
  """Constructs a BuilderReference from a ServiceYamlInfo.

  Args:
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, the source containing the application directory to build.
    use_flex_with_buildpacks: bool, if true, use the build-image and
      run-image built through buildpacks.

  Returns:
    RuntimeBuilderVersion for the service.
  """
  runtime_config = service.parsed.runtime_config
  # Only legacy deployments carry runtime_config.runtime_version.
  legacy_version = None
  if runtime_config:
    legacy_version = runtime_config.get('runtime_version', None)
  resolver = Resolver(service.runtime, source_dir, legacy_version,
                      use_flex_with_buildpacks)
  return resolver.GetBuilderReference()

View File

@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines a registry for storing per-runtime information.
A registry is essentially a wrapper around a Python dict that stores a mapping
from (runtime, environment) to arbitrary data. Its main feature is that it
supports lookups by matching both the runtime and the environment.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from six.moves import map # pylint:disable=redefined-builtin
class RegistryEntry(object):
  """An entry in the Registry.

  Attributes:
    runtime: str or re.RegexObject, the runtime to be staged
    envs: set(env.Environment), the environments to be staged
  """

  def __init__(self, runtime, envs):
    self.runtime = runtime
    self.envs = envs

  def _RuntimeMatches(self, runtime):
    # A compiled regex exposes match(); a plain string is compared by
    # equality instead.
    try:
      matcher = self.runtime.match
    except AttributeError:
      return self.runtime == runtime
    return matcher(runtime)

  def _EnvMatches(self, env):
    return env in self.envs

  def Matches(self, runtime, env):
    """Returns True iff the given runtime and environment match this entry.

    The runtime matches if it is an exact string match.

    The environment matches if it is an exact Enum match or if this entry has a
    "wildcard" (that is, None) for the environment.

    Args:
      runtime: str, the runtime to match
      env: env.Environment, the environment to match

    Returns:
      bool, whether the given runtime and environment match.
    """
    return self._RuntimeMatches(runtime) and self._EnvMatches(env)

  def __hash__(self):
    # Environments are unorderable and sets are unhashable, so fold the
    # member hashes with an order-independent sum.
    env_hash = sum(hash(e) for e in self.envs)
    return hash((self.runtime, env_hash))

  def __eq__(self, other):
    return (self.runtime, self.envs) == (other.runtime, other.envs)

  def __ne__(self, other):
    return not self == other
class Registry(object):
  """A registry to store values for various runtimes and environments.

  The registry is a map from (runtime, app-engine-environment) to
  user-specified values. As an example, storing Booleans for different
  runtimes/environments would look like:

  REGISTRY = {
      RegistryEntry('php72', {env.STANDARD}): True,
      RegistryEntry('php55', {env.STANDARD}): False,
      RegistryEntry('nodejs8', {env.FLEX}): False,
  }

  Attributes:
    mappings: dict, where keys are RegistryEntry objects and values can be
      of any type
    override: object or None; if not None, this value will always be returned
      by Get()
    default: object or None; if specified, will be returned if Get() could not
      find a matching registry entry
  """

  def __init__(self, mappings=None, override=None, default=None):
    self.mappings = mappings or {}
    self.override = override
    self.default = default

  def Get(self, runtime, env):
    """Return the associated value for the given runtime/environment.

    Args:
      runtime: str, the runtime to get a stager for
      env: env, the environment to get a stager for

    Returns:
      object, the matching entry, or override if one was specified. If no
      match is found, will return default if specified or None otherwise.
    """
    # BUG FIX: compare against None rather than truthiness so that falsy
    # overrides (False, 0, '') are still honored, matching the documented
    # contract that a specified override is always returned.
    if self.override is not None:
      return self.override
    for entry, value in self.mappings.items():
      if entry.Matches(runtime, env):
        return value
    # self.default is None when no default was supplied, which collapses the
    # old if/else into a single return.
    return self.default

View File

@@ -0,0 +1,235 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package containing fingerprinting for all runtimes.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from gae_ext_runtime import ext_runtime
from googlecloudsdk.api_lib.app import ext_runtime_adapter
from googlecloudsdk.api_lib.app.runtimes import python
from googlecloudsdk.api_lib.app.runtimes import python_compat
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
# Fingerprinters probed in order by IdentifyDirectory; each entry exposes
# NAME, ALLOWED_RUNTIME_NAMES and Fingerprint().
RUNTIMES = [
    # Note that ordering of runtimes here is very important and changes to the
    # relative positions need to be tested carefully.
    # Custom comes first, if we've got a Dockerfile this is a custom runtime.
    ext_runtime_adapter.CoreRuntimeLoader('custom', 'Custom',
                                          ['custom']),
    # Go's position is relatively flexible due to its orthogonal nature.
    ext_runtime_adapter.CoreRuntimeLoader('go', 'Go', ['go', 'custom']),
    ext_runtime_adapter.CoreRuntimeLoader('ruby', 'Ruby', ['ruby', 'custom']),
    ext_runtime_adapter.CoreRuntimeLoader('nodejs', 'Node.js',
                                          ['nodejs', 'custom']),
    ext_runtime_adapter.CoreRuntimeLoader('java', 'Java',
                                          ['java', 'java7', 'custom']),
    # python_compat is a module (not a CoreRuntimeLoader) providing the same
    # NAME/ALLOWED_RUNTIME_NAMES/Fingerprint interface.
    python_compat,
    # Python and PHP are last because they match if any .py or .php file is
    # present.
    ext_runtime_adapter.CoreRuntimeLoader('python', 'Python',
                                          ['python', 'custom']),
    ext_runtime_adapter.CoreRuntimeLoader('php', 'PHP', ['php', 'custom']),
]
class UnidentifiedDirectoryError(exceptions.Error):
  """Raised when GenerateConfigs() can't identify the directory."""

  def __init__(self, path):
    """Constructor.

    Args:
      path: (basestring) Directory we failed to identify.
    """
    message = 'Unrecognized directory type: [{0}]'.format(path)
    super(UnidentifiedDirectoryError, self).__init__(message)
    # Keep the offending path so callers can report or retry it.
    self.path = path
class ExtRuntimeError(exceptions.Error):
  """ext_runtime.Error errors are converted to this (gcloud-facing) error."""


class ConflictingConfigError(exceptions.Error):
  """Property in app.yaml conflicts with params passed to fingerprinter."""
class AlterConfigFileError(exceptions.Error):
  """Error when attempting to update an existing config file (app.yaml)."""

  def __init__(self, inner_exception):
    # Surface the underlying failure while telling the user to edit app.yaml
    # by hand, since the automatic alteration could not be completed.
    super(AlterConfigFileError, self).__init__(
        'Could not alter app.yaml due to an internal error:\n{0}\n'
        'Please update app.yaml manually.'.format(inner_exception))
def IdentifyDirectory(path, params=None):
  """Try to identify the given directory.

  As a side-effect, if there is a config file in 'params' with a runtime of
  'custom', this sets params.custom to True.

  Args:
    path: (basestring) Root directory to identify.
    params: (ext_runtime.Params or None) Parameters passed through to the
      fingerprinters. Uses defaults if not provided.

  Raises:
    ExtRuntimeError: if a fingerprinter raised ext_runtime.Error.

  Returns:
    (ext_runtime.Configurator or None) Returns a module if we've identified
    it, None if not.
  """
  if not params:
    params = ext_runtime.Params()
  # Parameter runtime has precedence
  if params.runtime:
    specified_runtime = params.runtime
  elif params.appinfo:
    specified_runtime = params.appinfo.GetEffectiveRuntime()
  else:
    specified_runtime = None
  if specified_runtime == 'custom':
    params.custom = True
  for runtime in RUNTIMES:
    # If we have an app.yaml, don't fingerprint for any runtimes that don't
    # allow the runtime name it specifies.
    if (specified_runtime and runtime.ALLOWED_RUNTIME_NAMES and
        specified_runtime not in runtime.ALLOWED_RUNTIME_NAMES):
      log.info('Not checking for [%s] because runtime is [%s]' %
               (runtime.NAME, specified_runtime))
      continue
    try:
      configurator = runtime.Fingerprint(path, params)
    except ext_runtime.Error as ex:
      # BUG FIX: Exception.message was removed in Python 3 (PEP 352); use
      # str(ex) to get the message text portably.
      raise ExtRuntimeError(str(ex))
    if configurator:
      return configurator
  return None
def _GetModule(path, params=None, config_filename=None):
  """Helper function for generating configs.

  Args:
    path: (basestring) Root directory to identify.
    params: (ext_runtime.Params or None) Parameters passed through to the
      fingerprinters. Uses defaults if not provided.
    config_filename: (str or None) Filename of the config file (app.yaml).

  Raises:
    UnidentifiedDirectoryError: No runtime module matched the directory.
    ConflictingConfigError: Current app.yaml conflicts with other params.

  Returns:
    ext_runtime.Configurator, the configurator for the path
  """
  if not params:
    params = ext_runtime.Params()
  config = params.appinfo
  # An app.yaml exists, results in a lot more cases
  if config and not params.deploy:
    # Enforce --custom
    if not params.custom:
      raise ConflictingConfigError(
          'Configuration file already exists. This command generates an '
          'app.yaml configured to run an application on Google App Engine. '
          'To create the configuration files needed to run this '
          'application with docker, try `gcloud preview app gen-config '
          '--custom`.')
    # Check that current config is for MVM
    if not config.IsVm():
      raise ConflictingConfigError(
          'gen-config is only supported for App Engine Flexible. Please '
          'use "vm: true" in your app.yaml if you would like to use App Engine '
          'Flexible to run your application.')
    # Check for conflicting --runtime and runtime in app.yaml
    if (config.GetEffectiveRuntime() != 'custom' and params.runtime is not None
        and params.runtime != config.GetEffectiveRuntime()):
      # BUG FIX: corrected typo "conficts" in the user-facing error message.
      raise ConflictingConfigError(
          '[{0}] contains "runtime: {1}" which conflicts with '
          '--runtime={2}.'.format(config_filename, config.GetEffectiveRuntime(),
                                  params.runtime))
  module = IdentifyDirectory(path, params)
  if not module:
    raise UnidentifiedDirectoryError(path)
  return module
def GenerateConfigs(path, params=None, config_filename=None):
  """Identify runtime and generate config files for a directory.

  If a runtime can be identified for the given directory, calls the runtime's
  GenerateConfigs method, which writes configs to the directory.

  Args:
    path: (basestring) Root directory to identify.
    params: (ext_runtime.Params or None) Parameters passed through to the
      fingerprinters. Uses defaults if not provided.
    config_filename: (str or None) Filename of the config file (app.yaml).

  Raises:
    ExtRuntimeError: if there was an error generating configs

  Returns:
    (bool): True if files were written
  """
  module = _GetModule(path, params=params, config_filename=config_filename)
  try:
    return module.GenerateConfigs()
  except ext_runtime.Error as ex:
    # BUG FIX: Exception.message was removed in Python 3 (PEP 352).
    raise ExtRuntimeError(str(ex))
def GenerateConfigData(path, params=None, config_filename=None):
  """Identify runtime and generate contents of config files for a directory.

  If a runtime can be identified for the given directory, calls the runtime's
  GenerateConfigData method, which generates the contents of config files.

  Args:
    path: (basestring) Root directory to identify.
    params: (ext_runtime.Params or None) Parameters passed through to the
      fingerprinters. Uses defaults if not provided.
    config_filename: (str or None) Filename of the config file (app.yaml).

  Raises:
    ExtRuntimeError: if there was an error generating configs

  Returns:
    [ext_runtime.GeneratedFile] generated config files.
  """
  module = _GetModule(path, params=params, config_filename=config_filename)
  try:
    return module.GenerateConfigData()
  except ext_runtime.Error as ex:
    # BUG FIX: Exception.message was removed in Python 3 (PEP 352).
    raise ExtRuntimeError(str(ex))

View File

@@ -0,0 +1,219 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fingerprinting code for the Go runtime."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import fnmatch
import os
import re
import textwrap
from gae_ext_runtime import ext_runtime
from googlecloudsdk.api_lib.app.images import config as images_config
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
import six
# Runtime registration constants used by the fingerprinting framework.
# (PEP 8 fix: added the missing space in the NAME assignment.)
NAME = 'go'
ALLOWED_RUNTIME_NAMES = ('go', 'custom')
GO_RUNTIME_NAME = 'go'

# app.yaml written for a detected Go app; {runtime} is 'go' or 'custom'.
GO_APP_YAML = textwrap.dedent("""\
    env: flex
    runtime: {runtime}
    api_version: go1
    """)

# Default .dockerignore contents for generated Docker builds.
DOCKERIGNORE = textwrap.dedent("""\
    .dockerignore
    Dockerfile
    .git
    .hg
    .svn
    """)

# Dockerfile generated for Go apps, extending the App Engine golang image.
DOCKERFILE = textwrap.dedent("""\
    # Dockerfile extending the generic Go image with application files for a
    # single application.
    FROM gcr.io/google_appengine/golang
    COPY . /go/src/app
    RUN go-wrapper install -tags appenginevm
    """)
class GoConfigurator(ext_runtime.Configurator):
  """Generates configuration for a Go app."""

  def __init__(self, path, params):
    """Constructor.

    Args:
      path: (str) Root path of the source tree.
      params: (ext_runtime.Params) Parameters passed through to the
        fingerprinters.
    """
    self.root = path
    self.params = params

  def GetAllConfigFiles(self):
    """Returns the GeneratedFile objects that do not yet exist on disk.

    Returns:
      list(ext_runtime.GeneratedFile), pending app.yaml / Dockerfile /
      .dockerignore files (Dockerfile and .dockerignore only when running
      with --custom or during deploy).
    """
    all_config_files = []

    # Generate app.yaml.
    if not self.params.appinfo:
      app_yaml_path = os.path.join(self.root, 'app.yaml')
      if not os.path.exists(app_yaml_path):
        runtime = 'custom' if self.params.custom else 'go'
        app_yaml_contents = GO_APP_YAML.format(runtime=runtime)
        app_yaml = ext_runtime.GeneratedFile('app.yaml', app_yaml_contents)
        all_config_files.append(app_yaml)

    if self.params.custom or self.params.deploy:
      dockerfile_path = os.path.join(self.root, images_config.DOCKERFILE)
      if not os.path.exists(dockerfile_path):
        dockerfile = ext_runtime.GeneratedFile(images_config.DOCKERFILE,
                                               DOCKERFILE)
        all_config_files.append(dockerfile)

      # Generate .dockerignore
      dockerignore_path = os.path.join(self.root, '.dockerignore')
      if not os.path.exists(dockerignore_path):
        dockerignore = ext_runtime.GeneratedFile('.dockerignore', DOCKERIGNORE)
        all_config_files.append(dockerignore)

    return all_config_files

  def GenerateConfigs(self):
    """Generate config files for the module.

    Returns:
      (bool) True if files were created
    """
    # Write "Writing file" messages to the user or to log depending on whether
    # we're in "deploy."
    if self.params.deploy:
      notify = log.info
    else:
      notify = log.status.Print

    cfg_files = self.GetAllConfigFiles()
    created = False
    for cfg_file in cfg_files:
      if cfg_file.WriteTo(self.root, notify):
        created = True
    if not created:
      notify('All config files already exist, not generating anything.')
    return created

  def GenerateConfigData(self):
    """Generate config files for the module.

    Returns:
      list(ext_runtime.GeneratedFile) list of generated files.
    """
    # Write "Writing file" messages to the user or to log depending on whether
    # we're in "deploy."
    if self.params.deploy:
      notify = log.info
    else:
      notify = log.status.Print

    cfg_files = self.GetAllConfigFiles()
    # app.yaml is always written to disk; the remaining files are returned
    # as in-memory data only when they do not already exist.
    for cfg_file in cfg_files:
      if cfg_file.filename == 'app.yaml':
        cfg_file.WriteTo(self.root, notify)
    final_cfg_files = []
    for f in cfg_files:
      if f.filename != 'app.yaml' and not os.path.exists(
          os.path.join(self.root, f.filename)):
        final_cfg_files.append(f)
    return final_cfg_files
def _GoFiles(path):
  """Return list of '*.go' files under directory 'path'.

  Note that os.walk by default performs a top-down search, so files higher in
  the directory tree appear before others.

  Args:
    path: (str) Application path.

  Returns:
    ([str, ...]) List of full pathnames for all '*.go' files under 'path' dir.
  """
  return [
      os.path.join(dirpath, name)
      for dirpath, _, names in os.walk(six.text_type(path))
      for name in fnmatch.filter(names, '*.go')
  ]
def _FindMain(filename):
  """Check filename for 'package main' and 'func main'.

  Args:
    filename: (str) File name to check.

  Returns:
    (bool) True if main is found in filename.
  """
  found_package = False
  found_func = False
  with files.FileReader(filename) as source:
    for line in source:
      # Both markers are anchored at the start of a line, so a plain prefix
      # test is equivalent to the anchored regex.
      if line.startswith('package main'):
        found_package = True
      elif line.startswith('func main'):
        found_func = True
      if found_package and found_func:
        return True
  return False
def Fingerprint(path, params):
  """Check for a Go app.

  Args:
    path: (str) Application path.
    params: (ext_runtime.Params) Parameters passed through to the
      fingerprinters.

  Returns:
    (GoConfigurator or None) Returns a module if the path contains a
    Go app.
  """
  log.info('Checking for Go.')

  # Test #1 - are there any '*.go' files at or below 'path'?
  go_files = _GoFiles(path)
  if not go_files:
    return None

  # Test #2 - check that one of these files has "package main" and "func main".
  for candidate in go_files:
    if _FindMain(candidate):
      log.info('Found Go main in %s', candidate)
      return GoConfigurator(path, params)
  return None

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Old fingerprinting module for the Java runtime.
This file is almost dead. It currently just contains constants that we use in
runtimes_test, which should also mostly go away.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
JAVA_APP_YAML = textwrap.dedent("""\
env: flex
runtime: {runtime}
""")
DOCKERIGNORE = textwrap.dedent("""\
.dockerignore
Dockerfile
.git
.hg
.svn
app.yaml
""")
DOCKERFILE_JAVA8_PREAMBLE = 'FROM gcr.io/google_appengine/openjdk8\n'
DOCKERFILE_JETTY9_PREAMBLE = 'FROM gcr.io/google_appengine/jetty9\n'
DOCKERFILE_JAVA_PREAMBLE = 'FROM gcr.io/google_appengine/openjdk\n'
DOCKERFILE_JETTY_PREAMBLE = 'FROM gcr.io/google_appengine/jetty\n'
DOCKERFILE_LEGACY_PREAMBLE = 'FROM gcr.io/google_appengine/java-compat\n'
DOCKERFILE_COMPAT_PREAMBLE = 'FROM gcr.io/google_appengine/jetty9-compat\n'
DOCKERFILE_JAVA8_JAR_CMD = 'CMD ["java", "-jar", "/app/{0}"]\n'
DOCKERFILE_INSTALL_APP = 'ADD {0} /app/\n'
DOCKERFILE_INSTALL_WAR = 'ADD {0} $JETTY_BASE/webapps/root.war\n'

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fingerprinting code for the node.js runtime.
WARNING WARNING WARNING: this file will shortly be removed. Don't make any
changes here. See ./ext_runtimes/runtime_defs/nodejs instead.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
# TODO(b/36050883): move these into the node_app directory.
NODEJS_APP_YAML = textwrap.dedent("""\
env: flex
runtime: {runtime}
""")
DOCKERIGNORE = textwrap.dedent("""\
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
node_modules
.dockerignore
Dockerfile
npm-debug.log
yarn-error.log
.git
.hg
.svn
""")

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fingerprinting code for the Python runtime."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
DOCKERFILE_PREAMBLE = 'FROM gcr.io/google-appengine/python\n'
DOCKERFILE_VIRTUALENV_TEMPLATE = textwrap.dedent("""\
LABEL python_version=python{python_version}
RUN virtualenv --no-download /env -p python{python_version}
# Set virtualenv environment variables. This is equivalent to running
# source /env/bin/activate
ENV VIRTUAL_ENV /env
ENV PATH /env/bin:$PATH
""")
DOCKERFILE_REQUIREMENTS_TXT = textwrap.dedent("""\
ADD requirements.txt /app/
RUN pip install -r requirements.txt
""")
DOCKERFILE_INSTALL_APP = 'ADD . /app/\n'

View File

@@ -0,0 +1,197 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fingerprinting code for the Python runtime."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import textwrap
from gae_ext_runtime import ext_runtime
from googlecloudsdk.api_lib.app.images import config
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
NAME = 'Python Compat'
ALLOWED_RUNTIME_NAMES = ('python27', 'python-compat')
PYTHON_RUNTIME_NAME = 'python27'
PYTHON_APP_YAML = textwrap.dedent("""\
env: flex
runtime: {runtime}
api_version: 1
threadsafe: false
# You must add a handlers section here. Example:
# handlers:
# - url: .*
# script: main.app
""")
APP_YAML_WARNING = ('app.yaml has been generated, but needs to be provided a '
'"handlers" section.')
DOCKERIGNORE = textwrap.dedent("""\
.dockerignore
Dockerfile
.git
.hg
.svn
""")
COMPAT_DOCKERFILE_PREAMBLE = (
'FROM gcr.io/google_appengine/python-compat-multicore\n')
PYTHON27_DOCKERFILE_PREAMBLE = 'FROM gcr.io/google_appengine/python-compat\n'
DOCKERFILE_INSTALL_APP = 'ADD . /app/\n'
# TODO(b/36057458): Do the check for requirements.txt in the source inspection
# and don't generate the pip install if it doesn't exist.
DOCKERFILE_INSTALL_REQUIREMENTS_TXT = (
'RUN if [ -s requirements.txt ]; then pip install -r requirements.txt; '
'fi\n')
class PythonConfigurator(ext_runtime.Configurator):
  """Generates configuration for a Python application."""

  def __init__(self, path, params, runtime):
    """Constructor.

    Args:
      path: (str) Root path of the source tree.
      params: (ext_runtime.Params) Parameters passed through to the
        fingerprinters.
      runtime: (str) The runtime name.
    """
    self.root = path
    self.params = params
    self.runtime = runtime

  def GenerateAppYaml(self, notify):
    """Generate app.yaml.

    Args:
      notify: depending on whether we're in deploy, write messages to the
        user or to log.

    Returns:
      (bool) True if file was written

    Note: this is not a recommended use-case,
    python-compat users likely have an existing app.yaml. But users can
    still get here with the --runtime flag.
    """
    if not self.params.appinfo:
      app_yaml = os.path.join(self.root, 'app.yaml')
      if not os.path.exists(app_yaml):
        notify('Writing [app.yaml] to [%s].' % self.root)
        runtime = 'custom' if self.params.custom else self.runtime
        files.WriteFileContents(app_yaml,
                                PYTHON_APP_YAML.format(runtime=runtime))
        # The generated file lacks a handlers section; warn so the user
        # completes it before deploying.
        log.warning(APP_YAML_WARNING)
        return True
    return False

  def GenerateDockerfileData(self):
    """Generates dockerfiles.

    Returns:
      list(ext_runtime.GeneratedFile) the list of generated dockerfiles
    """
    if self.runtime == 'python-compat':
      dockerfile_preamble = COMPAT_DOCKERFILE_PREAMBLE
    else:
      dockerfile_preamble = PYTHON27_DOCKERFILE_PREAMBLE

    all_config_files = []

    dockerfile_name = config.DOCKERFILE
    dockerfile_components = [dockerfile_preamble, DOCKERFILE_INSTALL_APP]
    if self.runtime == 'python-compat':
      dockerfile_components.append(DOCKERFILE_INSTALL_REQUIREMENTS_TXT)
    # IDIOM: join the list directly instead of via a redundant generator
    # expression (''.join(c for c in ...)).
    dockerfile_contents = ''.join(dockerfile_components)
    dockerfile = ext_runtime.GeneratedFile(dockerfile_name,
                                           dockerfile_contents)
    all_config_files.append(dockerfile)

    dockerignore = ext_runtime.GeneratedFile('.dockerignore', DOCKERIGNORE)
    all_config_files.append(dockerignore)

    return all_config_files

  def GenerateConfigs(self):
    """Generate all config files for the module.

    Returns:
      (bool) True if any config file was written.
    """
    # Write messages to user or to log depending on whether we're in "deploy."
    notify = log.info if self.params.deploy else log.status.Print

    self.GenerateAppYaml(notify)

    created = False
    if self.params.custom or self.params.deploy:
      dockerfiles = self.GenerateDockerfileData()
      for dockerfile in dockerfiles:
        if dockerfile.WriteTo(self.root, notify):
          created = True
    if not created:
      notify('All config files already exist, not generating anything.')
    return created

  def GenerateConfigData(self):
    """Generate all config files for the module.

    Returns:
      list(ext_runtime.GeneratedFile) A list of the config files
      that were generated
    """
    # Write messages to user or to log depending on whether we're in "deploy."
    notify = log.info if self.params.deploy else log.status.Print

    self.GenerateAppYaml(notify)
    if not (self.params.custom or self.params.deploy):
      return []
    all_config_files = self.GenerateDockerfileData()
    # Only return files that do not already exist on disk.
    return [f for f in all_config_files
            if not os.path.exists(os.path.join(self.root, f.filename))]
def Fingerprint(path, params):
  """Check for a Python app.

  Args:
    path: (str) Application path.
    params: (ext_runtime.Params) Parameters passed through to the
      fingerprinters.

  Returns:
    (PythonConfigurator or None) Returns a module if the path contains a
    python app.
  """
  log.info('Checking for Python Compat.')
  # The only way we select these runtimes is if either the user has specified
  # it or a matching runtime is specified in the app.yaml.
  appinfo = params.appinfo
  requested_via_flag = bool(params.runtime)
  declared_in_yaml = bool(
      appinfo and appinfo.GetEffectiveRuntime() in ALLOWED_RUNTIME_NAMES)
  if not (requested_via_flag or declared_in_yaml):
    return None
  # Prefer the app.yaml declaration over the flag when both are present.
  runtime = appinfo.GetEffectiveRuntime() if appinfo else params.runtime
  log.info('Python Compat matches ([{0}] specified in "runtime" field)'.format(
      runtime))
  return PythonConfigurator(path, params, runtime)

View File

@@ -0,0 +1,545 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fingerprinting code for the Ruby runtime."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import re
import subprocess
import textwrap
from gae_ext_runtime import ext_runtime
from googlecloudsdk.api_lib.app import ext_runtime_adapter
from googlecloudsdk.api_lib.app.images import config
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import files
NAME = 'Ruby'
ALLOWED_RUNTIME_NAMES = ('ruby', 'custom')
# This should be kept in sync with the default Ruby version specified in
# the base docker image.
PREFERRED_RUBY_VERSION = '2.3.0'
# Keep these up to date. You can find the latest versions by visiting
# rubygems.org and searching for "bundler" and for "foreman".
# Checking about once every month or two should be sufficient.
# (Last checked 2016-01-08.)
BUNDLER_VERSION = '1.11.2'
FOREMAN_VERSION = '0.78.0'
# Mapping from Gemfile versions to rbenv versions with patchlevel.
# Keep this up to date. The canonical version list can be found at
# https://github.com/sstephenson/ruby-build/tree/master/share/ruby-build
# Find the highest patchlevel for each version. (At this point, we expect
# only 2.0.0 to need updating, since earlier versions are end-of-lifed, and
# later versions don't seem to be using patchlevels.)
# Checking about once a quarter should be sufficient.
# (Last checked 2016-01-08.)
RUBY_VERSION_MAP = {
'1.8.6': '1.8.6-p420',
'1.8.7': '1.8.7-p375',
'1.9.1': '1.9.1-p430',
'1.9.2': '1.9.2-p330',
'1.9.3': '1.9.3-p551',
'2.0.0': '2.0.0-p648'
}
# Mapping from gems to libraries they expect.
# We should add to this list as we find more common cases.
GEM_PACKAGES = {
'rgeo': ['libgeos-dev', 'libproj-dev']
}
APP_YAML_CONTENTS = textwrap.dedent("""\
env: flex
runtime: {runtime}
entrypoint: {entrypoint}
""")
DOCKERIGNORE_CONTENTS = textwrap.dedent("""\
.dockerignore
Dockerfile
.git
.hg
.svn
""")
DOCKERFILE_HEADER = textwrap.dedent("""\
# This Dockerfile for a Ruby application was generated by gcloud.
# The base Dockerfile installs:
# * A number of packages needed by the Ruby runtime and by gems
# commonly used in Ruby web apps (such as libsqlite3)
# * A recent version of NodeJS
# * A recent version of the standard Ruby runtime to use by default
# * The bundler and foreman gems
FROM gcr.io/google_appengine/ruby
""")
DOCKERFILE_DEFAULT_INTERPRETER = textwrap.dedent("""\
# This Dockerfile uses the default Ruby interpreter installed and
# specified by the base image.
# If you want to use a specific ruby interpreter, provide a
# .ruby-version file, then delete this Dockerfile and re-run
# "gcloud app gen-config --custom" to recreate it.
""")
DOCKERFILE_CUSTOM_INTERPRETER = textwrap.dedent("""\
# Install ruby {{0}} if not already preinstalled by the base image
RUN cd /rbenv/plugins/ruby-build && \\
git pull && \\
rbenv install -s {{0}} && \\
rbenv global {{0}} && \\
gem install -q --no-rdoc --no-ri bundler --version {0} && \\
gem install -q --no-rdoc --no-ri foreman --version {1}
ENV RBENV_VERSION {{0}}
""".format(BUNDLER_VERSION, FOREMAN_VERSION))
DOCKERFILE_MORE_PACKAGES = textwrap.dedent("""\
# Install additional package dependencies needed by installed gems.
# Feel free to add any more needed by your gems.
RUN apt-get update -y && \\
apt-get install -y -q --no-install-recommends \\
{0} \\
&& apt-get clean && rm /var/lib/apt/lists/*_*
""")
DOCKERFILE_NO_MORE_PACKAGES = textwrap.dedent("""\
# To install additional packages needed by your gems, uncomment
# the "RUN apt-get update" and "RUN apt-get install" lines below
# and specify your packages.
# RUN apt-get update
# RUN apt-get install -y -q (your packages here)
""")
DOCKERFILE_GEM_INSTALL = textwrap.dedent("""\
# Install required gems.
COPY Gemfile Gemfile.lock /app/
RUN bundle install --deployment && rbenv rehash
""")
DOCKERFILE_ENTRYPOINT = textwrap.dedent("""\
# Start application on port 8080.
COPY . /app/
ENTRYPOINT {0}
""")
ENTRYPOINT_FOREMAN = 'foreman start web -p 8080'
ENTRYPOINT_PUMA = 'bundle exec puma -p 8080 -e deployment'
ENTRYPOINT_UNICORN = 'bundle exec unicorn -p 8080 -E deployment'
ENTRYPOINT_RACKUP = 'bundle exec rackup -p 8080 -E deployment config.ru'
# Exception hierarchy for the Ruby fingerprinter; everything derives from
# RubyConfigError so callers can catch a single type.
class RubyConfigError(exceptions.Error):
  """Error during Ruby application configuration."""


class MissingGemfileError(RubyConfigError):
  """Gemfile is missing."""


class StaleBundleError(RubyConfigError):
  """Bundle is stale and needs a bundle install."""
class RubyConfigurator(ext_runtime.Configurator):
  """Generates configuration for a Ruby app."""

  def __init__(self, path, params, ruby_version, entrypoint, packages):
    """Constructor.

    Args:
      path: (str) Root path of the source tree.
      params: (ext_runtime.Params) Parameters passed through to the
        fingerprinters.
      ruby_version: (str) The ruby interpreter in rbenv format
      entrypoint: (str) The entrypoint command
      packages: ([str, ...]) A set of packages to install
    """
    self.root = path
    self.params = params
    self.ruby_version = ruby_version
    self.entrypoint = entrypoint
    self.packages = packages
    # Write messages to the console or to the log depending on whether we're
    # doing a "deploy."
    if params.deploy:
      self.notify = log.info
    else:
      self.notify = log.status.Print

  def GenerateConfigs(self):
    """Generates all config files for the module.

    Returns:
      (bool) True if files were written.
    """
    all_config_files = []
    if not self.params.appinfo:
      all_config_files.append(self._GenerateAppYaml())
    if self.params.custom or self.params.deploy:
      all_config_files.append(self._GenerateDockerfile())
      all_config_files.append(self._GenerateDockerignore())
    created = [config_file.WriteTo(self.root, self.notify)
               for config_file in all_config_files]
    if not any(created):
      self.notify('All config files already exist. No files generated.')
    return any(created)

  def GenerateConfigData(self):
    """Generates all config files for the module.

    Returns:
      list(ext_runtime.GeneratedFile):
        The generated files
    """
    if not self.params.appinfo:
      app_yaml = self._GenerateAppYaml()
      app_yaml.WriteTo(self.root, self.notify)
    all_config_files = []
    if self.params.custom or self.params.deploy:
      all_config_files.append(self._GenerateDockerfile())
      all_config_files.append(self._GenerateDockerignore())
    # Only return files that don't already exist on disk.
    return [f for f in all_config_files
            if not os.path.exists(os.path.join(self.root, f.filename))]

  def _GenerateAppYaml(self):
    """Generates an app.yaml file appropriate to this application.

    Returns:
      (ext_runtime.GeneratedFile) A file wrapper for app.yaml
    """
    # Fix: previously this method first computed an os.path.join() result into
    # a local that was immediately overwritten; the dead statement is removed.
    runtime = 'custom' if self.params.custom else 'ruby'
    app_yaml_contents = APP_YAML_CONTENTS.format(runtime=runtime,
                                                 entrypoint=self.entrypoint)
    return ext_runtime.GeneratedFile('app.yaml', app_yaml_contents)

  def _GenerateDockerfile(self):
    """Generates a Dockerfile appropriate to this application.

    Returns:
      (ext_runtime.GeneratedFile) A file wrapper for the Dockerfile
    """
    dockerfile_content = [DOCKERFILE_HEADER]
    # Pin the interpreter if one was detected; otherwise use the base image
    # default.
    if self.ruby_version:
      dockerfile_content.append(
          DOCKERFILE_CUSTOM_INTERPRETER.format(self.ruby_version))
    else:
      dockerfile_content.append(DOCKERFILE_DEFAULT_INTERPRETER)
    # Add apt packages required by detected gems, or guidance comments.
    if self.packages:
      dockerfile_content.append(
          DOCKERFILE_MORE_PACKAGES.format(' '.join(self.packages)))
    else:
      dockerfile_content.append(DOCKERFILE_NO_MORE_PACKAGES)
    dockerfile_content.append(DOCKERFILE_GEM_INSTALL)
    dockerfile_content.append(
        DOCKERFILE_ENTRYPOINT.format(self.entrypoint))
    return ext_runtime.GeneratedFile(config.DOCKERFILE,
                                     '\n'.join(dockerfile_content))

  def _GenerateDockerignore(self):
    """Generates a .dockerignore file appropriate to this application.

    Returns:
      (ext_runtime.GeneratedFile) A file wrapper for .dockerignore
    """
    # Fix: removed a dead os.path.join() assignment that was immediately
    # overwritten by the GeneratedFile below.
    return ext_runtime.GeneratedFile('.dockerignore',
                                     DOCKERIGNORE_CONTENTS)
def Fingerprint(path, params):
  """Check for a Ruby app.

  Args:
    path: (str) Application path.
    params: (ext_runtime.Params) Parameters passed through to the
      fingerprinters.

  Returns:
    (RubyConfigurator or None) Returns a configurator if the path contains a
    Ruby app, or None if not.
  """
  app_info = params.appinfo
  if not _CheckForRubyRuntime(path, app_info):
    return None
  # Inspect the local environment and the application source.
  has_bundler = _CheckEnvironment(path)
  detected_gems = _DetectGems(has_bundler)
  interpreter = _DetectRubyInterpreter(path, has_bundler)
  needed_packages = _DetectNeededPackages(detected_gems)
  # An explicit entrypoint in app.yaml wins; otherwise detect one and let the
  # user confirm or override it.
  if app_info and app_info.entrypoint:
    entrypoint = app_info.entrypoint
  else:
    entrypoint = _ChooseEntrypoint(
        _DetectDefaultEntrypoint(path, detected_gems), app_info)
  return RubyConfigurator(path, params, interpreter, entrypoint,
                          needed_packages)
def _CheckForRubyRuntime(path, appinfo):
  """Determines whether to treat this application as runtime:ruby.

  Honors the appinfo runtime setting; otherwise looks at the contents of the
  current directory and confirms with the user.

  Args:
    path: (str) Application path.
    appinfo: (apphosting.api.appinfo.AppInfoExternal or None) The parsed
      app.yaml file for the module if it exists.

  Returns:
    (bool) Whether this app should be treated as runtime:ruby.
  """
  # An explicit "runtime: ruby" in app.yaml is authoritative.
  if appinfo and appinfo.GetEffectiveRuntime() == 'ruby':
    return True
  log.info('Checking for Ruby.')
  # A Gemfile is the heuristic signal that this is a Ruby app.
  if not os.path.isfile(os.path.join(path, 'Gemfile')):
    return False
  got_ruby_message = 'This looks like a Ruby application.'
  # Confirm with the user when we can; otherwise just log and proceed.
  if not console_io.CanPrompt():
    log.info(got_ruby_message)
    return True
  return console_io.PromptContinue(
      message=got_ruby_message,
      prompt_string='Proceed to configure deployment for Ruby?')
def _CheckEnvironment(path):
  """Gathers information about the local environment, and performs some checks.

  Args:
    path: (str) Application path.

  Returns:
    (bool) Whether bundler is available in the environment.

  Raises:
    RubyConfigError: The application is recognized as a Ruby app but
      malformed in some way.
  """
  if not os.path.isfile(os.path.join(path, 'Gemfile')):
    raise MissingGemfileError('Gemfile is required for Ruby runtime.')
  has_lockfile = os.path.isfile(os.path.join(path, 'Gemfile.lock'))
  has_bundler = _SubprocessSucceeds('bundle version')
  if not has_bundler:
    # Without bundler we cannot inspect gems; warn and continue best-effort.
    log.status.Print(
        '\nNOTICE: gcloud could not run bundler in your local environment, '
        "and so its ability to determine your application's requirements "
        'will be limited. We will still attempt to deploy your application, '
        'but if your application has trouble starting up due to missing '
        'requirements, we recommend installing bundler by running '
        '[gem install bundler]')
    return False
  # Bundler is available: require an up-to-date bundle.
  if not _SubprocessSucceeds('bundle check'):
    raise StaleBundleError('Your bundle is not up-to-date. '
                           "Install missing gems with 'bundle install'.")
  if not has_lockfile:
    log.status.Print(
        '\nNOTICE: We could not find a Gemfile.lock, which suggests this '
        'application has not been tested locally, or the Gemfile.lock has '
        'not been committed to source control. We have created a '
        'Gemfile.lock for you, but it is recommended that you verify it '
        'yourself (by installing your bundle and testing locally) to '
        'ensure that the gems we deploy are the same as those you tested.')
  return True
def _DetectRubyInterpreter(path, bundler_available):
  """Determines the ruby interpreter and version expected by this application.

  Args:
    path: (str) Application path.
    bundler_available: (bool) Whether bundler is available in the environment.

  Returns:
    (str or None) The interpreter version in rbenv (.ruby-version) format, or
    None to use the base image default.
  """
  # First preference: a "ruby" directive in the Gemfile, reported by bundler.
  if bundler_available:
    ruby_info = _RunSubprocess('bundle platform --ruby')
    # Bundler prints "No ruby version specified..." when the Gemfile does not
    # pin an interpreter.
    if not re.match('^No ', ruby_info):
      match = re.match(r'^ruby (\d+\.\d+(\.\d+)?)', ruby_info)
      if match:
        ruby_version = match.group(1)
        # Expand to a full rbenv version (with patchlevel) when we know one.
        ruby_version = RUBY_VERSION_MAP.get(ruby_version, ruby_version)
        msg = ('\nUsing Ruby {0} as requested in the Gemfile.'.
               format(ruby_version))
        log.status.Print(msg)
        return ruby_version
      # TODO(b/12036082): Recognize JRuby
      msg = 'Unrecognized platform in Gemfile: [{0}]'.format(ruby_info)
      log.status.Print(msg)
  # Second preference: a .ruby-version file in the application root.
  ruby_version = _ReadFile(path, '.ruby-version')
  if ruby_version:
    ruby_version = ruby_version.strip()
    msg = ('\nUsing Ruby {0} as requested in the .ruby-version file'.
           format(ruby_version))
    log.status.Print(msg)
    return ruby_version
  # No version requested anywhere: fall back to the base image default.
  msg = ('\nNOTICE: We will deploy your application using a recent version of '
         'the standard "MRI" Ruby runtime by default. If you want to use a '
         'specific Ruby runtime, you can create a ".ruby-version" file in this '
         'directory. (For best performance, we recommend MRI version {0}.)'.
         format(PREFERRED_RUBY_VERSION))
  log.status.Print(msg)
  return None
def _DetectGems(bundler_available):
  """Returns a list of gems requested by this application.

  Args:
    bundler_available: (bool) Whether bundler is available in the environment.

  Returns:
    ([str, ...]) A list of gem names.
  """
  # Without bundler we cannot enumerate gems.
  if not bundler_available:
    return []
  # "bundle list" lines look like "  * gemname (version)".
  gem_line = re.compile(r'\s*\*\s+(\S+)\s+\(')
  gems = []
  for line in _RunSubprocess('bundle list').splitlines():
    found = gem_line.match(line)
    if found:
      gems.append(found.group(1))
  return gems
def _DetectDefaultEntrypoint(path, gems):
  """Returns the app server expected by this application.

  Args:
    path: (str) Application path.
    gems: ([str, ...]) A list of gems used by this application.

  Returns:
    (str) The default entrypoint command, or the empty string if unknown.
  """
  # Preference order: Procfile, puma, unicorn, plain rackup, then unknown.
  if os.path.isfile(os.path.join(path, 'Procfile')):
    return ENTRYPOINT_FOREMAN
  if 'puma' in gems:
    return ENTRYPOINT_PUMA
  if 'unicorn' in gems:
    return ENTRYPOINT_UNICORN
  if os.path.isfile(os.path.join(path, 'config.ru')):
    return ENTRYPOINT_RACKUP
  return ''
def _ChooseEntrypoint(default_entrypoint, appinfo):
  """Prompt the user for an entrypoint.

  Args:
    default_entrypoint: (str) Default entrypoint determined from the app.
    appinfo: (apphosting.api.appinfo.AppInfoExternal or None) The parsed
      app.yaml file for the module if it exists.

  Returns:
    (str) The actual entrypoint to use.

  Raises:
    RubyConfigError: Unable to get entrypoint from the user.
  """
  if console_io.CanPrompt():
    if default_entrypoint:
      prompt = ('\nPlease enter the command to run this Ruby app in '
                'production, or leave blank to accept the default:\n[{0}] ')
      entrypoint = console_io.PromptResponse(prompt.format(default_entrypoint))
    else:
      entrypoint = console_io.PromptResponse(
          '\nPlease enter the command to run this Ruby app in production: ')
    entrypoint = entrypoint.strip()
    if not entrypoint:
      # Blank response: fall back to the detected default, if there is one.
      if not default_entrypoint:
        raise RubyConfigError('Entrypoint command is required.')
      entrypoint = default_entrypoint
    if appinfo:
      # app.yaml exists but has no entrypoint; nudge the user to add one so
      # they are not prompted again.
      msg = ('\nTo avoid being asked for an entrypoint in the future, please '
             'add it to your app.yaml. e.g.\n entrypoint: {0}'.
             format(entrypoint))
      log.status.Print(msg)
    return entrypoint
  else:
    # Non-interactive and no entrypoint available: we cannot proceed.
    msg = ("This appears to be a Ruby app. You'll need to provide the full "
           'command to run the app in production, but gcloud is not running '
           'interactively and cannot ask for the entrypoint{0}. Please either '
           'run gcloud interactively, or create an app.yaml with '
           '"runtime:ruby" and an "entrypoint" field.'.
           format(ext_runtime_adapter.GetNonInteractiveErrorMessage()))
    raise RubyConfigError(msg)
def _DetectNeededPackages(gems):
  """Determines additional apt-get packages required by the given gems.

  Args:
    gems: ([str, ...]) A list of gems used by this application.

  Returns:
    ([str, ...]) A sorted list of strings indicating packages to install
  """
  # Collect into a set to de-duplicate packages shared by multiple gems.
  needed = set()
  for gem_name in gems:
    needed.update(GEM_PACKAGES.get(gem_name, ()))
  return sorted(needed)
def _RunSubprocess(cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
if p.wait() != 0:
raise RubyConfigError('Unable to run script: [{0}]'.format(cmd))
return p.stdout.read()
def _SubprocessSucceeds(cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
return p.wait() == 0
def _ReadFile(root, filename, required=False):
  """Reads a file under root, returning its contents.

  Args:
    root: (str) Directory containing the file.
    filename: (str) Name of the file relative to root.
    required: (bool) Whether a missing file is an error.

  Returns:
    (str or None) The file contents, or None when the file is missing and
    not required.

  Raises:
    RubyConfigError: The file is required but does not exist.
  """
  full_path = os.path.join(root, filename)
  if os.path.isfile(full_path):
    return files.ReadFileContents(full_path)
  if required:
    raise RubyConfigError(
        'Could not find required file: [{0}]'.format(filename))
  return None

View File

@@ -0,0 +1,203 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dealing with service resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.app import operations_util
from googlecloudsdk.core import exceptions
from googlecloudsdk.core.util import text
import six
class ServiceValidationError(exceptions.Error):
  """Raised when a service resource or resource path fails validation."""
  pass


class ServicesDeleteError(exceptions.Error):
  """Raised when one or more services could not be deleted."""
  pass
class ServicesNotFoundError(exceptions.Error):
  """Raised when the user names services that do not exist in the project."""

  @classmethod
  def FromServiceLists(cls, requested_services, all_services):
    """Format a ServiceNotFoundError.

    Args:
      requested_services: list of str, IDs of services that were not found.
      all_services: list of str, IDs of all available services

    Returns:
      ServicesNotFoundError, error with properly formatted message
    """
    return cls(
        'The following {0} not found: [{1}]\n\n'
        'All services: [{2}]'.format(
            text.Pluralize(len(requested_services), 'service was',
                           plural='services were'),
            ', '.join(requested_services),
            ', '.join(all_services)))
class ServicesSplitTrafficError(exceptions.Error):
  """Raised when a traffic-split request cannot be fulfilled."""
  pass
class Service(object):
  """Value class representing a service resource.

  Attributes:
    project: str, the project the service belongs to.
    id: str, the service ID.
    split: dict, traffic split mapping (defaults to empty).
  """

  def __init__(self, project, id_, split=None):
    self.project = project
    self.id = id_
    self.split = split or {}

  def __eq__(self, other):
    # Equality intentionally ignores `split`: two Service objects naming the
    # same (project, id) are the same resource.
    return (type(other) is Service and
            self.project == other.project and self.id == other.id)

  def __ne__(self, other):
    return not self == other

  @classmethod
  def FromResourcePath(cls, path):
    """Parses a '<project>/<service>' path into a Service.

    Args:
      path: str, resource path of the form '<project>/<service>'.

    Returns:
      Service, the parsed service.

    Raises:
      ServiceValidationError: The path does not have exactly two segments.
    """
    parts = path.split('/')
    if len(parts) != 2:
      # Fix: the placeholder was previously never substituted, so the error
      # message showed a literal '{0}' instead of the offending path.
      raise ServiceValidationError('[{0}] is not a valid resource path. '
                                   'Expected <project>/<service>.'.format(
                                       path))
    return cls(*parts)

  def __lt__(self, other):
    return (self.project, self.id) < (other.project, other.id)

  def __le__(self, other):
    return (self.project, self.id) <= (other.project, other.id)

  def __gt__(self, other):
    return (self.project, self.id) > (other.project, other.id)

  def __ge__(self, other):
    return (self.project, self.id) >= (other.project, other.id)

  def __repr__(self):
    return '{0}/{1}'.format(self.project, self.id)
def _ValidateServicesAreSubset(filtered_services, all_services):
not_found_services = set(filtered_services) - set(all_services)
if not_found_services:
raise ServicesNotFoundError.FromServiceLists(not_found_services,
all_services)
def GetMatchingServices(all_services, args_services):
  """Return a list of services to act on based on user arguments.

  Args:
    all_services: list of Services representing all services in the project.
    args_services: list of string, service IDs to filter for, from arguments
      given by the user to the command line. If empty, match all services.

  Returns:
    list of matching Services sorted by the order they were given to the
    command line.

  Raises:
    ServiceValidationError: If an improper combination of arguments is given
  """
  if args_services:
    _ValidateServicesAreSubset(args_services, [s.id for s in all_services])
  else:
    # No explicit filter: act on every service, in sorted-ID order.
    args_services = sorted(s.id for s in all_services)
  # Preserve the requested ordering; a single ID may match several entries.
  matching = []
  for requested_id in args_services:
    matching.extend(s for s in all_services if s.id == requested_id)
  return matching
def ParseTrafficAllocations(args_allocations, split_method):
  """Parses the user-supplied allocations into a format acceptable by the API.

  Args:
    args_allocations: The raw allocations passed on the command line. A dict
      mapping version_id (str) to the allocation (float).
    split_method: Whether the traffic will be split by ip or cookie. This
      affects the format we specify the splits in.

  Returns:
    A dict mapping version id (str) to traffic split (float).

  Raises:
    ServicesSplitTrafficError: if the sum of traffic allocations is zero.
  """
  # Splitting by IP allows 2 decimal places, splitting by cookie allows 3.
  max_decimal_places = 2 if split_method == 'ip' else 3
  sum_of_splits = sum([float(s) for s in args_allocations.values()])
  err = ServicesSplitTrafficError(
      'Cannot set traffic split to zero. If you would like a version to '
      'receive no traffic, send 100% of traffic to other versions or delete '
      'the service.')
  # Prevent division by zero
  if sum_of_splits == 0.0:
    raise err
  allocations = {}
  for version, split in six.iteritems(args_allocations):
    # Normalize so the splits sum to roughly 1.0, then round to the precision
    # the chosen split method supports.
    allocation = float(split) / sum_of_splits
    allocation = round(allocation, max_decimal_places)
    # A nonzero request that rounds down to zero is an error: the user asked
    # for that version to receive some traffic.
    if allocation == 0.0:
      raise err
    allocations[version] = allocation
  # The API requires that these sum to 1.0. This is hard to get exactly correct,
  # (think .33, .33, .33) so we take our difference and subtract it from the
  # first maximum element of our sorted allocations dictionary
  total_splits = round(sum(allocations.values()), max_decimal_places)
  difference = total_splits - 1.0
  max_split = max(allocations.values())
  for version, split in sorted(allocations.items()):
    if max_split == split:
      allocations[version] -= difference
      break
  return allocations
def DeleteServices(api_client, services):
  """Delete the given services.

  Args:
    api_client: The App Engine API client used to issue deletions.
    services: iterable of Service, the services to delete.

  Raises:
    ServicesDeleteError: One or more deletions failed; the message
      aggregates every per-service error.
  """
  # Attempt every deletion, collecting failures rather than stopping early.
  errors = {}
  for service in services:
    try:
      operations_util.CallAndCollectOpErrors(
          api_client.DeleteService, service.id)
    except operations_util.MiscOperationError as err:
      errors[service.id] = six.text_type(err)
  if not errors:
    return
  printable_errors = {
      service_id: '[{0}]: {1}'.format(service_id, error_msg)
      for service_id, error_msg in errors.items()
  }
  raise ServicesDeleteError(
      'Issue deleting {0}: [{1}]\n\n'.format(
          text.Pluralize(len(printable_errors), 'service'),
          ', '.join(list(printable_errors.keys()))) +
      '\n\n'.join(list(printable_errors.values())))

View File

@@ -0,0 +1,411 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for gcloud app."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
import os
import posixpath
import sys
import time
from googlecloudsdk.appengine.api import client_deployinfo
from googlecloudsdk.core import config
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.util import platforms
import six
from six.moves import urllib
class Error(exceptions.Error):
  """Exceptions for the appcfg module."""


class NoFieldsSpecifiedError(Error):
  """The user specified no fields to a command which requires at least one."""


class NoCloudSDKError(Error):
  """The module was unable to find Cloud SDK."""

  def __init__(self):
    super(NoCloudSDKError, self).__init__(
        'Unable to find a Cloud SDK installation.')


class NoAppengineSDKError(Error):
  """The module was unable to find the appengine SDK."""


# NOTE: this intentionally shadows the Python 3 builtin TimeoutError within
# this module; callers catch it via this module's namespace.
class TimeoutError(Error):
  """An exception for when a retry with wait operation times out."""

  def __init__(self):
    super(TimeoutError, self).__init__(
        'Timed out waiting for the operation to complete.')


class RPCError(Error):
  """For when an error occurs when making an RPC call."""

  def __init__(self, url_error, body=''):
    # url_error is expected to carry a .code attribute; .reason may be absent,
    # hence the getattr default.
    super(RPCError, self).__init__(
        'Server responded with code [{code}]:\n {reason}.\n {body}'
        .format(code=url_error.code,
                reason=getattr(url_error, 'reason', '(unknown)'),
                body=body))
    self.url_error = url_error
def GetCloudSDKRoot():
  """Gets the directory of the root of the Cloud SDK, error if it doesn't exist.

  Raises:
    NoCloudSDKError: If there is no SDK root.

  Returns:
    str, The path to the root of the Cloud SDK.
  """
  root = config.Paths().sdk_root
  if not root:
    raise NoCloudSDKError()
  log.debug('Found Cloud SDK root: %s', root)
  return root
def GetAppEngineSDKRoot():
  """Gets the directory of the GAE SDK directory in the SDK.

  Raises:
    NoCloudSDKError: If there is no SDK root.
    NoAppengineSDKError: If the GAE SDK cannot be found.

  Returns:
    str, The path to the root of the GAE SDK within the Cloud SDK.
  """
  gae_sdk_dir = os.path.join(GetCloudSDKRoot(), 'platform',
                             'google_appengine')
  if not os.path.isdir(gae_sdk_dir):
    raise NoAppengineSDKError()
  log.debug('Found App Engine SDK root: %s', gae_sdk_dir)
  return gae_sdk_dir
def GenerateVersionId(datetime_getter=datetime.datetime.now):
  """Generates a version id based off the current time.

  Args:
    datetime_getter: A function that returns a datetime.datetime instance.

  Returns:
    A version string based.
  """
  # Lowercase the ISO timestamp, strip the date/time separators, and truncate
  # to "YYYYMMDDtHHMMSS" (15 characters).
  stamp = datetime_getter().isoformat().lower()
  return stamp.replace('-', '').replace(':', '')[:15]
def ConvertToPosixPath(path):
  """Converts a native-OS path to /-separated: os.path.join('a', 'b')->'a/b'."""
  segments = path.split(os.path.sep)
  return posixpath.join(*segments)
def ConvertToCloudRegion(region):
  """Converts a App Engine region to the format used elsewhere in Cloud."""
  # These two legacy App Engine names need a '1' suffix to become
  # standard Cloud region names; everything else passes through.
  legacy_names = ('europe-west', 'us-central')
  return region + '1' if region in legacy_names else region
def ShouldSkip(skip_files, path):
  """Returns whether the given path should be skipped by the skip_files field.

  A user can specify a `skip_files` field in their .yaml file, which is a list
  of regular expressions matching files that should be skipped. By this point
  in the code, it's been turned into one mega-regex that matches any file to
  skip.

  Args:
    skip_files: A regular expression object for files/directories to skip.
    path: str, the path to the file/directory which might be skipped (relative
      to the application root)

  Returns:
    bool, whether the file/dir should be skipped.
  """
  # skip_files patterns always use '/' separators, while `path` is a native
  # path (possibly '\'-separated on Windows), so normalize before matching.
  return skip_files.match(ConvertToPosixPath(path))
def FileIterator(base, skip_files):
  """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    skip_files: A regular expression object for files/directories to skip.

  Yields:
    Paths of files found, relative to base.
  """
  # Depth-first traversal with an explicit stack of base-relative directories.
  pending = ['']
  while pending:
    rel_dir = pending.pop()
    for entry in sorted(set(os.listdir(os.path.join(base, rel_dir)))):
      rel_path = os.path.join(rel_dir, entry)
      abs_path = os.path.join(base, rel_path)
      if os.path.isfile(abs_path):
        if ShouldSkip(skip_files, rel_path):
          log.info('Ignoring file [%s]: File matches ignore regex.', rel_path)
        else:
          yield rel_path
      elif os.path.isdir(abs_path):
        if ShouldSkip(skip_files, rel_path):
          log.info('Ignoring directory [%s]: Directory matches ignore regex.',
                   rel_path)
        else:
          pending.append(rel_path)
def RetryWithBackoff(func, retry_notify_func,
                     initial_delay=1, backoff_factor=2,
                     max_delay=60, max_tries=20, raise_on_timeout=True):
  """Calls a function multiple times, backing off more and more each time.

  Args:
    func: f() -> (bool, value), A function that performs some operation that
      should be retried a number of times upon failure. If the first tuple
      element is True, we'll immediately return (True, value). If False, we'll
      delay a bit and try again, unless we've hit the 'max_tries' limit, in
      which case we'll return (False, value).
    retry_notify_func: f(value, delay) -> None, This function will be called
      immediately before the next retry delay. 'value' is the value returned
      by the last call to 'func'. 'delay' is the retry delay, in seconds
    initial_delay: int, Initial delay after first try, in seconds.
    backoff_factor: int, Delay will be multiplied by this factor after each
      try.
    max_delay: int, Maximum delay, in seconds.
    max_tries: int, Maximum number of tries (the first one counts).
    raise_on_timeout: bool, True to raise an exception if the operation times
      out instead of returning False.

  Returns:
    What the last call to 'func' returned, which is of the form (done, value).
    If 'done' is True, you know 'func' returned True before we ran out of
    retries. If 'done' is False, you know 'func' kept returning False and we
    ran out of retries.

  Raises:
    TimeoutError: If raise_on_timeout is True and max_tries is exhausted.
  """
  wait = initial_delay
  tries_left = max_tries
  result = None
  while True:
    tries_left -= 1
    finished, result = func()
    if finished:
      return True, result
    if tries_left <= 0:
      # Out of attempts: either raise or report failure with the last value.
      if raise_on_timeout:
        raise TimeoutError()
      return False, result
    # Notify, sleep, then grow the delay (capped at max_delay).
    retry_notify_func(result, wait)
    time.sleep(wait)
    wait = min(wait * backoff_factor, max_delay)
def RetryNoBackoff(callable_func, retry_notify_func, delay=5, max_tries=200):
  """Calls a function multiple times, with the same delay each time.

  Args:
    callable_func: A function that performs some operation that should be
      retried a number of times upon failure. Signature: () -> (done, value)
      If 'done' is True, we'll immediately return (True, value)
      If 'done' is False, we'll delay a bit and try again, unless we've
      hit the 'max_tries' limit, in which case we'll return (False, value).
    retry_notify_func: This function will be called immediately before the
      next retry delay. Signature: (value, delay) -> None
      'value' is the value returned by the last call to 'callable_func'
      'delay' is the retry delay, in seconds
    delay: Delay between tries, in seconds.
    max_tries: Maximum number of tries (the first one counts).

  Returns:
    What the last call to 'callable_func' returned, which is of the form
    (done, value). If 'done' is True, you know 'callable_func' returned True
    before we ran out of retries. If 'done' is False, you know 'callable_func'
    kept returning False and we ran out of retries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  # A fixed delay is just exponential backoff with a factor of 1 and a
  # ceiling equal to the initial delay.
  return RetryWithBackoff(callable_func,
                          retry_notify_func,
                          initial_delay=delay,
                          backoff_factor=1,
                          max_delay=delay,
                          max_tries=max_tries)
def GetSourceName():
  """Returns the identifier for this source version (e.g. for reporting)."""
  return 'Google-appcfg-{}'.format(config.CLOUD_SDK_VERSION)
def GetUserAgent():
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  The value is assembled from the Cloud SDK identifier, the current platform
  fragment, and the running Python version, separated by spaces.

  Returns:
    String containing the 'user-agent' header value.
  """
  python_version = '.'.join(six.text_type(part) for part in sys.version_info)
  product_tokens = [
      config.CLOUDSDK_USER_AGENT,                        # SDK version
      platforms.Platform.Current().UserAgentFragment(),  # Platform
      'Python/%s' % python_version,                      # Python version
  ]
  return ' '.join(product_tokens)
class ClientDeployLoggingContext(object):
  """Context for sending and recording server rpc requests.

  Attributes:
    rpcserver: The AbstractRpcServer to use for the upload.
    requests: A list of client_deployinfo.Request objects to include
      with the client deploy log.
    time_func: Function returning the current time in seconds (e.g.
      time.time); converted to microseconds by GetCurrentTimeUsec.
    request_params: A dictionary with params to append to requests
  """

  def __init__(self,
               rpcserver,
               request_params,
               usage_reporting,
               time_func=time.time):
    """Creates a new ClientDeployLoggingContext.

    Args:
      rpcserver: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      request_params: A dictionary with params to append to requests
      usage_reporting: Whether to actually upload data.
      time_func: Function to return the current time in seconds
        (default time.time).
    """
    self.rpcserver = rpcserver
    self.request_params = request_params
    self.usage_reporting = usage_reporting
    self.time_func = time_func
    self.requests = []

  def Send(self, url, payload='', **kwargs):
    """Sends a request to the server, with common params.

    Every request (successful or not) is recorded for client deploy logging.

    Args:
      url: The URL to send the request to.
      payload: The request body; its length is recorded as the request size.
      **kwargs: Additional request arguments, merged with self.request_params.

    Returns:
      The response from the underlying rpcserver.

    Raises:
      RPCError: Propagated from the rpcserver after the failure is recorded.
    """
    start_time_usec = self.GetCurrentTimeUsec()
    request_size_bytes = len(payload)
    try:
      log.debug('Send: {0}, params={1}'.format(url, self.request_params))
      kwargs.update(self.request_params)
      result = self.rpcserver.Send(url, payload=payload, **kwargs)
      self._RegisterRequestForLogging(url, 200, start_time_usec,
                                      request_size_bytes)
      return result
    except RPCError as err:
      self._RegisterRequestForLogging(url, err.url_error.code, start_time_usec,
                                      request_size_bytes)
      raise

  def GetCurrentTimeUsec(self):
    """Returns the current time in microseconds."""
    return int(round(self.time_func() * 1000 * 1000))

  def _RegisterRequestForLogging(self, path, response_code, start_time_usec,
                                 request_size_bytes):
    """Registers a request for client deploy logging purposes."""
    end_time_usec = self.GetCurrentTimeUsec()
    self.requests.append(client_deployinfo.Request(
        path=path,
        response_code=response_code,
        start_time_usec=start_time_usec,
        end_time_usec=end_time_usec,
        request_size_bytes=request_size_bytes))

  def LogClientDeploy(self, runtime, start_time_usec, success):
    """Logs a client deployment attempt.

    Args:
      runtime: The runtime for the app being deployed.
      start_time_usec: The start time of the deployment in micro seconds.
      success: True if the deployment succeeded otherwise False.
    """
    if not self.usage_reporting:
      log.info('Skipping usage reporting.')
      return
    end_time_usec = self.GetCurrentTimeUsec()
    try:
      info = client_deployinfo.ClientDeployInfoExternal(
          runtime=runtime,
          start_time_usec=start_time_usec,
          end_time_usec=end_time_usec,
          requests=self.requests,
          success=success,
          sdk_version=config.CLOUD_SDK_VERSION)
      self.Send('/api/logclientdeploy', info.ToYAML())
    except BaseException as e:  # pylint: disable=broad-except
      # Deploy logging is best-effort; never let it break the deployment.
      log.debug('Exception logging deploy info continuing - {0}'.format(e))
class RPCServer(object):
  """This wraps the underlying RPC server so we can make a nice error message.

  This will go away once we switch to just using our own http object.
  """

  def __init__(self, original_server):
    """Construct a new rpc server.

    Args:
      original_server: The server to wrap.
    """
    self._server = original_server

  def Send(self, *args, **kwargs):
    """Forwards the call to the wrapped server, converting HTTP failures.

    urllib HTTPErrors are re-raised as RPCError with the response body (when
    one is available) attached for a friendlier message.
    """
    try:
      response = self._server.Send(*args, **kwargs)
      log.debug('Got response: %s', response)
      return response
    except urllib.error.HTTPError as e:
      # Attach the message body, if the error object carries one.
      body = e.read() if hasattr(e, 'read') else ''
      exceptions.reraise(RPCError(e, body=body))

View File

@@ -0,0 +1,426 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dealing with version resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import metric_names
from googlecloudsdk.api_lib.app import operations_util
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import metrics
from googlecloudsdk.core.util import retry
from googlecloudsdk.core.util import text
from googlecloudsdk.core.util import times
import six
from six.moves import map # pylint: disable=redefined-builtin
class VersionValidationError(exceptions.Error):
  """Raised when a version argument or resource path fails validation."""
  pass
class VersionsDeleteError(exceptions.Error):
  """Raised when one or more version deletions fail."""
  pass
class Version(object):
  """Value class representing a version resource.

  This wrapper around appengine_<API-version>_messages.Version is necessary
  because Versions don't have traffic split, project, or last_deployed_time as a
  datetime object.
  """

  # The smallest allowed traffic split is 1e-3. Because of floating point
  # peculiarities, we use 1e-4 as our max allowed epsilon when testing whether a
  # version is receiving all traffic.
  _ALL_TRAFFIC_EPSILON = 1e-4

  _RESOURCE_PATH_PARTS = 3  # project/service/version

  # This is the name in the Version resource from the API
  _VERSION_NAME_PATTERN = ('apps/(?P<project>.*)/'
                           'services/(?P<service>.*)/'
                           'versions/(?P<version>.*)')

  def __init__(self,
               project,
               service,
               version_id,
               traffic_split=None,
               last_deployed_time=None,
               environment=None,
               version_resource=None,
               service_account=None):
    """Creates a new Version.

    Args:
      project: str, the project id, or None if not yet known.
      service: str, the service id, or None if not yet known.
      version_id: str, the version id.
      traffic_split: float or None, fraction of the service's traffic served
        by this version.
      last_deployed_time: datetime or None, localized creation time of the
        version.
      environment: one of the env constants (STANDARD/FLEX/MANAGED_VMS), or
        None.
      version_resource: the raw API Version message, if already fetched.
      service_account: the version's service account, if known.
    """
    self.project = project
    self.service = service
    self.id = version_id
    self.version = version_resource
    self.traffic_split = traffic_split
    self.last_deployed_time = last_deployed_time
    self.environment = environment
    self.service_account = service_account

  @classmethod
  def FromResourcePath(cls, path):
    """Parses a '<project>/<service>/<version>' style path, right-aligned.

    Paths with fewer than three parts are padded with None on the left, so
    e.g. 'v1' yields Version(None, None, 'v1').

    Args:
      path: str, a '/'-separated resource path with at most three parts.

    Returns:
      Version, with missing leading parts set to None.

    Raises:
      VersionValidationError: If the path has more than three parts.
    """
    parts = path.split('/')
    if not 0 < len(parts) <= cls._RESOURCE_PATH_PARTS:
      # NOTE(review): the '[{0}]' placeholder is never substituted -- a
      # .format(path) call appears to be missing here; confirm and fix.
      raise VersionValidationError('[{0}] is not a valid resource path. '
                                   'Expected <project>/<service>/<version>')
    parts = [None] * (cls._RESOURCE_PATH_PARTS - len(parts)) + parts
    return cls(*parts)

  @classmethod
  def FromVersionResource(cls, version, service):
    """Convert appengine_<API-version>_messages.Version into wrapped Version."""
    project, service_id, _ = re.match(cls._VERSION_NAME_PATTERN,
                                      version.name).groups()
    # If no service wrapper was provided, traffic_split stays falsy.
    traffic_split = service and service.split.get(version.id, 0.0)
    last_deployed = None
    try:
      if version.createTime:
        # Drop sub-second precision before localizing for display.
        last_deployed_dt = times.ParseDateTime(version.createTime).replace(
            microsecond=0)
        last_deployed = times.LocalizeDateTime(last_deployed_dt)
    except ValueError:
      # Unparsable createTime: leave last_deployed as None.
      pass
    if version.env == 'flexible':
      environment = env.FLEX
    elif version.vm:
      environment = env.MANAGED_VMS
    else:
      environment = env.STANDARD
    return cls(project, service_id, version.id, traffic_split=traffic_split,
               last_deployed_time=last_deployed, environment=environment,
               version_resource=version)

  def IsReceivingAllTraffic(self):
    """Returns True if this version serves (within epsilon) all traffic."""
    return abs(self.traffic_split - 1.0) < self._ALL_TRAFFIC_EPSILON

  def GetVersionResource(self, api_client):
    """Attempts to load the Version resource for this version.

    Returns the cached Version resource if it exists. Otherwise, attempts to
    load it from the server. Errors are logged and ignored.

    Args:
      api_client: An AppengineApiClient.

    Returns:
      The Version resource, or None if it could not be loaded.
    """
    if not self.version:
      try:
        self.version = api_client.GetVersionResource(self.service, self.id)
        if not self.version:
          log.info('Failed to retrieve resource for version [{0}]'.format(self))
      except apitools_exceptions.Error as e:
        # Log and drop the exception so we don't introduce a new failure mode
        # into the app deployment flow. If we find this isn't happening very
        # often, we could choose to propagate the error.
        log.warning('Error retrieving Version resource [{0}]: {1}'
                    .format(six.text_type(self), six.text_type(e)))
    return self.version

  def __eq__(self, other):
    # Identity is (project, service, id); the cached resource and traffic
    # split do not participate in equality.
    return (type(other) is Version and
            self.project == other.project and
            self.service == other.service and
            self.id == other.id)

  def __ne__(self, other):
    return not self == other

  def __cmp__(self, other):
    # NOTE(review): Python 2 only -- `cmp` and `__cmp__` do not exist in
    # Python 3, so this method is dead code there.
    return cmp((self.project, self.service, self.id),
               (other.project, other.service, other.id))

  def __str__(self):
    return '{0}/{1}/{2}'.format(self.project, self.service, self.id)
def _ValidateServicesAreSubset(filtered_versions, all_versions):
"""Validate that each version in filtered_versions is also in all_versions.
Args:
filtered_versions: list of Version representing a filtered subset of
all_versions.
all_versions: list of Version representing all versions in the current
project.
Raises:
VersionValidationError: If a service or version is not found.
"""
for version in filtered_versions:
if version.service not in [v.service for v in all_versions]:
raise VersionValidationError(
'Service [{0}] not found.'.format(version.service))
if version not in all_versions:
raise VersionValidationError(
'Version [{0}/{1}] not found.'.format(version.service,
version.id))
def ParseVersionResourcePaths(paths, project):
  """Parse the list of resource paths specifying versions.

  Args:
    paths: The list of resource paths by which to filter.
    project: The current project. Used for validation.

  Returns:
    list of Version

  Raises:
    VersionValidationError: If not all versions are valid resource paths for the
      current project.
  """
  versions = [Version.FromResourcePath(path) for path in paths]
  for version in versions:
    # A bare version id parses with neither project nor service set; mixing
    # bare ids with resource paths is rejected.
    if not (version.project or version.service):
      raise VersionValidationError('If you provide a resource path as an '
                                   'argument, all arguments must be resource '
                                   'paths.')
    if version.project and version.project != project:
      raise VersionValidationError(
          'All versions must be in the current project.')
    # Fill in the current project for paths that omitted it.
    version.project = project
  return versions
def GetMatchingVersions(all_versions, versions, service):
  """Return a list of versions to act on based on user arguments.

  Args:
    all_versions: list of Version representing all services in the project.
    versions: list of string, version names to filter for.
      If empty, match all versions.
    service: string or None, service name. If given, only match versions in the
      given service.

  Returns:
    list of matching Version

  Raises:
    VersionValidationError: If an improper combination of arguments is given.
  """
  matched = all_versions
  if service:
    if all(v.service != service for v in all_versions):
      raise VersionValidationError('Service [{0}] not found.'.format(service))
    matched = [v for v in matched if v.service == service]
  if versions:
    matched = [v for v in matched if v.id in versions]
  return matched
def DeleteVersions(api_client, versions):
  """Delete the given version of the given services.

  Deletion failures are collected and reported together in a single
  VersionsDeleteError instead of aborting on the first failure.
  """
  failures = {}
  for version in versions:
    version_path = '{0}/{1}'.format(version.service, version.id)
    try:
      operations_util.CallAndCollectOpErrors(
          api_client.DeleteVersion, version.service, version.id)
    except operations_util.MiscOperationError as err:
      failures[version_path] = six.text_type(err)
  if not failures:
    return
  printable_errors = {
      path: '[{0}]: {1}'.format(path, message)
      for path, message in failures.items()
  }
  raise VersionsDeleteError(
      'Issue deleting {0}: [{1}]\n\n'.format(
          text.Pluralize(len(printable_errors), 'version'),
          ', '.join(list(printable_errors.keys()))) +
      '\n\n'.join(list(printable_errors.values())))
def PromoteVersion(all_services, new_version, api_client, stop_previous_version,
                   wait_for_stop_version):
  """Promote the new version to receive all traffic.

  First starts the new version if it is not running.

  Additionally, stops the previous version if stop_previous_version is True and
  it is possible to stop the previous version.

  Args:
    all_services: {str, Service}, A mapping of service id to Service objects
      for all services in the app.
    new_version: Version, The version to promote.
    api_client: appengine_api_client.AppengineApiClient to use to make requests.
    stop_previous_version: bool, True to stop the previous version which was
      receiving all traffic, if any.
    wait_for_stop_version: bool, indicating whether to wait for stop operation
      to finish.
  """
  old_default_version = None
  if stop_previous_version:
    # Grab the list of versions before we promote, since we need to
    # figure out what the previous default version was
    old_default_version = _GetPreviousVersion(
        all_services, new_version, api_client)
  # If the new version is stopped, try to start it.
  new_version_resource = new_version.GetVersionResource(api_client)
  status_enum = api_client.messages.Version.ServingStatusValueValuesEnum
  if (new_version_resource and
      new_version_resource.servingStatus == status_enum.STOPPED):
    # start new version
    log.status.Print('Starting version [{0}] before promoting it.'
                     .format(new_version))
    # Blocks until the start operation completes before shifting traffic.
    api_client.StartVersion(new_version.service, new_version.id, block=True)
  _SetDefaultVersion(new_version, api_client)
  if old_default_version:
    _StopPreviousVersionIfApplies(old_default_version, api_client,
                                  wait_for_stop_version)
def GetUri(version):
  """Returns the serving URL recorded on the wrapped version resource."""
  resource = version.version
  return resource.versionUrl
def _GetPreviousVersion(all_services, new_version, api_client):
"""Get the previous default version of which new_version is replacing.
If there is no such version, return None.
Args:
all_services: {str, Service}, A mapping of service id to Service objects
for all services in the app.
new_version: Version, The version to promote.
api_client: appengine_api_client.AppengineApiClient, The client for talking
to the App Engine Admin API.
Returns:
Version, The previous version or None.
"""
service = all_services.get(new_version.service, None)
if not service:
return None
for old_version in api_client.ListVersions([service]):
# Make sure not to stop the just-deployed version!
# This can happen with a new service, or with a deployment over
# an existing version.
if (old_version.IsReceivingAllTraffic() and
old_version.id != new_version.id):
return old_version
def _SetDefaultVersion(new_version, api_client):
  """Sets the given version as the default.

  Retries the SetDefaultVersion call up to 3 times (with exponential sleep
  starting at 1s) on any HTTP error, then re-raises the underlying failure.

  Args:
    new_version: Version, The version to promote.
    api_client: appengine_api_client.AppengineApiClient to use to make requests.
  """
  metrics.CustomTimedEvent(metric_names.SET_DEFAULT_VERSION_API_START)
  # Retry it if we get a service not found error.
  def ShouldRetry(exc_type, unused_exc_value, unused_traceback, unused_state):
    # Note: retries on any HttpError, not only "service not found".
    return issubclass(exc_type, apitools_exceptions.HttpError)
  try:
    retryer = retry.Retryer(max_retrials=3, exponential_sleep_multiplier=2)
    retryer.RetryOnException(
        api_client.SetDefaultVersion, [new_version.service, new_version.id],
        should_retry_if=ShouldRetry, sleep_ms=1000)
  except retry.MaxRetrialsException as e:
    (unused_result, exc_info) = e.last_result
    if exc_info:
      # Surface the last underlying exception with its original traceback.
      exceptions.reraise(exc_info[1], tb=exc_info[2])
    else:
      # This shouldn't happen, but if we don't have the exception info for some
      # reason, just convert the MaxRetrialsException.
      raise exceptions.InternalError()
  metrics.CustomTimedEvent(metric_names.SET_DEFAULT_VERSION_API)
def _StopPreviousVersionIfApplies(old_default_version, api_client,
                                  wait_for_stop_version):
  """Stop the previous default version if applicable.

  Cases where a version will not be stopped:

  * If the previous default version is not serving, there is no need to stop it.
  * If the previous default version is an automatically scaled standard
    environment app, it cannot be stopped.

  Args:
    old_default_version: Version, The old default version to stop.
    api_client: appengine_api_client.AppengineApiClient to use to make requests.
    wait_for_stop_version: bool, indicating whether to wait for stop operation
      to finish.
  """
  version_object = old_default_version.version
  status_enum = api_client.messages.Version.ServingStatusValueValuesEnum
  if version_object.servingStatus != status_enum.SERVING:
    log.info(
        'Previous default version [{0}] not serving, so not stopping '
        'it.'.format(old_default_version))
    return
  # A version is standard unless it runs on a VM or declares a flex env.
  is_standard = not (version_object.vm or version_object.env == 'flex' or
                     version_object.env == 'flexible')
  # Standard + automatic scaling (no basic/manual scaling) cannot be stopped.
  if (is_standard and not version_object.basicScaling and
      not version_object.manualScaling):
    log.info(
        'Previous default version [{0}] is an automatically scaled '
        'standard environment app, so not stopping it.'.format(
            old_default_version))
    return
  log.status.Print('Stopping version [{0}].'.format(old_default_version))
  try:
    # Block only if wait_for_stop_version is true.
    # Waiting for stop the previous version to finish adds a long time
    # (reports of 2.5 minutes) to deployment. The risk is that if we don't wait,
    # the operation might fail and leave an old version running. But the time
    # savings is substantial.
    operations_util.CallAndCollectOpErrors(
        api_client.StopVersion,
        service_name=old_default_version.service,
        version_id=old_default_version.id,
        block=wait_for_stop_version)
  except operations_util.MiscOperationError as err:
    # Stopping is best-effort: warn, but leave the deployment successful.
    log.warning('Error stopping version [{0}]: {1}'.format(old_default_version,
                                                           six.text_type(err)))
    log.warning('Version [{0}] is still running and you must stop or delete it '
                'yourself in order to turn it off. (If you do not, you may be '
                'charged.)'.format(old_default_version))
  else:
    if not wait_for_stop_version:
      # TODO(b/318248525): Switch to refer to `gcloud app operations wait` when
      # available
      log.status.Print(
          'Sent request to stop version [{0}]. This operation may take some time '
          'to complete. If you would like to verify that it succeeded, run:\n'
          ' $ gcloud app versions describe -s {0.service} {0.id}\n'
          'until it shows that the version has stopped.'.format(
              old_default_version))

View File

@@ -0,0 +1,176 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the dev_appserver.py wrapper script.
Functions for parsing app.yaml files and installing the required components.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
import os
from googlecloudsdk.core import yaml
import six
# Runtime ID to component mapping. python27-libs is a special token indicating
# that the real runtime id is python27, and that a libraries section has been
# specified in the app.yaml.
_RUNTIME_COMPONENTS = {
    'java': 'app-engine-java',
    'go': 'app-engine-go',
    'python27-libs': 'app-engine-python-extras',
}
# File extensions recognized as app configuration files.
_YAML_FILE_EXTENSIONS = ('.yaml', '.yml')
# Accepted spellings of boolean flag values (consumed by _ParseBoolean).
_TRUE_VALUES = ['true', 'yes', '1']
_FALSE_VALUES = ['false', 'no', '0']
# The subset of dev_appserver flags parsed by this wrapper.
_UPSTREAM_DEV_APPSERVER_FLAGS = ['--support_datastore_emulator']
class MultipleAppYamlError(Exception):
  """An application configuration has more than one valid app yaml file."""
def GetRuntimes(args):
  """Gets a list of unique runtimes that the user is about to run.

  Args:
    args: A list of arguments (typically sys.argv).

  Returns:
    A set of runtime strings. If python27 and libraries section is populated
    in any of the yaml-files, 'python27-libs', a fake runtime id, will be part
    of the set, in conjunction with the original 'python27'.

  Raises:
    MultipleAppYamlError: The supplied application configuration has duplicate
      app yamls.
  """
  runtimes = set()
  for arg in args:
    # Check all the arguments to see if they're application yaml files or
    # directories that include yaml files.
    yaml_candidate = None
    if (os.path.isfile(arg) and
        os.path.splitext(arg)[1] in _YAML_FILE_EXTENSIONS):
      yaml_candidate = arg
    elif os.path.isdir(arg):
      for extension in _YAML_FILE_EXTENSIONS:
        fullname = os.path.join(arg, 'app' + extension)
        if os.path.isfile(fullname):
          if yaml_candidate:
            # Bug fix: the original joined the characters of a single path
            # string; list both conflicting files in the message instead.
            raise MultipleAppYamlError(
                'Directory "{0}" contains conflicting files {1}'.format(
                    arg, ' and '.join([yaml_candidate, fullname])))
          yaml_candidate = fullname
    if yaml_candidate:
      try:
        info = yaml.load_path(yaml_candidate)
      except yaml.Error:
        # Unparsable yaml: skip this candidate rather than failing.
        continue
      # safe_load can return arbitrary objects, we need a dict.
      if not isinstance(info, dict):
        continue
      # Grab the runtime from the yaml, if it exists.
      if 'runtime' in info:
        runtime = info.get('runtime')
        # isinstance (not type == str) so str subclasses are accepted too.
        if isinstance(runtime, str):
          if runtime == 'python27' and info.get('libraries'):
            runtimes.add('python27-libs')
          runtimes.add(runtime)
    elif os.path.isfile(os.path.join(arg, 'WEB-INF', 'appengine-web.xml')):
      # For unstaged Java App Engine apps, which may not have any yaml files.
      runtimes.add('java')
  return runtimes
def GetComponents(runtimes):
  """Gets a list of required components.

  Args:
    runtimes: A list containing the required runtime ids.

  Returns:
    A list of components that must be present.
  """
  # The python component is always required.
  components = ['app-engine-python']
  for runtime_id in runtimes:
    for known_runtime, component in _RUNTIME_COMPONENTS.items():
      # Match by substring containment, so runtime ids that embed a known
      # id also pull in that component.
      if known_runtime in runtime_id:
        components.append(component)
  return components
def _ParseBoolean(value):
"""This is upstream logic from dev_appserver for parsing boolean arguments.
Args:
value: value assigned to a flag.
Returns:
A boolean parsed from value.
Raises:
ValueError: value.lower() is not in _TRUE_VALUES + _FALSE_VALUES.
"""
if isinstance(value, bool):
return value
if value:
value = value.lower()
if value in _TRUE_VALUES:
return True
if value in _FALSE_VALUES:
return False
repr_value = (repr(value) for value in _TRUE_VALUES + _FALSE_VALUES)
raise ValueError('%r unrecognized boolean; known booleans are %s.' %
(value, ', '.join(repr_value)))
return True
def ParseDevAppserverFlags(args):
  """Parse flags from app engine dev_appserver.py.

  Only the subset of args are parsed here. These args are listed in
  _UPSTREAM_DEV_APPSERVER_FLAGS.

  Args:
    args: A list of arguments (typically sys.argv).

  Returns:
    options: An argparse.Namespace containing the command line arguments.
  """
  def _IsUpstreamFlag(arg):
    # Prefix match so both '--flag' and '--flag=value' forms are kept.
    return any(arg.startswith(flag) for flag in _UPSTREAM_DEV_APPSERVER_FLAGS)

  upstream_args = [arg for arg in args if _IsUpstreamFlag(arg)]
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--support_datastore_emulator', dest='support_datastore_emulator',
      type=_ParseBoolean, const=True, nargs='?', default=False)
  return parser.parse_args(upstream_args)

View File

@@ -0,0 +1,583 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to parse .yaml files for an appengine app."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.appengine.api import appinfo
from googlecloudsdk.appengine.api import appinfo_errors
from googlecloudsdk.appengine.api import appinfo_includes
from googlecloudsdk.appengine.api import croninfo
from googlecloudsdk.appengine.api import dispatchinfo
from googlecloudsdk.appengine.api import queueinfo
from googlecloudsdk.appengine.api import validation
from googlecloudsdk.appengine.api import yaml_errors
from googlecloudsdk.appengine.datastore import datastore_index
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
HINT_PROJECT = ('This field is not used by gcloud and must be removed. '
'Project name should instead be specified either by '
'`gcloud config set project MY_PROJECT` or by setting the '
'`--project` flag on individual command executions.')
HINT_VERSION = ('This field is not used by gcloud and must be removed. '
'Versions are generated automatically by default but can also '
'be manually specified by setting the `--version` flag on '
'individual command executions.')
HINT_THREADSAFE = ('This field is not supported with runtime [{}] and can '
'safely be removed.')
HINT_READABLE = ('This field is not configurable with runtime [{}] since '
'static files are always readable by the application. It '
'can safely be removed.')
MANAGED_VMS_DEPRECATION_WARNING = """\
Deployments using `vm: true` have been deprecated. Please update your \
app.yaml to use `env: flex`. To learn more, please visit \
https://cloud.google.com/appengine/docs/flexible/migration.
"""
UPGRADE_FLEX_PYTHON_URL = (
'https://cloud.google.com/appengine/docs/flexible/python/migrating')
APP_ENGINE_APIS_DEPRECATION_WARNING = (
'Support for the compat runtimes and their base images '
'(enable_app_engine_apis: true) has been deprecated. Please migrate to a '
'new base image, or use a Google managed runtime. '
'To learn more, visit {}.').format(UPGRADE_FLEX_PYTHON_URL)
PYTHON_SSL_WARNING = (
'You are using an outdated version [2.7] of the Python SSL library. '
'Please update your app.yaml file to specify SSL library [latest] to '
'avoid security risks. On April 2, 2018, version 2.7 will be '
'decommissioned and your app will be blocked from deploying until you '
'you specify SSL library [latest] or [2.7.11].'
'For more information, visit {}.'
).format('https://cloud.google.com/appengine/docs/deprecations/python-ssl-27')
FLEX_PY34_WARNING = (
'You are using a deprecated version [3.4] of Python on the App '
'Engine Flexible environment. Please update your app.yaml file to specify '
'[python_version: latest]. Python 3.4 will be decommissioned on March 29, '
'2019. After this date, new deployments will fail. For more information '
'about this deprecation, visit {}.'
).format('https://cloud.google.com/appengine/docs/deprecations/python34')
DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_ZERO_WARNING = (
'You might have set automatic_scaling.max_instances to 0. Starting from'
' March, 2025, App Engine sets the automatic scaling maximum'
' instances default for standard environment deployments to 20. This change'
" doesn't impact existing apps. To disable the maximum instances default"
' configuration setting, specify the maximum permitted value 2147483647.'
' For more information, see {}. \n'
).format(
'https://cloud.google.com/appengine/docs/standard/reference/app-yaml.md#scaling_elements'
)
DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_WARNING = (
'You might be using automatic scaling for a standard environment'
' deployment, without providing a value for'
' automatic_scaling.max_instances. Starting from March, 2025, App'
' Engine sets the automatic scaling maximum instances default for standard'
" environment deployments to 20. This change doesn't impact existing apps."
' To override the default, specify the new max_instances value in your'
' app.yaml file, and deploy a new version or redeploy over an existing'
' version. For details on max_instances, see {}. \n'
).format(
'https://cloud.google.com/appengine/docs/standard/reference/app-yaml.md#scaling_elements'
)
# This is the equivalent of the following in app.yaml:
# skip_files:
# - ^(.*/)?#.*#$
# - ^(.*/)?.*~$
# - ^(.*/)?.*\.py[co]$
# - ^(.*/)?.*/RCS/.*$
# - ^(.*/)?.git(ignore|/.*)$
# - ^(.*/)?node_modules/.*
DEFAULT_SKIP_FILES_FLEX = (r'^(.*/)?#.*#$|'
r'^(.*/)?.*~$|'
r'^(.*/)?.*\.py[co]$|'
r'^(.*/)?.*/RCS/.*$|'
r'^(.*/)?.git(ignore|/.*)$|'
r'^(.*/)?node_modules/.*$')
class Error(exceptions.Error):
  """A base error for this module; all module errors derive from it."""
  pass
class YamlParseError(Error):
  """An exception for when a specific yaml file is not well formed."""

  def __init__(self, file_path, e):
    """Creates a new Error.

    Args:
      file_path: str, The full path of the file that failed to parse.
      e: Exception, The exception that was originally raised.
    """
    message = ('An error occurred while parsing file: [{file_path}]\n{err}'
               .format(file_path=file_path, err=e))
    super(YamlParseError, self).__init__(message)
class YamlValidationError(Error):
  """An exception for when a specific yaml file has invalid info."""
  pass
class AppConfigError(Error):
  """Errors in Application Config (app.yaml and related files)."""
class _YamlInfo(object):
"""A base class for holding some basic attributes of a parsed .yaml file."""
def __init__(self, file_path, parsed):
"""Creates a new _YamlInfo.
Args:
file_path: str, The full path the file that was parsed.
parsed: The parsed yaml data as one of the *_info objects.
"""
self.file = file_path
self.parsed = parsed
@staticmethod
def _ParseYaml(file_path, parser):
"""Parses the given file using the given parser.
Args:
file_path: str, The full path of the file to parse.
parser: str, The parser to use to parse this yaml file.
Returns:
The result of the parse.
"""
with files.FileReader(file_path) as fp:
return parser(fp)
class ConfigYamlInfo(_YamlInfo):
  """A class for holding some basic attributes of a parsed config .yaml file."""

  # Supported config file base names.
  CRON = 'cron'
  DISPATCH = 'dispatch'
  INDEX = 'index'
  QUEUE = 'queue'

  # Maps a config base name to the parser that loads it.
  CONFIG_YAML_PARSERS = {
      CRON: croninfo.LoadSingleCron,
      DISPATCH: dispatchinfo.LoadSingleDispatch,
      INDEX: datastore_index.ParseIndexDefinitions,
      QUEUE: queueinfo.LoadSingleQueue,
  }

  def __init__(self, file_path, config, parsed):
    """Creates a new ConfigYamlInfo.

    Args:
      file_path: str, The full path the file that was parsed.
      config: str, The name of the config that was parsed (i.e. 'cron')
      parsed: The parsed yaml data as one of the *_info objects.
    """
    super(ConfigYamlInfo, self).__init__(file_path, parsed)
    self.config = config

  @property
  def name(self):
    """Name of the config file without extension, e.g. `cron`."""
    (base, _) = os.path.splitext(os.path.basename(self.file))
    return base

  @staticmethod
  def FromFile(file_path):
    """Parses the given config file.

    Args:
      file_path: str, The full path to the config file.

    Raises:
      Error: If a user tries to parse a dos.yaml file.
      YamlParseError: If the file is not valid.

    Returns:
      A ConfigYamlInfo object for the parsed file.
    """
    base, ext = os.path.splitext(os.path.basename(file_path))
    if base == 'dos':
      raise Error(
          '`gcloud app deploy dos.yaml` is no longer supported. Please use'
          ' `gcloud app firewall-rules` instead.'
      )
    # Only recognized base names with a yaml extension get a parser; anything
    # else is silently ignored (returns None below).
    parser = (ConfigYamlInfo.CONFIG_YAML_PARSERS.get(base)
              if os.path.isfile(file_path) and ext.lower() in ['.yaml', '.yml']
              else None)
    if not parser:
      return None
    try:
      parsed = _YamlInfo._ParseYaml(file_path, parser)
      if not parsed:
        raise YamlParseError(file_path, 'The file is empty')
    except (yaml_errors.Error, validation.Error) as e:
      raise YamlParseError(file_path, e)
    # The 'application' field is obsolete; direct users to --project instead.
    _CheckIllegalAttribute(
        name='application',
        yaml_info=parsed,
        extractor_func=lambda yaml: yaml.application,
        file_path=file_path,
        msg=HINT_PROJECT)
    if base == 'dispatch':
      # Dispatch files get the subclass that can emit Admin API rules.
      return DispatchConfigYamlInfo(file_path, config=base, parsed=parsed)
    return ConfigYamlInfo(file_path, config=base, parsed=parsed)
class DispatchConfigYamlInfo(ConfigYamlInfo):
  """Provides methods for getting 1p-ready representation."""

  def _HandlerToDict(self, handler):
    """Converts a dispatchinfo handler into a 1p-ready dict."""
    parsed_url = dispatchinfo.ParsedURL(handler.url)

    # Non-exact hosts get a leading wildcard; non-exact paths get a trailing
    # one (with a '/' preserved before '*' when the path ends in '/').
    domain = parsed_url.host if parsed_url.host_exact else '*' + parsed_url.host

    path = parsed_url.path
    if not parsed_url.path_exact:
      suffix = '/*' if path.endswith('/') else '*'
      path = path.rstrip('/') + suffix

    return {
        'domain': domain,
        'path': path,
        'service': handler.service,
    }

  def GetRules(self):
    """Get dispatch rules on a format suitable for Admin API.

    Returns:
      [{'service': str, 'domain': str, 'path': str}], rules.
    """
    handlers = self.parsed.dispatch or []
    return [self._HandlerToDict(handler) for handler in handlers]
class ServiceYamlInfo(_YamlInfo):
  """A class for holding some basic attributes of a parsed service yaml file."""

  # Service name used when the yaml file does not declare one.
  DEFAULT_SERVICE_NAME = 'default'

  def __init__(self, file_path, parsed):
    """Creates a new ServiceYamlInfo.

    Args:
      file_path: str, The full path the file that was parsed.
      parsed: appinfo.AppInfoExternal, parsed Application Configuration.
    """
    super(ServiceYamlInfo, self).__init__(file_path, parsed)
    self.module = parsed.service or ServiceYamlInfo.DEFAULT_SERVICE_NAME

    # '2', 'flex' and 'flexible' all select the Flexible environment;
    # `vm: true` or `runtime: vm` selects the legacy Managed VMs environment.
    if parsed.env in ['2', 'flex', 'flexible']:
      self.env = env.FLEX
    elif parsed.vm or parsed.runtime == 'vm':
      self.env = env.MANAGED_VMS
    else:
      self.env = env.STANDARD

    # All `env: flex` apps are hermetic. All `env: standard` apps are not
    # hermetic. All `vm: true` apps are hermetic IFF they don't use static
    # files.
    if self.env is env.FLEX:
      self.is_hermetic = True
    elif parsed.vm:
      for urlmap in parsed.handlers:
        if urlmap.static_dir or urlmap.static_files:
          self.is_hermetic = False
          break
      else:
        # for/else: no handler served static content, so the app is hermetic.
        self.is_hermetic = True
    else:
      self.is_hermetic = False

    # Must run before _UpdateSkipFiles: the skip_files reset only applies
    # when the user did not set skip_files explicitly.
    self._InitializeHasExplicitSkipFiles(file_path, parsed)
    self._UpdateSkipFiles(parsed)

    if (self.env is env.MANAGED_VMS) or self.is_hermetic:
      self.runtime = parsed.GetEffectiveRuntime()
      self._UpdateVMSettings()
    else:
      self.runtime = parsed.runtime

    # New "Ti" style runtimes
    self.is_ti_runtime = env.GetTiRuntimeRegistry().Get(self.runtime, self.env)

  @staticmethod
  def FromFile(file_path):
    """Parses the given service file.

    Args:
      file_path: str, The full path to the service file.

    Raises:
      YamlParseError: If the file is not a valid Yaml-file.
      YamlValidationError: If validation of parsed info fails.

    Returns:
      A ServiceYamlInfo object for the parsed file.
    """
    try:
      parsed = _YamlInfo._ParseYaml(file_path, appinfo_includes.Parse)
    except (yaml_errors.Error, appinfo_errors.Error) as e:
      raise YamlParseError(file_path, e)

    info = ServiceYamlInfo(file_path, parsed)
    info.Validate()
    return info

  def Validate(self):
    """Displays warnings and raises exceptions for non-schema errors.

    Raises:
      YamlValidationError: If validation of parsed info fails.
    """
    # For `runtime: vm`, the real runtime lives in a sub-field; resolve it
    # once up front so the flex-specific checks below can use it.
    if self.parsed.runtime == 'vm':
      vm_runtime = self.parsed.GetEffectiveRuntime()
    else:
      vm_runtime = None

    # Hard failures first: retired or self-contradictory runtime settings.
    if self.parsed.runtime == 'python':
      raise YamlValidationError(
          'Service [{service}] uses unsupported Python 2.5 runtime. '
          'Please use [runtime: python27] instead.'.format(
              service=(self.parsed.service or
                       ServiceYamlInfo.DEFAULT_SERVICE_NAME)))
    elif self.parsed.runtime == 'python-compat':
      raise YamlValidationError(
          '"python-compat" is not a supported runtime.')
    elif self.parsed.runtime == 'custom' and not self.parsed.env:
      raise YamlValidationError(
          'runtime "custom" requires that env be explicitly specified.')

    if self.env is env.MANAGED_VMS:
      log.warning(MANAGED_VMS_DEPRECATION_WARNING)

    if (self.env is env.FLEX and self.parsed.beta_settings and
        self.parsed.beta_settings.get('enable_app_engine_apis')):
      log.warning(APP_ENGINE_APIS_DEPRECATION_WARNING)

    if self.env is env.FLEX and vm_runtime == 'python27':
      raise YamlValidationError(
          'The "python27" is not a valid runtime in env: flex. '
          'Please use [python] instead.')

    if self.env is env.FLEX and vm_runtime == 'python-compat':
      log.warning('[runtime: {}] is deprecated. Please use [runtime: python] '
                  'instead. See {} for more info.'
                  .format(vm_runtime, UPGRADE_FLEX_PYTHON_URL))

    # TODO: b/388712720 - Cleanup warning once backend experiments are cleaned
    # Raise warning about default max instances forward change for GAE Standard
    # when the user has selected AutomaticScaling without providing a
    # max_instances value or will use AutomaticScaling by default.
    if (
        self.env is not env.FLEX
        and not self.parsed.basic_scaling
        and not self.parsed.manual_scaling
        and (
            not self.parsed.automatic_scaling
            or (
                self.parsed.automatic_scaling
                and not self.parsed.automatic_scaling.max_instances
                # Explicit 0 is handled by the dedicated warning below.
                and self.parsed.automatic_scaling.max_instances != 0
            )
        )
    ):
      log.warning(DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_WARNING)

    # TODO: b/388712720 - Cleanup warning once backend experiments are cleaned
    # Raise warning about default max instances forward change for GAE Standard
    # when the user has selected AutomaticScaling and explicitly provided a
    # value of zero for max_instances.
    if (
        self.env is not env.FLEX
        and self.parsed.automatic_scaling
        and self.parsed.automatic_scaling.max_instances == 0
    ):
      log.warning(DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_ZERO_WARNING)

    # Surface any warnings the appinfo parser collected during parsing.
    for warn_text in self.parsed.GetWarnings():
      log.warning('In file [{0}]: {1}'.format(self.file, warn_text))

    if (self.env is env.STANDARD and
        self.parsed.runtime == 'python27' and
        HasLib(self.parsed, 'ssl', '2.7')):
      log.warning(PYTHON_SSL_WARNING)

    if (self.env is env.FLEX and
        vm_runtime == 'python' and
        GetRuntimeConfigAttr(self.parsed, 'python_version') == '3.4'):
      log.warning(FLEX_PY34_WARNING)

    # Project and version must come from gcloud flags, not from the yaml.
    _CheckIllegalAttribute(
        name='application',
        yaml_info=self.parsed,
        extractor_func=lambda yaml: yaml.application,
        file_path=self.file,
        msg=HINT_PROJECT)

    _CheckIllegalAttribute(
        name='version',
        yaml_info=self.parsed,
        extractor_func=lambda yaml: yaml.version,
        file_path=self.file,
        msg=HINT_VERSION)

    self._ValidateTi()

  def _ValidateTi(self):
    """Validation specifically for Ti-runtimes."""
    if not self.is_ti_runtime:
      return

    # Ti runtimes reject legacy attributes that only made sense on python27.
    _CheckIllegalAttribute(
        name='threadsafe',
        yaml_info=self.parsed,
        extractor_func=lambda yaml: yaml.threadsafe,
        file_path=self.file,
        msg=HINT_THREADSAFE.format(self.runtime))

    # pylint: disable=cell-var-from-loop
    for handler in self.parsed.handlers:
      _CheckIllegalAttribute(
          name='application_readable',
          yaml_info=handler,
          extractor_func=lambda yaml: handler.application_readable,
          file_path=self.file,
          msg=HINT_READABLE.format(self.runtime))

  def RequiresImage(self):
    """Returns True if we'll need to build a docker image."""
    return self.env is env.MANAGED_VMS or self.is_hermetic

  def _UpdateVMSettings(self):
    """Overwrites vm_settings for App Engine services with VMs.

    Also sets module_yaml_path which is needed for some runtimes.

    Raises:
      AppConfigError: if the function was called for a standard service
    """
    if self.env not in [env.MANAGED_VMS, env.FLEX]:
      raise AppConfigError(
          'This is not an App Engine Flexible service. Please set `env` '
          'field to `flex`.')
    if not self.parsed.vm_settings:
      self.parsed.vm_settings = appinfo.VmSettings()

    self.parsed.vm_settings['module_yaml_path'] = os.path.basename(self.file)

  def GetAppYamlBasename(self):
    """Returns the basename for the app.yaml file for this service."""
    return os.path.basename(self.file)

  def HasExplicitSkipFiles(self):
    """Returns whether user has explicitly defined skip_files in app.yaml."""
    return self._has_explicit_skip_files

  def _InitializeHasExplicitSkipFiles(self, file_path, parsed):
    """Read app.yaml to determine whether user explicitly defined skip_files."""
    if getattr(parsed, 'skip_files', None) == appinfo.DEFAULT_SKIP_FILES:
      # Make sure that this was actually a default, not from the file.
      try:
        contents = files.ReadFileContents(file_path)
      except files.Error:  # If the class was initiated with a non-existent file
        contents = ''
      self._has_explicit_skip_files = 'skip_files' in contents
    else:
      self._has_explicit_skip_files = True

  def _UpdateSkipFiles(self, parsed):
    """Resets skip_files field to Flex default if applicable."""
    if self.RequiresImage() and not self.HasExplicitSkipFiles():
      # pylint:disable=protected-access
      parsed.skip_files = validation._RegexStrValue(
          validation.Regex(DEFAULT_SKIP_FILES_FLEX),
          DEFAULT_SKIP_FILES_FLEX,
          'skip_files')
      # pylint:enable=protected-access
def HasLib(parsed, name, version=None):
  """Check if the parsed yaml has specified the given library.

  Args:
    parsed: parsed from yaml to python object
    name: str, Name of the library
    version: str, If specified, also matches against the version of the library.

  Returns:
    True if library with optionally the given version is present.
  """
  for lib in parsed.libraries or []:
    if lib.name != name:
      continue
    # A falsy version means "match on name alone".
    if not version or lib.version == version:
      return True
  return False
def GetRuntimeConfigAttr(parsed, attr):
  """Retrieve an attribute under runtime_config section.

  Args:
    parsed: parsed from yaml to python object
    attr: str, Attribute name, e.g. `runtime_version`

  Returns:
    The value of runtime_config.attr or None if the attribute isn't set.
  """
  # A missing runtime_config section behaves like an empty one.
  runtime_config = parsed.runtime_config or {}
  return runtime_config.get(attr)
def _CheckIllegalAttribute(name, yaml_info, extractor_func, file_path, msg=''):
  """Validates that an illegal attribute is not set.

  Args:
    name: str, The name of the attribute in the yaml files.
    yaml_info: AppInfoExternal, The yaml to validate.
    extractor_func: func(AppInfoExternal)->str, A function to extract the
      value of the attribute from a _YamlInfo object.
    file_path: str, The path of file from which yaml_info was parsed.
    msg: str, Message to couple with the error

  Raises:
    YamlValidationError: If illegal attribute is set.
  """
  value = extractor_func(yaml_info)
  if value is None:
    return
  # The attribute is present; reject it with the caller-supplied hint.
  raise YamlValidationError(
      'The [{0}] field is specified in file [{1}]. '.format(name, file_path)
      + msg)