feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,79 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for opening URL:s related to the app in the browser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.app import deploy_command_util
from googlecloudsdk.appengine.api import appinfo
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.command_lib.util import check_browser
from googlecloudsdk.core import log
from googlecloudsdk.core.credentials import devshell
def OpenURL(url):
  """Launch the system default web browser with `url` in a new tab.

  Args:
    url: The full HTTP(S) URL to open.
  """
  # Deferred import keeps module load fast; only pay the cost when a URL is
  # actually opened.
  # pylint: disable=g-import-not-at-top
  import webbrowser
  # The devshell environment has its own 'browser' handler which simply prints
  # the URL, so printing it again here would be redundant.
  in_devshell = devshell.IsDevshellEnvironment()
  if not in_devshell:
    log.status.Print(
        'Opening [{0}] in a new tab in your default browser.'.format(url))
  webbrowser.open_new_tab(url)
def BrowseApp(project, service, version, launch_browser):
  """Browse the given service at the given version.

  Args:
    project: str, project ID.
    service: str, specific service, 'default' if None
    version: str, specific version, latest if None
    launch_browser: boolean, if False only print url

  Returns:
    None if the browser should open the URL
    The relevant output as a dict for calliope format to print if not

  Raises:
    MissingApplicationError: If an app does not exist.
  """
  try:
    url = deploy_command_util.GetAppHostname(
        app_id=project,
        service=service,
        version=version,
        use_ssl=appinfo.SECURE_HTTPS,
        deploy=False)
  except apitools_exceptions.HttpNotFoundError:
    log.debug('No app found:', exc_info=True)
    raise exceptions.MissingApplicationError(project)
  if not check_browser.ShouldLaunchBrowser(launch_browser):
    # Not launching a browser: hand the details to calliope for formatting.
    return {
        'url': url,
        'service': service or 'default',
        'version': version,
    }
  OpenURL(url)
  return None

View File

@@ -0,0 +1,240 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for app creation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
# Shown before app creation: the region choice is permanent for a project.
APP_CREATE_WARNING = """\
Creating an App Engine application for a project is irreversible and the region
cannot be changed. More information about regions is at
<https://cloud.google.com/appengine/docs/locations>.
"""

# Heads-up about the server-side change to the automatic-scaling
# max_instances default for standard environment deployments.
DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_WARNING = """\
Starting from March, 2025, App Engine sets the automatic scaling maximum instances
default for standard environment deployments to 20. This change doesn't impact
existing apps. To override the default, specify the new max_instances value in your
app.yaml file, and deploy a new version or redeploy over an existing version.
For more details on max_instances, see
<https://cloud.google.com/appengine/docs/standard/reference/app-yaml.md#scaling_elements>.
"""

# Cloud Run cross-sell message; printed with a 'NOTE: ' prefix by
# CreateApp and CreateAppInteractively below.
TRY_CLOUD_RUN_NUDGE_MSG = """\
Cloud Run offers the most modern fully managed application hosting experience
with lower minimum billable times and support for GPUs on demand for your AI/ML workloads.
Deploy code written in any programming language supported by App Engine on Cloud Run.
Learn more at https://cloud.google.com/run/docs/quickstarts#build-and-deploy-a-web-service
"""
class UnspecifiedRegionError(exceptions.Error):
  """Region is not provided on the command line and running interactively."""
  # No raise sites in this module; presumably raised by callers when --region
  # is omitted and the user cannot be prompted — confirm at call sites.
class AppAlreadyExistsError(exceptions.Error):
  """The app which is getting created already exists."""
  # Raised by CheckAppNotExists and CreateApp in this module.
def AddAppCreateFlags(parser):
  """Add the common flags to a app create command.

  Args:
    parser: the argparse-style parser to register the flags on.
  """
  region_help = (
      'The region to create the app within. '
      'Use `gcloud app regions list` to list available regions. '
      'If not provided, select region interactively.'
  )
  service_account_help = """\
The app-level default service account to create the app with.
Note that you can specify a distinct service account for each
App Engine version with `gcloud app deploy --service-account`.
However if you do not specify a version-level service account,
this default will be used. If this parameter is not provided for app
creation, the app-level default will be set to be the out-of-box
App Engine Default Service Account,
https://cloud.google.com/appengine/docs/standard/python3/service-account
outlines the limitation of that service account."""
  # Registration order is preserved: it determines the order flags appear in
  # generated help output.
  parser.add_argument('--region', help=region_help)
  parser.add_argument('--service-account', help=service_account_help)
  parser.add_argument(
      '--ssl-policy',
      choices=['TLS_VERSION_1_0', 'TLS_VERSION_1_2'],
      help='The app-level SSL policy to create the app with.',
  )
def CheckAppNotExists(api_client, project):
  """Raises an error if the app already exists.

  Args:
    api_client: The App Engine Admin API client
    project: The GCP project

  Raises:
    AppAlreadyExistsError if app already exists
  """
  try:
    existing = api_client.GetApplication()  # Should raise NotFoundError
  except apitools_exceptions.HttpNotFoundError:
    # No application yet — exactly the state we want.
    return
  region = (' in region [{}]'.format(existing.locationId)
            if existing.locationId else '')
  raise AppAlreadyExistsError(
      'The project [{project}] already contains an App Engine '
      'application{region}. You can deploy your application using '
      '`gcloud app deploy`.'.format(project=project, region=region))
def CreateApp(
    api_client,
    project,
    region,
    suppress_warning=False,
    service_account=None,
    ssl_policy=None,
):
  """Create an App Engine app in the given region.

  Prints info about the app being created and displays a progress tracker.

  Args:
    api_client: The App Engine Admin API client
    project: The GCP project
    region: The region to create the app
    suppress_warning: True if user doesn't need to be warned this is
      irreversible.
    service_account: The app level service account for the App Engine app.
    ssl_policy: str, the app-level SSL policy flag value ('TLS_VERSION_1_0'
      or 'TLS_VERSION_1_2'), or None.

  Raises:
    AppAlreadyExistsError if app already exists
  """
  # Map the flag value onto the Admin API enum; unknown/None maps to None.
  enum_type = api_client.messages.Application.SslPolicyValueValuesEnum
  ssl_policy_enum = {
      'TLS_VERSION_1_0': enum_type.DEFAULT,
      'TLS_VERSION_1_2': enum_type.MODERN,
  }.get(ssl_policy)

  if not suppress_warning:
    log.status.Print(
        'You are creating an app for project [{project}].'.format(
            project=project))
    if service_account:
      log.status.Print(
          'Designating app-level default service account to be '
          '[{service_account}].'.format(service_account=service_account))
    if ssl_policy_enum:
      log.status.Print(
          'Designating app-level SSL policy to be [{ssl_policy}].'.format(
              ssl_policy=ssl_policy))
    log.warning(APP_CREATE_WARNING)
    # TODO: b/388712720 - Cleanup warning once backend experiments are cleaned
    log.warning(DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_WARNING)
    log.status.Print('NOTE: ' + TRY_CLOUD_RUN_NUDGE_MSG)

  try:
    api_client.CreateApp(
        region, service_account=service_account, ssl_policy=ssl_policy_enum)
  except apitools_exceptions.HttpConflictError:
    raise AppAlreadyExistsError(
        'The project [{project}] already contains an App Engine application. '
        'You can deploy your application using `gcloud app deploy`.'.format(
            project=project))
def CreateAppInteractively(
    api_client,
    project,
    regions=None,
    extra_warning='',
    service_account=None,
    ssl_policy=None,
):
  """Interactively choose a region and create an App Engine app.

  The caller is responsible for calling this method only when the user can be
  prompted interactively.

  Example interaction:
      Please choose the region where you want your App Engine application
      located:
        [1] us-east1      (supports standard and flexible)
        [2] europe-west   (supports standard)
        [3] us-central    (supports standard and flexible)
        [4] cancel
      Please enter your numeric choice:  1

  Args:
    api_client: The App Engine Admin API client
    project: The GCP project
    regions: The list of regions to choose from; if None, all possible regions
      are listed
    extra_warning: An additional warning to print before listing regions.
    service_account: The app level service account for the App Engine app.
    ssl_policy: str, the app-level SSL policy flag value for this App Engine
      app.

  Raises:
    AppAlreadyExistsError if app already exists
  """
  log.status.Print('You are creating an app for project [{}].'.format(project))
  log.warning(APP_CREATE_WARNING)
  # TODO: b/388712720 - Cleanup warning once backend experiments are cleaned
  log.warning(DEFAULT_MAX_INSTANCES_FORWARD_CHANGE_WARNING)
  log.status.Print('NOTE: ' + TRY_CLOUD_RUN_NUDGE_MSG)

  # Fall back to every available region; de-duplicate and sort for a stable
  # prompt ordering.
  if not regions:
    regions = sorted(set(api_client.ListRegions()), key=str)
  if extra_warning:
    log.warning(extra_warning)
  chosen = console_io.PromptChoice(
      regions,
      message=('Please choose the region where you want your App Engine '
               'application located:\n\n'),
      cancel_option=True,
  )
  # CreateApp re-prints no warnings here (suppress_warning) since we already
  # warned above.
  CreateApp(
      api_client,
      project,
      regions[chosen].region,
      suppress_warning=True,
      service_account=service_account,
      ssl_policy=ssl_policy,
  )

View File

@@ -0,0 +1,939 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for `gcloud app` deployment.
Mostly created to selectively enable Cloud Endpoints in the beta/preview release
tracks.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
import os
import re
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib import scheduler
from googlecloudsdk.api_lib import tasks
from googlecloudsdk.api_lib.app import build as app_cloud_build
from googlecloudsdk.api_lib.app import deploy_app_command_util
from googlecloudsdk.api_lib.app import deploy_command_util
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import metric_names
from googlecloudsdk.api_lib.app import runtime_builders
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app import version_util
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.api_lib.datastore import index_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.tasks import app_deploy_migration_util
from googlecloudsdk.api_lib.util import exceptions as core_api_exceptions
from googlecloudsdk.calliope import actions
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.app import create_util
from googlecloudsdk.command_lib.app import deployables
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.command_lib.app import flags
from googlecloudsdk.command_lib.app import output_helpers
from googlecloudsdk.command_lib.app import source_files_util
from googlecloudsdk.command_lib.app import staging
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import metrics
from googlecloudsdk.core import properties
from googlecloudsdk.core.configurations import named_configs
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import times
import six
# Cloud Console cron/task-queue page; the '{}' placeholder takes a project ID.
_TASK_CONSOLE_LINK = """\
https://console.cloud.google.com/appengine/taskqueues/cron?project={}
"""
# The regex for runtimes prior to runtime builders support. Used to deny the
# use of pinned runtime builders when this feature is disabled.
ORIGINAL_RUNTIME_RE_STRING = r'[a-z][a-z0-9\-]{0,29}'
# Anchored form: the runtime name must match in full (\Z).
ORIGINAL_RUNTIME_RE = re.compile(ORIGINAL_RUNTIME_RE_STRING + r'\Z')
# Max App Engine file size; see https://cloud.google.com/appengine/docs/quotas
_MAX_FILE_SIZE_STANDARD = 32 * 1024 * 1024
# First-gen runtimes that still need the _MAX_FILE_SIZE_STANDARD check:
_RUNTIMES_WITH_FILE_SIZE_LIMITS = [
    'java7', 'java8', 'java8g', 'python27', 'go19', 'php55'
]
class Error(core_exceptions.Error):
  """Base error for this module."""
  # All module exceptions below derive from this, so callers can catch a
  # single type.
class VersionPromotionError(Error):
  """Deployment succeeded but promoting the new version to default failed."""

  def __init__(self, err_str):
    # Actionable guidance first; the raw error text is appended for debugging.
    message = (
        'Your deployment has succeeded, but promoting the new version to '
        'default failed. '
        'You may not have permissions to change traffic splits. '
        'Changing traffic splits requires the Owner, Editor, App Engine Admin, '
        'or App Engine Service Admin role. '
        'Please contact your project owner and use the '
        '`gcloud app services set-traffic --splits <version>=1` command to '
        'redirect traffic to your newly deployed version.\n\n'
        'Original error: ' + err_str)
    super(VersionPromotionError, self).__init__(message)
class StoppedApplicationError(Error):
  """Error if deployment fails because application is stopped/disabled."""

  def __init__(self, app):
    message = (
        'Unable to deploy to application [{}] with status [{}]: '
        'Deploying to stopped apps is not allowed.'.format(
            app.id, app.servingStatus))
    super(StoppedApplicationError, self).__init__(message)
class InvalidRuntimeNameError(Error):
  """Error for runtime names that are not allowed in the given environment."""

  def __init__(self, runtime, allowed_regex):
    message = (
        'Invalid runtime name: [{}]. '
        'Must match regular expression [{}].'.format(runtime, allowed_regex))
    super(InvalidRuntimeNameError, self).__init__(message)
class RequiredFileMissingError(Error):
  """Error for skipped/ignored files that must be uploaded."""

  def __init__(self, filename):
    message = (
        'Required file is not uploaded: [{}]. '
        'This file should not be added to an ignore list ('
        'https://cloud.google.com/sdk/gcloud/reference/topic/gcloudignore)'
    ).format(filename)
    super(RequiredFileMissingError, self).__init__(message)
class FlexImageBuildOptions(enum.Enum):
  """Enum declaring different options for building image for flex deploys."""
  ON_CLIENT = 1  # Build the image on the client before deploying.
  ON_SERVER = 2  # Upload files so that the server can build the image.
  BUILDPACK_ON_CLIENT = 3  # Build on the client using buildpacks.
def GetFlexImageBuildOption(default_strategy=FlexImageBuildOptions.ON_CLIENT):
  """Determines where the build should be performed.

  Consults the `app/trigger_build_server_side` and
  `app/use_flex_with_buildpacks` properties.

  Args:
    default_strategy: FlexImageBuildOptions, used when neither property forces
      a specific strategy.

  Returns:
    FlexImageBuildOptions, the selected build strategy.
  """
  server_side = properties.VALUES.app.trigger_build_server_side.GetBool(
      required=False)
  with_buildpacks = properties.VALUES.app.use_flex_with_buildpacks.GetBool(
      required=False)
  # Precedence: explicit server-side opt-in wins; then buildpacks; an unset
  # (None) server-side property with no buildpacks falls back to the caller's
  # default; an explicit opt-out forces a client build.
  if server_side:
    result = FlexImageBuildOptions.ON_SERVER
  elif with_buildpacks:
    result = FlexImageBuildOptions.BUILDPACK_ON_CLIENT
  elif server_side is None:
    result = default_strategy
  else:
    result = FlexImageBuildOptions.ON_CLIENT
  log.debug('Flex image build option: %s', result)
  return result
class DeployOptions(object):
  """Values of options that affect deployment process in general.

  No deployment details (e.g. sources for a specific deployment).

  Attributes:
    promote: True if the deployed version should receive all traffic.
    stop_previous_version: Stop previous version
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, when to
      use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, whether to use parallel build and deployment path.
      Only supported in v1beta and v1alpha App Engine Admin API.
    flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
      should upload files so that the server can build the image, or build the
      image on client, or build the image on client using the buildpacks.
  """

  def __init__(self,
               promote,
               stop_previous_version,
               runtime_builder_strategy,
               parallel_build=False,
               flex_image_build_option=FlexImageBuildOptions.ON_CLIENT):
    self.promote = promote
    self.stop_previous_version = stop_previous_version
    self.runtime_builder_strategy = runtime_builder_strategy
    self.parallel_build = parallel_build
    self.flex_image_build_option = flex_image_build_option

  @classmethod
  def FromProperties(cls,
                     runtime_builder_strategy,
                     parallel_build=False,
                     flex_image_build_option=FlexImageBuildOptions.ON_CLIENT):
    """Initialize DeployOptions using user properties where necessary.

    Args:
      runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, when
        to use the new CloudBuild-based runtime builders (alternative is old
        externalized runtimes).
      parallel_build: bool, whether to use parallel build and deployment path.
        Only supported in v1beta and v1alpha App Engine Admin API.
      flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
        should upload files so that the server can build the image or build the
        image on client or build the image on client using the buildpacks.

    Returns:
      DeployOptions, the deploy options.
    """
    # promote / stop_previous_version come straight from user properties.
    return cls(
        properties.VALUES.app.promote_by_default.GetBool(),
        properties.VALUES.app.stop_previous_version.GetBool(),
        runtime_builder_strategy,
        parallel_build,
        flex_image_build_option)
class ServiceDeployer(object):
  """Coordinator (reusable) for deployment of one service at a time.

  Attributes:
    api_client: api_lib.app.appengine_api_client.AppengineClient, App Engine
      Admin API client.
    deploy_options: DeployOptions, the options to use for services deployed by
      this ServiceDeployer.
  """

  def __init__(self, api_client, deploy_options):
    self.api_client = api_client
    self.deploy_options = deploy_options

  def _ValidateRuntime(self, service_info):
    """Validates explicit runtime builders are not used without the feature on.

    Args:
      service_info: yaml_parsing.ServiceYamlInfo, service configuration to be
        deployed

    Raises:
      InvalidRuntimeNameError: if the runtime name is invalid for the
        deployment (see above).
    """
    runtime = service_info.runtime
    # 'custom' runtimes are exempt from the naming restriction.
    if runtime == 'custom':
      return
    # This may or may not be accurate, but it only matters for custom runtimes,
    # which are handled above.
    needs_dockerfile = True
    strategy = self.deploy_options.runtime_builder_strategy
    use_runtime_builders = deploy_command_util.ShouldUseRuntimeBuilders(
        service_info, strategy, needs_dockerfile)
    # Without runtime builders, only the original (pre-builders) runtime name
    # grammar is accepted; pinned builder names would not match.
    if not use_runtime_builders and not ORIGINAL_RUNTIME_RE.match(runtime):
      raise InvalidRuntimeNameError(runtime, ORIGINAL_RUNTIME_RE_STRING)

  def _PossiblyBuildAndPush(self, new_version, service, upload_dir,
                            source_files, image, code_bucket_ref, gcr_domain,
                            flex_image_build_option):
    """Builds and Pushes the Docker image if necessary for this service.

    Args:
      new_version: version_util.Version describing where to deploy the service
      service: yaml_parsing.ServiceYamlInfo, service configuration to be
        deployed
      upload_dir: str, path to the service's upload directory
      source_files: [str], relative paths to upload.
      image: str or None, the URL for the Docker image to be deployed (if
        image already exists).
      code_bucket_ref: cloud_storage.BucketReference where the service's files
        have been uploaded
      gcr_domain: str, Cloud Registry domain, determines the physical location
        of the image. E.g. `us.gcr.io`.
      flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
        should upload files so that the server can build the image or build
        the image on client or build the image on client using the buildpacks.

    Returns:
      BuildArtifact, a wrapper which contains either the build ID for
      an in-progress build, or the name of the container image for a serial
      build. Possibly None if the service does not require an image.

    Raises:
      RequiredFileMissingError: if a required file is not uploaded.
    """
    build = None
    if image:
      # A prebuilt image short-circuits everything: skip_files can no longer
      # influence what went into the image, so warn if it was set.
      if service.RequiresImage() and service.parsed.skip_files.regex:
        log.warning('Deployment of service [{0}] will ignore the skip_files '
                    'field in the configuration file, because the image has '
                    'already been built.'.format(new_version.service))
      return app_cloud_build.BuildArtifact.MakeImageArtifact(image)
    elif service.RequiresImage():
      # app.yaml itself must be among the uploaded files for any image build.
      if not _AppYamlInSourceFiles(source_files, service.GetAppYamlBasename()):
        raise RequiredFileMissingError(service.GetAppYamlBasename())
      if flex_image_build_option == FlexImageBuildOptions.ON_SERVER:
        # Server-side build: just pass build options along; the server builds.
        cloud_build_options = {
            'appYamlPath': service.GetAppYamlBasename(),
        }
        timeout = properties.VALUES.app.cloud_build_timeout.Get()
        if timeout:
          # Admin API expects the timeout as a seconds string, e.g. '600s'.
          build_timeout = int(
              times.ParseDuration(timeout, default_suffix='s').total_seconds)
          cloud_build_options['cloudBuildTimeout'] = six.text_type(
              build_timeout) + 's'
        build = app_cloud_build.BuildArtifact.MakeBuildOptionsArtifact(
            cloud_build_options)
      else:
        # Client-side build (docker or buildpacks, per the final flag below).
        build = deploy_command_util.BuildAndPushDockerImage(
            new_version.project, service, upload_dir, source_files,
            new_version.id, code_bucket_ref, gcr_domain,
            self.deploy_options.runtime_builder_strategy,
            self.deploy_options.parallel_build, flex_image_build_option ==
            FlexImageBuildOptions.BUILDPACK_ON_CLIENT)
    return build

  def _PossiblyPromote(self, all_services, new_version, wait_for_stop_version):
    """Promotes the new version to default (if specified by the user).

    Args:
      all_services: dict of service ID to service_util.Service objects
        corresponding to all pre-existing services (used to determine how to
        promote this version to receive all traffic, if applicable).
      new_version: version_util.Version describing where to deploy the service
      wait_for_stop_version: bool, indicating whether to wait for stop
        operation to finish.

    Raises:
      VersionPromotionError: if the version could not successfully promoted
    """
    if self.deploy_options.promote:
      try:
        version_util.PromoteVersion(all_services, new_version, self.api_client,
                                    self.deploy_options.stop_previous_version,
                                    wait_for_stop_version)
      except apitools_exceptions.HttpError as err:
        # Wrap the raw HTTP failure in an actionable message for the user.
        err_str = six.text_type(core_api_exceptions.HttpException(err))
        raise VersionPromotionError(err_str)
    elif self.deploy_options.stop_previous_version:
      # --stop-previous-version without --promote is a no-op; explain why.
      log.info('Not stopping previous version because new version was '
               'not promoted.')

  def _PossiblyUploadFiles(self, image, service_info, upload_dir, source_files,
                           code_bucket_ref, flex_image_build_option):
    """Uploads files for this deployment is required for this service.

    Uploads if flex_image_build_option is FlexImageBuildOptions.ON_SERVER,
    or if the deployment is non-hermetic and the image is not provided.

    Args:
      image: str or None, the URL for the Docker image to be deployed (if
        image already exists).
      service_info: yaml_parsing.ServiceYamlInfo, service configuration to be
        deployed
      upload_dir: str, path to the service's upload directory
      source_files: [str], relative paths to upload.
      code_bucket_ref: cloud_storage.BucketReference where the service's files
        have been uploaded
      flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
        should upload files so that the server can build the image or build
        the image on client or build the image on client using the buildpacks.

    Returns:
      Dictionary mapping source files to Google Cloud Storage locations.

    Raises:
      RequiredFileMissingError: if a required file is not uploaded.
    """
    manifest = None
    # "Non-hermetic" services require file upload outside the Docker image
    # unless an image was already built.
    if (not image and
        (flex_image_build_option == FlexImageBuildOptions.ON_SERVER or
         not service_info.is_hermetic)):
      if (service_info.env == env.FLEX and not _AppYamlInSourceFiles(
          source_files, service_info.GetAppYamlBasename())):
        raise RequiredFileMissingError(service_info.GetAppYamlBasename())

      # Only first-gen standard runtimes still enforce the per-file size cap.
      limit = None
      if (service_info.env == env.STANDARD and
          service_info.runtime in _RUNTIMES_WITH_FILE_SIZE_LIMITS):
        limit = _MAX_FILE_SIZE_STANDARD
      manifest = deploy_app_command_util.CopyFilesToCodeBucket(
          upload_dir, source_files, code_bucket_ref, max_file_size=limit)
    return manifest

  def Deploy(self,
             service,
             new_version,
             code_bucket_ref,
             image,
             all_services,
             gcr_domain,
             disable_build_cache,
             wait_for_stop_version,
             flex_image_build_option=FlexImageBuildOptions.ON_CLIENT,
             ignore_file=None,
             service_account=None):
    """Deploy the given service.

    Performs all deployment steps for the given service (if applicable):
    * Enable endpoints (for beta deployments)
    * Build and push the Docker image (Flex only, if image_url not provided)
    * Upload files (non-hermetic deployments and flex deployments with
      flex_image_build_option=FlexImageBuildOptions.ON_SERVER)
    * Create the new version
    * Promote the version to receive all traffic (if --promote given
      (default))
    * Stop the previous version (if new version promoted and
      --stop-previous-version given (default))

    Args:
      service: deployables.Service, service to be deployed.
      new_version: version_util.Version describing where to deploy the service
      code_bucket_ref: cloud_storage.BucketReference where the service's files
        will be uploaded
      image: str or None, the URL for the Docker image to be deployed (if
        image already exists).
      all_services: dict of service ID to service_util.Service objects
        corresponding to all pre-existing services (used to determine how to
        promote this version to receive all traffic, if applicable).
      gcr_domain: str, Cloud Registry domain, determines the physical location
        of the image. E.g. `us.gcr.io`.
      disable_build_cache: bool, disable the build cache.
      wait_for_stop_version: bool, indicating whether to wait for stop
        operation to finish.
      flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
        should upload files so that the server can build the image or build
        the image on client or build the image on client using the buildpacks.
      ignore_file: custom ignore_file name. Override .gcloudignore file to
        customize files to be skipped.
      service_account: identity this version runs as. If not set, Admin API
        will fallback to use the App Engine default appspot SA.
    """
    log.status.Print('Beginning deployment of service [{service}]...'.format(
        service=new_version.service))
    if (service.service_info.env == env.MANAGED_VMS and
        flex_image_build_option == FlexImageBuildOptions.ON_SERVER):
      # Server-side builds are not supported for Managed VMs.
      flex_image_build_option = FlexImageBuildOptions.ON_CLIENT
    service_info = service.service_info
    self._ValidateRuntime(service_info)
    source_files = source_files_util.GetSourceFiles(
        service.upload_dir,
        service_info.parsed.skip_files.regex,
        service_info.HasExplicitSkipFiles(),
        service_info.runtime,
        service_info.env,
        service.source,
        ignore_file=ignore_file)
    # Tar-based upload for flex
    build = self._PossiblyBuildAndPush(new_version, service_info,
                                       service.upload_dir, source_files, image,
                                       code_bucket_ref, gcr_domain,
                                       flex_image_build_option)
    # Manifest-based incremental source upload for all envs
    manifest = self._PossiblyUploadFiles(image, service_info,
                                         service.upload_dir, source_files,
                                         code_bucket_ref,
                                         flex_image_build_option)
    del source_files  # Free some memory
    extra_config_settings = {}
    if disable_build_cache:
      extra_config_settings['no-cache'] = 'true'
    # Actually create the new version of the service.
    metrics.CustomTimedEvent(metric_names.DEPLOY_API_START)
    self.api_client.DeployService(new_version.service, new_version.id,
                                  service_info, manifest, build,
                                  extra_config_settings, service_account)
    metrics.CustomTimedEvent(metric_names.DEPLOY_API)
    self._PossiblyPromote(all_services, new_version, wait_for_stop_version)
def ArgsDeploy(parser):
  """Get arguments for this command.

  Args:
    parser: argparse.ArgumentParser, the parser for this command.
  """
  # Flags shared with other `gcloud app` commands.
  flags.SERVER_FLAG.AddToParser(parser)
  flags.IGNORE_CERTS_FLAG.AddToParser(parser)
  flags.DOCKER_BUILD_FLAG.AddToParser(parser)
  flags.IGNORE_FILE_FLAG.AddToParser(parser)
  parser.add_argument(
      '--version',
      '-v',
      type=flags.VERSION_TYPE,
      help='The version of the app that will be created or replaced by this '
      'deployment.  If you do not specify a version, one will be generated for '
      'you.')
  parser.add_argument(
      '--bucket',
      type=storage_util.BucketReference.FromArgument,
      help=('The Google Cloud Storage bucket used to stage files associated '
            'with the deployment. If this argument is not specified, the '
            "application's default code bucket is used."))
  parser.add_argument(
      '--service-account',
      help=('The service account that this deployed version will run as. '
            'If this argument is not specified, the App Engine default '
            'service account will be used for your current deployed version.'))
  parser.add_argument(
      'deployables',
      nargs='*',
      help="""\
      The yaml files for the services or configurations you want to deploy.
      If not given, defaults to `app.yaml` in the current directory.
      If that is not found, attempts to automatically generate necessary
      configuration files (such as app.yaml) in the current directory.""")
  parser.add_argument(
      '--stop-previous-version',
      action=actions.StoreBooleanProperty(
          properties.VALUES.app.stop_previous_version),
      help="""\
      Stop the previously running version when deploying a new version that
      receives all traffic.

      Note that if the version is running on an instance
      of an auto-scaled service in the App Engine Standard
      environment, using `--stop-previous-version` will not work
      and the previous version will continue to run because auto-scaled service
      instances are always running.""")
  parser.add_argument(
      '--image-url',
      help='(App Engine flexible environment only.) Deploy with a specific '
      'Docker image.  Docker url must be from one of the valid Artifact '
      'Registry hostnames.')
  parser.add_argument(
      '--appyaml',
      help='Deploy with a specific app.yaml that will replace '
      'the one defined in the DEPLOYABLE.')
  parser.add_argument(
      '--promote',
      action=actions.StoreBooleanProperty(
          properties.VALUES.app.promote_by_default),
      help='Promote the deployed version to receive all traffic.')
  # NOTE(review): store_true with default=True looks inert under plain
  # argparse; presumably calliope synthesizes a `--no-cache` variant — confirm.
  parser.add_argument(
      '--cache',
      action='store_true',
      default=True,
      help='Enable caching mechanisms involved in the deployment process, '
      'particularly in the build step.')
  # Hidden staging escape hatches consumed by _MakeStager below.
  staging_group = parser.add_mutually_exclusive_group(hidden=True)
  staging_group.add_argument(
      '--skip-staging',
      action='store_true',
      default=False,
      help='THIS ARGUMENT NEEDS HELP TEXT.')
  staging_group.add_argument(
      '--staging-command', help='THIS ARGUMENT NEEDS HELP TEXT.')
def _MakeStager(skip_staging, use_beta_stager, staging_command, staging_area):
  """Creates the appropriate stager for the given arguments/release track.

  The stager is responsible for invoking the right local staging depending on
  env and runtime.

  Args:
    skip_staging: bool, if True use a no-op Stager. Takes precedence over
      other arguments.
    use_beta_stager: bool, if True, use a stager that includes beta staging
      commands.
    staging_command: str, path to an executable on disk. If given, use this
      command explicitly for staging. Takes precedence over later arguments.
    staging_area: str, the path to the staging area

  Returns:
    staging.Stager, the appropriate stager for the command
  """
  # Precedence order: skip > explicit command > beta > default.
  if skip_staging:
    return staging.GetNoopStager(staging_area)
  if staging_command:
    override = staging.ExecutableCommand.FromInput(staging_command)
    return staging.GetOverrideStager(override, staging_area)
  if use_beta_stager:
    return staging.GetBetaStager(staging_area)
  return staging.GetStager(staging_area)
def RunDeploy(
    args,
    api_client,
    use_beta_stager=False,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER,
    parallel_build=True,
    flex_image_build_option=FlexImageBuildOptions.ON_CLIENT,
):
  """Perform a deployment based on the given args.

  Args:
    args: argparse.Namespace, An object that contains the values for the
      arguments specified in the ArgsDeploy() function.
    api_client: api_lib.app.appengine_api_client.AppengineClient, App Engine
      Admin API client.
    use_beta_stager: Use the stager registry defined for the beta track rather
      than the default stager registry.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, when to
      use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, whether to use parallel build and deployment path.
      Only supported in v1beta and v1alpha App Engine Admin API.
    flex_image_build_option: FlexImageBuildOptions, whether a flex deployment
      should upload files so that the server can build the image or build the
      image on client or build the image on client using the buildpacks.

  Returns:
    A dict on the form `{'versions': new_versions, 'configs': updated_configs}`
    where new_versions is a list of version_util.Version, and updated_configs
    is a list of config file identifiers, see yaml_parsing.ConfigYamlInfo.
  """
  project = properties.VALUES.core.project.Get(required=True)
  deploy_options = DeployOptions.FromProperties(
      runtime_builder_strategy=runtime_builder_strategy,
      parallel_build=parallel_build,
      flex_image_build_option=flex_image_build_option)
  # Staging happens inside a temporary directory that is removed when this
  # context exits, so staged artifacts never outlive the deployment.
  with files.TemporaryDirectory() as staging_area:
    stager = _MakeStager(args.skip_staging, use_beta_stager,
                         args.staging_command, staging_area)
    services, configs = deployables.GetDeployables(
        args.deployables, stager, deployables.GetPathMatchers(), args.appyaml)
    # Deploying a dispatch.yaml requires waiting on the stop-version operation.
    wait_for_stop_version = _CheckIfConfigsContainDispatch(configs)
    service_infos = [d.service_info for d in services]
    # --image-url is only valid for certain service configurations.
    flags.ValidateImageUrl(args.image_url, service_infos)
    # pylint: disable=protected-access
    log.debug('API endpoint: [{endpoint}], API version: [{version}]'.format(
        endpoint=api_client.client.url, version=api_client.client._VERSION))
    # May prompt the user to create an app if the project has none.
    app = _PossiblyCreateApp(api_client, project)
    # Deploying to a disabled app is not allowed; fail early.
    _RaiseIfStopped(api_client, app)
    # Call _PossiblyRepairApp when --bucket param is unspecified
    if not args.bucket:
      app = _PossiblyRepairApp(api_client, app)
    # Tell the user what is going to happen, and ask them to confirm.
    version_id = args.version or util.GenerateVersionId()
    deployed_urls = output_helpers.DisplayProposedDeployment(
        app, project, services, configs, version_id, deploy_options.promote,
        args.service_account, api_client.client._VERSION)
    console_io.PromptContinue(cancel_on_no=True)
    if service_infos:
      # Do generic app setup if deploying any services.
      # All deployment paths for a service involve uploading source to GCS.
      metrics.CustomTimedEvent(metric_names.GET_CODE_BUCKET_START)
      code_bucket_ref = args.bucket or flags.GetCodeBucket(app, project)
      metrics.CustomTimedEvent(metric_names.GET_CODE_BUCKET)
      log.debug('Using bucket [{b}].'.format(b=code_bucket_ref.ToUrl()))
      # Prepare Flex if any service is going to deploy an image.
      if any([s.RequiresImage() for s in service_infos]):
        deploy_command_util.PossiblyEnableFlex(project)
      all_services = dict([(s.id, s) for s in api_client.ListServices()])
    else:
      # Config-only deployment: no source upload needed.
      code_bucket_ref = None
      all_services = {}
    new_versions = []
    deployer = ServiceDeployer(api_client, deploy_options)
    # Track whether a service has been deployed yet, for metrics.
    service_deployed = False
    for service in services:
      if not service_deployed:
        metrics.CustomTimedEvent(metric_names.FIRST_SERVICE_DEPLOY_START)
      new_version = version_util.Version(project, service.service_id,
                                         version_id)
      deployer.Deploy(
          service,
          new_version,
          code_bucket_ref,
          args.image_url,
          all_services,
          app.gcrDomain,
          disable_build_cache=(not args.cache),
          wait_for_stop_version=wait_for_stop_version,
          flex_image_build_option=flex_image_build_option,
          ignore_file=args.ignore_file,
          service_account=args.service_account)
      new_versions.append(new_version)
      log.status.Print('Deployed service [{0}] to [{1}]'.format(
          service.service_id, deployed_urls[service.service_id]))
      if not service_deployed:
        # Only record the first-service metric once.
        metrics.CustomTimedEvent(metric_names.FIRST_SERVICE_DEPLOY)
        service_deployed = True
    # Deploy config files.
    if configs:
      metrics.CustomTimedEvent(metric_names.UPDATE_CONFIG_START)
      for config in configs:
        message = 'Updating config [{config}]'.format(config=config.name)
        with progress_tracker.ProgressTracker(message):
          # Each known config type has a dedicated API path.
          if config.name == 'dispatch':
            api_client.UpdateDispatchRules(config.GetRules())
          elif config.name == yaml_parsing.ConfigYamlInfo.INDEX:
            index_api.CreateMissingIndexesViaDatastoreApi(project,
                                                          config.parsed)
          elif config.name == yaml_parsing.ConfigYamlInfo.QUEUE:
            RunDeployCloudTasks(config)
          elif config.name == yaml_parsing.ConfigYamlInfo.CRON:
            RunDeployCloudScheduler(config)
          else:
            raise ValueError(
                'Unknown config [{config}]'.format(config=config.name)
            )
      metrics.CustomTimedEvent(metric_names.UPDATE_CONFIG)
    updated_configs = [c.name for c in configs]
    PrintPostDeployHints(new_versions, updated_configs)
    # Return all the things that were deployed.
    return {'versions': new_versions, 'configs': updated_configs}
def RunDeployCloudTasks(config):
  """Deploys a queue.yaml config by driving the Cloud Tasks API.

  Validates the parsed queue.yaml, fetches the currently configured queues,
  then applies the YAML file's desired state on top of them.

  Args:
    config: A yaml_parsing.ConfigYamlInfo object for the parsed YAML file we
      are going to process.
  """
  # TODO(b/169069379): Upgrade to use GA once the relevant code is promoted
  tasks_client = tasks.GetApiAdapter(base.ReleaseTrack.BETA)
  existing_queues = app_deploy_migration_util.FetchCurrentQueuesData(
      tasks_client)
  app_deploy_migration_util.ValidateQueueYamlFileConfig(config)
  app_deploy_migration_util.DeployQueuesYamlFile(tasks_client, config,
                                                 existing_queues)
def RunDeployCloudScheduler(config):
  """Deploys a cron.yaml config by driving the Cloud Scheduler API.

  Validates the parsed cron.yaml, fetches the currently configured jobs, then
  applies the YAML file's desired state on top of them.

  Args:
    config: A yaml_parsing.ConfigYamlInfo object for the parsed YAML file we
      are going to process.
  """
  # TODO(b/169069379): Upgrade to use GA once the relevant code is promoted
  scheduler_client = scheduler.GetApiAdapter(
      base.ReleaseTrack.BETA, legacy_cron=True)
  existing_jobs = app_deploy_migration_util.FetchCurrentJobsData(
      scheduler_client)
  app_deploy_migration_util.ValidateCronYamlFileConfig(config)
  app_deploy_migration_util.DeployCronYamlFile(scheduler_client, config,
                                               existing_jobs)
# TODO(b/30632016): Move to Epilog() when we have a good way to pass
# information about the deployed versions
def PrintPostDeployHints(new_versions, updated_configs):
  """Print hints for user at the end of a deployment."""
  queue_updated = yaml_parsing.ConfigYamlInfo.QUEUE in updated_configs
  if yaml_parsing.ConfigYamlInfo.CRON in updated_configs:
    log.status.Print('\nCron jobs have been updated.')
    # The queue branch below prints the same console hint; avoid duplicating.
    if not queue_updated:
      log.status.Print('\nVisit the Cloud Platform Console Task Queues page '
                       'to view your queues and cron jobs.')
      log.status.Print(
          _TASK_CONSOLE_LINK.format(properties.VALUES.core.project.Get()))
  if yaml_parsing.ConfigYamlInfo.DISPATCH in updated_configs:
    log.status.Print('\nCustom routings have been updated.')
  if queue_updated:
    log.status.Print('\nTask queues have been updated.')
    log.status.Print('\nVisit the Cloud Platform Console Task Queues page '
                     'to view your queues and cron jobs.')
  if yaml_parsing.ConfigYamlInfo.INDEX in updated_configs:
    log.status.Print('\nIndexes are being rebuilt. This may take a moment.')
  if not new_versions:
    return
  # Compute the `-s <service>` suffix for the suggested commands.
  if len(new_versions) > 1:
    service_hint = ' -s <service>'
  else:
    deployed_service = new_versions[0].service
    if deployed_service == 'default':
      service_hint = ''
    else:
      service_hint = ' -s {svc}'.format(svc=deployed_service)
  # Suggest --project only when the active project differs from the one in
  # the named configuration file.
  configured = named_configs.ActivePropertiesFile.Load().Get('core', 'project')
  project = properties.VALUES.core.project.Get()
  project_hint = '' if configured == project else ' --project=' + project
  log.status.Print('\nYou can stream logs from the command line by running:\n'
                   '  $ gcloud app logs tail' + (service_hint or ' -s default'))
  log.status.Print('\nTo view your application in the web browser run:\n'
                   '  $ gcloud app browse' + service_hint + project_hint)
def _PossiblyCreateApp(api_client, project):
  """Returns an app resource, and creates it if the stars are aligned.

  App creation happens only if the current project is app-less, we are running
  in interactive mode and the user explicitly wants to.

  Args:
    api_client: Admin API client.
    project: The GCP project/app id.

  Returns:
    An app object (never returns None).

  Raises:
    MissingApplicationError: If an app does not exist and cannot be created.
    core_api_exceptions.HttpException: If the account lacks permission to
      fetch the application.
  """
  try:
    return api_client.GetApplication()
  except apitools_exceptions.HttpNotFoundError:
    # Invariant: GCP Project does exist but (singleton) GAE app is not yet
    # created.
    #
    # Check for interactive mode, since this action is irreversible and somewhat
    # surprising. CreateAppInteractively will provide a cancel option for
    # interactive users, and MissingApplicationException includes instructions
    # for non-interactive users to fix this.
    log.debug('No app found:', exc_info=True)
    if console_io.CanPrompt():
      # Equivalent to running `gcloud app create`
      create_util.CreateAppInteractively(api_client, project)
      # App resource must be fetched again
      return api_client.GetApplication()
    raise exceptions.MissingApplicationError(project)
  except apitools_exceptions.HttpForbiddenError:
    # 403 from the Admin API: surface a role-specific, actionable message
    # instead of the raw HTTP error.
    active_account = properties.VALUES.core.account.Get()
    # pylint: disable=protected-access
    raise core_api_exceptions.HttpException(
        ('Permissions error fetching application [{}]. Please '
         'make sure that you have permission to view applications on the '
         'project and that {} has the App Engine Deployer '
         '(roles/appengine.deployer) role.'.format(api_client._FormatApp(),
                                                   active_account)))
def _PossiblyRepairApp(api_client, app):
"""Repairs the app if necessary and returns a healthy app object.
An app is considered unhealthy if the codeBucket field is missing.
This may include more conditions in the future.
Args:
api_client: Admin API client.
app: App object (with potentially missing resources).
Returns:
An app object (either the same or a new one), which contains the right
resources, including code bucket.
"""
if not app.codeBucket:
message = 'Initializing App Engine resources'
api_client.RepairApplication(progress_message=message)
app = api_client.GetApplication()
return app
def _RaiseIfStopped(api_client, app):
"""Checks if app is disabled and raises error if so.
Deploying to a disabled app is not allowed.
Args:
api_client: Admin API client.
app: App object (including status).
Raises:
StoppedApplicationError: if the app is currently disabled.
"""
if api_client.IsStopped(app):
raise StoppedApplicationError(app)
def _CheckIfConfigsContainDispatch(configs):
"""Checks if list of configs contains dispatch config.
Args:
configs: list of configs
Returns:
bool, indicating if configs contain dispatch config.
"""
for config in configs:
if config.name == 'dispatch':
return True
return False
def GetRuntimeBuilderStrategy(release_track):
  """Gets the appropriate strategy to use for runtime builders.

  Depends on the release track (beta or GA; alpha is not supported) and whether
  the hidden `app/use_runtime_builders` configuration property is set (in which
  case it overrides).

  Args:
    release_track: the base.ReleaseTrack that determines the default strategy.

  Returns:
    The RuntimeBuilderStrategy to use.

  Raises:
    ValueError: if the release track is not supported (and there is no property
      override set).
  """
  # Use Get(), not GetBool(), since GetBool() doesn't differentiate between
  # "None" (unset) and "False" (explicitly disabled).
  override = properties.VALUES.app.use_runtime_builders.Get()
  if override is not None:
    if properties.VALUES.app.use_runtime_builders.GetBool():
      return runtime_builders.RuntimeBuilderStrategy.ALWAYS
    return runtime_builders.RuntimeBuilderStrategy.NEVER
  # No override: pick the allowlist matching the release track.
  if release_track is base.ReleaseTrack.GA:
    return runtime_builders.RuntimeBuilderStrategy.ALLOWLIST_GA
  if release_track is base.ReleaseTrack.BETA:
    return runtime_builders.RuntimeBuilderStrategy.ALLOWLIST_BETA
  raise ValueError('Unrecognized release track [{}]'.format(release_track))
def _AppYamlInSourceFiles(source_files, app_yaml_path):
if not source_files:
return False
# TODO(b/171495697) until the bug is fixed, the app yaml has to be located in
# the root of the app code, hence we're searching only the filename
app_yaml_filename = os.path.basename(app_yaml_path)
return any([f == app_yaml_filename for f in source_files])

View File

@@ -0,0 +1,491 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for deriving services and configs from paths.
Paths are typically given as positional params, like
`gcloud app deploy <path1> <path2>...`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import os
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
# Reference docs for authoring an app.yaml, one URL per environment.
_STANDARD_APP_YAML_URL = (
    'https://cloud.google.com/appengine/docs/standard/reference/app-yaml')
_FLEXIBLE_APP_YAML_URL = (
    'https://cloud.google.com/appengine/docs/flexible/reference/app-yaml')
# Shared sentence fragment pointing users at both app.yaml references;
# appended to the messages below.
APP_YAML_INSTRUCTIONS = (
    'using the directions at {flex} (App Engine flexible environment) or {std} '
    '(App Engine standard environment) under the tab for your language.'
).format(flex=_FLEXIBLE_APP_YAML_URL, std=_STANDARD_APP_YAML_URL)
# Suggestion to hand-write an app.yaml as an alternative (phrased as a
# follow-up to some other message).
FINGERPRINTING_WARNING = (
    'As an alternative, create an app.yaml file yourself ' +
    APP_YAML_INSTRUCTIONS)
# Error text for a deploy target with no recognizable deployment descriptor
# (used by UnidentifiedDirMatcher below).
NO_YAML_ERROR = (
    'An app.yaml (or appengine-web.xml) file is required to deploy this '
    'directory as an App Engine application. Create an app.yaml file '
    + APP_YAML_INSTRUCTIONS)
class Service(object):
  """Represents data around a deployable service.

  Attributes:
    descriptor: str, File path to the original deployment descriptor, which is
      either a `<service>.yaml` or an `appengine-web.xml`.
    source: str, Path to the original deployable artifact or directory, which
      is typically the original source directory, but could also be an
      artifact such as a fat JAR file.
    service_info: yaml_parsing.ServiceYamlInfo, Info parsed from the
      `<service>.yaml` file. Note that service_info.file may point to a
      file in a staged directory.
    upload_dir: str, Path to the source directory. If staging is required,
      this points to the staged directory.
    service_id: str, the service id.
    path: str, File path to the staged deployment `<service>.yaml` descriptor
      or to the original one, if no staging is used.
  """

  def __init__(self, descriptor, source, service_info, upload_dir):
    self.descriptor = descriptor
    self.source = source
    self.service_info = service_info
    self.upload_dir = upload_dir

  @property
  def service_id(self):
    """str, the service id parsed from the service yaml."""
    return self.service_info.module

  @property
  def path(self):
    """str, path to the (possibly staged) service yaml."""
    return self.service_info.file

  @classmethod
  def FromPath(cls, path, stager, path_matchers, appyaml):
    """Return a Service from a path using staging if necessary.

    Args:
      path: str, Unsanitized absolute path, may point to a directory or a file
        of any type. There is no guarantee that it exists.
      stager: staging.Stager, stager that will be invoked if there is a
        runtime and environment match.
      path_matchers: List[Function], ordered list of functions on the form
        fn(path, stager), where fn returns a Service or None if no match.
      appyaml: str or None, the app.yaml location to used for deployment.

    Returns:
      Service, if one can be derived, else None.
    """
    # First truthy matcher result wins; matchers may run staging as a
    # side effect.
    candidates = (matcher(path, stager, appyaml) for matcher in path_matchers)
    return next((service for service in candidates if service), None)
def ServiceYamlMatcher(path, stager, appyaml):
  """Generate a Service from a `<service>.yaml` source path.

  Matches if and only if `path` points to a `.yaml`/`.yml` file, or to a
  directory containing an `app.yaml`. When the runtime and environment match
  an entry in the stager, the service is staged into a directory.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will be invoked if there is a runtime
      and environment match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    staging.StagingCommandFailedError: staging command failed.

  Returns:
    Service for a (potentially staged) deployable, or None if the path does
    not match.
  """
  if os.path.isfile(path):
    descriptor = path
  else:
    descriptor = os.path.join(path, 'app.yaml')
  _, ext = os.path.splitext(descriptor)
  if not os.path.exists(descriptor) or ext not in ('.yaml', '.yml'):
    return None
  app_dir = os.path.dirname(descriptor)
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(descriptor)
  # Stager returns None when no staging step applies; fall back to app_dir.
  staging_dir = stager.Stage(descriptor, app_dir, service_info.runtime,
                             service_info.env, appyaml)
  return Service(descriptor, app_dir, service_info, staging_dir or app_dir)
def JarMatcher(jar_path, stager, appyaml):
  """Generate a Service from a Java fatjar path.

  Matches if and only if `jar_path` points to an existing `.jar` file. The
  service is staged according to the stager as a `java-jar` runtime, which is
  defined in staging.py.

  Args:
    jar_path: str, Unsanitized absolute path pointing to a file of jar type.
    stager: staging.Stager, stager that will be invoked if there is a runtime
      and environment match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    staging.StagingCommandFailedError: staging command failed.

  Returns:
    Service for the staged deployable, or None if the path does not match.
  """
  _, extension = os.path.splitext(jar_path)
  if extension != '.jar' or not os.path.exists(jar_path):
    return None
  app_dir = os.path.abspath(os.path.join(jar_path, os.pardir))
  staging_dir = stager.Stage(jar_path, app_dir, 'java-jar', env.STANDARD,
                             appyaml)
  # The jar stager synthesizes an app.yaml inside the staging directory.
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(
      os.path.join(staging_dir, 'app.yaml'))
  return Service(jar_path, app_dir, service_info, staging_dir)
def PomXmlMatcher(path, stager, appyaml):
  """Generate a Service from a Maven project source path.

  Matches if and only if `path` points to a Maven `pom.xml`, or to a directory
  containing one. When the runtime and environment match an entry in the
  stager, the service is staged into a directory.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will be invoked if there is a runtime
      and environment match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    staging.StagingCommandFailedError: staging command failed.

  Returns:
    Service for a (potentially staged) deployable, or None if the path does
    not match.
  """
  if os.path.isfile(path):
    descriptor = path
  else:
    descriptor = os.path.join(path, 'pom.xml')
  if os.path.basename(descriptor) != 'pom.xml' or not os.path.exists(
      descriptor):
    return None
  app_dir = os.path.dirname(descriptor)
  staging_dir = stager.Stage(descriptor, app_dir, 'java-maven-project',
                             env.STANDARD, appyaml)
  # Maven staging synthesizes an app.yaml inside the staging directory.
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(
      os.path.join(staging_dir, 'app.yaml'))
  return Service(descriptor, app_dir, service_info, staging_dir)
def BuildGradleMatcher(path, stager, appyaml):
  """Generate a Service from a Gradle project source path.

  Matches if and only if `path` points to a Gradle `build.gradle`, or to a
  directory containing one. When the runtime and environment match an entry
  in the stager, the service is staged into a directory.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will be invoked if there is a runtime
      and environment match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    staging.StagingCommandFailedError: staging command failed.

  Returns:
    Service for a (potentially staged) deployable, or None if the path does
    not match.
  """
  if os.path.isfile(path):
    descriptor = path
  else:
    descriptor = os.path.join(path, 'build.gradle')
  if os.path.basename(descriptor) != 'build.gradle' or not os.path.exists(
      descriptor):
    return None
  app_dir = os.path.dirname(descriptor)
  staging_dir = stager.Stage(descriptor, app_dir, 'java-gradle-project',
                             env.STANDARD, appyaml)
  # Gradle staging synthesizes an app.yaml inside the staging directory.
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(
      os.path.join(staging_dir, 'app.yaml'))
  return Service(descriptor, app_dir, service_info, staging_dir)
def AppengineWebMatcher(path, stager, appyaml):
  """Generate a Service from an appengine-web.xml source path.

  This function is a path matcher that returns if and only if:
  - `path` points to either `.../WEB-INF/appengine-web.xml` or `<app-dir>`
    where `<app-dir>/WEB-INF/appengine-web.xml` exists.
  - the xml-file is a valid appengine-web.xml file according to the Java
    stager.

  The service will be staged according to the stager as a java-xml runtime,
  which is defined in staging.py.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will be invoked if there is a runtime
      and environment match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    staging.StagingCommandFailedError: staging command failed.

  Returns:
    Service, fully populated with entries that respect a staged deployable
    service, or None if the path does not match the pattern described.
  """
  # Accept either the app dir itself or the full path to the descriptor;
  # normalize to the app dir by trimming the WEB-INF suffix if present.
  suffix = os.path.join(os.sep, 'WEB-INF', 'appengine-web.xml')
  app_dir = path[:-len(suffix)] if path.endswith(suffix) else path
  descriptor = os.path.join(app_dir, 'WEB-INF', 'appengine-web.xml')
  if not os.path.isfile(descriptor):
    return None
  xml_file = files.ReadFileContents(descriptor)
  # These legacy elements are ignored by gcloud-based deployment.
  if '<application>' in xml_file or '<version>' in xml_file:
    log.warning('<application> and <version> elements in ' +
                '`appengine-web.xml` are not respected')
  staging_dir = stager.Stage(descriptor, app_dir, 'java-xml', env.STANDARD,
                             appyaml)
  if not staging_dir:
    # After GA launch of appengine-web.xml support, this should never occur.
    return None
  # Staging synthesizes an app.yaml inside the staging directory.
  yaml_path = os.path.join(staging_dir, 'app.yaml')
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(yaml_path)
  return Service(descriptor, app_dir, service_info, staging_dir)
def ExplicitAppYamlMatcher(path, stager, appyaml):
  """Use optional app.yaml with a directory or file the user wants to deploy.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will not be invoked.
    appyaml: str or None, the app.yaml location to used for deployment.

  Returns:
    Service built from the explicit app.yaml, or None if the --appyaml flag
    was not used.
  """
  if not appyaml:
    return None
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(appyaml)
  # 'generic-copy' staging copies the sources plus the explicit app.yaml.
  staging_dir = stager.Stage(appyaml, path, 'generic-copy', service_info.env,
                             appyaml)
  return Service(appyaml, path, service_info, staging_dir)
def UnidentifiedDirMatcher(path, stager, appyaml):
  """Fallback matcher that tells the user an app.yaml is required.

  Always fails to match (returns None); if `path` is a directory it logs an
  error explaining that a deployment descriptor is missing.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file
      of any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will not be invoked.
    appyaml: str or None, the app.yaml location to used for deployment.

  Returns:
    None
  """
  del stager, appyaml  # Unused; kept for the common matcher signature.
  # Only directories get the hint; unrecognized files fall through silently.
  if os.path.isdir(path):
    log.error(NO_YAML_ERROR)
  return None
def GetPathMatchers():
  """Get list of path matchers ordered by descending precedence.

  Returns:
    List[Function], ordered list of functions on the form fn(path, stager),
    where fn returns a Service or None if no match.
  """
  # Order matters: the first matcher returning a Service wins, and
  # UnidentifiedDirMatcher (which only logs an error) must stay last.
  return [
      ServiceYamlMatcher, AppengineWebMatcher, JarMatcher, PomXmlMatcher,
      BuildGradleMatcher, ExplicitAppYamlMatcher, UnidentifiedDirMatcher
  ]
class Services(object):
  """Collection of deployable services, keyed by service id."""

  def __init__(self, services=None):
    """Instantiate a set of deployable services.

    Args:
      services: List[Service], optional list of services for quick
        initialization.

    Raises:
      DuplicateServiceError: Two or more services have the same service id.
    """
    # OrderedDict preserves insertion order for GetAll().
    self._services = collections.OrderedDict()
    for service in services or []:
      self.Add(service)

  def Add(self, service):
    """Add a deployable service to the set.

    Args:
      service: Service, to add.

    Raises:
      DuplicateServiceError: Two or more services have the same service id.
    """
    previous = self._services.get(service.service_id)
    if previous:
      raise exceptions.DuplicateServiceError(previous.path, service.path,
                                             service.service_id)
    self._services[service.service_id] = service

  def GetAll(self):
    """Retrieve the service info objects in the order they were added.

    Returns:
      List[Service], list of services.
    """
    return list(self._services.values())
class Configs(object):
  """Collection of config files, deduplicated by config type."""

  def __init__(self):
    # OrderedDict preserves insertion order for GetAll().
    self._configs = collections.OrderedDict()

  def Add(self, config):
    """Add a ConfigYamlInfo to the set of configs.

    Args:
      config: ConfigYamlInfo, the config to add.

    Raises:
      exceptions.DuplicateConfigError: the config type is already in the set.
    """
    config_type = config.config
    previous = self._configs.get(config_type)
    if previous:
      raise exceptions.DuplicateConfigError(previous.file, config.file,
                                            config_type)
    self._configs[config_type] = config

  def GetAll(self):
    """Retrieve the config file objects in the order they were added.

    Returns:
      List[ConfigYamlInfo], list of config file objects.
    """
    return list(self._configs.values())
def GetDeployables(args, stager, path_matchers, appyaml=None):
  """Given a list of args, infer the deployable services and configs.

  Given a deploy command, e.g. `gcloud app deploy ./dir other/service.yaml
  cron.yaml WEB-INF/appengine-web.xml`, the deployables can be on multiple
  forms. This method pre-processes and infers yaml descriptors from the
  various formats accepted. The rules are as following:

  1. If `args` is an empty list, add the current directory to it.
  2. For each arg:
    - If arg refers to a config file, add it to the configs set.
    - Else match the arg against the path matchers. The first match will win.
      The match will be added to the services set. Matchers may run staging.

  Args:
    args: List[str], positional args as given on the command-line.
    stager: staging.Stager, stager that will be invoked on sources that have
      entries in the stager's registry.
    path_matchers: List[Function], list of functions on the form
      fn(path, stager) ordered by descending precedence, where fn returns
      a Service or None if no match.
    appyaml: str or None, the app.yaml location to used for deployment.

  Raises:
    FileNotFoundError: One or more argument does not point to an existing file
      or directory.
    UnknownSourceError: Could not infer a config or service from an arg.
    DuplicateConfigError: Two or more config files have the same type.
    DuplicateServiceError: Two or more services have the same service id.

  Returns:
    Tuple[List[Service], List[ConfigYamlInfo]], lists of deployable services
    and configs.
  """
  if not args:
    args = ['.']
  paths = [os.path.abspath(arg) for arg in args]
  configs = Configs()
  services = Services()
  if appyaml:
    # --appyaml is only compatible with a single deployable; validate both
    # the explicit app.yaml and the single deployable path up front.
    if len(paths) > 1:
      raise exceptions.MultiDeployError()
    if not os.path.exists(os.path.abspath(appyaml)):
      raise exceptions.FileNotFoundError('File {0} referenced by --appyaml '
                                         'does not exist.'.format(appyaml))
    if not os.path.exists(paths[0]):
      raise exceptions.FileNotFoundError(paths[0])
  for path in paths:
    if not os.path.exists(path):
      raise exceptions.FileNotFoundError(path)
    # Config files (cron.yaml, queue.yaml, ...) are recognized directly by
    # the yaml parser; everything else goes through the path matchers.
    config = yaml_parsing.ConfigYamlInfo.FromFile(path)
    if config:
      configs.Add(config)
      continue
    service = Service.FromPath(path, stager, path_matchers, appyaml)
    if service:
      services.Add(service)
      continue
    raise exceptions.UnknownSourceError(path)
  return services.GetAll(), configs.GetAll()

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for `gcloud app domain-mappings`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import exceptions
# Shared error text for the validators below: a certificate ID conflicts with
# AUTOMATIC certificate management.
NO_CERTIFICATE_ID_MESSAGE = ('A certificate ID cannot be provided when using'
                             ' automatic certificate management.')
def ParseCertificateManagement(messages, certificate_management):
  """Converts a certificate-management flag value to its API enum.

  Args:
    messages: the App Engine Admin API messages module.
    certificate_management: str or None, the flag value (case-insensitive).

  Returns:
    SslSettings.SslManagementTypeValueValuesEnum value, or None if no flag
    value was given.
  """
  if not certificate_management:
    return None
  return messages.SslSettings.SslManagementTypeValueValuesEnum(
      certificate_management.upper())
def ValidateCertificateArgs(certificate_id, certificate_management):
  """Rejects a certificate ID combined with automatic management.

  Args:
    certificate_id: str or None, the certificate-id flag value.
    certificate_management: str or None, the certificate-management flag
      value (case-insensitive).

  Raises:
    exceptions.InvalidArgumentException: certificate-id was supplied together
      with AUTOMATIC certificate management.
  """
  is_automatic = (certificate_management and
                  certificate_management.upper() == 'AUTOMATIC')
  if is_automatic and certificate_id:
    raise exceptions.InvalidArgumentException('certificate-id',
                                              NO_CERTIFICATE_ID_MESSAGE)
def ValidateCertificateArgsForUpdate(certificate_id, no_certificate,
                                     certificate_management):
  """Validates the certificate flag combination for a mapping update.

  Args:
    certificate_id: str or None, the certificate-id flag value.
    no_certificate: bool, whether --no-certificate-id was given.
    certificate_management: str or None, the certificate-management flag
      value (case-insensitive).

  Raises:
    exceptions.InvalidArgumentException: the flag combination is invalid.
  """
  # Reuse the create-time check: AUTOMATIC management forbids certificate-id.
  ValidateCertificateArgs(certificate_id, certificate_management)
  if not certificate_management:
    return
  management = certificate_management.upper()
  if management == 'AUTOMATIC' and no_certificate:
    raise exceptions.InvalidArgumentException('no-certificate-id',
                                              NO_CERTIFICATE_ID_MESSAGE)
  if management == 'MANUAL' and not certificate_id and not no_certificate:
    raise exceptions.InvalidArgumentException(
        'certificate-id',
        ('A certificate ID or no-certificate must be provided when using '
         'manual certificate management.'))

View File

@@ -0,0 +1,196 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds exceptions raised by commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class NoAppIdentifiedError(exceptions.Error):
  """Raised when no app could be identified."""
  pass
class DeployError(exceptions.Error):
  """Base class for app deploy failures; subclasses describe specific modes."""
class RepoInfoLoadError(DeployError):
  """Indicates a failure to load a source context file.

  Attributes:
    filename: str, path of the repo info file that could not be read.
    inner_exception: Exception, the underlying error raised while reading.
  """
  def __init__(self, filename, inner_exception):
    super(RepoInfoLoadError, self).__init__()
    self.filename = filename
    self.inner_exception = inner_exception
  def __str__(self):
    return 'Could not read repo info file {0}: {1}'.format(
        self.filename, self.inner_exception)
class MultiDeployError(DeployError):
  """Indicates a failed attempt to deploy multiple image urls."""
  def __str__(self):
    # Single-service restriction applies to both --image-url and --appyaml.
    return ('No more than one service may be deployed when using the '
            'image-url or appyaml flag')
class NoRepoInfoWithImageUrlError(DeployError):
  """The user tried to specify a repo info file with a docker image."""
  def __str__(self):
    # NOTE(review): message says --image_url while siblings use image-url —
    # confirm the actual flag spelling before normalizing.
    return 'The --repo-info-file option is not compatible with --image_url.'
class DefaultBucketAccessError(DeployError):
  """Indicates a failed attempt to access a project's default bucket.

  Attributes:
    project: str, the project whose default GCS bucket was inaccessible.
  """
  def __init__(self, project):
    super(DefaultBucketAccessError, self).__init__()
    self.project = project
  def __str__(self):
    return (
        'Could not retrieve the default Google Cloud Storage bucket for [{a}]. '
        'Please try again or use the [bucket] argument.').format(a=self.project)
class InvalidVersionIdError(exceptions.Error):
  """Indicates an invalid version ID.

  Attributes:
    version: str, the rejected version id.
  """
  def __init__(self, version):
    # NOTE(review): unlike sibling errors, no super().__init__() call here —
    # confirm this is intentional before changing.
    self.version = version
  def __str__(self):
    return (
        'Invalid version id [{version}]. May only contain lowercase letters, '
        'digits, and hyphens. Must begin and end with a letter or digit. Must '
        'not exceed 63 characters.').format(version=self.version)
class MissingApplicationError(exceptions.Error):
  """If an app does not exist within the current project.

  Attributes:
    project: str, the project that lacks an App Engine application.
  """
  def __init__(self, project):
    # NOTE(review): unlike sibling errors, no super().__init__() call here —
    # confirm this is intentional before changing.
    self.project = project
  def __str__(self):
    return (
        'The current Google Cloud project [{0}] does not contain an App Engine '
        'application. Use `gcloud app create` to initialize an App Engine '
        'application within the project.').format(self.project)
class MissingInstanceError(exceptions.Error):
  """An instance required for the operation does not exist."""
  def __init__(self, instance):
    # instance: str, the identifier of the missing instance.
    super(MissingInstanceError, self).__init__(
        'Instance [{}] does not exist.'.format(instance))
class MissingVersionError(exceptions.Error):
  """A version required for the operation does not exist."""
  def __init__(self, version):
    # version: str, the identifier of the missing version.
    super(MissingVersionError, self).__init__(
        'Version [{}] does not exist.'.format(version))
class InvalidInstanceTypeError(exceptions.Error):
  """Instance has the wrong environment."""
  def __init__(self, environment, message=None):
    # environment: str, name of the unsupported environment.
    # message: str or None, optional extra detail appended to the error text.
    msg = '{} instances do not support this operation.'.format(environment)
    if message:
      msg += ' ' + message
    super(InvalidInstanceTypeError, self).__init__(msg)
class FileNotFoundError(exceptions.Error):
  """File or directory that was supposed to exist didn't exist."""
  # NOTE(review): shadows the Python 3 builtin FileNotFoundError within this
  # module — beware when catching either by name here.
  def __init__(self, path):
    super(FileNotFoundError, self).__init__('[{}] does not exist.'.format(path))
class DuplicateConfigError(exceptions.Error):
  """Two config files of the same type."""
  def __init__(self, path1, path2, config_type):
    # path1, path2: str, the two conflicting config file paths.
    # config_type: str, the config type both files try to define.
    super(DuplicateConfigError, self).__init__(
        '[{path1}] and [{path2}] are both trying to define a {t} config file. '
        'Only one config file of the same type can be updated at once.'.format(
            path1=path1, path2=path2, t=config_type))
class DuplicateServiceError(exceptions.Error):
  """Two <service>.yaml files defining the same service id."""
  def __init__(self, path1, path2, service_id):
    # path1, path2: str, the two yaml paths declaring the same service.
    # service_id: str, the duplicated service id.
    super(DuplicateServiceError, self).__init__(
        '[{path1}] and [{path2}] are both defining the service id [{s}]. '
        'All <service>.yaml files must have unique service ids.'.format(
            path1=path1, path2=path2, s=service_id))
class UnknownSourceError(exceptions.Error):
  """The path exists but points to an unknown file or directory."""
  def __init__(self, path):
    # path: str, the path that could not be classified.
    super(UnknownSourceError, self).__init__(
        '[{path}] could not be identified as a valid source directory or file.'
        .format(path=path))
class NotSupportedPy3Exception(exceptions.Error):
  """Raised by commands that do not support python3."""
class UnsupportedRuntimeError(exceptions.Error):
  """The runtime in app.yaml is not a valid Gen 1 runtime."""
  def __init__(self, file_path, supported_runtimes):
    # file_path: str, path of the app.yaml with the unsupported runtime.
    # supported_runtimes: the runtimes migration currently supports.
    super().__init__(
        'Migration is not supported for runtime present in [{file_path}].'
        ' Currently supported runtimes are: {runtimes}'.format(
            file_path=file_path, runtimes=supported_runtimes
        )
    )
class InvalidOutputDirectoryError(exceptions.Error):
  """The output directory given for migration is not empty."""
  def __init__(self, dir_path):
    # dir_path: str, the non-empty output directory.
    super().__init__(
        'Unable to write to output directory [{dir_path}]. The directory is not'
        ' empty. Please provide an empty directory to start a new migration.'
        .format(dir_path=dir_path)
    )
class MissingGen1ApplicationError(exceptions.Error):
  """The project does not contain an AppEngine version with a Gen1 runtime."""
  def __init__(self, project):
    # project: str, the project that lacks a Gen1 runtime version.
    super().__init__(
        'The provided project {0} does not contain an AppEngine version with a'
        ' Gen1 runtime.'.format(project)
    )

View File

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for `gcloud app firewall-rules`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import resources
import six
# The default rule is placed at MaxInt32 - 1 and is always evaluated last
DEFAULT_RULE_PRIORITY = 2**31 - 1
LIST_FORMAT = """
table(
priority:sort=1,
action,
source_range,
description
)
"""
registry = resources.REGISTRY
def GetRegistry(version):
  """Returns a resource registry that knows the given appengine API version.

  If the shared REGISTRY already has the collection for `version`, the cached
  module-level `registry` is returned unchanged; otherwise a clone is created,
  the API version registered on it, and the clone cached in `registry`.

  Args:
    version: str, the appengine API version to make available (e.g. 'v1').

  Returns:
    resources.Registry, the cached registry to parse appengine resources with.
  """
  global registry
  try:
    resources.REGISTRY.GetCollectionInfo('appengine', version)
  except resources.InvalidCollectionException:
    registry = resources.REGISTRY.Clone()
    registry.RegisterApiByName('appengine', version)
  return registry
def ParseFirewallRule(client, priority):
  """Creates a resource path given a firewall rule priority.

  Args:
    client: AppengineFirewallApiClient, the API client for this release track.
    priority: str, the priority of the rule.

  Returns:
    The resource for the rule.
  """
  rule_id = six.text_type(ParsePriority(priority))
  return GetRegistry(client.ApiVersion()).Parse(
      rule_id,
      params={'appsId': client.project},
      collection='appengine.apps.firewall.ingressRules')
def ParsePriority(priority):
  """Converts a priority to an integer.

  Args:
    priority: str, an integer-like value or the literal 'default'.

  Returns:
    int, the parsed priority.

  Raises:
    exceptions.InvalidArgumentException: If the value is not an integer or
      'default', or lies outside [1, DEFAULT_RULE_PRIORITY].
  """
  if priority == 'default':
    priority = DEFAULT_RULE_PRIORITY
  try:
    parsed = int(priority)
  except ValueError:
    raise exceptions.InvalidArgumentException(
        'priority', 'Priority should be an integer value or `default`.')
  if parsed <= 0 or parsed > DEFAULT_RULE_PRIORITY:
    raise exceptions.InvalidArgumentException(
        'priority', 'Priority must be between 1 and {0} inclusive.'.format(
            DEFAULT_RULE_PRIORITY))
  return parsed
def ParseAction(messages, action):
  """Converts an action string to the corresponding enum value.

  Options are: 'allow' or 'deny', otherwise None will be returned.

  Args:
    messages: apitools.base.protorpclite.messages, the proto messages class
      for this API version for firewall.
    action: str, the action as a string.

  Returns:
    ActionValueValuesEnum type, or None when no action was given.
  """
  if not action:
    return None
  enum_type = messages.FirewallRule.ActionValueValuesEnum
  return enum_type(action.upper())
def RaiseMinArgument():
  """Raise MinimumArgumentException for the firewall-rules update command."""
  raise exceptions.MinimumArgumentException([
      '--action', '--source-range', '--description'
  ], 'Please specify at least one attribute to the firewall-rules update.')

View File

@@ -0,0 +1,263 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds common flags used by the gcloud app commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
from googlecloudsdk.api_lib.app import logs_util
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.appengine.api import appinfo
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.docker import constants
from googlecloudsdk.core.docker import docker
# Reusable flag/argument definitions shared across `gcloud app` commands.
DOMAIN_FLAG = base.Argument(
    'domain',
    help=('A valid domain which may begin with a wildcard, such as: '
          '`example.com` or `*.example.com`'))
CERTIFICATE_ID_FLAG = base.Argument(
    'id',
    help=('The id of the certificate. This identifier is printed upon'
          ' creation of a new certificate. Run `{parent_command}'
          ' list` to view existing certificates.'))
LAUNCH_BROWSER = base.Argument(
    '--launch-browser',
    action='store_true', default=True, dest='launch_browser',
    help='Launch a browser if possible. When disabled, only displays the URL.')
IGNORE_CERTS_FLAG = base.Argument(
    '--ignore-bad-certs',
    action='store_true',
    default=False,
    hidden=True,
    help='THIS ARGUMENT NEEDS HELP TEXT.')
IGNORE_FILE_FLAG = base.Argument(
    '--ignore-file',
    help='Override the .gcloudignore file and use the specified file instead.')
# Matches DEFAULT_RULE_PRIORITY semantics in the firewall-rules util module.
FIREWALL_PRIORITY_FLAG = base.Argument(
    'priority',
    help=('An integer between 1 and 2^32-1 which indicates the evaluation order'
          ' of rules. Lowest priority rules are evaluated first. The handle '
          '`default` may also be used to refer to the final rule at priority'
          ' 2^32-1 which is always present in a set of rules.'))
# Log-filtering flags for `gcloud app logs`.
LEVEL = base.Argument(
    '--level',
    help='Filter entries with severity equal to or higher than a given level.',
    required=False,
    default='any',
    choices=logs_util.LOG_LEVELS)
LOGS = base.Argument(
    '--logs',
    help=('Filter entries from a particular set of logs. Must be a '
          'comma-separated list of log names (request_log, stdout, stderr, '
          'etc).'),
    required=False,
    default=logs_util.DEFAULT_LOGS,
    metavar='APP_LOG',
    type=arg_parsers.ArgList(min_length=1))
SERVER_FLAG = base.Argument(
    '--server', hidden=True, help='THIS ARGUMENT NEEDS HELP TEXT.')
SERVICE = base.Argument(
    '--service', '-s', help='Limit to specific service.', required=False)
VERSION = base.Argument(
    '--version', '-v', help='Limit to specific version.', required=False)
def AddServiceVersionSelectArgs(parser, short_flags=False):
  """Add arguments to a parser for selecting service and version.

  Args:
    parser: An argparse.ArgumentParser.
    short_flags: bool, whether to add short flags `-s` and `-v` for service
      and version respectively.
  """
  service_names = ['--service'] + (['-s'] if short_flags else [])
  version_names = ['--version'] + (['-v'] if short_flags else [])
  parser.add_argument(*service_names, required=False, help='The service ID.')
  parser.add_argument(*version_names, required=False, help='The version ID.')
def AddCertificateIdFlag(parser, include_no_cert):
  """Add the --certificate-id flag to a domain-mappings command.

  Args:
    parser: The parser to add the flag to.
    include_no_cert: bool, when True also add a mutually exclusive
      --no-certificate-id flag.
  """
  certificate_id = base.Argument(
      '--certificate-id',
      help=('A certificate id to use for this domain. May not be used on a '
            'domain mapping with automatically managed certificates. Use the '
            '`gcloud app ssl-certificates list` to see available certificates '
            'for this app.'))
  if not include_no_cert:
    certificate_id.AddToParser(parser)
    return
  group = parser.add_mutually_exclusive_group()
  certificate_id.AddToParser(group)
  group.add_argument(
      '--no-certificate-id',
      action='store_true',
      help='Do not associate any certificate with this domain.')
def AddCertificateManagementFlag(parser):
  """Add the --certificate-management choice flag to a domain-mappings command.

  Args:
    parser: The parser to add the flag to.
  """
  help_text = ('Type of certificate management. \'automatic\' will provision '
               'an SSL certificate automatically while \'manual\' requires '
               'the user to provide a certificate id to provision.')
  base.ChoiceArgument(
      '--certificate-management',
      choices=['automatic', 'manual'],
      help_str=help_text).AddToParser(parser)
def AddSslCertificateFlags(parser, required):
  """Add the common flags to an ssl-certificates command.

  Args:
    parser: The parser to add the flags to.
    required: bool, whether --display-name, --certificate and --private-key
      are mandatory.
  """
  parser.add_argument(
      '--display-name',
      required=required,
      help='A display name for this certificate.')
  certificate_help = """\
      The file path for the new certificate to upload. Must be in PEM
      x.509 format including the header and footer.
      """
  parser.add_argument(
      '--certificate',
      required=required,
      metavar='LOCAL_FILE_PATH',
      help=certificate_help)
  private_key_help = """\
      The file path to a local RSA private key file. The private key must be
      PEM encoded with header and footer and must be 2048 bits
      or fewer.
      """
  parser.add_argument(
      '--private-key',
      required=required,
      metavar='LOCAL_FILE_PATH',
      help=private_key_help)
def AddFirewallRulesFlags(parser, required):
  """Add the common flags to a firewall-rules command.

  Args:
    parser: The parser to add the flags to.
    required: bool, whether --source-range and --action are mandatory.
  """
  source_range_help = ('An IP address or range in CIDR notation or'
                       ' the ```*``` wildcard to match all traffic.')
  parser.add_argument(
      '--source-range', required=required, help=source_range_help)
  parser.add_argument(
      '--action',
      required=required,
      choices=['ALLOW', 'DENY'],
      type=lambda x: x.upper(),  # accept lowercase input from the CLI
      help='Allow or deny matched traffic.')
  parser.add_argument(
      '--description', help='A text description of the rule.')
def ValidateDockerBuildFlag(unused_value):
  """argparse type-callback for the removed --docker-build flag.

  Always raises, pointing users at the Container Builder based flow.

  Args:
    unused_value: str, the flag value; never used.

  Raises:
    argparse.ArgumentTypeError: Always.
  """
  message = """\
The --docker-build flag no longer exists.
Docker images are now built remotely using Google Container Builder. To run a
Docker build on your own host, you can run:
docker build -t gcr.io/<project>/<service.version> .
gcloud docker push gcr.io/<project>/<service.version>
gcloud app deploy --image-url=gcr.io/<project>/<service.version>
If you don't already have a Dockerfile, you must run:
gcloud beta app gen-config
first to get one.
"""
  raise argparse.ArgumentTypeError(message)
# Hidden tombstone flag: its type-callback always raises, so any use of
# --docker-build fails with a migration message (see ValidateDockerBuildFlag).
DOCKER_BUILD_FLAG = base.Argument(
    '--docker-build',
    hidden=True,
    help='THIS ARGUMENT NEEDS HELP TEXT.',
    type=ValidateDockerBuildFlag)
# Severity names accepted for log filtering, lowest to highest.
LOG_SEVERITIES = ['debug', 'info', 'warning', 'error', 'critical']
def GetCodeBucket(app, project):
  """Gets a bucket reference for a Cloud Build.

  Args:
    app: App resource for this project.
    project: str, The name of the current project.

  Returns:
    storage_util.BucketReference, The bucket to use.

  Raises:
    exceptions.DefaultBucketAccessError: If the app has no default code
      bucket available.
  """
  # No bucket was specified by the caller; fall back to the app's default.
  log.debug('No bucket specified, retrieving default bucket.')
  default_bucket = app.codeBucket
  if not default_bucket:
    raise exceptions.DefaultBucketAccessError(project)
  return storage_util.BucketReference.FromUrl(default_bucket)
# Validator for version ids; mirrors the App Engine module version id regex.
VERSION_TYPE = arg_parsers.RegexpValidator(
    appinfo.MODULE_VERSION_ID_RE_STRING,
    'May only contain lowercase letters, digits, and hyphens. '
    'Must begin and end with a letter or digit. Must not exceed 63 characters.')
def ValidateImageUrl(image_url, services):
  """Check the user-provided image URL.

  Ensures that:
  - it is consistent with the services being deployed (there must be exactly
    one)
  - it is an image in a supported Docker registry

  Args:
    image_url: str, the URL of the image to deploy provided by the user
    services: list, the services to deploy

  Raises:
    MultiDeployError: if image_url is provided and more than one service is
      being deployed
    docker.UnsupportedRegistryError: if image_url is provided and does not
      point to one of the supported registries
  """
  if image_url is None:
    return
  if len(services) != 1:
    raise exceptions.MultiDeployError()
  supported = any(image_url.startswith(prefix)
                  for prefix in constants.ALL_SUPPORTED_REGISTRIES)
  if not supported:
    raise docker.UnsupportedRegistryError(image_url)

View File

@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for GAE to CR migration."""
import argparse
import collections
import enum
import json
import logging
import os
import os.path
import re
import subprocess
from googlecloudsdk.api_lib.app import appengine_api_client
from googlecloudsdk.api_lib.resource_manager import folders
from googlecloudsdk.api_lib.run import api_enabler
from googlecloudsdk.api_lib.run import k8s_object
from googlecloudsdk.api_lib.run import service as service_lib
from googlecloudsdk.api_lib.run import traffic
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as c_exceptions
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util import list_incompatible_features
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util import translate
from googlecloudsdk.command_lib.artifacts import docker_util
from googlecloudsdk.command_lib.run import artifact_registry
from googlecloudsdk.command_lib.run import build_util
from googlecloudsdk.command_lib.run import config_changes
from googlecloudsdk.command_lib.run import connection_context
from googlecloudsdk.command_lib.run import container_parser
from googlecloudsdk.command_lib.run import exceptions
from googlecloudsdk.command_lib.run import flags
from googlecloudsdk.command_lib.run import messages_util
from googlecloudsdk.command_lib.run import platforms
from googlecloudsdk.command_lib.run import pretty_print
from googlecloudsdk.command_lib.run import resource_args
from googlecloudsdk.command_lib.run import resource_change_validators
from googlecloudsdk.command_lib.run import serverless_operations
from googlecloudsdk.command_lib.run import stages
from googlecloudsdk.command_lib.util.args import map_util
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.command_lib.util.concepts import presentation_specs
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import progress_tracker
class GAEToCRMigrationUtil():
  """Utility class for GAE to CR migration."""
  # Fallback app.yaml filename used when --appyaml is not supplied.
  DEFAULT_APPYAML = 'app.yaml'
  # Default App Engine service name.
  DEFAULT_SERVICE_NAME = 'default'
  # Key used for the service field in app.yaml-style data.
  SERVICE_FIELD = 'service'
  def __init__(self, api_client, args):
    """Initializes the GAEToCRMigration utility class.

    Args:
      api_client: The AppEngine API client.
      args: The argparse arguments.
    """
    print('\nDeploying to Cloud Run...\n')
    self.api_client = api_client
    self.input_dir = os.getcwd()
    # if app.yaml is not provided, use app.yaml in current directory
    if args.appyaml:
      self.appyaml_path = os.path.relpath(args.appyaml)
    elif args.service is None or args.version is None:
      print(
          'Using app.yaml in current directory.\n'
      )
      self.appyaml_path = os.path.join(self.input_dir, self.DEFAULT_APPYAML)
    # NOTE(review): when --appyaml is absent and both --service and --version
    # are provided, self.appyaml_path is never assigned — confirm the
    # deployed-version flow never reads that attribute.
    self.project = properties.VALUES.core.project.Get()

View File

@@ -0,0 +1,229 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains common utility function for GAE to CR migration."""
import logging
from typing import Any, Mapping, Optional, Sequence, Tuple, cast
from googlecloudsdk.api_lib.app import appengine_api_client
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import yaml
from googlecloudsdk.core.util import files
class InvalidAppYamlPathError(exceptions.Error):
  """Raised when an invalid or missing app.yaml path is provided."""
# Entrypoint for these runtimes must be specified in a Procfile
# instead of via the `--command` flag at the gcloud run deploy
# command.
ENTRYPOINT_FEATURE_KEYS: Sequence[str] = ['entrypoint', 'entrypoint.shell']
# Python runtimes whose entrypoint must be placed in a Procfile.
PYTHON_RUNTIMES_WITH_PROCFILE_ENTRYPOINT: Sequence[str] = [
    'python',
    'python37',
    'python38',
    'python39',
    'python310',
]
# Ruby runtimes whose entrypoint must be placed in a Procfile.
RUBY_RUNTIMES_WITH_PROCFILE_ENTRYPOINT: Sequence[str] = [
    'ruby',
    'ruby25',
    'ruby26',
    'ruby27',
    'ruby30',
]
RUNTIMES_WITH_PROCFILE_ENTRYPOINT: Sequence[str] = (
    PYTHON_RUNTIMES_WITH_PROCFILE_ENTRYPOINT
    + RUBY_RUNTIMES_WITH_PROCFILE_ENTRYPOINT
)
# Keys whose nested mappings are kept intact by flatten_keys().
_FLATTEN_EXCLUDE_KEYS: Sequence[str] = ['env_variables', 'envVariables']
def generate_output_flags(flags: Sequence[str], value: str) -> Sequence[str]:
  """Generate `flag=value` strings for each flag name.

  For --service-account flags, a value ending in a double quote has its first
  and last characters stripped (assumed surrounding quotes).

  Args:
    flags: Flag names (e.g. ['--cpu']).
    value: The value to pair with every flag.

  Returns:
    A list with one `flag=value` entry per flag name.
  """
  strip_quotes = flags[0] == '--service-account' and value.endswith('"')
  cleaned = value[1:-1] if strip_quotes else value
  return [f'{name}={cleaned}' for name in flags]
def get_feature_key_from_input(
    input_key_value_pairs: Mapping[str, Any], allow_keys: Sequence[str]
) -> Optional[str]:
  """Get the single allowed feature key present in the input.

  Args:
    input_key_value_pairs: Parsed input (e.g. app.yaml) key/value pairs.
    allow_keys: Keys that may identify this feature.

  Returns:
    The matching key; None when no allowed key is present or when more than
    one conflicting key is found (the conflict is logged).
  """
  allow_keys_from_input = [
      key for key in input_key_value_pairs if key in allow_keys
  ]
  if not allow_keys_from_input:
    return None
  if len(allow_keys_from_input) > 1:
    # Fixed: original message had a stray quote and double spaces.
    logging.error(
        '[Error] Conflicting configurations found: %s.'
        ' Please ensure only one is specified.',
        allow_keys_from_input,
    )
    return None
  return allow_keys_from_input[0]
def get_features_by_prefix(
    features: Mapping[str, feature_helper.Feature], prefix: str
) -> Mapping[str, feature_helper.Feature]:
  """Return the subset of features whose keys start with the given prefix."""
  matched = {}
  for name, feature in features.items():
    if name.startswith(prefix):
      matched[name] = feature
  return matched
def flatten_keys(
    input_data: Mapping[str, any],
    parent_path: str,
) -> Mapping[str, any]:
  """Flatten nested paths (root to leaf) of a dictionary to a single level.

  Keys listed in _FLATTEN_EXCLUDE_KEYS (env variable maps) are kept as-is
  rather than descended into.

  Args:
    input_data: The input dictionary to be flattened.
    parent_path: The parent path of the input dictionary ('' for the root).

  Returns:
    A dictionary mapping dotted paths to leaf values. For example,
    {"resources": {"cpu": 5}} becomes {"resources.cpu": 5}.
  """
  flattened = {}
  for key, value in input_data.items():
    dotted = f'{parent_path}.{key}' if parent_path else key
    recurse = (isinstance(value, Mapping)
               and key not in _FLATTEN_EXCLUDE_KEYS)
    if recurse:
      flattened.update(flatten_keys(value, dotted))
    else:
      flattened[dotted] = value
  return flattened
def validate_input(
    appyaml: str, service: str, version: str
) -> Tuple[feature_helper.InputType, Mapping[str, any]]:
  r"""Validate the input for cli commands.

  Only one of app.yaml or a deployed version (service + version) may be used
  as an input at any given time.

  Args:
    appyaml: The app.yaml file path.
    service: The service name.
    version: The version name.

  Returns:
    A tuple of (input type, input data); (None, None) when both an app.yaml
    and a deployed version were specified.
  """
  # `gcloud app migrate app-engine-to-cloudrun --service=XXX --version=XXX
  # --source=XXX` is invalid,
  # because both appyaml and deployed version are specified.
  appyaml_param_specified = appyaml is not None
  deployed_version_specified = service is not None and version is not None
  if appyaml_param_specified and deployed_version_specified:
    # Fixed: original message read "--appyaml flag t  specify" with double
    # spaces and a stray quote.
    logging.error(
        '[Error] Invalid input, only one of app.yaml or deployed '
        'version can be used as an input. Use --appyaml flag to '
        'specify the app.yaml, or use --service and --version '
        'to specify the deployed version.'
    )
    return (None, None)
  # If user runs `gcloud app migrate app-engine-to-cloudrun`
  # without providing any parameters,
  # it assumes the current directory has an `app.yaml` file by default.
  if not deployed_version_specified and not appyaml_param_specified:
    appyaml = 'app.yaml'
  input_type = (
      feature_helper.InputType.ADMIN_API
      if deployed_version_specified
      else feature_helper.InputType.APP_YAML
  )
  input_data = get_input_data_by_input_type(
      input_type, appyaml, service, version
  )
  if input_data is None:
    logging.error('[Error] Failed to read input data.')
  return (input_type, input_data)
def get_input_data_by_input_type(
    input_type: feature_helper.InputType,
    appyaml: str,
    service: str = None,
    version: str = None,
) -> Mapping[str, any]:
  """Retrieve the input_data (from yaml to python objects) by a given input_type.

  Args:
    input_type: ADMIN_API to read a deployed version via the App Engine API,
      APP_YAML to read a local app.yaml file.
    appyaml: str, path to the app.yaml file (APP_YAML only).
    service: str, the deployed service name (ADMIN_API only).
    version: str, the deployed version id (ADMIN_API only).

  Returns:
    A mapping of version fields / parsed app.yaml content, or None when the
    fetched version resource or the parsed yaml is empty.

  Raises:
    InvalidAppYamlPathError: If the app.yaml file does not exist.
  """
  # deployed version is input type
  if input_type == feature_helper.InputType.ADMIN_API:
    api_client = appengine_api_client.GetApiClientForTrack(base.ReleaseTrack.GA)
    gcloud_output = api_client.GetVersionResource(
        service=service, version=version
    )
    if gcloud_output is None:
      logging.error('gcloud_output is empty.')
      return None
    # Project the version resource onto a plain dict so downstream code can
    # treat both input types uniformly.
    version_data = {
        'automaticScaling': gcloud_output.automaticScaling,
        'createTime': gcloud_output.createTime,
        'createdBy': gcloud_output.createdBy,
        'deployment': gcloud_output.deployment,
        'diskUsageBytes': gcloud_output.diskUsageBytes,
        'env': gcloud_output.env,
        'errorHandlers': gcloud_output.errorHandlers,
        'handlers': gcloud_output.handlers,
        'id': gcloud_output.id,
        'inboundServices': gcloud_output.inboundServices,
        'instanceClass': gcloud_output.instanceClass,
        'libraries': gcloud_output.libraries,
        'name': gcloud_output.name,
        'network': gcloud_output.network,
        'runtime': gcloud_output.runtime,
        'runtimeChannel': gcloud_output.runtimeChannel,
        'serviceAccount': gcloud_output.serviceAccount,
        'servingStatus': gcloud_output.servingStatus,
        'threadsafe': gcloud_output.threadsafe,
        'versionUrl': gcloud_output.versionUrl,
        'zones': gcloud_output.zones,
    }
    # Env variables arrive as proto additionalProperties; expose them under
    # the app.yaml-style 'envVariables' key.
    if gcloud_output.envVariables is not None:
      version_data.update(
          {'envVariables': cast(
              Mapping[str, str], gcloud_output.envVariables.additionalProperties
          )}
      )
    return version_data
  # appyaml is input type
  try:
    with files.FileReader(appyaml) as file:
      appyaml_data = yaml.load(file.read())
      if appyaml_data is None:
        logging.error('%s is empty.', file.name)
      return appyaml_data
  except files.MissingFileError:
    raise InvalidAppYamlPathError(
        'app.yaml does not exist in the provided directory, please use'
        ' --appyaml flag to specify the correct app.yaml location.'
    )
  # NOTE(review): unreachable — both branches above return or raise.
  return None

View File

@@ -0,0 +1,252 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper module to access data in the features.yaml file as dataclass types."""
import dataclasses
import enum
from os import path as os_path
import re
from typing import Mapping, Sequence
from googlecloudsdk.core.util import files
from googlecloudsdk.core.yaml import yaml
# Location of features.yaml, resolved relative to this module's directory.
_CONFIG_PATH = os_path.join(
    os_path.dirname(__file__), "../config/features.yaml"
)
class FeatureType(enum.Enum):
  """Enum of feature types listed in features.yaml."""
  UNSUPPORTED = "unsupported"
  RANGE_LIMITED = "range-limited"
class InputType(enum.Enum):
  """Enum of input types: a local app.yaml or a deployed version (Admin API)."""
  APP_YAML = "app_yaml"
  ADMIN_API = "admin_api"
@dataclasses.dataclass(frozen=True)
class Range:
  """Range limit of a RangeLimitFeature.

  Attributes:
    min: The minimum value of the range.
    max: The maximum value of the range.
  """
  min: int
  max: int
  def __post_init__(self):
    """Validate that min does not exceed max.

    Raises:
      ValueError: If the range is invalid.
    """
    both_bounded = self.min is not None and self.max is not None
    if both_bounded and self.min > self.max:
      raise ValueError("min must be less than or equal to max")
@dataclasses.dataclass(frozen=True)
class Path:
  """Path variants for appyaml and api input data.

  Attributes:
    app_yaml: The path of the feature in app.yaml.
    admin_api: The path of the feature in admin api.
  """
  # Declared admin_api-first; features.yaml entries list admin_api first too.
  admin_api: str
  app_yaml: str
@dataclasses.dataclass
class Feature:
  """Contains common fields for all features."""
  # Where this feature appears in app.yaml / Admin API payloads.
  path: Path
@dataclasses.dataclass
class SupportedFeature(Feature):
  """Supported feature with 1:1 mappings between App Engine and Cloud Run features."""
  # Output flag names this feature translates to.
  flags: Sequence[str]
@dataclasses.dataclass
class UnsupportedFeature(Feature):
  """Contains common fields for all unsupported features."""
  # Severity of the incompatibility, e.g. 'major'.
  severity: str
  # Human-readable explanation of the incompatibility.
  reason: str
@dataclasses.dataclass
class RangeLimitFeature(UnsupportedFeature):
  """Contains common fields for all range limited features.

  It extends UnsupportedFeature and adds additional field of range limit.
  """
  range: Range
  flags: Sequence[str] = None
  def validate(self, val: int) -> bool:
    """Check if the given value is within range limit."""
    # NOTE(review): subscripts `range` like a dict; FeatureConfig.__post_init__
    # passes raw YAML mappings straight through, so `range` is a plain dict at
    # runtime despite the Range annotation — confirm before switching to
    # attribute access.
    return self.range["min"] <= val <= self.range["max"]
@dataclasses.dataclass
class ValueLimitFeature(UnsupportedFeature):
  """ValueLimitFeature represents a value_limited feature; it extends UnsupportedFeature and adds additional fields to validate compatible values."""
  allowed_values: Sequence[str] = None
  known_values: Sequence[str] = None
  valid_format: str = None
  flags: Sequence[str] = None
  def _check_runtime_value(self, val: str):
    """Check if the given value is a valid runtime value.

    Side effect: overwrites self.reason with a value-specific message when
    the check fails.
    """
    if self.known_values is not None and val not in self.known_values:
      reason = f"'{val}' is not a valid runtime value."
      self.reason = reason
      return False
    return True
  def validate(self, key: str, val: str) -> bool:
    """Check if the given value is valid, either by regex or set of known/allowed values."""
    if self.valid_format is not None:
      # validate by regex only when valid_format is present.
      return re.search(self.valid_format, val) is not None
    if key.startswith("runtime") and not self._check_runtime_value(val):
      return False
    return self.allowed_values is not None and val in self.allowed_values
@dataclasses.dataclass
class FeatureConfig:
  """FeatureConfig represents the incompatible features configuration."""
  unsupported: Sequence[UnsupportedFeature]
  range_limited: Sequence[RangeLimitFeature]
  value_limited: Sequence[ValueLimitFeature]
  supported: Sequence[SupportedFeature]
  def __post_init__(self):
    """Convert the data into dataclass types."""
    # Each field arrives as a list of raw YAML dicts; wrap each entry in its
    # dataclass type. Nested fields (path, range) appear to remain plain
    # dicts — see RangeLimitFeature.validate.
    unsupported_data = [UnsupportedFeature(**f) for f in self.unsupported]
    self.unsupported = unsupported_data
    range_limited_data = [RangeLimitFeature(**f) for f in self.range_limited]
    self.range_limited = range_limited_data
    value_limited_data = [ValueLimitFeature(**f) for f in self.value_limited]
    self.value_limited = value_limited_data
    supported_data = [SupportedFeature(**f) for f in self.supported]
    self.supported = supported_data
def get_feature_config() -> FeatureConfig:
  """Read config data from features yaml and convert data into dataclass types."""
  raw_yaml = _read_yaml_file()
  return _dict_to_features(_parse_yaml_file(raw_yaml))
def get_feature_list_by_input_type(
    input_type: InputType, features: UnsupportedFeature
) -> Mapping[str, UnsupportedFeature]:
  """Index a list of features by their path for the given input type.

  Each feature carries one path per input type (e.g. 'inbound_services' for
  app.yaml vs 'inboundServices' for the Admin API); the returned dict maps
  that path string to the feature itself. For example, with
  input_type=InputType.APP_YAML a feature whose path is
  {app_yaml: 'inbound_services', admin_api: 'inboundServices'} is keyed by
  'inbound_services'.

  Args:
    input_type: InputType enum to indicate the type of input data.
    features: List of UnsupportedFeature to be converted.

  Returns:
    A dictionary with path as key and Feature as value.
  """
  features_by_path = {}
  for feature in features:
    features_by_path[feature.path[input_type.value]] = feature
  return features_by_path
def _read_yaml_file() -> str:
  """Return the raw text of the incompatible-features config YAML."""
  with files.FileReader(_CONFIG_PATH) as config_file:
    contents = config_file.read()
  return contents
def _parse_yaml_file(yaml_string: str) -> Mapping[str, any]:
  """Parse the given string as YAML.

  Args:
    yaml_string: Input string to be parsed as yaml.

  Returns:
    A dictionary of the parsed yaml content.
  """
  parsed_content = yaml.safe_load(yaml_string)
  return parsed_content
def _dict_to_features(parsed_yaml: Mapping[str, any]) -> FeatureConfig:
  """Convert the parsed YAML mapping into a FeatureConfig.

  The mapping's keys must exactly match FeatureConfig's fields
  (unsupported, range_limited, value_limited, supported); any extra or
  missing key raises TypeError via the dataclass constructor.
  """
  return FeatureConfig(**parsed_yaml)

View File

@@ -0,0 +1,259 @@
---
unsupported:
- path:
admin_api: inboundServices
app_yaml: inbound_services
severity: major
reason: Cloud Run does not support GAE Inbound Services.
- path:
admin_api: handlers
app_yaml: handlers
severity: major
reason: Cloud Run does not support GAE Handlers.
- path:
admin_api: errorHandlers
app_yaml: error_handlers
severity: major
reason: Cloud Run does not support GAE Error Handlers.
- path:
admin_api: appEngineApis
app_yaml: app_engine_apis
severity: major
reason: Cloud Run does not support GAE Bundled Services.
- path:
admin_api: buildEnvVariables
app_yaml: build_env_variables
severity: major
reason: No support for passing environment vars to configure buildpacks.
range_limited:
- path:
admin_api: resources.cpu
app_yaml: resources.cpu
range:
max: 8
min: 0
severity: minor
reason: Cloud Run supports CPU values between 0 to 8.
flags:
- --cpu
- path:
admin_api: resources.memoryGb
app_yaml: resources.memory_gb
range:
max: 32
min: 0.5
severity: minor
reason: Cloud Run supports memory values between 0.5 to 32Gi.
flags:
- --memory
- path:
admin_api: automaticScaling.standardSchedulerSettings.minInstances
app_yaml: automatic_scaling.min_instances
range:
max: 1000
min: 0
severity: minor
reason: Cloud Run supports automatic scaling min instances values between 0 to 1000.
flags:
- --min-instances
- path:
admin_api: automaticScaling.standardSchedulerSettings.maxInstances
app_yaml: automatic_scaling.max_instances
range:
max: 1000
min: 0
severity: minor
reason: Cloud Run supports automatic scaling max instances values between 0 to 1000.
flags:
- --max-instances
- path:
admin_api: manualScaling.instances
app_yaml: manual_scaling.instances
range:
max: 1000
min: 0
severity: minor
reason: Cloud Run supports manual scaling values between 0 and 1000.
flags:
- --min-instances
- --max-instances
- path:
admin_api: basicScaling.maxInstances
app_yaml: basic_scaling.max_instances
range:
max: 1000
min: 0
severity: minor
reason: Cloud Run supports basic scaling values between 0 and 1000.
flags:
- --min-instances
- --max-instances
- path:
admin_api: automaticScaling.maxConcurrentRequests
app_yaml: automatic_scaling.max_concurrent_requests
range:
max: 1000
min: 1
severity: minor
reason: Cloud Run supports concurrency values between 1 and 1000.
flags:
- --concurrency
- path:
admin_api: automaticScaling.targetConcurrentRequests
app_yaml: automatic_scaling.target_concurrent_requests
range:
max: 1000
min: 1
severity: minor
reason: Cloud Run supports concurrency values between 1 and 1000.
flags:
- --concurrency
value_limited:
- path:
admin_api: runtimeConfig.pythonVersion
app_yaml: runtime_config.python_version
known_values:
- 2
- 3
allowed_values:
- 3
severity: major
reason: Cloud Run only supports second generation App Engine runtimes. Older runtimes are not compatible.
- path:
admin_api: runtime
app_yaml: runtime
known_values:
- aspnetcore
- custom
- java
- java8
- java11
- java17
- java21
- python
- python27
- python39
- python310
- python311
- python312
- python313
- php
- php55
- php72
- php73
- php74
- php81
- php82
- php83
- php84
- ruby
- ruby25
- ruby26
- ruby27
- ruby30
- ruby31
- ruby32
- ruby33
- ruby34
- go
- go111
- go112
- go113
- go114
- go115
- go116
- go117
- go118
- go119
- go120
- go121
- go122
- go123
- go124
- go125
- nodejs
- nodejs8
- nodejs10
- nodejs12
- nodejs14
- nodejs16
- nodejs18
- nodejs20
- nodejs22
allowed_values:
- aspnetcore
- custom
- java11
- java17
- java21
- python37
- python39
- python310
- python311
- python312
- python313
- php72
- php73
- php74
- php81
- php82
- php83
- php84
- ruby25
- ruby26
- ruby27
- ruby30
- ruby31
- ruby32
- ruby33
- ruby34
- go112
- go113
- go114
- go115
- go116
- go117
- go118
- go119
- go120
- go121
- go122
- go123
- go124
- go125
- nodejs8
- nodejs10
- nodejs12
- nodejs14
- nodejs16
- nodejs18
- nodejs20
- nodejs22
severity: major
reason: Cloud Run only supports second generation App Engine runtimes. Older runtimes are not compatible.
supported:
- path:
admin_api: entrypoint.shell
app_yaml: entrypoint
flags:
- --command
- path:
admin_api: envVariables
app_yaml: env_variables
flags:
- --set-env-vars
- path:
admin_api: vpcAccessConnector.name
app_yaml: vpc_access_connector.name
flags:
- --vpc-connector
- path:
admin_api: vpcAccessConnector.egressSetting
app_yaml: vpc_access_connector.egress_setting
flags:
- --vpc-egress
- path:
admin_api: serviceAccount
app_yaml: service_account
flags:
- --service-account

View File

@@ -0,0 +1,218 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List incompatible features for GAE to CR migration."""
import logging
from os import path as os_path
from typing import Dict, List, Mapping, Sequence
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import properties
from googlecloudsdk.core import yaml
# Path to the sibling config/ directory (which holds the features YAML).
# NOTE(review): _TEMPLATE_PATH is not referenced anywhere in this module's
# visible code — confirm it is still needed before removing.
_TEMPLATE_PATH = os_path.join(os_path.dirname(__file__), '../config/')
class IncompatibleFeaturesFoundError(exceptions.Error):
  """An error that is raised when incompatible features are found.

  Raised by _generate_output when any 'major'-severity feature is present;
  the message carries a human-readable YAML summary of those features.
  """
def check_for_urlmap_conditions(
    url_maps: List[any], input_type: feature_helper.InputType
) -> bool:
  """Checks whether every UrlMap is a trivial catch-all handler.

  A handler is trivial when its URL pattern matches everything ('.*' or
  '/.*' for Admin API input, '/.*' for app.yaml input) and its script is
  'auto'.

  Args:
    url_maps: A list of UrlMap objects (Admin API) or dicts (app.yaml).
    input_type: The input type of the app.yaml file.

  Returns:
    True if all UrlMaps match the conditions, False otherwise.
  """

  def _is_trivial(url_map) -> bool:
    if input_type == feature_helper.InputType.ADMIN_API:
      return (
          url_map.urlRegex in ('.*', '/.*')
          and url_map.script.scriptPath == 'auto'
      )
    if input_type == feature_helper.InputType.APP_YAML:
      return url_map['url'] == '/.*' and url_map['script'] == 'auto'
    return False

  return all(_is_trivial(url_map) for url_map in url_maps)
def get_length(val: any) -> int:
  """Return the length of a list/str/bytes value, or 0 for any other type.

  Args:
    val: Any value taken from the flattened input config.

  Returns:
    len(val) for the sized types this module cares about; 0 otherwise
    (ints, dicts, None, ...) so that emptiness checks on scalar values
    never trigger.
  """
  # Idiom: the original had three identical isinstance branches; a single
  # isinstance with a type tuple is equivalent and shorter.
  return len(val) if isinstance(val, (list, str, bytes)) else 0
def list_incompatible_features(
    appyaml: str, service: str, version: str
) -> None:
  """Lists features incompatible with Cloud Run for an app.yaml or deployed version.

  Args:
    appyaml: The path to the app.yaml file.
    service: The service name.
    version: The version name.
  """
  input_type, input_data = util.validate_input(appyaml, service, version)
  if not input_type or not input_data:
    return
  incompatibilities = _check_for_incompatibility(input_data, input_type)
  effective_appyaml = 'app.yaml' if appyaml is None else appyaml
  display_name = _generate_input_name(
      input_type, effective_appyaml, service, version
  )
  _generate_output(incompatibilities, input_type, display_name)
def _generate_input_name(
    input_type: feature_helper.InputType,
    appyaml: str,
    service: str,
    version: str,
) -> str:
  """Build a display name: the app.yaml path, or project/service/version."""
  if input_type == feature_helper.InputType.APP_YAML:
    return appyaml
  project_id = properties.VALUES.core.project.Get()
  return '/'.join([project_id, service, version])
def _check_for_incompatibility(
    input_data: Mapping[str, any], input_type: feature_helper.InputType
) -> Sequence[any]:
  """Check for incompatible features in the input yaml.

  Args:
    input_data: Parsed input (app.yaml dict or Admin API version data).
    input_type: Whether the input came from app.yaml or the Admin API.

  Returns:
    A list of the feature objects found to be incompatible with Cloud Run.
  """
  incompatible_list: List[any] = []
  feature_config = feature_helper.get_feature_config()
  # Index each feature kind by the key path matching this input type.
  unsupported_features = feature_helper.get_feature_list_by_input_type(
      input_type, feature_config.unsupported
  )
  range_limited_features = feature_helper.get_feature_list_by_input_type(
      input_type, feature_config.range_limited
  )
  value_restricted_features = feature_helper.get_feature_list_by_input_type(
      input_type, feature_config.value_limited
  )
  input_key_value_pairs = util.flatten_keys(input_data, '')
  for key, val in input_key_value_pairs.items():
    # Check for unsupported features.
    # build_env_variables entries flatten to child keys (e.g.
    # 'build_env_variables.FOO'), so match by prefix rather than equality.
    if key.startswith('build_env_variables'):
      incompatible_list.append(unsupported_features['build_env_variables'])
      continue
    if key.startswith('buildEnvVariables'):
      incompatible_list.append(unsupported_features['buildEnvVariables'])
      continue
    if key in unsupported_features:
      # Inbound services / error handlers only count when non-empty.
      if (
          key.startswith('inboundServices')
          or key.startswith('inbound_services')
      ) and get_length(val) > 0:
        incompatible_list.append(unsupported_features[key])
        continue
      if (
          key.startswith('errorHandlers') or key.startswith('error_handlers')
      ) and get_length(val) > 0:
        incompatible_list.append(unsupported_features[key])
        continue
      # Handlers are tolerated only when every one is a trivial catch-all.
      if key == 'handlers' and not check_for_urlmap_conditions(val, input_type):
        incompatible_list.append(unsupported_features[key])
        continue
      # Any other unsupported key is always incompatible. The special-cased
      # keys are excluded so an empty/trivial value falls through silently.
      if key not in [
          'handlers',
          'inbound_services',
          'error_handlers',
          'inboundServices',
          'errorHandlers',
      ]:
        incompatible_list.append(unsupported_features[key])
    # Check for range_limited features.
    if key in range_limited_features:
      if not range_limited_features[key].validate(val):
        incompatible_list.append(range_limited_features[key])
    # Check for value_restricted features.
    if key in value_restricted_features:
      if not value_restricted_features[key].validate(key, val):
        incompatible_list.append(value_restricted_features[key])
  return incompatible_list
def _generate_output(
    incompatible_features: List[feature_helper.UnsupportedFeature],
    input_type: feature_helper.InputType,
    input_name: str,
) -> None:
  """Generate readable output for features compatibility check result.

  Prints and logs a per-severity summary; 'minor' findings are purely
  informational, while any 'major' finding raises an error.

  Args:
    incompatible_features: Features flagged by _check_for_incompatibility.
    input_type: Whether the input came from app.yaml or the Admin API.
    input_name: Display name of the input (path or project/service/version).

  Raises:
    IncompatibleFeaturesFoundError: If any 'major'-severity feature is
      present; the message contains the YAML-formatted feature summary.
  """
  print(f'List incompatible features output for {input_name}:\n')
  logging.info('list-incompatible-features output for %s:\n', input_name)
  if not incompatible_features:
    print('No incompatibilities found.\n')
    logging.info('No incompatibilities found.\n')
    return
  # Bucket findings by severity; any other severity value is ignored.
  major_features = []
  minor_features = []
  for feature in incompatible_features:
    if feature.severity == 'major':
      major_features.append(feature)
    elif feature.severity == 'minor':
      minor_features.append(feature)
  if minor_features:
    print(
        f'Summary:\nminor: {len(minor_features)}\n'
        f'incompatible_features\n{yaml.dump(_get_display_features(minor_features, input_type))}\n'
    )
    logging.info(
        'Summary:\nminor: %s\nincompatible_features\n%s',
        len(minor_features),
        yaml.dump(_get_display_features(minor_features, input_type)),
    )
  if major_features:
    display_major_features = yaml.dump(
        _get_display_features(major_features, input_type)
    )
    error_message = (
        f'Summary:\nmajor: {len(major_features)}\n'
        f'incompatible_features\n{display_major_features}\n '
    )
    raise IncompatibleFeaturesFoundError(
        error_message
    )
def _get_display_features(
    features: List[feature_helper.UnsupportedFeature],
    input_type: feature_helper.InputType
) -> Sequence[Dict[str, str]]:
  """Convert feature objects to plain dicts for printable YAML output."""
  return [
      {
          'message': feature.reason,
          'category': feature.path[input_type.value],
          'severity': feature.severity,
      }
      for feature in features
  ]

View File

@@ -0,0 +1,176 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translate module contains the implementation for conversion of App Engine app.yaml or deployed version to Cloud Run."""
from collections.abc import Mapping, Sequence
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import concurrent_requests
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import cpu_memory
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import entrypoint
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import required_flags
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import scaling
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import supported_features
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import timeout
from googlecloudsdk.core import properties
def translate(
    appyaml: str, service: str, version: str, entrypoint_command: str
) -> Sequence[str]:
  """Translate an App Engine app.yaml or deployed version to the equivalent gcloud command to migrate the GAE app to Cloud Run.
  """
  input_type, input_data = util.validate_input(appyaml, service, version)
  if not input_type or not input_data:
    return []
  target_service = service or _get_service_name(input_data)
  # Normalize Admin API input into app.yaml-style keys so the translation
  # rules only have to deal with one key scheme.
  if input_type == feature_helper.InputType.APP_YAML:
    flattened_input = util.flatten_keys(input_data, parent_path='')
  else:
    flattened_input = _convert_admin_api_input_to_app_yaml(input_data)
  source_path = _get_source_path(input_type, appyaml)
  deploy_flags: Sequence[str] = _get_cloud_run_flags(
      input_data,
      flattened_input,
      input_type,
      entrypoint_command,
      source_path,
  )
  return _generate_output(target_service, deploy_flags, source_path)
def _get_source_path(input_type: feature_helper.InputType, appyaml: str) -> str:
  """Gets the source path for the Cloud Run deploy command."""
  if input_type != feature_helper.InputType.APP_YAML:
    # For a deployed version we cannot infer where the code lives; ask.
    answer = input(
        'Is the source code located in the current directory? If not, please'
        ' provide its path relative to the current directory: '
    )
    return answer + '/'
  # Strip the trailing 'app.yaml' from the path; an empty result means the
  # file is in the current directory.
  prefix = appyaml.rsplit('app.yaml', 1)[0] if appyaml else ''
  return prefix if prefix else '.'
def _convert_admin_api_input_to_app_yaml(
    admin_api_input_data: Mapping[str, any],
) -> Mapping[str, any]:
  """Converts flattened Admin API input into app.yaml-style keys.

  Only features known to the translator (range-limited, value-limited and
  supported features from the config YAML) are carried over; everything
  else in the Admin API payload is dropped.

  Args:
    admin_api_input_data: Version data as returned by the Admin API.

  Returns:
    A dict keyed by the app.yaml path of each translatable feature found
    in the input.
  """
  input_key_value_pairs = util.flatten_keys(
      admin_api_input_data, parent_path=''
  )
  feature_config = feature_helper.get_feature_config()
  # Admin-API-path -> feature, merged across all translatable feature kinds.
  translatable_features: Mapping[str, feature_helper.Feature] = {}
  translatable_features.update(
      feature_helper.get_feature_list_by_input_type(
          feature_helper.InputType.ADMIN_API, feature_config.range_limited
      )
  )
  translatable_features.update(
      feature_helper.get_feature_list_by_input_type(
          feature_helper.InputType.ADMIN_API, feature_config.value_limited
      )
  )
  translatable_features.update(
      feature_helper.get_feature_list_by_input_type(
          feature_helper.InputType.ADMIN_API, feature_config.supported
      )
  )
  # Features present in both the input and the translatable set.
  merged_keys = [
      key for key in input_key_value_pairs if key in translatable_features
  ]
  merged_features: list[feature_helper.Feature] = []
  for key in merged_keys:
    merged_features.append(translatable_features[key])
  app_yaml_input = {}
  for feature in merged_features:
    # Re-key each value by the feature's app.yaml path.
    app_yaml_input[feature.path[feature_helper.InputType.APP_YAML.value]] = (
        input_key_value_pairs[
            feature.path[feature_helper.InputType.ADMIN_API.value]
        ]
    )
  # instance_class has no feature entry but is needed by the cpu_memory
  # translation rule, so copy it across explicitly.
  if 'instanceClass' in admin_api_input_data:
    app_yaml_input['instance_class'] = input_key_value_pairs['instanceClass']
  return app_yaml_input
def _get_cloud_run_flags(
    input_data: Mapping[str, any],
    input_flatten_as_appyaml: Mapping[str, any],
    input_type: feature_helper.InputType,
    entrypoint_command: str,
    source_path: str,
) -> Sequence[str]:
  """Gets the cloud run flags for the given input data.

  Each translation rule contributes an (ordered) list of flags; the final
  flag list is their concatenation, in the order written below.

  Args:
    input_data: The raw (unflattened) input data.
    input_flatten_as_appyaml: Input flattened into app.yaml-style keys.
    input_type: Whether the input came from app.yaml or the Admin API.
    entrypoint_command: Entrypoint provided by the user, if any.
    source_path: Path of the app source directory.

  Returns:
    Flags for the generated `gcloud run deploy` command.
  """
  feature_config = feature_helper.get_feature_config()
  # Rules below operate on app.yaml-style keys, so index by APP_YAML paths.
  range_limited_features_app_yaml = (
      feature_helper.get_feature_list_by_input_type(
          feature_helper.InputType.APP_YAML, feature_config.range_limited
      )
  )
  supported_features_app_yaml = feature_helper.get_feature_list_by_input_type(
      feature_helper.InputType.APP_YAML, feature_config.supported
  )
  project = properties.VALUES.core.project.Get()
  return (
      concurrent_requests.translate_concurrent_requests_features(
          input_flatten_as_appyaml, range_limited_features_app_yaml
      )
      + scaling.translate_scaling_features(
          input_flatten_as_appyaml, range_limited_features_app_yaml
      )
      + timeout.translate_timeout_features(input_flatten_as_appyaml)
      + supported_features.translate_supported_features(
          input_type,
          input_flatten_as_appyaml,
          supported_features_app_yaml,
          project,
      )
      + entrypoint.translate_entrypoint_features(entrypoint_command)
      + required_flags.translate_add_required_flags(input_data, source_path)
      + cpu_memory.translate_app_resources(input_data)
  )
def _get_service_name(input_data: Mapping[str, any]) -> str:
"""Gets the service name from the input data."""
if 'service' in input_data:
custom_service_name = input_data['service'].strip()
if custom_service_name:
return custom_service_name
return 'default'
def _generate_output(
service_name: str, flags: Sequence[str], source_path: str
) -> Sequence[str]:
"""Generates the output for the Cloud Run deploy command."""
output = [
'gcloud',
'run',
'deploy',
f'{service_name}',
f'--source={source_path}',
]
if flags is not None:
output.extend(flags)
return output

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translation rule for concurrent_requests feature."""
import logging
from typing import Mapping, Sequence
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
# app.yaml key for GAE standard's max concurrent requests setting.
_MAX_CONCURRENT_REQUESTS_KEY = 'automatic_scaling.max_concurrent_requests'
# NOTE(review): despite the plural name this is a single key (a str, not a
# sequence); it is passed directly to util.get_feature_key_from_input —
# confirm that helper accepts a bare string.
_ALLOW_MAX_CONCURRENT_REQ_KEYS = _MAX_CONCURRENT_REQUESTS_KEY
# --concurrency value emitted when the input specifies no concurrency.
_DEFAULT_STANDARD_CONCURRENCY = 10
def translate_concurrent_requests_features(
    input_data: Mapping[str, any],
    range_limited_features: Mapping[str, feature_helper.RangeLimitFeature],
) -> Sequence[str]:
  """Translate max_concurrent_requests (standard) to Cloud Run --concurrency flag.

  Args:
    input_data: Flattened app.yaml-style key/value pairs.
    range_limited_features: Range-limited features keyed by app.yaml path.

  Returns:
    The --concurrency flag(s); empty when the input value is below the
    allowed minimum (the flag is dropped with a warning).
  """
  feature_key = util.get_feature_key_from_input(
      input_data, _ALLOW_MAX_CONCURRENT_REQ_KEYS
  )
  input_has_concurrent_requests = feature_key is not None
  # if input does not have max_concurrent_request/target_concurrent_request
  # specified, use the `automatic_scaling.max_concurrent_requests` from the
  # app2run/config/features.yaml as the default feature.
  if not input_has_concurrent_requests:
    feature = range_limited_features[_MAX_CONCURRENT_REQUESTS_KEY]
    default_value = _DEFAULT_STANDARD_CONCURRENCY
    return util.generate_output_flags(feature.flags, default_value)
  feature = range_limited_features[feature_key]
  input_value = input_data[feature_key]
  # Below the minimum: drop the flag entirely, warning only.
  if input_value < feature.range['min']:
    logging.warning(
        '%s has invalid value of %s, minimum value is %s',
        feature_key, input_value, feature.range['min']
    )
    return []
  # Above the maximum: clamp to the maximum instead of dropping.
  if input_value > feature.range['max']:
    logging.warning(
        '%s has invalid value of %s, maximum value is %s.',
        feature_key, input_value, feature.range['max']
    )
    return util.generate_output_flags(feature.flags, feature.range['max'])
  # In range; validate() should succeed here, so target_value is
  # effectively input_value (the fallback to max is defensive).
  target_value = (
      input_value if feature.validate(input_value) else feature.range['max']
  )
  return util.generate_output_flags(feature.flags, target_value)

View File

@@ -0,0 +1,157 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translation rule for app resources (instance_class, cpu, memory)."""
import logging
from typing import Mapping, Sequence
import frozendict
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import scaling
# app.yaml resource keys for explicit cpu/memory settings.
# NOTE(review): _ALLOWED_RESOURCE_KEY is not referenced in this module's
# visible code — confirm usage before removing.
_ALLOWED_RESOURCE_KEY = tuple(['resources.cpu', 'resources.memory_gb'])
# app.yaml key holding the GAE standard instance class.
_ALLOW_INSTANCE_CLASS_KEY = 'instance_class'
# Default instance class per scaling type when instance_class is absent.
_DEFAULT_CPU_MEM_CONFIG = frozendict.frozendict({
    scaling.ScalingTypeAppYaml.AUTOMATIC_SCALING: 'F1',
    scaling.ScalingTypeAppYaml.MANUAL_SCALING: 'B2',
    scaling.ScalingTypeAppYaml.BASIC_SCALING: 'B2',
})
# See https://cloud.google.com/run/docs/configuring/cpu
# See https://cloud.google.com/run/docs/configuring/memory-limits
# GAE instance class -> {'cpu': vCPU count, 'memory': GB} equivalents used
# to derive the Cloud Run --cpu/--memory flags.
_INSTANCE_CLASS_MAP = frozendict.frozendict({
    'F1': {'cpu': 1, 'memory': 0.25},
    'F2': {'cpu': 1, 'memory': 0.5},
    'F4': {'cpu': 1, 'memory': 1},
    'F4_1G': {'cpu': 1, 'memory': 2},
    'B1': {'cpu': 1, 'memory': 0.25},
    'B2': {'cpu': 1, 'memory': 0.5},
    'B4': {'cpu': 1, 'memory': 1},
    'B4_1G': {'cpu': 1, 'memory': 2},
    'B8': {'cpu': 2, 'memory': 2},
})
def translate_app_resources(
    input_data: Mapping[str, any]
) -> Sequence[str]:
  """Translate instance_class (standard) to equivalent/compatible Cloud Run --cpu and --memory flags.

  Args:
    input_data: Dictionary of the input data from app.yaml.

  Returns:
    List of output flags.
  """
  # Only the standard environment's instance_class is handled here.
  return _translate_standard_instance_class(input_data)
def _translate_standard_instance_class(
    input_data: Mapping[str, any]
) -> Sequence[str]:
  """Translate standard instance_class to equivalent/compatible Cloud Run flags.

  Args:
    input_data: Dictionary of the input data from app.yaml.

  Returns:
    List of output flags.
  """
  found_key = util.get_feature_key_from_input(
      input_data, [_ALLOW_INSTANCE_CLASS_KEY]
  )
  if not found_key:
    # No explicit instance_class: derive defaults from the scaling type.
    return _get_cpu_memory_default_based_on_scaling_method(input_data)
  return _generate_cpu_memory_flags_by_instance_class(input_data[found_key])
def _get_cpu_memory_default_based_on_scaling_method(
    input_data: Mapping[str, any]
) -> Sequence[str]:
  """Get default cpu/memory based on scaling method.

  Args:
    input_data: Dictionary of the input data from app.yaml.

  Returns:
    List of output flags; empty when no (or more than one) scaling option
    is configured.
  """
  used_scaling = scaling.get_scaling_features_used(input_data)
  if len(used_scaling) == 1:
    default_class = _DEFAULT_CPU_MEM_CONFIG[used_scaling[0]]
    return _generate_cpu_memory_flags_by_instance_class(default_class)
  if len(used_scaling) > 1:
    logging.warning(
        'Warning: More than one scaling option is defined, only one'
        ' scaling option should be used.'
    )
  return []
def _generate_cpu_memory_flags_by_instance_class(
    instance_class: str
) -> Sequence[str]:
  """Generate cpu/memory flags based on instance class.

  Args:
    instance_class: Instance class string (e.g. 'F1', 'B2').

  Returns:
    List of output flags (--cpu and --memory).
  """
  cpu_memory_config = _INSTANCE_CLASS_MAP[instance_class]
  cpu_value = cpu_memory_config['cpu']
  memory_value = cpu_memory_config['memory']
  # Cloud Run memory must be within [0.5Gi, 32Gi]; clamp to that range.
  if memory_value < 0.5:
    memory_value = 0.5
  if memory_value > 32:
    memory_value = 32
  # Cloud Run requires a minimum CPU count for larger memory sizes; pick
  # the highest matching tier. Fix: the previous chain of independent
  # `if` statements let a smaller tier overwrite the CPU value computed
  # by a larger one (e.g. memory > 24 set cpu=8 and then memory > 4 reset
  # it to 2); a descending elif chain applies exactly one tier.
  if memory_value > 24:
    cpu_value = 8
  elif memory_value > 16:
    cpu_value = 6
  elif memory_value > 8:
    cpu_value = 4
  elif memory_value > 4:
    cpu_value = 2
  # Cloud Run --memory requires a unit suffix
  # https://cloud.google.com/run/docs/configuring/memory-limits#setting-services
  return [
      f'--cpu={cpu_value}',
      f'--memory={_format_cloud_run_memory_unit(memory_value)}',
  ]
def _format_cloud_run_memory_unit(value: float) -> str:
"""Format memory value with Cloud Run unit.
Args:
value: Memory value in float.
Returns:
Memory value with Cloud Run unit.
"""
# 1GB = 953Mi, 1Gi = 1024Mi memory, in Cloud Run, a minimum of 512MiB memory
# is required for 1 CPU. Therefore, using Gi works for the lower bound of
# memory requirement.
# Allowed values are [m, k, M, G, T, Ki, Mi, Gi, Ti, Pi, Ei]
return f'{value}Gi'

View File

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translation rule for entrypoint."""
import logging
from typing import Sequence
# Conventional default entrypoints for GAE runtimes, intended for use with
# _DEFAULT_ENTRYPOINT_INFO_FORMAT.
# NOTE(review): these constants are not referenced in this module's visible
# code — confirm they are used by the remainder of the module.
_DEFAULT_PYTHON_ENTRYPOINT = 'gunicorn -b :$PORT main:app'
# Cloud Run service must listen on 0.0.0.0 host,
# ref https://cloud.google.com/run/docs/container-contract#port
_DEFAULT_RUBY_ENTRYPOINT = 'bundle exec ruby app.rb -o 0.0.0.0'
# printf-style template: (runtime name, default entrypoint, entrypoint).
_DEFAULT_ENTRYPOINT_INFO_FORMAT = (
    '[Info] Default entrypoint for %s is : "%s", retry'
    ' `gcloud app migrate-to-run` with the'
    ' --entrypoint="%s" flag.\n'
)
def translate_entrypoint_features(
    command: str,
) -> Sequence[str]:
  """Translate entrypoint from App Engine app to entrypoint for equivalent Cloud Run app."""
  if command is None:
    warning_text = (
        'Warning: entrypoint for the app is not detected/provided, if an'
        ' entrypoint is needed to start the app, please use the `--entrypoint`'
        ' flag to specify the entrypoint for the App.\n'
    )
    logging.info(warning_text)
    return []
  # NOTE(review): when a command IS provided, control falls off the end and
  # the function returns None rather than a Sequence[str] (e.g. a
  # --command flag). Either the translation is missing here or this view
  # of the module is truncated — confirm against the full file.

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add required flags to output gcloud run deploy command."""
from collections.abc import Mapping, Sequence
import os
def translate_add_required_flags(
    input_data: Mapping[str, any],
    source_path: str,
) -> Sequence[str]:
  """Add required flags to gcloud run deploy command.

  Args:
    input_data: Parsed app.yaml data (consulted for the 'runtime' value).
    source_path: Directory containing the app source (and app.yaml).

  Returns:
    List of flags that must always be present on the deploy command.
  """
  required_flags = [f'--labels={_get_labels()}']
  if _check_dockerfile_exists(source_path):
    # A user-supplied Dockerfile takes precedence over a buildpack base
    # image, so clear any configured base image.
    required_flags.append('--clear-base-image')
  elif 'runtime' in input_data:
    required_flags.append(f'--base-image={input_data["runtime"]}')
  # Fix: previously, when no Dockerfile existed and 'runtime' was absent,
  # an empty string was appended to the flag list, injecting a bogus empty
  # argument into the generated command; now nothing is added in that case.
  return required_flags
def _get_labels() -> str:
"""Get labels for gcloud run deploy command."""
return ','.join([
'migrated-from',
'gcloud-gae2cr-version=1',
])
def _check_dockerfile_exists(source_path: str) -> bool:
"""Checks if a Dockerfile exists in the same directory as the app.yaml file."""
dockerfile_path = os.path.join(
os.path.dirname(source_path), 'Dockerfile'
)
return os.path.exists(dockerfile_path)

View File

@@ -0,0 +1,160 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translation rule for scaling features."""
import enum
import logging
from typing import Mapping, Sequence
import frozendict
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
class ScalingTypeAppYaml(enum.Enum):
  """Enum of scaling types in app.yaml.

  The enum value is the app.yaml key prefix for that scaling option.
  """

  AUTOMATIC_SCALING = 'automatic_scaling'
  MANUAL_SCALING = 'manual_scaling'
  BASIC_SCALING = 'basic_scaling'
# Per scaling type, the instance-count keys this rule translates; other
# sub-keys under the same prefix (e.g.
# automatic_scaling.max_concurrent_requests) are handled by their own
# translation rules.
_SCALING_FEATURE_KEYS_ALLOWED_LIST = frozendict.frozendict({
    ScalingTypeAppYaml.AUTOMATIC_SCALING: [
        'automatic_scaling.min_instances',
        'automatic_scaling.max_instances',
    ],
    ScalingTypeAppYaml.MANUAL_SCALING: ['manual_scaling.instances'],
    ScalingTypeAppYaml.BASIC_SCALING: ['basic_scaling.max_instances'],
})
def translate_scaling_features(
    input_data: Mapping[str, any],
    range_limited_features: Mapping[str, feature_helper.RangeLimitFeature],
) -> Sequence[str]:
  """Translate scaling features.

  Only one of the scaling options (automatic_scaling, manual_scaling,
  basic_scaling) may be specified; zero or multiple options yield no flags.

  Args:
    input_data: Dictionary of the parsed app.yaml file.
    range_limited_features: Dictionary of scaling features with range limits.

  Returns:
    A list of strings representing the flags for Cloud Run.
  """
  used_scaling_types = get_scaling_features_used(input_data)
  if not used_scaling_types:
    return []
  if len(used_scaling_types) > 1:
    logging.warning(
        'Warning: More than one scaling type is defined,only one'
        ' scaling option should be used.'
    )
    return []
  return _get_output_flags(
      input_data, range_limited_features, used_scaling_types[0]
  )
def _get_output_flags(
    input_data: Mapping[str, any],
    range_limited_features: Mapping[str, feature_helper.RangeLimitFeature],
    scaling_type: ScalingTypeAppYaml,
) -> Sequence[str]:
  """Build the Cloud Run flags for the single scaling type in use.

  Args:
    input_data: Dictionary of the parsed app.yaml file.
    range_limited_features: Dictionary of scaling features with range limits.
    scaling_type: The scaling type used in app.yaml.

  Returns:
    A list of strings representing the flags for Cloud Run.
  """
  flattened = util.flatten_keys(input_data, '')
  # Feature keys from app.yaml that carry the scaling-type prefix,
  # e.g. 'automatic_scaling.*'.
  prefixed_keys = util.get_features_by_prefix(flattened, scaling_type.value)
  # Restrict to the allowed list so unrelated scaling features (such as
  # `automatic_scaling.max_concurrent_requests` or
  # `automatic_scaling.target_concurrent_requests`) are not processed here.
  allowed = _SCALING_FEATURE_KEYS_ALLOWED_LIST[scaling_type]
  flags = []
  for feature_key in (k for k in prefixed_keys if k in allowed):
    flags.extend(
        _get_output_flags_by_scaling_type(
            feature_key,
            range_limited_features[feature_key],
            flattened[feature_key],
        )
    )
  return flags
def _get_output_flags_by_scaling_type(
feature_key: str,
range_limited_feature: feature_helper.RangeLimitFeature,
input_value: str,
) -> Sequence[str]:
"""Get the output flags for the given scaling type.
Args:
feature_key: The feature key in app.yaml.
range_limited_feature: The range limited feature.
input_value: The input value from app.yaml.
Returns:
A list of strings representing the flags for Cloud Run.
"""
if input_value < range_limited_feature.range['min']:
logging.warning(
'Warning: %s has a negagive value of %s, minimum value is %s.',
feature_key,
input_value,
range_limited_feature.range['min'],
)
return []
target_value = (
input_value
if range_limited_feature.validate(input_value)
else range_limited_feature.range['max']
)
return util.generate_output_flags(range_limited_feature.flags, target_value)
def get_scaling_features_used(
    input_data: Mapping[str, any],
) -> Sequence[ScalingTypeAppYaml]:
  """Detect which scaling types appear in the input (app.yaml)."""
  detected = {
      scaling_type
      for scaling_type in ScalingTypeAppYaml
      if util.get_features_by_prefix(input_data, scaling_type.value)
  }
  return list(detected)

View File

@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translate supported features found at app.yaml to equivalent Cloud Run flags."""
from typing import Mapping, Sequence
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.common import util
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.config import feature_helper
from googlecloudsdk.core import properties
# Re-exported for callers of this module.
ENTRYPOINT_FEATURE_KEYS = util.ENTRYPOINT_FEATURE_KEYS
_ALLOW_ENV_VARIABLES_KEY = 'env_variables'
_ALLOW_SERVICE_ACCOUNT_KEY = 'service_account'
# Features handled by dedicated translation rules below (or elsewhere) and
# therefore skipped by the generic key-by-key translation loop.
_EXCLUDE_FEATURES = util.ENTRYPOINT_FEATURE_KEYS + [_ALLOW_ENV_VARIABLES_KEY]
def translate_supported_features(
    input_type: feature_helper.InputType,
    input_data: Mapping[str, any],
    supported_features: Mapping[str, feature_helper.SupportedFeature],
    project_cli_flag: str,
) -> Sequence[str]:
  """Translate supported app.yaml features into Cloud Run flags."""
  flags = []
  for feature_key, feature in supported_features.items():
    if feature_key not in input_data:
      continue
    # Excluded features are handled by dedicated translation rules.
    if feature_key in _EXCLUDE_FEATURES:
      continue
    quoted_value = f'"{input_data[feature_key]}"'
    flags.extend(util.generate_output_flags(feature.flags, quoted_value))
  flags.extend(
      _get_output_flags_for_env_variables(
          input_type, input_data, supported_features
      )
  )
  flags.extend(
      _get_output_flags_for_default_service_account(
          input_data, supported_features, project_cli_flag
      )
  )
  return flags
def _get_output_flags_for_env_variables(
    input_type: feature_helper.InputType,
    input_data: Mapping[str, any],
    supported_features: Mapping[str, feature_helper.SupportedFeature],
) -> Sequence[str]:
  """Get output flags for env_variables.

  The env_variables value is a nested structure, so the feature key is looked
  up in the unflattened input_data rather than in flattened key/value pairs.
  """
  env_key = util.get_feature_key_from_input(
      input_data, [_ALLOW_ENV_VARIABLES_KEY]
  )
  if not env_key:
    return []
  raw_value = input_data[env_key]
  if input_type == feature_helper.InputType.ADMIN_API:
    # A deployed version (Admin API) represents env variables as a list of
    # key/value entries; convert to a dict first.
    env_value = _generate_envs_output(
        {entry.key: entry.value for entry in raw_value}
    )
  else:
    # app.yaml parses env_variables directly into a dict.
    env_value = _generate_envs_output(raw_value)
  feature = supported_features[env_key]
  return util.generate_output_flags(feature.flags, f'"{env_value}"')
def _get_output_flags_for_default_service_account(
    input_data: Mapping[str, any],
    supported_features: Mapping[str, feature_helper.SupportedFeature],
    project_cli_flag: str,
) -> Sequence[str]:
  """Get output flags for default service account."""
  explicit_key = util.get_feature_key_from_input(
      input_data, [_ALLOW_SERVICE_ACCOUNT_KEY]
  )
  if explicit_key:
    # An explicit service_account is translated by the generic rule.
    return []
  # service_account is absent from app.yaml/the deployed version, so fall back
  # to the App Engine default service account
  # (https://cloud.google.com/appengine/docs/standard/go/service-account),
  # built from the project id: the --project cli flag takes precedence,
  # otherwise the gcloud config project is used.
  if project_cli_flag is not None:
    project_id = project_cli_flag
  else:
    project_id = properties.VALUES.core.project.Get()
  feature = supported_features['service_account']
  default_service_account = f'{project_id}@appspot.gserviceaccount.com'
  return util.generate_output_flags(feature.flags, default_service_account)
def _generate_envs_output(envs: Mapping[str, str]) -> str:
"""Generate output string for env variables.
Args:
envs: A dictionary of environment variables.
Returns:
A string representing the environment variables in the format
key=value,key=value or key=value@key=value if value contains comma.
Returns an empty string if the input is empty.
"""
if not envs.items():
return ''
value_contains_comma = False
for _, value in envs.items():
if ',' in value:
value_contains_comma = True
break
delimiter = '@' if value_contains_comma else ','
output_str = '' if delimiter == ',' else f'^{delimiter}^'
for key, value in envs.items():
output_str += f'{key}={value}{delimiter}'
return output_str[:-1]

View File

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translation rule for timeout feature."""
from typing import Mapping, Sequence
from googlecloudsdk.command_lib.app.gae_to_cr_migration_util.translation_rules import scaling
# Scaling methods grouped by the Cloud Run request timeout applied for them:
# automatic scaling maps to a 10-minute timeout; manual and basic scaling map
# to a 60-minute timeout (see translate_timeout_features).
_SCALING_METHOD_W_10_MIN_TIMEOUT = frozenset(
    {scaling.ScalingTypeAppYaml.AUTOMATIC_SCALING}
)
_SCALING_METHOD_W_60_MIN_TIMEOUT = frozenset({
    scaling.ScalingTypeAppYaml.MANUAL_SCALING,
    scaling.ScalingTypeAppYaml.BASIC_SCALING,
})
def translate_timeout_features(input_data: Mapping[str, any]) -> Sequence[str]:
  """Translate timeout features based on scaling method."""
  used = scaling.get_scaling_features_used(input_data)
  # No flag is produced when zero or multiple scaling methods are configured.
  if len(used) != 1:
    return []
  method = used[0]
  if method in _SCALING_METHOD_W_10_MIN_TIMEOUT:
    return ['--timeout=600']
  if method in _SCALING_METHOD_W_60_MIN_TIMEOUT:
    return ['--timeout=3600']
  return []

View File

@@ -0,0 +1,91 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tunnel TCP traffic over Cloud IAP WebSocket connection."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.compute import iap_tunnel
from googlecloudsdk.core import log
def AddSshTunnelArgs(parser):
  # Registers the opt-in --tunnel-through-iap boolean flag used by the
  # ssh/scp surface; CreateSshTunnelArgs later reads it via IsSpecified.
  parser.add_argument(
      '--tunnel-through-iap',
      action='store_true',
      help="""\
      Tunnel the ssh connection through Identity-Aware Proxy for TCP forwarding.
      To learn more, see the
      [IAP for TCP forwarding documentation](https://cloud.google.com/iap/docs/tcp-forwarding-overview).
      """,
  )
def CreateSshTunnelArgs(args, api_client, track, project, version, instance):
  """Construct an SshTunnelArgs from command line args and values.

  Args:
    args: The parsed commandline arguments. May or may not have had
      AddSshTunnelArgs called.
    api_client: An appengine_api_client.AppEngineApiClient.
    track: ReleaseTrack, The currently running release track.
    project: str, the project id (string with dashes).
    version: The target version reference object.
    instance: The target instance reference object.

  Returns:
    SshTunnelArgs or None if IAP Tunnel is disabled.
  """
  # If tunneling through IAP is not available, then abort.
  if not hasattr(args, 'tunnel_through_iap'):
    return None
  instance_ip_mode_enum = (
      api_client.messages.Network.InstanceIpModeValueValuesEnum
  )
  # If IAP tunnelling is specified, then use it.
  if args.IsSpecified('tunnel_through_iap'):
    # If IAP tunneling is explicitly disabled, then abort.
    if not args.tunnel_through_iap:
      log.status.Print(
          'IAP tunnel is disabled; ssh/scp operations that require'
          ' IAP tunneling will fail.'
      )
      return None
    else:
      # allow IAP tunneling for instances with external ip.
      log.status.Print(
          'IAP tunnel is enabled; ssh/scp operations that require'
          ' IAP tunneling will succeed.'
      )
  else:
    # defaults to using IAP tunneling for only instances without an external ip.
    # NOTE(review): the `is not` comparison relies on the message-enum members
    # being singletons; also assumes version.network is always populated —
    # TODO confirm for versions with no network settings.
    if version.network.instanceIpMode is not instance_ip_mode_enum.INTERNAL:
      log.status.Print(
          'External IP address was found while IAP tunneling not specified;'
      )
      return None
  # Populate the tunnel target from the instance's VM zone and id.
  res = iap_tunnel.SshTunnelArgs()
  res.track = track.prefix
  res.project = project
  res.zone = instance.vmZoneName
  res.instance = instance.id
  return res

View File

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for handling Manifest file in a Java jar file.
Jar files are just zip files with a particular interpretation for certain files
in the zip under the META-INF/ directory. So we can read and write them using
the standard zipfile module.
The specification for jar files is at
http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from __future__ import with_statement
import re
import zipfile
# Fixed path of the manifest member inside a jar archive, per the jar spec.
_MANIFEST_NAME = 'META-INF/MANIFEST.MF'
class Error(Exception):
  """Base class for errors raised by this module."""
  pass
class InvalidJarError(Error):
  """Raised when a jar's manifest is malformed."""
  pass
class Manifest(object):
  """A jar manifest, split into its main section and named sub-sections.

  Attributes:
    main_section: dict mapping attribute names to values for the first
      section of the manifest, e.g. {'Manifest-Version': '1.0'}.
    sections: dict keyed by each remaining section's 'Name' attribute value;
      each value is an attribute dict shaped like main_section.
  """

  def __init__(self, main_section, sections):
    self.main_section = main_section
    self.sections = sections
def ReadManifest(jar_file_name):
  """Read and parse the manifest out of the given jar.

  Args:
    jar_file_name: the name of the jar from which the manifest is to be read.

  Returns:
    A parsed Manifest object, or None if the jar has no manifest.

  Raises:
    IOError: if the jar does not exist or cannot be read.
  """
  with zipfile.ZipFile(jar_file_name) as jar:
    try:
      raw_manifest = jar.read(_MANIFEST_NAME)
    except KeyError:
      # zipfile raises KeyError for a missing archive member.
      return None
  return _ParseManifest(raw_manifest.decode('utf-8'), jar_file_name)
def _ParseManifest(manifest_string, jar_file_name):
  """Parse a Manifest object out of the given string.

  Args:
    manifest_string: a str or unicode that is the manifest contents.
    jar_file_name: a str that is the path of the jar, for use in exception
      messages.

  Returns:
    A Manifest object parsed out of the string.

  Raises:
    InvalidJarError: if the manifest is not well-formed.
  """
  # Normalize \r\n line terminators to \n and drop trailing newlines.
  normalized = '\n'.join(manifest_string.splitlines()).rstrip('\n')
  # Sections are separated by one or more blank lines.
  parsed_sections = [
      _ParseManifestSection(text, jar_file_name)
      for text in re.split('\n{2,}', normalized)
  ]
  named_sections = {}
  for entry in parsed_sections[1:]:
    name = entry.get('Name')
    if name is None:
      raise InvalidJarError('%s: Manifest entry has no Name attribute: %r' %
                            (jar_file_name, entry))
    named_sections[name] = entry
  return Manifest(parsed_sections[0], named_sections)
def _ParseManifestSection(section, jar_file_name):
"""Parse a dict out of the given manifest section string.
Args:
section: a str or unicode that is the manifest section. It looks something
like this (without the >):
> Name: section-name
> Some-Attribute: some value
> Another-Attribute: another value
jar_file_name: a str that is the path of the jar, for use in exception
messages.
Returns:
A dict where the keys are the attributes (here, 'Name', 'Some-Attribute',
'Another-Attribute'), and the values are the corresponding attribute values.
Raises:
InvalidJarError: if the manifest section is not well-formed.
"""
# Join continuation lines.
section = section.replace('\n ', '').rstrip('\n')
if not section:
return {}
try:
return dict(line.split(': ', 1) for line in section.split('\n'))
except ValueError:
raise InvalidJarError('%s: Invalid manifest %r' % (jar_file_name, section))

View File

@@ -0,0 +1,169 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for safe migrations of config files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import shutil
from googlecloudsdk.appengine.datastore import datastore_index_xml
from googlecloudsdk.appengine.tools import cron_xml_parser
from googlecloudsdk.appengine.tools import dispatch_xml_parser
from googlecloudsdk.appengine.tools import queue_xml_parser
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import files
# User-facing descriptions for each migration, used in help texts and the
# confirmation prompt shown by Run().
_CRON_DESC = 'Translates a cron.xml into cron.yaml.'
_QUEUE_DESC = 'Translates a queue.xml into queue.yaml.'
_DISPATCH_DESC = 'Translates a dispatch.xml into dispatch.yaml.'
_INDEX_DESC = 'Translates a datastore-indexes.xml into index.yaml.'
class MigrationError(exceptions.Error):
  """Raised when a config migration cannot proceed (e.g. backup conflict)."""
  pass
def _Bakify(file_path):
return file_path + '.bak'
class MigrationResult(object):
  """A declarative description of file changes about to be applied.

  Args:
    new_files: {str, str} a mapping from absolute file path to new contents of
      the file, or None if the file should be deleted.
  """

  def __init__(self, new_files):
    self.new_files = new_files

  def __eq__(self, other):
    return self.new_files == other.new_files

  def __ne__(self, other):
    return not self == other

  def _Backup(self):
    # Copy every existing target file to '<path>.bak', refusing to clobber an
    # existing backup.
    for path in self.new_files.keys():
      if not os.path.isfile(path):
        continue
      bak_path = _Bakify(path)
      if os.path.exists(bak_path):
        raise MigrationError(
            'Backup file path [{}] already exists.'.format(bak_path))
      log.err.Print('Copying [{}] to [{}]'.format(path, bak_path))
      shutil.copy2(path, bak_path)

  def _WriteFiles(self):
    # None contents means delete; anything else is written (over)writing the
    # target.
    for path, new_contents in self.new_files.items():
      if new_contents is None:
        log.err.Print('Deleting [{}]'.format(path))
        os.remove(path)
      else:
        action = 'Overwriting' if os.path.exists(path) else 'Writing'
        log.err.Print('{} [{}]'.format(action, path))
        files.WriteFileContents(path, new_contents)

  def Apply(self):
    """Backs up first, then deletes, overwrites and writes new files."""
    self._Backup()
    self._WriteFiles()
def _MigrateCronXML(src, dst):
  """Migration script for cron.xml."""
  yaml_contents = cron_xml_parser.GetCronYaml(None, files.ReadFileContents(src))
  # Delete the XML source and write the translated YAML.
  return MigrationResult({src: None, dst: yaml_contents})
def _MigrateQueueXML(src, dst):
  """Migration script for queue.xml."""
  yaml_contents = queue_xml_parser.GetQueueYaml(
      None, files.ReadFileContents(src))
  # Delete the XML source and write the translated YAML.
  return MigrationResult({src: None, dst: yaml_contents})
def _MigrateDispatchXML(src, dst):
  """Migration script for dispatch.xml."""
  yaml_contents = dispatch_xml_parser.GetDispatchYaml(
      None, files.ReadFileContents(src))
  # Delete the XML source and write the translated YAML.
  return MigrationResult({src: None, dst: yaml_contents})
def _MigrateDatastoreIndexXML(src, dst, auto_src=None):
  """Migration script for datastore-indexes.xml."""
  indexes = datastore_index_xml.IndexesXmlToIndexDefinitions(
      files.ReadFileContents(src))
  new_files = {src: None}
  if auto_src:
    # Fold the auto-generated index file into the result and delete it too.
    auto_indexes = datastore_index_xml.IndexesXmlToIndexDefinitions(
        files.ReadFileContents(auto_src))
    indexes.indexes += auto_indexes.indexes
    new_files[auto_src] = None
  new_files[dst] = indexes.ToYAML()
  return MigrationResult(new_files)
class MigrationScript(object):
  """A migration function bundled with its user-facing metadata.

  Attributes:
    migrate_fn: a function which accepts a variable number of self-defined
      kwargs and returns a MigrationResult.
    description: str, description for help texts and prompts.
  """

  def __init__(self, migrate_fn, description):
    self.migrate_fn = migrate_fn
    self.description = description
def Run(entry, **kwargs):
  """Run a migration entry, with prompts and warnings.

  Args:
    entry: MigrationScript, the entry to run.
    **kwargs: keyword args for the migration function.
  """
  # Compute the full result up front so errors surface before any prompt.
  result = entry.migrate_fn(**kwargs)
  log.warning('Please *back up* existing files.\n')
  console_io.PromptContinue(
      entry.description,
      default=True,
      prompt_string='Do you want to run the migration script now?',
      cancel_on_no=True)
  result.Apply()
# Registry of all migration entries. Each key corresponds to the gcloud
# command name that triggers the script.
REGISTRY = {
    'cron-xml-to-yaml': MigrationScript(_MigrateCronXML, _CRON_DESC),
    'queue-xml-to-yaml': MigrationScript(_MigrateQueueXML, _QUEUE_DESC),
    'dispatch-xml-to-yaml': MigrationScript(_MigrateDispatchXML,
                                            _DISPATCH_DESC),
    'datastore-indexes-xml-to-yaml': MigrationScript(_MigrateDatastoreIndexXML,
                                                     _INDEX_DESC),
}

View File

@@ -0,0 +1,368 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for app migrate gen1-to-gen2."""
import json
import os
from os import path
import pathlib
import shutil
import time
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import yaml
from googlecloudsdk.core.util import files
class Gen1toGen2Migration:
  """Utility class for migrating Gen 1 App Engine applications to Gen 2."""

  # Fallback app.yaml filename when --appyaml is not supplied.
  DEFAULT_APPYAML = 'app.yaml'
  # Checkpoint file written to the output directory so an interrupted
  # migration can be resumed.
  MIGRATION_PROGRESS_FILE = 'migration_progress.json'
  DEFAULT_SERVICE_NAME = 'default'
  # Only python27 is currently supported as a source runtime.
  SUPPORTED_GEN1_RUNTIMES = ('python27',)
  SERVICE_FIELD = 'service'
  PYTHON_GEN1_RUNTIME = 'python27'
  # Keys used inside migration_progress.json.
  APP_YAML_FIELD = 'app_yaml'
  PROCESSED_FILES_FIELD = 'processed_files'

  def __init__(self, api_client, args):
    """Initializes the Gen1toGen2Migration utility class.

    Args:
      api_client: The AppEngine API client.
      args: The argparse arguments; expected to carry `appyaml` and
        `output_dir` attributes.
    """
    log.debug(args)
    self.api_client = api_client
    # Sources are read from the current working directory.
    self.input_dir = os.getcwd()
    # if app.yaml is not provided, use app.yaml in current directory
    if args.appyaml:
      self.appyaml_path = os.path.relpath(args.appyaml)
    else:
      log.info('appyaml not provided. Using app.yaml in current directory.')
      self.appyaml_path = os.path.join(self.input_dir, self.DEFAULT_APPYAML)
    self.output_dir = os.path.abspath(args.output_dir)
    self.project = properties.VALUES.core.project.Get()

  def StartMigration(self):
    """Starts the migration process.

    Raises:
      MissingGen1ApplicationError: If the provided project does not contain an
        AppEngine version with a Gen1 runtime.
    """
    app_yaml_content = self.ValidateAppyamlAndGetContents()
    # If service is not present in app.yaml, use default service
    if app_yaml_content.get(self.SERVICE_FIELD):
      service_name = app_yaml_content.get(self.SERVICE_FIELD)
    else:
      service_name = self.DEFAULT_SERVICE_NAME
      log.status.Print(
          'Service name not found in app.yaml. Using default service.'
      )
    log.info('service_name: {0}'.format(service_name))
    # Check if the project has a Gen 1 version deployed.
    if not self.api_client.CheckGen1AppId(service_name, self.project):
      raise exceptions.MissingGen1ApplicationError(self.project)
    # Check status of the migration i.e. new migration or resumed migration.
    is_new_migration = self.CheckOutputDirectoryAndGetMigrationStatus()
    if is_new_migration:
      self.StartNewMigration(service_name)
    else:
      self.ResumeMigration(service_name)

  def ValidateAppyamlAndGetContents(self):
    """Validates the app.yaml file and returns its contents.

    Returns:
      The contents of the app.yaml file.

    Raises:
      FileNotFoundError: If the app.yaml file is not found.
      UnsupportedRuntimeError: If the runtime in app.yaml is not a valid Gen 1
        runtime.
    """
    if not path.exists(self.appyaml_path):
      raise exceptions.FileNotFoundError(self.appyaml_path)
    # If the runtime in app.yaml is not a supported Gen 1 runtime or is not
    # present, raise an error.
    appyaml_content = yaml.load_path(self.appyaml_path)
    if appyaml_content.get('runtime') not in self.SUPPORTED_GEN1_RUNTIMES:
      raise exceptions.UnsupportedRuntimeError(
          self.appyaml_path, self.SUPPORTED_GEN1_RUNTIMES
      )
    return appyaml_content

  def CheckOutputDirectoryAndGetMigrationStatus(self):
    """Check if output directory exists and decide the migration status.

    If an output directory does not exist, we create one and decide that it is a
    new migration.

    Returns:
      Boolean: True for new migration, False for resuming migration.

    Raises:
      InvalidOutputDirectoryError: If the output directory is not empty and does
        not contain a migration_progress.json file.
    """
    if not os.path.exists(self.output_dir):
      os.makedirs(self.output_dir)
      log.info('Creating directory: {0}'.format(self.output_dir))
      return True
    # Check if the directory is empty
    if not os.listdir(self.output_dir):
      log.info('Output directory {0} is empty.'.format(self.output_dir))
      return True
    # Check if migration_progress.json exists; its presence marks a previous,
    # partially-completed run of this tool.
    if self.MIGRATION_PROGRESS_FILE in os.listdir(self.output_dir):
      log.warning(
          'Output directory {0} is not empty. Resuming migration...'.format(
              self.output_dir
          )
      )
      return False
    # Raise error if output directory is not empty and does not contain a
    # migration_progress.json file.
    raise exceptions.InvalidOutputDirectoryError(self.output_dir)

  def StartNewMigration(self, service_name):
    """Flow for starting a new migration.

    Args:
      service_name: The service name.
    """
    log.info('input_dir: {0}'.format(self.input_dir))
    appyaml_filename = os.path.basename(self.appyaml_path)
    # Copy all files from input directory to output directory except app.yaml,
    # files with .py extension and the output directory itself.
    shutil.copytree(
        self.input_dir,
        self.output_dir,
        ignore=shutil.ignore_patterns(
            '*.py', appyaml_filename, pathlib.PurePath(self.output_dir).name
        ),
        dirs_exist_ok=True,
    )
    log.status.Print('Copying files to output directory')
    # Create a migration progress file.
    progress_file = os.path.join(self.output_dir, self.MIGRATION_PROGRESS_FILE)
    migration_progress = {}
    # Write the migrated app.yaml to the output directory.
    self.WriteMigratedYaml(
        service_name,
        os.path.join(self.output_dir, appyaml_filename),
        migration_progress,
        progress_file,
    )
    requirements_file = os.path.join(self.output_dir, 'requirements.txt')
    # Write the migrated code to the output directory.
    self.WriteMigratedCode(
        service_name, migration_progress, progress_file, requirements_file
    )
    log.status.Print('Migration completed.')

  def ResumeMigration(self, service_name):
    """Flow for a resumed migration.

    Args:
      service_name: The service name.

    Raises:
      InvalidOutputDirectoryError: If the output directory is not empty and does
        not contain a migration_progress.json file.
    """
    log.info('input_dir: {0}'.format(self.input_dir))
    # Load the migration progress file.
    progress_file = os.path.join(self.output_dir, self.MIGRATION_PROGRESS_FILE)
    with files.FileReader(progress_file) as pf:
      migration_progress = json.load(pf)
    # If app.yaml is not present in migration_progress, migrate it.
    # NOTE: the literal 'app_yaml' mirrors APP_YAML_FIELD.
    if self.appyaml_path not in migration_progress.get('app_yaml', ''):
      log.info(
          '{0} not present in migration_progress. Will be migrated.'.format(
              self.appyaml_path
          )
      )
      self.WriteMigratedYaml(
          service_name,
          os.path.join(self.output_dir, os.path.basename(self.appyaml_path)),
          migration_progress,
          progress_file,
      )
    requirements_file = os.path.join(self.output_dir, 'requirements.txt')
    # Write the migrated code to the output directory.
    self.WriteMigratedCode(
        service_name, migration_progress, progress_file, requirements_file
    )
    log.status.Print('Migration completed.')

  def WriteMigratedYaml(
      self, service_name, output_path, migration_progress, progress_file
  ):
    """Writes the migrated app.yaml to the output directory.

    Args:
      service_name: The service name.
      output_path: The path to the output directory.
      migration_progress: The migration progress dictionary.
      progress_file: The path to the migration progress file.
    """
    appyaml_content = files.ReadFileContents(self.appyaml_path)
    appyaml_filename = os.path.basename(self.appyaml_path)
    # The API performs the actual yaml translation.
    response = self.api_client.MigrateConfigYaml(
        self.project, appyaml_content, self.PYTHON_GEN1_RUNTIME, service_name
    )
    migrated_yaml_contents = yaml.load(response.configAsString)
    with files.FileWriter(output_path) as f:
      yaml.dump(migrated_yaml_contents, f)
    # Update the migration progress file.
    migration_progress[self.APP_YAML_FIELD] = self.appyaml_path
    with files.FileWriter(progress_file, 'w') as pf:
      json.dump(migration_progress, pf, indent=4)
    log.status.Print(
        'Config modifications applied to {0}.'.format(appyaml_filename)
    )

  def WriteMigratedCode(
      self, service_name, migration_progress, progress_file, requirements_file
  ):
    """Writes the migrated code to the output directory.

    Args:
      service_name: The service name.
      migration_progress: The migration progress dictionary.
      progress_file: The path to the migration progress file.
      requirements_file: The path to the requirements file.
    """
    # Recursively walk through the input directory.
    for dirpath, dirname, filenames in os.walk(self.input_dir):
      # Prune the output directory in place so os.walk never descends into it.
      dirname[:] = [
          d
          for d in dirname
          if d != pathlib.PurePath(self.output_dir).name
      ]
      for filename in filenames:
        file_path = os.path.join(dirpath, filename)
        if pathlib.Path(file_path).suffix == '.py':
          # If the file is already present in the migration_progress, skip it.
          if (
              self.PROCESSED_FILES_FIELD in migration_progress
              and file_path in migration_progress[self.PROCESSED_FILES_FIELD]
          ):
            log.info(
                'File {0} already exists. Will be skipped.'.format(file_path)
            )
            continue
          log.status.Print('Currently on file: {0}'.format(file_path))
          file_content = files.ReadFileContents(file_path)
          transformed_code, requirements_list = self.GetMigratedCode(
              file_content, service_name
          )
          output_path = os.path.join(
              self.output_dir, os.path.relpath(file_path, self.input_dir)
          )
          # Get the existing requirements from the requirements file.
          existing_requirements = []
          if os.path.exists(requirements_file):
            requirements_file_contents = files.ReadFileContents(
                requirements_file
            )
            if requirements_file_contents:
              existing_requirements = requirements_file_contents.split('\n')
          # Add the new requirements to the existing requirements,
          # de-duplicating against what is already listed.
          for requirement in requirements_list:
            if requirement not in existing_requirements:
              existing_requirements.append(requirement)
          files.WriteFileContents(
              requirements_file, '\n'.join(existing_requirements)
          )
          # If the file already exists in the output_dir and not in
          # migration_progress, do not overwrite it; write to a
          # timestamp-suffixed name instead.
          if os.path.exists(output_path):
            new_output_path = (
                os.path.splitext(output_path)[0]
                + '_'
                + str(time.time()).split('.')[0]
                + '.py'
            )
            log.warning(
                'File {0} already exists. Will be renamed to {1}.'.format(
                    file_path, new_output_path
                )
            )
            output_path = new_output_path
          files.WriteFileContents(
              output_path, transformed_code, overwrite=False
          )
          # Update the migration progress file after each file so a resumed
          # run can skip it.
          if self.PROCESSED_FILES_FIELD not in migration_progress:
            migration_progress[self.PROCESSED_FILES_FIELD] = []
          migration_progress[self.PROCESSED_FILES_FIELD].append(file_path)
          with files.FileWriter(progress_file, 'w') as pf:
            json.dump(migration_progress, pf, indent=4)

  def GetMigratedCode(
      self, file_content, service_name
  ):
    """Calls MigrateCodeFile and gets the migrated code for a python file.

    Args:
      file_content: The contents of the python file.
      service_name: The service name.

    Returns:
      transformed_code: The migrated code for the python file.
      requirements_list: The list of requirements for the python file.
    """
    operation = self.api_client.MigrateCodeFile(
        self.project, file_content, self.PYTHON_GEN1_RUNTIME, service_name
    )
    transformed_code = ''
    requirements_list = []
    # The operation response is a proto map; pick out the two known keys.
    operation_response = operation.response.additionalProperties
    for prop in operation_response:
      if prop.key == 'codeAsString':
        transformed_code = prop.value.string_value
      if prop.key == 'python3Requirements':
        requirements = prop.value.array_value.entries
        for entry in requirements:
          requirements_list.append(entry.string_value.strip())
    return transformed_code, requirements_list

View File

@@ -0,0 +1,172 @@
# -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds exceptions raised by commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.app import deploy_command_util
from googlecloudsdk.api_lib.app import exceptions
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.api_lib.services import enable_api
from googlecloudsdk.api_lib.services import exceptions as s_exceptions
from googlecloudsdk.appengine.admin.tools.conversion import convert_yaml
from googlecloudsdk.core import log
import six
DEPLOY_SERVICE_MESSAGE_TEMPLATE = """\
descriptor: [{descriptor}]
source: [{source}]
target project: [{project}]
target service: [{service}]
target version: [{version}]
target url: [{url}]
target service account: [{service_account}]
"""
DEPLOY_CONFIG_MESSAGE_TEMPLATE = """\
descriptor: [{descriptor}]
type: [{type}]
target project: [{project}]
"""
CONFIG_TYPES = {
'index': 'datastore indexes',
'cron': 'cron jobs',
'queue': 'task queues',
'dispatch': 'routing rules',
}
PROMOTE_MESSAGE_TEMPLATE = """\
(add --promote if you also want to make this service available from
[{default_url}])
"""
RUNTIME_MISMATCH_MSG = ("You've generated a Dockerfile that may be customized "
'for your application. To use this Dockerfile, '
'the runtime field in [{0}] must be set to custom.')
QUEUE_TASKS_WARNING = """\
Caution: You are updating queue configuration. This will override any changes
performed using 'gcloud tasks'. More details at
https://cloud.google.com/tasks/docs/queue-yaml
"""
def DisplayProposedDeployment(app,
                              project,
                              services,
                              configs,
                              version,
                              promote,
                              service_account,
                              api_version='v1'):
  """Prints the details of the proposed deployment.

  Args:
    app: Application resource for the current application (required if any
      services are deployed, otherwise ignored).
    project: The name of the current project.
    services: [deployables.Service], The services being deployed.
    configs: [yaml_parsing.ConfigYamlInfo], The configurations being updated.
    version: The version identifier of the application to be deployed.
    promote: Whether the newly deployed version will receive all traffic
      (this affects deployed URLs).
    service_account: The service account that the deployed version will run as.
    api_version: Version of the yaml file parser to use. Use 'v1' by default.

  Returns:
    dict (str->str), a mapping of service names to deployed service URLs.
    This includes information on to-be-deployed services (including service
    name, version number, and deployed URLs) as well as configurations.
  """
  deployed_urls = {}
  if services:
    if app is None:
      raise TypeError('If services are deployed, must provide `app` parameter.')
    log.status.Print('Services to deploy:\n')
    for service in services:
      use_ssl = deploy_command_util.UseSsl(service.service_info)
      # When promoting, the service URL has no version prefix.
      url = deploy_command_util.GetAppHostname(
          app=app, service=service.service_id,
          version=None if promote else version, use_ssl=use_ssl)
      deployed_urls[service.service_id] = url
      schema_parser = convert_yaml.GetSchemaParser(api_version)
      try:
        yaml_service_account = schema_parser.ConvertValue(
            service.service_info.parsed.ToDict()).get('serviceAccount', None)
      except ValueError as e:
        raise exceptions.ConfigError(
            '[{f}] could not be converted to the App Engine configuration '
            'format for the following reason: {msg}'.format(
                f=service.service_info, msg=six.text_type(e)))
      # Precedence: explicit flag > app.yaml entry > the app's own account.
      if service_account:
        display_service_account = service_account
      elif yaml_service_account:
        display_service_account = yaml_service_account
      else:
        display_service_account = app.serviceAccount
      log.status.Print(
          DEPLOY_SERVICE_MESSAGE_TEMPLATE.format(
              project=project,
              service=service.service_id,
              version=version,
              descriptor=service.descriptor,
              source=service.source,
              url=url,
              service_account=display_service_account))
      if not promote:
        default_url = deploy_command_util.GetAppHostname(
            app=app, service=service.service_id, use_ssl=use_ssl)
        log.status.Print(PROMOTE_MESSAGE_TEMPLATE.format(
            default_url=default_url))
  if configs:
    DisplayProposedConfigDeployments(project, configs)
  return deployed_urls
def DisplayProposedConfigDeployments(project, configs):
  """Prints the details of the proposed config deployments.

  Args:
    project: The name of the current project.
    configs: [yaml_parsing.ConfigYamlInfo], The configurations being deployed.
  """
  log.status.Print('Configurations to update:\n')
  for config_info in configs:
    log.status.Print(DEPLOY_CONFIG_MESSAGE_TEMPLATE.format(
        project=project, type=CONFIG_TYPES[config_info.config],
        descriptor=config_info.file))
    if config_info.name != yaml_parsing.ConfigYamlInfo.QUEUE:
      continue
    # Updating queue.yaml overrides `gcloud tasks` edits, so warn unless we
    # can positively determine that the Cloud Tasks API is disabled. If the
    # check itself fails, presume the API is enabled and accept the false
    # positive. (If useful, this logic can be broken out and moved to
    # enable_api.py, under IsServiceMaybeEnabled(...) or similar.)
    try:
      tasks_api_maybe_enabled = enable_api.IsServiceEnabled(
          project, 'cloudtasks.googleapis.com')
    except s_exceptions.ListServicesException:
      tasks_api_maybe_enabled = True  # We can't know, so presume enabled.
    if tasks_api_maybe_enabled:
      log.warning(QUEUE_TASKS_WARNING)

View File

@@ -0,0 +1,185 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods for iterating over source files for deployment.
Based on the runtime and environment, this can entail generating a new
.gcloudignore, using an existing .gcloudignore, or using existing skip_files.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import runtime_registry
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.command_lib.app import exceptions
from googlecloudsdk.command_lib.util import gcloudignore
from googlecloudsdk.core import exceptions as core_exceptions
_NODE_GCLOUDIGNORE = '\n'.join([
gcloudignore.DEFAULT_IGNORE_FILE,
'# Node.js dependencies:',
'node_modules/'
])
_PHP_GCLOUDIGNORE = '\n'.join([
gcloudignore.DEFAULT_IGNORE_FILE,
'# PHP Composer dependencies:',
'/vendor/'
])
_PYTHON_GCLOUDIGNORE = '\n'.join([
gcloudignore.DEFAULT_IGNORE_FILE,
'# Python pycache:',
'__pycache__/',
'# Ignored by the build system',
'/setup.cfg'
])
_GO_GCLOUDIGNORE = '\n'.join([
gcloudignore.DEFAULT_IGNORE_FILE,
'# Binaries for programs and plugins',
'*.exe',
'*.exe~',
'*.dll',
'*.so',
'*.dylib',
'# Test binary, build with `go test -c`',
'*.test',
'# Output of the go coverage tool, specifically when used with LiteIDE',
'*.out'
])
_JAVA_GCLOUDIGNORE = '\n'.join([
gcloudignore.DEFAULT_IGNORE_FILE,
'# Target directory for maven builds',
'target/',
])
_GCLOUDIGNORE_REGISTRY = {
runtime_registry.RegistryEntry(
env.NODE_TI_RUNTIME_EXPR, {env.STANDARD}): _NODE_GCLOUDIGNORE,
runtime_registry.RegistryEntry(
env.PHP_TI_RUNTIME_EXPR, {env.STANDARD}): _PHP_GCLOUDIGNORE,
runtime_registry.RegistryEntry(
env.PYTHON_TI_RUNTIME_EXPR, {env.STANDARD}): _PYTHON_GCLOUDIGNORE,
runtime_registry.RegistryEntry(
env.GO_TI_RUNTIME_EXPR, {env.STANDARD}): _GO_GCLOUDIGNORE,
runtime_registry.RegistryEntry(
env.JAVA_TI_RUNTIME_EXPR, {env.STANDARD}): _JAVA_GCLOUDIGNORE,
}
class SkipFilesError(core_exceptions.Error):
  """Raised when skip_files conflicts with a .gcloudignore or the runtime."""
def _GetGcloudignoreRegistry():
  """Returns the registry of runtime-specific default .gcloudignore contents.

  The registry maps runtime/environment pairs to the .gcloudignore content to
  generate for them; lookups for unlisted runtimes yield the falsy default.
  """
  registry = runtime_registry.Registry(_GCLOUDIGNORE_REGISTRY, default=False)
  return registry
def GetSourceFiles(upload_dir, skip_files_regex, has_explicit_skip_files,
                   runtime, environment, source_dir, ignore_file=None):
  """Returns an iterator for accessing all source files to be uploaded.

  This method uses several implementations based on the provided runtime and
  env. The rules are as follows, in decreasing priority:
  1) For some runtimes/envs (i.e. those defined in _GCLOUDIGNORE_REGISTRY), we
     completely ignore skip_files and generate a runtime-specific .gcloudignore
     if one is not present, or use the existing .gcloudignore.
  2) For all other runtimes/envs, we:
    2a) If ignore_file is not none, use custom ignore_file to skip files. If
        the specified file does not exist, raise error. We also raise an error
        if the user has both ignore file and explicit skip_files defined.
    2b) If user does not specify ignore_file, check for an existing
        .gcloudignore and use that if one exists. We also raise an error if
        the user has both a .gcloudignore file and explicit skip_files defined.
    2c) If there is no .gcloudignore, we use the provided skip_files.

  Args:
    upload_dir: str, path to upload directory, the files to be uploaded.
    skip_files_regex: str, skip_files to use if necessary - see above rules for
      when this could happen. This can be either the user's explicit skip_files
      as defined in their app.yaml or the default skip_files we implicitly
      provide if they didn't define any.
    has_explicit_skip_files: bool, indicating whether skip_files_regex was
      explicitly defined by the user.
    runtime: str, runtime as defined in app.yaml.
    environment: env.Environment enum.
    source_dir: str, path to original source directory, for writing generated
      files. May be the same as upload_dir.
    ignore_file: custom ignore_file name.
      Override .gcloudignore file to customize files to be skipped.

  Raises:
    SkipFilesError: if you are using a runtime that no longer supports
      skip_files (such as those defined in _GCLOUDIGNORE_REGISTRY), or if using
      a runtime that still supports skip_files, but both skip_files and
      a .gcloudignore file are present.
    FileNotFoundError: if the custom ignore-file does not exist.

  Returns:
    A list of path names of source files to be uploaded.
  """
  gcloudignore_registry = _GetGcloudignoreRegistry()
  registry_entry = gcloudignore_registry.Get(runtime, environment)
  if registry_entry:
    # Case 1: this runtime mandates .gcloudignore; skip_files is forbidden.
    if has_explicit_skip_files:
      raise SkipFilesError(
          'skip_files cannot be used with the [{}] runtime. '
          'Ignore patterns are instead expressed in '
          'a .gcloudignore file. For information on the format and '
          'syntax of .gcloudignore files, see '
          'https://cloud.google.com/sdk/gcloud/reference/topic/gcloudignore.'
          .format(runtime))
    # Write the runtime-specific default .gcloudignore to source_dir if one
    # does not already exist, then choose files according to it.
    file_chooser = gcloudignore.GetFileChooserForDir(
        source_dir,
        default_ignore_file=registry_entry,
        write_on_disk=True,
        gcloud_ignore_creation_predicate=lambda unused_dir: True,
        ignore_file=ignore_file,
        include_gitignore=False)
    it = file_chooser.GetIncludedFiles(upload_dir, include_dirs=False)
  elif ignore_file:
    # Case 2a: a custom ignore file was requested; it must exist and must not
    # be combined with explicit skip_files.
    if os.path.exists(os.path.join(source_dir, ignore_file)):
      if has_explicit_skip_files:
        raise SkipFilesError(
            'Cannot have both an ignore file {0} and skip_files defined in '
            'the same application. We recommend you translate your skip_files '
            'ignore patterns to your {0} file. '.format(ignore_file))
      it = gcloudignore.GetFileChooserForDir(source_dir, ignore_file=ignore_file)\
          .GetIncludedFiles(upload_dir, include_dirs=False)
    else:
      raise exceptions.FileNotFoundError('File {0} referenced by --ignore-file '
                                         'does not exist.'.format(ignore_file))
  elif os.path.exists(os.path.join(source_dir, gcloudignore.IGNORE_FILE_NAME)):
    # Case 2b: an existing .gcloudignore wins over skip_files, and combining
    # the two is an error.
    if has_explicit_skip_files:
      raise SkipFilesError(
          'Cannot have both a .gcloudignore file and skip_files defined in '
          'the same application. We recommend you translate your skip_files '
          'ignore patterns to your .gcloudignore file. See '
          'https://cloud.google.com/sdk/gcloud/reference/topic/gcloudignore '
          'for more information about gcloudignore.')
    it = gcloudignore.GetFileChooserForDir(source_dir).GetIncludedFiles(
        upload_dir, include_dirs=False)
  else:
    # Case 2c: fall back to the (explicit or default) skip_files regex.
    it = util.FileIterator(upload_dir, skip_files_regex)
  return list(it)

View File

@@ -0,0 +1,246 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for `app instances *` commands using SSH."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import version_util
from googlecloudsdk.api_lib.compute import base_classes as compute_base_classes
from googlecloudsdk.api_lib.compute import lister
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.app import exceptions as command_exceptions
from googlecloudsdk.command_lib.projects import util as projects_util
from googlecloudsdk.command_lib.util.ssh import ssh
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.console import console_io
_ENABLE_DEBUG_WARNING = """\
This instance is serving live application traffic. Any changes made could
result in downtime or unintended consequences."""
# Used by OpenSSH for naming a logical host in the known_hosts file, rather than
# relying on IP or DNS. Flexible instance IDs are unique per project.
_HOST_KEY_ALIAS = 'gae.{project}.{instance_id}'
DETAILED_HELP = """
*{command}* resolves the instance's IP address and pre-populates the
VM with a public key managed by gcloud. If the gcloud managed key pair
does not exist, it is generated the first time an SSH command is run,
which may prompt you for a passphrase for the private key encryption.
All SSH commands require the OpenSSH client suite to be installed on
Linux and Mac OS X. On Windows, the Google Cloud CLI comes with a bundled
PuTTY suite instead, so it has no external dependencies."""
class ConnectionDetails(object):
  """Details about an SSH connection, for assembling an SSH command.

  Attributes:
    remote: the remote endpoint (host/user) to connect to.
    options: dict, SSH options for the connection.
  """

  def __init__(self, remote, options):
    self.remote = remote
    self.options = options

  def __eq__(self, other):
    # Value equality: same concrete type and identical attribute values.
    same_type = isinstance(other, self.__class__)
    return same_type and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)

  def __repr__(self):
    return 'ConnectionDetails(**{})'.format(self.__dict__)
def GetComputeProject(release_track):
  """Fetches the Compute Engine project resource for the current gcloud project.

  Args:
    release_track: base.ReleaseTrack, the release track of the calling command.

  Returns:
    The compute project message returned by the Compute 'projects.Get' call.
  """
  holder = compute_base_classes.ComputeApiHolder(release_track)
  client = holder.client
  project_ref = projects_util.ParseProject(
      properties.VALUES.core.project.GetOrFail())
  get_request = (client.apitools_client.projects, 'Get',
                 client.messages.ComputeProjectsGetRequest(
                     project=project_ref.projectId))
  return client.MakeRequests([get_request])[0]
def _ContainsPort22(allowed_ports):
  """Checks if the given list of allowed ports covers port 22.

  Args:
    allowed_ports: list of str, each entry either a single port (e.g. '22') or
      an inclusive port range (e.g. '10-100').

  Returns:
    bool, True if any entry is exactly port 22 or is a range containing 22.

  Raises:
    ValueError: if a port value is not of type string, or a range bound is not
      a valid integer.
  """
  for port in allowed_ports:
    # Previously this raised ValueError only to catch and print() it
    # immediately, then fell through and crashed with TypeError on the
    # `'-' in port` check below. Surface the documented error instead.
    if not isinstance(port, str):
      raise ValueError('Port value must be of type string')
    if port == '22':
      return True
    if '-' in port:
      bounds = port.split('-')
      # Inclusive range check, e.g. '20-25' contains 22.
      if int(bounds[0]) <= 22 <= int(bounds[1]):
        return True
  return False
def PopulatePublicKey(
    api_client,
    service_id,
    version_id,
    instance_id,
    public_key,
    oslogin_state_user,
    oslogin_state_enabled
):
  """Enable debug mode on and send SSH keys to a flex instance.

  Common method for SSH-like commands, does the following:
  - Makes sure that the service/version/instance specified exists and is of the
    right type (Flexible).
  - If not already done, prompts and enables debug on the instance.
  - Populates the public key onto the instance.

  Args:
    api_client: An appengine_api_client.AppEngineApiClient.
    service_id: str, The service ID.
    version_id: str, The version ID.
    instance_id: str, The instance ID.
    public_key: ssh.Keys.PublicKey, Public key to send.
    oslogin_state_user: str, The user to connect as.
    oslogin_state_enabled: bool, Whether OS Login is enabled.

  Raises:
    InvalidInstanceTypeError: The instance is not supported for SSH.
    MissingVersionError: The version specified does not exist.
    MissingInstanceError: The instance specified does not exist.
    UnattendedPromptError: Not running in a tty.
    OperationCancelledError: User cancelled the operation.

  Returns:
    ConnectionDetails, the details to use for SSH/SCP for the SSH
    connection.
  """
  # Translate a 404 on the version into a user-facing MissingVersionError.
  try:
    version = api_client.GetVersionResource(
        service=service_id, version=version_id)
  except apitools_exceptions.HttpNotFoundError:
    raise command_exceptions.MissingVersionError(
        '{}/{}'.format(service_id, version_id))
  version = version_util.Version.FromVersionResource(version, None)
  # Only Flexible instances support this flow; give environment-specific
  # guidance for Managed VMs and Standard.
  if version.environment is not env.FLEX:
    if version.environment is env.MANAGED_VMS:
      environment = 'Managed VMs'
      msg = 'Use `gcloud compute ssh` for Managed VMs instances.'
    else:
      environment = 'Standard'
      msg = None
    raise command_exceptions.InvalidInstanceTypeError(environment, msg)
  # Build the fully-qualified instance resource reference.
  res = resources.REGISTRY.Parse(
      instance_id,
      params={
          'appsId': properties.VALUES.core.project.GetOrFail,
          'versionsId': version_id,
          'instancesId': instance_id,
          'servicesId': service_id,
      },
      collection='appengine.apps.services.versions.instances')
  rel_name = res.RelativeName()
  try:
    instance = api_client.GetInstanceResource(res)
  except apitools_exceptions.HttpNotFoundError:
    raise command_exceptions.MissingInstanceError(rel_name)
  # Enabling debug mode is disruptive on a serving instance, so confirm with
  # the user first (raises if unattended or cancelled).
  if not instance.vmDebugEnabled:
    log.warning(_ENABLE_DEBUG_WARNING)
    console_io.PromptContinue(cancel_on_no=True, throw_if_unattended=True)
  user = oslogin_state_user
  instance_ip_mode_enum = (
      api_client.messages.Network.InstanceIpModeValueValuesEnum)
  # For internal-IP-only instances, connect via the instance id rather than
  # the (absent) external VM IP.
  host = (
      instance.id if
      version.version.network.instanceIpMode is instance_ip_mode_enum.INTERNAL
      else instance.vmIp)
  remote = ssh.Remote(host=host, user=user)
  # Without OS Login, the public key must be pushed to the instance manually
  # (DebugInstance both enables debug mode and installs the key).
  if not oslogin_state_enabled:
    ssh_key = '{user}:{key} {user}'.format(user=user, key=public_key.ToEntry())
    log.status.Print('Sending public key to instance [{}].'.format(rel_name))
    api_client.DebugInstance(res, ssh_key)
  # Host key checking uses a project/instance-scoped alias because flexible
  # instance IDs are unique per project (see _HOST_KEY_ALIAS).
  options = {
      'IdentitiesOnly': 'yes',  # No ssh-agent as of yet
      'UserKnownHostsFile': ssh.KnownHosts.DEFAULT_PATH,
      'CheckHostIP': 'no',
      'HostKeyAlias': _HOST_KEY_ALIAS.format(project=api_client.project,
                                             instance_id=instance_id)}
  return ConnectionDetails(remote, options)
def FetchFirewallRules():
  """Fetches the firewall rules for the current project.

  Returns:
    A list of firewall rules.
  """
  holder = compute_base_classes.ComputeApiHolder(base.ReleaseTrack.GA)
  client = holder.client
  project_resource = holder.resources.Parse(
      properties.VALUES.core.project.GetOrFail(),
      collection='compute.projects',
  )
  # pylint: disable=protected-access
  frontend = lister._Frontend(
      None,
      None,
      lister.GlobalScope([project_resource]),
  )
  firewalls_lister = lister.GlobalLister(
      client, client.apitools_client.firewalls
  )
  return lister.Invoke(frontend, firewalls_lister)
def FilterFirewallRules(firewall_rules):
  """Returns the subset of firewall rules that allow ingress to port 22."""
  ssh_rules = []
  for rule in firewall_rules:
    if rule.get('direction') != 'INGRESS':
      continue
    allowed = rule.get('allowed')
    if not allowed:
      continue
    # Only the first 'allowed' entry's ports are inspected, matching the
    # original behavior.
    ports = allowed[0].get('ports')
    if ports and _ContainsPort22(ports):
      ssh_rules.append(rule)
  return ssh_rules

View File

@@ -0,0 +1,637 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code to provide a hook for staging.
Some App Engine runtimes require an additional staging step before deployment
(e.g. when deploying compiled artifacts, or vendoring code that normally lives
outside of the app directory). This module contains (1) a registry mapping
runtime/environment combinations to staging commands, and (2) code to run said
commands.
The interface is defined as follows:
- A staging command is an executable (binary or script) that takes two
positional parameters: the path of the `<service>.yaml` in the directory
containing the unstaged application code, and the path of an empty directory
in which to stage the application code.
- On success, the STDOUT and STDERR of the staging command are logged at the
INFO level. On failure, a StagingCommandFailedError is raised containing the
STDOUT and STDERR of the staging command (which are surfaced to the user as an
ERROR message).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import abc
import io
import os
import re
import shutil
import tempfile
from googlecloudsdk.api_lib.app import env
from googlecloudsdk.api_lib.app import runtime_registry
from googlecloudsdk.command_lib.app import jarfile
from googlecloudsdk.command_lib.util import java
from googlecloudsdk.core import config
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import execution_utils
from googlecloudsdk.core import log
from googlecloudsdk.core.updater import update_manager
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import platforms
import six
_JAVA_APPCFG_ENTRY_POINT = 'com.google.appengine.tools.admin.AppCfg'
_JAVA_APPCFG_STAGE_FLAGS = ['--enable_new_staging_defaults']
_STAGING_COMMAND_OUTPUT_TEMPLATE = """\
------------------------------------ STDOUT ------------------------------------
{out}\
------------------------------------ STDERR ------------------------------------
{err}\
--------------------------------------------------------------------------------
"""
class StagingCommandNotFoundError(exceptions.Error):
  """Base error indicating that a staging command could not be found.

  Subclasses describe the specific reason (e.g. no SDK root).
  """
class NoSdkRootError(StagingCommandNotFoundError):
  """Raised when the Cloud SDK installation root cannot be located."""

  def __init__(self):
    super(NoSdkRootError, self).__init__(
        'No SDK root could be found. Please check your installation.')
class NoMainClassError(exceptions.Error):
  """Raised when a deployed jar lacks a Main-Class manifest entry."""

  def __init__(self):
    super(NoMainClassError, self).__init__(
        'Invalid jar file: it does not contain a Main-Class Manifest entry.')
class MavenPomNotSupported(exceptions.Error):
  """Raised for Maven source deploys of legacy appengine-web.xml projects."""

  def __init__(self):
    super(MavenPomNotSupported, self).__init__(
        'Maven source deployment is not supported for legacy Java 8/11/17/21'
        ' GAE projects configured with appengine-web.xml. Please read '
        'https://cloud.google.com/appengine/docs/standard/java-gen2/using-maven'
    )
class GradleBuildNotSupported(exceptions.Error):
  """Raised for Gradle source deploys of legacy appengine-web.xml projects."""

  def __init__(self):
    super(GradleBuildNotSupported, self).__init__(
        'Gradle source deployment is not supported for legacy Java'
        ' 8/11/17/21 GAE projects configured with appengine-web.xml. Read '
        'https://cloud.google.com/appengine/docs/standard/java-gen2/using-gradle'
    )
class StagingCommandFailedError(exceptions.Error):
  """Raised when a staging command exits with a non-zero return code.

  The message embeds the command line, the return code, and the captured
  STDOUT/STDERR of the failed command.
  """

  def __init__(self, args, return_code, output_message):
    super(StagingCommandFailedError, self).__init__(
        'Staging command [{0}] failed with return code [{1}].\n\n{2}'.format(
            ' '.join(args), return_code, output_message))
# TODO(b/65026284): eliminate "mappers" entirely by making a shim command
def _JavaStagingMapper(command_path, descriptor, app_dir, staging_dir):
  """Map a java staging request to the right args.

  Args:
    command_path: str, path to the jar tool file.
    descriptor: str, path to the `appengine-web.xml`.
    app_dir: str, path to the unstaged app directory.
    staging_dir: str, path to the empty staging dir.

  Raises:
    java.JavaError, if Java is not installed.

  Returns:
    [str], args for executable invocation.
  """
  del descriptor  # Unused, app_dir is sufficient
  java_bin = java.RequireJavaInstalled('local staging for java')
  # Invoke AppCfg via the java classpath with the staging flags, then the
  # `stage <app_dir> <staging_dir>` action.
  invocation = [java_bin, '-classpath', command_path, _JAVA_APPCFG_ENTRY_POINT]
  invocation.extend(_JAVA_APPCFG_STAGE_FLAGS)
  invocation.extend(['stage', app_dir, staging_dir])
  return invocation
class _Command(six.with_metaclass(abc.ABCMeta, object)):
  """Interface for a staging command to be invoked on the user source.

  This abstract class facilitates running an executable command that conforms
  to the "staging command" interface outlined in the module docstring.

  It implements the parts that are common to any such command while allowing
  interface implementors to swap out how the command is created.
  """

  @abc.abstractmethod
  def EnsureInstalled(self):
    """Ensure that the command is installed and available.

    May result in a command restart if installation is required.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def GetPath(self):
    """Returns the path to the command.

    Returns:
      str, the path to the command

    Raises:
      StagingCommandNotFoundError: if the staging command could not be found.
    """
    raise NotImplementedError()

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    """Get the args for the command to execute.

    Args:
      descriptor: str, path to the unstaged <service>.yaml or appengine-web.xml
      app_dir: str, path to the unstaged app directory
      staging_dir: str, path to the directory to stage in.
      explicit_appyaml: str or None, the app.yaml location
        to used for deployment.

    Returns:
      list of str, the args for the command to run
    """
    return [self.GetPath(), descriptor, app_dir, staging_dir]

  def Run(self, staging_area, descriptor, app_dir, explicit_appyaml=None):
    """Invokes a staging command with a given <service>.yaml and temp dir.

    Args:
      staging_area: str, path to the staging area.
      descriptor: str, path to the unstaged <service>.yaml or appengine-web.xml
      app_dir: str, path to the unstaged app directory
      explicit_appyaml: str or None, the app.yaml location
        to used for deployment.

    Returns:
      str, the path to the staged directory or None if staging was not
      required.

    Raises:
      StagingCommandFailedError: if the staging command process exited non-zero.
    """
    # Stage into a fresh temp directory under the staging area so repeated
    # runs cannot collide.
    staging_dir = tempfile.mkdtemp(dir=staging_area)
    # NOTE(review): explicit_appyaml is not forwarded to GetArgs here even
    # though GetArgs accepts it — confirm this is intentional.
    args = self.GetArgs(descriptor, app_dir, staging_dir)
    log.info('Executing staging command: [{0}]\n\n'.format(' '.join(args)))
    # Capture the command's output so it can be logged on success and
    # surfaced in the error on failure.
    out = io.StringIO()
    err = io.StringIO()
    return_code = execution_utils.Exec(
        args, no_exit=True, out_func=out.write, err_func=err.write)
    message = _STAGING_COMMAND_OUTPUT_TEMPLATE.format(
        out=out.getvalue(), err=err.getvalue())
    # Normalize Windows line endings before logging/raising.
    message = message.replace('\r\n', '\n')
    log.info(message)
    if return_code:
      raise StagingCommandFailedError(args, return_code, message)
    # Optionally use the custom app yaml if available:
    if explicit_appyaml:
      shutil.copyfile(explicit_appyaml, os.path.join(staging_dir, 'app.yaml'))
    return staging_dir
class NoopCommand(_Command):
  """A command that does nothing.

  Many runtimes do not require a staging step; this isn't a problem.
  """

  def EnsureInstalled(self):
    """Nothing to install."""

  def GetPath(self):
    """There is no executable backing this command."""
    return None

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    """No command line is ever constructed."""
    return None

  def Run(self, staging_area, descriptor, app_dir, explicit_appyaml=None):
    """Does nothing; returns None to signal that staging was not required."""
    return None

  def __eq__(self, other):
    return isinstance(other, NoopCommand)
class CreateJava21ProjectCommand(_Command):
  """A command that creates a java21 runtime app.yaml.

  Abstract-ish base for Maven/Gradle source deploys: subclasses must set
  `self.error` (the exception class to raise for legacy appengine-web.xml
  projects) and `self.ignore` (the build-output directory name to exclude).
  """

  def EnsureInstalled(self):
    """Nothing to install; staging is done with in-process file operations."""
    pass

  def GetPath(self):
    # No external executable backs this command.
    return

  def GetArgs(self, descriptor, staging_dir, appyaml=None):
    # NOTE(review): signature differs from _Command.GetArgs (no app_dir) —
    # never invoked since Run() is overridden; confirm before relying on it.
    return

  def Run(self, staging_area, descriptor, app_dir, explicit_appyaml=None):
    """Stages the project directory and ensures an app.yaml is present.

    Args:
      staging_area: str, path to the staging area.
      descriptor: str, path to the build descriptor (pom.xml / build.gradle).
      app_dir: str, path to the unstaged project directory.
      explicit_appyaml: str or None, an app.yaml to copy into the staging
        area instead of the project's own.

    Returns:
      str, the staging area path.

    Raises:
      The subclass-configured `self.error` if the project is a legacy
      appengine-web.xml project.
    """
    # Logic is: copy/symlink the project in the staged area, and create a
    # simple file app.yaml for runtime: java21 if it does not exist.
    # If it exists in the standard and documented default location
    # (in project_dir/src/main/appengine/app.yaml), copy it in the staged
    # area.
    appenginewebxml = os.path.join(app_dir, 'src', 'main', 'webapp', 'WEB-INF',
                                   'appengine-web.xml')
    if os.path.exists(appenginewebxml):
      # Legacy appengine-web.xml projects are not supported for source deploy.
      raise self.error()
    if explicit_appyaml:
      shutil.copyfile(explicit_appyaml, os.path.join(staging_area, 'app.yaml'))
    else:
      appyaml = os.path.join(app_dir, 'src', 'main', 'appengine', 'app.yaml')
      if os.path.exists(appyaml):
        # Put the user app.yaml at the root of the staging directory to deploy
        # as required by the Cloud SDK.
        shutil.copy2(appyaml, staging_area)
      else:
        # Create a very simple 2 liner app.yaml for Java21 runtime.
        files.WriteFileContents(
            os.path.join(staging_area, 'app.yaml'),
            'runtime: java21\ninstance_class: F2\n')
    for name in os.listdir(app_dir):
      # Do not deploy locally built artifacts, buildpack will clean this anyway.
      if name == self.ignore:
        continue
      srcname = os.path.join(app_dir, name)
      dstname = os.path.join(staging_area, name)
      try:
        # Prefer symlinks to avoid copying potentially large project trees.
        os.symlink(srcname, dstname)
      except (AttributeError, OSError):
        # AttributeError can occur if this is a Windows machine with an older
        # version of Python, in which case os.symlink is not defined. If this is
        # a newer version of Windows, but the user is not allowed to create
        # symlinks, we'll get an OSError saying "symbolic link privilege not
        # held." In both cases, we just fall back to copying the files.
        log.debug('Could not symlink files in staging directory, falling back '
                  'to copying')
        if os.path.isdir(srcname):
          files.CopyTree(srcname, dstname)
        else:
          shutil.copy2(srcname, dstname)
    return staging_area

  def __eq__(self, other):
    return isinstance(other, CreateJava21ProjectCommand)
class CreateJava21MavenProjectCommand(CreateJava21ProjectCommand):
  """A command that creates a java21 runtime app.yaml from a pom.xml file."""

  def __init__(self):
    # Legacy appengine-web.xml Maven projects are rejected with this error,
    # and Maven's local build output lives under target/ and is not deployed.
    self.error = MavenPomNotSupported
    self.ignore = 'target'
    super(CreateJava21MavenProjectCommand, self).__init__()

  def __eq__(self, other):
    # Bug fix: this previously compared against CreateJava21GradleProjectCommand
    # (copy/paste error), so a Maven command tested equal to a Gradle command
    # and unequal to another Maven command.
    return isinstance(other, CreateJava21MavenProjectCommand)
class CreateJava21GradleProjectCommand(CreateJava21ProjectCommand):
  """A command that creates a java21 runtime app.yaml from a build.gradle file."""

  def __init__(self):
    super(CreateJava21GradleProjectCommand, self).__init__()
    # Legacy appengine-web.xml Gradle projects are rejected with this error,
    # and Gradle's local build output lives under build/ and is not deployed.
    self.error = GradleBuildNotSupported
    self.ignore = 'build'

  def __eq__(self, other):
    return isinstance(other, CreateJava21GradleProjectCommand)
class CreateJava21YamlCommand(_Command):
  """A command that creates a java21 runtime app.yaml from a jar file."""

  def EnsureInstalled(self):
    """Nothing to install; staging is done with in-process file operations."""
    pass

  def GetPath(self):
    # No external executable backs this command.
    return None

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    # No command line is ever constructed; Run() is overridden below.
    return None

  def Run(self, staging_area, descriptor, app_dir, explicit_appyaml=None):
    """Stages a jar deployment: the jar, an app.yaml, and classpath deps.

    Args:
      staging_area: str, path to the staging area.
      descriptor: str, path to the jar file being deployed.
      app_dir: str, path to the directory containing the jar.
      explicit_appyaml: str or None, an app.yaml to use instead of the
        generated default.

    Returns:
      str, the staging area path.

    Raises:
      NoMainClassError: if the jar's manifest has no Main-Class entry.
    """
    # Logic is simple: copy the jar in the staged area, and create a simple
    # file app.yaml for runtime: java21.
    shutil.copy2(descriptor, staging_area)
    if explicit_appyaml:
      shutil.copyfile(explicit_appyaml, os.path.join(staging_area, 'app.yaml'))
    else:
      files.WriteFileContents(
          os.path.join(staging_area, 'app.yaml'),
          'runtime: java21\ninstance_class: F2\n',
          private=True)
    manifest = jarfile.ReadManifest(descriptor)
    if manifest:
      # An executable jar must declare its entry point.
      main_entry = manifest.main_section.get('Main-Class')
      if main_entry is None:
        raise NoMainClassError()
      # Mirror the jar's Class-Path dependencies into the staging area so the
      # deployed jar can resolve them at the same relative locations.
      classpath_entry = manifest.main_section.get('Class-Path')
      if classpath_entry:
        libs = classpath_entry.split()
        for lib in libs:
          dependent_file = os.path.join(app_dir, lib)
          # We copy the dep jar in the correct staging sub directories
          # and only if it exists,
          if os.path.isfile(dependent_file):
            destination = os.path.join(staging_area, lib)
            files.MakeDir(os.path.abspath(os.path.join(destination, os.pardir)))
            try:
              # Prefer symlinks; fall back to copying where unsupported.
              os.symlink(dependent_file, destination)
            except (AttributeError, OSError):
              log.debug(
                  'Could not symlink files in staging directory, falling back '
                  'to copying')
              shutil.copy(dependent_file, destination)
    return staging_area

  def __eq__(self, other):
    return isinstance(other, CreateJava21YamlCommand)
class StageAppWithoutAppYamlCommand(_Command):
  """A command that creates a staged directory with an optional app.yaml."""

  def EnsureInstalled(self):
    pass

  def GetPath(self):
    return None

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    return None

  def Run(self, staging_area, descriptor, app_dir, explicit_appyaml=None):
    """Copies the app into <staging_area>/scratch, adding app.yaml if given.

    Args:
      staging_area: str, path of the staging directory.
      descriptor: str, unused deployment descriptor path.
      app_dir: str, directory (or single file) to stage.
      explicit_appyaml: str or None, app.yaml to copy into the scratch area.

    Returns:
      str, path of the populated scratch directory.
    """
    scratch_area = os.path.join(staging_area, 'scratch')
    if not os.path.isdir(app_dir):
      # Single-file deployable: make the scratch dir and drop the file in.
      os.mkdir(scratch_area)
      shutil.copy2(app_dir, scratch_area)
    else:
      files.CopyTree(app_dir, scratch_area)
    if explicit_appyaml:
      shutil.copyfile(explicit_appyaml, os.path.join(scratch_area, 'app.yaml'))
    return scratch_area

  def __eq__(self, other):
    return isinstance(other, StageAppWithoutAppYamlCommand)
class _BundledCommand(_Command):
  """Represents a cross-platform command bundled with the Cloud SDK.

  Paths are relative to the Cloud SDK root directory.

  Attributes:
    _nix_path: str, the path to the executable on Linux and OS X.
    _windows_path: str, the path to the executable on Windows.
    _component: str or None, the name of the Cloud SDK component which
      contains the executable.
    _mapper: fn or None, function that maps a staging invocation to a command.
  """

  def __init__(self, nix_path, windows_path, component=None, mapper=None):
    super(_BundledCommand, self).__init__()
    self._nix_path = nix_path
    self._windows_path = windows_path
    self._component = component
    self._mapper = mapper or None

  @property
  def name(self):
    # Pick the per-OS relative path for the bundled executable.
    current_os = platforms.OperatingSystem.Current()
    if current_os is platforms.OperatingSystem.WINDOWS:
      return self._windows_path
    return self._nix_path

  def GetPath(self):
    """Returns the absolute path to the bundled command.

    Returns:
      str, the path to the command.

    Raises:
      NoSdkRootError: if no Cloud SDK root could be found (and therefore the
        command is not installed).
    """
    sdk_root = config.Paths().sdk_root
    if not sdk_root:
      raise NoSdkRootError()
    return os.path.join(sdk_root, self.name)

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    if not self._mapper:
      return super(_BundledCommand, self).GetArgs(descriptor, app_dir,
                                                  staging_dir)
    return self._mapper(self.GetPath(), descriptor, app_dir, staging_dir)

  def EnsureInstalled(self):
    # Commands with no associated component are always available.
    if self._component is None:
      return
    msg = ('The component [{component}] is required for staging this '
           'application.').format(component=self._component)
    update_manager.UpdateManager.EnsureInstalledAndRestart([self._component],
                                                           msg=msg)
class ExecutableCommand(_Command):
  """Represents a staging command that the user supplies.

  Attributes:
    _path: str, full path to the executable.
  """

  def __init__(self, path):
    super(ExecutableCommand, self).__init__()
    self._path = path

  @property
  def name(self):
    # Fixed: the basename was computed but never returned, so this property
    # always evaluated to None.
    return os.path.basename(self._path)

  def GetPath(self):
    return self._path

  def EnsureInstalled(self):
    pass

  def GetArgs(self, descriptor, app_dir, staging_dir, explicit_appyaml=None):
    if explicit_appyaml:
      return [
          self.GetPath(), descriptor, app_dir, staging_dir, explicit_appyaml
      ]
    else:
      return [self.GetPath(), descriptor, app_dir, staging_dir]

  @classmethod
  def FromInput(cls, executable):
    """Returns the command corresponding to the user input.

    Could be either of:
    - command on the $PATH or %PATH%
    - full path to executable (absolute or relative)

    Args:
      executable: str, the user-specified staging executable to use

    Returns:
      _Command corresponding to the executable

    Raises:
      StagingCommandNotFoundError: if the executable couldn't be found
    """
    try:
      path = files.FindExecutableOnPath(executable)
    except ValueError:
      # If this is a path (e.g. with os.path.sep in the string),
      # FindExecutableOnPath throws an exception
      path = None
    if path:
      return cls(path)
    if os.path.exists(executable):
      return cls(executable)
    raise StagingCommandNotFoundError('The provided staging command [{}] could '
                                      'not be found.'.format(executable))
# Path to the go-app-stager binary, relative to the Cloud SDK root.
_GO_APP_STAGER_DIR = os.path.join('platform', 'google_appengine')

# Path to the jar which contains the Java staging command, relative to the
# Cloud SDK root.
_APPENGINE_TOOLS_JAR = os.path.join('platform', 'google_appengine', 'google',
                                    'appengine', 'tools', 'java', 'lib',
                                    'appengine-tools-api.jar')
# Maps a runtime matcher (regex or exact runtime name) plus a set of App
# Engine environments to the staging command used for that combination.
_STAGING_REGISTRY = {
    # Go on flex / managed VMs uses the bundled go-app-stager binary.
    runtime_registry.RegistryEntry(
        re.compile(r'(go|go1\..+)$'), {env.FLEX, env.MANAGED_VMS}):
        _BundledCommand(
            os.path.join(_GO_APP_STAGER_DIR, 'go-app-stager'),
            os.path.join(_GO_APP_STAGER_DIR, 'go-app-stager.exe'),
            component='app-engine-go'),
    # Go on standard (including second-gen go1xx runtimes) uses the same
    # stager binary.
    runtime_registry.RegistryEntry(
        re.compile(r'(go|go1\..+|%s)$' % env.GO_TI_RUNTIME_EXPR.pattern), {
            env.STANDARD,
        }):
        _BundledCommand(
            os.path.join(_GO_APP_STAGER_DIR, 'go-app-stager'),
            os.path.join(_GO_APP_STAGER_DIR, 'go-app-stager.exe'),
            component='app-engine-go'),
    # appengine-web.xml based Java apps are staged by the bundled tools jar;
    # the same jar path is used on every OS.
    runtime_registry.RegistryEntry('java-xml', {env.STANDARD}):
        _BundledCommand(
            _APPENGINE_TOOLS_JAR,
            _APPENGINE_TOOLS_JAR,
            component='app-engine-java',
            mapper=_JavaStagingMapper),
    # Jar and Maven/Gradle project deployments synthesize a java21 app.yaml.
    runtime_registry.RegistryEntry('java-jar', {env.STANDARD}):
        CreateJava21YamlCommand(),
    runtime_registry.RegistryEntry('java-maven-project', {env.STANDARD}):
        CreateJava21MavenProjectCommand(),
    runtime_registry.RegistryEntry('java-gradle-project', {env.STANDARD}):
        CreateJava21GradleProjectCommand(),
    # Plain copy staging, optionally injecting a user-provided app.yaml.
    runtime_registry.RegistryEntry('generic-copy', {env.FLEX, env.STANDARD}):
        StageAppWithoutAppYamlCommand(),
}

# _STAGING_REGISTRY_BETA extends _STAGING_REGISTRY, overriding entries if the
# same key is used.
_STAGING_REGISTRY_BETA = {}
class Stager(object):
  """Stages deployables by dispatching to commands in a runtime registry."""

  def __init__(self, registry, staging_area):
    self.registry = registry
    self.staging_area = staging_area

  def Stage(self, descriptor, app_dir, runtime, environment, appyaml=None):
    """Stage the given deployable or do nothing if N/A.

    Args:
      descriptor: str, path to the unstaged <service>.yaml or appengine-web.xml
      app_dir: str, path to the unstaged app directory
      runtime: str, the name of the runtime for the application to stage
      environment: api_lib.app.env.Environment, the environment for the
        application to stage
      appyaml: str or None, the app.yaml location to used for deployment.

    Returns:
      str, the path to the staged directory or None if no corresponding staging
      command was found.

    Raises:
      NoSdkRootError: if no Cloud SDK installation root could be found.
      StagingCommandFailedError: if the staging command process exited non-zero.
    """
    staging_command = self.registry.Get(runtime, environment)
    if not staging_command:
      # No registered command for this runtime/environment pair.
      return None
    staging_command.EnsureInstalled()
    return staging_command.Run(self.staging_area, descriptor, app_dir, appyaml)
def GetRegistry():
  """Returns the GA staging registry with a no-op fallback command."""
  fallback = NoopCommand()
  return runtime_registry.Registry(_STAGING_REGISTRY, default=fallback)
def GetBetaRegistry():
  """Returns the staging registry with beta overrides layered on top."""
  # Beta entries replace GA entries that share the same key.
  mappings = dict(_STAGING_REGISTRY)
  mappings.update(_STAGING_REGISTRY_BETA)
  return runtime_registry.Registry(mappings, default=NoopCommand())
def GetStager(staging_area):
  """Returns the default (GA) stager for the given staging area."""
  return Stager(GetRegistry(), staging_area)
def GetBetaStager(staging_area):
  """Returns the stager used by `gcloud beta *` commands."""
  return Stager(GetBetaRegistry(), staging_area)
def GetNoopStager(staging_area):
  """Returns a stager whose registry is empty (every runtime is a no-op)."""
  empty_registry = runtime_registry.Registry({}, default=NoopCommand())
  return Stager(empty_registry, staging_area)
def GetOverrideStager(command, staging_area):
  """Returns a stager whose registry always resolves to the given command."""
  registry = runtime_registry.Registry(
      None, override=command, default=NoopCommand())
  return Stager(registry, staging_area)

View File

@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for `gcloud app update` command."""
from googlecloudsdk.api_lib.app.api import appengine_app_update_api_client
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.core import log
from googlecloudsdk.core.console import progress_tracker
def AddAppUpdateFlags(parser):
  """Registers the flags shared by the `gcloud app update` commands.

  Args:
    parser: the argparse parser the flags are added to.
  """
  parser.add_argument(
      '--split-health-checks',
      action=arg_parsers.StoreTrueFalseAction,
      help=('Enables/disables split health checks by default '
            'on new deployments.'))
  # TODO(b/181786069):unify definition of the service account params.
  parser.add_argument(
      '--service-account',
      help='The app-level default service account to update the app with.')
  parser.add_argument(
      '--ssl-policy',
      help='The app-level SSL policy to update the app with.',
      choices=['TLS_VERSION_1_0', 'TLS_VERSION_1_2'])
def PatchApplication(
    release_track,
    split_health_checks=None,
    service_account=None,
    ssl_policy=None,
):
  """Updates an App Engine application via API client.

  Args:
    release_track: The release track of the app update command to run.
    split_health_checks: Boolean, whether to enable split health checks by
      default.
    service_account: str, the app-level default service account to update for
      this App Engine app.
    ssl_policy: str, the app-level SSL policy to update for this App Engine
      app. Can be TLS_VERSION_1_0 or TLS_VERSION_1_2.
  """
  api_client = appengine_app_update_api_client.GetApiClientForTrack(
      release_track)
  # Translate the user-facing TLS version names to the API enum; any other
  # value (including None) maps to None, i.e. "no SSL policy change".
  enum_type = api_client.messages.Application.SslPolicyValueValuesEnum
  if ssl_policy == 'TLS_VERSION_1_0':
    ssl_policy_enum = enum_type.DEFAULT
  elif ssl_policy == 'TLS_VERSION_1_2':
    ssl_policy_enum = enum_type.MODERN
  else:
    ssl_policy_enum = None
  nothing_to_do = (
      split_health_checks is None
      and service_account is None
      and ssl_policy_enum is None)
  if nothing_to_do:
    log.status.Print('Nothing to update.')
    return
  with progress_tracker.ProgressTracker(
      'Updating the app [{0}]'.format(api_client.project)):
    api_client.PatchApplication(
        split_health_checks=split_health_checks,
        service_account=service_account,
        ssl_policy=ssl_policy_enum,
    )