feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,399 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of installers for different component types."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import re
import stat
import tarfile
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import local_file_adapter
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import transport
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import exceptions as creds_exceptions
from googlecloudsdk.core.util import files as file_utils
from googlecloudsdk.core.util import http_encoding
from googlecloudsdk.core.util import retry
import requests
import six
# Logical command path used when the update manager itself issues a request
# (see MakeRequest, which selects the shorter timeout for this path).
UPDATE_MANAGER_COMMAND_PATH = 'UPDATE_MANAGER'
# Default request timeout for component downloads, in seconds.
TIMEOUT_IN_SEC = 60
# Shorter timeout used for update-manager-initiated requests.
UPDATE_MANAGER_TIMEOUT_IN_SEC = 3
# Chunk size (bytes) used when streaming downloads to disk.
WRITE_BUFFER_SIZE = 16*1024
class Error(exceptions.Error):
  """Base exception for the installers module; all module errors derive from it."""
class ComponentDownloadFailedError(Error):
  """Exception for when we cannot download a component for some reason."""

  def __init__(self, component_id, e):
    """Wraps the underlying error *e* with the failing component's id."""
    prefix = 'The component [{component_id}] failed to download.\n\n'.format(
        component_id=component_id)
    super(ComponentDownloadFailedError, self).__init__(
        prefix + six.text_type(e))
class URLFetchError(Error):
  """Exception raised when an HTTP fetch fails."""
class AuthenticationError(Error):
  """Exception for when the resource is protected by authentication."""

  def __init__(self, msg, e):
    """Appends the underlying error *e* to the human-readable message."""
    detail = '\n\n' + six.text_type(e)
    super(AuthenticationError, self).__init__(msg + detail)
class UnsupportedSourceError(Error):
  """Raised when a component's data source has an unrecognized type."""
def MakeRequest(url, command_path):
  """Gets the request object for the given URL using the requests library.

  If the URL is for cloud storage and we get a 403, this will try to load the
  active credentials and use them to authenticate the download.

  Args:
    url: str, the URL to download.
    command_path: str, the command path to include in the User-Agent header if
      the URL is HTTP.

  Raises:
    AuthenticationError: If this download requires authentication and there
      are no credentials or the credentials do not have access.

  Returns:
    requests.Response object
  """
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.core.credentials import store
  # pylint: enable=g-import-not-at-top
  # Rewrite browser-auth GCS URLs to the API endpoint: fetching the browser
  # URL without a browser would return the HTML of the sign-in page instead
  # of the file contents (see GCS_BROWSER_DL_URL comment on the class).
  if url.startswith(ComponentInstaller.GCS_BROWSER_DL_URL):
    url = url.replace(ComponentInstaller.GCS_BROWSER_DL_URL,
                      ComponentInstaller.GCS_API_DL_URL, 1)
  # Byte-string header names/values; the User-Agent is encoded explicitly.
  headers = {
      b'Cache-Control':
          b'no-cache',
      b'User-Agent':
          http_encoding.Encode(transport.MakeUserAgentString(command_path))
  }
  timeout = TIMEOUT_IN_SEC
  if command_path == UPDATE_MANAGER_COMMAND_PATH:
    # Background update-manager checks should fail fast.
    timeout = UPDATE_MANAGER_TIMEOUT_IN_SEC
  try:
    # First attempt: anonymous (no credentials attached).
    return _RawRequest(url, headers=headers, timeout=timeout)
  except requests.exceptions.HTTPError as e:
    # Only a 403 from the GCS API endpoint is worth retrying with
    # credentials; anything else is a genuine failure.
    if e.response.status_code != 403 or not e.response.url.startswith(
        ComponentInstaller.GCS_API_DL_URL):
      raise e
    try:
      creds = store.LoadFreshCredential(use_google_auth=True)
      creds.apply(headers)
    except creds_exceptions.Error as e:
      # If we fail here, it is because there are no active credentials or the
      # credentials are bad.
      raise AuthenticationError(
          'This component requires valid credentials to install.', e)
    try:
      # Retry the download using the credentials.
      return _RawRequest(url, headers=headers, timeout=timeout)
    except requests.exceptions.HTTPError as e:
      if e.response.status_code != 403:
        raise e
      # If we fail again with a 403, that means we used the credentials, but
      # they didn't have access to the resource.
      raise AuthenticationError(
          """\
Account [{account}] does not have permission to install this component. Please
ensure that this account should have access or run:
$ gcloud config set account `ACCOUNT`
to choose another account.""".format(
    account=properties.VALUES.core.account.Get()), e)
def _RawRequest(*args, **kwargs):
  """Executes an HTTP request, retrying 404 responses a few times.

  Args:
    *args: positional args forwarded to _ExecuteRequestAndRaiseExceptions.
    **kwargs: keyword args forwarded to _ExecuteRequestAndRaiseExceptions.

  Returns:
    The requests.Response from _ExecuteRequestAndRaiseExceptions.

  Raises:
    requests.exceptions.HTTPError: the last error, if all retries fail.
  """
  def RetryIf(exc_type, exc_value, unused_traceback, unused_state):
    # Only retry 404s (presumably transient propagation of newly released
    # files); all other HTTP errors surface to the caller immediately.
    return (exc_type == requests.exceptions.HTTPError and
            exc_value.response.status_code == 404)
  def StatusUpdate(unused_result, unused_state):
    log.debug('Retrying request...')
  retryer = retry.Retryer(
      max_retrials=3,
      exponential_sleep_multiplier=2,
      jitter_ms=100,
      status_update_func=StatusUpdate)
  try:
    return retryer.RetryOnException(
        _ExecuteRequestAndRaiseExceptions,
        args,
        kwargs,
        should_retry_if=RetryIf,
        sleep_ms=500)
  except retry.RetryException as e:
    # last_result is (return value, sys.exc_info)
    if e.last_result[1]:
      # Re-raise the original exception (with its traceback) rather than the
      # RetryException wrapper.
      exceptions.reraise(e.last_result[1][1], tb=e.last_result[1][2])
    raise
def _ExecuteRequestAndRaiseExceptions(url, headers, timeout):
  """Executes an HTTP GET using the shared requests session.

  Args:
    url: str, the url to download.
    headers: obj, the headers to include in the request.
    timeout: int, the timeout length for the request.

  Returns:
    A response object from the request.

  Raises:
    requests.exceptions.HTTPError in the case of a client or server error.
  """
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.core import requests as core_requests
  # pylint: enable=g-import-not-at-top
  session = core_requests.GetSession()
  # Support file:// URLs (used for local snapshots) via a custom adapter.
  if url.startswith('file://'):
    session.mount('file://', local_file_adapter.LocalFileAdapter())
  response = session.get(url, headers=headers, timeout=timeout, stream=True)
  response.raise_for_status()
  return response
def DownloadTar(url, download_dir, progress_callback=None,
                command_path='unknown'):
  """Download the given tar file.

  Args:
    url: str, The URL to download.
    download_dir: str, The path to put the temporary download file into.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
    command_path: the command path to include in the User-Agent header if the
      URL is HTTP

  Returns:
    str, The path of the downloaded tar file.

  Raises:
    URLFetchError: If there is a problem fetching the given URL.
  """
  progress_callback = progress_callback or console_io.DefaultProgressBarCallback
  if not os.path.exists(download_dir):
    file_utils.MakeDir(download_dir)
  download_file_path = os.path.join(download_dir, os.path.basename(url))
  if os.path.exists(download_file_path):
    os.remove(download_file_path)
  try:
    response = MakeRequest(url, command_path)
    # BUGFIX: previously this read len(response.content), which forces the
    # entire body into memory and defeats the streamed download below. Use
    # the Content-Length header instead; if the server did not send one,
    # skip intermediate progress updates rather than dividing by zero.
    total_size = int(response.headers.get('Content-Length', 0) or 0)
    with file_utils.BinaryFileWriter(download_file_path) as fp:
      total_written = 0
      for chunk in response.iter_content(chunk_size=WRITE_BUFFER_SIZE):
        fp.write(chunk)
        total_written += len(chunk)
        if total_size:
          # min() guards against servers that under-report Content-Length.
          progress_callback(min(total_written / total_size, 1.0))
      progress_callback(1)
  except (requests.exceptions.HTTPError, OSError) as e:
    raise URLFetchError(e)
  return download_file_path
def ExtractTar(downloaded_archive, extract_dir, progress_callback=None):
  """Extracts the given archive into extract_dir and deletes the archive.

  Args:
    downloaded_archive: str, The path to the archive downloaded previously.
    extract_dir: str, The path to extract the tar into.
    progress_callback: f(float), A function to call with the fraction of
      completeness.

  Returns:
    [str], The files that were extracted from the tar file (directories are
    reported with a trailing '/').
  """
  callback = progress_callback or console_io.DefaultProgressBarCallback
  if not os.path.exists(extract_dir):
    file_utils.MakeDir(extract_dir)
  extracted = []
  with tarfile.open(name=downloaded_archive) as archive:
    entries = archive.getmembers()
    entry_count = len(entries)
    for index, entry in enumerate(entries, start=1):
      if entry.isdir():
        extracted.append(entry.name + '/')
      else:
        extracted.append(entry.name)
      archive.extract(entry, extract_dir)
      target = os.path.join(extract_dir, entry.name)
      # Ensure read-and-write permission for all files
      if os.path.isfile(target) and not os.access(target, os.W_OK):
        os.chmod(target, stat.S_IWUSR | stat.S_IREAD)
      callback(index / entry_count)
  callback(1)
  os.remove(downloaded_archive)
  return extracted
class ComponentInstaller(object):
  """A class to install Cloud SDK components of different source types."""

  DOWNLOAD_DIR_NAME = '.download'

  # This is the URL prefix for files that require authentication which triggers
  # browser based cookie authentication. We will use URLs with this pattern,
  # but we never want to actually try to download from here because we are not
  # using a browser and it will return the html of the sign in page.
  GCS_BROWSER_DL_URL = 'https://storage.cloud.google.com/'

  # All files accessible though the above prefix, are accessible through this
  # prefix when you insert authentication data into the http headers. If no
  # auth is required, you can also use this URL directly with no headers.
  GCS_API_DL_URL = 'https://storage.googleapis.com/'

  def __init__(self, sdk_root, state_directory):
    """Initializes an installer for components of different source types.

    Args:
      sdk_root: str, The path to the root directory of all Cloud SDK files.
      state_directory: str, The path to the directory where the local state is
        stored.
    """
    self.__sdk_root = sdk_root
    self.__state_directory = state_directory
    self.__download_directory = os.path.join(
        state_directory, ComponentInstaller.DOWNLOAD_DIR_NAME)

  def Download(self, component, progress_callback=None, command_path='unknown'):
    """Downloads the given component for whatever source type it has.

    Args:
      component: schemas.Component, The component from the snapshot to install.
      progress_callback: f(float), A function to call with the fraction of
        completeness.
      command_path: the command path to include in the User-Agent header if the
        URL is HTTP

    Returns:
      Optional[str], The path of the downloaded archive, or None if the
      component has no actual sources.

    Raises:
      UnsupportedSourceError: If the component data source is of an unknown
        type.
      URLFetchError: If the URL associated with the component data source
        cannot be fetched.
    """
    data = component.data
    if not data:
      # Nothing to fetch: this is a configuration-only component.
      return None
    if data.type != 'tar':
      raise UnsupportedSourceError(
          'tar is the only supported source format [{datatype}]'.format(
              datatype=data.type))
    return self._DownloadTar(
        component, progress_callback=progress_callback,
        command_path=command_path)

  def Extract(self, downloaded_archive, progress_callback=None):
    """Extracts the archive previously downloaded from self.Download().

    Args:
      downloaded_archive: Optional[str], The path to the archive downloaded
        previously.
      progress_callback: f(float), A function to call with the fraction of
        completeness.

    Returns:
      list of str, The files that were installed or [] if nothing was
      installed.
    """
    if downloaded_archive is None:
      # The component had no data/sources, so there is nothing to extract.
      return []
    return ExtractTar(
        downloaded_archive, self.__sdk_root,
        progress_callback=progress_callback)

  def _DownloadTar(self, component, progress_callback=None,
                   command_path='unknown'):
    """Download implementation for a component with source in a .tar.gz.

    Downloads the .tar for the component and returns its path.

    Args:
      component: schemas.Component, The component to install.
      progress_callback: f(float), A function to call with the fraction of
        completeness.
      command_path: the command path to include in the User-Agent header if the
        URL is HTTP

    Returns:
      Optional[str], The path of the downloaded archive, or None if the
      component has no actual sources.

    Raises:
      ValueError: If the source URL for the tar file is relative, but there is
        no location information associated with the snapshot we are installing
        from.
      URLFetchError: If there is a problem fetching the component's URL.
    """
    url = component.data.source
    if not url:
      # Not every component carries real source data.
      return None
    # A URL without a scheme is relative; we need an absolute URL to fetch.
    if not re.search(r'^\w+://', url):
      raise ValueError('Cannot install component [{0}] from a relative path '
                       'because the base URL of the snapshot is not defined.'
                       .format(component.id))
    try:
      return DownloadTar(
          url, self.__download_directory, progress_callback=progress_callback,
          command_path=command_path)
    except (URLFetchError, AuthenticationError) as e:
      raise ComponentDownloadFailedError(component.id, e)

View File

@@ -0,0 +1,749 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manages the state of what is installed in the cloud SDK.
This tracks the installed modules along with the files they created. It also
provides functionality like extracting tar files into the installation and
tracking when we check for updates.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import compileall
import errno
import logging
import os
import posixpath
import re
import shutil
import sys
from googlecloudsdk.core import config
from googlecloudsdk.core import exceptions
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.updater import installers
from googlecloudsdk.core.updater import snapshots
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import files as file_utils
import six
class Error(exceptions.Error):
  """Base exception for the local_state module; all module errors derive from it."""
class InvalidSDKRootError(Error):
  """Error for when the root of the Cloud SDK is invalid or cannot be found."""

  def __init__(self):
    message = (
        'The components management action could not be performed because the '
        'installation root of the Cloud SDK could not be located. '
        'If you previously used the Cloud SDK installer, '
        'you could re-install the SDK and retry again.')
    super(InvalidSDKRootError, self).__init__(message)
class InvalidDownloadError(Error):
  """Exception for when the SDK that was downloaded was invalid."""

  def __init__(self):
    super(InvalidDownloadError, self).__init__(
        'The Cloud SDK download was invalid.')
class PermissionsError(Error):
  """Error for when a file operation cannot complete due to permissions."""

  def __init__(self, message, path):
    """Initialize a PermissionsError.

    Args:
      message: str, The message from the underlying error.
      path: str, The absolute path to a file or directory that needs to be
        operated on, but can't because of insufficient permissions.
    """
    template = ('{message}: [{path}]\n\nEnsure you have the permissions to '
                'access the file and that the file is not in use.')
    super(PermissionsError, self).__init__(
        template.format(message=message, path=path))
def _RaisesPermissionsError(func):
  """Use this decorator for functions that deal with files.

  If an exception indicating file permissions is raised, this decorator will
  raise a PermissionsError instead, so that the caller only has to watch for
  one type of exception.

  Args:
    func: The function to decorate.

  Returns:
    A decorator.
  """
  def _TryFunc(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except shutil.Error as e:
      # shutil.Error aggregates failures as a list of (src, dst, why) tuples;
      # inspect the first one.
      args = e.args[0][0]
      # unfortunately shutil.Error *only* has formatted strings to inspect.
      # Looking for this substring is looking for errno.EACCES, which has
      # a numeric value of 13.
      # NOTE(review): this string match is CPython-message-specific and may
      # not hold on every platform/version — confirm if porting.
      if args[2].startswith('[Errno 13]'):
        exceptions.reraise(
            PermissionsError(message=args[2],
                             path=os.path.abspath(args[0])))
      raise
    except (OSError, IOError) as e:
      if e.errno == errno.EACCES:
        exceptions.reraise(
            PermissionsError(
                message=encoding.Decode(e.strerror),
                path=encoding.Decode(os.path.abspath(e.filename))))
      raise
  return _TryFunc
class InstallationState(object):
  """The main class for checking / updating local installation state."""

  # Directory (relative to the SDK root) where all local state is kept.
  STATE_DIR_NAME = config.Paths.CLOUDSDK_STATE_DIR
  # Subdirectory of the state dir holding the previous installation
  # (populated by ReplaceWith, consumed by RestoreBackup).
  BACKUP_DIR_NAME = '.backup'
  # Subdirectory of the state dir where a replaced install is parked until it
  # can be safely deleted (see RestoreBackup).
  TRASH_DIR_NAME = '.trash'
  # Suffix appended to the (normalized) SDK root to form the staging area.
  STAGING_ROOT_SUFFIX = '.staging'
  # Suffix of the per-component manifest files in the state directory.
  COMPONENT_SNAPSHOT_FILE_SUFFIX = '.snapshot.json'
  # Old directories that may linger from previous releases; removed by
  # ClearDeprecatedDirs.
  DEPRECATED_DIRS = ('lib/third_party/grpc',)
@staticmethod
def ForCurrent():
  """Gets the installation state for the SDK that this code is running in.

  Returns:
    InstallationState, The state for this area.

  Raises:
    InvalidSDKRootError: If this code is not running under a valid SDK.
  """
  sdk_root = config.Paths().sdk_root
  if sdk_root:
    return InstallationState(os.path.realpath(sdk_root))
  raise InvalidSDKRootError()
def BackupInstallationState(self):
  """Gets the installation state for the backup of this state, if it exists.

  Returns:
    InstallationState, The state for this area or None if the backup does not
    exist.
  """
  if self.HasBackup():
    return InstallationState(os.path.realpath(self.__backup_directory))
  return None
@staticmethod
def VersionForInstalledComponent(component_id):
  """Gets the version string for the given installed component.

  This function is to be used to get component versions for metrics reporting.
  If it fails in any way or if the component_id is unknown, it will return
  None. This prevents errors from surfacing when the version is needed
  strictly for reporting purposes.

  Args:
    component_id: str, The component id of the component you want the version
      for.

  Returns:
    str, The installed version of the component, or None if it is not
    installed or if an error occurs.
  """
  try:
    state = InstallationState.ForCurrent()
    # pylint: disable=protected-access, This is the same class.
    manifest = InstallationManifest(state._state_directory, component_id)
    return manifest.VersionString()
  # pylint: disable=bare-except, We never want to fail because of metrics.
  except:
    logging.debug('Failed to get installed version for component [%s]: [%s]',
                  component_id, sys.exc_info())
  return None
@_RaisesPermissionsError
def __init__(self, sdk_root):
  """Initializes the installation state for the given sdk install.

  Args:
    sdk_root: str, The file path of the root of the SDK installation.

  Raises:
    ValueError: If the given SDK root does not exist.
  """
  if not os.path.isdir(sdk_root):
    raise ValueError('The given Cloud SDK root does not exist: [{0}]'
                     .format(sdk_root))
  root = encoding.Decode(sdk_root)
  state_dir = os.path.join(root, InstallationState.STATE_DIR_NAME)
  self.__sdk_root = root
  self._state_directory = state_dir
  self.__backup_directory = os.path.join(state_dir,
                                         InstallationState.BACKUP_DIR_NAME)
  self.__trash_directory = os.path.join(state_dir,
                                        InstallationState.TRASH_DIR_NAME)
  # The staging area is a sibling of the SDK root, not inside it.
  self.__sdk_staging_root = (os.path.normpath(root) +
                             InstallationState.STAGING_ROOT_SUFFIX)
  self._component_installer = installers.ComponentInstaller(root, state_dir)
@_RaisesPermissionsError
def _CreateStateDir(self):
  """Creates the state directory if it does not exist."""
  if os.path.isdir(self._state_directory):
    return
  file_utils.MakeDir(self._state_directory)
@property
def sdk_root(self):
  """Gets the root of the SDK that this state corresponds to.

  Returns:
    str, the path to the root directory.
  """
  # Read-only view of the decoded root path set in __init__.
  return self.__sdk_root
def _FilesForSuffix(self, suffix):
  """Returns the files in the state directory that have the given suffix.

  Args:
    suffix: str, The file suffix to match on.

  Returns:
    list of str, The file names that match.
  """
  state_dir = self._state_directory
  if not os.path.isdir(state_dir):
    return []
  return [name for name in os.listdir(state_dir)
          if name.endswith(suffix)
          and os.path.isfile(os.path.join(state_dir, name))]
@_RaisesPermissionsError
def InstalledComponents(self):
  """Gets all the components that are currently installed.

  Returns:
    A dictionary of component id string to InstallationManifest.
  """
  suffix = InstallationState.COMPONENT_SNAPSHOT_FILE_SUFFIX
  # The component id is the manifest filename with its suffix stripped.
  ids = [f[:-len(suffix)] for f in self._FilesForSuffix(suffix)]
  return {component_id: InstallationManifest(self._state_directory,
                                             component_id)
          for component_id in ids}
@_RaisesPermissionsError
def Snapshot(self):
  """Generates a ComponentSnapshot from the currently installed components.

  Returns:
    snapshots.ComponentSnapshot describing this installation's state.
  """
  return snapshots.ComponentSnapshot.FromInstallState(self)
def DiffCurrentState(self, latest_snapshot, platform_filter=None):
  """Generates a ComponentSnapshotDiff from current state and the given state.

  Args:
    latest_snapshot: snapshots.ComponentSnapshot, The current state of the
      world to diff against.
    platform_filter: platforms.Platform, A platform that components must
      match in order to be considered for any operations.

  Returns:
    A ComponentSnapshotDiff.
  """
  current = self.Snapshot()
  return current.CreateDiff(latest_snapshot, platform_filter=platform_filter)
@_RaisesPermissionsError
def CreateStagingFromDownload(self, url, progress_callback=None):
  """Creates a new staging area from a fresh download of the Cloud SDK.

  Args:
    url: str, The url to download the new SDK from.
    progress_callback: f(float), A function to call with the fraction of
      completeness.

  Returns:
    An InstallationState object for the new install.

  Raises:
    installers.URLFetchError: If the new SDK could not be downloaded.
    InvalidDownloadError: If the new SDK was malformed.
  """
  self._ClearStaging()
  with file_utils.TemporaryDirectory() as t:
    download_dir = os.path.join(t, '.download')
    extract_dir = os.path.join(t, '.extract')
    # Give the download and extract phases equal shares of the progress bar.
    (download_callback, extract_callback) = (
        console_io.SplitProgressBar(progress_callback, [1, 1]))
    downloaded_tar = installers.DownloadTar(
        url, download_dir, progress_callback=download_callback,
        command_path='components.reinstall')
    installers.ExtractTar(
        downloaded_tar, extract_dir, progress_callback=extract_callback)
    files = os.listdir(extract_dir)
    # A valid SDK archive contains exactly one top-level directory.
    if len(files) != 1:
      raise InvalidDownloadError()
    sdk_root = os.path.join(extract_dir, files[0])
    # Move out of the temporary dir before it is cleaned up on exit.
    file_utils.MoveDir(sdk_root, self.__sdk_staging_root)
  staging_sdk = InstallationState(self.__sdk_staging_root)
  # pylint: disable=protected-access, This is an instance of InstallationState
  staging_sdk._CreateStateDir()
  # Carry the machine-level properties file over to the staged install.
  self.CopyMachinePropertiesTo(staging_sdk)
  return staging_sdk
@_RaisesPermissionsError
def ReplaceWith(self, other_install_state, progress_callback=None):
  """Replaces this installation with the given other installation.

  This moves the current installation to the backup directory of the other
  installation. Then, it moves the entire second installation to replace
  this one on the file system. The result is that the other installation
  completely replaces the current one, but the current one is snapshotted and
  stored as a backup under the new one (and can be restored later).

  Args:
    other_install_state: InstallationState, The other state with which to
      replace this one.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  self._CreateStateDir()
  self.ClearBackup()
  self.ClearTrash()
  # pylint: disable=protected-access, This is an instance of InstallationState
  other_install_state._CreateStateDir()
  other_install_state.ClearBackup()
  # pylint: disable=protected-access, This is an instance of InstallationState
  # NOTE: accessing other_install_state.__backup_directory works because
  # double-underscore name mangling is per enclosing class, not per instance.
  file_utils.MoveDir(self.__sdk_root, other_install_state.__backup_directory)
  if progress_callback:
    progress_callback(0.5)
  file_utils.MoveDir(other_install_state.__sdk_root, self.__sdk_root)
  if progress_callback:
    progress_callback(1.0)
@_RaisesPermissionsError
def RestoreBackup(self):
  """Restore the backup from this install state if it exists.

  If this installation has a backup stored in it (created by and update that
  used ReplaceWith(), above), it replaces this installation with the backup,
  using a temporary staging area. This installation is moved to the trash
  directory under the installation that exists after this is done. The trash
  directory can be removed at any point in the future. We just don't want to
  delete code that is running since some platforms have a problem with that.

  Returns:
    bool, True if there was a backup to restore, False otherwise.
  """
  if not self.HasBackup():
    return False
  self._ClearStaging()
  # Promote the backup to a full staged installation first.
  file_utils.MoveDir(self.__backup_directory, self.__sdk_staging_root)
  staging_state = InstallationState(self.__sdk_staging_root)
  # pylint: disable=protected-access, This is an instance of InstallationState
  staging_state._CreateStateDir()
  staging_state.ClearTrash()
  # Park the (possibly still running) current install in the new install's
  # trash rather than deleting it outright.
  # pylint: disable=protected-access, This is an instance of InstallationState
  file_utils.MoveDir(self.__sdk_root, staging_state.__trash_directory)
  file_utils.MoveDir(staging_state.__sdk_root, self.__sdk_root)
  return True
def HasBackup(self):
  """Determines if this install has a valid backup that can be restored.

  Returns:
    bool, True if there is a backup, False otherwise.
  """
  # A backup exists iff ReplaceWith() left a previous install in .backup.
  return os.path.isdir(self.__backup_directory)
def BackupDirectory(self):
  """Gets the backup directory of this installation if it exists.

  Returns:
    str, The path to the backup directory or None if it does not exist.
  """
  if not self.HasBackup():
    return None
  return self.__backup_directory
@_RaisesPermissionsError
def _ClearStaging(self, progress_callback=None):
  """Deletes the current staging directory if it exists.

  Args:
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  staging = self.__sdk_staging_root
  if os.path.exists(staging):
    file_utils.RmTree(staging)
  if progress_callback:
    progress_callback(1)
@_RaisesPermissionsError
def ClearBackup(self, progress_callback=None):
  """Deletes the current backup if it exists.

  Args:
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  backup = self.__backup_directory
  if os.path.isdir(backup):
    file_utils.RmTree(backup)
  if progress_callback:
    progress_callback(1)
@_RaisesPermissionsError
def ClearTrash(self, progress_callback=None):
  """Deletes the current trash directory if it exists.

  Args:
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  trash = self.__trash_directory
  if os.path.isdir(trash):
    file_utils.RmTree(trash)
  if progress_callback:
    progress_callback(1)
@_RaisesPermissionsError
def Download(self, snapshot, component_id, progress_callback=None,
             command_path='unknown'):
  """Downloads the given component based on the given snapshot.

  Args:
    snapshot: snapshots.ComponentSnapshot, The snapshot that describes the
      component to install.
    component_id: str, The component to install from the given snapshot.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
    command_path: the command path to include in the User-Agent header if the
      URL is HTTP

  Returns:
    Optional[str], The path of the downloaded archive, or None if the
    component has no actual sources.

  Raises:
    installers.URLFetchError: If the component associated with the provided
      component ID has a URL that is not fetched correctly.
  """
  self._CreateStateDir()
  return self._component_installer.Download(
      snapshot.ComponentFromId(component_id),
      progress_callback=progress_callback,
      command_path=command_path)
@_RaisesPermissionsError
def Install(self, snapshot, component_id, downloaded_archive,
            progress_callback=None):
  """Installs the archive previously downloaded from self.Download().

  Args:
    snapshot: snapshots.ComponentSnapshot, The snapshot that describes the
      component to install.
    component_id: str, The component to install from the given snapshot.
    downloaded_archive: Optional[str], The path to the archive downloaded
      previously.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  extracted = self._component_installer.Extract(
      downloaded_archive, progress_callback=progress_callback)
  manifest = InstallationManifest(self._state_directory, component_id)
  # Record the component and its file list so Uninstall can reverse this.
  manifest.MarkInstalled(snapshot, extracted)
@_RaisesPermissionsError
def Uninstall(self, component_id, progress_callback=None):
  """Uninstalls the given component.

  Deletes all the files for this component and marks it as no longer being
  installed.

  Args:
    component_id: str, The id of the component to uninstall.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
  """
  manifest = InstallationManifest(self._state_directory, component_id)
  paths = manifest.InstalledPaths()
  total_paths = len(paths)
  root = self.__sdk_root
  # Directories that may become empty once files are removed.
  dirs_to_remove = set()
  # __pycache__ dirs adjacent to removed .py files; deleted wholesale below.
  pycache_dirs = set()
  for num, p in enumerate(paths, start=1):
    path = os.path.join(root, p)
    if os.path.isfile(path) or os.path.islink(path):
      os.remove(path)
      dir_path = os.path.dirname(os.path.normpath(p))
      if p.endswith('.py'):
        # Python 2 processes leave behind .pyc files adjacent to the .py file;
        # clean these up for any .py files being removed.
        pyc_path = path + 'c'
        if os.path.isfile(pyc_path):
          os.remove(pyc_path)
        # Python 3 processes leave behind __pycache__ folders in the .py
        # file's directory; clean these up as well. Since the .pyc files
        # within have different suffixes depending on the Python version, and
        # the version of Python that compiled the file may differ from the
        # current one running, it's faster to just delete the whole folder
        # later instead of trying to match the file(s) here.
        pycache_dirs.add(os.path.join(root, dir_path, '__pycache__'))
      # Record every ancestor directory (relative to root) for later cleanup.
      while dir_path:
        dirs_to_remove.add(os.path.join(root, dir_path))
        dir_path = os.path.dirname(dir_path)
    elif os.path.isdir(path):
      dirs_to_remove.add(os.path.normpath(path))
    if progress_callback:
      progress_callback(num / total_paths)
  for d in pycache_dirs:
    if os.path.isdir(d) and not os.path.islink(d):
      file_utils.RmTree(d)
  # Remove dirs from the bottom up. Subdirs will always have a longer path
  # than it's parent.
  for d in sorted(dirs_to_remove, key=len, reverse=True):
    # Only remove directories that are now empty (and are not symlinks).
    if os.path.isdir(d) and not os.path.islink(d) and not os.listdir(d):
      os.rmdir(d)
  manifest.MarkUninstalled()
@_RaisesPermissionsError
def ClearDeprecatedDirs(self):
  """Clear deprecated directories that were not removed correctly."""
  for rel_path in self.DEPRECATED_DIRS:
    full_path = os.path.join(self.sdk_root, rel_path)
    if os.path.isdir(full_path):
      file_utils.RmTree(full_path)
def CopyMachinePropertiesTo(self, other_state):
  """Copy this state's properties file to another state.

  This is primarily intended to be used to maintain the machine properties
  file during a schema-change-induced reinstall.

  Args:
    other_state: InstallationState, The installation state of the fresh
      Cloud SDK that needs the properties file mirrored in.
  """
  src = os.path.join(
      self.sdk_root, config.Paths.CLOUDSDK_PROPERTIES_NAME)
  dst = os.path.join(
      other_state.sdk_root, config.Paths.CLOUDSDK_PROPERTIES_NAME)
  # Nothing to mirror if this install has no machine properties file.
  if os.path.exists(src):
    shutil.copyfile(src, dst)
def CompilePythonFiles(self, force=False, workers=None):
  """Attempts to compile all the python files into .pyc files.

  Args:
    force: boolean, passed to force option of compileall.compiledir,
    workers: int, can be used to explicitly set number of worker processes;
      otherwise we determine it automatically. Only set for testing.

  This does not raise exceptions if compiling a given file fails.
  """
  # Some python code shipped in the SDK is not 2 + 3 compatible.
  # Create exclusion patterns to avoid compilation errors.
  # This is pretty hacky, ideally we would have this information in the
  # component metadata and derive the exclusion patterns from that.
  # However, this is an ok short-term solution until we have bundled python.
  if six.PY2:
    # Paths containing Python-3-only syntax; compiling them under Python 2
    # would emit errors.
    regex_exclusion = re.compile('(httplib2/python3|typing/python3'
                                 '|platform/bq/third_party/yaml/lib3'
                                 '|third_party/google/api_core'
                                 '|third_party/google/auth'
                                 '|third_party/google/oauth2'
                                 '|third_party/overrides'
                                 '|third_party/proto'
                                 '|dulwich'
                                 '|gapic'
                                 '|pubsublite'
                                 '|pubsub/lite_subscriptions.py'
                                 '|logging_v2'
                                 '|platform/bundledpythonunix'
                                 '|pubsub_v1/services)')
  else:
    # Do not compile anything on python 3.4.x
    if sys.version_info[1] == 4:
      regex_exclusion = re.compile('.*')
    elif sys.version_info[1] >= 7:
      # Paths known to fail compilation on modern Python 3 interpreters
      # (e.g. Python-2-only vendored code).
      regex_exclusion = re.compile(
          '(kubernetes/utils/create_from_yaml.py'
          '|platform/google_appengine'
          '|gslib/vendored/boto/boto/iam/connection.py'
          '|gslib/vendored/boto/tests/'
          '|third_party/.*/python2/'
          '|third_party/yaml/[a-z]*.py'
          '|third_party/yaml/lib2/'
          '|third_party/antlr3/'
          '|appengine/'
          '|google/cloud/appengine_'
          '|google/cloud/bigquery_logging_v1'
          '|third_party/fancy_urllib/'
          '|platform/bq/third_party/gflags'
          '|platform/ext-runtime/nodejs/test/'
          '|platform/gsutil/third_party/apitools/ez_setup'
          '|platform/gsutil/third_party/pyparsing'
          '|platform/gsutil/third_party/crcmod_osx/crcmod/test)')
    else:
      # Python 3.5 / 3.6: no exclusions needed.
      regex_exclusion = None
  # The self.sdk_root pathname could contain unicode chars and py_compile
  # chokes on unicode paths. Using relative paths from self.sdk_root works
  # around the problem.
  with file_utils.ChDir(self.sdk_root):
    to_compile = [
        os.path.join('bin', 'bootstrapping'),
        os.path.join('data', 'cli'),
        'lib',
        'platform',
    ]
    # There are diminishing returns to using more worker processes past a
    # certain point, so we cap it to a reasonable amount here.
    num_workers = min(os.cpu_count(), 8) if workers is None else workers
    for d in to_compile:
      # Using 2 for quiet, in python 2.7 this value is used as a bool in the
      # implementation and bool(2) is True. Starting in python 3.5 this
      # parameter was changed to a multilevel value, where 1 hides files
      # being processed and 2 suppresses output.
      compileall.compile_dir(
          d, rx=regex_exclusion, quiet=2, force=force, workers=num_workers)
class InstallationManifest(object):
  """Accessor for the manifest and snapshot records of an installed component."""

  MANIFEST_SUFFIX = '.manifest'

  def __init__(self, state_dir, component_id):
    """Creates a new InstallationManifest.

    Args:
      state_dir: str, The directory path where install state is stored.
      component_id: str, The component id that you want to get the manifest
        for.
    """
    self.state_dir = state_dir
    self.id = component_id
    snapshot_name = (
        component_id + InstallationState.COMPONENT_SNAPSHOT_FILE_SUFFIX)
    manifest_name = component_id + InstallationManifest.MANIFEST_SUFFIX
    self.snapshot_file = os.path.join(self.state_dir, snapshot_name)
    self.manifest_file = os.path.join(self.state_dir, manifest_name)

  def MarkInstalled(self, snapshot, files):
    """Marks this component as installed with the given snapshot and files.

    Saves the ComponentSnapshot and writes the normalized list of installed
    paths to a manifest so they can be removed later.

    Args:
      snapshot: snapshots.ComponentSnapshot, The snapshot that was the source
        of the install.
      files: list of str, The files that were created by the installation.
    """
    with file_utils.FileWriter(self.manifest_file) as manifest:
      for installed_path in _NormalizeFileList(files):
        manifest.write(installed_path + '\n')
    snapshot.WriteToFile(self.snapshot_file, component_id=self.id)

  def MarkUninstalled(self):
    """Marks this component as no longer being installed.

    Only the bookkeeping (snapshot and manifest) is removed; the component's
    files themselves are not touched here.
    """
    for record in (self.manifest_file, self.snapshot_file):
      if os.path.isfile(record):
        os.remove(record)

  def ComponentSnapshot(self):
    """Loads the local ComponentSnapshot for this component.

    Returns:
      The snapshots.ComponentSnapshot for this component.
    """
    return snapshots.ComponentSnapshot.FromFile(self.snapshot_file)

  def ComponentDefinition(self):
    """Loads the schemas.Component for this component from its snapshot.

    Returns:
      The schemas.Component for this component.
    """
    return self.ComponentSnapshot().ComponentFromId(self.id)

  def VersionString(self):
    """Gets the version string of this component as it was installed.

    Returns:
      str, The installed version of this component.
    """
    return self.ComponentDefinition().version.version_string

  def InstalledPaths(self):
    """Gets the list of files and dirs created by installing this component.

    Returns:
      list of str, The files and directories installed by this component.
    """
    with file_utils.FileReader(self.manifest_file) as manifest:
      return [entry.rstrip() for entry in manifest]
def _NormalizeFileList(file_list):
"""Removes non-empty directory entries and sorts resulting list."""
parent_directories = set([])
directories = set([])
files = set([])
for f in file_list:
# Drops any trailing /.
norm_file_path = posixpath.normpath(f)
if f.endswith('/'):
directories.add(norm_file_path + '/')
else:
files.add(norm_file_path)
norm_file_path = os.path.dirname(norm_file_path)
while norm_file_path:
parent_directories.add(norm_file_path + '/')
norm_file_path = os.path.dirname(norm_file_path)
return sorted((directories - parent_directories) | files)

View File

@@ -0,0 +1,209 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python installers for gcloud."""
import os
import sys
from googlecloudsdk.core import config
from googlecloudsdk.core import execution_utils
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import platforms
PYTHON_VERSION = '3.13'
PYTHON_VERSION_INFO = (3, 13)
MACOS_PYTHON = 'python-3.13.7-macos11.tar.gz'
HOMEBREW_BIN = '/opt/homebrew/bin'
MACOS_PYTHON_INSTALL_PATH = (
f'/Library/Frameworks/Python.framework/Versions/{PYTHON_VERSION}/')
MACOS_PYTHON_URL = (
'https://dl.google.com/dl/cloudsdk/channels/rapid/' + MACOS_PYTHON
)
def _VirtualEnvPath():
  """Returns the configured virtualenv directory if it exists, else None."""
  candidate = config.Paths().virtualenv_dir
  return candidate if os.path.isdir(candidate) else None
def _CreateVirtualEnv(cli, python_to_use):
cli.Execute(['config', 'virtualenv', 'create', '--python-to-use',
python_to_use])
def _RecreateVirtualEnv(cli, python_to_use, existing_env_dir):
  """Replaces an existing virtual environment with a freshly created one."""
  message = (f'Virtual env already exists at {existing_env_dir}. '
             'Deleting so we can create new one.')
  print(message)
  cli.Execute(['config', 'virtualenv', 'delete'])
  _CreateVirtualEnv(cli, python_to_use)
def _UpdateVirtualEnv(cli):
cli.Execute(['config', 'virtualenv', 'update'])
def _EnableVirtualEnv(cli):
cli.Execute(['config', 'virtualenv', 'enable'])
def UpdatePythonDependencies(python_to_use):
  """Enables virtual environment with new python version and dependencies.

  Args:
    python_to_use: str, Path to the python interpreter the virtualenv should
      be built against.
  """
  try:
    from googlecloudsdk import gcloud_main  # pylint: disable=g-import-not-at-top
    cli = gcloud_main.CreateCLI([])
    # Presence of the virtualenv dir is taken to mean gcloud currently runs
    # inside a virtual environment.
    existing_env_dir = _VirtualEnvPath()
    if not existing_env_dir:
      _CreateVirtualEnv(cli, python_to_use)
    elif sys.version_info[:2] != PYTHON_VERSION_INFO:
      # Interpreter version changed: the env must be rebuilt from scratch.
      _RecreateVirtualEnv(cli, python_to_use, existing_env_dir)
    else:
      _UpdateVirtualEnv(cli)
    _EnableVirtualEnv(cli)
  except ImportError:
    print('Failed to enable virtual environment')
def _IsHomebrewInstalled():
  """Returns True when brew is present and gcloud lives in a homebrew path."""
  if not os.path.isdir(HOMEBREW_BIN):
    return False
  return 'homebrew' in config.GcloudPath()
def _PromptPythonInstall():
  """Builds the install prompt matching how gcloud itself was installed."""
  if _IsHomebrewInstalled():
    return f'Homebrew install Python {PYTHON_VERSION}?'
  return f'Download and run Python {PYTHON_VERSION} installer?'
def _BrewInstallPython():
  """Make sure python version is correct for user using gcloud with homebrew.

  Returns:
    str | None, An error message if brew failed, otherwise None.
  """
  brew_install = f'{HOMEBREW_BIN}/brew install python@{PYTHON_VERSION}'
  print(f'Running "{brew_install}".')
  if execution_utils.Exec(brew_install.split(' '), no_exit=True) == 0:
    return None
  return (
      f'"{brew_install}" failed. Please brew install '
      f'python@{PYTHON_VERSION} manually.')
def _MacInstallPython():
  """Downloads and runs the official Python installer package on macOS.

  Requires sudo for the `installer` step and Xcode Command Line Tools for the
  Python install itself.

  Returns:
    str | None, An error message on failure, or None on success.
  """
  print(f'Running Python {PYTHON_VERSION} installer, you may be prompted for '
        'sudo password...')
  # Xcode Command Line Tools is required to install Python.
  PromptAndInstallXcodeCommandLineTools()
  # Derive the .pkg name from the archive name so a future version bump to
  # MACOS_PYTHON cannot drift out of sync with the installer invocation
  # (previously the pkg filename duplicated the version string by hand).
  installer_pkg = MACOS_PYTHON.replace('.tar.gz', '.pkg')
  with files.TemporaryDirectory() as tempdir:
    with files.ChDir(tempdir):
      curl_args = ['curl', '--silent', '-O', MACOS_PYTHON_URL]
      exit_code = execution_utils.Exec(curl_args, no_exit=True)
      if exit_code != 0:
        return 'Failed to download Python installer'
      exit_code = execution_utils.Exec(
          ['tar', '-xf', MACOS_PYTHON], no_exit=True)
      if exit_code != 0:
        return 'Failed to extract Python installer'
      exit_code = execution_utils.Exec(
          ['sudo', 'installer', '-target', '/', '-pkg', './' + installer_pkg],
          no_exit=True)
      if exit_code != 0:
        return 'Installer failed.'
  return None
def PromptAndInstallPythonOnMac():
  """Offers to install the supported Python version on macOS hosts.

  No-op on other operating systems. If the target interpreter is missing, the
  user is prompted before anything is installed; on success the gcloud
  virtual environment is (re)built against the new interpreter.
  """
  if platforms.OperatingSystem.Current() != platforms.OperatingSystem.MACOSX:
    return
  print(
      f'\nGoogle Cloud CLI works best with Python {PYTHON_VERSION} '
      'and certain modules.\n')
  # The target interpreter location depends on how gcloud was installed.
  homebrew_installed = _IsHomebrewInstalled()
  if homebrew_installed:
    python_to_use = f'{HOMEBREW_BIN}/python{PYTHON_VERSION}'
  else:
    python_to_use = f'{MACOS_PYTHON_INSTALL_PATH}bin/python3'
  install_errors = None
  if not os.path.isfile(python_to_use):
    # Ask permission before downloading or installing anything.
    if not console_io.PromptContinue(
        prompt_string=_PromptPythonInstall(), default=True):
      return
    install_errors = (
        _BrewInstallPython() if homebrew_installed else _MacInstallPython())
  if install_errors:
    print(f'Failed to install Python. Error: {install_errors}')
    return
  os.environ['CLOUDSDK_PYTHON'] = python_to_use
  print('Setting up virtual environment')
  UpdatePythonDependencies(python_to_use)
def CheckXcodeCommandLineToolsInstalled() -> bool:
  """Checks if Xcode Command Line Tools is installed."""
  # `xcode-select -p` exits 0 only when a developer directory is configured.
  return execution_utils.Exec(['xcode-select', '-p'], no_exit=True) == 0
def PromptAndInstallXcodeCommandLineTools():
  """Optionally install Xcode Command Line Tools on Mac machines.

  No-op on non-macOS hosts or when the tools are already present; otherwise
  asks the user before kicking off `xcode-select --install`.
  """
  if platforms.OperatingSystem.Current() != platforms.OperatingSystem.MACOSX:
    return
  if CheckXcodeCommandLineToolsInstalled():
    print('Xcode Command Line Tools is already installed.')
    return
  prompt = (
      'Xcode Command Line Tools is required to install Python. Continue to'
      ' install'
  )
  if not console_io.PromptContinue(prompt_string=prompt, default=True):
    return
  print('Installing Xcode Command Line Tools...')
  exit_code = execution_utils.Exec(['xcode-select', '--install'], no_exit=True)
  if exit_code == 0:
    print('Xcode Command Line Tools is installed.')
  else:
    print('Failed to install Xcode Command Line Tools. '
          'Please run `xcode-select --install` manually to install '
          'Xcode Command Line Tools.')

View File

@@ -0,0 +1,232 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains utilities for comparing RELEASE_NOTES between Cloud SDK versions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.core import config
from googlecloudsdk.core import log
from googlecloudsdk.core.document_renderers import render_document
from googlecloudsdk.core.updater import installers
import requests
from six.moves import StringIO
class ReleaseNotes(object):
  """Represents a parsed RELEASE_NOTES file.

  The file should have the general structure of:

  # Google Cloud SDK - Release Notes

  Copyright 2014-2015 Google LLC. All rights reserved.

  ## 0.9.78 (2015/09/16)

  * Note

  * Note 2

  ## 0.9.77 (2015/09/09)

  * Note 3
  """

  # Matches one '## <version> ...' section including its body. Lookarounds
  # ensure each section is introduced by a blank line without consuming the
  # blank line that belongs to the next section's match.
  _VERSION_SPLIT_REGEX = (
      r'(?<=\n)\n## +(?P<version>\S+).*\n(?:\n.*(?!\n\n## ))+.')

  # Beyond this many entries in a diff, users are sent to the web page.
  MAX_DIFF = 15

  @classmethod
  def FromURL(cls, url, command_path=None):
    """Parses release notes from the given URL using the requests library.

    Any error in downloading or parsing release notes is logged and swallowed
    and None is returned.

    Args:
      url: str, The URL to download and parse.
      command_path: str, The command that is calling this for instrumenting
        the user agent for the download.

    Returns:
      ReleaseNotes, the parsed release notes or None if an error occurred.
    """
    try:
      response = installers.MakeRequest(url, command_path)
      if response is None or response.status_code != requests.codes.ok:
        return None
      return cls(response.text)
    # pylint: disable=broad-except, We don't want any failure to download or
    # parse the release notes to block an update. Returning None here will
    # print a generic message of where the user can go to view the release
    # notes online.
    except Exception:
      log.debug('Failed to download [{url}]'.format(url=url), exc_info=True)
      return None

  def __init__(self, text):
    """Parse the release notes from the given text.

    Args:
      text: str, The text of the release notes to parse.
    """
    self._text = text.replace('\r\n', '\n')
    # [(version string, full version text including header), ...]
    self._versions = [
        (match.group('version'), match.group().strip())
        for match in re.finditer(ReleaseNotes._VERSION_SPLIT_REGEX,
                                 self._text)
    ]

  def GetVersionText(self, version):
    """Gets the release notes text for the given version.

    Args:
      version: str, The version to get the release notes for.

    Returns:
      str, The release notes or None if the version does not exist.
    """
    index = self._GetVersionIndex(version)
    return None if index is None else self._versions[index][1]

  def _GetVersionIndex(self, version):
    """Gets the index of the given version in the list of parsed versions.

    Args:
      version: str, The version to get the index for.

    Returns:
      int, The index of the given version or None if not found.
    """
    for index, (parsed_version, _) in enumerate(self._versions):
      if parsed_version == version:
        return index
    return None

  def Diff(self, start_version, end_version):
    """Creates a diff of the release notes between the two versions.

    The release notes are returned in reversed order (most recent first).

    Args:
      start_version: str, The version at which to start the diff. This should
        be the later of the two versions. The diff will start with this
        version and go backwards in time until end_version is hit. If None,
        the diff will start at the most recent entry.
      end_version: str, The version at which to stop the diff. This should be
        the version you are currently on. The diff is accumulated until this
        version it hit. This version is not included in the diff. If None,
        the diff will include through the end of all release notes.

    Returns:
      [(version, text)], The list of release notes in the diff from most
      recent to least recent, or None if either version is not present in the
      release notes.
    """
    if start_version:
      start_index = self._GetVersionIndex(start_version)
      if start_index is None:
        return None
    else:
      start_index = 0
    if end_version:
      end_index = self._GetVersionIndex(end_version)
      if end_index is None:
        return None
    else:
      end_index = len(self._versions)
    return self._versions[start_index:end_index]
def PrintReleaseNotesDiff(release_notes_url, current_version, latest_version):
  """Prints the release notes diff based on your current version.

  If any of the arguments are None, a generic message will be printed telling
  the user to go to the web to view the release notes. If the
  release_notes_url is also None, it will print the developers site page for
  the SDK.

  Args:
    release_notes_url: str, The URL to download the latest release notes from.
    current_version: str, The current version of the SDK you have installed.
    latest_version: str, The version you are about to update to.
  """
  if release_notes_url and current_version and latest_version:
    notes = ReleaseNotes.FromURL(release_notes_url)
    if notes:
      # Diff from the version being installed back to (but excluding) the
      # currently installed version.
      release_notes_diff = notes.Diff(latest_version, current_version)
    else:
      release_notes_diff = None
  else:
    release_notes_diff = None
  if not release_notes_diff:
    # We failed to print the release notes. Send people to a nice web page
    # with the release notes.
    log.status.write(
        'For the latest full release notes, please visit:\n {0}\n\n'.format(
            config.INSTALLATION_CONFIG.release_notes_url))
    return
  if len(release_notes_diff) > ReleaseNotes.MAX_DIFF:
    # Too many entries to usefully show inline; refer users to the web page.
    log.status.Print("""\
A lot has changed since your last upgrade. For the latest full release notes,
please visit:
  {0}
""".format(config.INSTALLATION_CONFIG.release_notes_url))
    return
  log.status.Print("""\
The following release notes are new in this upgrade.
Please read carefully for information about new features, breaking changes,
and bugs fixed. The latest full release notes can be viewed at:
  {0}
""".format(config.INSTALLATION_CONFIG.release_notes_url))
  # Concatenate the markdown sections and render them as plain text to the
  # status stream.
  full_text = StringIO()
  for _, text in release_notes_diff:
    full_text.write(text)
    full_text.write('\n')
  full_text.seek(0)
  render_document.RenderDocument('text', full_text, log.status)
  log.status.Print()

View File

@@ -0,0 +1,979 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains object representations of the JSON data for components."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
import time
from googlecloudsdk.core import config
from googlecloudsdk.core import log
from googlecloudsdk.core.util import platforms
from googlecloudsdk.core.util import semver
import six
class Error(Exception):
  """Base exception for the schemas module."""
class ParseError(Error):
  """An error for when a component snapshot cannot be parsed."""
class DictionaryParser(object):
  """A helper class to parse elements out of a JSON dictionary."""

  def __init__(self, cls, dictionary):
    """Initializes the parser.

    Args:
      cls: class, The class that is doing the parsing (used for error
        messages).
      dictionary: dict, The JSON dictionary to parse.
    """
    self.__cls = cls
    self.__dictionary = dictionary
    self.__args = {}

  def Args(self):
    """Gets the dictionary of all parsed arguments.

    Returns:
      dict, The dictionary of field name to value for all parsed arguments.
    """
    return self.__args

  def _Get(self, field, default, required):
    # A missing required field is a schema violation.
    if required and field not in self.__dictionary:
      raise ParseError('Required field [{0}] not found while parsing [{1}]'
                       .format(field, self.__cls))
    return self.__dictionary.get(field, default)

  def Parse(self, field, required=False, default=None, func=None):
    """Parses a single element out of the dictionary.

    A field whose resolved value is None is left out of Args() entirely.

    Args:
      field: str, The name of the field to parse.
      required: bool, If the field must be present or not (False by default).
      default: str or dict, The value to use if a non-required field is not
        present.
      func: An optional function to call with the value before returning (if
        value is not None). It takes a single parameter and returns a single
        new value to be used instead.

    Raises:
      ParseError: If a required field is not found or if the field parsed is
        a list.
    """
    value = self._Get(field, default, required)
    if value is None:
      return
    if isinstance(value, list):
      raise ParseError('Did not expect a list for field [{field}] in '
                       'component [{component}]'.format(
                           field=field, component=self.__cls))
    self.__args[field] = func(value) if func else value

  def ParseList(self, field, required=False, default=None,
                func=None, sort=False):
    """Parses a element out of the dictionary that is a list of items.

    A field whose resolved value is falsy is left out of Args() entirely.

    Args:
      field: str, The name of the field to parse.
      required: bool, If the field must be present or not (False by default).
      default: str or dict, The value to use if a non-required field is not
        present.
      func: An optional function to call with each value in the parsed list
        before returning. It takes a single parameter and returns a single
        new value to be used instead.
      sort: bool, sort parsed list when it represents an unordered set.

    Raises:
      ParseError: If a required field is not found or if the field parsed is
        not a list.
    """
    value = self._Get(field, default, required)
    if not value:
      return
    if not isinstance(value, list):
      raise ParseError('Expected a list for field [{0}] in component [{1}]'
                       .format(field, self.__cls))
    if func:
      value = [func(item) for item in value]
    self.__args[field] = sorted(value) if sort else value

  def ParseDict(self, field, required=False, default=None, func=None):
    """Parses a element out of the dictionary that is a dictionary of items.

    Unlike Parse, the value here is a mapping of unknown keys to objects, and
    func (if given) is applied to each value.

    Args:
      field: str, The name of the field to parse.
      required: bool, If the field must be present or not (False by default).
      default: str or dict, The value to use if a non-required field is not
        present.
      func: An optional function to call with each value in the parsed dict
        before returning. It takes a single parameter and returns a single
        new value to be used instead.

    Raises:
      ParseError: If a required field is not found or if the field parsed is
        not a dict.
    """
    value = self._Get(field, default, required)
    if not value:
      return
    if not isinstance(value, dict):
      raise ParseError('Expected a dict for field [{0}] in component [{1}]'
                       .format(field, self.__cls))
    if func:
      value = dict((k, func(v)) for k, v in six.iteritems(value))
    self.__args[field] = value
class DictionaryWriter(object):
  """Class to help writing these objects back out to a dictionary."""

  def __init__(self, obj):
    self.__obj = obj
    self.__dictionary = {}

  @staticmethod
  def AttributeGetter(attrib):
    """Returns a function mapping an object to one of its attributes.

    None maps to None, so writers compose safely over optional values.
    """
    def Inner(obj):
      return None if obj is None else getattr(obj, attrib)
    return Inner

  def Write(self, field, func=None):
    """Writes the given field to the dictionary.

    Reads the attribute named field from the wrapped object and stores it.
    Unset (None) values are skipped entirely.

    Args:
      field: str, The field name.
      func: An optional function to call on the value of the field before
        writing it to the dictionary.
    """
    value = getattr(self.__obj, field)
    if value is None:
      return
    self.__dictionary[field] = func(value) if func else value

  def WriteList(self, field, func=None):
    """Writes the given list field to the dictionary.

    Args:
      field: str, The field name.
      func: An optional function to call on each value in the list before
        writing it to the dictionary.
    """
    if func:
      def MapEach(values):
        return [func(v) for v in values]
      self.Write(field, func=MapEach)
    else:
      self.Write(field)

  def WriteDict(self, field, func=None):
    """Writes the given dict field to the dictionary.

    Args:
      field: str, The field name.
      func: An optional function to call on each value in the dict before
        writing it to the dictionary.
    """
    if func:
      def MapValues(values):
        return dict((k, func(v)) for k, v in six.iteritems(values))
      self.Write(field, func=MapValues)
    else:
      self.Write(field)

  def Dictionary(self):
    """Returns the accumulated dictionary."""
    return self.__dictionary
class ComponentDetails(object):
  """Encapsulates some general information about the component.

  Attributes:
    display_name: str, The user facing name of the component.
    description: str, A little more details about what the component does.
  """

  def __init__(self, display_name, description):
    self.display_name = display_name
    self.description = description

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a ComponentDetails from its JSON dictionary form."""
    parser = DictionaryParser(cls, dictionary)
    for field in ('display_name', 'description'):
      parser.Parse(field, required=True)
    return cls(**parser.Args())

  def ToDictionary(self):
    """Serializes this object back to a JSON-style dictionary."""
    writer = DictionaryWriter(self)
    writer.Write('display_name')
    writer.Write('description')
    return writer.Dictionary()
class ComponentVersion(object):
  """Version information for the component.

  Attributes:
    build_number: int, The unique, monotonically increasing version of the
      component.
    version_string: str, The user facing version for the component.
  """

  def __init__(self, build_number, version_string):
    self.build_number = build_number
    self.version_string = version_string

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a ComponentVersion from its JSON dictionary form."""
    parser = DictionaryParser(cls, dictionary)
    for field in ('build_number', 'version_string'):
      parser.Parse(field, required=True)
    return cls(**parser.Args())

  def ToDictionary(self):
    """Serializes this object back to a JSON-style dictionary."""
    writer = DictionaryWriter(self)
    writer.Write('build_number')
    writer.Write('version_string')
    return writer.Dictionary()
class ComponentData(object):
  """Information on the data source for the component.

  Attributes:
    type: str, The type of the source of this data (i.e. tar).
    source: str, The hosted location of the component.
    size: int, The size of the component in bytes.
    checksum: str, The hex digest of the archive file.
    contents_checksum: str, The hex digest of the contents of all files in the
      archive.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a ComponentData from its JSON dictionary form.

    Raises:
      ParseError: If 'type' or 'source' is missing.
    """
    p = DictionaryParser(cls, dictionary)
    p.Parse('type', required=True)
    p.Parse('source', required=True)
    p.Parse('size')
    p.Parse('checksum')
    p.Parse('contents_checksum')
    return cls(**p.Args())

  def ToDictionary(self):
    """Serializes this object back to a JSON-style dictionary.

    Unset (None) fields are omitted.
    """
    w = DictionaryWriter(self)
    w.Write('type')
    w.Write('source')
    w.Write('size')
    w.Write('checksum')
    w.Write('contents_checksum')
    return w.Dictionary()

  # pylint: disable=redefined-builtin, params must match JSON names
  def __init__(self, type, source, size=None, checksum=None,
               contents_checksum=None):
    """Creates ComponentData.

    The optional fields default to None: DictionaryParser omits absent
    optional fields from Args(), so without defaults FromDictionary would
    raise TypeError for a dictionary that legitimately leaves them out.
    """
    self.type = type
    self.source = source
    self.size = size
    self.checksum = checksum
    self.contents_checksum = contents_checksum
class ComponentPlatform(object):
  """Information on the applicable platforms for the component.

  Attributes:
    operating_systems: [platforms.OperatingSystem], The operating systems this
      component is valid on. If [] or None, it is valid on all operating
      systems.
    architectures: [platforms.Architecture], The architectures this component
      is valid on. If [] or None, it is valid on all architectures.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Parses operating_systems and architectures from a dictionary."""
    p = DictionaryParser(cls, dictionary)
    # error_on_unknown=False here will prevent exception when trying to parse
    # a manifest that has OS or Arch that we don't understand. If we can't
    # parse it, None will be put in the list. This will allow the Matches()
    # logic below to know that a filter was actually specified, but will make
    # it impossible for our current OS or Arch to match that filter. If the
    # filter has multiple values, we could still match even though we can't
    # parse one of the filter values.
    # pylint: disable=g-long-lambda
    p.ParseList('operating_systems',
                func=lambda value: platforms.OperatingSystem.FromId(
                    value, error_on_unknown=False))
    p.ParseList('architectures',
                func=lambda value: platforms.Architecture.FromId(
                    value, error_on_unknown=False))
    return cls(**p.Args())

  def ToDictionary(self):
    w = DictionaryWriter(self)
    w.WriteList('operating_systems',
                func=DictionaryWriter.AttributeGetter('id'))
    w.WriteList('architectures', func=DictionaryWriter.AttributeGetter('id'))
    return w.Dictionary()

  def __init__(self, operating_systems=None, architectures=None):
    """Creates a new ComponentPlatform.

    Both parameters default to None (meaning "valid everywhere") because
    ParseList omits absent or empty fields from Args(); without defaults,
    FromDictionary would raise TypeError on a platform-independent component
    whose dictionary omits the filters.

    Args:
      operating_systems: list(platforms.OperatingSystem), The OSes this
        component should be installed on. None indicates all OSes.
      architectures: list(platforms.Architecture), The processor architectures
        this component works on. None indicates all architectures.
    """
    # Sort to make this independent of specified ordering; None entries
    # (unparseable filter values) sort first.
    self.operating_systems = operating_systems and sorted(
        operating_systems, key=lambda x: (0, x) if x is None else (1, x))
    self.architectures = architectures and sorted(
        architectures, key=lambda x: (0, x) if x is None else (1, x))

  def Matches(self, platform):
    """Determines if the platform for this component matches the environment.

    For both operating system and architecture, it is a match if:
      - No filter is given (regardless of platform value)
      - A filter is given but the value in platform matches one of the values
        in the filter.

    It is a match iff both operating system and architecture match.

    Args:
      platform: platform.Platform, The platform that must be matched. None
        will match only platform-independent components.

    Returns:
      True if it matches or False if not.
    """
    if not platform:
      my_os, my_arch = None, None
    else:
      my_os, my_arch = platform.operating_system, platform.architecture
    if self.operating_systems:
      # Some OS filter was specified; we must be on an OS in the filter.
      if not my_os or my_os not in self.operating_systems:
        return False
    if self.architectures:
      # Some arch filter was specified; we must be on an arch in the filter.
      if not my_arch or my_arch not in self.architectures:
        return False
    return True

  def IntersectsWith(self, other):
    """Determines if this platform intersects with the other platform.

    Platforms intersect if they can both potentially be installed on the same
    system.

    Args:
      other: ComponentPlatform, The other component platform to compare
        against.

    Returns:
      bool, True if there is any intersection, False otherwise.
    """
    os_overlap = self.__CollectionsIntersect(
        self.operating_systems, other.operating_systems)
    arch_overlap = self.__CollectionsIntersect(
        self.architectures, other.architectures)
    return os_overlap and arch_overlap

  def __CollectionsIntersect(self, collection1, collection2):
    """Determines if the two collections intersect.

    The collections intersect if either or both are None or empty (meaning
    "all"), or if they contain common elements.

    Args:
      collection1: [] or None, The first collection.
      collection2: [] or None, The second collection.

    Returns:
      Truthy if there is an intersection, falsy otherwise.
    """
    # If either is None (valid for all) then they definitely intersect.
    if not collection1 or not collection2:
      return True
    # Both specify values; truthy iff at least one element is shared.
    return set(collection1) & set(collection2)
class Component(object):
  """Data type for an entire component.

  Attributes:
    id: str, The unique id for this component.
    details: ComponentDetails, More descriptions of the components.
    version: ComponentVersion, Information about the version of this component.
    is_hidden: bool, True if this should be hidden from the user.
    is_required: bool, True if this component must always be installed.
    gdu_only: bool, True if this component is only available in GDU.
    is_configuration: bool, True if this should be displayed in the packages
      section of the component manager.
    data: ComponentData, Information about where to get the component from.
    platform: ComponentPlatform, Information about what operating systems and
      architectures the component is valid on.
    dependencies: [str], The other components required by this one.
    platform_required: bool, True if a platform-specific executable is
      required.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Converts a dictionary object to an instantiated Component class.

    Args:
      dictionary: The Dictionary to convert from.

    Returns:
      A Component object initialized from the dictionary object.
    """
    p = DictionaryParser(cls, dictionary)
    p.Parse('id', required=True)
    p.Parse('details', required=True, func=ComponentDetails.FromDictionary)
    p.Parse('version', required=True, func=ComponentVersion.FromDictionary)
    p.Parse('is_hidden', default=False)
    p.Parse('is_required', default=False)
    p.Parse('gdu_only', default=True)
    p.Parse('is_configuration', default=False)
    p.Parse('data', func=ComponentData.FromDictionary)
    p.Parse('platform', default={}, func=ComponentPlatform.FromDictionary)
    # sort=True: dependencies are stored in sorted order so the parsed result
    # is independent of the ordering in the input dictionary.
    p.ParseList('dependencies', default=[], sort=True)
    p.Parse('platform_required', default=False)
    return cls(**p.Args())

  def ToDictionary(self):
    """Converts a Component object to a Dictionary object.

    Returns:
      A Dictionary object initialized from self.
    """
    w = DictionaryWriter(self)
    w.Write('id')
    w.Write('details', func=ComponentDetails.ToDictionary)
    w.Write('version', func=ComponentVersion.ToDictionary)
    w.Write('is_hidden')
    w.Write('is_required')
    w.Write('gdu_only')
    w.Write('is_configuration')
    w.Write('data', func=ComponentData.ToDictionary)
    w.Write('platform', func=ComponentPlatform.ToDictionary)
    w.WriteList('dependencies')
    w.Write('platform_required')
    return w.Dictionary()

  # pylint: disable=redefined-builtin, params must match JSON names
  def __init__(self, id, details, version, dependencies, data, is_hidden,
               is_required, gdu_only, is_configuration, platform,
               platform_required):
    self.id = id
    self.details = details
    self.version = version
    self.is_hidden = is_hidden
    self.is_required = is_required
    self.gdu_only = gdu_only
    self.is_configuration = is_configuration
    self.platform = platform
    self.data = data
    self.dependencies = dependencies
    self.platform_required = platform_required
class Notification(object):
  """Data type for an update notification's notification object.

  Attributes:
    annotation: str, A message to print before the normal update message.
    update_to_version: str, A version string to tell the user to update to.
    custom_message: str, An alternate message to print instead of the usual
      one.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a Notification from its dictionary representation.

    Args:
      dictionary: The Dictionary to convert from.

    Returns:
      A Notification object initialized from the dictionary object.
    """
    parser = DictionaryParser(cls, dictionary)
    for field in ('annotation', 'update_to_version', 'custom_message'):
      parser.Parse(field)
    return cls(**parser.Args())

  def ToDictionary(self):
    """Serializes this Notification back to a dictionary.

    Returns:
      A Dictionary object initialized from self.
    """
    writer = DictionaryWriter(self)
    for field in ('annotation', 'update_to_version', 'custom_message'):
      writer.Write(field)
    return writer.Dictionary()

  def __init__(self, annotation, update_to_version, custom_message):
    self.annotation = annotation
    self.update_to_version = update_to_version
    self.custom_message = custom_message

  def NotificationMessage(self):
    """Gets the notification message to print to the user.

    Returns:
      str, The notification message the user should see.
    """
    if self.custom_message:
      # A custom message completely replaces the standard update text.
      return '\n\n' + self.custom_message + '\n\n'
    parts = []
    if self.annotation:
      parts.append(self.annotation + '\n\n')
    version_string = ''
    if self.update_to_version:
      version_string = ' --version ' + self.update_to_version
    parts.append("""\
Updates are available for some Google Cloud CLI components. To install them,
please run:
  $ gcloud components update{version}""".format(version=version_string))
    return '\n\n' + ''.join(parts) + '\n\n'
class Trigger(object):
  """Data type for an update notification's trigger object.

  Attributes:
    frequency: int, The number of seconds between notifications.
    command_regex: str, A regular expression to match a command name. The
      notification will only trigger when running a command that matches this
      regex.
  """

  DEFAULT_NAG_FREQUENCY = 86400  # One day

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a Trigger from its dictionary representation.

    Args:
      dictionary: The Dictionary to convert from.

    Returns:
      A Trigger object initialized from the dictionary object.
    """
    parser = DictionaryParser(cls, dictionary)
    parser.Parse('frequency', default=Trigger.DEFAULT_NAG_FREQUENCY)
    parser.Parse('command_regex')
    return cls(**parser.Args())

  def ToDictionary(self):
    """Serializes this Trigger back to a dictionary.

    Returns:
      A Dictionary object initialized from self.
    """
    writer = DictionaryWriter(self)
    writer.Write('frequency')
    writer.Write('command_regex')
    return writer.Dictionary()

  def __init__(self, frequency, command_regex):
    self.frequency = frequency
    self.command_regex = command_regex

  def Matches(self, last_nag_time, command_path=None):
    """Determine if this trigger matches and the notification should be printed.

    Args:
      last_nag_time: int, The time we last printed this notification in
        seconds since the epoch.
      command_path: str, The name of the command currently being run
        (i.e. gcloud.components.list).

    Returns:
      True if the trigger matches, False otherwise.
    """
    # Too soon since the last nag for this notification; stay quiet.
    if time.time() - last_nag_time < self.frequency:
      return False
    # No regex restriction means any command triggers the notification.
    if not self.command_regex:
      return True
    # An unknown command name is a non-match if a regex is specified.
    if not command_path:
      return False
    return re.match(self.command_regex, command_path) is not None
class Condition(object):
  """Data type for an update notification's condition object.

  Attributes:
    start_version: str, The current version of the SDK must be greater than or
      equal to this version in order to activate the notification.
    end_version: str, The current version of the SDK must be less than or equal
      to this version in order to activate the notification.
    version_regex: str, A regex to match the current version of the SDK to
      activate this notification.
    age: int, The number of seconds old this SDK version must be to activate
      this notification.
    check_components: bool, True to require that component updates are actually
      present to activate this notification, False to skip this check.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Converts a dictionary object to an instantiated Condition class.

    Args:
      dictionary: The Dictionary to convert from.

    Returns:
      A Condition object initialized from the dictionary object.
    """
    p = DictionaryParser(cls, dictionary)
    p.Parse('start_version')
    p.Parse('end_version')
    p.Parse('version_regex')
    p.Parse('age')
    p.Parse('check_components', default=True)
    return cls(**p.Args())

  def ToDictionary(self):
    """Converts a Condition object to a Dictionary object.

    Returns:
      A Dictionary object initialized from self.
    """
    w = DictionaryWriter(self)
    w.Write('start_version')
    w.Write('end_version')
    w.Write('version_regex')
    w.Write('age')
    w.Write('check_components')
    return w.Dictionary()

  def __init__(
      self, start_version, end_version, version_regex, age,
      check_components):
    self.start_version = start_version
    self.end_version = end_version
    self.version_regex = version_regex
    self.age = age
    self.check_components = check_components

  def Matches(self, current_version, current_revision,
              component_updates_available):
    """Determines if this notification should be activated for this SDK.

    All configured checks (version bounds, version regex, age, component
    updates) must pass for the condition to match; unset checks are skipped.

    Args:
      current_version: str, The installed version of the SDK (i.e. 1.2.3)
      current_revision: long, The revision (from the component snapshot) that is
        currently installed. This is a long int but formatted as an actual
        date in seconds (i.e 20151009132504). It is *NOT* seconds since the
        epoch.
      component_updates_available: bool, True if there are updates available for
        some components that are currently installed.

    Returns:
      True if the notification should be activated, False to ignore it.
    """
    if (current_version is None and
        (self.start_version or self.end_version or self.version_regex)):
      # If we don't know what version we have, don't match a condition that
      # relies on specific version information.
      return False
    try:
      if (self.start_version and
          semver.SemVer(current_version) < semver.SemVer(self.start_version)):
        return False
      if (self.end_version and
          semver.SemVer(current_version) > semver.SemVer(self.end_version)):
        return False
    except semver.ParseError:
      # Failed to parse something, condition does not match.
      log.debug('Failed to parse semver, condition not matching.',
                exc_info=True)
      return False
    if self.version_regex and not re.match(self.version_regex, current_version):
      return False
    if self.age is not None:
      if current_revision is None:
        # We don't know the current revision, not a match.
        return False
      try:
        now = time.time()
        # The revision encodes the release timestamp; convert it to seconds
        # since the epoch so it can be compared against the configured age.
        last_updated = config.InstallationConfig.ParseRevisionAsSeconds(
            current_revision)
        if now - last_updated < self.age:
          return False
      except ValueError:
        # If we could not parse our current revision, don't match the age
        # condition.
        log.debug('Failed to parse revision, condition not matching.',
                  exc_info=True)
        return False
    if self.check_components and not component_updates_available:
      return False
    return True
class NotificationSpec(object):
  """Data type for an update notification object.

  Attributes:
    condition: Condition, The settings for whether or not this notification
      should be activated by a particular installation.
    trigger: Trigger, The settings for whether to trigger an activated
      notification on a particular command execution.
    notification: Notification, The settings about how to actually express
      the notification to the user once it is triggered.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Builds a NotificationSpec from its dictionary representation.

    Args:
      dictionary: The Dictionary to convert from.

    Returns:
      A NotificationSpec object initialized from the dictionary object.
    """
    parser = DictionaryParser(cls, dictionary)
    parser.Parse('id', required=True)
    # Each sub-object defaults to an empty dict so its own parser fills in
    # that object's field defaults.
    for field, func in (('condition', Condition.FromDictionary),
                        ('trigger', Trigger.FromDictionary),
                        ('notification', Notification.FromDictionary)):
      parser.Parse(field, default={}, func=func)
    return cls(**parser.Args())

  def ToDictionary(self):
    """Serializes this NotificationSpec back to a dictionary.

    Returns:
      A Dictionary object initialized from self.
    """
    writer = DictionaryWriter(self)
    writer.Write('id')
    writer.Write('condition', func=Condition.ToDictionary)
    writer.Write('trigger', func=Trigger.ToDictionary)
    writer.Write('notification', func=Notification.ToDictionary)
    return writer.Dictionary()

  # pylint: disable=redefined-builtin, params must match JSON names
  def __init__(self, id, condition, trigger, notification):
    self.id = id
    self.condition = condition
    self.trigger = trigger
    self.notification = notification
class SchemaVersion(object):
  """Information about the schema version of this snapshot file.

  Attributes:
    version: int, The schema version number. A different number is considered
      incompatible.
    no_update: bool, True if this installation should not attempt to be
      updated.
    message: str, A message to display to the user if they are updating to this
      new schema version.
    url: str, The URL to grab a fresh Cloud SDK bundle.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Converts a dictionary object to an instantiated SchemaVersion.

    Args:
      dictionary: The Dictionary to convert from. 'version' and 'url' are
        required keys.

    Returns:
      A SchemaVersion object initialized from the dictionary object.
    """
    p = DictionaryParser(cls, dictionary)
    p.Parse('version', required=True)
    p.Parse('no_update', default=False)
    p.Parse('message')
    p.Parse('url', required=True)
    return cls(**p.Args())

  def ToDictionary(self):
    """Converts this SchemaVersion to a Dictionary object.

    Returns:
      A Dictionary object initialized from self.
    """
    w = DictionaryWriter(self)
    w.Write('version')
    w.Write('no_update')
    w.Write('message')
    w.Write('url')
    return w.Dictionary()

  def __init__(self, version, no_update, message, url):
    self.version = version
    self.no_update = no_update
    self.message = message
    self.url = url
class SDKDefinition(object):
  """Top level object for the entire component snapshot.

  Attributes:
    revision: int, The unique, monotonically increasing version of the snapshot.
    release_notes_url: string, The URL where the latest release notes can be
      downloaded.
    version: str, The version name of this release (i.e. 1.2.3). This should be
      used only for informative purposes during an update (to say what version
      you are updating to).
    gcloud_rel_path: str, The path to the gcloud entrypoint relative to the SDK
      root.
    post_processing_command: str, The gcloud subcommand to run to do
      post-processing after an update. This will be split on spaces before
      being executed.
    components: [Component], The component definitions.
    notifications: [NotificationSpec], The active update notifications.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Converts a dictionary object to an instantiated SDKDefinition."""
    p = cls._ParseBase(dictionary)
    p.Parse('revision', required=True)
    p.Parse('release_notes_url')
    p.Parse('version')
    p.Parse('gcloud_rel_path')
    p.Parse('post_processing_command')
    p.ParseList('components', required=True, func=Component.FromDictionary)
    p.ParseList('notifications', default=[],
                func=NotificationSpec.FromDictionary)
    return cls(**p.Args())

  @classmethod
  def SchemaVersion(cls, dictionary):
    """Extracts only the parsed schema_version field from the dictionary."""
    return cls._ParseBase(dictionary).Args()['schema_version']

  @classmethod
  def _ParseBase(cls, dictionary):
    """Parses the schema_version field shared by all snapshot parsing paths."""
    p = DictionaryParser(cls, dictionary)
    # NOTE: 'SchemaVersion' here resolves to the module-level SchemaVersion
    # class, not the classmethod of the same name above (class attributes are
    # not in scope inside method bodies).
    p.Parse('schema_version', default={'version': 1, 'url': ''},
            func=SchemaVersion.FromDictionary)
    return p

  def ToDictionary(self):
    """Converts this SDKDefinition to a Dictionary object."""
    w = DictionaryWriter(self)
    w.Write('revision')
    w.Write('release_notes_url')
    w.Write('version')
    w.Write('gcloud_rel_path')
    w.Write('post_processing_command')
    w.Write('schema_version', func=SchemaVersion.ToDictionary)
    w.WriteList('components', func=Component.ToDictionary)
    w.WriteList('notifications', func=NotificationSpec.ToDictionary)
    return w.Dictionary()

  def __init__(self, revision, schema_version, release_notes_url, version,
               gcloud_rel_path, post_processing_command,
               components, notifications):
    self.revision = revision
    self.schema_version = schema_version
    self.release_notes_url = release_notes_url
    self.version = version
    self.gcloud_rel_path = gcloud_rel_path
    self.post_processing_command = post_processing_command
    self.components = components
    self.notifications = notifications

  def LastUpdatedString(self):
    """Returns the snapshot revision formatted as YYYY/MM/DD.

    Returns:
      str, The formatted date, or 'Unknown' if the revision cannot be parsed.
    """
    try:
      last_updated = config.InstallationConfig.ParseRevision(self.revision)
      return time.strftime('%Y/%m/%d', last_updated)
    except ValueError:
      return 'Unknown'

  def Merge(self, sdk_def):
    """Merges the components of another SDKDefinition into this one.

    Components from sdk_def replace any existing components with the same id;
    components with new ids are appended.

    Args:
      sdk_def: SDKDefinition, The other definition whose components to fold
        into this one.
    """
    current_components = dict((c.id, c) for c in self.components)
    for c in sdk_def.components:
      if c.id in current_components:
        self.components.remove(current_components[c.id])
        # Keep the id map current in case sdk_def repeats an id.
        current_components[c.id] = c
      self.components.append(c)
class LastUpdateCheck(object):
  """Top level object for the cache of the last time an update check was done.

  Attributes:
    last_update_check_time: int, The time of the last update check in seconds
      since the epoch.
    last_update_check_revision: long, The revision of the snapshot that was
      seen at the last update check.
    notifications: [NotificationSpec], The notifications that were activated
      at the last update check.
    last_nag_times: {str: int}, A mapping of notification id to the last time
      (seconds since the epoch) the user was notified for it.
  """

  @classmethod
  def FromDictionary(cls, dictionary):
    """Converts a dictionary object to an instantiated LastUpdateCheck.

    Args:
      dictionary: The Dictionary to convert from. All fields are optional and
        default to empty values.

    Returns:
      A LastUpdateCheck object initialized from the dictionary object.
    """
    p = DictionaryParser(cls, dictionary)
    p.Parse('last_update_check_time', default=0)
    p.Parse('last_update_check_revision', default=0)
    p.ParseList('notifications', default=[],
                func=NotificationSpec.FromDictionary)
    p.ParseDict('last_nag_times', default={})
    return cls(**p.Args())

  def ToDictionary(self):
    """Converts this LastUpdateCheck to a Dictionary object.

    Returns:
      A Dictionary object initialized from self.
    """
    w = DictionaryWriter(self)
    w.Write('last_update_check_time')
    w.Write('last_update_check_revision')
    w.WriteList('notifications', func=NotificationSpec.ToDictionary)
    w.WriteDict('last_nag_times')
    return w.Dictionary()

  def __init__(self, last_update_check_time, last_update_check_revision,
               notifications, last_nag_times):
    self.last_update_check_time = last_update_check_time
    self.last_update_check_revision = last_update_check_revision
    self.notifications = notifications
    self.last_nag_times = last_nag_times

View File

@@ -0,0 +1,244 @@
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module implements update checking and notification to the user.
It provides a context manager around the cache file that stores information
about the last update check. The general process is as follows:
1) This stores the last time an update check occurred, so the check will only
be done if the update check frequency has expired.
2) When an update check is done, all notifications in the latest snapshot are
queried to see if their condition matches the current state of the SDK. Any
notifications that match are "activated" and cached.
3) Every time a command is run, Notify() is called to notify the user of
available updates. It loops over the activated notifications and determines
if any of the triggers match the current command invocation. If there is a
match, the notification is printed and the last nag time is recorded for that
particular notification. At most one notification is printed per command.
The priority is determined by the order the notifications are registered
in the component snapshot.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import os
import time
from googlecloudsdk.core import config
from googlecloudsdk.core import log
from googlecloudsdk.core.updater import schemas
from googlecloudsdk.core.util import files
import six
class UpdateCheckData(object):
  """A class to hold update checking data and to perform notifications.

  Acts as a context manager around the update-check cache file: changes are
  accumulated in memory (marking the object dirty) and persisted on exit.
  """

  UPDATE_CHECK_FREQUENCY_IN_SECONDS = 86400  # Once a day.

  def __init__(self):
    self._last_update_check_file = config.Paths().update_check_cache_path
    # Tracks whether in-memory state diverges from the file on disk; only a
    # dirty object is written back by _SaveData().
    self._dirty = False
    self._data = self._LoadData()

  def _LoadData(self):
    """Deserializes data from the json file."""
    # A missing or unparseable cache file degrades gracefully to an empty
    # cache rather than failing the command.
    if not os.path.isfile(self._last_update_check_file):
      return schemas.LastUpdateCheck.FromDictionary({})
    raw_data = files.ReadFileContents(self._last_update_check_file)
    try:
      data = json.loads(raw_data)
      return schemas.LastUpdateCheck.FromDictionary(data)
    except ValueError:
      log.debug('Failed to parse update check cache file. Using empty '
                'cache instead.')
      return schemas.LastUpdateCheck.FromDictionary({})

  def _SaveData(self):
    """Serializes data to the json file (no-op when nothing changed)."""
    if not self._dirty:
      return
    files.WriteFileContents(self._last_update_check_file,
                            json.dumps(self._data.ToDictionary()))
    self._dirty = False

  def __enter__(self):
    return self

  def __exit__(self, *args):
    # Persist any accumulated changes when leaving the context.
    self._SaveData()

  def LastUpdateCheckRevision(self):
    """Gets the revision of the snapshot from the last update check.

    Returns:
      long, The revision of the last checked snapshot. This is a long int but
      formatted as an actual date in seconds (i.e 20151009132504). It is *NOT*
      seconds since the epoch.
    """
    return self._data.last_update_check_revision

  def LastUpdateCheckTime(self):
    """Gets the time of the last update check as seconds since the epoch.

    Returns:
      int, The time of the last update check in seconds since the epoch.
    """
    return self._data.last_update_check_time

  def SecondsSinceLastUpdateCheck(self):
    """Gets the number of seconds since we last did an update check.

    Returns:
      int, The amount of time in seconds.
    """
    return time.time() - self._data.last_update_check_time

  def ShouldDoUpdateCheck(self):
    """Checks if it is time to do an update check.

    Returns:
      True, if enough time has elapsed and we should perform another update
      check. False otherwise.
    """
    return (self.SecondsSinceLastUpdateCheck() >=
            UpdateCheckData.UPDATE_CHECK_FREQUENCY_IN_SECONDS)

  def UpdatesAvailable(self):
    """Returns whether we already know about updates that are available.

    Returns:
      bool, True if we know about updates, False otherwise.
    """
    # Only count notifications whose condition actually verified that
    # component updates exist (check_components=True); others (e.g. the
    # incompatible-schema notification) are not evidence of updates.
    return bool([
        notification for notification in self._data.notifications
        if notification.condition.check_components
    ])

  def SetFromSnapshot(self, snapshot, component_updates_available, force=False):
    """Sets that we just did an update check and found the given snapshot.

    If the given snapshot is different than the last one we saw, refresh the set
    of activated notifications for available updates for any notifications with
    matching conditions.

    You must call Save() to persist these changes or use this as a context
    manager.

    Args:
      snapshot: snapshots.ComponentSnapshot, The latest snapshot available.
      component_updates_available: bool, True if there are updates to components
        we have installed. False otherwise.
      force: bool, True to force a recalculation of whether there are available
        updates, even if the snapshot revision has not changed.
    """
    if force or self.LastUpdateCheckRevision() != snapshot.revision:
      log.debug('Updating notification cache...')
      current_version = config.INSTALLATION_CONFIG.version
      current_revision = config.INSTALLATION_CONFIG.revision
      activated = []
      possible_notifications = snapshot.sdk_definition.notifications
      for notification in possible_notifications:
        if notification.condition.Matches(
            current_version, current_revision, component_updates_available):
          log.debug('Activating notification: [%s]', notification.id)
          activated.append(notification)
      self._data.notifications = activated
      self._CleanUpLastNagTimes()
    self._data.last_update_check_time = time.time()
    self._data.last_update_check_revision = snapshot.revision
    self._dirty = True

  def SetFromIncompatibleSchema(self):
    """Sets that we just did an update check and found a new schema version.

    An incompatible schema version means there are definitely updates available
    but we can't read the notifications to correctly notify the user. This will
    install a default notification for the incompatible schema.

    You must call Save() to persist these changes or use this as a context
    manager.
    """
    log.debug('Incompatible schema found. Activating default notification.')
    # Nag once a week to update if the schema changed and we don't know what's
    # going on anymore.
    notification_spec = schemas.NotificationSpec(
        id='incompatible',
        condition=schemas.Condition(None, None, None, None, False),
        trigger=schemas.Trigger(frequency=604800, command_regex=None),
        notification=schemas.Notification(None, None, None)
    )
    self._data.notifications = [notification_spec]
    self._CleanUpLastNagTimes()
    self._data.last_update_check_time = time.time()
    self._data.last_update_check_revision = 0  # Doesn't matter
    self._dirty = True

  def _CleanUpLastNagTimes(self):
    """Clean the map holding the last nag times for each notification.

    If a notification is no longer activated, it is removed from the map. Any
    notifications that are still activated have their last nag times preserved.
    """
    activated_ids = [n.id for n in self._data.notifications]
    self._data.last_nag_times = (
        dict(
            (name, value)
            for name, value in six.iteritems(self._data.last_nag_times)
            if name in activated_ids))

  def Notify(self, command_path):
    """Notify the user of any available updates.

    This should be called for every command that is run. It does not actually
    do an update check, and does not necessarily notify the user each time. The
    user will only be notified if there are activated notifications and if the
    trigger for one of the activated notifications matches. At most one
    notification will be printed per command. Order of priority is determined
    by the order in which the notifications are registered in the component
    snapshot file.

    Args:
      command_path: str, The '.' separated path of the command that is currently
        being run (i.e. gcloud.foo.bar).
    """
    # Only nag if we are running in an interactive terminal.
    if not log.out.isatty() or not log.status.isatty():
      return
    for notification in self._data.notifications:
      name = notification.id
      last_nag_time = self._data.last_nag_times.get(name, 0)
      # Only notify if the trigger matches. Exit as soon as one notification
      # is printed.
      if notification.trigger.Matches(last_nag_time, command_path):
        log.status.write(notification.notification.NotificationMessage())
        self._data.last_nag_times[name] = time.time()
        self._dirty = True
        break