feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants shared across Cloud SQL commands."""
# A list of flags that can be overridden from the source instance when creating
# a new Cloud SQL instance via a backup restore or PITR operation.
TARGET_INSTANCE_OVERRIDE_FLAGS = (
# go/keep-sorted start
'activation_policy',
'active_directory_dns_servers',
'active_directory_domain',
'active_directory_mode',
'active_directory_organizational_unit',
'active_directory_secret_manager_key',
'allowed_psc_projects',
'assign_ip',
'audit_bucket_path',
'authorized_networks',
'availability_type',
'backup',
'backup_location',
'backup_start_time',
'collation',
'connector_enforcement',
'cpu',
'database_version',
'deletion_protection',
'deny_maintenance_period_end_date',
'deny_maintenance_period_start_date',
'deny_maintenance_period_time',
'disk_encryption_key',
'disk_encryption_key_keyring',
'disk_encryption_key_location',
'disk_encryption_key_project',
'edition',
'enable_bin_log',
'enable_data_cache',
'enable_google_ml_integration',
'enable_google_private_path',
'enable_point_in_time_recovery',
'enable_private_service_connect',
'failover_replica_name',
'final_backup',
'final_backup_retention_days',
'insights_config_query_insights_enabled',
'insights_config_query_plans_per_minute',
'insights_config_query_string_length',
'insights_config_record_application_tags',
'insights_config_record_client_address',
'maintenance_release_channel',
'maintenance_window_day',
'maintenance_window_hour',
'memory',
'network',
'psc_auto_connections',
'region',
'require_ssl',
'retain_backups_on_delete',
'retained_backups_count',
'retained_transaction_log_days',
'server_ca_mode',
'ssl_mode',
'storage_auto_increase',
'storage_provisioned_iops',
'storage_provisioned_throughput',
'storage_size',
'storage_type',
'tags',
'tier',
'time_zone',
'timeout',
# go/keep-sorted end
)
# A list of flags that can be cleared when creating a new Cloud SQL instance via
# a backup restore or PITR operation. This is to allow users to remove certain
# overrides that might be inherited from the source instance, such as a private
# network.
TARGET_INSTANCE_CLEAR_FLAGS = (
# go/keep-sorted start
'clear_active_directory',
'clear_active_directory_dns_servers',
'clear_disk_encryption',
'clear_network',
# go/keep-sorted end
)
# 1h, based off of the max time it usually takes to create a SQL instance.
INSTANCE_CREATION_TIMEOUT_SECONDS = 3600

View File

@@ -0,0 +1,223 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions for sql export commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.sql import export_util
from googlecloudsdk.api_lib.sql import operations
from googlecloudsdk.api_lib.sql import validate
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.sql import flags
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
def AddBaseExportFlags(
    parser,
    gz_supported=True,
    database_required=False,
    database_help_text=flags.DEFAULT_DATABASE_LIST_EXPORT_HELP_TEXT):
  """Registers the flags shared by every sql export subcommand.

  Args:
    parser: The current argparse parser to add these flags to.
    gz_supported: Boolean, specifies whether gz compression is supported.
    database_required: Boolean, specifies whether the database flag is required.
    database_help_text: String, specifies the help text for the database flag.
  """
  base.ASYNC_FLAG.AddToParser(parser)
  flags.AddInstanceArgument(parser)
  # Assemble the destination-URI help text, optionally mentioning gzip.
  help_parts = [
      'The path to the file in Google Cloud Storage where the '
      'export will be stored. The URI is in the form '
      'gs://bucketName/fileName. If the file already exists, the '
      'operation fails.'
  ]
  if gz_supported:
    help_parts.append(' If the filename ends with .gz, the '
                      'contents are compressed.')
  flags.AddUriArgument(parser, ''.join(help_parts))
  flags.AddDatabaseList(parser, database_help_text, database_required)
def RunExportCommand(args, client, export_context):
  """Exports data from a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.
    export_context: ExportContext; format-specific export metadata.

  Returns:
    A dict representing the export operation resource, if '--async' is used,
    or else None.

  Raises:
    HttpException: An HTTP error response was received while executing API
      request.
    ToolException: An error other than HTTP error occurred while executing the
      command.
  """
  service = client.sql_client
  messages = client.sql_messages

  validate.ValidateInstanceName(args.instance)
  instance_ref = client.resource_parser.Parse(
      args.instance,
      params={'project': properties.VALUES.core.project.GetOrFail},
      collection='sql.instances')

  # Kick off the export and wrap the returned operation in a resource ref.
  result_operation = service.instances.Export(
      messages.SqlInstancesExportRequest(
          instance=instance_ref.instance,
          project=instance_ref.project,
          instancesExportRequest=messages.InstancesExportRequest(
              exportContext=export_context)))
  operation_ref = client.resource_parser.Create(
      'sql.operations',
      operation=result_operation.name,
      project=instance_ref.project)

  if args.async_:
    # Don't block; hand the operation resource back for the caller to poll.
    return service.operations.Get(
        messages.SqlOperationsGetRequest(
            project=operation_ref.project,
            operation=operation_ref.operation))

  # TDE exports write a certificate path rather than a GCS dump URI.
  tde_type = messages.ExportContext.FileTypeValueValuesEnum.TDE
  destination = (
      args.cert_path if export_context.fileType == tde_type else args.uri)
  operations.OperationsV1Beta4.WaitForOperation(
      service, operation_ref, 'Exporting Cloud SQL instance')
  log.status.write('Exported [{instance}] to [{bucket}].\n'.format(
      instance=instance_ref, bucket=destination))
  return None
def RunSqlExportCommand(args, client):
  """Exports data from a Cloud SQL instance to a MySQL dump file.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict object representing the operations resource describing the export
    operation if the export was successful.
  """
  # Optional knobs are collected separately from the positional identifiers.
  options = dict(
      offload=args.offload,
      parallel=args.parallel,
      threads=args.threads,
      clean=args.clean,
      if_exists=args.if_exists,
  )
  context = export_util.SqlExportContext(
      client.sql_messages, args.uri, args.database, args.table, **options)
  if args.offload:
    # Serverless (offloaded) exports are billed separately; warn the user.
    log.status.write(
        'Serverless exports cost extra. See the pricing page for more information: https://cloud.google.com/sql/pricing.\n'
    )
  return RunExportCommand(args, client, context)
def RunCsvExportCommand(args, client):
  """Exports data from a Cloud SQL instance to a CSV file.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict object representing the operations resource describing the export
    operation if the export was successful.
  """
  # CSV formatting knobs are collected separately from the positional args.
  options = dict(
      offload=args.offload,
      quote=args.quote,
      escape=args.escape,
      fields_terminated_by=args.fields_terminated_by,
      lines_terminated_by=args.lines_terminated_by,
  )
  context = export_util.CsvExportContext(
      client.sql_messages, args.uri, args.database, args.query, **options)
  if args.offload:
    # Serverless (offloaded) exports are billed separately; warn the user.
    log.status.write(
        'Serverless exports cost extra. See the pricing page for more information: https://cloud.google.com/sql/pricing.\n'
    )
  return RunExportCommand(args, client, context)
def RunBakExportCommand(args, client):
  """Export data from a Cloud SQL instance to a SQL Server BAK file.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict object representing the operations resource describing the export
    operation if the export was successful.
  """
  # Positional parameters of BakExportContext, in declaration order.
  bak_params = (
      args.uri,
      args.database,
      args.stripe_count,
      args.striped,
      args.bak_type,
      args.differential_base,
      args.export_log_start_time,
      args.export_log_end_time,
  )
  context = export_util.BakExportContext(client.sql_messages, *bak_params)
  return RunExportCommand(args, client, context)
def RunTdeExportCommand(args, client):
  """Export TDE certificate from a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict object representing the operations resource describing the export
    operation if the export was successful.
  """
  # Positional parameters of TdeExportContext, in declaration order.
  cert_params = (
      args.certificate,
      args.cert_path,
      args.pvk_path,
      args.pvk_password,
  )
  context = export_util.TdeExportContext(client.sql_messages, *cert_params)
  return RunExportCommand(args, client, context)

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud SQL resource filter expression rewrite backend."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core.resource import resource_expr_rewrite
from googlecloudsdk.core.util import times
import six
# If _STRING_FIELDS and _TIME_FIELDS are out of sync with the API then --filter
# expressions will still work, but parts may be done client side, degrading
# performance.

# Fields compared as plain strings; only equality tests ('=', '!=') are
# rewritten for server-side evaluation.
_STRING_FIELDS = frozenset([
    'location',
    'instance',
    'type',
])

# Fields compared as date-times; ordering comparisons are rewritten to
# RFC 3339 UTC form for server-side evaluation.
_TIME_FIELDS = frozenset([
    'backupInterval.startTime',
    'instanceDeletionTime',
])
class Backend(resource_expr_rewrite.Backend):
  """Cloud SQL resource filter expression rewrite backend."""
  # NOTE: the docstring previously said "Cloud Build" — a copy/paste slip;
  # this module is Cloud SQL specific (see the module docstring).

  def _RewriteStrings(self, key, op, operand):
    """Rewrites <key op operand> for a string field.

    Args:
      key: The field name.
      op: The operator ('=' or '!=').
      operand: A single value, or a list of values to be OR'ed together.

    Returns:
      The server-side filter expression string.
    """
    values = operand if isinstance(operand, list) else [operand]
    # A single term joins to itself, so no special case is needed for the
    # one-value (non-list) operand.
    return ' OR '.join(
        '{key}{op}{arg}'.format(
            key=key, op=op, arg=self.Quote(arg, always=True))
        for arg in values)

  def _RewriteTimes(self, key, op, operand):
    """Rewrites <*Time op operand> as an RFC 3339 UTC comparison.

    Args:
      key: The field name.
      op: The comparison operator.
      operand: The date-time operand string.

    Returns:
      The server-side filter expression string.

    Raises:
      ValueError: The operand is not a parseable date-time value.
    """
    try:
      dt = times.ParseDateTime(operand)
    except ValueError as e:
      raise ValueError(
          '{operand}: date-time value expected for {key}: {error}'.format(
              operand=operand, key=key, error=six.text_type(e)
          )
      )
    dt_string = times.FormatDateTime(dt, '%Y-%m-%dT%H:%M:%S.%3f%Ez', times.UTC)
    return '{key}{op}{dt_string}'.format(
        key=key, op=op, dt_string=self.Quote(dt_string, always=True)
    )

  def RewriteTerm(self, key, op, operand, key_type):
    """Rewrites <key op operand>.

    Args:
      key: The field name.
      op: The operator.
      operand: The operand value or list of values.
      key_type: The key type (unused).

    Returns:
      The server-side expression string, or None to evaluate the term client
      side.
    """
    del key_type  # unused in RewriteTerm
    if op not in ['<', '<=', '=', '!=', '>=', '>', ':']:
      return None
    name = key
    if name in _STRING_FIELDS:
      # String fields only support equality tests server side.
      if op not in ['=', '!=']:
        return None
      return self._RewriteStrings(name, op, operand)
    elif name in _TIME_FIELDS:
      # Time fields support ordering comparisons but not ':'.
      if op not in ['<', '<=', '=', '!=', '>=', '>']:
        return None
      return self._RewriteTimes(name, op, operand)
    return None

View File

@@ -0,0 +1,128 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions for sql generate-login-token commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from google.auth import credentials
from google.auth import exceptions as google_auth_exceptions
from google.oauth2 import credentials as google_auth_creds
from googlecloudsdk.api_lib.auth import exceptions as auth_exceptions
from googlecloudsdk.calliope import exceptions as c_exc
from googlecloudsdk.core import log
from googlecloudsdk.core import requests
from googlecloudsdk.core.credentials import creds as c_creds
from googlecloudsdk.core.credentials import google_auth_credentials as c_google_auth
from googlecloudsdk.core.credentials import store as c_store
import six
def generate_login_token_from_gcloud_auth(scopes):
  """Generates a down-scoped access token for IAM DB authentication from gcloud credentials.

  Args:
    scopes: scopes to be included in the down-scoped token.

  Returns:
    Down-scoped access token.

  Raises:
    auth_exceptions.InvalidCredentialsError: The active user credential is
      missing one of the requested scopes, or no access token could be
      obtained after refresh.
  """
  cred = c_store.Load(
      allow_account_impersonation=True,
      use_google_auth=True,
      cache_only_rapt=True,
  )
  cred_type = c_creds.CredentialTypeGoogleAuth.FromCredentials(cred)
  if cred_type == c_creds.CredentialTypeGoogleAuth.USER_ACCOUNT:
    # Make sure the credential has the required scopes before we downscope it.
    missing_scope = frozenset(scopes) - frozenset(cred.scopes)
    if missing_scope:
      raise auth_exceptions.InvalidCredentialsError(
          f'Missed the following scopes: {list(missing_scope)}. Please run'
          ' "gcloud auth login", consent the missing scopes and try again.'
      )
  cred = _downscope_credential(cred, scopes)
  c_store.Refresh(cred)
  # google-auth credentials expose .token; legacy credentials expose
  # .access_token.
  if c_creds.IsGoogleAuthCredentials(cred):
    token = cred.token
  else:
    token = cred.access_token
  if not token:
    raise auth_exceptions.InvalidCredentialsError(
        'No access token could be obtained from the current credentials.')
  return token
def generate_login_token_from_adc(scopes):
  """Generates down-scoped credentials for IAM DB authentication from application default credentials.

  Args:
    scopes: scopes to be included in the down-scoped credentials.

  Returns:
    Down-scoped credentials, refreshed so that a valid access token is
    populated. (Note: unlike generate_login_token_from_gcloud_auth, this
    returns the credentials object itself, not the bare token.)

  Raises:
    c_exc.ToolException: Application default credentials are not available.
  """
  try:
    creds, _ = c_creds.GetGoogleAuthDefault().default(
        scopes=scopes)
  except google_auth_exceptions.DefaultCredentialsError as e:
    log.debug(e, exc_info=True)
    raise c_exc.ToolException(six.text_type(e))
  creds = _downscope_credential(creds, scopes)
  # Converts the user credentials so that it can handle reauth during refresh.
  if isinstance(creds, google_auth_creds.Credentials):
    creds = c_google_auth.Credentials.FromGoogleAuthUserCredentials(
        creds)
  with c_store.HandleGoogleAuthCredentialsRefreshError(for_adc=True):
    creds.refresh(requests.GoogleAuthRequest())
  return creds
def _downscope_credential(creds, scopes):
  """Generates a down-scoped credential.

  Args:
    creds: end user credential
    scopes: scopes to be included in the down-scoped credential

  Returns:
    Down-scoped credential.
  """
  cred_type = c_creds.CredentialTypeGoogleAuth.FromCredentials(creds)
  if cred_type not in [
      c_creds.CredentialTypeGoogleAuth.USER_ACCOUNT,
      c_creds.CredentialTypeGoogleAuth.SERVICE_ACCOUNT,
      c_creds.CredentialTypeGoogleAuth.IMPERSONATED_ACCOUNT,
  ]:
    # TODO(b/223649175): Add support for other credential types(e.g GCE).
    log.warning(
        'This command may not work as expected '
        'for account type {}.'.format(cred_type.key)
    )
  # Credentials implementing the Scoped interface are rescoped through the
  # public API; other types fall back to overwriting the private attribute.
  # pylint:disable=protected-access
  if isinstance(creds, credentials.Scoped):
    creds = creds.with_scopes(scopes)
  else:
    creds._scopes = scopes
  return creds

View File

@@ -0,0 +1,292 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions for sql import commands."""
from googlecloudsdk.api_lib.sql import import_util
from googlecloudsdk.api_lib.sql import operations
from googlecloudsdk.api_lib.sql import validate
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.sql import flags
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
def AddBaseImportFlags(
    parser, filetype, gz_supported=True, user_supported=True
):
  """Registers the flags shared by sql import subcommands.

  Args:
    parser: An argparse parser that you can use to add arguments that go on the
      command line after this command. Positional arguments are allowed.
    filetype: String, description of the file type being imported.
    gz_supported: Boolean, if True then .gz compressed files are supported.
    user_supported: Boolean, if True then a Postgres user can be specified.
  """
  base.ASYNC_FLAG.AddToParser(parser)
  flags.AddInstanceArgument(parser)
  # Build the source-URI help text, optionally mentioning gzip support.
  help_text = (
      'Path to the {filetype} file in Google Cloud Storage from '
      'which the import is made. The URI is in the form '
      '`gs://bucketName/fileName`.'
  )
  if gz_supported:
    help_text += ' Compressed gzip files (.gz) are also supported.'
  flags.AddUriArgument(parser, help_text.format(filetype=filetype))
  if user_supported:
    flags.AddUser(parser, 'PostgreSQL user for this import operation.')
def AddBakImportFlags(parser, filetype, gz_supported=True, user_supported=True):
  """Registers the flags for BAK imports.

  Same as the base import flags except the URI flag is the BAK-specific one.

  Args:
    parser: An argparse parser that you can use to add arguments that go on the
      command line after this command. Positional arguments are allowed.
    filetype: String, description of the file type being imported.
    gz_supported: Boolean, if True then .gz compressed files are supported.
    user_supported: Boolean, if True then a Postgres user can be specified.
  """
  base.ASYNC_FLAG.AddToParser(parser)
  flags.AddInstanceArgument(parser)
  # Build the source-URI help text, optionally mentioning gzip support.
  help_text = (
      'Path to the {filetype} file in Google Cloud Storage from '
      'which the import is made. The URI is in the form '
      '`gs://bucketName/fileName`.'
  )
  if gz_supported:
    help_text += ' Compressed gzip files (.gz) are also supported.'
  flags.AddBakImportUriArgument(parser, help_text.format(filetype=filetype))
  if user_supported:
    flags.AddUser(parser, 'PostgreSQL user for this import operation.')
def RunImportCommand(args, client, import_context):
  """Imports data into a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.
    import_context: ImportContext; format-specific import metadata.

  Returns:
    A dict representing the import operation resource, if '--async' is used,
    or else None.

  Raises:
    HttpException: An HTTP error response was received while executing API
      request.
    ToolException: An error other than HTTP error occurred while executing the
      command.
  """
  sql_client = client.sql_client
  sql_messages = client.sql_messages
  # BAK and TDE imports get special validation and messaging below.
  is_bak_import = (
      import_context.fileType
      == sql_messages.ImportContext.FileTypeValueValuesEnum.BAK
  )
  is_tde_import = (
      import_context.fileType
      == sql_messages.ImportContext.FileTypeValueValuesEnum.TDE
  )
  # TDE imports read a certificate path instead of a GCS URI.
  import_source = args.cert_path if is_tde_import else args.uri
  validate.ValidateInstanceName(args.instance)
  if is_bak_import:
    # For BAK, the URI may be omitted only for a recovery-only operation.
    validate.ValidateURI(import_source, args.recovery_only)
  instance_ref = client.resource_parser.Parse(
      args.instance,
      params={'project': properties.VALUES.core.project.GetOrFail},
      collection='sql.instances',
  )
  # Imports can overwrite data, so confirm with the user before proceeding.
  if is_bak_import and args.recovery_only:
    console_io.PromptContinue(
        message=(
            'Bring database [{database}] online with recovery-only.'.format(
                database=args.database
            )
        ),
        default=True,
        cancel_on_no=True,
    )
  else:
    console_io.PromptContinue(
        message='Data from [{uri}] will be imported to [{instance}].'.format(
            uri=import_source, instance=args.instance
        ),
        default=True,
        cancel_on_no=True,
    )
  import_request = sql_messages.SqlInstancesImportRequest(
      instance=instance_ref.instance,
      project=instance_ref.project,
      instancesImportRequest=sql_messages.InstancesImportRequest(
          importContext=import_context
      ),
  )
  result_operation = sql_client.instances.Import(import_request)
  operation_ref = client.resource_parser.Create(
      'sql.operations',
      operation=result_operation.name,
      project=instance_ref.project,
  )
  if args.async_:
    # Don't wait; return the operation resource for the caller to poll.
    return sql_client.operations.Get(
        sql_messages.SqlOperationsGetRequest(
            project=operation_ref.project, operation=operation_ref.operation
        )
    )
  # Progress message differs for a recovery-only BAK operation.
  message = 'Importing data into Cloud SQL instance'
  if is_bak_import and args.recovery_only:
    message = 'Bring database online'
  operations.OperationsV1Beta4.WaitForOperation(
      sql_client, operation_ref, message
  )
  if is_bak_import and args.recovery_only:
    log.status.write(
        'Bring database [{database}] online with recovery-only.\n'.format(
            database=args.database
        )
    )
  else:
    log.status.write(
        'Imported data from [{bucket}] into [{instance}].\n'.format(
            instance=instance_ref, bucket=import_source
        )
    )
  return None
def RunSqlImportCommand(args, client):
  """Imports data from a SQL dump file into Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict representing the import operation resource, if '--async' is used,
    or else None.
  """
  # Optional knobs are collected separately from the positional identifiers.
  options = dict(
      parallel=args.parallel,
      threads=args.threads,
      clean=args.clean,
      if_exists=args.if_exists,
  )
  context = import_util.SqlImportContext(
      client.sql_messages, args.uri, args.database, args.user, **options)
  return RunImportCommand(args, client, context)
def RunCsvImportCommand(args, client):
  """Imports data from a CSV file into Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict representing the import operation resource, if '--async' is used,
    or else None.
  """
  # Positional parameters of CsvImportContext, in declaration order.
  csv_params = (
      args.uri,
      args.database,
      args.table,
      args.columns,
      args.user,
      args.quote,
      args.escape,
      args.fields_terminated_by,
      args.lines_terminated_by,
  )
  context = import_util.CsvImportContext(client.sql_messages, *csv_params)
  return RunImportCommand(args, client, context)
def RunBakImportCommand(args, client):
  """Imports data from a BAK file into Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict representing the import operation resource, if '--async' is used,
    or else None.
  """
  # Positional parameters of BakImportContext, in declaration order.
  bak_params = (
      args.uri,
      args.database,
      args.cert_path,
      args.pvk_path,
      args.pvk_password,
      args.keep_encrypted,
      args.striped,
      args.no_recovery,
      args.recovery_only,
      args.bak_type,
      args.stop_at,
      args.stop_at_mark,
  )
  context = import_util.BakImportContext(client.sql_messages, *bak_params)
  return RunImportCommand(args, client, context)
def RunTdeImportCommand(args, client):
  """Imports data from a TDE certificate file into Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    A dict representing the import operation resource, if '--async' is used,
    or else None.
  """
  # Positional parameters of TdeImportContext, in declaration order.
  cert_params = (
      args.certificate,
      args.cert_path,
      args.pvk_path,
      args.pvk_password,
  )
  context = import_util.TdeImportContext(client.sql_messages, *cert_params)
  return RunImportCommand(args, client, context)

View File

@@ -0,0 +1,148 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions for sql reschedule-maintenance commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from dateutil import tz
from googlecloudsdk.api_lib.sql import exceptions as sql_exceptions
from googlecloudsdk.api_lib.sql import operations
from googlecloudsdk.api_lib.sql import validate
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.resource import resource_lex
from googlecloudsdk.core.resource import resource_property
from googlecloudsdk.core.util import times
import six
def ParseRescheduleType(sql_messages, reschedule_type):
  """Converts a reschedule type flag value into its API enum value.

  Args:
    sql_messages: module, the proto definitions for the SQL Admin API.
    reschedule_type: string or None, the user-supplied --reschedule-type flag
      value (matched case-insensitively against the enum names).

  Returns:
    The corresponding Reschedule.RescheduleTypeValueValuesEnum value, or None
    if no reschedule type was supplied.
  """
  if not reschedule_type:
    return None
  return sql_messages.Reschedule.RescheduleTypeValueValuesEnum.lookup_by_name(
      reschedule_type.upper())
def RunRescheduleMaintenanceCommand(args, client):
  """Reschedule maintenance for a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.
    client: SqlClient instance, with sql_client and sql_messages props, for use
      in generating messages and making API calls.

  Returns:
    None

  Raises:
    HttpException: An HTTP error response was received while executing API
      request.
    ArgumentError: The schedule_time argument was missing, in an invalid format,
      or not within the reschedule maintenance bounds.
    InvalidStateException: The Cloud SQL instance was not in an appropriate
      state for the requested command.
    ToolException: Any other error that occurred while executing the command.
  """
  sql_client = client.sql_client
  sql_messages = client.sql_messages
  reschedule_type = ParseRescheduleType(sql_messages, args.reschedule_type)
  schedule_time = args.schedule_time
  # Start argument validation.
  validate.ValidateInstanceName(args.instance)
  instance_ref = client.resource_parser.Parse(
      args.instance,
      params={'project': properties.VALUES.core.project.GetOrFail},
      collection='sql.instances')
  # SPECIFIC_TIME is the only reschedule type that needs a schedule time.
  if reschedule_type == sql_messages.Reschedule.RescheduleTypeValueValuesEnum.SPECIFIC_TIME:
    if schedule_time is None:
      raise sql_exceptions.ArgumentError(
          'argument --schedule-time: Must be specified for SPECIFIC_TIME.')
  # Get the instance the user is operating on.
  try:
    instance_resource = sql_client.instances.Get(
        sql_messages.SqlInstancesGetRequest(
            project=instance_ref.project, instance=instance_ref.instance))
  except apitools_exceptions.HttpError as error:
    # TODO(b/64292220): Remove once API gives helpful error message.
    log.debug('operation : %s', six.text_type(instance_ref))
    exc = exceptions.HttpException(error)
    # Map the opaque notAuthorized reason to a user-actionable message.
    if resource_property.Get(exc.payload.content,
                             resource_lex.ParseKey('error.errors[0].reason'),
                             None) == 'notAuthorized':
      raise exceptions.HttpException(
          'You are either not authorized to access the instance or it does not '
          'exist.'
      )
    raise
  # Start validation against instance properties.
  if instance_resource.scheduledMaintenance is None:
    raise sql_exceptions.InvalidStateError(
        'This instance does not have any scheduled maintenance at this time.')
  if not instance_resource.scheduledMaintenance.canReschedule:
    raise sql_exceptions.InvalidStateError(
        'Cannot reschedule this instance\'s maintenance.')
  if reschedule_type == sql_messages.Reschedule.RescheduleTypeValueValuesEnum.SPECIFIC_TIME:
    # Ensure we have a valid scheduledMaintenance.startTime to prevent
    # validation errors elsewhere in the pipeline.
    try:
      start_time = times.ParseDateTime(
          instance_resource.scheduledMaintenance.startTime, tzinfo=tz.tzutc())
    except ValueError:
      raise sql_exceptions.InvalidStateError(
          'Cannot reschedule this instance\'s maintenance.')
    # Only allow pushing maintenance later, never earlier.
    if schedule_time < start_time:
      raise sql_exceptions.ArgumentError(
          'argument --schedule-time: Must be after original scheduled time.')
  # Convert the schedule_time to the format the backend expects, if it exists.
  # (RFC 3339 UTC with a literal 'Z' suffix.)
  schedule_time = times.LocalizeDateTime(
      schedule_time, times.UTC).isoformat().replace(
          '+00:00', 'Z') if schedule_time is not None else None
  # Perform the requested reschedule operation.
  reschedule_maintenance_request = sql_messages.SqlProjectsInstancesRescheduleMaintenanceRequest(
      instance=instance_ref.instance,
      project=instance_ref.project,
      sqlInstancesRescheduleMaintenanceRequestBody=sql_messages
      .SqlInstancesRescheduleMaintenanceRequestBody(
          reschedule=sql_messages.Reschedule(
              rescheduleType=reschedule_type,
              scheduleTime=schedule_time)))
  result_operation = sql_client.projects_instances.RescheduleMaintenance(
      reschedule_maintenance_request)
  operation_ref = client.resource_parser.Create(
      'sql.operations',
      operation=result_operation.name,
      project=instance_ref.project)
  # Block until the reschedule operation completes.
  operations.OperationsV1Beta4.WaitForOperation(sql_client, operation_ref,
                                                'Rescheduling maintenance.')
  log.status.write('Maintenance rescheduled.\n')
  return

View File

@@ -0,0 +1,40 @@
# Resource argument definitions shared by Cloud SQL surface commands.
# Each top-level entry names a resource, the API collection it maps to, and
# the attributes (URI parameters) needed to identify one. YAML anchors
# (&name) are declared on the first use of an attribute so later resources
# can reuse the same definition via aliases (*name).
project:
  name: project
  collection: sqladmin.projects
  attributes:
  - &project
    parameter_name: project
    attribute_name: project
    help: The project name.
instance:
  name: instance
  collection: sqladmin.instances
  attributes:
  # Instances are scoped under a project, so reuse its attribute spec.
  - *project
  - &instance
    parameter_name: instance
    attribute_name: instance
    help: The instance name.
database:
  name: database
  collection: sqladmin.databases
  attributes:
  # Databases are scoped under a project and an instance.
  - *project
  - *instance
  - &database
    parameter_name: database
    attribute_name: database
    help: The database name.
ssl_cert:
  name: SSL certificates
  collection: sqladmin.sslCerts
  attributes:
  - *project
  - *instance
  # The cert is addressed by SHA-1 fingerprint in the API, but users supply
  # the certificate's common name on the command line.
  - &ssl_cert
    parameter_name: sha1Fingerprint
    attribute_name: common_name
    help: 'User supplied name. Constrained to [a-zA-Z.-_ ]+.'

View File

@@ -0,0 +1,131 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions for sql users commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import exceptions
def ParseDualPasswordType(sql_messages, args):
  """Determines which retained-password behavior the user requested.

  Args:
    sql_messages: the proto definition for the API being called
    args: argparse.Namespace, The arguments that this command was invoked with.

  Returns:
    DualPasswordType enum value, or None when neither flag was supplied.
  """
  # --discard-dual-password wins; otherwise fall through to --retain-password.
  if args.discard_dual_password:
    return sql_messages.User.DualPasswordTypeValueValuesEnum.NO_DUAL_PASSWORD
  return (
      sql_messages.User.DualPasswordTypeValueValuesEnum.DUAL_PASSWORD
      if args.retain_password else None)
def ParseUserType(sql_messages, args):
  """Converts the --type flag value into the API's user type enum.

  Args:
    sql_messages: the proto definition for the API being called
    args: argparse.Namespace, The arguments that this command was invoked with.

  Returns:
    TypeValueValuesEnum value, or None when --type was not supplied.
  """
  user_type = args.type
  if not user_type:
    return None
  # Flag values are lowercase on the CLI; enum names are uppercase.
  return sql_messages.User.TypeValueValuesEnum.lookup_by_name(
      user_type.upper())
def ValidateSetPasswordRequest(args):
  """Checks that the password-related flags form a consistent request.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked with.

  Raises:
    exceptions.InvalidArgumentException: if --retain-password is combined with
      an empty password, or --discard-dual-password with a non-empty one.

  Returns:
    throws exception or None
  """
  # Not every command surface defines both flags, so probe before reading.
  retain = getattr(args, 'retain_password', None)
  discard = getattr(args, 'discard_dual_password', None)
  # Retaining an empty password makes no sense.
  if retain and not args.password:
    raise exceptions.InvalidArgumentException(
        '--retain-password', 'Must set --password to non-empty'
        ' value.')
  # Discarding the old password while also setting a new one is ambiguous.
  if discard and args.password:
    raise exceptions.InvalidArgumentException(
        '--discard-dual-password', 'Cannot set --password to non-empty value ' +
        'while discarding the old password.')
def CreatePasswordPolicyFromArgs(sql_messages,
                                 password_policy,
                                 args):
  """Builds the user's password validation policy from command-line flags.

  Args:
    sql_messages: module, The messages module that should be used.
    password_policy: sql_messages.UserPasswordValidationPolicy,
      The policy to build the new policy off.
    args: argparse.Namespace, The arguments that this command was invoked with.

  Returns:
    sql_messages.UserPasswordValidationPolicy or None
  """
  # create-user has no --clear-password-policy flag, so probe for the
  # attribute instead of reading it unconditionally.
  clear_policy = getattr(args, 'clear_password_policy', None)
  failed_attempts = args.password_policy_allowed_failed_attempts
  expiration = args.password_policy_password_expiration_duration
  failed_attempts_check = args.password_policy_enable_failed_attempts_check
  password_verification = args.password_policy_enable_password_verification
  supplied_flags = (failed_attempts, expiration, failed_attempts_check,
                    password_verification, clear_policy)
  # No policy-related flag was supplied: do not touch the policy at all.
  if all(value is None for value in supplied_flags):
    return None
  if password_policy is None:
    password_policy = sql_messages.UserPasswordValidationPolicy()
  # An empty policy tells the backend to drop the existing one.
  if clear_policy:
    return sql_messages.UserPasswordValidationPolicy()
  if failed_attempts is not None:
    password_policy.allowedFailedAttempts = failed_attempts
    # Setting a limit implies the failed-attempts check should be on.
    password_policy.enableFailedAttemptsCheck = True
  if expiration is not None:
    # The API expects a duration string in seconds, e.g. '86400s'.
    password_policy.passwordExpirationDuration = str(expiration) + 's'
  if failed_attempts_check is not None:
    password_policy.enableFailedAttemptsCheck = failed_attempts_check
  if password_verification is not None:
    password_policy.enablePasswordVerification = password_verification
  return password_policy

View File

@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common validation methods for some SQL commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.api_lib.sql import validate as api_validate
from googlecloudsdk.calliope import arg_parsers
def InstanceNameRegexpValidator():
  """Returns a function that validates an instance name using predefined rules.

  Returns:
    function: str -> str, usable as an argparse type
  """
  # ':' and '.' are not actually legal characters, but letting them through
  # the regex lets ValidateInstanceName produce a more specific error.
  name_re = re.compile(r'^[a-z][a-z0-9-:.]*$')
  error_detail = ('must be composed of lowercase letters, numbers, and '
                  'hyphens; must start with a letter.')

  def Parse(value):
    if name_re.match(value) is None:
      raise arg_parsers.ArgumentTypeError(
          'Bad value [{0}]: {1}'.format(value, error_detail))
    api_validate.ValidateInstanceName(value)
    return value

  return Parse
def IsProjectLevelBackupRequest(backup_id):
  """Checks if the backup request is project level.

  Project level requests will have backup_id in string format whereas they
  will be integer values for instance level backup requests.

  Args:
    backup_id: The id of the requested backup.

  Returns:
    True if is a project level backup request.
  """
  try:
    # Instance-level ids parse cleanly as integers.
    int(backup_id)
    return False
  except ValueError:
    return True
def IsBackupDrBackupRequest(backup_id: str) -> bool:
  """Checks if the backup request is a backupdr backup by checking if the backup id contains /backupVaults.

  A backupdr backup will have the backup in the format of
  projects/{project}/locations/{location}/backupVaults/{backup_vault}/dataSources/{data_source}/backups/{backup}.

  Args:
    backup_id: The id of the requested backup.

  Returns:
    True if the request is a backupdr backup request.
  """
  # bool() keeps the declared return type honest: the previous expression
  # `backup_id and ...` returned the falsy backup_id itself (None or '')
  # instead of False.
  return bool(backup_id) and '/backupVaults' in backup_id