feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The backups command group for bigtable."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Surface this command group in every release track (alpha, beta, and GA).
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class Backups(base.Group):
  # Calliope uses this docstring as the group's help text; subcommands
  # (create, list, copy, IAM bindings, ...) are attached elsewhere.
  """Manage Cloud Bigtable backups."""

View File

@@ -0,0 +1,33 @@
help_text:
brief: Add an IAM policy binding to a Cloud Bigtable Backup.
description: |
Add an IAM policy binding to a Cloud Bigtable Backup. One binding consists of a member,
a role, and an optional condition.
examples: |
To add an IAM policy binding for the role of `roles/editor` for the user `test-user@gmail.com`
with backup `my-backup` in instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster` --member=`user:test-user@gmail.com` --role=`roles/editor`
To add an IAM policy binding which expires at the end of the year 2020 for the role of
`roles/bigtable.admin` and the user `test-user@gmail.com` with backup `my-backup`
in instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster` --member=`user:test-user@gmail.com` --role=`roles/bigtable.admin` --condition=`expression=request.time < timestamp("2021-01-01T00:00:00Z"),title=expires_end_of_2020,description=Expires at midnight on 2020-12-31`
See https://cloud.google.com/iam/docs/managing-policies for details of
policy role and member types.
iam:
enable_condition: true
policy_version: 3
get_iam_policy_version_path: getIamPolicyRequest.options.requestedPolicyVersion
request:
collection: bigtableadmin.projects.instances.clusters.backups
arguments:
resource:
help_text: Cloud Bigtable Backup to add the IAM policy binding to.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup

View File

@@ -0,0 +1,103 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""bigtable backups copy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.bigtable import backups
from googlecloudsdk.api_lib.bigtable import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.bigtable import arguments
from googlecloudsdk.core import log
class Copy(base.Command):
  """Copy a Cloud Bigtable backup to a new backup."""

  detailed_help = {
      'DESCRIPTION': textwrap.dedent("""
          This command creates a copy of a Cloud Bigtable backup.
          """),
      'EXAMPLES': textwrap.dedent("""\
          To copy a backup within the same project, run:

            $ {command} --source-instance=SOURCE_INSTANCE --source-cluster=SOURCE_CLUSTER --source-backup=SOURCE_BACKUP --destination-instance=DESTINATION_INSTANCE --destination-cluster=DESTINATION_CLUSTER --destination-backup=DESTINATION_BACKUP --expiration-date=2023-09-01T10:49:41Z

          To copy a backup to a different project, run:

            $ {command} --source-backup=projects/SOURCE_PROJECT/instances/SOURCE_INSTANCE/clusters/SOURCE_CLUSTER/backups/SOURCE_BACKUP --destination-backup=projects/DESTINATION_PROJECT/instances/DESTINATION_INSTANCE/clusters/DESTINATION_CLUSTER/backups/DESTINATION_BACKUP --expiration-date=2022-08-01T10:49:41Z

          To set retention period and run asynchronously, run:

            $ {command} --source-backup=projects/SOURCE_PROJECT/instances/SOURCE_INSTANCE/clusters/SOURCE_CLUSTER/backups/SOURCE_BACKUP --destination-backup=projects/DESTINATION_PROJECT/instances/DESTINATION_INSTANCE/clusters/DESTINATION_CLUSTER/backups/DESTINATION_BACKUP --retention-period=2w --async
          """),
  }

  @staticmethod
  def Args(parser):
    """Register flags for this command.

    Args:
      parser: An argparse parser to which the command's flags are added.
    """
    arguments.AddCopyBackupResourceArgs(parser)
    # Exactly one of --expiration-date / --retention-period must be given.
    group_parser = parser.add_argument_group(mutex=True, required=True)
    group_parser.add_argument(
        '--expiration-date',
        help=(
            'Expiration time of the backup, must be at least 6 hours and at '
            'most 30 days from the time the source backup is created. See '
            '`$ gcloud topic datetimes` for information on date/time formats.'
        ),
    )
    group_parser.add_argument(
        '--retention-period',
        help=(
            'Retention period of the backup relative from now, must be at least'
            ' 6 hours and at most 30 days from the time the source backup is'
            ' created. See `$ gcloud topic datetimes` for information on'
            ' duration formats.'
        ),
    )
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    """Issues the copy request and (optionally) waits for it to finish.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      The long-running operation when --async is set, otherwise the finished
      backup resource returned by the operation.
    """
    source_backup_ref = args.CONCEPTS.source.Parse()
    destination_backup_ref = args.CONCEPTS.destination.Parse()
    op = backups.CopyBackup(source_backup_ref, destination_backup_ref, args)
    operation_ref = util.GetOperationRef(op)
    if args.async_:
      # Fire-and-forget: report the operation name so the user can poll it.
      log.status.Print('Copy request issued from [{}] to [{}]\n'
                       'Check operation [{}] for status.'.format(
                           source_backup_ref.RelativeName(),
                           destination_backup_ref.RelativeName(), op.name))
      return op
    op_result = util.AwaitBackup(
        operation_ref, 'Waiting for operation [{}] to complete'.format(op.name))
    # NOTE(review): this inspects `op.error` on the operation as it was
    # *before* waiting; presumably util.AwaitBackup raises on failure so a
    # stale success here only suppresses the log line — confirm.
    if op.error is None:
      log.CreatedResource(op_result)
    return op_result

View File

@@ -0,0 +1,86 @@
- release_tracks: [ALPHA, BETA, GA]
help_text:
brief: Creates a backup of a Cloud Bigtable table.
description: Creates a backup of a Cloud Bigtable table.
examples: |
To create a backup `BACKUP_NAME` asynchronously from table `TABLE_NAME` which expires at
`2019-03-30T10:49:41Z`, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--table=TABLE_NAME --expiration-date=2019-03-30T10:49:41Z --async
To create a backup `BACKUP_NAME` synchronously from table `TABLE_NAME` which expires in 2 weeks
from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--table=TABLE_NAME --retention-period=2w
To create a hot backup `BACKUP_NAME` from table `TABLE_NAME` which expires in 2 weeks
from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--table=TABLE_NAME --retention-period=2w --backup-type=HOT
To create a hot backup `BACKUP_NAME` from table `TABLE_NAME` which will be converted to a
standard backup at `2019-03-31T10:49:41Z` and expires in 2 weeks from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--table=TABLE_NAME --retention-period=2w --backup-type=HOT
--hot-to-standard-time=2019-03-31T10:49:41Z
To create a hot backup `BACKUP_NAME` from table `TABLE_NAME` which will be converted to a
standard backup in 1 week from now and expires in 2 weeks from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--table=TABLE_NAME --retention-period=2w --backup-type=HOT
--hot-to-standard-time=+P1w
request:
collection: bigtableadmin.projects.instances.clusters.backups
modify_request_hooks:
- googlecloudsdk.api_lib.bigtable.backups:ModifyCreateRequest
arguments:
params:
- api_field: backup.sourceTable
arg_name: table
required: true
help_text: ID of the table from which the backup will be created.
- group:
mutex: true
required: true
params:
- arg_name: expiration-date
api_field: backup.expireTime
help_text: |
Absolute expiration time of the backup. From the time the request is received, must be:
- At least 6 hours in the future
- At most 90 days in the future
See `$ gcloud topic datetimes` for information on date/time formats.
- arg_name: retention-period
api_field: backup.expireTime
help_text: |
Retention period of the backup relative from now; must be:
- At least 6 hours
- At most 90 days
See `$ gcloud topic datetimes` for information on duration formats.
- arg_name: backup-type
api_field: backup.backupType
help_text: |
Type of the backup; whether the backup is a standard backup or a hot backup.
- arg_name: hot-to-standard-time
api_field: backup.hotToStandardTime
help_text: |
Time at which a hot backup will be converted to a standard backup relative from now; must
be:
- At least 24 hours
Only applies for hot backups. See `$ gcloud topic datetimes` for information on date/time
formats.
resource:
help_text: The Cloud Bigtable backup to create.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup
async:
collection: bigtableadmin.operations

View File

@@ -0,0 +1,15 @@
help_text:
brief: Delete an existing backup.
description: Delete an existing backup.
examples: |
To delete a backup, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
request:
collection: bigtableadmin.projects.instances.clusters.backups
arguments:
resource:
help_text: Cloud Bigtable backup to delete.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup

View File

@@ -0,0 +1,15 @@
help_text:
brief: Retrieves information about a backup.
description: Retrieves information about a backup.
examples: |
To describe a backup, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
request:
collection: bigtableadmin.projects.instances.clusters.backups
arguments:
resource:
help_text: Cloud Bigtable backup to describe.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup

View File

@@ -0,0 +1,23 @@
help_text:
brief: Get an IAM policy on a Cloud Bigtable Backup.
description: |
Get an IAM policy on a Cloud Bigtable Backup.
examples: |
To get the IAM policy on the backup `my-backup` in instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster`
See https://cloud.google.com/iam/docs/managing-policies for more information.
request:
collection: bigtableadmin.projects.instances.clusters.backups
arguments:
resource:
help_text: Cloud Bigtable Backup to get the IAM policy for.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup
iam:
policy_version: 3
get_iam_policy_version_path: getIamPolicyRequest.options.requestedPolicyVersion

View File

@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""bigtable backups list command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.bigtable import util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.bigtable import arguments
from googlecloudsdk.core import resources
def _GetUriFunction(resource):
  """Return the self-link URI for a backup list entry."""
  backup_ref = resources.REGISTRY.ParseRelativeName(
      resource.name,
      collection='bigtableadmin.projects.instances.clusters.backups')
  return backup_ref.SelfLink()
def _TransformCluster(resource):
"""Get Cluster ID from backup name."""
# backup name is in the format of:
# projects/{}/instances/{}/clusters/{}/backups/{}
backup_name = resource.get('name')
results = backup_name.split('/')
cluster_name = results[-3]
return cluster_name
class ListBackups(base.ListCommand):
  """List existing Bigtable backups."""

  detailed_help = {
      'DESCRIPTION':
          textwrap.dedent("""
          List existing Bigtable backups.
          """),
      'EXAMPLES':
          textwrap.dedent("""
          To list all backups in an instance, run:

            $ {command} --instance=INSTANCE_NAME

          To list all backups in a cluster, run:

            $ {command} --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
          """),
  }

  @staticmethod
  def Args(parser):
    """Register flags and display configuration for this command."""
    arguments.AddBackupResourceArg(parser, 'to list backups for')
    parser.display_info.AddFormat("""
          table(
            name.basename():sort=1:label=NAME,
            cluster():label=CLUSTER,
            sourceTable.basename():label=TABLE,
            expireTime:label=EXPIRE_TIME,
            state
          )
        """)
    parser.display_info.AddUriFunc(_GetUriFunction)
    parser.display_info.AddTransforms({'cluster': _TransformCluster})

  def Run(self, args):
    """Yields the backups under the requested instance or cluster.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      Backup messages returned by the Bigtable admin API.
    """
    instance_ref = args.CONCEPTS.instance.Parse()
    cluster_ref = args.CONCEPTS.cluster.Parse()

    # At least an instance must be resolvable to build the list parent.
    if not cluster_ref and not instance_ref:
      raise exceptions.InvalidArgumentException('--instance',
                                                '--instance must be specified')

    if cluster_ref:
      parent_name = cluster_ref.RelativeName()
    elif args.IsSpecified('cluster'):
      parent_name = instance_ref.RelativeName() + '/clusters/' + args.cluster
    else:
      # '-' asks the API for backups across all clusters in the instance.
      parent_name = instance_ref.RelativeName() + '/clusters/-'

    client = util.GetAdminClient()
    request = (
        util.GetAdminMessages()
        .BigtableadminProjectsInstancesClustersBackupsListRequest(
            parent=parent_name))
    for backup in list_pager.YieldFromList(
        client.projects_instances_clusters_backups,
        request,
        field='backups',
        batch_size_attribute=None):
      yield backup

View File

@@ -0,0 +1,33 @@
help_text:
brief: Remove an IAM policy binding from a Cloud Bigtable Backup.
description: |
Remove an IAM policy binding from a Cloud Bigtable Backup. One binding consists of a member,
a role, and an optional condition.
examples: |
To remove an IAM policy binding for the role of `roles/editor` for the user `test-user@gmail.com`
with backup `my-backup` in instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster` --member=`user:test-user@gmail.com` --role=`roles/editor`
To remove an IAM policy binding which expires at the end of the year 2020 for the role of
`roles/bigtable.admin` and the user `test-user@gmail.com` with backup `my-backup`
in instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster` --member=`user:test-user@gmail.com` --role=`roles/bigtable.admin` --condition=`expression=request.time < timestamp("2021-01-01T00:00:00Z"),title=expires_end_of_2020,description=Expires at midnight on 2020-12-31`
See https://cloud.google.com/iam/docs/managing-policies for details of
policy role and member types.
iam:
enable_condition: true
policy_version: 3
get_iam_policy_version_path: getIamPolicyRequest.options.requestedPolicyVersion
request:
collection: bigtableadmin.projects.instances.clusters.backups
arguments:
resource:
help_text: Cloud Bigtable Backup to remove the IAM policy binding from.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup

View File

@@ -0,0 +1,26 @@
help_text:
brief: Set an IAM policy on a Cloud Bigtable Backup.
description: |
Set an IAM policy on a Cloud Bigtable Backup.
examples: |
To set the IAM policy from file `my-policy` on the backup `my-backup` in
instance `my-instance` and cluster `my-cluster`, run:
$ {command} my-backup --instance=`my-instance` --cluster=`my-cluster` my-policy
See https://cloud.google.com/iam/docs/managing-policies for more information.
request:
collection: bigtableadmin.projects.instances.clusters.backups
modify_request_hooks:
- googlecloudsdk.command_lib.iam.hooks:UseMaxRequestedPolicyVersion:api_field=setIamPolicyRequest.policy.version
- googlecloudsdk.command_lib.iam.hooks:AddVersionToUpdateMaskIfNotPresent:update_mask_path=setIamPolicyRequest.updateMask
arguments:
resource:
help_text: Cloud Bigtable Backup to set the IAM policy on.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup
iam:
policy_version: 3

View File

@@ -0,0 +1,58 @@
help_text:
brief: |
Update a backup. Only the following fields can be updated: --expiration-date, --retention-period, and --hot-to-standard-time.
description: |
Update a backup. Only the following fields can be updated: --expiration-date, --retention-period, and --hot-to-standard-time.
examples: |
To update the expire time of backup `BACKUP_NAME` to 7 days from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--retention-period=7d
To update the hot-to-standard time of backup `BACKUP_NAME` to `2019-03-31T10:49:41Z`, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--hot-to-standard-time=2019-03-31T10:49:41Z
To update the hot-to-standard time of backup `BACKUP_NAME` to 7 days from now, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--hot-to-standard-time=+P7d
To clear the hot-to-standard time of backup `BACKUP_NAME`, run:
$ {command} BACKUP_NAME --instance=INSTANCE_NAME --cluster=CLUSTER_NAME
--hot-to-standard-time=''
request:
collection: bigtableadmin.projects.instances.clusters.backups
modify_request_hooks:
- googlecloudsdk.api_lib.bigtable.backups:ResetDefaultMaskField
- googlecloudsdk.api_lib.bigtable.backups:AddBackupFieldsToUpdateMask
arguments:
resource:
help_text: Cloud Bigtable backup to update.
spec: !REF googlecloudsdk.command_lib.bigtable.resources:backup
params:
- group:
mutex: true
params:
- arg_name: expiration-date
api_field: backup.expireTime
help_text: |
Absolute expiration time of the backup; must be at least 6 hours and at most 90 days from
backup creation time. See `$ gcloud topic datetimes` for information on date/time formats.
See `$ gcloud bigtable backups describe` for creation time.
- arg_name: retention-period
api_field: backup.expireTime
help_text: |
Retention period of the backup relative from now; must be at least 6 hours and at most 90
days from backup creation time. See `$ gcloud topic datetimes` for information on duration
formats. See `$ gcloud bigtable backups describe` for creation time.
- arg_name: hot-to-standard-time
api_field: backup.hotToStandardTime
help_text: |
Time at which a hot backup will be converted to a standard backup; must be at least 24 hours
from backup creation time. Only applies for hot backups. See `$ gcloud topic datetimes` for
information on date/time formats. See `$ gcloud bigtable backups describe` for creation time.