feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage bucket commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Command-group node for `gcloud storage buckets`; the concrete operations
# (create, delete, list, ...) are registered by sibling modules in this
# package. Calliope uses the class docstring as the group's help text.
@base.UniverseCompatible
class Buckets(base.Group):
  """Manage Cloud Storage buckets."""

View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets add-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class AddIamPolicyBinding(base.Command):
  """Add an IAM policy binding to a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Add an IAM policy binding to a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To grant a single role to a single principal for BUCKET:

        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/storage.objectCreator

      To make objects in BUCKET publicly readable:

        $ {command} gs://BUCKET --member=allUsers --role=roles/storage.objectViewer

      To specify a custom role for a principal on BUCKET:

        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/customRoleName
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the bucket URL positional and the IAM binding flags."""
    parser.add_argument(
        'url', help='URL of bucket to add IAM policy binding to.')
    iam_util.AddArgsForAddIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    """Fetches the current bucket policy, merges the binding, and applies it."""
    bucket_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    client = api_factory.get_api(bucket_url.scheme)
    existing_policy = client.get_bucket_iam_policy(bucket_url.bucket_name)
    messages = apis.GetMessagesModule('storage', 'v1')
    return iam_command_util.add_iam_binding_to_resource(
        args,
        bucket_url,
        messages,
        existing_policy,
        set_iam_policy_task.SetBucketIamPolicyTask,
    )

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage buckets anywhere-caches commands."""
from googlecloudsdk.calliope import base
# Command-group node for `gcloud storage buckets anywhere-caches`; the
# per-operation commands (create, describe, list, ...) are registered by
# sibling modules. Restricted to the default universe.
@base.DefaultUniverseOnly
class AnywhereCaches(base.Group):
  """Manage Cloud Storage Anywhere Caches."""

View File

@@ -0,0 +1,118 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for creating Anywhere Cache Instances."""
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import create_anywhere_cache_task
@base.DefaultUniverseOnly
class Create(base.CreateCommand):
  """Create Anywhere Cache instances for a bucket."""

  detailed_help = {
      'DESCRIPTION': """
      Create Anywhere Cache instances.

      Only one cache instance per zone can be created for each bucket.
      """,
      'EXAMPLES': """
      The following command creates an anywhere cache instance for bucket
      in ``asia-south2-b'' zone:

        $ {command} gs://my-bucket asia-south2-b

      The following command creates anywhere cache instances for bucket
      in ``asia-south2-b'', and ``asia-east1-a'' zone:

        $ {command} gs://my-bucket asia-south2-b asia-east1-a

      The following command creates an anywhere cache instance for bucket
      in ``asia-south2-b'' zone, with ttl for cache entry as 6 hours, and
      admission policy as ``ADMIT_ON_SECOND_MISS'':

        $ {command} gs://my-bucket asia-south2-b --ttl=6h --admission-policy='ADMIT_ON_SECOND_MISS'
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the bucket URL and zone positionals plus cache flags."""
    parser.add_argument(
        'url',
        type=str,
        help=(
            'Specifies the URL of the bucket where the Anywhere Cache should be'
            ' created.'
        ),
    )
    parser.add_argument(
        'zone',
        type=str,
        nargs='+',
        help=(
            'Specifies the name of the zonal locations where the Anywhere Cache'
            ' should be created.'
        ),
    )
    parser.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help='Cache entry time-to-live. Default to 24h if not provided.',
    )
    flags.add_admission_policy_flag(parser)

  def get_task_iterator(self, args, task_status_queue):
    """Yields one CreateAnywhereCacheTask per requested zone.

    Args:
      args: The parsed argument namespace for this command.
      task_status_queue: Queue used to report the expected workload size for
        progress reporting.

    Yields:
      create_anywhere_cache_task.CreateAnywhereCacheTask instances.
    """
    url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, url)
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.zone)
    )
    # The API expects the TTL as a seconds string (e.g. '21600s'). Keep the
    # converted value in a local instead of mutating args.ttl in place, which
    # matches the sibling `update` command and avoids surprising any later
    # reader of the args namespace.
    ttl = str(args.ttl) + 's' if args.ttl is not None else None
    for zone in args.zone:
      yield create_anywhere_cache_task.CreateAnywhereCacheTask(
          url, zone, admission_policy=args.admission_policy, ttl=ttl
      )

  def Run(self, args):
    """Executes the creation tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self.get_task_iterator(args, task_status_queue)
    plurality_checkable_task_iterator = (
        plurality_checkable_iterator.PluralityCheckableIterator(task_iterator)
    )
    self.exit_code = task_executor.execute_tasks(
        plurality_checkable_task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command to get the Anywhere Cache Instance."""
import collections
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import resource_util
# Determines the order in which the fields should be displayed for
# an AnywhereCacheResource.
# NOTE: the namedtuple's field order is load-bearing — it is consumed by
# resource_util.get_display_dict_for_resource to build the display output,
# so reordering fields changes what users see.
AnywhereCacheDisplayTitlesAndDefaults = collections.namedtuple(
    'AnywhereCacheDisplayTitlesAndDefaults',
    (
        'admission_policy',
        'anywhere_cache_id',
        'bucket',
        'create_time',
        'id',
        'kind',
        'pending_update',
        'state',
        'ttl',
        'update_time',
        'zone',
    ),
)
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Returns details of Anywhere Cache instance of a bucket."""

  detailed_help = {
      # Fixed user-facing typo: "Desribes" -> "Describes".
      'DESCRIPTION': """
      Describes a single Anywhere Cache instance if it exists.
      """,
      'EXAMPLES': """
      The following command describes the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds the cache-instance identifier positional and display flags."""
    parser.add_argument(
        'id',
        type=str,
        help=(
            'Identifier for an Anywhere Cache instance. It is a combination of'
            ' bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Fetches the cache instance and returns its display dictionary."""
    # rpartition splits on the LAST '/', so everything after the final
    # delimiter is treated as the cache id and the rest as the bucket name.
    bucket_name, _, anywhere_cache_id = args.id.rpartition('/')
    result = api_factory.get_api(
        storage_url.ProviderPrefix.GCS
    ).get_anywhere_cache(bucket_name, anywhere_cache_id)
    return resource_util.get_display_dict_for_resource(
        result,
        AnywhereCacheDisplayTitlesAndDefaults,
        args.raw,
    )

View File

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of disable command for disabling Anywhere Cache Instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import disable_anywhere_cache_task
@base.DefaultUniverseOnly
class Disable(base.Command):
  """Disable Anywhere Cache instances."""

  detailed_help = {
      # Fixed user-facing typo: "Cach disablement" -> "Cache disablement".
      'DESCRIPTION': """
      Disables one or more Anywhere Cache instances.

      The cache instance will be set to DISABLED state. The existing entries
      can be read from the cache but new entries will not be written to the
      cache. The L4 SSD cache would not be deleted by the cache manager until
      the min TTL (1h) has been reached (cache instance is kept for at least
      1h). Google Cloud Storage defines the min TTL which is applied to all
      cache instances. Cache disablement could be canceled by using
      anywhere-caches resume command before the instance is deleted.
      """,
      'EXAMPLES': """
      The following command disables the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds the cache-instance identifier positionals to the parser."""
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are a'
            ' combination of bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )

  def _get_task_iterator(self, args, task_status_queue):
    """Yields one DisableAnywhereCacheTask per identifier in args.id."""
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # Split on the last delimiter so the trailing segment is the cache id.
      bucket_name, _, anywhere_cache_id = id_str.rpartition(
          storage_url.CLOUD_URL_DELIMITER
      )
      yield disable_anywhere_cache_task.DisableAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    """Executes the disable tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self._get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of list command to list Anywhere Cache instances of bucket."""
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import resource_util
from surface.storage.buckets.anywhere_caches import describe
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """List all Anywhere Cache instances of a bucket."""

  detailed_help = {
      'DESCRIPTION': """
      List all cache instances of this bucket.
      """,
      'EXAMPLES': """
      The following command lists all anywhere cache instances of bucket
      ``gs://my-bucket'':

        $ {command} gs://my-bucket
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds the bucket URL positional and the raw-display flag."""
    parser.add_argument(
        'url',
        type=str,
        help=(
            'Specifies the URL of the bucket for which anywhere cache instances'
            ' should be listed.'
        ),
    )
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Yields a display dictionary for each cache instance on the bucket."""
    bucket_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    api_client = api_factory.get_api(bucket_url.scheme)
    # Field titles/ordering are shared with the `describe` command.
    for cache in api_client.list_anywhere_caches(bucket_url.bucket_name):
      yield resource_util.get_display_dict_for_resource(
          cache,
          describe.AnywhereCacheDisplayTitlesAndDefaults,
          args.raw,
      )

View File

@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of pause command to pause Anywhere Cache instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import pause_anywhere_cache_task
@base.DefaultUniverseOnly
class Pause(base.Command):
  """Pause Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      The pause operation can be used to stop the data ingestion of a cache
      instance in RUNNING state (read-only cache) until the Resume is invoked.
      """,
      # Fixed grammar: "command pause the" -> "command pauses the".
      'EXAMPLES': """
      The following command pauses the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds the cache-instance identifier positionals to the parser."""
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are a'
            ' combination of bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )

  def get_task_iterator(self, args, task_status_queue):
    """Yields one PauseAnywhereCacheTask per identifier in args.id."""
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # Split on the last '/' so the trailing segment is the cache id.
      bucket_name, _, anywhere_cache_id = id_str.rpartition('/')
      yield pause_anywhere_cache_task.PauseAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    """Executes pause tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self.get_task_iterator(args, task_status_queue)
    plurality_checkable_task_iterator = (
        plurality_checkable_iterator.PluralityCheckableIterator(task_iterator)
    )
    self.exit_code = task_executor.execute_tasks(
        plurality_checkable_task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
        # args may not define this flag on every track; default to False.
        continue_on_error=getattr(args, 'continue_on_error', False),
    )

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of resume command for resuming Anywhere Cache instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import resume_anywhere_cache_task
@base.DefaultUniverseOnly
class Resume(base.Command):
  """Resume Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      Resume operation could be used to revert the Paused and Disabled state.
      Once a paused/disabled cache is resumed, the cache will be set to
      RUNNING/CREATING state:

      1. RUNNING if the cache is active.
      2. CREATING if the cache is pending creation.
      """,
      # Fixed grammar: "command resume the" -> "command resumes the".
      'EXAMPLES': """
      The following command resumes the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds the cache-instance identifier positionals to the parser."""
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are a'
            ' combination of bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )

  def _get_task_iterator(self, args, task_status_queue):
    """Yields one ResumeAnywhereCacheTask per identifier in args.id."""
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # Split on the last delimiter so the trailing segment is the cache id.
      bucket_name, _, anywhere_cache_id = id_str.rpartition(
          storage_url.CLOUD_URL_DELIMITER
      )
      yield resume_anywhere_cache_task.ResumeAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    """Executes resume tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self._get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating Anywhere Cache instances."""
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import patch_anywhere_cache_task
@base.DefaultUniverseOnly
class Update(base.UpdateCommand):
  """Update Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      Update one or more Anywhere Cache instances. A cache instance can be
      updated if its state is created or pending creation.
      """,
      # Fixed user-facing typo: "admisson" -> "admission".
      'EXAMPLES': """
      The following command updates cache entry's ttl, and admission policy of
      anywhere cache instance in bucket ``my-bucket'' having anywhere_cache_id
      ``my-cache-id'':

        $ {command} my-bucket/my-cache-id --ttl=6h --admission-policy='ADMIT_ON_SECOND_MISS'

      The following command updates cache entry's ttl of anywhere cache
      instances in bucket ``bucket-1'' and ``bucket-2'' having anywhere_cache_id
      ``my-cache-id-1'', and ``my-cache-id-2'' respectively:

        $ {command} bucket-1/my-cache-id-1 bucket-2/my-cache-id-2 --ttl=6h
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Adds identifier positionals and the updatable-property flags."""
    # Fixed missing space and grammar in the original help text
    # ("Instance.They are combination of").
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are a'
            ' combination of bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )
    parser.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help='Cache entry time-to-live. Default to 24h if not provided.',
    )
    flags.add_admission_policy_flag(parser)

  def get_task_iterator(self, args, task_status_queue):
    """Yields one PatchAnywhereCacheTask per identifier in args.id."""
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    # The API expects the TTL as a seconds string (e.g. '21600s').
    ttl = str(args.ttl) + 's' if args.ttl else None
    for id_str in args.id:
      # Split on the last '/' so the trailing segment is the cache id.
      bucket_name, _, anywhere_cache_id = id_str.rpartition('/')
      yield patch_anywhere_cache_task.PatchAnywhereCacheTask(
          bucket_name,
          anywhere_cache_id,
          admission_policy=args.admission_policy,
          ttl=ttl,
      )

  def Run(self, args):
    """Executes patch tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self.get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for managing Storage bucket configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Hidden, alpha-only command group: `gcloud alpha storage buckets config`.
# Subcommands (e.g. the declarative config export) live in sibling modules.
@base.DefaultUniverseOnly
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Config(base.Group):
  """Manage Storage bucket configurations."""

View File

@@ -0,0 +1,38 @@
# Declarative calliope command spec for
# `gcloud alpha storage buckets config export` (CONFIG_EXPORT command type).
release_tracks: [ALPHA]

command_type: CONFIG_EXPORT

help_text:
  brief: Export the configuration for a Storage bucket.
  description: |
    *{command}* exports the configuration for a Storage bucket.

    Bucket configurations can be exported in
    Kubernetes Resource Model (krm) or Terraform HCL formats. The
    default format is `krm`.

    Specifying `--all` allows you to export the configurations for all
    buckets within the project.

    Specifying `--path` allows you to export the configuration(s) to
    a local directory.
  examples: |
    To export the configuration for a bucket, run:

      $ {command} my-bucket

    To export the configuration for a bucket to a file, run:

      $ {command} my-bucket --path=/path/to/dir/

    To export the configuration for a bucket in Terraform
    HCL format, run:

      $ {command} my-bucket --resource-format=terraform

    To export the configurations for all buckets within a
    project, run:

      $ {command} --all

arguments:
  resource:
    help_text: Bucket to export the configuration for.
    # Resource spec is shared with other storage declarative commands.
    spec: !REF googlecloudsdk.command_lib.storage.resources.resources:bucket

View File

@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for making buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import user_request_args_factory
from googlecloudsdk.command_lib.storage.resources import resource_reference
from googlecloudsdk.command_lib.storage.tasks.buckets import create_bucket_task
_LIFECYCLE_HELP_TEXT = """
Sets the lifecycle management configuration on a bucket. For example,
The following lifecycle management configuration JSON document
specifies that all objects in this bucket that are more than 365 days
old are deleted automatically:
{
"rule":
[
{
"action": {"type": "Delete"},
"condition": {"age": 365}
}
]
}
"""
@base.UniverseCompatible
class Create(base.Command):
  """Create buckets for storing objects."""

  detailed_help = {
      'DESCRIPTION': """
      Create new buckets.
      """,
      'EXAMPLES': """
      The following command creates 2 Cloud Storage buckets, one named
      ``my-bucket'' and a second bucket named ``my-other-bucket'':

        $ {command} gs://my-bucket gs://my-other-bucket

      The following command creates a bucket with the ``nearline'' default
      [storage class](https://cloud.google.com/storage/docs/storage-classes) in
      the ``asia'' [location](https://cloud.google.com/storage/docs/locations):

        $ {command} gs://my-bucket --default-storage-class=nearline --location=asia
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the bucket URL positionals and all creation flags."""
    parser.add_argument(
        'url', type=str, nargs='+', help='The URLs of the buckets to create.'
    )
    parser.add_argument(
        '--location',
        '-l',
        type=str,
        # Location is mandatory outside the default universe.
        required=arg_parsers.ArgRequiredInUniverse(
            default_universe=False, non_default_universe=True
        ),
        help=(
            '[Location](https://cloud.google.com/storage/docs/locations)'
            ' for the bucket. If not specified, the location used by Cloud'
            " Storage is ``us''. A bucket's location cannot be changed"
            ' after creation.'
        ),
    )
    parser.add_argument(
        '--public-access-prevention',
        '--pap',
        action=arg_parsers.StoreTrueFalseAction,
        help=(
            'Sets public access prevention to "enforced".'
            ' For details on how exactly public access is blocked, see:'
            ' http://cloud.google.com/storage/docs/public-access-prevention'
        ),
    )
    parser.add_argument(
        '--uniform-bucket-level-access',
        '-b',
        action=arg_parsers.StoreTrueFalseAction,
        help='Turns on uniform bucket-level access setting. Default is False.',
    )
    parser.add_argument(
        '--default-storage-class',
        '-c',
        '-s',
        type=str,
        help=(
            'Default [storage class]'
            '(https://cloud.google.com/storage/docs/storage-classes) for'
            ' the bucket. If not specified, the default storage class'
            ' used by Cloud Storage is "Standard".'
        ),
    )
    parser.add_argument(
        '--default-encryption-key',
        '-k',
        type=str,
        help=(
            'Set the default KMS key using the full path to the key, which '
            'has the following form: '
            "``projects/[project-id]/locations/[location]/keyRings/[key-ring]/cryptoKeys/[my-key]''."
        ),
    )
    parser.add_argument(
        '--retention-period',
        help=(
            'Minimum [retention period](https://cloud.google.com'
            '/storage/docs/bucket-lock#retention-periods)'
            ' for objects stored in the bucket, for example'
            " ``--retention-period=P1Y1M1DT5S''. Objects added to the bucket"
            " cannot be deleted until they've been stored for the specified"
            ' length of time. Default is no retention period. Only available'
            ' for Cloud Storage using the JSON API.'
        ),
    )
    flags.add_placement_flag(parser)
    parser.add_argument(
        '--soft-delete-duration',
        type=arg_parsers.Duration(),
        help=(
            'Duration to retain soft-deleted objects. For example, "2w1d" is'
            ' two weeks and one day. See `gcloud topic datetimes` for more'
            ' information on the duration format. Setting `0` will disable'
            ' soft delete policy on the bucket. Default is 7 days.'
        ),
    )
    flags.add_additional_headers_flag(parser)
    flags.add_autoclass_flags(parser)
    flags.add_enable_per_object_retention_flag(parser)
    flags.add_recovery_point_objective_flag(parser)
    parser.add_argument(
        '--enable-hierarchical-namespace',
        action='store_true',
        default=None,
        help=(
            'Enable hierarchical namespace for the bucket. To use this'
            ' flag, you must also use --uniform-bucket-level-access'
        ),
    )
    parser.add_argument('--lifecycle-file', help=_LIFECYCLE_HELP_TEXT)
    flags.add_ip_filter_file_flag(parser)
    if cls.ReleaseTrack() is base.ReleaseTrack.ALPHA:
      flags.add_encryption_enforcement_file_flag(parser)

  def Run(self, args):
    """Creates each requested bucket after validating flag combinations."""
    for url_string in args.url:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_bucket(args.command_path, url)
      resource = resource_reference.UnknownResource(url)
      user_request_args = (
          user_request_args_factory.get_user_request_args_from_command_args(
              args, metadata_type=user_request_args_factory.MetadataType.BUCKET
          )
      )
      if (
          user_request_args.resource_args.autoclass_terminal_storage_class
          is not None
          and not user_request_args.resource_args.enable_autoclass
      ):
        # Fixed: refer to the flag by its user-facing hyphenated spelling
        # (gcloud flags use dashes, not underscores).
        raise errors.Error(
            '--autoclass-terminal-storage-class is only allowed if'
            ' --enable-autoclass is set.'
        )
      create_bucket_task.CreateBucketTask(
          resource, user_request_args=user_request_args
      ).execute()

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of rb command for deleting buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.rm import delete_task_iterator_factory
@base.UniverseCompatible
class Delete(base.Command):
  """Deletes Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION':
          """
      Deletes one or more Cloud Storage buckets.
      """,
      'EXAMPLES':
          """
      Delete a Google Cloud Storage bucket named "my-bucket":

        $ {command} gs://my-bucket

      Delete two buckets:

        $ {command} gs://my-bucket gs://my-other-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URLs plus common storage flags."""
    parser.add_argument(
        'urls', nargs='+', help='Specifies the URLs of the buckets to delete.')
    flags.add_additional_headers_flag(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    """Validates all URLs, then deletes the buckets via parallel tasks."""
    # Validate every URL before queuing any work so a single bad URL fails
    # fast instead of after some buckets were already deleted.
    for url_string in args.urls:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_bucket(args.command_path, url)

    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    # Expand the URL strings into per-bucket delete tasks.
    bucket_iterator = delete_task_iterator_factory.DeleteTaskIteratorFactory(
        name_expansion.NameExpansionIterator(
            args.urls, include_buckets=name_expansion.BucketSetting.YES
        ),
        task_status_queue=task_status_queue,
    ).bucket_iterator()
    plurality_checkable_bucket_iterator = (
        plurality_checkable_iterator.PluralityCheckableIterator(
            bucket_iterator))
    # Propagate task failures through the command's exit code.
    self.exit_code = task_executor.execute_tasks(
        plurality_checkable_bucket_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER,
            manifest_path=None),
        continue_on_error=args.continue_on_error)

View File

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets describe command for getting info on buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import gsutil_json_printer
from googlecloudsdk.command_lib.storage.resources import resource_util
def _add_common_args(parser):
  """Adds arguments shared by the GA and ALPHA `buckets describe` commands."""
  parser.add_argument('url', help='Specifies URL of bucket to describe.')
  flags.add_additional_headers_flag(parser)
  flags.add_raw_display_flag(parser)
  # NOTE(review): registers a gsutil-compatible JSON resource printer as a
  # side effect of argument setup — presumably to support gsutil-style
  # --format output; confirm against GsutilJsonPrinter docs.
  gsutil_json_printer.GsutilJsonPrinter.Register()
def _validate_url_does_not_contain_wildcards(url):
  """Raises InvalidUrlError when the URL string contains wildcard characters."""
  if not wildcard_iterator.contains_wildcard(url):
    return
  raise errors.InvalidUrlError(
      'Describe does not accept wildcards because it returns a single'
      ' resource. Please use the `ls` or `buckets list` command for'
      ' retrieving multiple resources.'
  )
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class Describe(base.DescribeCommand):
  """Describes Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION': """
      Describe a Cloud Storage bucket.
      """,
      'EXAMPLES': """
      Describe a Google Cloud Storage bucket named "my-bucket":

        $ {command} gs://my-bucket

      Describe bucket with JSON formatting, only returning the "name" key:

        $ {command} gs://my-bucket --format="json(name)"
      """,
  }

  @staticmethod
  def Args(parser):
    _add_common_args(parser)

  def Run(self, args):
    """Fetches full bucket metadata and returns it as a display dict."""
    # Wildcards are rejected because describe must resolve to one resource.
    _validate_url_does_not_contain_wildcards(args.url)
    url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_bucket(args.command_path, url)
    # FULL fields scope requests all available bucket metadata.
    bucket_resource = api_factory.get_api(url.scheme).get_bucket(
        url.bucket_name,
        fields_scope=cloud_api.FieldsScope.FULL,
    )
    return resource_util.get_display_dict_for_resource(
        bucket_resource,
        full_resource_formatter.BucketDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class DescribeAlpha(Describe):
  """Describes Cloud Storage buckets."""

  @staticmethod
  def Args(parser):
    _add_common_args(parser)
    # ALPHA-only flag for inspecting soft-deleted buckets.
    flags.add_soft_deleted_flag(parser)

  def Run(self, args):
    """Fetches bucket metadata, optionally for a soft-deleted generation."""
    _validate_url_does_not_contain_wildcards(args.url)
    # Generation parsing is allowed so gs://bucket#<generation> URLs can
    # address a specific (e.g. soft-deleted) bucket generation.
    url = storage_url.storage_url_from_string(
        args.url, is_bucket_gen_parsing_allowed=True
    )
    errors_util.raise_error_if_not_bucket(args.command_path, url)
    bucket_resource = api_factory.get_api(url.scheme).get_bucket(
        url.bucket_name,
        generation=int(url.generation) if url.generation else None,
        fields_scope=cloud_api.FieldsScope.FULL,
        # getattr guards against tracks where the flag is not registered.
        soft_deleted=getattr(args, 'soft_deleted', False),
    )
    return resource_util.get_display_dict_for_resource(
        bucket_resource,
        full_resource_formatter.BucketDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets get-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class GetIamPolicy(base.Command):
  """Get the IAM policy for a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Get the IAM policy for a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To get the IAM policy for BUCKET:

        $ {command} gs://BUCKET

      To output the IAM policy for BUCKET to a file:

        $ {command} gs://BUCKET > policy.txt
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument('url', help='Request IAM policy for this bucket.')

  def Run(self, args):
    """Returns the bucket's IAM policy."""
    # First parse validates the URL is a GCS bucket before any API call.
    url_object = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, url_object)
    # NOTE(review): second parse via iam_command_util — presumably expands a
    # wildcard URL and errors unless exactly one bucket matches; confirm.
    matching_url = iam_command_util.get_single_matching_url(args.url)
    return api_factory.get_api(matching_url.scheme).get_bucket_iam_policy(
        matching_url.bucket_name)

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets list command for getting info on buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
def _add_common_args(parser):
  """Adds arguments shared by the GA and ALPHA `buckets list` commands."""
  parser.add_argument(
      'urls', nargs='*', help='Specifies URL of buckets to List.'
  )
  flags.add_additional_headers_flag(parser)
  flags.add_raw_display_flag(parser)
  # Enables the standard --uri output mode for list commands.
  flags.add_uri_support_to_list_commands(parser)
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class List(base.ListCommand):
  """Lists Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION': """
      List Cloud Storage buckets.
      """,
      'EXAMPLES': """
      List all Google Cloud Storage buckets in default project:

        $ {command}

      List buckets beginning with ``b'':

        $ {command} gs://b*

      List buckets with JSON formatting, only returning the ``name'' key:

        $ {command} --format="json(name)"
      """,
  }

  @staticmethod
  def Args(parser):
    _add_common_args(parser)

  def Run(self, args):
    """Yields a display dict for each bucket matching the given URLs."""
    if args.urls:
      urls = []
      for url_string in args.urls:
        url = storage_url.storage_url_from_string(url_string)
        # Only provider (gs://) or bucket (gs://bucket) URLs are accepted;
        # object-level URLs cannot name buckets.
        if not (url.is_provider() or url.is_bucket()):
          raise errors.InvalidUrlError(
              'URL does not match buckets: {}'.format(url_string)
          )
        urls.append(url)
    else:
      # No URLs: list every GCS bucket in the default project.
      urls = [storage_url.CloudUrl(storage_url.ProviderPrefix.GCS)]

    for url in urls:
      for bucket in wildcard_iterator.get_wildcard_iterator(
          url.url_string,
          fields_scope=cloud_api.FieldsScope.FULL,
          get_bucket_metadata=True,
          # --soft-deleted is only registered on the ALPHA track, hence the
          # getattr with a False default.
          soft_deleted_buckets=getattr(args, 'soft_deleted', False),
      ):
        yield resource_util.get_display_dict_for_resource(
            bucket,
            full_resource_formatter.BucketDisplayTitlesAndDefaults,
            display_raw_keys=args.raw,
        )
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class ListAlpha(List):
  """Lists Cloud Storage buckets."""

  @staticmethod
  def Args(parser):
    _add_common_args(parser)
    # ALPHA-only: lets Run() (inherited from List) include soft-deleted
    # buckets via getattr(args, 'soft_deleted', False).
    flags.add_soft_deleted_flag(parser)

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage buckets notifications commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.UniverseCompatible
class Notifications(base.Group):
  """Manage Cloud Storage bucket notifications."""
  # Command group container; the create/delete/describe/list subcommands
  # live in this package's sibling modules.

View File

@@ -0,0 +1,290 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for notifications."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import functools
import time
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.api_lib.storage.gcs_json import error_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
@error_util.catch_http_error_raise_gcs_api_error()
def _maybe_create_or_modify_topic(topic_name, service_account_email):
  """Ensures that topic with SA permissions exists, creating it if needed.

  Args:
    topic_name (str): Name of the Cloud Pub/Sub topic to use or create.
    service_account_email (str): The project service account for Google Cloud
      Storage. This SA needs publish permission on the PubSub topic.

  Returns:
    True if topic was created or had its IAM permissions modified.
    Otherwise, False.
  """
  pubsub_client = apis.GetClientInstance('pubsub', 'v1')
  pubsub_messages = apis.GetMessagesModule('pubsub', 'v1')

  # Probe for the topic first; create it only on a 404.
  try:
    pubsub_client.projects_topics.Get(
        pubsub_messages.PubsubProjectsTopicsGetRequest(topic=topic_name))
    log.warning('Topic already exists: ' + topic_name)
    created_new_topic = False
  except apitools_exceptions.HttpError as e:
    if e.status_code != 404:
      # Expect an Apitools NotFound error. Raise error otherwise.
      raise
    new_topic = pubsub_client.projects_topics.Create(
        pubsub_messages.Topic(name=topic_name))
    log.info('Created topic:\n{}'.format(new_topic))
    created_new_topic = True

  # Verify that the service account is in the IAM policy.
  topic_iam_policy = pubsub_client.projects_topics.GetIamPolicy(
      pubsub_messages.PubsubProjectsTopicsGetIamPolicyRequest(
          resource=topic_name))
  expected_binding = pubsub_messages.Binding(
      role='roles/pubsub.publisher',
      members=['serviceAccount:' + service_account_email])
  # Can be improved by checking for roles stronger than "pubsub.publisher".
  # We could also recurse up the hierarchy, checking project-level permissions.
  # However, the caller may not have permission to perform this recursion.
  # The trade-off of complexity for the benefit of not granting a redundant,
  # permission is not worth it, so we grant "publisher" if a simple check fails.
  if expected_binding not in topic_iam_policy.bindings:
    topic_iam_policy.bindings.append(expected_binding)
    updated_topic_iam_policy = pubsub_client.projects_topics.SetIamPolicy(
        pubsub_messages.PubsubProjectsTopicsSetIamPolicyRequest(
            resource=topic_name,
            setIamPolicyRequest=pubsub_messages.SetIamPolicyRequest(
                policy=topic_iam_policy)))
    log.info('Updated topic IAM policy:\n{}'.format(updated_topic_iam_policy))
    # Permissions changed, so callers should allow propagation time.
    return True
  else:
    log.warning(
        'Project service account {} already has publish permission for topic {}'
        .format(service_account_email, topic_name))
  return created_new_topic
@base.UniverseCompatible
class Create(base.Command):
  """Create a notification configuration on a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* creates a notification configuration on a bucket,
      establishing a flow of event notifications from Cloud Storage to a
      Cloud Pub/Sub topic. As part of creating this flow, it also verifies
      that the destination Cloud Pub/Sub topic exists, creating it if necessary,
      and verifies that the Cloud Storage bucket has permission to publish
      events to that topic, granting the permission if necessary.

      If a destination Cloud Pub/Sub topic is not specified with the `-t` flag,
      Cloud Storage chooses a topic name in the default project whose ID is
      the same as the bucket name. For example, if the default project ID
      specified is `default-project` and the bucket being configured is
      `gs://example-bucket`, the create command uses the Cloud Pub/Sub topic
      `projects/default-project/topics/example-bucket`.

      In order to enable notifications, your project's
      [Cloud Storage service agent](https://cloud.google.com/storage/docs/projects#service-accounts)
      must have the IAM permission "pubsub.topics.publish".
      This command checks to see if the destination Cloud Pub/Sub topic grants
      the service agent this permission. If not, the create command attempts to
      grant it.

      A bucket can have up to 100 total notification configurations and up to
      10 notification configurations set to trigger for a specific event.
      """,
      'EXAMPLES':
          """
      Send notifications of all changes to the bucket
      `example-bucket` to the Cloud Pub/Sub topic
      `projects/default-project/topics/example-bucket`:

        $ {command} gs://example-bucket

      The same as the above but sends no notification payload:

        $ {command} --payload-format=none gs://example-bucket

      Include custom metadata in notification payloads:

        $ {command} --custom-attributes=key1:value1,key2:value2 gs://example-bucket

      Create a notification configuration that only sends an event when a new
      object has been created or an object is deleted:

        $ {command} --event-types=OBJECT_FINALIZE,OBJECT_DELETE gs://example-bucket

      Create a topic and notification configuration that sends events only when
      they affect objects with the prefix `photos/`:

        $ {command} --object-prefix=photos/ gs://example-bucket

      Specifies the destination topic ID `files-to-process` in the default
      project:

        $ {command} --topic=files-to-process gs://example-bucket

      The same as above but specifies a Cloud Pub/Sub topic belonging
      to the specific cloud project `example-project`:

        $ {command} --topic=projects/example-project/topics/files-to-process gs://example-bucket

      Skip creating a topic when creating the notification configuration:

        $ {command} --skip-topic-setup gs://example-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the bucket URL argument and notification flags."""
    parser.add_argument(
        'url',
        help='URL of the bucket to create the notification configuration'
        ' on.')
    parser.add_argument(
        '-m',
        '--custom-attributes',
        metavar='KEY=VALUE',
        type=arg_parsers.ArgDict(),
        help='Specifies key:value attributes that are appended to the set of'
        ' attributes sent to Cloud Pub/Sub for all events associated with'
        ' this notification configuration.')
    parser.add_argument(
        '-e',
        '--event-types',
        metavar='NOTIFICATION_EVENT_TYPE',
        type=arg_parsers.ArgList(
            choices=sorted(
                [status.value for status in cloud_api.NotificationEventType])),
        help=(
            'Specify event type filters for this notification configuration.'
            ' Cloud Storage will send notifications of only these types. By'
            ' default, Cloud Storage sends notifications for all event types.'
            ' * OBJECT_FINALIZE: An object has been created.'
            ' * OBJECT_METADATA_UPDATE: The metadata of an object has changed.'
            ' * OBJECT_DELETE: An object has been permanently deleted.'
            ' * OBJECT_ARCHIVE: A live version of an object has become a'
            ' noncurrent version.'))
    parser.add_argument(
        '-p',
        '--object-prefix',
        help='Specifies a prefix path for this notification configuration.'
        ' Cloud Storage will send notifications for only objects in the'
        ' bucket whose names begin with the prefix.')
    parser.add_argument(
        '-f',
        '--payload-format',
        choices=sorted(
            [status.value for status in cloud_api.NotificationPayloadFormat]),
        default=cloud_api.NotificationPayloadFormat.JSON.value,
        help='Specifies the payload format of notification messages.'
        ' Notification details are available in the message attributes.'
        " 'none' sends no payload.")
    parser.add_argument(
        '-s',
        '--skip-topic-setup',
        action='store_true',
        help='Skips creation and permission assignment of the Cloud Pub/Sub'
        ' topic. This is useful if the caller does not have permission to'
        ' access the topic in question, or if the topic already exists and has'
        ' the appropriate publish permission assigned.')
    parser.add_argument(
        '-t',
        '--topic',
        help='Specifies the Cloud Pub/Sub topic to send notifications to.'
        ' If not specified, this command chooses a topic whose project is'
        ' your default project and whose ID is the same as the'
        ' Cloud Storage bucket name.')

  def Run(self, args):
    """Resolves the topic, ensures permissions, and creates the config."""
    project_id = properties.VALUES.core.project.GetOrFail()
    url = storage_url.storage_url_from_string(args.url)
    notification_configuration_iterator.raise_error_if_not_gcs_bucket_matching_url(
        url)

    # Resolve the destination topic to a full "projects/.../topics/..." path.
    if not args.topic:
      topic_name = 'projects/{}/topics/{}'.format(project_id, url.bucket_name)
    elif not args.topic.startswith('projects/'):
      # A topic ID may be present but not a whole path. Use the default project.
      topic_name = 'projects/{}/topics/{}'.format(
          project_id,
          args.topic.rpartition('/')[-1])
    else:
      topic_name = args.topic

    # Notifications supported for only GCS.
    gcs_client = api_factory.get_api(storage_url.ProviderPrefix.GCS)
    if not args.skip_topic_setup:
      # Using generated topic name instead of custom one.
      # Project number is different than project ID.
      bucket_project_number = gcs_client.get_bucket(
          url.bucket_name).metadata.projectNumber
      # Fetch the email of the service account that will need access to
      # the new pubsub topic.
      service_account_email = gcs_client.get_service_agent(
          project_number=bucket_project_number)
      log.info(
          'Checking for topic {} with access for project {} service account {}.'
          .format(topic_name, project_id, service_account_email))
      created_new_topic_or_set_new_permissions = _maybe_create_or_modify_topic(
          topic_name, service_account_email)
    else:
      created_new_topic_or_set_new_permissions = False

    if args.event_types:
      event_types = [
          cloud_api.NotificationEventType(event_type)
          for event_type in args.event_types
      ]
    else:
      event_types = None

    create_notification_configuration = functools.partial(
        gcs_client.create_notification_configuration,
        url,
        topic_name,
        custom_attributes=args.custom_attributes,
        event_types=event_types,
        object_name_prefix=args.object_prefix,
        payload_format=cloud_api.NotificationPayloadFormat(args.payload_format))
    try:
      return create_notification_configuration()
    except api_errors.CloudApiError:
      if not created_new_topic_or_set_new_permissions:
        raise
      # Fresh topic/IAM changes can lag; retry once after letting them settle.
      log.warning(
          'Retrying create notification request because topic changes may'
          ' take up to 10 seconds to process.')
      time.sleep(10)
      return create_notification_configuration()

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete notification configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.notifications import delete_notification_configuration_task
def _delete_notification_configuration_task_iterator(urls):
  """Yields one delete task per notification configuration found in urls."""
  result_iterator = (
      notification_configuration_iterator
      .get_notification_configuration_iterator(urls))
  for result in result_iterator:
    bucket_url = result.bucket_url
    notification_id = result.notification_configuration.id
    yield (
        delete_notification_configuration_task
        .DeleteNotificationConfigurationTask(bucket_url, notification_id))
@base.UniverseCompatible
class Delete(base.DeleteCommand):
  """Delete notification configurations from a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* deletes notification configurations from a bucket. If a
      notification configuration name is passed as a parameter, that
      configuration alone is deleted. If a bucket name is passed, all
      notification configurations associated with the bucket are deleted.

      Cloud Pub/Sub topics associated with this notification configuration
      are not deleted by this command. Those must be deleted separately,
      for example with the command "gcloud pubsub topics delete".
      """,
      'EXAMPLES':
          """
      Delete a single notification configuration (with ID 3) in the
      bucket `example-bucket`:

        $ {command} projects/_/buckets/example-bucket/notificationConfigs/3

      Delete all notification configurations in the bucket `example-bucket`:

        $ {command} gs://example-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='+',
        help='Specifies notification configuration names or buckets.')

  def Run(self, args):
    """Deletes the matching notification configurations in parallel."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    # NOTE(review): the return value of execute_tasks is discarded here,
    # unlike `buckets delete` which assigns it to self.exit_code — confirm
    # whether failures should affect the exit code.
    task_executor.execute_tasks(
        _delete_notification_configuration_task_iterator(args.urls),
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER,
            manifest_path=None),
    )

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to show metadata of a notification configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core.resource import resource_projector
@base.UniverseCompatible
class Describe(base.DescribeCommand):
  """Show metadata for a notification configuration."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* prints populated metadata for a notification configuration.
      """,
      'EXAMPLES':
          """
      Describe a single notification configuration (with ID 3) in the
      bucket `example-bucket`:

        $ {command} projects/_/buckets/example-bucket/notificationConfigs/3
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument('url', help='The url of the notification configuration')

  def Run(self, args):
    """Fetches and returns the notification configuration as serializable data."""
    # The URL must name a specific notification config, i.e. both a bucket
    # and a notification ID must be parseable from it.
    bucket_url, notification_id = (
        notification_configuration_iterator
        .get_bucket_url_and_notification_id_from_url(args.url))
    if not (bucket_url and notification_id):
      raise errors.InvalidUrlError(
          'Received invalid notification configuration URL: ' + args.url)
    # Notifications are GCS-only, so the GCS API client is used directly.
    return resource_projector.MakeSerializable(
        api_factory.get_api(
            storage_url.ProviderPrefix.GCS).get_notification_configuration(
                bucket_url, notification_id))

View File

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list notification configurations belonging to a bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.core.resource import resource_printer
from googlecloudsdk.core.resource import resource_projector
_PUBSUB_DOMAIN_PREFIX_LENGTH = len('//pubsub.googleapis.com/')
def _get_human_readable_notification(url, config):
"""Returns pretty notification string."""
if config.custom_attributes:
custom_attributes_string = '\n\tCustom attributes:'
for attribute in config.custom_attributes.additionalProperties:
custom_attributes_string += '\n\t\t{}: {}'.format(
attribute.key, attribute.value
)
else:
custom_attributes_string = ''
if config.event_types or config.object_name_prefix:
filters_string = '\n\tFilters:'
if config.event_types:
filters_string += '\n\t\tEvent Types: {}'.format(
', '.join(config.event_types)
)
if config.object_name_prefix:
filters_string += "\n\t\tObject name prefix: '{}'".format(
config.object_name_prefix
)
else:
filters_string = ''
return (
'projects/_/buckets/{bucket}/notificationConfigs/{notification}\n'
'\tCloud Pub/Sub topic: {topic}'
'{custom_attributes}{filters}\n\n'.format(
bucket=url.bucket_name,
notification=config.id,
topic=config.topic[_PUBSUB_DOMAIN_PREFIX_LENGTH:],
custom_attributes=custom_attributes_string,
filters=filters_string,
)
)
@base.UniverseCompatible
class List(base.ListCommand):
  """List the notification configurations belonging to a given bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* provides a list of notification configurations belonging to a
      given bucket. The listed name of each configuration can be used
      with the delete sub-command to delete that specific notification config.
      """,
      'EXAMPLES':
          """
      Fetch the list of notification configs for the bucket `example-bucket`:

        $ {command} gs://example-bucket

      Fetch the notification configs in all buckets matching a wildcard:

        $ {command} gs://example-*

      Fetch all of the notification configs for buckets in the default project:

        $ {command}
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='*',
        help='Google Cloud Storage bucket paths. The path must begin '
        'with gs:// and may contain wildcard characters.')
    parser.add_argument(
        '--human-readable',
        action='store_true',
        # Used by shim. Could be public but don't want maintenance burden.
        hidden=True,
        help=(
            'Prints notification information in a more descriptive,'
            ' unstructured format.'
        ),
    )

  def Display(self, args, resources):
    """Prints results as plain strings or structured YAML depending on flags."""
    if args.human_readable:
      resource_printer.Print(resources, 'object')
    else:
      resource_printer.Print(resources, args.format or 'yaml')

  def Run(self, args):
    """Yields notification configurations for the requested buckets."""
    if not args.urls:
      # Provider URL will fetch all notification configurations in project.
      urls = ['gs://']
    else:
      urls = args.urls

    # Not bucket URLs raise error in iterator.
    for notification_configuration_iterator_result in (
        notification_configuration_iterator
        .get_notification_configuration_iterator(
            urls, accept_notification_configuration_urls=False)):
      url, config = notification_configuration_iterator_result
      if args.human_readable:
        yield _get_human_readable_notification(url, config)
      else:
        yield {
            'Bucket URL': url.url_string,
            'Notification Configuration': resource_projector.MakeSerializable(
                config
            ),
        }

View File

@@ -0,0 +1,234 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets relocate command."""
import textwrap
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors as command_errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import operations_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
# Warning shown before a relocation that changes the bucket's location and
# therefore incurs write downtime.
# NOTE(review): constant name misspells "RELOCATION"; kept as-is since renaming
# would require touching every reference.
_BUCKET_RELCOCATION_WRITE_DOWNTIME_WARNING = textwrap.dedent("""
1. This move will involve write downtime.
2. In-flight resumable uploads not finished before the write downtime will be \
lost.
3. Bucket tags added to the bucket will result in the relocation being canceled.
4. Please ensure that you have sufficient quota in the destination before \
performing the relocation.
""")
# Warning shown when source and destination locations are the same, i.e. a
# policy-only change with no write downtime.
_BUCKET_RELOCATION_WITHOUT_WRITE_DOWNTIME_WARNING = textwrap.dedent("""
1. This is a policy change move (no write downtime).
2. Please ensure that you have sufficient quota in the destination before \
performing the relocation.
""")
# Warning shown before advancing (finalizing) a relocation, which schedules the
# write lock and starts the downtime window.
_ADVANCING_BUCKET_RELOCATION_WARNING = textwrap.dedent("""
1. Any ongoing, in-flight resumable uploads will be canceled and lost.
2. Write downtime will be incurred.
""")
def _get_bucket_resource(gcs_client, bucket_url):
"""Fetches the bucket resource for the given bucket storage URL."""
try:
return gcs_client.get_bucket(bucket_url.bucket_name)
except api_errors.CloudApiError as e:
raise command_errors.FatalError(e) from e
def _prompt_user_to_confirm_the_relocation(bucket_resource, args):
  """Warns about relocation side effects and asks the user to confirm."""
  if args.dry_run:
    # Dry runs perform no actual move, so no acknowledgement is needed.
    return

  source_location = str(bucket_resource.location)
  if bucket_resource.data_locations:
    source_location = f'{source_location} {bucket_resource.data_locations}'

  # A same-location move is a policy-only change and avoids write downtime.
  if bucket_resource.location.casefold() == args.location.casefold():
    warning_message = _BUCKET_RELOCATION_WITHOUT_WRITE_DOWNTIME_WARNING
  else:
    warning_message = _BUCKET_RELCOCATION_WRITE_DOWNTIME_WARNING

  log.warning(f'The bucket {args.url} is in {source_location}.')
  log.warning(warning_message)

  confirmation_prompt = (
      "Please acknowledge that you've read the above warnings and want to"
      f' relocate the bucket {args.url}?'
  )
  # Aborts the command if the user declines.
  console_io.PromptContinue(
      prompt_string=confirmation_prompt,
      cancel_on_no=True,
  )
  log.status.Print(f'Starting bucket relocation for {args.url}...\n')
def _prompt_user_to_confirm_advancing_the_relocation(bucket_name):
  """Asks the user to confirm starting write downtime for a relocation."""
  log.warning(_ADVANCING_BUCKET_RELOCATION_WARNING)
  confirmation_prompt = (
      'This will start the write downtime for your relocation of gs://'
      f'{bucket_name}, are you sure you want to continue?'
  )
  # Aborts the command if the user declines.
  console_io.PromptContinue(
      prompt_string=confirmation_prompt,
      cancel_on_no=True,
  )
# TODO: b/361729720 - Make bucket-relocate command group universe compatible.
@base.DefaultUniverseOnly
class Relocate(base.Command):
  """Relocates bucket between different locations."""

  detailed_help = {
      'DESCRIPTION': """
      Relocates a bucket between different locations.
      """,
      'EXAMPLES': """
      To move a bucket (``gs://my-bucket'') to the ``us-central1'' location, use
      the following command:
      $ {command} gs://my-bucket --location=us-central1
      To move a bucket to a custom dual-region, use the following command:
      $ {command} gs://my-bucket --location=us
      --placement=us-central1,us-east1
      To validate the operation without actually moving the bucket, use the
      following command:
      $ {command} gs://my-bucket --location=us-central1 --dry-run
      To schedule a write lock for the move, with ttl for reverting the write
      lock after 7h, if the relocation has not succeeded, use the following
      command:
      $ {command}
      --operation=projects/_/buckets/my-bucket/operations/C894F35J
      --finalize --ttl=7h
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.SetSortArgs(False)
    # Exactly one of the two modes may be used per invocation: initiating a
    # relocation (bucket URL + destination) or advancing an existing one
    # (operation name).
    relocate_arguments_group = parser.add_mutually_exclusive_group(
        required=True
    )
    bucket_relocate_group = relocate_arguments_group.add_group(
        'Arguments for initiating the bucket relocate operation.'
    )
    bucket_relocate_group.SetSortArgs(False)
    bucket_relocate_group.add_argument(
        'url',
        type=str,
        help='The URL of the bucket to relocate.',
    )
    bucket_relocate_group.add_argument(
        '--location',
        type=str,
        required=True,
        help=(
            'The final [location]'
            '(https://cloud.google.com/storage/docs/locations) where the'
            ' bucket will be relocated to. If no location is provided, Cloud'
            ' Storage will use the default location, which is us.'
        ),
    )
    flags.add_placement_flag(bucket_relocate_group)
    bucket_relocate_group.add_argument(
        '--dry-run',
        action='store_true',
        help=(
            'Prints the operations that the relocate command would perform'
            ' without actually performing relocation. This is helpful to'
            ' identify any issues that need to be detected asynchronously.'
        ),
    )
    advance_relocate_operation_group = relocate_arguments_group.add_group(
        'Arguments for advancing the relocation operation.'
    )
    advance_relocate_operation_group.SetSortArgs(False)
    advance_relocate_operation_group.add_argument(
        '--operation',
        type=str,
        required=True,
        help=(
            # Fixed missing space after the period ("operation.The").
            'Specify the relocation operation name to advance the relocation'
            ' operation. The relocation operation name must include the Cloud'
            ' Storage bucket and operation ID.'
        ),
    )
    advance_relocate_operation_group.add_argument(
        '--finalize',
        action='store_true',
        required=True,
        help=(
            # Fixed missing space before the parenthesis ("moves(between").
            'Schedules the write lock to occur. Once activated, no further'
            ' writes will be allowed to the associated bucket. This helps'
            ' minimize disruption to bucket usage. For certain types of'
            ' moves (between Multi Region and Custom Dual Regions), finalize'
            ' is not required.'
        ),
    )
    advance_relocate_operation_group.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help=(
            'Time to live for the relocation operation. Defaults to 12h if not'
            ' provided.'
        ),
    )

  def Run(self, args):
    """Initiates a relocation or advances an in-progress one."""
    gcs_client = api_factory.get_api(storage_url.ProviderPrefix.GCS)

    if args.url:
      # Initiation path: validate the bucket URL, warn the user about
      # downtime, then start (or dry-run) the relocation.
      url = storage_url.storage_url_from_string(args.url)
      errors_util.raise_error_if_not_gcs_bucket(args.command_path, url)
      bucket_resource = _get_bucket_resource(gcs_client, url)
      _prompt_user_to_confirm_the_relocation(bucket_resource, args)
      return gcs_client.relocate_bucket(
          url.bucket_name,
          args.location,
          args.placement,
          args.dry_run,
      )

    # Advancement path: parse the operation name, confirm starting write
    # downtime, then schedule the write lock.
    bucket, operation_id = (
        operations_util.get_operation_bucket_and_id_from_name(args.operation)
    )
    _prompt_user_to_confirm_advancing_the_relocation(bucket)
    gcs_client.advance_relocate_bucket(bucket, operation_id, args.ttl)
    log.status.Print(
        f'Sent request to advance relocation for bucket gs://{bucket} with'
        f' operation {operation_id}.'
    )

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets remove-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class RemoveIamPolicyBinding(base.Command):
  """Remove an IAM policy binding from a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Removes a policy binding from the IAM policy of a bucket, given a bucket
      URL and the binding. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To remove an IAM policy binding from the role of
      roles/storage.objectCreator for the user john.doe@example.com on BUCKET:
        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/storage.objectCreator
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'url', help='URL of bucket to remove IAM policy binding from.')
    iam_util.AddArgsForRemoveIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    """Fetches the bucket's current policy and removes the binding."""
    bucket_url = storage_url.storage_url_from_string(args.url)
    # Only GCS bucket URLs are valid targets for this command.
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)

    api_client = api_factory.get_api(bucket_url.scheme)
    existing_policy = api_client.get_bucket_iam_policy(bucket_url.bucket_name)
    return iam_command_util.remove_iam_binding_from_resource(
        args,
        bucket_url,
        existing_policy,
        set_iam_policy_task.SetBucketIamPolicyTask,
    )

View File

@@ -0,0 +1,94 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets set-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.api_lib.storage.gcs_json import metadata_field_converters
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
def _set_iam_policy_task_iterator(url_strings, policy):
  """Yields a SetBucketIamPolicyTask for each bucket matching the URLs."""
  for url_string in url_strings:
    # Expand wildcards; SHORT scope suffices since only the URL is needed.
    bucket_iterator = wildcard_iterator.get_wildcard_iterator(
        url_string, fields_scope=cloud_api.FieldsScope.SHORT)
    for bucket_resource in bucket_iterator:
      yield set_iam_policy_task.SetBucketIamPolicyTask(
          bucket_resource.storage_url, policy)
@base.UniverseCompatible
class SetIamPolicy(base.Command):
  """Set the IAM policy for a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Set the IAM policy for a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To set the IAM policy in POLICY-FILE on BUCKET:
        $ {command} gs://BUCKET POLICY-FILE
      To set the IAM policy in POLICY-FILE on all buckets beginning with "b":
        $ {command} gs://b* POLICY-FILE
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='+',
        help='URLs for buckets to apply the IAM policy to.'
        ' Can include wildcards.')
    parser.add_argument(
        '-e',
        '--etag',
        help='Custom etag to set on IAM policy. API will reject etags that do'
        ' not match this value, making it useful as a precondition during'
        ' concurrent operations.')
    iam_util.AddArgForPolicyFile(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    """Validates the URLs, parses the policy file, and applies the policy."""
    # Validate every URL up front so no policy is applied if any URL is bad.
    for url_string in args.urls:
      bucket_url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)

    policy = metadata_field_converters.process_iam_file(
        args.policy_file, custom_etag=args.etag)

    self.exit_code, output = iam_command_util.execute_set_iam_task_iterator(
        _set_iam_policy_task_iterator(args.urls, policy),
        args.continue_on_error)
    return output

View File

@@ -0,0 +1,441 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating bucket settings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import stdin_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import user_request_args_factory
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets import update_bucket_task
_CORS_HELP_TEXT = """
Sets the Cross-Origin Resource Sharing (CORS) configuration on a bucket.
An example CORS JSON document looks like the following:
[
{
"origin": ["http://origin1.example.com"],
"responseHeader": ["Content-Type"],
"method": ["GET"],
"maxAgeSeconds": 3600
}
]
For more information about supported endpoints for CORS, see
[Cloud Storage CORS support](https://cloud.google.com/storage/docs/cross-origin#server-side-support).
"""
_LABELS_HELP_TEXT = """
Sets the label configuration for the bucket. An example label JSON document
looks like the following:
{
"your_label_key": "your_label_value",
"your_other_label_key": "your_other_label_value"
}
"""
_LIFECYCLE_HELP_TEXT = """
Sets the lifecycle management configuration on a bucket. For example,
The following lifecycle management configuration JSON document
specifies that all objects in this bucket that are more than 365 days
old are deleted automatically:
{
"rule":
[
{
"action": {"type": "Delete"},
"condition": {"age": 365}
}
]
}
"""
def _add_common_args(parser):
  """Register flags for this command.

  Args:
    parser (argparse.ArgumentParser): The parser to add the arguments to.
  """
  # NOTE: The original docstring claimed this returns the "buckets update flag
  # group"; the function registers flags in place and returns None.
  parser.add_argument(
      'url',
      nargs='*',
      type=str,
      help='Specifies the URLs of the buckets to update.',
  )

  # ACL and default-object-ACL flags.
  acl_flags_group = parser.add_group()
  flags.add_acl_modifier_flags(acl_flags_group)
  default_acl_flags_group = parser.add_group()
  default_acl_flags_group.add_argument(
      '--default-object-acl-file',
      help='Sets the default object ACL from file for the bucket.',
  )
  default_acl_flags_group.add_argument(
      '--predefined-default-object-acl',
      # Fixed typo "tobuckets" -> "to buckets".
      help='Apply a predefined set of default object access controls to buckets',
  )
  default_acl_flags_group.add_argument(
      '--add-default-object-acl-grant',
      action='append',
      metavar='DEFAULT_OBJECT_ACL_GRANT',
      type=arg_parsers.ArgDict(),
      help=(
          'Adds default object ACL grant. See --add-acl-grant help text for'
          ' more details.'
      ),
  )
  default_acl_flags_group.add_argument(
      '--remove-default-object-acl-grant',
      action='append',
      help=(
          'Removes default object ACL grant. See --remove-acl-grant help text'
          ' for more details.'
      ),
  )

  # Set-vs-clear flag pairs are mutually exclusive groups.
  cors = parser.add_mutually_exclusive_group()
  cors.add_argument('--cors-file', help=_CORS_HELP_TEXT)
  cors.add_argument(
      '--clear-cors',
      action='store_true',
      help="Clears the bucket's CORS settings.")
  parser.add_argument(
      '--default-storage-class',
      help='Sets the default storage class for the bucket.',
  )
  default_encryption_key = parser.add_mutually_exclusive_group()
  default_encryption_key.add_argument(
      '--default-encryption-key',
      help='Set the default KMS key for the bucket.')
  default_encryption_key.add_argument(
      '--clear-default-encryption-key',
      action='store_true',
      help="Clears the bucket's default encryption key.")
  parser.add_argument(
      '--default-event-based-hold',
      action=arg_parsers.StoreTrueFalseAction,
      help='Sets the default value for an event-based hold on the bucket.'
      ' By setting the default event-based hold on a bucket, newly-created'
      ' objects inherit that value as their event-based hold (it is not'
      ' applied retroactively).')

  # Labels: full file replacement, incremental update/remove, or clear-all.
  labels = parser.add_mutually_exclusive_group()
  labels.add_argument('--labels-file', help=_LABELS_HELP_TEXT)
  update_labels = labels.add_group()
  update_labels.add_argument(
      '--update-labels',
      metavar='LABEL_KEYS_AND_VALUES',
      type=arg_parsers.ArgDict(),
      help='Add or update labels. Example:'
      ' --update-labels=key1=value1,key2=value2')
  update_labels.add_argument(
      '--remove-labels',
      metavar='LABEL_KEYS',
      type=arg_parsers.ArgList(),
      help='Remove labels by their key names.')
  labels.add_argument(
      '--clear-labels',
      action='store_true',
      help='Clear all labels associated with a bucket.')
  lifecycle = parser.add_mutually_exclusive_group()
  lifecycle.add_argument('--lifecycle-file', help=_LIFECYCLE_HELP_TEXT)
  lifecycle.add_argument(
      '--clear-lifecycle',
      action='store_true',
      help='Removes all lifecycle configuration for the bucket.')

  # Usage/storage logging flags.
  log_bucket = parser.add_mutually_exclusive_group()
  log_bucket.add_argument(
      '--log-bucket',
      help='Enables usage and storage logging for the bucket specified in the'
      ' overall update command, outputting log files to the bucket specified in'
      ' this flag. Cloud Storage does not validate the existence of the bucket'
      ' receiving logs. In addition to enabling logging on your bucket, you'
      ' also need to grant cloud-storage-analytics@google.com write access to'
      ' the log bucket.')
  log_bucket.add_argument(
      '--clear-log-bucket',
      action='store_true',
      help='Disables usage and storage logging for the bucket specified in the'
      ' overall update command.')
  log_object_prefix = parser.add_mutually_exclusive_group()
  log_object_prefix.add_argument(
      '--log-object-prefix',
      help='Specifies a prefix for the names of logs generated in the log'
      ' bucket. The default prefix is the bucket name. If logging is not'
      ' enabled, this flag has no effect.')
  log_object_prefix.add_argument(
      '--clear-log-object-prefix',
      action='store_true',
      help='Clears the prefix used to determine the naming of log objects in'
      ' the logging bucket.')
  public_access_prevention = parser.add_mutually_exclusive_group()
  public_access_prevention.add_argument(
      '--public-access-prevention',
      '--pap',
      action=arg_parsers.StoreTrueFalseAction,
      help='If True, sets [public access prevention](https://cloud.google.com'
      '/storage/docs/public-access-prevention) to "enforced".'
      ' If False, sets public access prevention to "inherited".')
  public_access_prevention.add_argument(
      '--clear-public-access-prevention',
      '--clear-pap',
      action='store_true',
      help='Unsets the public access prevention setting on a bucket.',
  )
  retention_period = parser.add_mutually_exclusive_group()
  retention_period.add_argument(
      '--retention-period',
      help='Minimum [retention period](https://cloud.google.com'
      '/storage/docs/bucket-lock#retention-periods)'
      ' for objects stored in the bucket, for example'
      ' ``--retention-period=P1Y1M1DT5S\'\'. Objects added to the bucket'
      ' cannot be deleted until they\'ve been stored for the specified'
      ' length of time. Default is no retention period. Only available'
      ' for Cloud Storage using the JSON API.')
  retention_period.add_argument(
      '--clear-retention-period',
      action='store_true',
      help='Clears the object retention period for a bucket.')
  parser.add_argument(
      '--lock-retention-period',
      action='store_true',
      help='Locks an unlocked retention policy on the buckets. Caution: A'
      ' locked retention policy cannot be removed from a bucket or reduced in'
      ' duration. Once locked, deleting the bucket is the only way to'
      ' "remove" a retention policy.')
  parser.add_argument(
      '--requester-pays',
      action=arg_parsers.StoreTrueFalseAction,
      help='Allows you to configure a Cloud Storage bucket so that the'
      ' requester pays all costs related to accessing the bucket and its'
      ' objects.')
  parser.add_argument(
      '--soft-delete-duration',
      type=arg_parsers.Duration(),
      help=(
          'Duration to retain soft-deleted objects. For example, "2w1d" is'
          ' two weeks and one day.'
      ),
  )
  parser.add_argument(
      '--clear-soft-delete',
      action='store_true',
      help=(
          'Clears bucket soft delete settings. Does not affect objects already'
          ' in soft-deleted state.'
      ),
  )
  parser.add_argument(
      '--uniform-bucket-level-access',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          'Enables or disables [uniform bucket-level access]'
          '(https://cloud.google.com/storage/docs/bucket-policy-only)'
          ' for the buckets.'
      ),
  )
  parser.add_argument(
      '--versioning',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          'Allows you to configure a Cloud Storage bucket to keep old'
          ' versions of objects.'
      ),
  )

  # Static-website hosting flags.
  web_main_page_suffix = parser.add_mutually_exclusive_group()
  web_main_page_suffix.add_argument(
      '--web-main-page-suffix',
      help=(
          'Cloud Storage allows you to configure a bucket to behave like a'
          ' static website. A subsequent GET bucket request through a custom'
          ' domain serves the specified "main" page instead of performing the'
          ' usual bucket listing.'
      ),
  )
  web_main_page_suffix.add_argument(
      '--clear-web-main-page-suffix',
      action='store_true',
      help='Clear website main page suffix if bucket is hosting website.',
  )
  web_error_page = parser.add_mutually_exclusive_group()
  web_error_page.add_argument(
      '--web-error-page',
      help=(
          'Cloud Storage allows you to configure a bucket to behave like a'
          ' static website. A subsequent GET bucket request through a custom'
          ' domain for a non-existent object serves the specified error page'
          ' instead of the standard Cloud Storage error.'
      ),
  )
  web_error_page.add_argument(
      '--clear-web-error-page',
      action='store_true',
      help='Clear website error page if bucket is hosting website.',
  )

  # Shared flag groups defined in command_lib.storage.flags.
  flags.add_additional_headers_flag(parser)
  flags.add_autoclass_flags(parser)
  flags.add_continue_on_error_flag(parser)
  flags.add_recovery_point_objective_flag(parser)
  flags.add_read_paths_from_stdin_flag(parser)
  ip_filter = parser.add_mutually_exclusive_group()
  ip_filter.add_argument(
      '--clear-ip-filter',
      action='store_true',
      help='Disables and clears IP filter configuration of the bucket.',
  )
  flags.add_ip_filter_file_flag(ip_filter)
def _add_alpha_args(parser):
  """Register flags for the alpha version of this command.

  Args:
    parser (argparse.ArgumentParser): The parser to add the arguments to.
  """
  # Alpha-only: accepts an encryption enforcement configuration file.
  flags.add_encryption_enforcement_file_flag(parser)
def _is_initial_bucket_metadata_needed(user_request_args):
"""Determines if the bucket update has to patch existing metadata."""
resource_args = user_request_args.resource_args
if not resource_args:
return False
return user_request_args_factory.adds_or_removes_acls(
user_request_args) or any([
resource_args.labels_file_path,
resource_args.labels_to_append,
resource_args.labels_to_remove,
])
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class Update(base.Command):
  """Update bucket settings."""

  detailed_help = {
      'DESCRIPTION':
          """
      Update the settings for a bucket.
      """,
      'EXAMPLES':
          """
      The following command updates the default storage class of a Cloud Storage
      bucket named "my-bucket" to NEARLINE and sets requester pays to true:
        $ {command} gs://my-bucket --default-storage-class=NEARLINE --requester-pays
      The following command updates the retention period of a Cloud Storage
      bucket named "my-bucket" to one year and thirty-six minutes:
        $ {command} gs://my-bucket --retention-period=1y36m
      The following command clears the retention period of a bucket:
        $ {command} gs://my-bucket --clear-retention-period
      """,
  }

  @staticmethod
  def Args(parser):
    _add_common_args(parser)

  def update_task_iterator(self, args):
    """Yields an UpdateBucketTask for every bucket matching the URL args."""
    user_request_args = (
        user_request_args_factory.get_user_request_args_from_command_args(
            args, metadata_type=user_request_args_factory.MetadataType.BUCKET
        )
    )
    # ACL edits need the FULL projection; everything else can skip ACLs.
    fields_scope = (
        cloud_api.FieldsScope.FULL
        if user_request_args_factory.adds_or_removes_acls(user_request_args)
        else cloud_api.FieldsScope.NO_ACL
    )
    get_bucket_metadata = _is_initial_bucket_metadata_needed(user_request_args)

    for url_string in stdin_iterator.get_urls_iterable(
        args.url, args.read_paths_from_stdin
    ):
      # Non-bucket URLs fail fast here before wildcard expansion.
      errors_util.raise_error_if_not_bucket(
          args.command_path, storage_url.storage_url_from_string(url_string))
      for bucket_resource in wildcard_iterator.get_wildcard_iterator(
          url_string,
          fields_scope=fields_scope,
          get_bucket_metadata=get_bucket_metadata):
        yield update_bucket_task.UpdateBucketTask(
            bucket_resource, user_request_args=user_request_args)

  def Run(self, args):
    """Executes the bucket update tasks and records the exit code."""
    status_queue = task_graph_executor.multiprocessing_context.Queue()
    # Locking a retention period is irreversible, so such updates run
    # sequentially rather than in parallel.
    locks_retention_period = getattr(args, 'lock_retention_period', False)
    self.exit_code = task_executor.execute_tasks(
        self.update_task_iterator(args),
        parallelizable=not locks_retention_period,
        task_status_queue=status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER,
            manifest_path=None),
        continue_on_error=args.continue_on_error,
    )
# Alpha track: inherits all GA behavior and adds alpha-only flags in Args.
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(Update):
  """Update bucket settings."""
  detailed_help = {
      'DESCRIPTION':
          """
      Update a bucket.
      """,
      'EXAMPLES':
          """
      The following command updates the retention period of a Cloud Storage
      bucket named "my-bucket" to one year and thirty-six minutes:
        $ {command} gs://my-bucket --retention-period=1y36m
      The following command clears the retention period of a bucket:
        $ {command} gs://my-bucket --clear-retention-period
      """,
  }
  @staticmethod
  def Args(parser):
    # Registers the shared GA flags plus the alpha-only flags.
    _add_common_args(parser)
    _add_alpha_args(parser)