feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import metrics_util
DETAILED_HELP = {
'DESCRIPTION': """\
The gcloud storage command group lets you create and manage
Cloud Storage resources such as buckets and objects.
More information on Cloud Storage can be found here:
https://cloud.google.com/storage, and detailed documentation can be
found here: https://cloud.google.com/storage/docs/
""",
}
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Storage(base.Group):
  """Create and manage Cloud Storage buckets and objects."""

  category = base.STORAGE_CATEGORY
  detailed_help = DETAILED_HELP

  def __init__(self):
    super(Storage, self).__init__()
    # Tag outgoing requests so gsutil-shim usage is attributed correctly.
    metrics_util.fix_user_agent_for_gsutil_shim()

  def Filter(self, context, args):
    # gsutil leaves the user project quota disabled by default, so gcloud
    # storage mirrors that for parity. See b/258687686#comment5.
    base.DisableUserProjectQuota()
    # Self-signed JWTs are only turned on for the alpha track.
    self.EnableSelfSignedJwtForTracks([base.ReleaseTrack.ALPHA])
    del context, args  # Unused by this group.

View File

@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage batch operations commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class BatchOperations(base.Group):
  """Manage Cloud Storage batch operations."""

  def Filter(self, context, args):
    # Runs before any subcommand in this group; enforce a project ID.
    # TODO(b/190541521): Determine if command group works with project number
    base.RequireProjectID(args)
    del context, args  # Unused.

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage batch operations jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Command group container; the class docstring doubles as the help summary.
@base.DefaultUniverseOnly
class Jobs(base.Group):
  """Manage Cloud Storage batch operations jobs."""

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of cancel command for batch operations jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_batch_operations_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.batch_operations.jobs import resource_args
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Cancel(base.Command):
  """Cancel a batch-operations job."""

  detailed_help = {
      "DESCRIPTION": """
      Cancel the batch operation job.
      """,
      "EXAMPLES": """
      To cancel a batch job with the name `my-job` in location `us-central1`:
        $ {command} my-job --location=us-central1
      To cancel the same batch job with fully specified name:
        $ {command} projects/my-project/locations/us-central1/jobs/my-job
      """,
  }

  @staticmethod
  def Args(parser):
    resource_args.add_batch_job_resource_arg(parser, "to cancel")

  def Run(self, args):
    # Resolve the positional/flags into a fully-qualified job resource name.
    name = args.CONCEPTS.batch_job.Parse().RelativeName()
    client = storage_batch_operations_api.StorageBatchOperationsApi()
    client.cancel_batch_job(name)
    log.status.Print("Canceled batch job: {}".format(name))

View File

@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for batch actions."""
from googlecloudsdk.api_lib.storage import storage_batch_operations_api
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage.batch_operations.jobs import resource_args
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@calliope_base.ReleaseTracks(calliope_base.ReleaseTrack.GA)
@calliope_base.DefaultUniverseOnly
class Create(calliope_base.Command):
  """Create a new batch operation job."""

  detailed_help = {
      "DESCRIPTION": """
      Create a batch operation job, allowing you to perform operations
      such as deletion, updating metadata, and more on objects in a
      serverless manner.
      """,
      "EXAMPLES": """
      The following example command creates a batch job, named `my-job`,
      that performs object deletion on bucket `my-bucket` for objects
      specified in the manifest file `gs://my-bucket/manifest.csv`:
        $ {command} my-job --bucket=my-bucket --manifest-location=gs://my-bucket/manifest.csv
        --delete-object
      The following example command creates a batch job, named `my-job`,
      that updates object metadata `Content-Disposition` to `inline`,
      `Content-Language` to `en`, and sets object retention mode to `locked`
      on bucket `my-bucket` for objects with prefixes `prefix1` or `prefix2`:
        $ {command} my-job --bucket=my-bucket --included-object-prefixes=prefix1,prefix2
        --put-metadata=Content-Disposition=inline,Content-Language=en,Retain-Until=2025-01-01T00:00:00Z,Retention-Mode=locked
      The following example command creates a batch job, named `my-job`,
      that puts object event based hold on objects in bucket `my-bucket`
      with logging config enabled for corresponding transform action and
      succeeded and failed action states:
        $ {command} my-job --bucket=my-bucket --put-object-event-based-hold
        --put-metadata=Content-Disposition=inline,Content-Language=en
        --log-actions=transform --log-action-states=succeeded,failed
      """,
  }

  @staticmethod
  def Args(parser):
    resource_args.add_batch_job_resource_arg(parser, "to create")
    flags.add_batch_jobs_flags(parser)

  def Run(self, args):
    # --dry-run is only registered on some tracks; default to False when the
    # parser for this track does not define it.
    is_dry_run = getattr(args, "dry_run", False)
    if args.delete_object and not is_dry_run:
      # Deletion is destructive, so require explicit confirmation first.
      console_io.PromptContinue(
          message=(
              "This command will delete objects specified in the batch"
              " operation job. Please ensure that you have soft delete enabled"
              " on the bucket if you want to restore the objects within the"
              " retention duration."
          ),
          cancel_on_no=True,
      )
    job_name = args.CONCEPTS.batch_job.Parse().RelativeName()
    storage_batch_operations_api.StorageBatchOperationsApi().create_batch_job(
        args, job_name
    )
    log.status.Print("Created batch job: {}".format(job_name))
@calliope_base.ReleaseTracks(calliope_base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
  """Create a new batch operation job."""

  # Alpha help adds the --bucket-list example; Run is inherited from Create.
  detailed_help = {
      "DESCRIPTION": """
      Create a batch operation job, allowing you to perform operations
      such as deletion, updating metadata, and more on objects in a
      serverless manner.
      """,
      "EXAMPLES": """
      The following example command creates a batch job, named `my-job`,
      that performs object deletion on bucket `my-bucket` for objects
      specified in the manifest file `gs://my-bucket/manifest.csv`:
        $ {command} my-job --bucket=my-bucket --manifest-location=gs://my-bucket/manifest.csv
        --delete-object
      The following example command creates a batch job, named `my-job`,
      that performs object deletion on buckets `my-bucket-1` and
      `my-bucket-2` for all objects in them:
        $ {command} my-job \
        --bucket-list=my-bucket-1,my-bucket-2 \
        --included-object-prefixes='' --delete-object
      The following example command creates a batch job, named `my-job`,
      that updates object metadata `Content-Disposition` to `inline`,
      `Content-Language` to `en`, and sets object retention mode to `locked`
      on bucket `my-bucket` for objects with prefixes `prefix1` or `prefix2`:
        $ {command} my-job --bucket=my-bucket --included-object-prefixes=prefix1,prefix2
        --put-metadata=Content-Disposition=inline,Content-Language=en,Retain-Until=2025-01-01T00:00:00Z,Retention-Mode=locked
      The following example command creates a batch job, named `my-job`,
      that puts object event based hold on objects in bucket `my-bucket`
      with logging config enabled for corresponding transform action and
      succeeded and failed action states:
        $ {command} my-job --bucket=my-bucket --put-object-event-based-hold
        --put-metadata=Content-Disposition=inline,Content-Language=en
        --log-actions=transform --log-action-states=succeeded,failed
      """,
  }

  @staticmethod
  def Args(parser):
    resource_args.add_batch_job_resource_arg(parser, "to create")
    # Passing the ALPHA track registers the alpha-only job flags.
    flags.add_batch_jobs_flags(parser, track=calliope_base.ReleaseTrack.ALPHA)
    # --dry-run is alpha-only; the inherited Run reads it via getattr.
    flags.add_batch_jobs_dry_run_flag(parser)

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of delete command for batch operations jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_batch_operations_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.batch_operations.jobs import resource_args
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Delete(base.Command):
  """Delete a batch-operations job."""

  detailed_help = {
      "DESCRIPTION": """
      Delete the batch operation job.
      """,
      "EXAMPLES": """
      To delete a batch job with the name `my-job` in location `us-central1`:
        $ {command} my-job --location=us-central1
      To delete the same batch job with fully specified name:
        $ {command} projects/my-project/locations/us-central1/jobs/my-job
      """,
  }

  @staticmethod
  def Args(parser):
    resource_args.add_batch_job_resource_arg(parser, "to delete")

  def Run(self, args):
    # Resolve the positional/flags into a fully-qualified job resource name.
    name = args.CONCEPTS.batch_job.Parse().RelativeName()
    client = storage_batch_operations_api.StorageBatchOperationsApi()
    client.delete_batch_job(name)
    log.status.Print("Deleted batch job: {}".format(name))

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command for batch operations jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_batch_operations_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.batch_operations.jobs import resource_args
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describe a batch-operations job."""

  detailed_help = {
      "DESCRIPTION": """
      Describe the batch operation job.
      """,
      "EXAMPLES": """
      To describe a batch job with the name `my-job`:
        $ {command} my-job
      To describe the same batch job with fully specified name:
        $ {command} projects/my-project/locations/global/jobs/my-job
      """,
  }

  @staticmethod
  def Args(parser):
    resource_args.add_batch_job_resource_arg(parser, "to describe")

  def Run(self, args):
    # Return the raw job resource; DescribeCommand handles display.
    client = storage_batch_operations_api.StorageBatchOperationsApi()
    return client.get_batch_job(
        args.CONCEPTS.batch_job.Parse().RelativeName()
    )

View File

@@ -0,0 +1,157 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of list command for batch operations jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_batch_operations_api
from googlecloudsdk.calliope import base
# Location value passed to list_batch_jobs; jobs are listed under the
# "global" location.
_SBO_CLH_LOCATION_GLOBAL = "global"
def _TransformTransformation(job):
"""Transform for the TRANSFORMATION field in the table output.
TRANSFORMATION is generated from the oneof field transformation.
Args:
job: job dictionary for transform
Returns:
A dictionary of the transformation type and its values.
"""
transformation = {}
transform_types = [
"putObjectHold",
"deleteObject",
"putMetadata",
"rewriteObject",
]
for transform in transform_types:
if transform in job:
transformation[transform] = job[transform]
return transformation
def _TransformDryRun(job):
"""Transform for the DRY_RUN field in the table output."""
return job.get("dry_run", False)
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """List batch-operations jobs."""

  detailed_help = {
      "DESCRIPTION": """
      List batch operation jobs.
      """,
      "EXAMPLES": """
      To list all batch jobs:
        $ {command}
      To list all batch jobs with a page size of `10`:
        $ {command} --page-size=10
      To list a limit of `20` batch jobs:
        $ {command} --limit=20
      To list all batch jobs in `JSON` format:
        $ {command} --format=json
      """,
  }

  @staticmethod
  def Args(parser):
    # Batch jobs have no URI representation; drop the inherited --uri flag.
    base.URI_FLAG.RemoveFromParser(parser)
    parser.display_info.AddFormat("""
    table(
    name.basename():wrap=20:label=BATCH_JOB_ID,
    bucketList.buckets:wrap=20:label=SOURCE,
    transformation():wrap=20:label=TRANSFORMATION,
    createTime:wrap=20:label=CREATE_TIME,
    counters:wrap=20:label=COUNTERS,
    errorSummaries:wrap=20:label=ERROR_SUMMARIES,
    state:wrap=20:label=STATE
    )
    """)
    parser.display_info.AddTransforms({
        "transformation": _TransformTransformation,
    })

  def Run(self, args):
    client = storage_batch_operations_api.StorageBatchOperationsApi()
    return client.list_batch_jobs(
        _SBO_CLH_LOCATION_GLOBAL, args.limit, args.page_size
    )
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class ListAlpha(List):
  """List batch-operations jobs."""

  detailed_help = {
      "DESCRIPTION": """
      List batch operation jobs.
      """,
      "EXAMPLES": """
      To list all batch jobs:
        $ {command}
      To list all batch jobs that are `not` dry run:
        $ {command} --filter="dry_run=false"
      To list all batch jobs with a page size of `10`:
        $ {command} --page-size=10
      To list a limit of `20` batch jobs:
        $ {command} --limit=20
      To list all batch jobs in `JSON` format:
        $ {command} --format=json
      """,
  }

  @staticmethod
  def Args(parser):
    # Batch jobs have no URI representation; drop the inherited --uri flag.
    base.URI_FLAG.RemoveFromParser(parser)
    # Same table as the GA List command plus the alpha-only DRY_RUN column.
    parser.display_info.AddFormat("""
    table(
    name.basename():wrap=20:label=BATCH_JOB_ID,
    dryrun():wrap=20:label=DRY_RUN,
    bucketList.buckets:wrap=20:label=SOURCE,
    transformation():wrap=20:label=TRANSFORMATION,
    createTime:wrap=20:label=CREATE_TIME,
    counters:wrap=20:label=COUNTERS,
    errorSummaries:wrap=20:label=ERROR_SUMMARIES,
    state:wrap=20:label=STATE
    )
    """)
    parser.display_info.AddTransforms({
        "transformation": _TransformTransformation,
        "dryrun": _TransformDryRun,
    })

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage bucket commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Command group container; the class docstring doubles as the help summary.
@base.UniverseCompatible
class Buckets(base.Group):
  """Manage Cloud Storage buckets."""

View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets add-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class AddIamPolicyBinding(base.Command):
  """Add an IAM policy binding to a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Add an IAM policy binding to a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To grant a single role to a single principal for BUCKET:
        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/storage.objectCreator
      To make objects in BUCKET publicly readable:
        $ {command} gs://BUCKET --member=allUsers --role=roles/storage.objectViewer
      To specify a custom role for a principal on BUCKET:
        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/customRoleName
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'url', help='URL of bucket to add IAM policy binding to.')
    iam_util.AddArgsForAddIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    bucket_url = storage_url.storage_url_from_string(args.url)
    # Only GCS buckets support IAM; reject other providers/object URLs.
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    api_client = api_factory.get_api(bucket_url.scheme)
    existing_policy = api_client.get_bucket_iam_policy(bucket_url.bucket_name)
    messages = apis.GetMessagesModule('storage', 'v1')
    return iam_command_util.add_iam_binding_to_resource(
        args,
        bucket_url,
        messages,
        existing_policy,
        set_iam_policy_task.SetBucketIamPolicyTask,
    )

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage buckets anywhere-caches commands."""
from googlecloudsdk.calliope import base
# Command group container; the class docstring doubles as the help summary.
@base.DefaultUniverseOnly
class AnywhereCaches(base.Group):
  """Manage Cloud Storage Anywhere Caches."""

View File

@@ -0,0 +1,118 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for creating Anywhere Cache Instances."""
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import create_anywhere_cache_task
@base.DefaultUniverseOnly
class Create(base.CreateCommand):
  """Create Anywhere Cache instances for a bucket."""

  detailed_help = {
      'DESCRIPTION': """
      Create Anywhere Cache instances.
      Only one cache instance per zone can be created for each bucket.
      """,
      'EXAMPLES': """
      The following command creates an anywhere cache instance for bucket
      in ``asia-south2-b'' zone:
        $ {command} gs://my-bucket asia-south2-b
      The following command creates anywhere cache instances for bucket
      in ``asia-south2-b'', and ``asia-east1-a'' zone:
        $ {command} gs://my-bucket asia-south2-b asia-east1-a
      The following command creates an anywhere cache instance for bucket
      in ``asia-south2-b'' zone, with ttl for cache entry as 6 hours, and
      admission policy as ``ADMIT_ON_SECOND_MISS'':
        $ {command} gs://my-bucket asia-south2-b --ttl=6h --admission-policy='ADMIT_ON_SECOND_MISS'
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'url',
        type=str,
        help=(
            'Specifies the URL of the bucket where the Anywhere Cache should be'
            ' created.'
        ),
    )
    parser.add_argument(
        'zone',
        type=str,
        nargs='+',
        help=(
            'Specifies the name of the zonal locations where the Anywhere Cache'
            ' should be created.'
        ),
    )
    parser.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help='Cache entry time-to-live. Default to 24h if not provided.',
    )
    flags.add_admission_policy_flag(parser)

  def get_task_iterator(self, args, task_status_queue):
    """Yields one cache-creation task per requested zone."""
    bucket_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    # Announce the total task count so progress reporting can size itself.
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.zone)
    )
    if args.ttl is not None:
      # Duration() parses to seconds; the API wants a string like '21600s'.
      args.ttl = '{}s'.format(args.ttl)
    for zone in args.zone:
      yield create_anywhere_cache_task.CreateAnywhereCacheTask(
          bucket_url, zone, admission_policy=args.admission_policy, ttl=args.ttl
      )

  def Run(self, args):
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    tasks = plurality_checkable_iterator.PluralityCheckableIterator(
        self.get_task_iterator(args, task_status_queue)
    )
    self.exit_code = task_executor.execute_tasks(
        tasks,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command to get the Anywhere Cache Instance."""
import collections
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import resource_util
# Determines the order in which the fields should be displayed for
# an AnywhereCacheResource. Field names here map to resource attributes
# consumed by resource_util.get_display_dict_for_resource.
AnywhereCacheDisplayTitlesAndDefaults = collections.namedtuple(
    'AnywhereCacheDisplayTitlesAndDefaults',
    (
        'admission_policy',
        'anywhere_cache_id',
        'bucket',
        'create_time',
        'id',
        'kind',
        'pending_update',
        'state',
        'ttl',
        'update_time',
        'zone',
    ),
)
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Returns details of Anywhere Cache instance of a bucket."""

  detailed_help = {
      'DESCRIPTION': """
      Describes a single Anywhere Cache instance if it exists.
      """,
      'EXAMPLES': """
      The following command describes the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':
        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'id',
        type=str,
        help=(
            'Identifier for an Anywhere Cache instance. It is a combination of'
            ' bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Fetches the cache resource and returns it as a display-ready dict."""
    # The positional is '<bucket_name>/<anywhere_cache_id>'; rpartition splits
    # on the last '/' so any '/' in the bucket portion stays with the bucket.
    bucket_name, _, anywhere_cache_id = args.id.rpartition('/')
    result = api_factory.get_api(
        storage_url.ProviderPrefix.GCS
    ).get_anywhere_cache(bucket_name, anywhere_cache_id)
    # Order fields per AnywhereCacheDisplayTitlesAndDefaults; args.raw skips
    # the display formatting when set.
    return resource_util.get_display_dict_for_resource(
        result,
        AnywhereCacheDisplayTitlesAndDefaults,
        args.raw,
    )

View File

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of disable command for disabling Anywhere Cache Instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import disable_anywhere_cache_task
@base.DefaultUniverseOnly
class Disable(base.Command):
  """Disable Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      Disables one or more Anywhere Cache instances.
      The cache instance will be set to DISABLED state. The existing entries
      can be read from the cache but new entries will not be written to the
      cache. The L4 SSD cache would not be deleted by the cache manager until
      the min TTL (1h) has been reached (cache instance is kept for at least
      1h). Google Cloud Storage defines the min TTL which is applied to all
      cache instances. Cache disablement could be canceled by using
      anywhere-caches resume command before the instance is deleted.
      """,
      'EXAMPLES': """
      The following command disables the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':
        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for Anywhere Cache instances. Each is a combination'
            ' of bucket_name/anywhere_cache_id. For example:'
            ' test-bucket/my-cache-id.'
        ),
    )

  def _get_task_iterator(self, args, task_status_queue):
    """Yields one DisableAnywhereCacheTask per requested cache identifier."""
    # Announce the total task count so progress reporting can size itself.
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # Split on the last delimiter so any '/' in the bucket portion stays
      # with the bucket name.
      bucket_name, _, anywhere_cache_id = id_str.rpartition(
          storage_url.CLOUD_URL_DELIMITER
      )
      yield disable_anywhere_cache_task.DisableAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self._get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of list command to list Anywhere Cache instances of bucket."""
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import resource_util
from surface.storage.buckets.anywhere_caches import describe
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """List all Anywhere Cache instances of a bucket."""

  detailed_help = {
      'DESCRIPTION': """
      List all cache instances of this bucket.
      """,
      'EXAMPLES': """
      The following command lists all anywhere cache instances of bucket
      ``gs://my-bucket'':

        $ {command} gs://my-bucket
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'url',
        type=str,
        help=(
            'Specifies the URL of the bucket for which anywhere cache instances'
            ' should be listed.'
        ),
    )
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    # Only GCS buckets support Anywhere Caches; reject anything else early.
    bucket_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    client = api_factory.get_api(bucket_url.scheme)
    # Reuse the describe command's display mapping so list and describe
    # render cache resources identically.
    for cache in client.list_anywhere_caches(bucket_url.bucket_name):
      yield resource_util.get_display_dict_for_resource(
          cache,
          describe.AnywhereCacheDisplayTitlesAndDefaults,
          args.raw,
      )

View File

@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of pause command to pause Anywhere Cache instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import pause_anywhere_cache_task
@base.DefaultUniverseOnly
class Pause(base.Command):
  """Pause Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      The pause operation can be used to stop the data ingestion of a cache
      instance in RUNNING state (read-only cache) until the Resume is invoked.
      """,
      'EXAMPLES': """
      The following command pauses the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are combination'
            ' of bucket_name/anywhere_cache_id. For example :'
            ' test-bucket/my-cache-id.'
        ),
    )

  def get_task_iterator(self, args, task_status_queue):
    """Yields one PauseAnywhereCacheTask per requested cache instance."""
    # Report total task count up front so progress output can show N-of-M.
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # IDs have the form <bucket_name>/<anywhere_cache_id>.
      bucket_name, _, anywhere_cache_id = id_str.rpartition('/')
      yield pause_anywhere_cache_task.PauseAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    """Executes the pause tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self.get_task_iterator(args, task_status_queue)
    plurality_checkable_task_iterator = (
        plurality_checkable_iterator.PluralityCheckableIterator(task_iterator)
    )
    self.exit_code = task_executor.execute_tasks(
        plurality_checkable_task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
        # No --continue-on-error flag is registered by Args, so this getattr
        # currently always defaults to False.
        continue_on_error=getattr(args, 'continue_on_error', False),
    )

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of resume command for resuming Anywhere Cache instances."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import resume_anywhere_cache_task
@base.DefaultUniverseOnly
class Resume(base.Command):
  """Resume Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      Resume operation could be used to revert the Paused and Disabled state.
      Once a paused/disabled cache is resumed, the cache will be set to
      RUNNING/CREATING state:

      1. RUNNING if the cache is active.
      2. CREATING if the cache is pending creation.
      """,
      'EXAMPLES': """
      The following command resumes the anywhere cache instance of bucket
      ``my-bucket'' having anywhere_cache_id ``my-cache-id'':

        $ {command} my-bucket/my-cache-id
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache instance. They are combination'
            ' of bucket_name/anywhere_cache_id. For example :'
            ' test-bucket/my-cache-id.'
        ),
    )

  def _get_task_iterator(self, args, task_status_queue):
    """Yields one ResumeAnywhereCacheTask per requested cache instance."""
    # Report total task count up front so progress output can show N-of-M.
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    for id_str in args.id:
      # IDs have the form <bucket_name>/<anywhere_cache_id>; split on the
      # last delimiter.
      bucket_name, _, anywhere_cache_id = id_str.rpartition(
          storage_url.CLOUD_URL_DELIMITER
      )
      yield resume_anywhere_cache_task.ResumeAnywhereCacheTask(
          bucket_name, anywhere_cache_id
      )

  def Run(self, args):
    """Executes the resume tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self._get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating Anywhere Cache instances."""
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import progress_callbacks
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.anywhere_caches import patch_anywhere_cache_task
@base.DefaultUniverseOnly
class Update(base.UpdateCommand):
  """Update Anywhere Cache instances."""

  detailed_help = {
      'DESCRIPTION': """
      Update one or more Anywhere Cache instances. A cache instance can be
      updated if its state is created or pending creation.
      """,
      'EXAMPLES': """
      The following command updates cache entry's ttl, and admission policy of
      anywhere cache instance in bucket ``my-bucket'' having anywhere_cache_id
      ``my-cache-id'':

        $ {command} my-bucket/my-cache-id --ttl=6h --admission-policy='ADMIT_ON_SECOND_MISS'

      The following command updates cache entry's ttl of anywhere cache
      instances in bucket ``bucket-1'' and ``bucket-2'' having anywhere_cache_id
      ``my-cache-id-1'', and ``my-cache-id-2'' respectively:

        $ {command} bucket-1/my-cache-id-1 bucket-2/my-cache-id-2 --ttl=6h
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help=(
            'Identifiers for an Anywhere Cache Instance. They are combination'
            ' of bucket_name/anywhere_cache_id. For example :'
            ' test-bucket/my-cache-id.'
        ),
    )
    parser.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help='Cache entry time-to-live. Default to 24h if not provided.',
    )
    flags.add_admission_policy_flag(parser)

  def get_task_iterator(self, args, task_status_queue):
    """Yields one PatchAnywhereCacheTask per requested cache instance."""
    # Report total task count up front so progress output can show N-of-M.
    progress_callbacks.workload_estimator_callback(
        task_status_queue, len(args.id)
    )
    # arg_parsers.Duration() parses --ttl into an integer number of seconds;
    # the patch task expects a duration string such as '21600s'.
    ttl = str(args.ttl) + 's' if args.ttl else None
    for id_str in args.id:
      # IDs have the form <bucket_name>/<anywhere_cache_id>.
      bucket_name, _, anywhere_cache_id = id_str.rpartition('/')
      yield patch_anywhere_cache_task.PatchAnywhereCacheTask(
          bucket_name,
          anywhere_cache_id,
          admission_policy=args.admission_policy,
          ttl=ttl,
      )

  def Run(self, args):
    """Executes the patch tasks in parallel and records the exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    task_iterator = self.get_task_iterator(args, task_status_queue)
    self.exit_code = task_executor.execute_tasks(
        task_iterator,
        parallelizable=True,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER, manifest_path=None
        ),
    )

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for managing Storage bucket configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Hidden, ALPHA-only command group; subcommands are registered elsewhere in
# the surface tree.
@base.DefaultUniverseOnly
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Config(base.Group):
  """Manage Storage bucket configurations."""

View File

@@ -0,0 +1,38 @@
# Declarative gcloud spec for `storage buckets config export` (ALPHA only).
# CONFIG_EXPORT commands emit bucket configuration as KRM or Terraform HCL.
release_tracks: [ALPHA]

command_type: CONFIG_EXPORT

help_text:
  brief: Export the configuration for a Storage bucket.
  description: |
    *{command}* exports the configuration for a Storage bucket.

    Bucket configurations can be exported in
    Kubernetes Resource Model (krm) or Terraform HCL formats. The
    default format is `krm`.

    Specifying `--all` allows you to export the configurations for all
    buckets within the project.

    Specifying `--path` allows you to export the configuration(s) to
    a local directory.
  examples: |
    To export the configuration for a bucket, run:

      $ {command} my-bucket

    To export the configuration for a bucket to a file, run:

      $ {command} my-bucket --path=/path/to/dir/

    To export the configuration for a bucket in Terraform
    HCL format, run:

      $ {command} my-bucket --resource-format=terraform

    To export the configurations for all buckets within a
    project, run:

      $ {command} --all

arguments:
  resource:
    help_text: Bucket to export the configuration for.
    spec: !REF googlecloudsdk.command_lib.storage.resources.resources:bucket

View File

@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for making buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import user_request_args_factory
from googlecloudsdk.command_lib.storage.resources import resource_reference
from googlecloudsdk.command_lib.storage.tasks.buckets import create_bucket_task
_LIFECYCLE_HELP_TEXT = """
Sets the lifecycle management configuration on a bucket. For example,
The following lifecycle management configuration JSON document
specifies that all objects in this bucket that are more than 365 days
old are deleted automatically:
{
"rule":
[
{
"action": {"type": "Delete"},
"condition": {"age": 365}
}
]
}
"""
@base.UniverseCompatible
class Create(base.Command):
  """Create buckets for storing objects."""

  detailed_help = {
      'DESCRIPTION': """
      Create new buckets.
      """,
      'EXAMPLES': """
      The following command creates 2 Cloud Storage buckets, one named
      ``my-bucket'' and a second bucket named ``my-other-bucket'':

        $ {command} gs://my-bucket gs://my-other-bucket

      The following command creates a bucket with the ``nearline'' default
      [storage class](https://cloud.google.com/storage/docs/storage-classes) in
      the ``asia'' [location](https://cloud.google.com/storage/docs/locations):

        $ {command} gs://my-bucket --default-storage-class=nearline --location=asia
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the bucket-creation positional and flags on the parser."""
    parser.add_argument(
        'url', type=str, nargs='+', help='The URLs of the buckets to create.'
    )
    parser.add_argument(
        '--location',
        '-l',
        type=str,
        # Non-default universes have no implicit default location, so the
        # flag is mandatory there.
        required=arg_parsers.ArgRequiredInUniverse(
            default_universe=False, non_default_universe=True
        ),
        help=(
            '[Location](https://cloud.google.com/storage/docs/locations)'
            ' for the bucket. If not specified, the location used by Cloud'
            " Storage is ``us''. A bucket's location cannot be changed"
            ' after creation.'
        ),
    )
    parser.add_argument(
        '--public-access-prevention',
        '--pap',
        action=arg_parsers.StoreTrueFalseAction,
        help=(
            'Sets public access prevention to "enforced".'
            ' For details on how exactly public access is blocked, see:'
            ' http://cloud.google.com/storage/docs/public-access-prevention'
        ),
    )
    parser.add_argument(
        '--uniform-bucket-level-access',
        '-b',
        action=arg_parsers.StoreTrueFalseAction,
        help='Turns on uniform bucket-level access setting. Default is False.',
    )
    parser.add_argument(
        '--default-storage-class',
        '-c',
        '-s',
        type=str,
        help=(
            'Default [storage class]'
            '(https://cloud.google.com/storage/docs/storage-classes) for'
            ' the bucket. If not specified, the default storage class'
            ' used by Cloud Storage is "Standard".'
        ),
    )
    parser.add_argument(
        '--default-encryption-key',
        '-k',
        type=str,
        help=(
            'Set the default KMS key using the full path to the key, which '
            'has the following form: '
            "``projects/[project-id]/locations/[location]/keyRings/[key-ring]/cryptoKeys/[my-key]''."
        ),
    )
    parser.add_argument(
        '--retention-period',
        help=(
            'Minimum [retention period](https://cloud.google.com'
            '/storage/docs/bucket-lock#retention-periods)'
            ' for objects stored in the bucket, for example'
            " ``--retention-period=P1Y1M1DT5S''. Objects added to the bucket"
            " cannot be deleted until they've been stored for the specified"
            ' length of time. Default is no retention period. Only available'
            ' for Cloud Storage using the JSON API.'
        ),
    )
    flags.add_placement_flag(parser)
    parser.add_argument(
        '--soft-delete-duration',
        type=arg_parsers.Duration(),
        help=(
            'Duration to retain soft-deleted objects. For example, "2w1d" is'
            ' two weeks and one day. See `gcloud topic datetimes` for more'
            ' information on the duration format. Setting `0` will disable'
            ' soft delete policy on the bucket. Default is 7 days.'
        ),
    )
    flags.add_additional_headers_flag(parser)
    flags.add_autoclass_flags(parser)
    flags.add_enable_per_object_retention_flag(parser)
    flags.add_recovery_point_objective_flag(parser)
    parser.add_argument(
        '--enable-hierarchical-namespace',
        action='store_true',
        default=None,
        help=(
            'Enable hierarchical namespace for the bucket. To use this'
            ' flag, you must also use --uniform-bucket-level-access'
        ),
    )
    parser.add_argument('--lifecycle-file', help=_LIFECYCLE_HELP_TEXT)
    flags.add_ip_filter_file_flag(parser)
    # Encryption enforcement is only surfaced on the alpha track.
    if cls.ReleaseTrack() is base.ReleaseTrack.ALPHA:
      flags.add_encryption_enforcement_file_flag(parser)

  def Run(self, args):
    """Creates each requested bucket sequentially.

    Raises:
      errors.Error: If --autoclass-terminal-storage-class is given without
        --enable-autoclass, or if any URL is not a bucket URL.
    """
    for url_string in args.url:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_bucket(args.command_path, url)
      resource = resource_reference.UnknownResource(url)
      user_request_args = (
          user_request_args_factory.get_user_request_args_from_command_args(
              args, metadata_type=user_request_args_factory.MetadataType.BUCKET
          )
      )
      # A terminal storage class is meaningless unless Autoclass is being
      # enabled on the new bucket. (Message uses the hyphenated flag
      # spelling, matching how the flag is actually registered.)
      if (
          user_request_args.resource_args.autoclass_terminal_storage_class
          is not None
          and not user_request_args.resource_args.enable_autoclass
      ):
        raise errors.Error(
            '--autoclass-terminal-storage-class is only allowed if'
            ' --enable-autoclass is set.'
        )
      create_bucket_task.CreateBucketTask(
          resource, user_request_args=user_request_args
      ).execute()

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of rb command for deleting buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import plurality_checkable_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.rm import delete_task_iterator_factory
@base.UniverseCompatible
class Delete(base.Command):
  """Deletes Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION':
          """
      Deletes one or more Cloud Storage buckets.
      """,
      'EXAMPLES':
          """
      Delete a Google Cloud Storage bucket named "my-bucket":

        $ {command} gs://my-bucket

      Delete two buckets:

        $ {command} gs://my-bucket gs://my-other-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls', nargs='+', help='Specifies the URLs of the buckets to delete.')
    flags.add_additional_headers_flag(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    # Validate every argument is a bucket URL before deleting anything.
    for raw_url in args.urls:
      parsed_url = storage_url.storage_url_from_string(raw_url)
      errors_util.raise_error_if_not_bucket(args.command_path, parsed_url)

    status_queue = task_graph_executor.multiprocessing_context.Queue()
    iterator_factory = delete_task_iterator_factory.DeleteTaskIteratorFactory(
        name_expansion.NameExpansionIterator(
            args.urls, include_buckets=name_expansion.BucketSetting.YES
        ),
        task_status_queue=status_queue,
    )
    # Wrap so the executor can cheaply check whether there are 0/1/many tasks.
    delete_tasks = plurality_checkable_iterator.PluralityCheckableIterator(
        iterator_factory.bucket_iterator()
    )
    self.exit_code = task_executor.execute_tasks(
        delete_tasks,
        parallelizable=True,
        task_status_queue=status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER,
            manifest_path=None),
        continue_on_error=args.continue_on_error)

View File

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets describe command for getting info on buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import gsutil_json_printer
from googlecloudsdk.command_lib.storage.resources import resource_util
def _add_common_args(parser):
  """Adds arguments shared by all `buckets describe` release tracks."""
  # Positional bucket URL; wildcard rejection happens separately in Run.
  parser.add_argument('url', help='Specifies URL of bucket to describe.')
  flags.add_additional_headers_flag(parser)
  flags.add_raw_display_flag(parser)
  # Registers the gsutil-compatible JSON printer as a --format option.
  gsutil_json_printer.GsutilJsonPrinter.Register()
def _validate_url_does_not_contain_wildcards(url):
  """Raises InvalidUrlError if the given URL string contains a wildcard."""
  if not wildcard_iterator.contains_wildcard(url):
    return
  raise errors.InvalidUrlError(
      'Describe does not accept wildcards because it returns a single'
      ' resource. Please use the `ls` or `buckets list` command for'
      ' retrieving multiple resources.'
  )
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class Describe(base.DescribeCommand):
  """Describes Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION': """
      Describe a Cloud Storage bucket.
      """,
      'EXAMPLES': """
      Describe a Google Cloud Storage bucket named "my-bucket":

        $ {command} gs://my-bucket

      Describe bucket with JSON formatting, only returning the "name" key:

        $ {command} gs://my-bucket --format="json(name)"
      """,
  }

  @staticmethod
  def Args(parser):
    _add_common_args(parser)

  def Run(self, args):
    # Describe addresses exactly one resource, so wildcards are rejected.
    _validate_url_does_not_contain_wildcards(args.url)
    parsed_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_bucket(args.command_path, parsed_url)
    # Fetch full metadata so every displayable field is populated.
    bucket = api_factory.get_api(parsed_url.scheme).get_bucket(
        parsed_url.bucket_name,
        fields_scope=cloud_api.FieldsScope.FULL,
    )
    return resource_util.get_display_dict_for_resource(
        bucket,
        full_resource_formatter.BucketDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class DescribeAlpha(Describe):
  """Describes Cloud Storage buckets."""

  @staticmethod
  def Args(parser):
    _add_common_args(parser)
    flags.add_soft_deleted_flag(parser)

  def Run(self, args):
    _validate_url_does_not_contain_wildcards(args.url)
    # Alpha can address a specific (possibly soft-deleted) bucket generation,
    # so generation parsing is enabled on the URL.
    parsed_url = storage_url.storage_url_from_string(
        args.url, is_bucket_gen_parsing_allowed=True
    )
    errors_util.raise_error_if_not_bucket(args.command_path, parsed_url)
    generation = int(parsed_url.generation) if parsed_url.generation else None
    bucket = api_factory.get_api(parsed_url.scheme).get_bucket(
        parsed_url.bucket_name,
        generation=generation,
        fields_scope=cloud_api.FieldsScope.FULL,
        soft_deleted=getattr(args, 'soft_deleted', False),
    )
    return resource_util.get_display_dict_for_resource(
        bucket,
        full_resource_formatter.BucketDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets get-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class GetIamPolicy(base.Command):
  """Get the IAM policy for a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Get the IAM policy for a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To get the IAM policy for BUCKET:

        $ {command} gs://BUCKET

      To output the IAM policy for BUCKET to a file:

        $ {command} gs://BUCKET > policy.txt
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument('url', help='Request IAM policy for this bucket.')

  def Run(self, args):
    # Only GCS buckets have IAM policies; reject other providers early.
    parsed_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, parsed_url)
    # Resolve the URL (possibly a wildcard) to exactly one bucket.
    target_url = iam_command_util.get_single_matching_url(args.url)
    client = api_factory.get_api(target_url.scheme)
    return client.get_bucket_iam_policy(target_url.bucket_name)

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets list command for getting info on buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
def _add_common_args(parser):
  """Adds arguments shared by all `buckets list` release tracks."""
  # Zero or more bucket URLs; no URLs means "list all buckets in the project".
  parser.add_argument(
      'urls', nargs='*', help='Specifies URL of buckets to List.'
  )
  flags.add_additional_headers_flag(parser)
  flags.add_raw_display_flag(parser)
  flags.add_uri_support_to_list_commands(parser)
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class List(base.ListCommand):
  """Lists Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION': """
      List Cloud Storage buckets.
      """,
      'EXAMPLES': """
      List all Google Cloud Storage buckets in default project:

        $ {command}

      List buckets beginning with ``b'':

        $ {command} gs://b*

      List buckets with JSON formatting, only returning the ``name'' key:

        $ {command} --format="json(name)"
      """,
  }

  @staticmethod
  def Args(parser):
    _add_common_args(parser)

  def Run(self, args):
    if args.urls:
      # Every explicit URL must refer to a provider or a bucket, never an
      # object.
      bucket_urls = []
      for url_string in args.urls:
        parsed = storage_url.storage_url_from_string(url_string)
        if not (parsed.is_provider() or parsed.is_bucket()):
          raise errors.InvalidUrlError(
              'URL does not match buckets: {}'.format(url_string)
          )
        bucket_urls.append(parsed)
    else:
      # No URLs given: list every GCS bucket in the default project.
      bucket_urls = [storage_url.CloudUrl(storage_url.ProviderPrefix.GCS)]

    for bucket_url in bucket_urls:
      # soft_deleted is only registered on the alpha track; getattr keeps GA
      # working without the flag.
      wildcard_buckets = wildcard_iterator.get_wildcard_iterator(
          bucket_url.url_string,
          fields_scope=cloud_api.FieldsScope.FULL,
          get_bucket_metadata=True,
          soft_deleted_buckets=getattr(args, 'soft_deleted', False),
      )
      for bucket in wildcard_buckets:
        yield resource_util.get_display_dict_for_resource(
            bucket,
            full_resource_formatter.BucketDisplayTitlesAndDefaults,
            display_raw_keys=args.raw,
        )
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class ListAlpha(List):
  """Lists Cloud Storage buckets."""

  @staticmethod
  def Args(parser):
    _add_common_args(parser)
    # Alpha additionally supports listing soft-deleted buckets; List.Run
    # reads the flag via getattr, so GA is unaffected by its absence.
    flags.add_soft_deleted_flag(parser)

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage buckets notifications commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Command group container; notification subcommands (create, delete, list)
# are registered elsewhere in the surface tree.
@base.UniverseCompatible
class Notifications(base.Group):
  """Manage Cloud Storage bucket notifications."""

View File

@@ -0,0 +1,290 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for notifications."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import functools
import time
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.api_lib.storage.gcs_json import error_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
@error_util.catch_http_error_raise_gcs_api_error()
def _maybe_create_or_modify_topic(topic_name, service_account_email):
  """Ensures a publishable topic exists, creating or re-permissioning it.

  Args:
    topic_name (str): Name of the Cloud Pub/Sub topic to use or create.
    service_account_email (str): The project service account for Google Cloud
      Storage. This SA needs publish permission on the PubSub topic.

  Returns:
    True if topic was created or had its IAM permissions modified.
    Otherwise, False.
  """
  client = apis.GetClientInstance('pubsub', 'v1')
  messages = apis.GetMessagesModule('pubsub', 'v1')

  try:
    client.projects_topics.Get(
        messages.PubsubProjectsTopicsGetRequest(topic=topic_name))
  except apitools_exceptions.HttpError as error:
    # Only a NotFound response means the topic should be created.
    if error.status_code != 404:
      raise
    created_topic = client.projects_topics.Create(
        messages.Topic(name=topic_name))
    log.info('Created topic:\n{}'.format(created_topic))
    did_create_topic = True
  else:
    log.warning('Topic already exists: ' + topic_name)
    did_create_topic = False

  # Verify that the service account appears in the topic's IAM policy.
  topic_policy = client.projects_topics.GetIamPolicy(
      messages.PubsubProjectsTopicsGetIamPolicyRequest(resource=topic_name))
  publisher_binding = messages.Binding(
      role='roles/pubsub.publisher',
      members=['serviceAccount:' + service_account_email])

  if publisher_binding in topic_policy.bindings:
    # A stronger role (or a project-level grant) may already cover publishing,
    # but checking for those adds complexity and may exceed the caller's own
    # permissions, so only this exact binding is recognized.
    log.warning(
        'Project service account {} already has publish permission for topic {}'
        .format(service_account_email, topic_name))
    return did_create_topic

  topic_policy.bindings.append(publisher_binding)
  updated_policy = client.projects_topics.SetIamPolicy(
      messages.PubsubProjectsTopicsSetIamPolicyRequest(
          resource=topic_name,
          setIamPolicyRequest=messages.SetIamPolicyRequest(
              policy=topic_policy)))
  log.info('Updated topic IAM policy:\n{}'.format(updated_policy))
  return True
@base.UniverseCompatible
class Create(base.Command):
  """Create a notification configuration on a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* creates a notification configuration on a bucket,
      establishing a flow of event notifications from Cloud Storage to a
      Cloud Pub/Sub topic. As part of creating this flow, it also verifies
      that the destination Cloud Pub/Sub topic exists, creating it if necessary,
      and verifies that the Cloud Storage bucket has permission to publish
      events to that topic, granting the permission if necessary.

      If a destination Cloud Pub/Sub topic is not specified with the `-t` flag,
      Cloud Storage chooses a topic name in the default project whose ID is
      the same as the bucket name. For example, if the default project ID
      specified is `default-project` and the bucket being configured is
      `gs://example-bucket`, the create command uses the Cloud Pub/Sub topic
      `projects/default-project/topics/example-bucket`.

      In order to enable notifications, your project's
      [Cloud Storage service agent](https://cloud.google.com/storage/docs/projects#service-accounts)
      must have the IAM permission "pubsub.topics.publish".
      This command checks to see if the destination Cloud Pub/Sub topic grants
      the service agent this permission. If not, the create command attempts to
      grant it.

      A bucket can have up to 100 total notification configurations and up to
      10 notification configurations set to trigger for a specific event.
      """,
      'EXAMPLES':
          """
      Send notifications of all changes to the bucket
      `example-bucket` to the Cloud Pub/Sub topic
      `projects/default-project/topics/example-bucket`:

        $ {command} gs://example-bucket

      The same as the above but sends no notification payload:

        $ {command} --payload-format=none gs://example-bucket

      Include custom metadata in notification payloads:

        $ {command} --custom-attributes=key1:value1,key2:value2 gs://example-bucket

      Create a notification configuration that only sends an event when a new
      object has been created or an object is deleted:

        $ {command} --event-types=OBJECT_FINALIZE,OBJECT_DELETE gs://example-bucket

      Create a topic and notification configuration that sends events only when
      they affect objects with the prefix `photos/`:

        $ {command} --object-prefix=photos/ gs://example-bucket

      Specifies the destination topic ID `files-to-process` in the default
      project:

        $ {command} --topic=files-to-process gs://example-bucket

      The same as above but specifies a Cloud Pub/Sub topic belonging
      to the specific cloud project `example-project`:

        $ {command} --topic=projects/example-project/topics/files-to-process gs://example-bucket

      Skip creating a topic when creating the notification configuration:

        $ {command} --skip-topic-setup gs://example-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'url',
        help='URL of the bucket to create the notification configuration'
        ' on.')
    parser.add_argument(
        '-m',
        '--custom-attributes',
        metavar='KEY=VALUE',
        type=arg_parsers.ArgDict(),
        help='Specifies key:value attributes that are appended to the set of'
        ' attributes sent to Cloud Pub/Sub for all events associated with'
        ' this notification configuration.')
    parser.add_argument(
        '-e',
        '--event-types',
        metavar='NOTIFICATION_EVENT_TYPE',
        type=arg_parsers.ArgList(
            choices=sorted(
                [status.value for status in cloud_api.NotificationEventType])),
        help=(
            'Specify event type filters for this notification configuration.'
            ' Cloud Storage will send notifications of only these types. By'
            ' default, Cloud Storage sends notifications for all event types.'
            ' * OBJECT_FINALIZE: An object has been created.'
            ' * OBJECT_METADATA_UPDATE: The metadata of an object has changed.'
            ' * OBJECT_DELETE: An object has been permanently deleted.'
            ' * OBJECT_ARCHIVE: A live version of an object has become a'
            ' noncurrent version.'))
    parser.add_argument(
        '-p',
        '--object-prefix',
        help='Specifies a prefix path for this notification configuration.'
        ' Cloud Storage will send notifications for only objects in the'
        ' bucket whose names begin with the prefix.')
    parser.add_argument(
        '-f',
        '--payload-format',
        choices=sorted(
            [status.value for status in cloud_api.NotificationPayloadFormat]),
        default=cloud_api.NotificationPayloadFormat.JSON.value,
        help='Specifies the payload format of notification messages.'
        ' Notification details are available in the message attributes.'
        " 'none' sends no payload.")
    parser.add_argument(
        '-s',
        '--skip-topic-setup',
        action='store_true',
        help='Skips creation and permission assignment of the Cloud Pub/Sub'
        ' topic. This is useful if the caller does not have permission to'
        ' access the topic in question, or if the topic already exists and has'
        ' the appropriate publish permission assigned.')
    parser.add_argument(
        '-t',
        '--topic',
        help='Specifies the Cloud Pub/Sub topic to send notifications to.'
        ' If not specified, this command chooses a topic whose project is'
        ' your default project and whose ID is the same as the'
        ' Cloud Storage bucket name.')

  def Run(self, args):
    """Creates the notification configuration, setting up the topic first."""
    project_id = properties.VALUES.core.project.GetOrFail()
    url = storage_url.storage_url_from_string(args.url)
    notification_configuration_iterator.raise_error_if_not_gcs_bucket_matching_url(
        url)

    if not args.topic:
      # Default: topic in the default project with the same ID as the bucket.
      topic_name = 'projects/{}/topics/{}'.format(project_id, url.bucket_name)
    elif not args.topic.startswith('projects/'):
      # A topic ID may be present but not a whole path. Use the default project.
      topic_name = 'projects/{}/topics/{}'.format(
          project_id,
          args.topic.rpartition('/')[-1])
    else:
      topic_name = args.topic

    # Notifications supported for only GCS.
    gcs_client = api_factory.get_api(storage_url.ProviderPrefix.GCS)

    if not args.skip_topic_setup:
      # Project number is different than project ID.
      bucket_project_number = gcs_client.get_bucket(
          url.bucket_name).metadata.projectNumber
      # Fetch the email of the service account that will need access to
      # the new pubsub topic.
      service_account_email = gcs_client.get_service_agent(
          project_number=bucket_project_number)
      log.info(
          'Checking for topic {} with access for project {} service account {}.'
          .format(topic_name, project_id, service_account_email))
      created_new_topic_or_set_new_permissions = _maybe_create_or_modify_topic(
          topic_name, service_account_email)
    else:
      created_new_topic_or_set_new_permissions = False

    if args.event_types:
      event_types = [
          cloud_api.NotificationEventType(event_type)
          for event_type in args.event_types
      ]
    else:
      event_types = None

    create_notification_configuration = functools.partial(
        gcs_client.create_notification_configuration,
        url,
        topic_name,
        custom_attributes=args.custom_attributes,
        event_types=event_types,
        object_name_prefix=args.object_prefix,
        payload_format=cloud_api.NotificationPayloadFormat(args.payload_format))
    try:
      return create_notification_configuration()
    except api_errors.CloudApiError:
      if not created_new_topic_or_set_new_permissions:
        raise
      # IAM changes on a fresh topic can take several seconds to propagate,
      # so retry once after a delay before giving up.
      log.warning(
          'Retrying create notification request because topic changes may'
          ' take up to 10 seconds to process.')
      time.sleep(10)
      return create_notification_configuration()

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to delete notification configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets.notifications import delete_notification_configuration_task
def _delete_notification_configuration_task_iterator(urls):
  """Yields one delete task per notification configuration matching urls."""
  iterator_results = (
      notification_configuration_iterator
      .get_notification_configuration_iterator(urls))
  for result in iterator_results:
    delete_task = (
        delete_notification_configuration_task
        .DeleteNotificationConfigurationTask(
            result.bucket_url, result.notification_configuration.id))
    yield delete_task
@base.UniverseCompatible
class Delete(base.DeleteCommand):
  """Delete notification configurations from a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* deletes notification configurations from a bucket. If a
      notification configuration name is passed as a parameter, that
      configuration alone is deleted. If a bucket name is passed, all
      notification configurations associated with the bucket are deleted.

      Cloud Pub/Sub topics associated with this notification configuration
      are not deleted by this command. Those must be deleted separately,
      for example with the command "gcloud pubsub topics delete".
      """,
      'EXAMPLES':
          """
      Delete a single notification configuration (with ID 3) in the
      bucket `example-bucket`:

        $ {command} projects/_/buckets/example-bucket/notificationConfigs/3

      Delete all notification configurations in the bucket `example-bucket`:

        $ {command} gs://example-bucket
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='+',
        help='Specifies notification configuration names or buckets.')

  def Run(self, args):
    # Deletions can span many buckets, so fan them out through the parallel
    # task executor with simple integer progress reporting.
    status_queue = task_graph_executor.multiprocessing_context.Queue()
    progress_args = task_status.ProgressManagerArgs(
        increment_type=task_status.IncrementType.INTEGER, manifest_path=None)
    task_executor.execute_tasks(
        _delete_notification_configuration_task_iterator(args.urls),
        parallelizable=True,
        task_status_queue=status_queue,
        progress_manager_args=progress_args,
    )

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to show metadata of a notification configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core.resource import resource_projector
@base.UniverseCompatible
class Describe(base.DescribeCommand):
  """Show metadata for a notification configuration."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* prints populated metadata for a notification configuration.
      """,
      'EXAMPLES':
          """
      Describe a single notification configuration (with ID 3) in the
      bucket `example-bucket`:

        $ {command} projects/_/buckets/example-bucket/notificationConfigs/3
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument('url', help='The url of the notification configuration')

  def Run(self, args):
    # Split the notification URL into bucket and ID; a valid notification
    # configuration URL must yield both pieces.
    bucket_url, notification_id = (
        notification_configuration_iterator
        .get_bucket_url_and_notification_id_from_url(args.url))
    if not bucket_url or not notification_id:
      raise errors.InvalidUrlError(
          'Received invalid notification configuration URL: ' + args.url)
    gcs_api = api_factory.get_api(storage_url.ProviderPrefix.GCS)
    return resource_projector.MakeSerializable(
        gcs_api.get_notification_configuration(bucket_url, notification_id))

View File

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list notification configurations belonging to a bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import notification_configuration_iterator
from googlecloudsdk.core.resource import resource_printer
from googlecloudsdk.core.resource import resource_projector
_PUBSUB_DOMAIN_PREFIX_LENGTH = len('//pubsub.googleapis.com/')
def _get_human_readable_notification(url, config):
"""Returns pretty notification string."""
if config.custom_attributes:
custom_attributes_string = '\n\tCustom attributes:'
for attribute in config.custom_attributes.additionalProperties:
custom_attributes_string += '\n\t\t{}: {}'.format(
attribute.key, attribute.value
)
else:
custom_attributes_string = ''
if config.event_types or config.object_name_prefix:
filters_string = '\n\tFilters:'
if config.event_types:
filters_string += '\n\t\tEvent Types: {}'.format(
', '.join(config.event_types)
)
if config.object_name_prefix:
filters_string += "\n\t\tObject name prefix: '{}'".format(
config.object_name_prefix
)
else:
filters_string = ''
return (
'projects/_/buckets/{bucket}/notificationConfigs/{notification}\n'
'\tCloud Pub/Sub topic: {topic}'
'{custom_attributes}{filters}\n\n'.format(
bucket=url.bucket_name,
notification=config.id,
topic=config.topic[_PUBSUB_DOMAIN_PREFIX_LENGTH:],
custom_attributes=custom_attributes_string,
filters=filters_string,
)
)
@base.UniverseCompatible
class List(base.ListCommand):
  """List the notification configurations belonging to a given bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      *{command}* provides a list of notification configurations belonging to a
      given bucket. The listed name of each configuration can be used
      with the delete sub-command to delete that specific notification config.
      """,
      'EXAMPLES':
          """
      Fetch the list of notification configs for the bucket `example-bucket`:

        $ {command} gs://example-bucket

      Fetch the notification configs in all buckets matching a wildcard:

        $ {command} gs://example-*

      Fetch all of the notification configs for buckets in the default project:

        $ {command}
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='*',
        help='Google Cloud Storage bucket paths. The path must begin '
        'with gs:// and may contain wildcard characters.')
    parser.add_argument(
        '--human-readable',
        action='store_true',
        # Used by shim. Could be public but don't want maintenance burden.
        hidden=True,
        help=(
            'Prints notification information in a more descriptive,'
            ' unstructured format.'
        ),
    )

  def Display(self, args, resources):
    # Human-readable results are pre-formatted strings; print them verbatim.
    output_format = 'object' if args.human_readable else (args.format or 'yaml')
    resource_printer.Print(resources, output_format)

  def Run(self, args):
    # The bare provider URL fetches all notification configurations in the
    # default project.
    urls = args.urls or ['gs://']
    # Non-bucket URLs raise an error inside the iterator.
    for iterator_result in (
        notification_configuration_iterator
        .get_notification_configuration_iterator(
            urls, accept_notification_configuration_urls=False)):
      bucket_url, configuration = iterator_result
      if args.human_readable:
        yield _get_human_readable_notification(bucket_url, configuration)
      else:
        yield {
            'Bucket URL': bucket_url.url_string,
            'Notification Configuration': resource_projector.MakeSerializable(
                configuration
            ),
        }

View File

@@ -0,0 +1,234 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets relocate command."""
import textwrap
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors as command_errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import operations_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
# Shown before a relocation whose location string changes, i.e. a physical
# move that incurs write downtime.
# NOTE(review): the constant name misspells "RELOCATION"; renaming it would
# require updating its use site as well.
_BUCKET_RELCOCATION_WRITE_DOWNTIME_WARNING = textwrap.dedent("""
1. This move will involve write downtime.
2. In-flight resumable uploads not finished before the write downtime will be \
lost.
3. Bucket tags added to the bucket will result in the relocation being canceled.
4. Please ensure that you have sufficient quota in the destination before \
performing the relocation.
""")

# Shown when source and destination locations compare equal (case-insensitive),
# so the move is a policy change with no write downtime.
_BUCKET_RELOCATION_WITHOUT_WRITE_DOWNTIME_WARNING = textwrap.dedent("""
1. This is a policy change move (no write downtime).
2. Please ensure that you have sufficient quota in the destination before \
performing the relocation.
""")

# Shown before advancing (scheduling the write lock for) an in-progress
# relocation operation.
_ADVANCING_BUCKET_RELOCATION_WARNING = textwrap.dedent("""
1. Any ongoing, in-flight resumable uploads will be canceled and lost.
2. Write downtime will be incurred.
""")
def _get_bucket_resource(gcs_client, bucket_url):
"""Fetches the bucket resource for the given bucket storage URL."""
try:
return gcs_client.get_bucket(bucket_url.bucket_name)
except api_errors.CloudApiError as e:
raise command_errors.FatalError(e) from e
def _prompt_user_to_confirm_the_relocation(bucket_resource, args):
  """Warns about relocation side effects and requires the user to confirm."""
  if args.dry_run:
    # Dry runs move nothing, so no acknowledgement is required.
    return

  source_location = f'{bucket_resource.location}'
  if bucket_resource.data_locations:
    source_location += f' {bucket_resource.data_locations}'

  # An unchanged location means a policy-only move with no write downtime.
  is_policy_change_only = (
      bucket_resource.location.casefold() == args.location.casefold())
  if is_policy_change_only:
    warning_message = _BUCKET_RELOCATION_WITHOUT_WRITE_DOWNTIME_WARNING
  else:
    warning_message = _BUCKET_RELCOCATION_WRITE_DOWNTIME_WARNING

  log.warning(f'The bucket {args.url} is in {source_location}.')
  log.warning(warning_message)
  console_io.PromptContinue(
      prompt_string=(
          "Please acknowledge that you've read the above warnings and want to"
          f' relocate the bucket {args.url}?'
      ),
      cancel_on_no=True,
  )
  log.status.Print(f'Starting bucket relocation for {args.url}...\n')
def _prompt_user_to_confirm_advancing_the_relocation(bucket_name):
  """Prompt the user to confirm advancing the relocation."""
  # Advancing schedules the write lock, so surface the downtime warning before
  # asking for confirmation; PromptContinue aborts the command on "no".
  log.warning(_ADVANCING_BUCKET_RELOCATION_WARNING)
  console_io.PromptContinue(
      prompt_string=(
          'This will start the write downtime for your relocation of gs://'
          f'{bucket_name}, are you sure you want to continue?'
      ),
      cancel_on_no=True,
  )
# TODO: b/361729720 - Make bucket-relocate command group universe compatible.
@base.DefaultUniverseOnly
class Relocate(base.Command):
  """Relocates bucket between different locations."""

  detailed_help = {
      'DESCRIPTION': """
      Relocates a bucket between different locations.
      """,
      'EXAMPLES': """
      To move a bucket (``gs://my-bucket'') to the ``us-central1'' location, use
      the following command:

        $ {command} gs://my-bucket --location=us-central1

      To move a bucket to a custom dual-region, use the following command:

        $ {command} gs://my-bucket --location=us
        --placement=us-central1,us-east1

      To validate the operation without actually moving the bucket, use the
      following command:

        $ {command} gs://my-bucket --location=us-central1 --dry-run

      To schedule a write lock for the move, with ttl for reverting the write
      lock after 7h, if the relocation has not succeeded, use the following
      command:

        $ {command}
        --operation=projects/_/buckets/my-bucket/operations/C894F35J
        --finalize --ttl=7h
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.SetSortArgs(False)
    # Exactly one of the two sub-groups may be used: either start a
    # relocation (url + --location ...) or advance one (--operation ...).
    relocate_arguments_group = parser.add_mutually_exclusive_group(
        required=True
    )

    bucket_relocate_group = relocate_arguments_group.add_group(
        'Arguments for initiating the bucket relocate operation.'
    )
    bucket_relocate_group.SetSortArgs(False)
    bucket_relocate_group.add_argument(
        'url',
        type=str,
        help='The URL of the bucket to relocate.',
    )
    bucket_relocate_group.add_argument(
        '--location',
        type=str,
        required=True,
        help=(
            'The final [location]'
            '(https://cloud.google.com/storage/docs/locations) where the'
            ' bucket will be relocated to.'
        ),
    )
    flags.add_placement_flag(bucket_relocate_group)
    bucket_relocate_group.add_argument(
        '--dry-run',
        action='store_true',
        help=(
            'Prints the operations that the relocate command would perform'
            ' without actually performing relocation. This is helpful to'
            ' identify any issues that need to be detected asynchronously.'
        ),
    )

    advance_relocate_operation_group = relocate_arguments_group.add_group(
        'Arguments for advancing the relocation operation.'
    )
    advance_relocate_operation_group.SetSortArgs(False)
    advance_relocate_operation_group.add_argument(
        '--operation',
        type=str,
        required=True,
        help=(
            'Specify the relocation operation name to advance the relocation'
            ' operation. The relocation operation name must include the Cloud'
            ' Storage bucket and operation ID.'
        ),
    )
    advance_relocate_operation_group.add_argument(
        '--finalize',
        action='store_true',
        required=True,
        help=(
            'Schedules the write lock to occur. Once activated, no further'
            ' writes will be allowed to the associated bucket. This helps'
            ' minimize disruption to bucket usage. For certain types of'
            ' moves (between Multi Region and Custom Dual Regions), finalize'
            ' is not required.'
        ),
    )
    advance_relocate_operation_group.add_argument(
        '--ttl',
        type=arg_parsers.Duration(),
        help=(
            'Time to live for the relocation operation. Defaults to 12h if not'
            ' provided.'
        ),
    )

  def Run(self, args):
    gcs_client = api_factory.get_api(storage_url.ProviderPrefix.GCS)

    if args.url:
      # Initiation path: validate the bucket URL, warn the user, then start
      # (or dry-run) the relocation.
      url = storage_url.storage_url_from_string(args.url)
      errors_util.raise_error_if_not_gcs_bucket(args.command_path, url)
      bucket_resource = _get_bucket_resource(gcs_client, url)
      _prompt_user_to_confirm_the_relocation(bucket_resource, args)
      return gcs_client.relocate_bucket(
          url.bucket_name,
          args.location,
          args.placement,
          args.dry_run,
      )

    # Advancement path: schedule the write lock for an in-progress operation.
    bucket, operation_id = (
        operations_util.get_operation_bucket_and_id_from_name(args.operation)
    )
    _prompt_user_to_confirm_advancing_the_relocation(bucket)
    gcs_client.advance_relocate_bucket(bucket, operation_id, args.ttl)
    log.status.Print(
        f'Sent request to advance relocation for bucket gs://{bucket} with'
        f' operation {operation_id}.'
    )

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets remove-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class RemoveIamPolicyBinding(base.Command):
  """Remove an IAM policy binding from a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Removes a policy binding from the IAM policy of a bucket, given a bucket
      URL and the binding. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To remove an IAM policy binding from the role of
      roles/storage.objectCreator for the user john.doe@example.com on BUCKET:

        $ {command} gs://BUCKET --member=user:john.doe@example.com --role=roles/storage.objectCreator
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'url', help='URL of bucket to remove IAM policy binding from.')
    iam_util.AddArgsForRemoveIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    bucket_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    # Read the current policy first; the binding is removed from this copy and
    # then written back via the set-policy task.
    current_policy = api_factory.get_api(
        bucket_url.scheme).get_bucket_iam_policy(bucket_url.bucket_name)
    return iam_command_util.remove_iam_binding_from_resource(
        args, bucket_url, current_policy,
        set_iam_policy_task.SetBucketIamPolicyTask)

View File

@@ -0,0 +1,94 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of buckets set-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.api_lib.storage.gcs_json import metadata_field_converters
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
def _set_iam_policy_task_iterator(url_strings, policy):
  """Yields a SetBucketIamPolicyTask for every bucket matching url_strings."""
  for url_string in url_strings:
    matching_resources = wildcard_iterator.get_wildcard_iterator(
        url_string, fields_scope=cloud_api.FieldsScope.SHORT)
    for bucket_resource in matching_resources:
      yield set_iam_policy_task.SetBucketIamPolicyTask(
          bucket_resource.storage_url, policy)
@base.UniverseCompatible
class SetIamPolicy(base.Command):
  """Set the IAM policy for a bucket."""

  detailed_help = {
      'DESCRIPTION':
          """
      Set the IAM policy for a bucket. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To set the IAM policy in POLICY-FILE on BUCKET:

        $ {command} gs://BUCKET POLICY-FILE

      To set the IAM policy in POLICY-FILE on all buckets beginning with "b":

        $ {command} gs://b* POLICY-FILE
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'urls',
        nargs='+',
        help='URLs for buckets to apply the IAM policy to.'
        ' Can include wildcards.')
    parser.add_argument(
        '-e',
        '--etag',
        help='Custom etag to set on IAM policy. API will reject etags that do'
        ' not match this value, making it useful as a precondition during'
        ' concurrent operations.')
    iam_util.AddArgForPolicyFile(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    # Validate every URL up front so no policy is applied before a bad URL
    # is caught.
    for url_string in args.urls:
      bucket_url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_gcs_bucket(args.command_path, bucket_url)
    policy = metadata_field_converters.process_iam_file(
        args.policy_file, custom_etag=args.etag)
    self.exit_code, output = iam_command_util.execute_set_iam_task_iterator(
        _set_iam_policy_task_iterator(args.urls, policy),
        args.continue_on_error)
    return output

View File

@@ -0,0 +1,441 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating bucket settings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import stdin_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import user_request_args_factory
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
from googlecloudsdk.command_lib.storage.tasks import task_status
from googlecloudsdk.command_lib.storage.tasks.buckets import update_bucket_task
_CORS_HELP_TEXT = """
Sets the Cross-Origin Resource Sharing (CORS) configuration on a bucket.
An example CORS JSON document looks like the following:
[
{
"origin": ["http://origin1.example.com"],
"responseHeader": ["Content-Type"],
"method": ["GET"],
"maxAgeSeconds": 3600
}
]
For more information about supported endpoints for CORS, see
[Cloud Storage CORS support](https://cloud.google.com/storage/docs/cross-origin#server-side-support).
"""
_LABELS_HELP_TEXT = """
Sets the label configuration for the bucket. An example label JSON document
looks like the following:
{
"your_label_key": "your_label_value",
"your_other_label_key": "your_other_label_value"
}
"""
_LIFECYCLE_HELP_TEXT = """
Sets the lifecycle management configuration on a bucket. For example,
The following lifecycle management configuration JSON document
specifies that all objects in this bucket that are more than 365 days
old are deleted automatically:
{
"rule":
[
{
"action": {"type": "Delete"},
"condition": {"age": 365}
}
]
}
"""
def _add_common_args(parser):
  """Register flags shared by every release track of this command.

  Args:
    parser (argparse.ArgumentParser): The parser to add the arguments to.
  """
  parser.add_argument(
      'url',
      nargs='*',
      type=str,
      help='Specifies the URLs of the buckets to update.',
  )
  # Bucket ACL modification flags.
  acl_flags_group = parser.add_group()
  flags.add_acl_modifier_flags(acl_flags_group)
  # Default object ACL flags (ACLs applied to newly created objects).
  default_acl_flags_group = parser.add_group()
  default_acl_flags_group.add_argument(
      '--default-object-acl-file',
      help='Sets the default object ACL from file for the bucket.',
  )
  default_acl_flags_group.add_argument(
      '--predefined-default-object-acl',
      help='Apply a predefined set of default object access controls to'
      ' buckets.',
  )
  default_acl_flags_group.add_argument(
      '--add-default-object-acl-grant',
      action='append',
      metavar='DEFAULT_OBJECT_ACL_GRANT',
      type=arg_parsers.ArgDict(),
      help=(
          'Adds default object ACL grant. See --add-acl-grant help text for'
          ' more details.'
      ),
  )
  default_acl_flags_group.add_argument(
      '--remove-default-object-acl-grant',
      action='append',
      help=(
          'Removes default object ACL grant. See --remove-acl-grant help text'
          ' for more details.'
      ),
  )
  # Setting a CORS file and clearing CORS are mutually exclusive.
  cors = parser.add_mutually_exclusive_group()
  cors.add_argument('--cors-file', help=_CORS_HELP_TEXT)
  cors.add_argument(
      '--clear-cors',
      action='store_true',
      help="Clears the bucket's CORS settings.")
  parser.add_argument(
      '--default-storage-class',
      help='Sets the default storage class for the bucket.',
  )
  default_encryption_key = parser.add_mutually_exclusive_group()
  default_encryption_key.add_argument(
      '--default-encryption-key',
      help='Set the default KMS key for the bucket.')
  default_encryption_key.add_argument(
      '--clear-default-encryption-key',
      action='store_true',
      help="Clears the bucket's default encryption key.")
  parser.add_argument(
      '--default-event-based-hold',
      action=arg_parsers.StoreTrueFalseAction,
      help='Sets the default value for an event-based hold on the bucket.'
      ' By setting the default event-based hold on a bucket, newly-created'
      ' objects inherit that value as their event-based hold (it is not'
      ' applied retroactively).')
  # Label flags: a labels file is exclusive with both incremental updates and
  # clearing; incremental update/remove may be combined with each other.
  labels = parser.add_mutually_exclusive_group()
  labels.add_argument('--labels-file', help=_LABELS_HELP_TEXT)
  update_labels = labels.add_group()
  update_labels.add_argument(
      '--update-labels',
      metavar='LABEL_KEYS_AND_VALUES',
      type=arg_parsers.ArgDict(),
      help='Add or update labels. Example:'
      ' --update-labels=key1=value1,key2=value2')
  update_labels.add_argument(
      '--remove-labels',
      metavar='LABEL_KEYS',
      type=arg_parsers.ArgList(),
      help='Remove labels by their key names.')
  labels.add_argument(
      '--clear-labels',
      action='store_true',
      help='Clear all labels associated with a bucket.')
  lifecycle = parser.add_mutually_exclusive_group()
  lifecycle.add_argument('--lifecycle-file', help=_LIFECYCLE_HELP_TEXT)
  lifecycle.add_argument(
      '--clear-lifecycle',
      action='store_true',
      help='Removes all lifecycle configuration for the bucket.')
  log_bucket = parser.add_mutually_exclusive_group()
  log_bucket.add_argument(
      '--log-bucket',
      help='Enables usage and storage logging for the bucket specified in the'
      ' overall update command, outputting log files to the bucket specified in'
      ' this flag. Cloud Storage does not validate the existence of the bucket'
      ' receiving logs. In addition to enabling logging on your bucket, you'
      ' also need to grant cloud-storage-analytics@google.com write access to'
      ' the log bucket.')
  log_bucket.add_argument(
      '--clear-log-bucket',
      action='store_true',
      help='Disables usage and storage logging for the bucket specified in the'
      ' overall update command.')
  log_object_prefix = parser.add_mutually_exclusive_group()
  log_object_prefix.add_argument(
      '--log-object-prefix',
      help='Specifies a prefix for the names of logs generated in the log'
      ' bucket. The default prefix is the bucket name. If logging is not'
      ' enabled, this flag has no effect.')
  log_object_prefix.add_argument(
      '--clear-log-object-prefix',
      action='store_true',
      help='Clears the prefix used to determine the naming of log objects in'
      ' the logging bucket.')
  public_access_prevention = parser.add_mutually_exclusive_group()
  public_access_prevention.add_argument(
      '--public-access-prevention',
      '--pap',
      action=arg_parsers.StoreTrueFalseAction,
      help='If True, sets [public access prevention](https://cloud.google.com'
      '/storage/docs/public-access-prevention) to "enforced".'
      ' If False, sets public access prevention to "inherited".')
  public_access_prevention.add_argument(
      '--clear-public-access-prevention',
      '--clear-pap',
      action='store_true',
      help='Unsets the public access prevention setting on a bucket.',
  )
  retention_period = parser.add_mutually_exclusive_group()
  retention_period.add_argument(
      '--retention-period',
      help='Minimum [retention period](https://cloud.google.com'
      '/storage/docs/bucket-lock#retention-periods)'
      ' for objects stored in the bucket, for example'
      ' ``--retention-period=P1Y1M1DT5S\'\'. Objects added to the bucket'
      ' cannot be deleted until they\'ve been stored for the specified'
      ' length of time. Default is no retention period. Only available'
      ' for Cloud Storage using the JSON API.')
  retention_period.add_argument(
      '--clear-retention-period',
      action='store_true',
      help='Clears the object retention period for a bucket.')
  parser.add_argument(
      '--lock-retention-period',
      action='store_true',
      help='Locks an unlocked retention policy on the buckets. Caution: A'
      ' locked retention policy cannot be removed from a bucket or reduced in'
      ' duration. Once locked, deleting the bucket is the only way to'
      ' "remove" a retention policy.')
  parser.add_argument(
      '--requester-pays',
      action=arg_parsers.StoreTrueFalseAction,
      help='Allows you to configure a Cloud Storage bucket so that the'
      ' requester pays all costs related to accessing the bucket and its'
      ' objects.')
  parser.add_argument(
      '--soft-delete-duration',
      type=arg_parsers.Duration(),
      help=(
          'Duration to retain soft-deleted objects. For example, "2w1d" is'
          ' two weeks and one day.'
      ),
  )
  parser.add_argument(
      '--clear-soft-delete',
      action='store_true',
      help=(
          'Clears bucket soft delete settings. Does not affect objects already'
          ' in soft-deleted state.'
      ),
  )
  parser.add_argument(
      '--uniform-bucket-level-access',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          'Enables or disables [uniform bucket-level access]'
          '(https://cloud.google.com/storage/docs/bucket-policy-only)'
          ' for the buckets.'
      ),
  )
  parser.add_argument(
      '--versioning',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          'Allows you to configure a Cloud Storage bucket to keep old'
          ' versions of objects.'
      ),
  )
  # Static-website configuration flags.
  web_main_page_suffix = parser.add_mutually_exclusive_group()
  web_main_page_suffix.add_argument(
      '--web-main-page-suffix',
      help=(
          'Cloud Storage allows you to configure a bucket to behave like a'
          ' static website. A subsequent GET bucket request through a custom'
          ' domain serves the specified "main" page instead of performing the'
          ' usual bucket listing.'
      ),
  )
  web_main_page_suffix.add_argument(
      '--clear-web-main-page-suffix',
      action='store_true',
      help='Clear website main page suffix if bucket is hosting website.',
  )
  web_error_page = parser.add_mutually_exclusive_group()
  web_error_page.add_argument(
      '--web-error-page',
      help=(
          'Cloud Storage allows you to configure a bucket to behave like a'
          ' static website. A subsequent GET bucket request through a custom'
          ' domain for a non-existent object serves the specified error page'
          ' instead of the standard Cloud Storage error.'
      ),
  )
  web_error_page.add_argument(
      '--clear-web-error-page',
      action='store_true',
      help='Clear website error page if bucket is hosting website.',
  )
  flags.add_additional_headers_flag(parser)
  flags.add_autoclass_flags(parser)
  flags.add_continue_on_error_flag(parser)
  flags.add_recovery_point_objective_flag(parser)
  flags.add_read_paths_from_stdin_flag(parser)
  ip_filter = parser.add_mutually_exclusive_group()
  ip_filter.add_argument(
      '--clear-ip-filter',
      action='store_true',
      help='Disables and clears IP filter configuration of the bucket.',
  )
  flags.add_ip_filter_file_flag(ip_filter)
def _add_alpha_args(parser):
  """Register flags for the alpha version of this command.

  Args:
    parser (argparse.ArgumentParser): The parser to add the arguments to.
  """
  # Alpha-only flag: accepts a file describing an encryption enforcement
  # configuration for the bucket.
  flags.add_encryption_enforcement_file_flag(parser)
def _is_initial_bucket_metadata_needed(user_request_args):
"""Determines if the bucket update has to patch existing metadata."""
resource_args = user_request_args.resource_args
if not resource_args:
return False
return user_request_args_factory.adds_or_removes_acls(
user_request_args) or any([
resource_args.labels_file_path,
resource_args.labels_to_append,
resource_args.labels_to_remove,
])
@base.ReleaseTracks(base.ReleaseTrack.GA)
@base.UniverseCompatible
class Update(base.Command):
  """Update bucket settings."""

  detailed_help = {
      'DESCRIPTION':
          """
      Update the settings for a bucket.
      """,
      'EXAMPLES':
          """
      The following command updates the default storage class of a Cloud Storage
      bucket named "my-bucket" to NEARLINE and sets requester pays to true:

        $ {command} gs://my-bucket --default-storage-class=NEARLINE --requester-pays

      The following command updates the retention period of a Cloud Storage
      bucket named "my-bucket" to one year and thirty-six minutes:

        $ {command} gs://my-bucket --retention-period=1y36m

      The following command clears the retention period of a bucket:

        $ {command} gs://my-bucket --clear-retention-period
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the flags shared by all release tracks."""
    _add_common_args(parser)

  def update_task_iterator(self, args):
    """Yields an UpdateBucketTask for each bucket matched by the URL args.

    Args:
      args: The parsed command-line arguments.

    Yields:
      update_bucket_task.UpdateBucketTask: One task per matched bucket.
    """
    user_request_args = (
        user_request_args_factory.get_user_request_args_from_command_args(
            args, metadata_type=user_request_args_factory.MetadataType.BUCKET
        )
    )
    # ACL mutations need the full metadata (including ACLs); everything else
    # can use the cheaper no-ACL projection.
    if user_request_args_factory.adds_or_removes_acls(user_request_args):
      fields_scope = cloud_api.FieldsScope.FULL
    else:
      fields_scope = cloud_api.FieldsScope.NO_ACL
    # URLs come either from positional args or from stdin
    # (--read-paths-from-stdin).
    urls = stdin_iterator.get_urls_iterable(
        args.url, args.read_paths_from_stdin
    )
    for url_string in urls:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_bucket(args.command_path, url)
      for resource in wildcard_iterator.get_wildcard_iterator(
          url_string,
          fields_scope=fields_scope,
          get_bucket_metadata=_is_initial_bucket_metadata_needed(
              user_request_args)):
        yield update_bucket_task.UpdateBucketTask(
            resource, user_request_args=user_request_args)

  def Run(self, args):
    """Executes the bucket update tasks and records the executor exit code."""
    task_status_queue = task_graph_executor.multiprocessing_context.Queue()
    # Retention-period locking is irreversible, so those updates are run
    # sequentially rather than in parallel.
    locks_retention_period = getattr(args, 'lock_retention_period', False)
    self.exit_code = task_executor.execute_tasks(
        self.update_task_iterator(args),
        parallelizable=not locks_retention_period,
        task_status_queue=task_status_queue,
        progress_manager_args=task_status.ProgressManagerArgs(
            increment_type=task_status.IncrementType.INTEGER,
            manifest_path=None),
        continue_on_error=args.continue_on_error,
    )
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(Update):
  """Update bucket settings."""

  # Inherits update_task_iterator and Run from Update; only the registered
  # flags and help text differ.
  detailed_help = {
      'DESCRIPTION':
          """
      Update a bucket.
      """,
      'EXAMPLES':
          """
      The following command updates the retention period of a Cloud Storage
      bucket named "my-bucket" to one year and thirty-six minutes:

        $ {command} gs://my-bucket --retention-period=1y36m

      The following command clears the retention period of a bucket:

        $ {command} gs://my-bucket --clear-retention-period
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the common flags plus the alpha-only flags."""
    _add_common_args(parser)
    _add_alpha_args(parser)

View File

@@ -0,0 +1,150 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like cat command for cloud storage providers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import encryption_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_executor
from googlecloudsdk.command_lib.storage.tasks.cat import cat_task_iterator
def _range_parser(string_value):
  """Creates Range object out of given string value.

  Args:
    string_value (str): The range the user entered.

  Returns:
    Range(int, int|None): The Range object from the given string value.
  """
  if string_value == '-':
    # A lone dash selects the entire object.
    return arg_parsers.Range(start=0, end=None)
  start_text, _, end_text = string_value.partition('-')
  if not start_text:
    # '-y' form: the user wants the final y bytes of the object, encoded as
    # a negative start offset.
    return arg_parsers.Range(start=-int(end_text), end=None)
  if not end_text:
    # 'x-' form: from byte x through the end of the object.
    return arg_parsers.Range(start=int(start_text), end=None)
  # 'x-y' form: delegate parsing and validation to Range itself.
  return arg_parsers.Range.Parse(string_value)
@base.UniverseCompatible
class Cat(base.Command):
  """Outputs the contents of one or more URLs to stdout."""

  detailed_help = {
      'DESCRIPTION':
          """
      The cat command outputs the contents of one or more URLs to stdout. While
      the cat command does not compute a checksum, it is otherwise equivalent to
      doing:

        $ {parent_command} cp url... -

      (The final '-' causes gcloud to stream the output to stdout.)
      """,
      'EXAMPLES':
          """
      The following command writes all text files in a bucket to stdout:

        $ {command} gs://bucket/*.txt

      The following command outputs a short header describing file.txt, along
      with its contents:

        $ {command} -d gs://my-bucket/file.txt

      The following command outputs bytes 256-939 of file.txt:

        $ {command} -r 256-939 gs://my-bucket/file.txt

      The following command outputs the last 5 bytes of file.txt:

        $ {command} -r -5 gs://my-bucket/file.txt
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URLs and the -d/-r display flags."""
    parser.add_argument('url', nargs='+', help='The url of objects to list.')
    parser.add_argument(
        '-d',
        '--display-url',
        action='store_true',
        help='Prints the header before each object.')
    parser.add_argument(
        '-r',
        '--range',
        type=_range_parser,
        help=textwrap.dedent("""\
            Causes gcloud storage to output just the specified byte range of
            the object. In a case where "start" = 'x', and "end" = 'y',
            ranges take the form:
            `x-y` (e.g., `-r 256-5939`), `x-` (e.g., `-r 256-`),
            `-y` (e.g., `-r -5`)

            When offsets start at 0, x-y means to return bytes x
            through y (inclusive), x- means to return bytes x through
            the end of the object, and -y changes the role of y.
            If -y is present, then it returns the last y bytes of the object.

            If the bytes are out of range of the object,
            then nothing is printed"""))
    flags.add_additional_headers_flag(parser)
    flags.add_encryption_flags(parser, command_only_reads_data=True)

  def Run(self, args):
    """Validates the URLs, then streams each matched object to stdout.

    Args:
      args: The parsed command-line arguments.

    Raises:
      errors.InvalidUrlError: If any provided URL is not a cloud URL.
    """
    encryption_util.initialize_key_store(args)
    # Fail fast on any non-cloud URL before starting downloads. (The original
    # implementation also accumulated the parsed URLs in a list that was
    # never read; the dead accumulation has been removed.)
    for url_string in args.url:
      url_object = storage_url.storage_url_from_string(url_string)
      if not isinstance(url_object, storage_url.CloudUrl):
        raise errors.InvalidUrlError('cat only works for valid cloud URLs.'
                                     ' {} is an invalid cloud URL.'.format(
                                         url_object.url_string))
    # Not using "FieldsScope.SHORT" because downloads always expect
    # serialization data for recovering from errors, which requires
    # the "GCS mediaLink" API field.
    source_expansion_iterator = name_expansion.NameExpansionIterator(
        args.url,
        fields_scope=cloud_api.FieldsScope.NO_ACL,
        recursion_requested=name_expansion.RecursionSetting.NO)
    # args.range may be None; getattr defaults then select the whole object.
    task_iterator = cat_task_iterator.get_cat_task_iterator(
        source_expansion_iterator,
        args.display_url,
        start_byte=getattr(args.range, 'start', 0),
        end_byte=getattr(args.range, 'end', None))
    self.exit_code = task_executor.execute_tasks(task_iterator=task_iterator)

View File

@@ -0,0 +1,199 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import copying
from googlecloudsdk.command_lib.storage import paths
from googlecloudsdk.command_lib.storage import storage_parallel
from googlecloudsdk.core import log
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
@base.UniverseCompatible
@base.Deprecate(is_removed=False, warning='This command is deprecated. '
                'Use `gcloud alpha storage cp` instead.')
class Copy(base.Command):
  """Upload, download, and copy Cloud Storage objects."""

  detailed_help = {
      'DESCRIPTION': """\
          Copy files between your local file system and Cloud Storage or from one
          Cloud Storage location to another.
      """,
      'EXAMPLES': """\
          Uploading files:

          To upload a single file to a remote location:

            $ *{command}* path/to/file.txt gs://mybucket/file.txt
            $ *{command}* path/to/file.txt gs://mybucket/

          The above two commands both create gs://mybucket/file.txt.

          To upload multiple files to a remote location:

            $ *{command}* path/to/a.txt other/path/b.txt gs://mybucket/remote-dir/

          The above command will create gs://mybucket/remote-dir/a.txt and
          gs://mybucket/remote-dir/b.txt. If remote-dir does not exist, this command will create
          remote-dir.

          To upload a directory my-dir and all its sub-directories and files:

            $ *{command}* --recursive my-dir gs://mybucket/remote-dir/

          If my-dir has a subdirectory sub-dir and sub-dir has a file a.txt, the above
          command will create gs://mybucket/remote-dir/my-dir/sub-dir/a.txt. The structure of directory
          is kept.

          The following command also uploads all files in my-dir and sub-directories recursively:

            $ *{command}* my-dir/** gs://mybucket/remote-dir/

          The above command flattens the directory structure and creates gs://mybucket/remote-dir/a.txt.

          To upload all files in a directory, ignoring the subdirectories:

            $ *{command}* my-dir/* gs://mybucket/remote-dir/

          If my-dir has a file a.txt and a subdirectory sub-dir. The above command will create
          gs://mybucket/remote-dir/a.txt.

          We can combine the wildcards to upload all text files in a directory and all subdirectories
          recursively:

            $ *{command}* my-dir/**/*.txt gs://mybucket/remote-dir/

          Downloading files:

          To download a single file:

            $ *{command}* gs://mybucket/file.txt local-dir/
            $ *{command}* gs://mybucket/file.txt local-dir/file.txt

          The above two commands both create local-dir/file.txt.

          To download multiple files:

            $ *{command}* gs://mybucket/a.txt gs://mybucket/b.txt local-dir/

          The above command creates local-dir/a.txt and local-dir/b.txt.

          To download a directory and all its sub-directories and files:

            $ *{command}* --recursive gs://mybucket/remote-dir/ local-dir/

          The above command creates local-dir/remote-dir/ which contains all files and subdirectories
          of gs://mybucket/remote-dir/. The structure of directory is kept.

          The following command also downloads all files in gs://mybucket/remote-dir/ to local-dir:

            $ *{command}* gs://mybucket/remote-dir/** local-dir/

          If remote-dir contains files a.txt and sub-dir/b.txt, the above command flattens the
          directory structure and creates local-dir/a.txt and local-dir/b.txt.

          To download all files, ignoring the subdirectories:

            $ *{command}* gs://mybucket/remote-dir/* local-dir/

          We can combine the wildcards to download all text files under remote-dir and its
          subdirectories:

            $ *{command}* gs://mybucket/remote-dir/**/*.txt local-dir/

          Copying between Cloud Storage locations:

          To copy a single file to another location:

            $ *{command}* gs://mybucket/file.txt gs://otherbucket/file.txt
            $ *{command}* gs://mybucket/file.txt gs://otherbucket/

          The above two commands both create gs://otherbucket/file.txt.

          To copy multiple files to a new location:

            $ *{command}* gs://mybucket/a.txt gs://mybucket/b.txt gs://otherbucket/target-dir/

          The above command creates gs://otherbucket/target-dir/a.txt and
          gs://otherbucket/target-dir/b.txt. If target-dir does not exist, this command will create
          target-dir.

          To copy all files and subdirectories in one location to another:

            $ *{command}* --recursive gs://mybucket/source-dir/ gs://otherbucket/target-dir/

          If source-dir has a subdirectory sub-dir and sub-dir has a file a.txt, the above
          command will create gs://otherbucket/target-dir/source-dir/sub-dir/a.txt. The structure of
          directory is kept.

          The following command also copies all files in source-dir and its sub-directories:

            $ *{command}* gs://mybucket/source-dir/** gs://mybucket/target-dir/

          The above command flattens the directory structure and creates gs://mybucket/target-dir/a.txt.

          To copy all files in a directory, ignoring the subdirectories:

            $ *{command}* gs://mybucket/source-dir/* gs://mybucket/target-dir/

          If source-dir has a file a.txt and a subdirectory sub-dir. The above command will create
          gs://mybucket/target-dir/a.txt.

          We can combine the wildcards to copy all text in one location and all its sub-directories:

            $ *{command}* gs://mybucket/source-dir/**/*.txt gs://mybucket/target-dir/
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the source/destination positionals and copy flags."""
    parser.add_argument(
        'source',
        nargs='+',
        help='The source file to copy.')
    parser.add_argument(
        'destination',
        help='The destination to copy the file to.')
    parser.add_argument(
        '--recursive',
        action='store_true',
        help='Recursively copy the contents of any directories that match the '
        'path expression.')
    parser.add_argument(
        '--num-threads',
        type=int,
        hidden=True,
        default=16,
        help='The number of threads to use for the copy.')

  def Run(self, args):
    """Expands the paths, generates copy tasks, and runs them on a thread pool.

    Args:
      args: The parsed command-line arguments.
    """
    sources = [paths.Path(p) for p in args.source]
    dest = paths.Path(args.destination)
    copier = copying.CopyTaskGenerator()
    tasks = copier.GetCopyTasks(sources, dest, recursive=args.recursive)
    storage_parallel.ExecuteTasks(
        tasks, num_threads=args.num_threads, progress_bar_label='Copying Files')
    # Pluralize for any count other than exactly one, so zero tasks reads
    # "0 files" rather than "0 file".
    log.status.write('Copied [{}] file{}.\n'.format(
        len(tasks), 's' if len(tasks) != 1 else ''))

View File

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like cp command for cloud storage providers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import cp_command_util
from googlecloudsdk.command_lib.storage import flags
_COMMAND_DESCRIPTION = """
Copy data between your local file system and the cloud, within the cloud,
and between cloud storage providers.
Please Note - By default, the `cp` command does not follow directory symlinks.
You can use the `--preserve-symlinks` flag to follow directory symlinks.
"""
_GA_EXAMPLES = """
The following command uploads all text files from the local directory to a
bucket:
$ {command} *.txt gs://my-bucket
The following command downloads all text files from a bucket to your
current directory:
$ {command} gs://my-bucket/*.txt .
The following command transfers all text files from a bucket to a
different cloud storage provider:
$ {command} gs://my-bucket/*.txt s3://my-bucket
Use the `--recursive` option to copy an entire directory tree. The
following command uploads the directory tree ``dir'':
$ {command} --recursive dir gs://my-bucket
Recursive listings are similar to adding `**` to a query, except
`**` matches only cloud objects and will not match prefixes. For
example, the following would not match ``gs://my-bucket/dir/log.txt''
$ {command} gs://my-bucket/**/dir dir
`**` retrieves a flat list of objects in a single API call. However, `**`
matches folders for non-cloud queries. For example, a folder ``dir''
would be copied in the following.
$ {command} ~/Downloads/**/dir gs://my-bucket
"""
_ALPHA_EXAMPLES = """
The contexts are preserved when copying to Google Cloud Storage. To overwrite
contexts on the destination object, use `--custom-contexts` or
`--custom-contexts-file`. For example:
$ {command} gs://my-bucket/object \
gs://destination-bucket/object \
--custom-contexts=key1=value1,key2=value2
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Cp(base.Command):
  """Upload, download, and copy Cloud Storage objects."""

  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the cp flag surface for this release track."""
    # The release track is passed so track-specific flags can be included.
    cp_command_util.add_cp_and_mv_flags(parser, cls.ReleaseTrack())
    cp_command_util.add_recursion_flag(parser)
    flags.add_per_object_retention_flags(parser)

  def Run(self, args):
    """Delegates to the shared cp implementation and records its exit code."""
    self.exit_code = cp_command_util.run_cp(args)
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CpAlpha(Cp):
  """Upload, download, and copy Cloud Storage objects."""

  # Inherits Args and Run from Cp; only the examples differ, with the
  # alpha-only custom-contexts examples appended.
  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES + _ALPHA_EXAMPLES,
  }

View File

@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.storage import expansion
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_parallel
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.Hidden
@base.Deprecate(is_removed=False, warning='This command is deprecated. '
                'Use `gcloud alpha storage rm` instead.')
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(base.Command):
  """Delete Cloud Storage objects and buckets."""

  detailed_help = {
      'DESCRIPTION': """\
      *{command}* lets you delete Cloud Storage objects and buckets. You can
      specify one or more paths (including wildcards) and all matching objects
      and buckets will be deleted.
      """,
      'EXAMPLES': """\
      To delete an object, run:

        $ *{command}* gs://mybucket/a.txt

      To delete all objects in a directory, run:

        $ *{command}* gs://mybucket/remote-dir/*

      The above command will delete all objects under remote-dir/ but not its sub-directories.

      To delete a directory and all its objects and subdirectories, run:

        $ *{command}* --recursive gs://mybucket/remote-dir
        $ *{command}* gs://mybucket/remote-dir/**

      To delete all objects and subdirectories of a directory, without deleting the directory
      itself, run:

        $ *{command}* --recursive gs://mybucket/remote-dir/*

        or

        $ *{command}* gs://mybucket/remote-dir/**

      To delete all objects and directories in a bucket without deleting the bucket itself, run:

        $ *{command}* gs://mybucket/**

      To delete all text files in a bucket or a directory, run:

        $ *{command}* gs://mybucket/*.txt
        $ *{command}* gs://mybucket/remote-dir/*.txt

      To go beyond directory boundary and delete all text files in a bucket or a directory, run:

        $ *{command}* gs://mybucket/**/*.txt
        $ *{command}* gs://mybucket/remote-dir/**/*.txt

      To delete a bucket, run:

        $ *{command}* gs://mybucket

      You can use wildcards in bucket names. To delete all buckets with prefix of `my`, run:

        $ *{command}* --recursive gs://my*
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the positional paths and deletion-control flags."""
    parser.add_argument(
        'path',
        nargs='+',
        help='The path of objects and directories to delete. The path must '
             'begin with gs:// and may or may not contain wildcard characters.')
    parser.add_argument(
        '--recursive',
        action='store_true',
        help='Recursively delete the contents of any directories that match '
             'the path expression.')
    parser.add_argument(
        '--num-threads',
        type=int,
        hidden=True,
        default=16,
        help='The number of threads to use for the delete.')
    flags.add_additional_headers_flag(parser)

  def Run(self, args):
    """Expands the given paths and deletes all matching objects and buckets.

    Args:
      args: The parsed command arguments.

    Raises:
      exceptions.RequiredArgumentException: If directories match but
        --recursive was not supplied.
    """
    # 'path' uses nargs='+' so it is normally non-empty; the fallback is
    # purely defensive.
    paths = args.path or ['gs://']
    expander = expansion.GCSPathExpander()
    objects, dirs = expander.ExpandPaths(paths)
    if dirs and not args.recursive:
      raise exceptions.RequiredArgumentException(
          '--recursive',
          'Source path matches directories but --recursive was not specified.')

    # A matched "directory" whose object name is empty is really a bucket;
    # deleting it recursively means deleting the bucket itself at the end.
    buckets = []
    dir_paths = []
    for d in dirs:
      obj_ref = storage_util.ObjectReference.FromUrl(d, allow_empty_object=True)
      if not obj_ref.name:
        buckets.append(obj_ref.bucket_ref)
      dir_paths.append(d + '**')
    sub_objects, _ = expander.ExpandPaths(dir_paths)
    objects.update(sub_objects)

    tasks = []
    for o in sorted(objects):
      tasks.append(storage_parallel.ObjectDeleteTask(
          storage_util.ObjectReference.FromUrl(o)))

    if buckets:
      # Extra warnings and confirmation if any buckets will be deleted.
      log.warning('Deleting a bucket is irreversible and makes that bucket '
                  'name available for others to claim.')
      message = 'This command will delete the following buckets:\n '
      message += '\n '.join([b.bucket for b in buckets])
      console_io.PromptContinue(
          message=message, throw_if_unattended=True, cancel_on_no=True)

    # TODO(b/120033753): Handle long lists of items.
    message = 'You are about to delete the following:'
    message += ''.join(['\n ' + b.ToUrl() for b in buckets])
    message += ''.join(['\n ' + t.obj_ref.ToUrl() for t in tasks])
    console_io.PromptContinue(
        message=message, throw_if_unattended=True, cancel_on_no=True)

    storage_parallel.ExecuteTasks(tasks, num_threads=args.num_threads,
                                  progress_bar_label='Deleting Files')
    # "!= 1" (not "> 1") so zero deletions prints "0 files" rather than
    # the ungrammatical "0 file".
    log.status.write(
        'Deleted [{}] file{}.\n'.format(
            len(tasks), 's' if len(tasks) != 1 else ''))

    # Buckets are deleted after their contents so the deletes can succeed.
    storage_client = storage_api.StorageClient()
    for b in buckets:
      storage_client.DeleteBucket(b)
      log.DeletedResource(b.ToUrl(), kind='bucket')

View File

@@ -0,0 +1,437 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Diagnose Google Cloud Storage common issues."""
import enum
import os
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.api_lib.storage.gcs_json import client as gcs_json_client
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors as command_errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.diagnose import direct_connectivity_diagnostic
from googlecloudsdk.command_lib.storage.diagnose import download_throughput_diagnostic as download_throughput_diagnostic_lib
from googlecloudsdk.command_lib.storage.diagnose import export_util
from googlecloudsdk.command_lib.storage.diagnose import latency_diagnostic as latency_diagnostic_lib
from googlecloudsdk.command_lib.storage.diagnose import system_info
from googlecloudsdk.command_lib.storage.diagnose import upload_throughput_diagnostic as upload_throughput_diagnostic_lib
from googlecloudsdk.command_lib.storage.resources import gcs_resource_reference
from googlecloudsdk.core import log
_OBJECT_SIZE_UPPER_BOUND = '1GB'
def get_bucket_resource(
    bucket_url: storage_url.StorageUrl,
) -> gcs_resource_reference.GcsBucketResource:
  """Looks up bucket metadata for the bucket named by the given URL.

  Args:
    bucket_url: Storage URL identifying the bucket.

  Returns:
    The bucket resource fetched via the GCS JSON API.

  Raises:
    FatalError: If the metadata request to the API fails.
  """
  client = gcs_json_client.JsonClient()
  try:
    resource = client.get_bucket(bucket_url.bucket_name)
  except api_errors.CloudApiError as error:
    # Diagnostics cannot proceed without bucket metadata, so treat any API
    # failure here as fatal.
    raise command_errors.FatalError(
        f'Bucket metadata could not be fetched for {bucket_url.bucket_name}'
    ) from error
  return resource
def _validate_args(args):
  """Validates the command arguments, raising on the first invalid one.

  Args:
    args: The parsed command arguments.

  Raises:
    ValueError: If --export is requested with a destination that is not an
      existing directory.
  """
  errors_util.raise_error_if_not_gcs_bucket(
      args.command_path, storage_url.storage_url_from_string(args.url)
  )
  # os.path.isdir is False for non-existent paths, so the separate
  # os.path.exists check the original performed was redundant.
  if args.export and args.destination and not os.path.isdir(args.destination):
    raise ValueError(
        f'Invalid destination path: {args.destination}. Please provide'
        ' a valid path.'
    )
class TestType(enum.Enum):
  """Enum class for specifying performance test type for diagnostic tests."""

  # Verifies uploads over the Direct Connectivity network path.
  DIRECT_CONNECTIVITY = 'DIRECT_CONNECTIVITY'
  # Measures bytes transferred per second while downloading test objects.
  DOWNLOAD_THROUGHPUT = 'DOWNLOAD_THROUGHPUT'
  # Measures bytes transferred per second while uploading test objects.
  UPLOAD_THROUGHPUT = 'UPLOAD_THROUGHPUT'
  # Measures per-operation latency for write / metadata / read cycles.
  LATENCY = 'LATENCY'
@base.DefaultUniverseOnly
class Diagnose(base.Command):
  """Diagnose Google Cloud Storage."""

  detailed_help = {
      'DESCRIPTION': """
      The diagnose command runs a series of diagnostic tests for common gcloud
      storage issues.

      The `URL` argument must name an existing bucket for which the user
      already has write permissions. Standard billing also applies.
      Several test files/objects will be uploaded and downloaded to this bucket
      to gauge the performance metrics. All the temporary files will be
      deleted on successful completion of the command.

      By default, the command executes `DOWNLOAD_THROUGHPUT`,
      `UPLOAD_THROUGHPUT` and `LATENCY` tests. Tests to execute can be
      overridden by using the `--test-type` flag.

      Each test uses the command defaults or gcloud CLI configurations for
      performing the operations. This command also provides a way to override
      these values via means of different flags like `--process-count`,
      `--thread-count`, `--download-type`, etc.

      The command outputs a diagnostic report with system information like free
      memory, available CPU, average CPU load per test, disk counter deltas and
      diagnostic information specific to individual tests on successful
      completion.
      """,
      'EXAMPLES': """
      The following command runs the default diagnostic tests on ``my-bucket''
      bucket:

      $ {command} gs://my-bucket

      The following command runs only UPLOAD_THROUGHPUT and DOWNLOAD_THROUGHPUT
      diagnostic tests:

      $ {command} gs://my-bucket --test-type=UPLOAD_THROUGHPUT,DOWNLOAD_THROUGHPUT

      The following command runs the diagnostic tests using ``10'' objects of
      ``1MiB'' size each with ``10'' threads and ``10'' processes at max:

      $ {command} gs://my-bucket --no-of-objects=10 --object-size=1MiB
      --process-count=10 --thread-count=10

      The following command can be used to bundle and export the diagnostic
      information to a user defined ``PATH'' destination:

      $ {command} gs://my-bucket --export --destination=<PATH>
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the diagnostic test configuration flags."""
    parser.SetSortArgs(False)
    parser.add_argument(
        'url',
        type=str,
        help='Bucket URL to use for the diagnostic tests.',
    )
    parser.add_argument(
        '--test-type',
        type=arg_parsers.ArgList(
            choices=sorted([option.value for option in TestType])
        ),
        metavar='TEST_TYPES',
        # NOTE: the DOWNLOAD/UPLOAD descriptions were previously swapped
        # (download claimed to upload and vice versa); fixed here.
        help="""
        Tests to run as part of this diagnosis. Following tests are supported:

        DIRECT_CONNECTIVITY: Run a test upload over the Direct Connectivity
        network path and run other diagnostics if the upload fails.

        DOWNLOAD_THROUGHPUT: Download objects from the specified bucket and
        record the number of bytes transferred per second.

        UPLOAD_THROUGHPUT: Upload objects to the specified bucket and
        record the number of bytes transferred per second.

        LATENCY: Write the objects, retrieve their metadata, read the objects,
        and record latency of each operation.
        """,
        default=[],
    )
    parser.add_argument(
        '--download-type',
        choices=sorted([
            option.value
            for option in download_throughput_diagnostic_lib.DownloadType
        ]),
        default=download_throughput_diagnostic_lib.DownloadType.FILE,
        help="""
        Download strategy to use for the DOWNLOAD_THROUGHPUT diagnostic test.

        STREAMING: Downloads the file in memory, does not use parallelism.
        `--process-count` and `--thread-count` flag values will be ignored if
        provided.

        SLICED: Performs a [sliced download](https://cloud.google.com/storage/docs/sliced-object-downloads)
        of objects to a directory.
        Parallelism can be controlled via `--process-count` and `--thread-count`
        flags.

        FILE: Download objects as files. Parallelism can be controlled via
        `--process-count` and `--thread-count` flags.
        """,
    )
    parser.add_argument(
        '--logs-path',
        help=(
            'If the diagnostic supports writing logs, write the logs to this'
            ' file location.'
        ),
    )
    parser.add_argument(
        '--upload-type',
        choices=sorted([
            option.value
            for option in upload_throughput_diagnostic_lib.UploadType
        ]),
        default=upload_throughput_diagnostic_lib.UploadType.FILE,
        help="""
        Upload strategy to use for the _UPLOAD_THROUGHPUT_ diagnostic test.

        FILE: Uploads files to a bucket. Parallelism can be controlled via
        `--process-count` and `--thread-count` flags.

        PARALLEL_COMPOSITE: Uploads files using a [parallel
        composite strategy](https://cloud.google.com/storage/docs/parallel-composite-uploads).
        Parallelism can be controlled via `--process-count` and `--thread-count`
        flags.

        STREAMING: Streams the data to the bucket, does not use parallelism.
        `--process-count` and `--thread-count` flag values will be ignored if
        provided.
        """,
    )
    parser.add_argument(
        '--process-count',
        type=arg_parsers.BoundedInt(lower_bound=1),
        help='Number of processes at max to use for each diagnostic test.',
    )
    parser.add_argument(
        '--thread-count',
        type=arg_parsers.BoundedInt(lower_bound=1),
        help='Number of threads at max to use for each diagnostic test.',
    )
    object_properties_group = parser.add_group(
        sort_args=False, help='Object properties:'
    )
    object_properties_group.add_argument(
        '--object-count',
        required=True,
        type=arg_parsers.BoundedInt(lower_bound=1),
        help='Number of objects to use for each diagnostic test.',
    )
    # Size is either one uniform --object-size or an explicit per-object
    # --object-sizes list; the two are mutually exclusive.
    object_size_properties_group = object_properties_group.add_group(
        mutex=True,
        sort_args=False,
        help='Object size properties:',
        required=True,
    )
    object_size_properties_group.add_argument(
        '--object-size',
        type=arg_parsers.BinarySize(upper_bound=_OBJECT_SIZE_UPPER_BOUND),
        help='Object size to use for the diagnostic tests.',
    )
    object_size_properties_group.add_argument(
        '--object-sizes',
        metavar='OBJECT_SIZES',
        type=arg_parsers.ArgList(
            element_type=arg_parsers.BinarySize(
                upper_bound=_OBJECT_SIZE_UPPER_BOUND
            )
        ),
        help="""
        List of object sizes to use for the tests. Sizes should be
        provided for each object specified using `--object-count` flag.
        """,
    )
    export_group = parser.add_group(
        sort_args=False, help='Export diagnostic bundle.'
    )
    export_group.add_argument(
        '--export',
        action='store_true',
        required=True,
        help="""
        Generate and export a diagnostic bundle. The following
        information will be bundled and exported into a gzipped tarball
        (.tar.gz):

        - Latest gcloud CLI logs.
        - Output of running the `gcloud storage diagnose` command.
        - Output of running the `gcloud info --anonymize` command.

        Note: This command generates a bundle containing system information like
        disk counter deltas, CPU information and system configurations. Please
        exercise caution while sharing.
        """,
    )
    export_group.add_argument(
        '--destination',
        type=str,
        help=(
            'Destination file path where the diagnostic bundle will be'
            ' exported.'
        ),
    )
    parser.display_info.AddFormat("""
        table(
            name,
            operation_results[]:format='table[box](name,payload_description:wrap,result:wrap)'
        )
    """)

  def _run_tests_with_performance_tracking(
      self, args, url_object, tests_to_run
  ):
    """Runs the requested tests while tracking system performance.

    Args:
      args: The parsed command arguments.
      url_object: Storage URL of the bucket under test.
      tests_to_run: List of TestType values (strings) to execute.

    Returns:
      List of diagnostic results, ending with the system-info result.

    Raises:
      ValueError: If --object-sizes does not have --object-count entries.
    """
    object_sizes = None
    if args.object_count:
      if args.object_sizes:
        if len(args.object_sizes) != args.object_count:
          raise ValueError(
              'Number of object sizes provided should match the number of'
              ' objects.'
          )
        else:
          object_sizes = args.object_sizes
      elif args.object_size:
        # Uniform size: replicate the single size for every object.
        object_sizes = [args.object_size] * args.object_count

    system_info_provider = system_info.get_system_info_provider()
    test_results = []
    # The context manager appends a disk-IO-delta result that spans all the
    # tests executed inside it.
    with system_info.get_disk_io_stats_delta_diagnostic_result(
        system_info_provider, test_results
    ):
      if TestType.LATENCY.value in tests_to_run:
        latency_diagnostic = latency_diagnostic_lib.LatencyDiagnostic(
            url_object,
            object_sizes,
        )
        latency_diagnostic.execute()
        test_results.append(latency_diagnostic.result)
      if TestType.DOWNLOAD_THROUGHPUT.value in tests_to_run:
        download_type = download_throughput_diagnostic_lib.DownloadType(
            args.download_type
        )
        download_throughput_diagnostic = (
            download_throughput_diagnostic_lib.DownloadThroughputDiagnostic(
                url_object,
                download_type,
                object_sizes,
                process_count=args.process_count,
                thread_count=args.thread_count,
            )
        )
        download_throughput_diagnostic.execute()
        test_results.append(download_throughput_diagnostic.result)
      if TestType.UPLOAD_THROUGHPUT.value in tests_to_run:
        upload_type = upload_throughput_diagnostic_lib.UploadType(
            args.upload_type
        )
        upload_throughput_diagnostic = (
            upload_throughput_diagnostic_lib.UploadThroughputDiagnostic(
                url_object,
                upload_type,
                object_sizes,
                process_count=args.process_count,
                thread_count=args.thread_count,
            )
        )
        upload_throughput_diagnostic.execute()
        test_results.append(upload_throughput_diagnostic.result)
    # Capture the system information last so the CPU load average can
    # account for the diagnostic test runs.
    test_results.append(
        system_info.get_system_info_diagnostic_result(system_info_provider)
    )
    return test_results

  def Run(self, args):
    """Validates args, runs the selected diagnostics, optionally exports.

    Returns:
      The list of diagnostic results for report rendering, or None when the
      results were exported as a bundle instead.
    """
    default_tests = [
        TestType.DOWNLOAD_THROUGHPUT.value,
        TestType.LATENCY.value,
        TestType.UPLOAD_THROUGHPUT.value,
    ]
    _validate_args(args)
    url_object = storage_url.storage_url_from_string(args.url)
    bucket_resource = get_bucket_resource(url_object)
    log.status.Print(
        f'Using {bucket_resource.name} bucket for the diagnostic tests.'
    )
    log.status.Print(f'Bucket location : {bucket_resource.location}')
    log.status.Print(
        f'Bucket storage class : {bucket_resource.default_storage_class}'
    )
    if args.test_type:
      tests_to_run = args.test_type
    else:
      tests_to_run = default_tests
    # Direct-connectivity-only runs skip the performance-tracking harness.
    if tests_to_run == [TestType.DIRECT_CONNECTIVITY.value]:
      test_results = []
    else:
      test_results = self._run_tests_with_performance_tracking(
          args, url_object, tests_to_run
      )
    if TestType.DIRECT_CONNECTIVITY.value in tests_to_run:
      direct_connectivity = (
          direct_connectivity_diagnostic.DirectConnectivityDiagnostic(
              bucket_resource,
              logs_path=args.logs_path,
          )
      )
      direct_connectivity.execute()
      test_results.append(direct_connectivity.result)
    if args.export:
      log.status.Print('Exporting diagnostic bundle...')
      export_path = export_util.export_diagnostic_bundle(
          test_results, args.destination
      )
      log.status.Print(
          'Successfully exported diagnostic bundle to {}'.format(export_path)
      )
      return None
    log.status.Print('Generating diagnostic report...')
    return test_results

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like du command for cloud storage providers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import fnmatch
import sys
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import du_command_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import regex_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core.util import files
_COMMAND_DESCRIPTION = """
Displays the amount of space in bytes used by the objects in a bucket,
subdirectory, or project. This command calculates the current space usage
by making a series of object listing requests, which can take a long time
for large buckets. If your bucket contains hundreds of thousands of
objects, or if you want to monitor your bucket size over time, use
Monitoring instead, as described in [Get bucket size](https://cloud.google.com/storage/docs/getting-bucket-size)
"""
_GA_EXAMPLES = """
To list the size of each object in a bucket:
$ {command} gs://bucketname
To list the size of each object in the prefix subdirectory:
$ {command} gs://bucketname/prefix/*
To print the total number of bytes in a bucket in human-readable form:
$ {command} -c gs://bucketname
To see a summary of the total number of bytes in two given buckets:
$ {command} -s gs://bucket1 gs://bucket2
To list the size of each object in a bucket with Object Versioning
enabled, including noncurrent objects:
$ {command} -a gs://bucketname
To list the size of each object in a bucket, except objects that end in
".bak", with each object printed ending in a null byte:
$ {command} -e "*.bak" -0 gs://bucketname
To list the size of each bucket in a project and the total size of the
project:
$ {command} --summarize --readable-sizes --total
"""
_ALPHA_EXAMPLES = """
The following command summarizes the size of objects that match the specified
filter.
$ {command} gs://my-bucket --metadata-filter='contexts."foo"="bar"' --summarize
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Du(base.Command):
  """Displays the amount of space in bytes used by storage resources."""

  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the du positional URLs and gsutil-compatible flags."""
    parser.add_argument('url', nargs='*', help='The url of objects to list.')
    parser.add_argument(
        '-0',
        '--zero-terminator',
        action='store_true',
        help=(
            'Ends each output line with a 0 byte rather than a newline. You'
            ' can use this to make the output machine-readable.'
        ),
    )
    parser.add_argument(
        '-a',
        '--all-versions',
        action='store_true',
        help='Includes noncurrent object versions for a bucket with Object'
        ' Versioning enabled. Also prints the generation and metageneration'
        ' number for each listed object.')
    parser.add_argument(
        '-c',
        '--total',
        action='store_true',
        help='Includes a total size of all input sources.',
    )
    parser.add_argument(
        '-e',
        '--exclude-name-pattern',
        action='append',
        default=[],
        help=(
            'Exclude a pattern from the report. Example: `-e "*.o"` excludes'
            ' any object that ends in ".o". Can be specified multiple times.'
        ),
    )
    parser.add_argument(
        '-r',
        '--readable-sizes',
        action='store_true',
        help=(
            'Prints object sizes in human-readable format. For example, 1 KiB,'
            ' 234 MiB, or 2GiB.'
        ),
    )
    parser.add_argument(
        '-s',
        '--summarize',
        action='store_true',
        help='Displays only the summary for each argument.',
    )
    parser.add_argument(
        '-X',
        '--exclude-name-pattern-file',
        help=(
            'Similar to -e, but excludes patterns from the given file.'
            ' The patterns to exclude should be listed one per line.'
        ),
    )
    flags.add_additional_headers_flag(parser)
    # Metadata filtering is only exposed on the alpha track.
    if cls.ReleaseTrack() == base.ReleaseTrack.ALPHA:
      flags.add_metadata_filter_flag(parser)

  def Run(self, args):
    """Validates the URLs and runs the du listing.

    Args:
      args: The parsed command arguments.

    Raises:
      errors.InvalidUrlError: If a URL is not a valid cloud URL.
      errors.Error: If --metadata-filter is used with a non-GCS URL.
    """
    use_gsutil_style = flags.check_if_use_gsutil_style(args)
    # The flag only exists on the alpha track; default to None elsewhere.
    metadata_filter = getattr(args, 'metadata_filter', None)
    if args.url:
      storage_urls = []
      for url_string in args.url:
        url_object = storage_url.storage_url_from_string(url_string)
        if not isinstance(url_object, storage_url.CloudUrl):
          raise errors.InvalidUrlError(
              'Du only works for valid cloud URLs.'
              ' {} is an invalid cloud URL.'.format(url_object.url_string)
          )
        if (
            metadata_filter is not None
            and url_object.scheme != cloud_api.DEFAULT_PROVIDER
        ):
          raise errors.Error('Metadata filter is only supported for GCS URLs.')
        storage_urls.append(url_object)
    else:
      # No URL given: report on all buckets of the default provider.
      storage_urls = [storage_url.CloudUrl(cloud_api.DEFAULT_PROVIDER)]

    # Copy the parsed list so extending it with -X patterns below does not
    # mutate args.exclude_name_pattern in place.
    exclude_fnmatch_strings = list(args.exclude_name_pattern)
    if args.exclude_name_pattern_file:
      if args.exclude_name_pattern_file == '-':
        # "-" reads newline-delimited patterns from stdin, gsutil-style.
        exclude_fnmatch_strings.extend([line.strip() for line in sys.stdin])
      else:
        with files.FileReader(args.exclude_name_pattern_file) as pattern_file:
          exclude_fnmatch_strings.extend(
              [line.strip() for line in pattern_file])
    # Shell-style patterns are converted to regexes for the executor.
    exclude_regex_strings = [
        fnmatch.translate(pattern) for pattern in exclude_fnmatch_strings
    ]

    du_command_util.DuExecutor(
        cloud_urls=storage_urls,
        exclude_patterns=regex_util.Patterns(exclude_regex_strings),
        object_state=flags.get_object_state_from_flags(args),
        readable_sizes=args.readable_sizes,
        summarize=args.summarize,
        total=args.total,
        use_gsutil_style=use_gsutil_style,
        zero_terminator=args.zero_terminator,
        list_filter=metadata_filter,
    ).list_urls()
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class DuAlpha(Du):
  """Displays the amount of space in bytes used by storage resources."""

  # Alpha reuses the GA implementation (Du.Args adds --metadata-filter when
  # running on this track); only the help text gains alpha examples.
  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES + _ALPHA_EXAMPLES,
  }

View File

@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage folders commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class Folders(base.Group):
  """Manage Cloud Storage folders."""

  def Filter(self, context, args):
    """Runs before every command in the group; enforces a project ID."""
    # TODO(b/190541521): Determine if command group works with project number
    base.RequireProjectID(args)
    del context, args

View File

@@ -0,0 +1,76 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for making folders in HNS buckets."""
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Create(base.Command):
  """Create folders for hierarchical namespace bucket."""

  detailed_help = {
      'DESCRIPTION': 'Create folders.',
      'EXAMPLES': """
      The following command creates a folder called `folder/` in a bucket
      named `my-bucket`:

        $ {command} gs://my-bucket/folder/

      The following command creates all folders in the path `A/B/C/D` in a
      bucket named `my-bucket`:

        $ {command} --recursive gs://my-bucket/folder/A/B/C/D
      """,
  }

  @staticmethod
  def Args(parser):
    """Adds arguments specific to this command to parser."""
    parser.add_argument(
        'url', type=str, nargs='+', help='The URLs of the folders to create.'
    )
    parser.add_argument(
        '--recursive',
        action='store_true',
        # Typo fix: "alraedy" -> "already".
        help=(
            'Recursively create all folders in a given path if they do not'
            ' already exist.'
        ),
    )
    flags.add_additional_headers_flag(parser)

  def Run(self, args):
    """Validates every URL first, then creates the requested folders.

    Args:
      args: The parsed command arguments.
    """
    # Validate all URLs up front so one bad argument fails the command
    # before any folder is created.
    urls = []
    for url_string in args.url:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_gcs_folder_type(
          args.command_path, url, 'folder'
      )
      urls.append(url)

    for url in urls:
      client = api_factory.get_api(url.scheme)
      log.status.Print('Creating {}...'.format(url))
      client.create_folder(url.bucket_name, url.resource_name, args.recursive)

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of command for deleting folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import folder_util
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import rm_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
@base.DefaultUniverseOnly
class Delete(base.Command):
  """Delete folders."""

  detailed_help = {
      'DESCRIPTION': """Delete folders.""",
      # Typo fix: "namesapce" -> "namespace".
      'EXAMPLES': """
      The following command deletes a folder named `folder`
      in a hierarchical namespace bucket
      called `my-bucket`:

        $ {command} gs://my-bucket/folder/
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the positional folder URLs and shared headers flag."""
    parser.add_argument(
        'urls',
        type=str,
        nargs='+',
        help='The URLs of the folders to delete.',
    )
    flags.add_additional_headers_flag(parser)

  def Run(self, args):
    """Validates the URLs then removes all matching folders.

    Args:
      args: The parsed command arguments.
    """
    # Fail fast on any URL that is not a GCS folder-type URL before the
    # expansion/removal machinery starts.
    for url_string in args.urls:
      url = storage_url.storage_url_from_string(url_string)
      errors_util.raise_error_if_not_gcs_folder_type(
          args.command_path, url, 'folder'
      )
    # Expand wildcards into concrete folder resources; objects are neither
    # listed nor deleted.
    folder_expansion_iterator = name_expansion.NameExpansionIterator(
        args.urls,
        folder_setting=folder_util.FolderSetting.LIST_WITHOUT_OBJECTS,
        raise_error_for_unmatched_urls=True,
    )
    self.exit_code = rm_command_util.remove_folders(
        folder_expansion_iterator,
        task_status_queue=task_graph_executor.multiprocessing_context.Queue(),
        raise_error_for_unmatched_urls=True,
    )

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for making folders in HNS buckets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describe hierarchical namespace bucket folders."""

  detailed_help = {
      'DESCRIPTION': """Describe hierarchical namespace bucket folders.""",
      'EXAMPLES': """
      The following command shows information about a folder named
      `folder` in an hierarchical namespace bucket called `my-bucket`:

        $ {command} gs://my-bucket/folder/
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the positional folder URL and display flags."""
    parser.add_argument(
        'url',
        type=str,
        help='The URL of the folder to describe.',
    )
    flags.add_additional_headers_flag(parser)
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Fetches the folder resource and returns its display dictionary."""
    url = storage_url.storage_url_from_string(args.url)
    # Only GCS folder-type URLs are valid for this command.
    errors_util.raise_error_if_not_gcs_folder_type(
        args.command_path, url, 'folder'
    )
    client = api_factory.get_api(url.scheme)
    resource = client.get_folder(
        url.bucket_name,
        url.resource_name,
    )
    # Convert the resource into the dict the describe renderer expects;
    # --raw switches to the unformatted metadata keys.
    return resource_util.get_display_dict_for_resource(
        resource,
        full_resource_formatter.FolderDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of command for listing folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import folder_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
@base.DefaultUniverseOnly
class List(base.ListCommand):
"""List folders."""
detailed_help = {
'DESCRIPTION': """List folders.""",
'EXAMPLES': """
The following command lists all folders in a hierarchical namespace bucket:
$ {command} gs://my-bucket/
The following command lists all folders under a parent folder:
$ {command} gs://my-bucket/parent-folder/
You can use [wildcards](https://cloud.google.com/storage/docs/wildcards)
to match multiple paths (including multiple buckets). Bucket wildcards are
expanded to match only buckets contained in your current project. The
following command matches folders that are stored in buckets
in your project that begin with ``my-b'':
$ {command} gs://my-b*/
Following is another example where we are listing all folders that
begin with ``B'' under a given bucket:
$ {command} gs://my-bucket/B*
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'url', type=str, nargs='+', help='The URLs of the resources to list.'
)
flags.add_additional_headers_flag(parser)
flags.add_raw_display_flag(parser)
def Run(self, args):
urls = []
for url_string in args.url:
url = storage_url.storage_url_from_string(url_string)
errors_util.raise_error_if_not_gcs(args.command_path, url)
urls.append(url)
for url in urls:
for resource in wildcard_iterator.CloudWildcardIterator(
url.join('**'),
folder_setting=folder_util.FolderSetting.LIST_WITHOUT_OBJECTS,
):
yield resource_util.get_display_dict_for_resource(
resource,
full_resource_formatter.FolderDisplayTitlesAndDefaults,
display_raw_keys=args.raw,
)

View File

@@ -0,0 +1,190 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of hash command for getting formatted file hashes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
import binascii
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import encryption_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import fast_crc32c_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import hash_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import resource_reference
from googlecloudsdk.command_lib.util import crc32c
from googlecloudsdk.core import log
_DIGEST_FORMAT_KEY = 'digest_format'
_CRC32C_HASH_KEY = 'crc32c_hash'
_MD5_HASH_KEY = 'md5_hash'
_URL_KEY = 'url'
def _convert_base64_to_hex(base64_string):
"""Converts base64 hash digest to hex-formatted hash digest string."""
if base64_string is None:
return None
return binascii.hexlify(
base64.b64decode(
base64_string.strip('\n"\'').encode('utf-8'))).decode('utf-8')
def _is_object_or_file_resource(resource):
return isinstance(resource, (resource_reference.ObjectResource,
resource_reference.FileObjectResource))
def _get_resource_iterator(url_strings):
"""Wildcard matches and recurses into top-level of buckets."""
any_url_matched = False
for url_string in url_strings:
wildcard_expanded_iterator = wildcard_iterator.get_wildcard_iterator(
url_string,
error_on_missing_key=False,
fetch_encrypted_object_hashes=True)
this_url_matched = False
for wildcard_expanded_resource in wildcard_expanded_iterator:
if _is_object_or_file_resource(wildcard_expanded_resource):
any_url_matched = this_url_matched = True
yield wildcard_expanded_resource
elif (isinstance(wildcard_expanded_resource.storage_url,
storage_url.CloudUrl) and
wildcard_expanded_resource.storage_url.is_bucket()):
bucket_expanded_iterator = wildcard_iterator.get_wildcard_iterator(
wildcard_expanded_resource.storage_url.join('*').url_string,
error_on_missing_key=False)
for bucket_expanded_resource in bucket_expanded_iterator:
if isinstance(bucket_expanded_resource,
(resource_reference.ObjectResource)):
any_url_matched = this_url_matched = True
yield bucket_expanded_resource
if not this_url_matched:
log.warning('No matches found for {}'.format(url_string))
if not any_url_matched:
raise errors.InvalidUrlError('No URLS matched.')
@base.UniverseCompatible
class Hash(base.Command):
"""Calculates hashes on local or cloud files."""
detailed_help = {
'DESCRIPTION':
"""
Calculates hashes on local or cloud files that can be used to compare with
"gcloud storage ls -L" output. If a specific hash option is not provided,
this command calculates all gcloud storage-supported hashes for the file.
Note that gcloud storage automatically performs hash validation when
uploading or downloading files, so this command is only needed if you want
to write a script that separately checks the hash for some reason.
If you calculate a CRC32C hash for the file without a precompiled
google-crc32c installation, hashing will be very slow.
""",
'EXAMPLES':
"""
To get the MD5 and CRC32C hash digest of a cloud object in Base64 format:
$ {command} gs://bucket/object
To get just the MD5 hash digest of a local object in hex format:
$ {command} /dir/object.txt --skip-crc32c --hex
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'urls', nargs='+', help='Local or cloud URLs of objects to hash.')
parser.add_argument(
'--hex',
action='store_true',
help='Output hash digests in hex format. By default, digests are'
' displayed in base64.')
skip_flags_group = parser.add_group(mutex=True)
skip_flags_group.add_argument(
'--skip-crc32c',
action='store_true',
help='Skip CRC32C hash calculation. Useful if command is running slow.')
skip_flags_group.add_argument(
'--skip-md5',
action='store_true',
help='Skip MD5 hash calculation. Useful if command is running slow.')
flags.add_encryption_flags(parser, command_only_reads_data=True)
flags.add_additional_headers_flag(parser)
def Run(self, args):
encryption_util.initialize_key_store(args)
if not args.skip_crc32c:
if fast_crc32c_util.should_use_gcloud_crc32c():
crc32c_implementation = 'gcloud-crc32c (Go binary)'
elif crc32c.IS_FAST_GOOGLE_CRC32C_AVAILABLE:
crc32c_implementation = 'google-crc32c (Python binary)'
else:
crc32c_implementation = 'crcmod (slow pure Python implementation)'
log.info('CRC32C implementation: {}'.format(crc32c_implementation))
if args.hex:
hash_format = 'hex'
format_cloud_digest = _convert_base64_to_hex
format_file_hash_object = lambda x: x.hexdigest()
else:
hash_format = 'base64'
format_cloud_digest = lambda x: x
format_file_hash_object = hash_util.get_base64_hash_digest_string
for resource in _get_resource_iterator(args.urls):
output_dict = {
_DIGEST_FORMAT_KEY: hash_format,
}
if isinstance(resource, resource_reference.ObjectResource):
if resource.crc32c_hash is None and resource.md5_hash is None:
log.warning('No hashes found for {}'.format(resource))
continue
output_dict[_URL_KEY] = resource.storage_url.versionless_url_string
if not args.skip_crc32c:
output_dict[_CRC32C_HASH_KEY] = format_cloud_digest(
resource.crc32c_hash)
if not args.skip_md5:
output_dict[_MD5_HASH_KEY] = format_cloud_digest(resource.md5_hash)
else: # FileObjectResource
output_dict[_URL_KEY] = resource.storage_url.resource_name
if not args.skip_crc32c:
output_dict[_CRC32C_HASH_KEY] = format_file_hash_object(
hash_util.get_hash_from_file(
resource.storage_url.resource_name,
hash_util.HashAlgorithm.CRC32C,
)
)
if not args.skip_md5:
output_dict[_MD5_HASH_KEY] = format_file_hash_object(
hash_util.get_hash_from_file(
resource.storage_url.resource_name,
hash_util.HashAlgorithm.MD5,
)
)
yield output_dict

View File

@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage HMAC commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.UniverseCompatible
class Hmac(base.Group):
"""Manage Cloud Storage service account HMAC keys."""
def Filter(self, context, args):
# TODO(b/190541521): Determine if command group works with project number
base.RequireProjectID(args)
del context, args

View File

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for HMAC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class Create(base.Command):
"""Add a service account HMAC."""
detailed_help = {
'DESCRIPTION': """
*{command}* command creates an HMAC key for the specified service
account. The secret key material is only available upon creation, so be
sure to store the returned secret along with the access_id.
""",
'EXAMPLES': """
To create an HMAC key for
``test.service.account@test_project.iam.gserviceaccount.com'':
$ {command} test.service.account@test_project.iam.gserviceaccount.com
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'service_account', type=str, help='The service account email.')
def Run(self, args):
service_account = args.service_account
api = api_factory.get_api(storage_url.ProviderPrefix.GCS)
response = api.create_hmac_key(service_account)
return response.metadata

View File

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of delete command for HMAC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class Delete(base.Command):
"""Remove a service account HMAC."""
detailed_help = {
'DESCRIPTION': """
*{command}* permanently deletes the specified HMAC key. Note that keys
must be updated to be in the ``INACTIVE'' state before they can be
deleted.
""",
'EXAMPLES': """
To delete a specific HMAC key:
$ {command} GOOG56JBMFZX6PMPTQ62VD2
To be prompted for HMAC keys to delete:
$ {command}
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'access_id',
help=textwrap.dedent("""\
Access ID for HMAC key to delete."""))
def Run(self, args):
api = api_factory.get_api(storage_url.ProviderPrefix.GCS)
response = api.delete_hmac_key(args.access_id)
return response

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command for HMAC key."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class Describe(base.DescribeCommand):
"""Describes a service account HMAC key."""
detailed_help = {
'DESCRIPTION': """
*{command}* retrieves the specified HMAC key's metadata. Note that there
is no option to retrieve a key's secret material after it has
been created.
""",
'EXAMPLES': """
The following command retrieves the HMAC key's metadata:
$ {command} GOOG56JBMFZX6PMPTQ62VD2
Note `GOOG56JBMFZX6PMPTQ62VD2` is the `ACCESS_ID` of the HMAC key.
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'access_id',
type=str,
help=(
'The [Access ID](https://cloud.google.com/'
'storage/docs/authentication/hmackeys#overview) of the HMAC key'
),
)
def Run(self, args):
hmac_resource = api_factory.get_api(
storage_url.ProviderPrefix.GCS
).get_hmac_key(args.access_id)
return hmac_resource.metadata

View File

@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of list command for HMAC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class List(base.ListCommand):
"""List service account HMAC keys."""
detailed_help = {
'DESCRIPTION': """
*{command}* lists the HMAC key metadata for keys in the current project.
""",
'EXAMPLES': """
To show metadata for all keys, including recently deleted keys:
$ {command} --all --long
To list only HMAC keys belonging to the service account
``test.sa@test.iam.gserviceaccount.com'':
$ {command} --service-account=test.sa@test.iam.gserviceaccount.com
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'-a',
'--all',
action='store_true',
help='Shows all keys, including recently deleted keys.')
parser.add_argument(
'-l',
'--long',
action='store_true',
help=textwrap.dedent("""\
Use long listing format, showing the full metadata for each key
excluding the secret."""))
parser.add_argument(
'-u',
'--service-account',
help='Filter keys for the provided service account email.')
def Run(self, args):
if args.long:
fields_scope = cloud_api.FieldsScope.FULL
else:
fields_scope = cloud_api.FieldsScope.SHORT
api = api_factory.get_api(storage_url.ProviderPrefix.GCS)
for hmac_key in api.list_hmac_keys(
service_account_email=args.service_account,
show_deleted_keys=args.all,
fields_scope=fields_scope
):
yield hmac_key.metadata

View File

@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for HMAC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class Update(base.Command):
"""Change the status of a service account HMAC."""
detailed_help = {
'DESCRIPTION': """
*{command}* sets the state of the specified key. Valid state arguments
are ``ACTIVE'' and ``INACTIVE''. To set a key to state ``DELETED'', use
*{parent_command} delete* on an ``INACTIVE'' key. If an etag is set in
the command, it will only succeed if the provided etag matches the etag
of the stored key.
""",
'EXAMPLES': """
To activate an HMAC key:
$ {command} GOOG56JBMFZX6PMPTQ62VD2 --activate
To set the state of an HMAC key to ``INACTIVE'' provided its etag is
``M42da='':
$ {command} GOOG56JBMFZX6PMPTQ62VD2 --deactivate --etag=M42da=
""",
}
@staticmethod
def Args(parser):
parser.add_argument('access_id', help='Access ID for HMAC key to update.')
parser.add_argument(
'-e',
'--etag',
help=textwrap.dedent("""\
If provided, the update will only be performed if the specified etag
matches the etag of the stored key."""))
state_group = parser.add_mutually_exclusive_group(required=True)
state_group.add_argument(
'--activate',
action='store_true',
help='Sets the state of the specified key to ``ACTIVE\'\'.')
state_group.add_argument(
'--deactivate',
action='store_true',
help='Sets the state of the specified key to ``INACTIVE\'\'.')
def Run(self, args):
api = api_factory.get_api(storage_url.ProviderPrefix.GCS)
access_id = args.access_id
etag = args.etag
if args.activate:
state = cloud_api.HmacKeyState.ACTIVE
elif args.deactivate:
state = cloud_api.HmacKeyState.INACTIVE
response = api.patch_hmac_key(access_id, etag, state)
return response.metadata

View File

@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage Insights commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Insights(base.Group):
"""Manage Cloud Storage inventory reports."""
def Filter(self, context, args):
# TODO(b/190541521): Determine if command group works with project number
base.RequireProjectID(args)
del context, args

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage Insights dataset configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class DatasetConfigs(base.Group):
"""Manage Cloud Storage Insights dataset configurations."""

View File

@@ -0,0 +1,148 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for Insights dataset config."""
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage.insights.dataset_configs import create_update_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import log_util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Create(base.Command):
"""Create a new dataset config for Insights."""
detailed_help = {
'DESCRIPTION': """
Create a new dataset config for Insights.
""",
'EXAMPLES': """
To create a dataset config with config name as "my_config" in location
"us-central1" and project numbers "123456" and "456789" belonging to
organization number "54321":
$ {command} my_config --location=us-central1
--source-projects=123456,456789 --organization=54321 --retention-period-days=1
To create a dataset config that automatically adds new buckets into
config:
$ {command} my_config --location=us-central1
--source-projects=123456,456789 --organization=54321
--auto-add-new-buckets --retention-period-days=1
""",
}
@classmethod
def Args(cls, parser):
parser.add_argument(
'DATASET_CONFIG_NAME',
type=str,
help='Provide human readable config name.',
)
parser.add_argument(
'--organization',
type=int,
required=True,
metavar='SOURCE_ORG_NUMBER',
help='Provide the source organization number.',
)
parser.add_argument(
'--identity',
type=str,
metavar='IDENTITY_TYPE',
choices=['IDENTITY_TYPE_PER_CONFIG', 'IDENTITY_TYPE_PER_PROJECT'],
default='IDENTITY_TYPE_PER_CONFIG',
help='The type of service account used in the dataset config.',
)
parser.add_argument(
'--auto-add-new-buckets',
action='store_true',
help=(
'Automatically include any new buckets created if they satisfy'
' criteria defined in config settings.'
),
)
flags.add_dataset_config_location_flag(parser)
flags.add_dataset_config_create_update_flags(parser)
def Run(self, args):
source_projects_list = None
if args.source_projects is not None:
source_projects_list = args.source_projects
elif args.source_projects_file is not None:
source_projects_list = create_update_util.get_source_configs_list(
args.source_projects_file, create_update_util.ConfigType.PROJECTS
)
source_folders_list = None
if args.source_folders is not None:
source_folders_list = args.source_folders
elif args.source_folders_file is not None:
source_folders_list = create_update_util.get_source_configs_list(
args.source_folders_file, create_update_util.ConfigType.FOLDERS
)
api_client = insights_api.InsightsApi()
try:
dataset_config_operation = api_client.create_dataset_config(
dataset_config_name=args.DATASET_CONFIG_NAME,
location=args.location,
destination_project=properties.VALUES.core.project.Get(),
organization_scope=args.enable_organization_scope,
source_projects_list=source_projects_list,
source_folders_list=source_folders_list,
organization_number=args.organization,
include_buckets_name_list=args.include_bucket_names,
include_buckets_prefix_regex_list=args.include_bucket_prefix_regexes,
exclude_buckets_name_list=args.exclude_bucket_names,
exclude_buckets_prefix_regex_list=args.exclude_bucket_prefix_regexes,
include_source_locations=args.include_source_locations,
exclude_source_locations=args.exclude_source_locations,
auto_add_new_buckets=args.auto_add_new_buckets,
retention_period=args.retention_period_days,
activity_data_retention_period=getattr(
args, 'activity_data_retention_period_days', None
),
identity_type=args.identity,
description=args.description,
)
log_util.dataset_config_operation_started_and_status_log(
'Create', args.DATASET_CONFIG_NAME, dataset_config_operation.name
)
except apitools_exceptions.HttpBadRequestError:
log.status.Print(
'We caught an exception while trying to create the'
' dataset-configuration.\nPlease check that the flags are set with'
' valid values. For example, config name must start with an'
" alphanumeric value and only contain '_' as a special character"
)
raise
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
"""Create a new dataset config for Insights."""

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create-link command for Insights dataset config."""
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.dataset_configs import log_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import resource_args
@base.DefaultUniverseOnly
class CreateLink(base.Command):
"""Create a link to a BigQuery instance."""
detailed_help = {
'DESCRIPTION': """
Create link to the customer BigQuery instance for Insights dataset config.
""",
'EXAMPLES': """
To create a link to the customer BigQuery instance for config name:
"my_config" in location "us-central1":
$ {command} my_config --location=us-central1
To create a link for the same dataset config with fully specified name:
$ {command} projects/foo/locations/us-central1/datasetConfigs/my_config
""",
}
@staticmethod
def Args(parser):
resource_args.add_dataset_config_resource_arg(parser, 'to create link')
def Run(self, args):
client = insights_api.InsightsApi()
dataset_config_relative_name = (
args.CONCEPTS.dataset_config.Parse().RelativeName()
)
create_dataset_config_link_operation = client.create_dataset_config_link(
dataset_config_relative_name,
)
log_util.dataset_config_operation_started_and_status_log(
'Create link',
dataset_config_relative_name,
create_dataset_config_link_operation.name,
)

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of delete command for Insights dataset config."""
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.dataset_configs import log_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import resource_args
from googlecloudsdk.core.console import console_io
@base.DefaultUniverseOnly
class Delete(base.Command):
  """Delete dataset config for Insights."""

  detailed_help = {
      'DESCRIPTION': """
      Delete an Insights dataset config.
      """,
      'EXAMPLES': """
      To delete a dataset config with config name "my_config" in location
      "us-central1":

        $ {command} my_config --location=us-central1

      To delete the same dataset config with fully specified name:

        $ {command} projects/foo/locations/us-central1/datasetConfigs/my_config

      To delete the same dataset config and unlink it from the BigQuery
      instance:

        $ {command} my_config --location=us-central1 --auto-delete-link

      To delete the same dataset config without taking user consent:

        $ {command} my_config --location=us-central1 --auto-delete-link
        --force
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the resource argument and deletion behavior flags."""
    resource_args.add_dataset_config_resource_arg(parser, 'to delete')
    parser.add_argument(
        '--auto-delete-link',
        action='store_true',
        help=(
            'Delete the BigQuery instance links before the config gets deleted'
            ' explicitly.'
        ),
    )
    parser.add_argument(
        '--force',
        action='store_true',
        help='Force delete the config by skipping the consent.',
    )

  def Run(self, args):
    """Deletes the dataset config, optionally unlinking BigQuery first."""
    client = insights_api.InsightsApi()
    dataset_config_relative_name = (
        args.CONCEPTS.dataset_config.Parse().RelativeName()
    )
    if not args.force:
      # Deletion is irreversible; confirm with the user unless --force.
      message = 'You are about to delete dataset config: {}'.format(
          dataset_config_relative_name
      )
      console_io.PromptContinue(
          message=message, throw_if_unattended=True, cancel_on_no=True
      )
    if args.auto_delete_link:
      # The BigQuery link must be removed before the config itself can be
      # deleted.
      delete_dataset_config_link_operation = client.delete_dataset_config_link(
          dataset_config_relative_name,
      )
      log_util.dataset_config_operation_started_and_status_log(
          'Delete link',
          dataset_config_relative_name,
          delete_dataset_config_link_operation.name,
      )
    delete_dataset_config_operation = client.delete_dataset_config(
        dataset_config_relative_name
    )
    log_util.dataset_config_operation_started_and_status_log(
        'Delete',
        dataset_config_relative_name,
        delete_dataset_config_operation.name,
    )

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of delete-link command for Insights dataset config."""
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.dataset_configs import log_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import resource_args
@base.DefaultUniverseOnly
class DeleteLink(base.Command):
  """Delete a link to a BigQuery instance."""

  detailed_help = {
      'DESCRIPTION': """
      Delete a link to a BigQuery instance.
      """,
      'EXAMPLES': """
      To unlink a dataset config with config name "my_config" in location
      "us-central1":

          $ {command} my_config --location=us-central1

      To delete a link for the same dataset config with fully specified name:

          $ {command} projects/foo/locations/us-central1/datasetConfigs/my_config
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the dataset config resource argument."""
    resource_args.add_dataset_config_resource_arg(parser, 'to delete link')

  def Run(self, args):
    """Issues the delete-link call and logs the resulting operation."""
    config_name = args.CONCEPTS.dataset_config.Parse().RelativeName()
    link_op = insights_api.InsightsApi().delete_dataset_config_link(config_name)
    log_util.dataset_config_operation_started_and_status_log(
        'Delete link', config_name, link_op.name
    )

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command for Insights dataset config."""
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.dataset_configs import resource_args
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describe dataset config for Insights."""

  detailed_help = {
      'DESCRIPTION': """
      Describe the Insights dataset config.
      """,
      'EXAMPLES': """
      To describe a dataset config with config name "my_config" in location
      "us-central1":

          $ {command} my_config --location=us-central1

      To describe the same dataset config with fully specified name:

          $ {command} projects/foo/locations/us-central1/datasetConfigs/my_config
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the dataset config resource argument."""
    resource_args.add_dataset_config_resource_arg(parser, 'to describe')

  def Run(self, args):
    """Fetches and returns the requested dataset config resource."""
    config_name = args.CONCEPTS.dataset_config.Parse().RelativeName()
    client = insights_api.InsightsApi()
    return client.get_dataset_config(config_name)

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of list command for Insights dataset config."""
import re
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
LOCATION_REGEX_PATTERN = re.compile(r'locations/(.*)/.*/')


def _transform_location(dataset_config):
  """Extracts the location segment from a dataset config's resource name.

  Args:
    dataset_config: Serialized dataset config (dict) whose 'name' value looks
      like projects/P/locations/LOCATION/datasetConfigs/ID.

  Returns:
    The location string (e.g. 'us-central1'), or a placeholder value when the
    name does not contain a parsable location segment.
  """
  matched_result = LOCATION_REGEX_PATTERN.search(dataset_config['name'])
  # A successful match always captures group(1) (possibly empty), so the
  # former "group(1) is not None" check was redundant and has been dropped.
  if matched_result:
    return matched_result.group(1)
  # Fix: corrected the misspelled fallback ('Misformated' -> 'Misformatted').
  return 'N/A-Misformatted Value'


_TRANSFORMS = {'location_transform': _transform_location}
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """List returns all the Insights dataset configs for given location."""

  detailed_help = {
      'DESCRIPTION': """
      List Cloud storage Insights dataset configs.
      """,
      'EXAMPLES': """
      List all dataset configs in all locations:

          $ {command}

      List all dataset configs for location "us-central1":

          $ {command} --location=us-central1

      List all dataset configs with a page size of "20":

          $ {command} --location=us-central1 --page-size=20

      List all dataset configs with JSON formatting:

          $ {command} --location=us-central1 --format=json
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the optional location filter and the default table format."""
    # --location is optional: omitting it lists configs across all locations.
    flags.add_dataset_config_location_flag(parser, is_required=False)
    # LOCATION is derived from the resource name via the location_transform()
    # transform registered below.
    parser.display_info.AddFormat("""
        table(
            uid:label=DATASET_CONFIG_ID,
            name.basename():label=DATASET_CONFIG_NAME,
            location_transform():label=LOCATION,
            sourceProjects.projectNumbers:label=SOURCE_PROJECTS,
            organizationNumber:label=ORGANIZATION_NUMBER,
            retentionPeriodDays:label=RETENTION_PERIOD_DAYS,
            datasetConfigState:label=STATE
        )
    """)
    parser.display_info.AddTransforms(_TRANSFORMS)

  def Run(self, args):
    """Lists dataset configs; paging is handled by base.ListCommand flags."""
    return insights_api.InsightsApi().list_dataset_config(
        location=args.location, page_size=args.page_size
    )

View File

@@ -0,0 +1,170 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for Insights dataset config."""
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage.insights.dataset_configs import create_update_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import log_util
from googlecloudsdk.command_lib.storage.insights.dataset_configs import resource_args
from googlecloudsdk.core.console import console_io
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Update(base.Command):
  """Updates a dataset config for Insights."""

  detailed_help = {
      'DESCRIPTION': """
      Update a dataset config for Insights.
      """,
      'EXAMPLES': """
      To update the description for a dataset config "my_config" in
      location "us-central1":

          $ {command} my_config --location=us-central1 --description="a user provided description"

      To update the same dataset config with fully specified name:

          $ {command} projects/foo/locations/us-central1/datasetConfigs/my_config

      To update the retention period days for the dataset config "my_config" in
      location "us-central1":

          $ {command} my_config --location=us-central1
          --retention-period-days=20
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers update flags and the dataset config resource argument."""
    # String choices (rather than a boolean flag) so that "unset" (None) can
    # be distinguished from an explicit true/false in Run.
    parser.add_argument(
        '--auto-add-new-buckets',
        choices=['true', 'false'],
        help=(
            'Automatically include any new buckets created if they satisfy'
            ' criteria defined in config settings.'
        ),
    )
    resource_args.add_dataset_config_resource_arg(parser, 'to update')
    flags.add_dataset_config_create_update_flags(parser, is_update=True)

  def _get_source_projects_list(self, args):
    """Returns source projects from the flag or file flag, or None if unset."""
    if args.source_projects is not None:
      return args.source_projects
    elif args.source_projects_file is not None:
      return create_update_util.get_source_configs_list(
          args.source_projects_file, create_update_util.ConfigType.PROJECTS
      )
    return None

  def _get_source_folders_list(self, args):
    """Returns source folders from the flag or file flag, or None if unset."""
    if args.source_folders is not None:
      return args.source_folders
    elif args.source_folders_file is not None:
      return create_update_util.get_source_configs_list(
          args.source_folders_file, create_update_util.ConfigType.FOLDERS
      )
    return None

  def _get_auto_add_new_buckets(self, args):
    """Maps the --auto-add-new-buckets string choice to bool (None if unset)."""
    if args.auto_add_new_buckets is not None:
      if args.auto_add_new_buckets == 'true':
        return True
      elif args.auto_add_new_buckets == 'false':
        return False
    return None

  def Run(self, args):
    """Validates requested changes, confirms with the user, and updates.

    Raises:
      ValueError: If --retention-period-days is not a positive integer.
    """
    client = insights_api.InsightsApi()
    dataset_config_relative_name = (
        args.CONCEPTS.dataset_config.Parse().RelativeName()
    )
    # A retention change requires explicit consent; non-positive values are
    # rejected outright.
    if args.retention_period_days is not None:
      if args.retention_period_days > 0:
        message = (
            'You are about to change retention period for dataset config: {}'
            .format(dataset_config_relative_name)
        )
        console_io.PromptContinue(
            message=message, throw_if_unattended=True, cancel_on_no=True
        )
      else:
        raise ValueError('retention-period-days value must be greater than 0')
    source_projects_list = self._get_source_projects_list(args)
    source_folders_list = self._get_source_folders_list(args)
    # A scope change (organization / projects / folders) also requires
    # consent; the prompt shows both the existing and the requested scope.
    new_scope = create_update_util.get_new_source_config(
        args.enable_organization_scope,
        source_projects_list,
        source_folders_list,
    )
    if new_scope is not None:
      existing_scope = create_update_util.get_existing_source_config(
          dataset_config_relative_name, client
      )
      message = (
          'You are about to change scope of dataset config:'
          f' {dataset_config_relative_name} from {existing_scope} to'
          f' {new_scope}. Refer'
          ' https://cloud.google.com/storage/docs/insights/datasets#dataset-config'
          ' for more details.'
      )
      console_io.PromptContinue(
          message=message, throw_if_unattended=True, cancel_on_no=True
      )
    auto_add_new_buckets = self._get_auto_add_new_buckets(args)
    update_dataset_config_operation = client.update_dataset_config(
        dataset_config_relative_name,
        organization_scope=args.enable_organization_scope,
        source_projects_list=source_projects_list,
        source_folders_list=source_folders_list,
        include_buckets_name_list=args.include_bucket_names,
        include_buckets_prefix_regex_list=args.include_bucket_prefix_regexes,
        exclude_buckets_name_list=args.exclude_bucket_names,
        exclude_buckets_prefix_regex_list=args.exclude_bucket_prefix_regexes,
        include_source_locations=args.include_source_locations,
        exclude_source_locations=args.exclude_source_locations,
        auto_add_new_buckets=auto_add_new_buckets,
        retention_period=args.retention_period_days,
        # getattr with a default because this flag may not be registered on
        # every release track — presumably ALPHA-only; verify against flags.
        activity_data_retention_period=getattr(
            args, 'activity_data_retention_period_days', None
        ),
        description=args.description,
    )
    log_util.dataset_config_operation_started_and_status_log(
        'Update',
        dataset_config_relative_name,
        update_dataset_config_operation.name,
    )
    return update_dataset_config_operation
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(Update):
  """Updates a dataset config for Insights."""
  # Alpha release-track variant; inherits all behavior from Update unchanged.

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage inventory report configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class InventoryReports(base.Group):
  """Manage Cloud Storage inventory report configurations."""
  # Command group only; the actual operations live in its subcommands
  # (create, delete, describe, list, update, details).

View File

@@ -0,0 +1,103 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for inventory reports."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Create(base.Command):
  """Create a new inventory report config."""

  detailed_help = {
      'DESCRIPTION': """
      Create an inventory report config that defines how often
      inventory reports are generated, the metadata fields you want the reports
      to include, and a bucket/prefix in which to store the reports, also known
      as the destination.
      """,
      'EXAMPLES': """
      To create an inventory report about "my-bucket" that will store report
      details in "report-bucket" with the prefix "save-path/".

          $ {command} gs://my-bucket --destination=gs://report-bucket/save-path/
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the source bucket positional and the shared report flags."""
    parser.add_argument(
        'source_bucket_url',
        type=str,
        help='URL of the source bucket that will contain the '
        'inventory report configuration.')
    flags.add_inventory_reports_flags(parser, require_create_flags=True)

  def Run(self, args):
    """Validates arguments, applies defaults, and creates the report config."""
    bucket_url = storage_url.storage_url_from_string(
        storage_url.add_gcs_scheme_if_missing(args.source_bucket_url))
    is_bucket_url = (
        isinstance(bucket_url, storage_url.CloudUrl)
        and bucket_url.is_bucket())
    if not is_bucket_url:
      raise errors.InvalidUrlError(
          'Invalid bucket URL: {}. Only bucket URLs are accepted'
          ' for SOURCE_BUCKET_URL. Example: "gs://bucket"'.format(
              args.source_bucket_url))

    # Default destination: an "inventory_reports/" prefix inside the source
    # bucket itself.
    if args.destination is None:
      destination = storage_url.CloudUrl(
          scheme=bucket_url.scheme,
          bucket_name=bucket_url.bucket_name,
          resource_name='inventory_reports/')
    else:
      destination = storage_url.storage_url_from_string(
          storage_url.add_gcs_scheme_if_missing(args.destination))

    # Default schedule: start tomorrow (UTC) and repeat for one year.
    if args.schedule_starts is None:
      start_date = (datetime.datetime.now(datetime.timezone.utc) +
                    datetime.timedelta(days=1)).date()
    else:
      start_date = args.schedule_starts
    if args.schedule_repeats_until is None:
      end_date = start_date + datetime.timedelta(days=365)
    else:
      end_date = args.schedule_repeats_until

    report_config = insights_api.InsightsApi().create_inventory_report(
        source_bucket=bucket_url.bucket_name,
        destination_url=destination,
        metadata_fields=list(args.metadata_fields),
        start_date=start_date,
        end_date=end_date,
        frequency=args.schedule_repeats,
        csv_delimiter=args.csv_delimiter,
        csv_separator=args.csv_separator,
        csv_header=args.csv_header,
        parquet=args.parquet,
        display_name=args.display_name,
    )
    log.status.Print(
        'Created report configuration: {}'.format(report_config.name))

View File

@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation command for deleting inventory report configurations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.inventory_reports import resource_args
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Delete(base.Command):
  """Delete an inventory report config."""

  detailed_help = {
      'DESCRIPTION':
          """
      Delete an inventory report config.
      """,
      'EXAMPLES':
          """
      To delete an inventory report config with ID=1234,
      location=us-central1 and project=foo:

          $ {command} 1234 --location=us-central1 --project=foo

      To delete the same inventory report config with fully specified name:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234

      To delete the report config with all generated report details:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234 --force
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the report config resource argument and --force."""
    resource_args.add_report_config_resource_arg(parser, 'to delete')
    parser.add_argument(
        '--force',
        action='store_true',
        help='If set, all report details for this report config'
        ' will be deleted.'
    )

  def Run(self, args):
    """Deletes the report config (including its details when --force is set)."""
    config_name = args.CONCEPTS.report_config.Parse().RelativeName()
    client = insights_api.InsightsApi()
    client.delete_inventory_report(config_name, args.force)
    log.status.Print('Deleted report config: {}'.format(config_name))

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of insights inventory-reports describe command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.inventory_reports import resource_args
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describe an inventory report config."""

  detailed_help = {
      'DESCRIPTION':
          """
      Describe an inventory report config.
      """,
      'EXAMPLES':
          """
      To describe an inventory report config with ID=1234,
      location=us-central1, and project=foo:

          $ {command} 1234 --location=us-central1 --project=foo

      To describe the same inventory report config with fully specified name:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234

      Describe the same inventory report config with JSON formatting, only
      returning the "displayName" field:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234 --format="json(displayName)"
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the report config resource argument."""
    resource_args.add_report_config_resource_arg(parser, 'to describe')

  def Run(self, args):
    """Fetches and returns the requested report config resource."""
    config_name = args.CONCEPTS.report_config.Parse().RelativeName()
    client = insights_api.InsightsApi()
    return client.get_inventory_report(config_name)

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Retrieve details of Cloud Storage inventory reports."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
class Details(base.Group):
  """Retrieve details of inventory reports."""
  # Command group; its describe/list subcommands operate on the individual
  # generated report details of a report config.

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of insights inventory-reports details describe command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.inventory_reports import resource_args
from googlecloudsdk.core import log
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describe inventory reports detail."""

  detailed_help = {
      'DESCRIPTION':
          """
      Describe the inventory report detail.
      """,
      'EXAMPLES':
          """
      To describe an inventory report detail with ID=4568,
      location=us-central1, project=foo, and report config ID=1234:

          $ {command} 1234 --location=us-central1 --project=foo --report-config=1234

      To describe the same inventory report detail with fully specified name:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234/reportDetails/5678

      To describe the same inventory report detail with JSON formatting, only returning
      the "status" field:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234/reportDetails/5678 --format="json(status)"
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the report detail resource argument."""
    resource_args.add_report_detail_resource_arg(parser, 'to describe')

  def Run(self, args):
    """Fetches the report detail and prints a download hint when present."""
    detail_name = args.CONCEPTS.report_detail.Parse().RelativeName()
    details = insights_api.InsightsApi().get_report_details(detail_name)
    if details:
      log.status.Print(
          'To download the reports, use the `gcloud storage cp` command.')
    return details

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of insights inventory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage.insights.inventory_reports import resource_args
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """List inventory report details."""

  detailed_help = {
      'DESCRIPTION':
          """
      List all inventory report details generated by a given inventory report
      config.
      """,
      'EXAMPLES':
          """
      To list all inventory report details for report config ID=1234,
      location=us-central1, and project=foo:

          $ {command} 1234 --location=us-central1 --project=foo

      To list all the same inventory report details with fully specified name
      of the report config:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234

      To list all inventory reports, only returning the "status" key:

          $ {command} projects/a/locations/b/reportConfigs/some-id --format="json(status)"
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the parent report config argument and default table format."""
    resource_args.add_report_config_resource_arg(
        parser, 'for which the report details should be listed')
    parser.display_info.AddFormat(
        """
        table(
            format('{}',name.basename()):label=REPORT_DETAIL_ID,
            snapshotTime,
            status.message:label=STATUS
        )
        """
    )

  def Run(self, args):
    """Lists report details under the given report config, with paging."""
    report_config_name = args.CONCEPTS.report_config.Parse().RelativeName()
    return insights_api.InsightsApi().list_report_details(
        report_config_name, args.page_size)

View File

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of insights inventory-reports list command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import storage_url
@base.DefaultUniverseOnly
class List(base.ListCommand):
  """Lists all inventory report configs."""

  detailed_help = {
      'DESCRIPTION':
          """
      List Cloud Storage inventory report configs.
      """,
      'EXAMPLES':
          """
      List all inventory report configs in the source bucket
      "my-bucket":

          $ {command} --source=gs://my-bucket

      List buckets with JSON formatting, only returning the "displayName" field:

          $ {command} --source=gs://my-bucket --format="json(displayName)"
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers --source/--location filters and the default table format."""
    parser.add_argument(
        '--source',
        metavar='SOURCE_BUCKET_URL',
        help='Specifies URL of the source bucket that contains the inventory '
        'report configuration.')
    parser.add_argument(
        '--location',
        help='The location of the report configs.')
    # Dates arrive as {year, month, day} message fields; the format()
    # projections below render them as YYYY-MM-DD.
    parser.display_info.AddFormat(
        """
        table(
            format('{}',name.basename()):label=REPORT_CONFIG_ID,
            format(
                '{:04d}-{:02d}-{:02d}',
                frequencyOptions.startDate.year,
                frequencyOptions.startDate.month,
                frequencyOptions.startDate.day):label=START_DATE,
            format(
                '{:04d}-{:02d}-{:02d}',
                frequencyOptions.endDate.year,
                frequencyOptions.endDate.month,
                frequencyOptions.endDate.day):label=END_DATE,
            format(
                'gs://{}',
                objectMetadataReportOptions.storageFilters.bucket
            ):label=SOURCE_BUCKET:wrap,
            format(
                'gs://{}/{}',
                objectMetadataReportOptions.storageDestinationOptions.bucket,
                objectMetadataReportOptions.storageDestinationOptions.
                destinationPath.flatten()):label=DESTINATION:wrap
        )
        """
    )

  def Run(self, args):
    """Validates filter flags and lists the matching report configs.

    Raises:
      errors.Error: If neither --source nor --location was supplied.
    """
    # At least one filter is mandatory; listing with no bucket and no
    # location is rejected here before any API call is made.
    if args.source is None and args.location is None:
      raise errors.Error(
          'At least one of --source or --location is required.')
    source_bucket = storage_url.storage_url_from_string(
        args.source) if args.source is not None else None
    return insights_api.InsightsApi().list_inventory_reports(
        source_bucket, location=args.location, page_size=args.page_size
    )

View File

@@ -0,0 +1,122 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for inventory reports."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.insights.inventory_reports import resource_args
@base.DefaultUniverseOnly
class Update(base.Command):
  """Update an inventory report config."""

  detailed_help = {
      'DESCRIPTION': """
      Update an inventory report config.
      """,
      'EXAMPLES': """
      To update the display-name of an inventory report config with ID=1234,
      location=us-central1, and project=foo:

          $ {command} 1234 --location=us-central1 --project=foo --display-name=bar

      To update the same inventory report config with fully specified name:

          $ {command} /projects/foo/locations/us-central1/reportConfigs/1234 --display-name=bar
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers resource arg, shared flags, and metadata-field edit flags."""
    resource_args.add_report_config_resource_arg(parser, 'to update')
    flags.add_inventory_reports_flags(parser)
    # Full replacement (--metadata-fields) is mutually exclusive with the
    # incremental --add-/--remove-metadata-fields pair.
    metadata_fields_group = parser.add_group(mutex=True)
    metadata_fields_add_remove_group = metadata_fields_group.add_group(
        help='Add and Remove flags for metadata fields')
    flags.add_inventory_reports_metadata_fields_flag(metadata_fields_group)
    metadata_fields_add_remove_group.add_argument(
        '--add-metadata-fields',
        metavar='METADATA_FIELDS',
        type=arg_parsers.ArgList(
            choices=flags.OPTIONAL_INVENTORY_REPORTS_METADATA_FIELDS),
        help='Adds fields to the metadata_fields list.')
    metadata_fields_add_remove_group.add_argument(
        '--remove-metadata-fields',
        metavar='METADATA_FIELDS',
        type=arg_parsers.ArgList(
            choices=flags.OPTIONAL_INVENTORY_REPORTS_METADATA_FIELDS),
        help='Removes fields from the metadata_fields list.')
    # We don't add --clear-metadata-fields because certain metadata-fields
    # like name and project must be always present.

  def Run(self, args):
    """Computes the new metadata field list and issues the update call.

    Raises:
      errors.Error: When --remove-metadata-fields names a field that is not
        currently part of the config.
    """
    client = insights_api.InsightsApi()
    report_config_name = args.CONCEPTS.report_config.Parse().RelativeName()
    if args.add_metadata_fields or args.remove_metadata_fields:
      # Get the existing report config so that we can modify
      # the metadata_fields list.
      report_config = client.get_inventory_report(report_config_name)
      metadata_fields = set(
          report_config.objectMetadataReportOptions.metadataFields)
      if args.add_metadata_fields is not None:
        for field in args.add_metadata_fields:
          metadata_fields.add(field)
      if args.remove_metadata_fields is not None:
        for field in args.remove_metadata_fields:
          # Surface an explicit error rather than silently ignoring a field
          # that is not currently part of the config.
          if field not in metadata_fields:
            raise errors.Error(
                'Cannot remove non-existing metadata field: {}'.format(field))
          metadata_fields.remove(field)
      # NOTE(review): set iteration order is arbitrary, so the resulting
      # field order may vary between runs — confirm the API is
      # order-insensitive for metadata_fields.
      metadata_fields_list = list(metadata_fields)
    elif args.metadata_fields:
      metadata_fields_list = list(args.metadata_fields)
    else:
      # messages.ReportConfig does not accept None value.
      # This should be safe, as an empty list has no effect unless we add
      # the field to the updateMask.
      metadata_fields_list = []
    if args.destination is not None:
      destination_url = storage_url.storage_url_from_string(
          storage_url.add_gcs_scheme_if_missing(args.destination))
    else:
      destination_url = None
    return client.update_inventory_report(
        report_config_name,
        destination_url=destination_url,
        metadata_fields=metadata_fields_list,
        start_date=args.schedule_starts,
        end_date=args.schedule_repeats_until,
        frequency=args.schedule_repeats,
        csv_separator=args.csv_separator,
        csv_delimiter=args.csv_delimiter,
        csv_header=args.csv_header,
        parquet=args.parquet,
        display_name=args.display_name,
    )

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Storage Insights operations commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Calliope command group anchor. It holds no logic of its own; the actual
# subcommands (cancel, describe, list) are discovered from sibling modules.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Operations(base.Group):
  """Manage insights operations."""

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to cancel an insights operation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
@base.DefaultUniverseOnly
class Cancel(base.Command):
  """Cancel an insights operation."""

  detailed_help = {
      'DESCRIPTION': """\
      Cancel an insights operation. Since operations are asynchronous, this
      request is best effort and may fail in cases such as when the operation
      is already complete.
      """,
      'EXAMPLES': """\
      To cancel the operation "12345" in "us-central1" for the project
      "my-project", run:
      $ {command} projects/my-project/locations/us-central1/operations/12345
      """,
  }

  @staticmethod
  def Args(parser):
    # The fully qualified operation name is taken as a single positional;
    # there is no separate project/location resource argument.
    parser.add_argument(
        'operation_name',
        help=(
            'The operation name in the format'
            ' "projects/PROJECT/locations/LOCATION/operations/OPERATION_ID".'
        ),
    )

  def Run(self, args):
    """Sends a best-effort cancel request and prints a confirmation."""
    client = insights_api.InsightsApi()
    client.cancel_operation(args.operation_name)
    log.status.Print(
        'Sent cancel request for {}'.format(args.operation_name))

View File

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to describe an insights operation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.Command):
  """Describe an insights operation."""

  detailed_help = {
      'DESCRIPTION': """\
      Get details about an insights operation.
      """,
      'EXAMPLES': """\
      To describe the operation "12345" in "us-central1" for the project
      "my-project", run:
      $ {command} projects/my-project/locations/us-central1/operations/12345
      """,
  }

  @staticmethod
  def Args(parser):
    # The fully qualified operation name is taken as a single positional.
    parser.add_argument(
        'operation_name',
        help=(
            'The operation name in the format'
            ' "projects/PROJECT/locations/LOCATION/operations/OPERATION_ID".'
        ),
    )

  def Run(self, args):
    """Returns the operation resource for display."""
    client = insights_api.InsightsApi()
    return client.get_operation(args.operation_name)

View File

@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Storage Insights operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import insights_api
from googlecloudsdk.calliope import base
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class List(base.ListCommand):
  """List insights operations."""

  detailed_help = {
      'DESCRIPTION': """\
      List storage operations.
      """,
      'EXAMPLES': """\
      To list all operations in "us-central1" for the project "my-project", run:
      $ {command} projects/my-project/locations/us-central1
      To list operations in JSON format, run:
      $ {command} projects/my-project/locations/us-central1 --format=json
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'parent_resource_name',
        help=(
            # Fixed a stray doubled quote in the original help text
            # (`""projects/...`).
            'The operation parent resource in the format'
            ' "projects/PROJECT/locations/LOCATION".'
        ),
    )

  def Run(self, args):
    """Returns the iterable of operations under the parent resource."""
    return insights_api.InsightsApi().list_operations(args.parent_resource_name)

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage Intelligence Configurations commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of Storage Intelligence in different universes.
# Calliope command group anchor; subcommands (enable, disable, describe,
# update) live in sibling modules and carry the actual logic.
@base.DefaultUniverseOnly
class StorageIntelligence(base.Group):
  """Manage Cloud Storage Intelligence Configurations."""

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command for getting storage intelligence configuration."""
from googlecloudsdk.api_lib.storage import storage_intelligence_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of storage intelligence in different universes.
@base.DefaultUniverseOnly
class Describe(base.DescribeCommand):
  """Describes storage intelligence configuration."""

  detailed_help = {
      'DESCRIPTION': """
      Describe storage intelligence config for the organization, sub-folder
      or project.
      """,
      'EXAMPLES': """
      The following command describes storage intelligence config for the sub-folder with
      id `123456`. \n
      $ {command} --sub-folder=123456
      """,
  }

  @classmethod
  def Args(cls, parser):
    flags.add_storage_intelligence_configs_level_flags(parser)

  def Run(self, args):
    """Fetches the intelligence config for the requested resource level."""
    client = storage_intelligence_api.StorageIntelligenceApi()
    # NOTE(review): the level flags are presumably mutually exclusive (added
    # by the shared flags helper), so at most one branch runs — confirm.
    if args.sub_folder:
      return client.get_sub_folder_intelligence_config(args.sub_folder)
    if args.project:
      return client.get_project_intelligence_config(args.project)
    if args.organization:
      return client.get_organization_intelligence_config(args.organization)

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of disable command for disabling storage intelligence."""
from googlecloudsdk.api_lib.storage import storage_intelligence_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of storage intelligence in different universes.
@base.DefaultUniverseOnly
class Disable(base.Command):
  """Disables storage intelligence."""

  detailed_help = {
      'DESCRIPTION': """
      Disable storage intelligence for the organization, sub-folder or project.
      """,
      'EXAMPLES': """
      The following command disables storage intelligence for the project. \n
      $ {command} --project=my-project
      """,
  }

  @classmethod
  def Args(cls, parser):
    flags.add_storage_intelligence_configs_level_flags(parser)

  def Run(self, args):
    """Disables the config at the requested level and prints a confirmation."""
    client = storage_intelligence_api.StorageIntelligenceApi()
    # Dispatch on resource level: sub-folder, then project, falling back to
    # organization (mirrors the original elif chain).
    if args.sub_folder:
      config = client.disable_sub_folder_intelligence_config(args.sub_folder)
    elif args.project:
      config = client.disable_project_intelligence_config(args.project)
    else:
      config = client.disable_organization_intelligence_config(
          args.organization)
    log.status.Print(
        'Successfully disabled storage intelligence plan for {}.\n'.format(
            config.name))
    return config

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of enable command for enabling storage intelligence."""
from googlecloudsdk.api_lib.storage import storage_intelligence_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of storage intelligence in different universes.
@base.DefaultUniverseOnly
class Enable(base.Command):
  """Enables storage intelligence."""

  detailed_help = {
      'DESCRIPTION': """
      Enable storage intelligence plan for the organization, sub-folder or
      project along with filters. The command sets `STANDARD` edition by
      default if no other edition flags like ``--trial-edition`` are
      specified.
      """,
      'EXAMPLES': """
      To remove buckets from the storage intelligence plan, use the following
      command with the ``--exclude-bucket-id-regexes'' flag to specify a list
      of bucket id regexes:\n
      $ {command} --organization=my-org --exclude-bucket-id-regexes="my-bucket-.*"
      To apply location based filters in the storage intelligence plan, use
      the ``--include-locations'' or ``--exclude-locations'' flags to specify
      an allowed or excluded list of locations. The following command updates
      the storage intelligence plan of sub-folder `123456` with the specified
      list of included locations:\n
      $ {command} --sub-folder=123456 --include-locations="us-east1","us-west1"
      The following command enables storage intelligence with Trial edition
      for the given project:\n
      $ {command} --project=my-project --trial-edition
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.SetSortArgs(False)
    flags.add_storage_intelligence_configs_level_flags(parser)
    settings = parser.add_group(category='SETTINGS')
    flags.add_storage_intelligence_configs_settings_flags(settings)

  def Run(self, args):
    """Enables the intelligence plan at the requested resource level.

    Returns:
      The updated IntelligenceConfig resource.
    """
    client = storage_intelligence_api.StorageIntelligenceApi()
    # The update payload is identical across all three resource levels; only
    # the target resource differs. `inherit_from_parent` is always None here
    # because enabling always writes an explicit config.
    settings_kwargs = dict(
        inherit_from_parent=None,
        trial_edition=args.trial_edition,
        include_locations=args.include_locations,
        exclude_locations=args.exclude_locations,
        include_bucket_id_regexes=args.include_bucket_id_regexes,
        exclude_bucket_id_regexes=args.exclude_bucket_id_regexes,
    )
    if args.project:
      intelligence_config = client.update_project_intelligence_config(
          args.project, **settings_kwargs)
    elif args.sub_folder:
      intelligence_config = client.update_sub_folder_intelligence_config(
          args.sub_folder, **settings_kwargs)
    else:
      intelligence_config = client.update_organization_intelligence_config(
          args.organization, **settings_kwargs)
    log.status.Print(
        'Successfully enabled storage intelligence plan for {}.\n'.format(
            intelligence_config.name))
    return intelligence_config

View File

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating storage intelligence configuration."""
from googlecloudsdk.api_lib.storage import storage_intelligence_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of storage intelligence in different universes.
@base.DefaultUniverseOnly
class Update(base.Command):
  """Updates storage intelligence configuration."""

  detailed_help = {
      'DESCRIPTION': """
      Update storage intelligence configuration for the organization, sub-folder
      or project. The command sets `STANDARD` edition by default if no other
      edition flags like ``--trial-edition`` or ``--inherit-from-parent``
      are specified.
      """,
      'EXAMPLES': """
      To limit buckets in the storage intelligence configuration, use the
      following command with the ``--include-bucket-id-regexes'' flag to
      specify a list of bucket ids and bucket id regexes:\n
      $ {command} --organization=my-org --include-bucket-id-regexes=my-bucket-.*
      To apply location based filters in the storage intelligence
      configuration, use the ``--include-locations'' or ``--exclude-locations''
      flags to specify an allowed or excluded list of locations. The following
      command updates the storage intelligence configuration of sub-folder
      `123456` with the specified list of excluded locations:\n
      $ {command} --sub-folder=123456 --exclude-locations=us-east1,us-west1
      The following command updates storage intelligence for the given project by
      inheriting existing storage intelligence configuration from the hierarchical parent
      resource:\n
      $ {command} --project=my-project --inherit-from-parent
      To clear included locations from the project storage intelligence, use the
      following command:\n
      $ {command} --project=my-project --include-locations=
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.SetSortArgs(False)
    flags.add_storage_intelligence_configs_level_flags(parser)
    # Either inherit the parent's config or supply explicit settings — the
    # two choices are mutually exclusive, and one must be given.
    update_group = parser.add_group(
        category='UPDATE', mutex=True, required=True
    )
    update_group.add_argument(
        '--inherit-from-parent',
        action='store_true',
        help=(
            'Specifies storage intelligence config to be inherited from parent.'
        ),
    )
    settings = update_group.add_group(category='SETTINGS')
    flags.add_storage_intelligence_configs_settings_flags(settings)

  def Run(self, args):
    """Updates the intelligence config at the requested resource level.

    Returns:
      The updated IntelligenceConfig resource.
    """
    client = storage_intelligence_api.StorageIntelligenceApi()
    # The update payload is identical across all three resource levels; only
    # the target resource (project / sub-folder / organization) differs.
    settings_kwargs = dict(
        inherit_from_parent=args.inherit_from_parent,
        trial_edition=args.trial_edition,
        include_locations=args.include_locations,
        exclude_locations=args.exclude_locations,
        include_bucket_id_regexes=args.include_bucket_id_regexes,
        exclude_bucket_id_regexes=args.exclude_bucket_id_regexes,
    )
    if args.project:
      intelligence_config = client.update_project_intelligence_config(
          args.project, **settings_kwargs)
    elif args.sub_folder:
      intelligence_config = client.update_sub_folder_intelligence_config(
          args.sub_folder, **settings_kwargs)
    else:
      intelligence_config = client.update_organization_intelligence_config(
          args.organization, **settings_kwargs)
    log.status.Print(
        'Successfully updated storage intelligence plan for {}.\n'.format(
            intelligence_config.name))
    return intelligence_config

View File

@@ -0,0 +1,164 @@
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import itertools
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import expansion
from six.moves import queue
@base.Hidden
@base.Deprecate(is_removed=False, warning='This command is deprecated. '
                'Use `gcloud alpha storage ls` instead.')
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class List(base.ListCommand):
  """List the objects in Cloud Storage buckets."""

  detailed_help = {
      'DESCRIPTION': """\
      *{command}* lets you list the objects in your Cloud Storage buckets.
      Forward slashes in object names are logically treated as directories for
      the purposes of listing contents. See below for example of how to use
      wildcards to get the listing behavior you want.
      """,
      'EXAMPLES': """\
      To list the contents of a bucket:
      $ {command} gs://my-bucket
      This will list the direct contents of the bucket. To recursively list the
      contents of all directories in the bucket:
      $ {command} gs://my-bucket --recursive
      You can use wildcards to match multiple paths (including multiple
      buckets). Bucket wildcards are expanded only to the buckets contained in
      your current project:
      $ {command} gs://my-b*/log*.txt
      The following wildcards are valid and match only within the current
      directory:
      *: Matches zero or more characters
      ?: Matches zero or one characters
      []: Matches a character range (ex. [a-z] or [0-9])
      You can use double-star wildcards to match zero or more directory levels
      in a path:
      $ {command} gs://my-bucket/**/log*.txt
      You can also use double-star to match all files after a root in a path:
      $ {command} gs://my-bucket/**
      Double-star expansion can not be combined with other expressions in a
      given path segment and will operate as a single star in that context. For
      example:
      gs://my-bucket/dir**/log.txt is treated as:
      gs://my-bucket/dir*/log.txt and instead should be written as:
      gs://my-bucket/dir*/**/log.txt to get the recursive behavior.
      """,
  }

  # Row format for object detail records; `size(zero="")` presumably renders
  # zero-byte sizes as blank — confirm against the resource-printer docs.
  OBJECT_FORMAT_STRING = """\
      table(
          path:label=PATH,
          data.size.size(zero=""):label=SIZE,
          data.timeCreated.date():label=CREATED,
          data.updated.date():label=UPDATED
      )"""

  @staticmethod
  def Args(parser):
    """Registers the positional paths and listing flags on the parser."""
    parser.add_argument(
        'path',
        nargs='*',
        help='The path of objects and directories to list. The path must begin '
        'with gs:// and may or may not contain wildcard characters.')
    parser.add_argument(
        '--recursive',
        action='store_true',
        help='Recursively list the contents of any directories that match the '
        'path expression.')
    parser.add_argument(
        '--flatten-results',
        action='store_true',
        help='Show all matching objects in one list as opposed to grouping by '
        'directory.')
    # Default (grouped) display: one sub-table of objects per directory,
    # headed by the directory path.
    parser.display_info.AddFormat("""\
        table[no-heading](
            format('{0}:', dir),
            objects:format='%s'
        )""" % List.OBJECT_FORMAT_STRING)

  def Run(self, args):
    """Expands the given paths and returns listing results.

    Returns:
      A list of {'dir': ..., 'objects': ...} dicts for grouped display, or a
      flat iterable of object detail records when flattening.
    """
    # No paths means list everything reachable under gs://.
    paths = args.path or ['gs://']
    expander = expansion.GCSPathExpander()
    objects, dirs = expander.ExpandPaths(paths)

    if args.IsSpecified('flatten_results'):
      # Respect the user's choice if given explicitly.
      flatten = args.flatten_results
    else:
      # Get a default for this mode if not specifically provided.
      # Simplest case where we are listing only files or a single directory,
      # don't nest output in tables by directory.
      flatten = bool(not args.recursive and
                     not (objects and dirs) and
                     len(dirs) < 2)

    # First collect all the directly matching objects.
    results = []
    if objects:
      results.append(
          {'dir': '',
           'objects': expander.GetSortedObjectDetails(objects)})

    # For each matching directory, get the objects directly under it.
    # Breadth-first traversal: the queue seeds with the top-level matches and
    # (in recursive mode) grows with each discovered subdirectory.
    dirs_to_process = queue.Queue()
    for d in sorted(dirs):
      dirs_to_process.put(d)
    while not dirs_to_process.empty():
      d = dirs_to_process.get()
      children = [d + o for o in sorted(expander.ListDir(d))]
      details = expander.GetSortedObjectDetails(children)
      results.append({'dir': d, 'objects': details})

      if args.recursive:
        # Recurse on any directories that are found under the current parent.
        for c in children:
          if expander.IsDir(c):
            dirs_to_process.put(c + '/')

    if not flatten:
      return results

    # Flatten results: switch to the per-object format and chain all the
    # per-directory object lists into one sequence.
    args.GetDisplayInfo().AddFormat(List.OBJECT_FORMAT_STRING)
    return itertools.chain.from_iterable([x['objects'] for x in results])

View File

@@ -0,0 +1,302 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import encryption_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import list_util
from googlecloudsdk.command_lib.storage import ls_command_util
from googlecloudsdk.command_lib.storage import stdin_iterator
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
_COMMAND_DESCRIPTION = """
List your Cloud Storage buckets in a project and objects in a bucket.
This command treats forward slashes in object names as directories. See
below for examples of how to use wildcards to get the listing behavior
you want.
"""
_GA_EXAMPLES = """
The following command lists the buckets in the default project:
$ {command}
The following command lists the buckets in the specified project:
$ {command} --project=my-project
The following command lists the contents of a bucket:
$ {command} gs://my-bucket
You can use [wildcards](https://cloud.google.com/storage/docs/wildcards)
to match multiple paths (including multiple buckets). Bucket wildcards are
expanded to match only buckets contained in your current project. The
following command matches ``.txt'' objects that begin with ``log'' and
that are stored in buckets in your project that begin with ``my-b'':
$ {command} gs://my-b*/log*.txt
You can use double-star wildcards to match zero or more directory levels
in a path. The following command matches all ``.txt'' objects in a bucket.
$ {command} gs://my-bucket/**/*.txt
The wildcard `**` retrieves a flat list of objects in a single API call
and does not match prefixes. The following command would not match
`gs://my-bucket/dir/log.txt`:
$ {command} gs://my-bucket/**/dir
Double-star expansion also can not be combined with other expressions in a
given path segment and operates as a single star in that context. For
example, the command `gs://my-bucket/dir**/log.txt` is treated as
`gs://my-bucket/dir*/log.txt`. To get the recursive behavior, the command
should instead be written the following way:
gs://my-bucket/dir*/**/log.txt
The following command lists all items recursively with formatting by
using `--recursive`:
$ {command} --recursive gs://bucket
Recursive listings are similar to `**` except recursive listings include
line breaks and header formatting for each subdirectory.
"""
_ALPHA_EXAMPLES = """
The following command filters objects based on specified filter while listing.
Note that the flag is only supported for Google Cloud Storage URLs and only
applies to objects. This means that directories or buckets will still be listed
even if they do not contain objects that match the filter.
$ {command} gs://my-bucket --metadata-filter='contexts."foo"="bar"'
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Ls(base.Command):
"""List Cloud Storage buckets and objects."""
# pylint:disable=g-backslash-continuation
detailed_help = {
'DESCRIPTION': _COMMAND_DESCRIPTION,
'EXAMPLES': _GA_EXAMPLES,
}
# pylint:enable=g-backslash-continuation
@classmethod
def Args(cls, parser):
"""Edit argparse.ArgumentParser for the command."""
parser.add_argument(
'path',
nargs='*',
help=(
'The path of objects and directories to list. The path must begin'
' with gs:// and is allowed to contain wildcard characters.'
),
)
parser.add_argument(
'-a',
'--all-versions',
action='store_true',
help=(
'Include noncurrent object versions in the listing. This flag is'
' typically only useful for buckets with [object'
' versioning](https://cloud.google.com/storage/docs/object-versioning)'
' enabled. If combined with the `--long` option, the metageneration'
' for each listed object is also included.'
),
)
parser.add_argument(
'-b',
'--buckets',
action='store_true',
help=(
'When given a bucket URL, only return buckets. Useful for avoiding'
' the rule that prints the top-level objects of buckets matching a'
' query. Typically used in combination with `--full` to get the'
' full metadata of buckets.'
),
)
parser.add_argument(
'-e',
'--etag',
action='store_true',
help='Include ETag metadata in listings that use the `--long` flag.',
)
parser.add_argument(
'--format',
help=(
'Use "gsutil" to get the style of the older gsutil CLI. (e.g.'
' "--format=gsutil"). Other format values (e.g. "json") do not'
' work. See different ls flags and commands for alternative'
' formatting.'
),
)
parser.add_argument(
'--readable-sizes',
action='store_true',
help=(
'When used with `--long`, print object sizes in human'
' readable format, such as 1 KiB, 234 MiB, or 2 GiB.'
),
)
parser.add_argument(
'-R',
'-r',
'--recursive',
action='store_true',
help=(
'Recursively list the contents of any directories that match the'
' path expression.'
),
)
output_styles = parser.add_group(mutex='True')
output_styles.add_argument(
'-l',
'--long',
action='store_true',
help='For objects only. List size in bytes, creation time, and URL.',
)
output_styles.add_argument(
'-L',
'--full',
action='store_true',
help='List all available metadata about items in rows.',
)
output_styles.add_argument(
'-j',
'--json',
action='store_true',
help='List all available metadata about items as a JSON dump.',
)
flags.add_additional_headers_flag(parser)
flags.add_encryption_flags(parser, command_only_reads_data=True)
flags.add_fetch_encrypted_object_hashes_flag(parser, is_list=True)
flags.add_read_paths_from_stdin_flag(parser)
flags.add_soft_delete_flags(parser)
if cls.ReleaseTrack() == base.ReleaseTrack.ALPHA:
flags.add_metadata_filter_flag(parser)
@classmethod
def _get_args(cls, args):
"""Get the args for the command."""
soft_deleted = getattr(args, 'soft_deleted', False)
all_versions = getattr(args, 'all_versions', False)
buckets = getattr(args, 'buckets', False)
return soft_deleted, all_versions, buckets
  def Run(self, args):
    """Command execution logic.

    Validates the provided URLs, resolves display options, and delegates the
    actual listing to LsExecutor. Output is printed by the executor; nothing
    is returned.

    Args:
      args: The parsed argparse namespace with the flags registered in Args.

    Raises:
      errors.InvalidUrlError: If a provided URL is not a cloud URL.
      errors.Error: If --metadata-filter is used with a non-GCS URL.
    """
    # Prepare encryption keys up front so encrypted object hashes can be
    # fetched during listing if requested.
    encryption_util.initialize_key_store(args)
    # We cannot do a backward incompatible change, and thus we are throwing a
    # warning here so that users in the long term can avoid using the flags
    # incorrectly.
    soft_deleted, all_versions, buckets = self._get_args(args)
    # We do not support operations for soft deleted resources combined with
    # --all-versions flag.
    if soft_deleted and all_versions:
      log.warning(
          'The --all-versions flag has no effect when used with the'
          ' --soft-deleted flag. When --soft-deleted is used, all'
          ' soft-deleted versions of the resource are returned.'
      )
    # When listing buckets, only --soft-deleted flag is allowed.
    if buckets and all_versions:
      log.warning('The --all-versions flag has no effect when listing buckets.')
    use_gsutil_style = flags.check_if_use_gsutil_style(args)
    found_non_default_provider = False
    if args.path or args.read_paths_from_stdin:
      paths = stdin_iterator.get_urls_iterable(
          args.path, args.read_paths_from_stdin, allow_empty=True
      )
    else:
      # No paths given: list the root of the default provider (GCS).
      paths = [cloud_api.DEFAULT_PROVIDER.value + '://']
    # --metadata-filter is only registered on the ALPHA track, hence getattr.
    metadata_filter = getattr(args, 'metadata_filter', None)
    storage_urls = [storage_url.storage_url_from_string(path) for path in paths]
    for url in storage_urls:
      if not isinstance(url, storage_url.CloudUrl):
        raise errors.InvalidUrlError(
            'Ls only works for cloud URLs. Error for: {}'.format(url.url_string)
        )
      if (
          metadata_filter is not None
          and url.scheme != cloud_api.DEFAULT_PROVIDER
      ):
        raise errors.Error('Metadata filter is only supported for GCS URLs.')
      # Enum identity comparison: schemes are ProviderPrefix enum members.
      if url.scheme is not cloud_api.DEFAULT_PROVIDER:
        found_non_default_provider = True
    # Mutually exclusive output styles (enforced by the parser's mutex group).
    if args.full:
      display_detail = list_util.DisplayDetail.FULL
    elif args.json:
      display_detail = list_util.DisplayDetail.JSON
    elif args.long:
      display_detail = list_util.DisplayDetail.LONG
    else:
      display_detail = list_util.DisplayDetail.SHORT
    ls_command_util.LsExecutor(
        storage_urls,
        buckets_flag=buckets,
        display_detail=display_detail,
        fetch_encrypted_object_hashes=args.fetch_encrypted_object_hashes,
        # --exhaustive is not available on all tracks, hence getattr.
        halt_on_empty_response=not getattr(args, 'exhaustive', False),
        include_etag=args.etag,
        include_managed_folders=True,
        next_page_token=getattr(args, 'next_page_token', None),
        object_state=flags.get_object_state_from_flags(args),
        readable_sizes=args.readable_sizes,
        recursion_flag=args.recursive,
        use_gsutil_style=use_gsutil_style,
        soft_deleted_buckets=buckets and soft_deleted,
        list_filter=metadata_filter,
    ).list_urls()
    if found_non_default_provider and args.full:
      # We do not guarantee full-style formatting for all metadata fields of
      # non-GCS providers. In this case, some data may be hidden.
      log.warning('For additional metadata information, please run ls --json.')
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class LsAlpha(Ls):
  """List Cloud Storage buckets and objects."""

  # Alpha track reuses the GA implementation and only extends the help text
  # with examples for alpha-only flags.
  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES + _ALPHA_EXAMPLES,
  }

View File

@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage managed folder commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.UniverseCompatible
class ManagedFolders(base.Group):
  """Manage Cloud Storage managed folders."""

  def Filter(self, context, args):
    """Runs before any command in this group; enforces an explicit project.

    Args:
      context: The current context (unused).
      args: The parsed argparse namespace.
    """
    # TODO(b/190541521): Determine if command group works with project number
    base.RequireProjectID(args)
    del context, args

View File

@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of managed-folders add-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import errors as api_errors
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class AddIamPolicyBinding(base.Command):
  """Add an IAM policy binding to a managed folder."""

  detailed_help = {
      'DESCRIPTION': """
Add an IAM policy binding to a managed folder. For more information, see [Cloud
Identity and Access
Management](https://cloud.google.com/storage/docs/access-control/iam).
""",
      'EXAMPLES': """
To grant a single role to a single principal for a managed folder `managed-folder` in a bucket `bucket`:

  $ {command} gs://bucket/managed-folder --member=user:john.doe@example.com --role=roles/storage.objectCreator

To make objects in `gs://bucket/managed-folder` publicly readable:

  $ {command} gs://bucket/managed-folder --member=allUsers --role=roles/storage.objectViewer

To specify a custom role for a principal on `gs://bucket/managed-folder`:

  $ {command} gs://bucket/managed-folder --member=user:john.doe@example.com --role=roles/customRoleName
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URL and the standard IAM binding flags."""
    parser.add_argument(
        'url', help='URL of the managed folder to add IAM policy binding to.'
    )
    iam_util.AddArgsForAddIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    """Fetches (or lazily creates) the folder, then applies the binding."""
    folder_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_folder_type(
        args.command_path, folder_url
    )

    client = api_factory.get_api(folder_url.scheme)
    storage_messages = apis.GetMessagesModule('storage', 'v1')
    try:
      existing_policy = client.get_managed_folder_iam_policy(
          folder_url.bucket_name, folder_url.resource_name
      )
    except api_errors.NotFoundError:
      # The managed folder does not exist yet: create it and start from an
      # empty policy so the new binding can still be applied.
      client.create_managed_folder(
          folder_url.bucket_name, folder_url.resource_name
      )
      existing_policy = storage_messages.Policy()

    return iam_command_util.add_iam_binding_to_resource(
        args,
        folder_url,
        storage_messages,
        existing_policy,
        set_iam_policy_task.SetManagedFolderIamPolicyTask,
    )

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of create command for making managed folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.core import log
@base.UniverseCompatible
class Create(base.Command):
  """Create managed folders."""

  detailed_help = {
      'DESCRIPTION': 'Create managed folders.',
      'EXAMPLES': """
The following command creates a managed folder called `folder/` in a bucket
named `my-bucket`:

  $ {command} gs://my-bucket/folder/
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URLs and shared headers flag."""
    parser.add_argument(
        'url', type=str, nargs='+', help='The URLs of the folders to create.'
    )
    flags.add_additional_headers_flag(parser)

  def Run(self, args):
    """Validates all URLs first, then issues one create call per folder."""
    validated_urls = []
    for raw_url in args.url:
      folder_url = storage_url.storage_url_from_string(raw_url)
      errors_util.raise_error_if_not_gcs_folder_type(
          args.command_path, folder_url
      )
      validated_urls.append(folder_url)

    for folder_url in validated_urls:
      api_client = api_factory.get_api(folder_url.scheme)
      log.status.Print('Creating {}...'.format(folder_url))
      api_client.create_managed_folder(
          folder_url.bucket_name, folder_url.resource_name
      )

View File

@@ -0,0 +1,71 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of command for deleting managed folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import folder_util
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import rm_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import task_graph_executor
@base.UniverseCompatible
class Delete(base.Command):
  """Delete managed folders."""

  detailed_help = {
      'DESCRIPTION': """Delete managed folders.""",
      'EXAMPLES': """
The following command deletes a managed folder named `folder` in a bucket
called `my-bucket`:

  $ {command} gs://my-bucket/folder/
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URLs and shared deletion flags."""
    parser.add_argument(
        'url',
        type=str,
        nargs='+',
        help='The URLs of the managed folders to delete.',
    )
    flags.add_additional_headers_flag(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    """Validates every URL, then removes the matched managed folders."""
    # Fail fast on any non-GCS-folder URL before touching the service.
    for raw_url in args.url:
      errors_util.raise_error_if_not_gcs_folder_type(
          args.command_path, storage_url.storage_url_from_string(raw_url)
      )

    expansion_iterator = name_expansion.NameExpansionIterator(
        args.url,
        managed_folder_setting=folder_util.ManagedFolderSetting.LIST_WITHOUT_OBJECTS,
        raise_error_for_unmatched_urls=True,
    )
    self.exit_code = rm_command_util.remove_managed_folders(
        args,
        expansion_iterator,
        task_status_queue=task_graph_executor.multiprocessing_context.Queue(),
        raise_error_for_unmatched_urls=True,
    )

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of command for describing managed folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
@base.UniverseCompatible
class Describe(base.Command):
  """Describe managed folders."""

  detailed_help = {
      'DESCRIPTION': """Describe managed folders.""",
      'EXAMPLES': """
The following command shows information about a managed folder named
`folder` in a bucket called `my-bucket`:

  $ {command} gs://my-bucket/folder/
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URL and display flags."""
    parser.add_argument(
        'url',
        type=str,
        help='The URL of the managed folder to describe.',
    )
    flags.add_additional_headers_flag(parser)
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Fetches the managed folder and formats it for display."""
    folder_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_folder_type(
        args.command_path, folder_url
    )

    managed_folder = api_factory.get_api(folder_url.scheme).get_managed_folder(
        folder_url.bucket_name,
        folder_url.resource_name,
    )
    return resource_util.get_display_dict_for_resource(
        managed_folder,
        full_resource_formatter.ManagedFolderDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of managed-folders get-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import storage_url
@base.UniverseCompatible
class GetIamPolicy(base.Command):
  """Get the IAM policy for a managed folder."""

  detailed_help = {
      'DESCRIPTION': """
Get the IAM policy for a managed folder. For more information, see [Cloud
Identity and Access
Management](https://cloud.google.com/storage/docs/access-control/iam).
""",
      'EXAMPLES': """
To get the IAM policy for a managed folder `managed-folder` in a bucket `bucket`:

  $ {command} gs://bucket/managed-folder/

To output the IAM policy for `gs://bucket/managed-folder` to a file:

  $ {command} gs://bucket/managed-folder/ > policy.txt
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional managed folder URL."""
    parser.add_argument(
        'url', help='URL of the managed folder to get the IAM policy of.'
    )

  def Run(self, args):
    """Validates the URL and returns the folder's current IAM policy."""
    folder_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_folder_type(
        args.command_path, folder_url
    )
    api_client = api_factory.get_api(folder_url.scheme)
    return api_client.get_managed_folder_iam_policy(
        folder_url.bucket_name, folder_url.resource_name
    )

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of command for listing managed folders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import folder_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import resource_util
@base.UniverseCompatible
class List(base.ListCommand):
  """List managed folders."""

  detailed_help = {
      'DESCRIPTION': """List managed folders.""",
      'EXAMPLES': """
The following command lists all managed folders in a bucket:

  $ {command} gs://my-bucket/
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URLs and display flags."""
    parser.add_argument(
        'url', type=str, nargs='+', help='The URLs of the resources to list.'
    )
    flags.add_additional_headers_flag(parser)
    flags.add_raw_display_flag(parser)

  def Run(self, args):
    """Yields display dicts for every managed folder under the given URLs.

    Note: this is a generator; validation runs when iteration begins.
    """
    validated_urls = []
    for raw_url in args.url:
      parsed_url = storage_url.storage_url_from_string(raw_url)
      errors_util.raise_error_if_not_gcs(args.command_path, parsed_url)
      validated_urls.append(parsed_url)

    for parsed_url in validated_urls:
      # '**' recursively matches managed folders at any depth under the URL.
      folder_iterator = wildcard_iterator.CloudWildcardIterator(
          parsed_url.join('**'),
          managed_folder_setting=folder_util.ManagedFolderSetting.LIST_WITHOUT_OBJECTS,
      )
      for folder_resource in folder_iterator:
        yield resource_util.get_display_dict_for_resource(
            folder_resource,
            full_resource_formatter.ManagedFolderDisplayTitlesAndDefaults,
            display_raw_keys=args.raw,
        )

View File

@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of managed-folders remove-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.UniverseCompatible
class RemoveIamPolicyBinding(base.Command):
  """Remove an IAM policy binding from a managed folder."""

  detailed_help = {
      'DESCRIPTION': """
Remove a policy binding from the IAM policy of a managed folder, given a managed folder
URL and the binding. For more information, see [Cloud
Identity and Access
Management](https://cloud.google.com/storage/docs/access-control/iam).
""",
      'EXAMPLES': """
To remove an IAM policy binding from the role of
roles/storage.objectCreator for the user john.doe@example.com on a managed folder `managed-folder` in a bucket `bucket`:

  $ {command} gs://bucket/managed-folder --member=user:john.doe@example.com --role=roles/storage.objectCreator
""",
  }

  @staticmethod
  def Args(parser):
    """Registers the positional URL and the standard IAM binding flags."""
    parser.add_argument(
        'url', help='URL of managed folder to remove IAM policy binding from.'
    )
    iam_util.AddArgsForRemoveIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    """Fetches the current policy and strips the requested binding."""
    folder_url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_gcs_folder_type(
        args.command_path, folder_url
    )

    api_client = api_factory.get_api(folder_url.scheme)
    current_policy = api_client.get_managed_folder_iam_policy(
        folder_url.bucket_name, folder_url.resource_name
    )
    return iam_command_util.remove_iam_binding_from_resource(
        args,
        folder_url,
        current_policy,
        set_iam_policy_task.SetManagedFolderIamPolicyTask,
    )

View File

@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of managed-folders set-iam-policy command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage.gcs_json import metadata_field_converters
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import folder_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
from googlecloudsdk.core import log
def _set_iam_policy_task_iterator(url_strings, policy):
  """Generates SetIamPolicyTask's for execution.

  First yields one task per managed folder matched by the given URLs, then
  creates managed folders for any literal (non-wildcard) URL that matched
  nothing and yields tasks for those as well.

  Args:
    url_strings: Iterable of managed folder URL strings.
    policy: The IAM policy to apply to each folder.

  Yields:
    set_iam_policy_task.SetManagedFolderIamPolicyTask instances.
  """
  match_tracker = collections.OrderedDict()
  expansion_iterator = name_expansion.NameExpansionIterator(
      url_strings,
      managed_folder_setting=folder_util.ManagedFolderSetting.LIST_WITHOUT_OBJECTS,
      raise_error_for_unmatched_urls=False,
      url_found_match_tracker=match_tracker,
  )
  for expansion_result in expansion_iterator:
    yield set_iam_policy_task.SetManagedFolderIamPolicyTask(
        expansion_result.resource.storage_url,
        policy,
    )

  # Handle URLs that matched no existing managed folder.
  for unmatched_url_string, found_match in match_tracker.items():
    if found_match:
      continue
    if wildcard_iterator.contains_wildcard(unmatched_url_string):
      # A wildcard that matched nothing is reported, not auto-created.
      log.warning(
          'Not creating managed folder for URL containing wildcard that did not'
          ' match any managed folders: '
          + unmatched_url_string
      )
      continue
    unmatched_url = storage_url.storage_url_from_string(unmatched_url_string)
    api_factory.get_api(unmatched_url.scheme).create_managed_folder(
        unmatched_url.bucket_name, unmatched_url.resource_name
    )
    yield set_iam_policy_task.SetManagedFolderIamPolicyTask(
        unmatched_url, policy
    )
@base.UniverseCompatible
class SetIamPolicy(base.Command):
  """Set the IAM policy for a managed folder."""

  detailed_help = {
      'DESCRIPTION': """
Set the IAM policy for a managed folder. For more information, see [Cloud
Identity and Access
Management](https://cloud.google.com/storage/docs/access-control/iam).
""",
      'EXAMPLES': """
To set the IAM policy in POLICY-FILE on a managed folder `managed-folder` in a bucket `bucket`:

  $ {command} gs://bucket/managed-folder POLICY-FILE
""",
  }

  @staticmethod
  def Args(parser):
    """Registers positional URLs, the etag flag, and the policy file arg."""
    parser.add_argument(
        'urls',
        nargs='+',
        help='URLs for managed folders to apply the IAM policy to.',
    )
    parser.add_argument(
        '-e',
        '--etag',
        help=(
            'Custom etag to set on IAM policy. API will reject etags that do'
            ' not match this value, making it useful as a precondition during'
            ' concurrent operations.'
        ),
    )
    iam_util.AddArgForPolicyFile(parser)
    flags.add_continue_on_error_flag(parser)

  def Run(self, args):
    """Validates URLs, parses the policy file, and executes the tasks."""
    for raw_url in args.urls:
      errors_util.raise_error_if_not_gcs_folder_type(
          args.command_path, storage_url.storage_url_from_string(raw_url)
      )

    parsed_policy = metadata_field_converters.process_iam_file(
        args.policy_file, custom_etag=args.etag
    )
    exit_code, output = iam_command_util.execute_set_iam_task_iterator(
        _set_iam_policy_task_iterator(args.urls, parsed_policy),
        args.continue_on_error,
    )
    self.exit_code = exit_code
    return output

View File

@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage management hubs commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.Deprecate(is_removed=False, warning='This command group is deprecated. '
                'Use `gcloud alpha storage intelligence-configs` command group '
                'instead.')
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of management hub in different universes.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class ManagementHub(base.Group):
  """Manage Cloud Storage Management Hub."""

View File

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of describe command for describing management hub."""
from googlecloudsdk.api_lib.storage import management_hub_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of management hub in different universes.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
  """Describes Management Hub."""

  detailed_help = {
      'DESCRIPTION': """
          Describe management hub for the organization, sub-folder
          or project.
      """,
      'EXAMPLES': """
          The following command describes management hub for the sub-folder with
          id `123456`. \n
            ${command} --sub-folder=123456
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the mutually exclusive resource-level flags."""
    flags.add_management_hub_level_flags(parser)

  def Run(self, args):
    """Fetches the management hub for whichever resource level was given."""
    client = management_hub_api.ManagementHubApi()
    # Guard-style dispatch: exactly one level flag is expected to be set.
    if args.sub_folder:
      return client.get_sub_folder_management_hub(args.sub_folder)
    if args.project:
      return client.get_project_management_hub(args.project)
    if args.organization:
      return client.get_organization_management_hub(args.organization)

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of disable command for disabling management hub."""
from googlecloudsdk.api_lib.storage import management_hub_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of management hub in different universes.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Disable(base.Command):
  """Disables Management Hub."""

  detailed_help = {
      'DESCRIPTION': """
          Disable management hub for the organization, sub-folder or project.
      """,
      'EXAMPLES': """
          The following command disables management hub for the project. \n
            ${command} --project=my-project
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers the mutually exclusive resource-level flags."""
    flags.add_management_hub_level_flags(parser)

  def Run(self, args):
    """Disables the hub at the selected resource level and reports success."""
    client = management_hub_api.ManagementHubApi()
    # Pick the API call and its target based on which level flag was set;
    # organization is the fallback when neither sub-folder nor project is.
    if args.sub_folder:
      disable, target = client.disable_sub_folder_management_hub, args.sub_folder
    elif args.project:
      disable, target = client.disable_project_management_hub, args.project
    else:
      disable, target = (
          client.disable_organization_management_hub,
          args.organization,
      )
    management_hub = disable(target)

    log.status.Print(
        'Successfully disabled management hub plan for {}.\n'.format(
            management_hub.name
        )
    )
    return management_hub

View File

@@ -0,0 +1,99 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of enable command for enabling management hub."""
from googlecloudsdk.api_lib.storage import management_hub_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of management hub in different universes.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Enable(base.Command):
  """Enables Management Hub."""

  detailed_help = {
      'DESCRIPTION': """
          Enable management hub plan for the organization, sub-folder or project
          along with filters.
      """,
      'EXAMPLES': """
          To remove buckets from the management hub plan, Use the following
          command with ``--exclude-bucket-ids'' and ``--exclude-bucket-regexes'' flags
          to specify list of bucket ids and bucket id regexes.,\n
            ${command} --organization=my-org --exclude-bucket-ids="my-bucket" --exclude-bucket-regexes="my-bucket-.*"

          To apply location based filters in the management hub plan, Use
          ``--include-locations'' or ``--exclude-locations'' flags to specify allowed
          list of locations or excluded list of locations. The following
          command updates management hub plan of sub-folder `123456` with the
          specified list of included locations.,\n
            ${command} --sub-folder=123456 --include-locations="us-east1","us-west1"
      """,
  }

  @classmethod
  def Args(cls, parser):
    """Registers resource-level and filter flags, preserving flag order."""
    parser.SetSortArgs(False)
    flags.add_management_hub_level_flags(parser)
    flags.add_management_hub_filter_flags(parser)

  def Run(self, args):
    """Enables the hub at the selected resource level and reports success.

    Args:
      args: The parsed argparse namespace with level and filter flags.

    Returns:
      The updated management hub resource returned by the API.
    """
    client = management_hub_api.ManagementHubApi()
    # The filter arguments are identical for every resource level, so build
    # them once instead of repeating the keyword list per branch.
    filter_kwargs = dict(
        inherit_from_parent=None,
        include_locations=args.include_locations,
        exclude_locations=args.exclude_locations,
        include_bucket_ids=args.include_bucket_ids,
        exclude_bucket_ids=args.exclude_bucket_ids,
        include_bucket_id_regexes=args.include_bucket_id_regexes,
        exclude_bucket_id_regexes=args.exclude_bucket_id_regexes,
    )
    # Same precedence as before: project, then sub-folder, then organization.
    if args.project:
      management_hub = client.update_project_management_hub(
          args.project, **filter_kwargs
      )
    elif args.sub_folder:
      management_hub = client.update_sub_folder_management_hub(
          args.sub_folder, **filter_kwargs
      )
    else:
      management_hub = client.update_organization_management_hub(
          args.organization, **filter_kwargs
      )

    log.status.Print(
        'Successfully enabled management hub plan for {}.\n'.format(
            management_hub.name
        )
    )
    return management_hub

View File

@@ -0,0 +1,131 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of update command for updating management hub."""
from googlecloudsdk.api_lib.storage import management_hub_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.core import log
# TODO: b/369949089 - Remove default universe flag after checking the
# availability of management hub in different universes.
@base.DefaultUniverseOnly
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Update(base.Command):
  """Updates Management Hub."""

  detailed_help = {
      'DESCRIPTION': """
          Update management hub plan for the organization, sub-folder
          or project.
      """,
      'EXAMPLES': """
          To limit buckets in the management hub plan, Use the following
          command with ``--include-bucket-ids'' and ``--include-bucket-regexes'' flags
          to specify list of bucket ids and bucket id regexes.,\n
            ${command} --organization=my-org --include-bucket-ids=my-bucket --include-bucket-regexes=my-bucket-.*

          To apply location based filters in the management hub plan, Use
          ``--include-locations'' or ``--exclude-locations'' flags to specify allowed
          list of locations or excluded list of locations. The following
          command updates management hub plan of sub-folder `123456` with the
          specified list of excluded locations.,\n
            ${command} --sub-folder=123456 --exclude-locations=us-east1,us-west1

          The following command updates management hub for the given project by
          inheriting existing management hub plan from the hierarchical parent
          resource.,\n
            ${command} --project=my-project --inherit-from-parent

          To clear included locations from the project management hub, Use the
          following command.,\n
            ${command} --project=my-project --include-locations=

          To clear excluded bucket ids from the project management hub and to
          replace existing excluded bucket ids regexes, Use the following
          command.,\n
            ${command} --project=my-project --exclude-bucket-id-regexes="test1*","test2*" --exclude-bucket-ids=""

          Alternatively, use the following command to do same operation since
          the absence of cloud storage bucket filter flags will be considered
          as empty list,\n
            ${command} --project=my-project --exclude-bucket-id-regexes="test1*","test2*"
      """,
  }

  @classmethod
  def Args(cls, parser):
    parser.SetSortArgs(False)
    flags.add_management_hub_level_flags(parser)
    # --inherit-from-parent and the explicit filter flags are mutually
    # exclusive: a plan is either inherited or configured directly.
    update_group = parser.add_group(
        category='UPDATE', mutex=True, required=True
    )
    update_group.add_argument(
        '--inherit-from-parent',
        action='store_true',
        help='Specifies management hub config to be inherited from parent.',
    )
    filters = update_group.add_group(category='FILTER')
    flags.add_management_hub_filter_flags(filters)

  def Run(self, args):
    """Updates the management hub plan for the targeted resource level.

    Returns:
      The updated ManagementHub resource returned by the API.
    """
    # All three resource levels accept the same update payload; assemble it
    # once instead of repeating the seven keyword arguments per branch.
    update_kwargs = dict(
        inherit_from_parent=args.inherit_from_parent,
        include_locations=args.include_locations,
        exclude_locations=args.exclude_locations,
        include_bucket_ids=args.include_bucket_ids,
        exclude_bucket_ids=args.exclude_bucket_ids,
        include_bucket_id_regexes=args.include_bucket_id_regexes,
        exclude_bucket_id_regexes=args.exclude_bucket_id_regexes,
    )
    client = management_hub_api.ManagementHubApi()
    if args.project:
      management_hub = client.update_project_management_hub(
          args.project, **update_kwargs
      )
    elif args.sub_folder:
      management_hub = client.update_sub_folder_management_hub(
          args.sub_folder, **update_kwargs
      )
    else:
      management_hub = client.update_organization_management_hub(
          args.organization, **update_kwargs
      )
    log.status.Print(
        'Successfully updated management hub plan for {}.\n'.format(
            management_hub.name
        )
    )
    return management_hub

View File

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like mv command for cloud storage providers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import cp_command_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
_COMMAND_DESCRIPTION = """
The mv command allows you to move data between your local file system and
the cloud, move data within the cloud, and move data between cloud storage
providers.
*Renaming Groups Of Objects*
You can use the mv command to rename all objects with a given prefix to
have a new prefix. For example, the following command renames all objects
under gs://my_bucket/oldprefix to be under gs://my_bucket/newprefix,
otherwise preserving the naming structure:
$ {command} gs://my_bucket/oldprefix gs://my_bucket/newprefix
Note that when using mv to rename groups of objects with a common prefix,
you cannot specify the source URL using wildcards; you must spell out the
complete name.
If you do a rename as specified above and you want to preserve ACLs.
*Non-Atomic Operation*
Unlike the case with many file systems, the mv command does not perform a
single atomic operation. Rather, it performs a copy from source to
destination followed by removing the source for each object.
A consequence of this is that, in addition to normal network and operation
charges, if you move a Nearline Storage, Coldline Storage, or Archive
Storage object, deletion and data retrieval charges apply.
See the documentation for pricing details.
"""
_GA_EXAMPLES = """
To move all objects from a bucket to a local directory you could use:
$ {command} gs://my_bucket/* dir
Similarly, to move all objects from a local directory to a bucket you
could use:
$ {command} ./dir gs://my_bucket
The following command renames all objects under gs://my_bucket/oldprefix
to be under gs://my_bucket/newprefix, otherwise preserving the naming
structure:
$ {command} gs://my_bucket/oldprefix gs://my_bucket/newprefix
"""
_ALPHA_EXAMPLES = """
The following command would clear all custom contexts from the destination
object while moving the object to the destination bucket.
$ {command} gs://my-bucket/object gs://destination-bucket/object \
--clear-custom-contexts
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Mv(base.Command):
  """Moves or renames objects."""

  detailed_help = {
      "DESCRIPTION": _COMMAND_DESCRIPTION,
      "EXAMPLES": _GA_EXAMPLES,
  }

  @classmethod
  def Args(cls, parser):
    cp_command_util.add_cp_and_mv_flags(parser, cls.ReleaseTrack())
    flags.add_per_object_retention_flags(parser)

  def Run(self, args):
    """Validates the sources, then delegates to cp with source deletion."""
    # Reject unsupported sources up front, before any copying starts.
    for source_string in args.source:
      source_url = storage_url.storage_url_from_string(source_string)
      is_cloud_bucket = (
          isinstance(source_url, storage_url.CloudUrl)
          and not source_url.is_object()
      )
      if is_cloud_bucket:
        raise errors.InvalidUrlError("Cannot mv buckets.")
      if source_url.is_stdio:
        raise errors.InvalidUrlError("Cannot mv stdin.")
    # Must copy children of prefixes and folders.
    args.recursive = True
    self.exit_code = cp_command_util.run_cp(args, delete_source=True)
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class MvAlpha(Mv):
  """Moves or renames objects."""

  # Same behavior as GA; only the help text gains the ALPHA examples.
  detailed_help = dict(
      DESCRIPTION=_COMMAND_DESCRIPTION,
      EXAMPLES=_GA_EXAMPLES + _ALPHA_EXAMPLES,
  )

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Storage objects commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
# Calliope command group anchor: subcommands (list, describe, compose, ...)
# live in sibling modules and are discovered automatically; this class only
# carries the group's docstring, which becomes its help text.
@base.UniverseCompatible
class Objects(base.Group):
  """Manage Cloud Storage objects."""

View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of objects add-iam-policy-binding command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import iam_command_util
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage.tasks import set_iam_policy_task
@base.Hidden
@base.UniverseCompatible
class AddIamPolicyBinding(base.Command):
  """Grant a principal access to an object."""

  detailed_help = {
      'DESCRIPTION':
          """
      Add an IAM policy binding to an object. For more information, see [Cloud
      Identity and Access
      Management](https://cloud.google.com/storage/docs/access-control/iam).
      """,
      'EXAMPLES':
          """
      To grant full control of OBJECT in BUCKET to the user
      john.doe@example.com:

        $ {command} gs://BUCKET/OBJECT --member=user:john.doe@example.com --role=roles/storage.legacyObjectOwner

      To make OBJECT publicly readable:

        $ {command} gs://BUCKET/OBJECT --member=AllUsers --role=roles/storage.legacyObjectReader
      """,
  }

  @staticmethod
  def Args(parser):
    parser.add_argument(
        'url', help='URL of bucket to add IAM policy binding to.')
    iam_util.AddArgsForAddIamPolicyBinding(parser, add_condition=True)

  def Run(self, args):
    """Fetches the current object policy and applies the new binding."""
    target = storage_url.storage_url_from_string(args.url)
    # Only GCS object URLs are supported by this command.
    errors_util.raise_error_if_not_cloud_object(args.command_path, target)
    errors_util.raise_error_if_not_gcs(args.command_path, target)
    api_client = api_factory.get_api(target.scheme)
    existing_policy = api_client.get_object_iam_policy(
        target.bucket_name, target.resource_name, target.generation)
    messages = apis.GetMessagesModule('storage', 'v1')
    return iam_command_util.add_iam_binding_to_resource(
        args,
        target,
        messages,
        existing_policy,
        set_iam_policy_task.SetObjectIamPolicyTask,
    )

View File

@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of objects compose command for Cloud Storage."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import encryption_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import name_expansion
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import user_request_args_factory
from googlecloudsdk.command_lib.storage.resources import resource_reference
from googlecloudsdk.command_lib.storage.tasks import compose_objects_task
_COMMAND_DESCRIPTION = """
{command} creates a new object whose content is the concatenation
of a given sequence of source objects in the same bucket.
For more information, please see:
[composite objects documentation](https://cloud.google.com/storage/docs/composite-objects).
There is a limit (currently 32) to the number of components
that can be composed in a single operation.
"""
_GA_EXAMPLES = """
The following command creates a new object `target.txt` by concatenating
`a.txt` and `b.txt`:
$ {command} gs://bucket/a.txt gs://bucket/b.txt gs://bucket/target.txt
"""
_ALPHA_EXAMPLES = """
Contexts are merged from source objects on the composed object by default.
However, the following command overwrites custom contexts on composed object:
$ {command} gs://bucket/a.txt gs://bucket/b.txt gs://bucket/target.txt \
--custom-contexts=key1=value1,key2=value2
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Compose(base.Command):
  """Concatenate a sequence of objects into a new composite object."""

  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument(
        'source',
        nargs='+',
        help=textwrap.dedent("""\
            The list of source objects that will be concatenated into a
            single object."""))
    parser.add_argument('destination', help='The destination object.')
    flags.add_additional_headers_flag(parser)
    flags.add_encryption_flags(parser, hidden=True)
    flags.add_per_object_retention_flags(parser)
    flags.add_precondition_flags(parser)
    # Context-setting flags are only exposed on the ALPHA track; subclasses
    # report their own release track here.
    if cls.ReleaseTrack() == base.ReleaseTrack.ALPHA:
      context_group = flags.get_object_context_group(parser)
      flags.add_object_context_setter_flags(context_group)

  def Run(self, args):
    """Validates the URLs and executes a single compose task.

    Raises:
      errors.Error: If sources and destination span providers, or the
        destination URL is version-specific.
    """
    encryption_util.initialize_key_store(args)
    if args.source:
      destination_resource = resource_reference.UnknownResource(
          storage_url.storage_url_from_string(args.destination))
      # Composing always creates a new latest generation, so a
      # version-specific destination can never be honored. This check does
      # not depend on any source, so perform it once up front.
      # (Typo fix: error message previously read "Verison-specific".)
      if (args.destination !=
          destination_resource.storage_url.versionless_url_string):
        raise errors.Error(
            'Version-specific URLs are not valid destinations because'
            ' composing always results in creating an object with the'
            ' latest generation.')
      for url_string in args.source:
        source_url = storage_url.storage_url_from_string(url_string)
        errors_util.raise_error_if_not_cloud_object(args.command_path,
                                                    source_url)
        if source_url.scheme is not destination_resource.storage_url.scheme:
          raise errors.Error('Composing across providers is not supported.')
    source_expansion_iterator = name_expansion.NameExpansionIterator(
        args.source,
        fields_scope=cloud_api.FieldsScope.NO_ACL,
        recursion_requested=name_expansion.RecursionSetting.NO)
    objects_to_compose = [
        source.resource for source in source_expansion_iterator
    ]
    user_request_args = (
        user_request_args_factory.get_user_request_args_from_command_args(
            args, metadata_type=user_request_args_factory.MetadataType.OBJECT))
    compose_objects_task.ComposeObjectsTask(
        objects_to_compose,
        destination_resource,
        print_status_message=True,
        user_request_args=user_request_args,
    ).execute()
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class ComposeAlpha(Compose):
  """Concatenate a sequence of objects into a new composite object."""

  # Same behavior as GA; only the help text gains the ALPHA examples.
  detailed_help = dict(
      DESCRIPTION=_COMMAND_DESCRIPTION,
      EXAMPLES=_GA_EXAMPLES + _ALPHA_EXAMPLES,
  )

View File

@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of objects describe command for getting info on objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import api_factory
from googlecloudsdk.api_lib.storage import cloud_api
from googlecloudsdk.api_lib.storage import request_config_factory
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.storage import encryption_util
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import errors_util
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.storage import wildcard_iterator
from googlecloudsdk.command_lib.storage.resources import contexts_only_formatter
from googlecloudsdk.command_lib.storage.resources import full_resource_formatter
from googlecloudsdk.command_lib.storage.resources import gsutil_json_printer
from googlecloudsdk.command_lib.storage.resources import resource_util
_COMMAND_DESCRIPTION = """
Describe a Cloud Storage object.
"""
_GA_EXAMPLES = """
Describe a Google Cloud Storage object with the url
"gs://bucket/my-object":
$ {command} gs://bucket/my-object
Describe object with JSON formatting, only returning the "name" key:
$ {command} gs://bucket/my-object --format="json(name)"
"""
_ALPHA_EXAMPLES = """
Describe only contexts attached to objects as key value pairs.
$ {command} gs://my-bucket/object --format=contextsonly
"""
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Describe(base.DescribeCommand):
  """Describe a Cloud Storage object."""

  detailed_help = {
      'DESCRIPTION': _COMMAND_DESCRIPTION,
      'EXAMPLES': _GA_EXAMPLES,
  }

  @classmethod
  def Args(cls, parser):
    parser.add_argument('url', help='Specifies URL of object to describe.')
    flags.add_additional_headers_flag(parser)
    flags.add_encryption_flags(parser, command_only_reads_data=True)
    flags.add_fetch_encrypted_object_hashes_flag(parser, is_list=False)
    flags.add_raw_display_flag(parser)
    flags.add_soft_deleted_flag(parser)
    gsutil_json_printer.GsutilJsonPrinter.Register()
    # The contextsonly printer is only available on ALPHA; cls.ReleaseTrack()
    # reflects the subclass, so this registers when DescribeAlpha parses args.
    if cls.ReleaseTrack() == base.ReleaseTrack.ALPHA:
      contexts_only_formatter.ContextsOnlyPrinter.Register()

  def Run(self, args):
    """Fetches metadata for a single object and formats it for display."""
    encryption_util.initialize_key_store(args)
    # Wildcards could expand to multiple objects, but describe returns
    # exactly one resource, so they are rejected outright.
    if wildcard_iterator.contains_wildcard(args.url):
      raise errors.InvalidUrlError(
          'Describe does not accept wildcards because it returns a single'
          ' resource. Please use the `ls` or `objects list` command for'
          ' retrieving multiple resources.')
    url = storage_url.storage_url_from_string(args.url)
    errors_util.raise_error_if_not_cloud_object(args.command_path, url)
    client = api_factory.get_api(url.scheme)
    resource = client.get_object_metadata(
        url.bucket_name,
        url.resource_name,
        generation=url.generation,
        fields_scope=cloud_api.FieldsScope.FULL,
        soft_deleted=args.soft_deleted,
    )
    # If the object was uploaded with a customer-supplied key, its md5/crc32c
    # are withheld from the first response. When the user asked for hashes,
    # the provider supports encryption, and a matching key hash is present,
    # re-fetch with a request config that carries the decryption key.
    if (args.fetch_encrypted_object_hashes and
        cloud_api.Capability.ENCRYPTION in client.capabilities and
        not (resource.md5_hash and resource.crc32c_hash) and
        resource.decryption_key_hash_sha256):
      request_config = request_config_factory.get_request_config(
          resource.storage_url,
          decryption_key_hash_sha256=resource.decryption_key_hash_sha256,
          error_on_missing_key=True)
      final_resource = client.get_object_metadata(
          resource.bucket,
          resource.name,
          fields_scope=cloud_api.FieldsScope.FULL,
          generation=resource.generation,
          request_config=request_config,
          soft_deleted=args.soft_deleted,
      )
    else:
      final_resource = resource
    # The contextsonly printer consumes the raw resource object directly
    # rather than the display dict used by every other format.
    if args.format == contexts_only_formatter.CONTEXT_ONLY_PRINTER_FORMAT:
      return final_resource
    return resource_util.get_display_dict_for_resource(
        final_resource,
        full_resource_formatter.ObjectDisplayTitlesAndDefaults,
        display_raw_keys=args.raw,
    )
@base.UniverseCompatible
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class DescribeAlpha(Describe):
  """Describe a Cloud Storage object."""

  # Same behavior as GA; only the help text gains the ALPHA examples.
  detailed_help = dict(
      DESCRIPTION=_COMMAND_DESCRIPTION,
      EXAMPLES=_GA_EXAMPLES + _ALPHA_EXAMPLES,
  )

Some files were not shown because too many files have changed in this diff Show More