feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manage crawler configurations in Data Catalog."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Crawlers(base.Group):
  """Manage crawler configurations in Data Catalog.

  Command group node: subcommands (create, delete, describe, list, update)
  are attached by calliope from the sibling declarative YAML specs.
  """

View File

@@ -0,0 +1,38 @@
# Declarative calliope spec for `gcloud alpha data-catalog crawlers create`.
# NOTE(review): the original was whitespace-flattened (every key at column 0),
# which mis-nests the spec; indentation restored with all values preserved.
- release_tracks: [ALPHA]

  help_text:
    brief: Create a new Data Catalog crawler.
    description: Create a new Data Catalog crawler.
    examples: |
      Create a project-scoped crawler:

        $ {command} crawler1 \
            --run-option=MANUAL --display-name=my-crawler --crawl-scope=PROJECT

      Create a bucket-scoped crawler that runs weekly:

        $ {command} crawler1 \
            --display-name=my-crawler --crawl-scope=BUCKET \
            --buckets="gs://bucket1,gs://bucket2,gs://bucket3" \
            --run-option=SCHEDULED --run-schedule=WEEKLY

  request:
    collection: datacatalog.projects.crawlers
    api_version: v1alpha3
    # Hooks from crawlers.util run in order: validate the scope/scheduling
    # flag combinations first, then parse them into the request message.
    modify_request_hooks:
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:ValidateScopeFlagsForCreate
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:ValidateSchedulingFlagsForCreate
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:ParseScopeFlagsForCreate
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:ParseSchedulingFlagsForCreate

  arguments:
    resource:
      help_text: The crawler to create.
      spec: !REF googlecloudsdk.command_lib.data_catalog.resources:crawler
    params:
    - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.description
    - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.display_name
    - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.bundle_specs
    # Scope and scheduling flags are contributed by a hook rather than
    # declared inline.
    additional_arguments_hook: |-
      googlecloudsdk.command_lib.data_catalog.crawlers.flags:AddCrawlerScopeAndSchedulingFlagsForCreate

View File

@@ -0,0 +1,13 @@
# Declarative calliope spec for `gcloud alpha data-catalog crawlers delete`.
# NOTE(review): whitespace-flattened original; indentation restored,
# all values preserved.
- release_tracks: [ALPHA]

  help_text:
    brief: Delete a Data Catalog crawler.
    description: Delete a Data Catalog crawler.

  request:
    collection: datacatalog.projects.crawlers
    api_version: v1alpha3

  arguments:
    resource:
      help_text: The crawler to delete.
      spec: !REF googlecloudsdk.command_lib.data_catalog.resources:crawler

View File

@@ -0,0 +1,13 @@
# Declarative calliope spec for `gcloud alpha data-catalog crawlers describe`.
# NOTE(review): whitespace-flattened original; indentation restored,
# all values preserved.
- release_tracks: [ALPHA]

  help_text:
    brief: Describe a Data Catalog crawler.
    description: Describe a Data Catalog crawler.

  request:
    collection: datacatalog.projects.crawlers
    api_version: v1alpha3

  arguments:
    resource:
      help_text: The crawler to describe.
      spec: !REF googlecloudsdk.command_lib.data_catalog.resources:crawler

View File

@@ -0,0 +1,16 @@
# Declarative calliope spec for `gcloud alpha data-catalog crawlers list`.
# NOTE(review): whitespace-flattened original; indentation restored,
# all values preserved.
- release_tracks: [ALPHA]

  help_text:
    brief: List Data Catalog crawlers.
    description: List Data Catalog crawlers.

  request:
    collection: datacatalog.projects.crawlers
    api_version: v1alpha3

  response:
    # Each listed crawler is identified by its `name` field.
    id_field: name

  arguments:
    # Listing is parented on a project, not an individual crawler.
    resource:
      help_text: The project in which to list crawlers.
      spec: !REF googlecloudsdk.command_lib.data_catalog.resources:project

View File

@@ -0,0 +1,57 @@
# Declarative calliope spec for `gcloud alpha data-catalog crawlers update`.
# NOTE(review): whitespace-flattened original; indentation restored, all
# values preserved. Also fixes the `bucker1` typo in the examples.
- release_tracks: [ALPHA]

  help_text:
    brief: Update a Data Catalog crawler.
    description: Update a Data Catalog crawler.
    examples: |
      Update the display name of a crawler:

        $ {command} crawler1 --display-name="my-crawler"

      Add buckets to a BUCKET-scoped crawler:

        $ {command} crawler1 --add-buckets=bucket1,bucket2

      Remove buckets from a BUCKET-scoped crawler:

        $ {command} crawler1 --remove-buckets=bucket1,bucket2

      Overwrite buckets in a BUCKET-scoped crawler with bucket1, bucket2:

        $ {command} crawler1 --clear-buckets --add-buckets=bucket1,bucket2

      Change the schedule of a manual crawler to run daily:

        $ {command} crawler1 --run-option=SCHEDULED --run-schedule=DAILY

  request:
    collection: datacatalog.projects.crawlers
    api_version: v1alpha3
    modify_request_hooks:
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:ValidateAndParseFlagsForUpdate
    - googlecloudsdk.command_lib.data_catalog.crawlers.util:SetUpdateMask

  arguments:
    resource:
      help_text: The crawler to update.
      spec: !REF googlecloudsdk.command_lib.data_catalog.resources:crawler
    params:
    - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.description
    - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.display_name
    - group:
        help_text: |-
          Update bundling specifications. Bundling specifications direct the crawler to bundle files
          into filesets based on the patterns provided:
        params:
        - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.add_bundle_specs
        # remove-* and clear-* are mutually exclusive with each other,
        # but either may be combined with add-*.
        - group:
            mutex: true
            params:
            - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.remove_bundle_specs
            - !REF googlecloudsdk.command_lib.data_catalog.crawlers.flags:crawler.clear_bundle_specs
    additional_arguments_hook: |-
      googlecloudsdk.command_lib.data_catalog.crawlers.flags:AddCrawlerScopeAndSchedulingFlagsForUpdate

  update:
    # The SetUpdateMask hook above computes the field mask explicitly,
    # so calliope's automatic field-mask generation is disabled.
    disable_auto_field_mask: true