feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,176 @@
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
info_type:
api_field: googlePrivacyDlpV2InspectContentRequest.inspectConfig.infoTypes
metavar: 'infotype'
arg_name: info-types
type: googlecloudsdk.command_lib.dlp.hooks:InfoType
help_text: |
Which infoTypes to scan input for. The values must correspond to
infoType values found in the documentation. For more information about
valid infoTypes, see the [infoTypes reference](https://cloud.google.com/dlp/docs/infotypes-reference).
min_likelihood:
arg_name: min-likelihood
api_field: googlePrivacyDlpV2InspectContentRequest.inspectConfig.minLikelihood
default: possible
help_text: |
Only return findings equal to or above this threshold.
choices:
- arg_value: very-unlikely
enum_value: VERY_UNLIKELY
- arg_value: unlikely
enum_value: UNLIKELY
- arg_value: possible
enum_value: POSSIBLE
- arg_value: likely
enum_value: LIKELY
- arg_value: very-likely
enum_value: VERY_LIKELY
include_quote:
api_field: googlePrivacyDlpV2InspectContentRequest.inspectConfig.includeQuote
arg_name: include-quote
type: bool
default: false
help_text: |
If True, a contextual quote from the data that triggered a
finding is included in the response. Even if the content is not
text, it may be converted to a textual representation in the
response. For example, given the input value 'My phone number is
(415) 555-0890' and a search for the infoType PHONE_NUMBER, the
contextual quote would be '(415) 555-0890'.
exclude_info_types:
api_field: googlePrivacyDlpV2InspectContentRequest.inspectConfig.excludeInfoTypes
arg_name: exclude-info-types
type: bool
default: false
help_text: |
Whether or not to exclude type information of the findings.
Type information is included by default.
max_findings:
api_field: googlePrivacyDlpV2InspectContentRequest.inspectConfig.limits.maxFindingsPerRequest
arg_name: max-findings
type: int
help_text: |
Maximum number of findings that will be returned per execution.
If not specified, no limits are applied.
datastore_kind:
arg_name: datastore-kind
api_field: googlePrivacyDlpV2CreateJobTriggerRequest.jobTrigger.inspectJob.storageConfig.datastoreOptions
type: googlecloudsdk.command_lib.dlp.hooks:DatastoreInputOptions
is_positional: false
help_text: |
The Cloud Datastore kind or table to scan. The kind can optionally be
prefixed with the Cloud Datastore namespace ID - for example
`namespace:example-kind` or simply `example-kind`.
gcs_path:
arg_name: path
api_field: googlePrivacyDlpV2CreateJobTriggerRequest.jobTrigger.inspectJob.storageConfig.cloudStorageOptions
type: googlecloudsdk.command_lib.dlp.hooks:GcsInputOptions
is_positional: false
help_text: |
Google Cloud Storage URL to scan for files with optional wildcard
character (```*```) -- for example, gs://my-bucket/```*```.
big_query_table:
arg_name: input-table
api_field: googlePrivacyDlpV2CreateJobTriggerRequest.jobTrigger.inspectJob.storageConfig.bigQueryOptions
type: googlecloudsdk.command_lib.dlp.hooks:BigQueryInputOptions
is_positional: false
help_text: |
BigQuery table to scan. BigQuery tables are uniquely identified by
their project_id, dataset_id, and table_id in the format
`<project_id>.<dataset_id>.<table_id>`.
min_time:
arg_name: min-time
api_field: googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.storageConfig.timespanConfig.startTime
type: googlecloudsdk.calliope.arg_parsers:Datetime.Parse
processor: googlecloudsdk.command_lib.dlp.hooks:DlpTimeStamp
is_positional: false
help_text: |
Scan will include items in repository whose age is >= min-time
and <= max-time.
If max-time is omitted then there is no maximum time limit.
See $ gcloud topic datetimes for information on time formats.
max_time:
arg_name: max-time
api_field: googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.storageConfig.timespanConfig.endTime
type: googlecloudsdk.calliope.arg_parsers:Datetime.Parse
processor: googlecloudsdk.command_lib.dlp.hooks:DlpTimeStamp
is_positional: false
help_text: |
Scan will include items in repository whose age is >= min-time
and <= max-time.
If max-time is omitted then there is no maximum time limit.
See $ gcloud topic datetimes for information on time formats.
job_id:
arg_name: job-id
api_field: googlePrivacyDlpV2CreateDlpJobRequest.jobId
is_positional: false
required: false
help_text: |
Optional job ID to use for the created job. If not provided, a job
ID will automatically be generated. Must be unique within the project.
The job ID can contain uppercase and lowercase letters, numbers, and
hyphens; that is, it must match the regular expression:
`[a-zA-Z\\d-]+`. The maximum length is 100 characters. Can be empty to
allow the system to generate one.
max_item_findings:
arg_name: max-findings-per-item
api_field: googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.inspectConfig.limits.maxFindingsPerItem
type: int
is_positional: false
required: false
help_text: |
Maximum number of findings that will be returned for each item scanned.
If not specified, no limits are applied.
jobs_output_group:
group:
mutex: true
required: false
params:
- arg_name: output-topics
api_field: googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.actions
type: googlecloudsdk.command_lib.dlp.hooks:PubSubTopicAction
help_text: |
Publishes the results of a Cloud DLP job to one or more Cloud Pub/Sub
topics.
Note: The topic must have given publishing access rights to the DLP
API service account executing the Cloud DLP job.
- arg_name: output-table
repeated: false
api_field: googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.actions
type: googlecloudsdk.command_lib.dlp.hooks:BigQueryTableAction
help_text: |
Publishes results of a Cloud DLP job to a BigQuery table.
BigQuery tables are uniquely identified by their project_id,
dataset_id, and table_id in the format
`<project_id>.<dataset_id>.<table_id>` or `<project_id>.<dataset_id>`. If
no table_id is specified, DLP will create a table for you.

View File

@@ -0,0 +1,440 @@
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Argument processors for DLP surface arguments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.util.apis import arg_utils
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.util import files
from googlecloudsdk.core.util import times
import six
# DLP API name and version used to resolve message classes.
_DLP_API = 'dlp'
_DLP_API_VERSION = 'v2'

# Guidance appended to color-parsing error messages raised from
# _ValidateAndParseColors / _ConvertColorValue.
_COLOR_SPEC_ERROR_SUFFIX = """\
Colors should be specified as a string of `r,g,b` float values in the interval
[0,1] representing the amount of red, green, and blue in the color,
respectively. For example, `black = 0,0,0`, `red = 1.0,0,0`,
`white = 1.0,1.0,1.0`, and so on.
"""

# Maps an image file extension to the ByteContentItem type choice name used
# by GetImageFromFile. 'n_a' is the sentinel for a path with no extension.
VALID_IMAGE_EXTENSIONS = {
    'n_a': 'IMAGE',
    '.png': 'IMAGE_PNG',
    '.jpeg': 'IMAGE_JPEG',
    '.jpg': 'IMAGE_JPEG',
    '.svg': 'IMAGE_SVG',
    '.bmp': 'IMAGE_BMP'
}
class ImageFileError(exceptions.Error):
  """Raised when an image file is missing or improperly formatted."""
class RedactColorError(exceptions.Error):
  """Raised when a redact color value is missing or improperly formatted."""
class BigQueryTableNameError(exceptions.Error):
  """Raised when a BigQuery table name is improperly formatted."""
# Misc/Helper Functions
def _GetMessageClass(msg_type_name):
  """Look up a DLP v2 API message class by its type name.

  Args:
    msg_type_name: str, name of a message type in the DLP v2 API, e.g.
      'GooglePrivacyDlpV2InfoType'.

  Returns:
    The apitools message class for msg_type_name.
  """
  messages_module = apis.GetMessagesModule(_DLP_API, _DLP_API_VERSION)
  return getattr(messages_module, msg_type_name)
def _ValidateExtension(extension):
  """Check whether an image file name extension is acceptable.

  A missing (falsy) extension is acceptable; a present extension must be one
  of the keys of VALID_IMAGE_EXTENSIONS.
  """
  return (not extension) or (extension in VALID_IMAGE_EXTENSIONS)
def _ConvertColorValue(color):
"""Convert color value(color) to a float or raise value error."""
j = float(color)
if j > 1.0 or j < 0.0:
raise ValueError('Invalid Color.')
return j
def _ValidateAndParseColors(value):
  """Parse a comma-separated `r,g,b` string into three float components.

  Args:
    value: str, e.g. '1.0,0,0'.

  Returns:
    list of 3 floats, each in [0.0, 1.0].

  Raises:
    RedactColorError: if `value` does not contain exactly three valid
      color components.
  """
  components = value.split(',')
  # Guard clause: reject anything that is not exactly three components.
  if len(components) != 3:
    raise RedactColorError('You must specify exactly 3 color values [{}]. '
                           '{}'.format(value, _COLOR_SPEC_ERROR_SUFFIX))
  try:
    return [_ConvertColorValue(component) for component in components]
  except ValueError:
    raise RedactColorError('Invalid Color Value(s) [{}]. '
                           '{}'.format(value, _COLOR_SPEC_ERROR_SUFFIX))
def _ValidateAndParseInputTableName(table_name):
  """Split a fully-qualified BigQuery table name into its components.

  Args:
    table_name: str, in the form `<project_id>.<dataset_id>.<table_id>`.

  Returns:
    list of str, [project_id, dataset_id, table_id].

  Raises:
    BigQueryTableNameError: if table_name is improperly formatted.
  """
  parts = table_name.split('.')
  if len(parts) != 3:
    raise BigQueryTableNameError(
        'Invalid BigQuery table name [{}]. BigQuery tables are uniquely '
        'identified by their project_id, dataset_id, and table_id in the '
        'format `<project_id>.<dataset_id>.<table_id>`.'.format(table_name))
  return parts
def _ValidateAndParseOutputTableName(table_name):
  """Validate a BigQuery output table name and return its parsed components.

  Args:
    table_name: str, in the form `<project_id>.<dataset_id>.<table_id>` or
      `<project_id>.<dataset_id>` (table_id is optional for output tables).

  Returns:
    list of str, the 2 or 3 dot-separated name components.

  Raises:
    BigQueryTableNameError: if table_name is improperly formatted.
  """
  # Table id is optional for output tables, so 2 or 3 components are valid.
  name_parts = table_name.split('.')
  if len(name_parts) not in (2, 3):
    # Message previously read "and or table_id" and was truncated after the
    # second format example; fixed here.
    raise BigQueryTableNameError(
        'Invalid BigQuery output table name [{}]. BigQuery tables are '
        'uniquely identified by their project_id, dataset_id, and optionally '
        'table_id in the format `<project_id>.<dataset_id>.<table_id>` or '
        '`<project_id>.<dataset_id>`.'.format(table_name))
  return name_parts
# Types
def InfoType(value):
  """Build a GooglePrivacyDlpV2InfoType message for a parsed flag value.

  Serves as the element type for the request's repeated infoTypes field.
  """
  info_type_cls = _GetMessageClass('GooglePrivacyDlpV2InfoType')
  return info_type_cls(name=value)
def PrivacyField(value):
  """Build a GooglePrivacyDlpV2FieldId message naming the given field."""
  field_cls = _GetMessageClass('GooglePrivacyDlpV2FieldId')
  return field_cls(name=value)
def BigQueryInputOptions(table_name):
  """Convert a BigQuery table name into GooglePrivacyDlpV2BigQueryOptions.

  Creates BigQuery input options for a job trigger.

  Args:
    table_name: str, BigQuery table name to create input options from, in
      the form `<project_id>.<dataset_id>.<table_id>`.

  Returns:
    GooglePrivacyDlpV2BigQueryOptions, input options for the job trigger.

  Raises:
    BigQueryTableNameError: if table_name is improperly formatted.
  """
  project_id, dataset_id, table_id = _ValidateAndParseInputTableName(
      table_name)
  options_cls = _GetMessageClass('GooglePrivacyDlpV2BigQueryOptions')
  table_cls = _GetMessageClass('GooglePrivacyDlpV2BigQueryTable')
  table_ref = table_cls(
      datasetId=dataset_id, projectId=project_id, tableId=table_id)
  return options_cls(tableReference=table_ref)
def GcsInputOptions(url):
  """Build GooglePrivacyDlpV2CloudStorageOptions for the given GCS url."""
  options_cls = _GetMessageClass('GooglePrivacyDlpV2CloudStorageOptions')
  file_set_cls = _GetMessageClass('GooglePrivacyDlpV2FileSet')
  return options_cls(fileSet=file_set_cls(url=url))
def DatastoreInputOptions(table_name):
  """Convert a Datastore arg value into GooglePrivacyDlpV2DatastoreOptions.

  Creates Datastore input options for a job trigger from a Datastore kind
  name, optionally prefixed with a namespace ID.

  Args:
    table_name: str, Datastore kind to create options from, in the form
      `namespace:example-kind` or simply `example-kind`.

  Returns:
    GooglePrivacyDlpV2DatastoreOptions, input options for the job trigger.
  """
  # NOTE: the original docstring claimed this returns a
  # GooglePrivacyDlpV2Action; it actually returns DatastoreOptions.
  options_cls = _GetMessageClass('GooglePrivacyDlpV2DatastoreOptions')
  kind_cls = _GetMessageClass('GooglePrivacyDlpV2KindExpression')
  partition_cls = _GetMessageClass('GooglePrivacyDlpV2PartitionId')
  project = properties.VALUES.core.project.Get(required=True)
  split_name = table_name.split(':')
  if len(split_name) == 2:
    namespace, kind_name = split_name
    kind_exp = kind_cls(name=kind_name)
    partition = partition_cls(namespaceId=namespace, projectId=project)
  else:
    # No single ':' separator: treat the whole value as the kind and use the
    # default namespace.
    kind_exp = kind_cls(name=table_name)
    partition = partition_cls(projectId=project)
  return options_cls(kind=kind_exp, partitionId=partition)
def PubSubTopicAction(topic):
  """Build a GooglePrivacyDlpV2Action publishing to the given Pub/Sub topic."""
  action_cls = _GetMessageClass('GooglePrivacyDlpV2Action')
  pubsub_cls = _GetMessageClass('GooglePrivacyDlpV2PublishToPubSub')
  return action_cls(pubSub=pubsub_cls(topic=topic))
def BigQueryTableAction(table_name):
  """Convert a BigQuery table name into a save-findings DLP action.

  Creates a BigQuery output action for a job trigger.

  Args:
    table_name: str, BigQuery table name to create the action from, in the
      form `<project_id>.<dataset_id>.<table_id>` or
      `<project_id>.<dataset_id>`.

  Returns:
    GooglePrivacyDlpV2Action, output action for the job trigger.

  Raises:
    BigQueryTableNameError: if table_name is improperly formatted.
  """
  name_parts = _ValidateAndParseOutputTableName(table_name)
  project_id, dataset_id = name_parts[0], name_parts[1]
  # table_id is optional for output tables; empty means DLP creates one.
  table_id = name_parts[2] if len(name_parts) == 3 else ''
  action_cls = _GetMessageClass('GooglePrivacyDlpV2Action')
  save_findings_cls = _GetMessageClass('GooglePrivacyDlpV2SaveFindings')
  output_config_cls = _GetMessageClass('GooglePrivacyDlpV2OutputStorageConfig')
  table_cls = _GetMessageClass('GooglePrivacyDlpV2BigQueryTable')
  table_ref = table_cls(
      datasetId=dataset_id, projectId=project_id, tableId=table_id)
  return action_cls(
      saveFindings=save_findings_cls(
          outputConfig=output_config_cls(table=table_ref)))
def DlpTimeStamp(value):
  """Format a parsed datetime as a UTC timestamp string for the DLP API."""
  return times.FormatDateTime(value, tzinfo=times.UTC)
# Request Hooks
def SetRequestParent(ref, args, request):
  """Set the parent field on a DlpXXXRequest to the target project.

  Uses --project when supplied, otherwise the active core/project property.

  Args:
    ref: resource reference, unused.
    args: the parsed command-line arguments.
    request: the request message to modify.

  Returns:
    The modified request.
  """
  del ref
  project = args.project or properties.VALUES.core.project.Get(required=True)
  project_ref = resources.REGISTRY.Parse(project, collection='dlp.projects')
  request.parent = project_ref.RelativeName()
  return request
def SetCancelRequestHook(ref, args, request):
  """Attach an empty cancel message to a DlpProjectsDlpJobsCancelRequest."""
  del ref, args
  cancel_cls = _GetMessageClass('GooglePrivacyDlpV2CancelDlpJobRequest')
  request.googlePrivacyDlpV2CancelDlpJobRequest = cancel_cls()
  return request
def UpdateDataStoreOptions(ref, args, request):
  """Override partitionId.projectId on DatastoreOptions with --project.

  Args:
    ref: resource reference, unused.
    args: the parsed command-line arguments.
    request: the create-job-trigger request message to modify.

  Returns:
    The modified request.
  """
  del ref
  options = (
      request.googlePrivacyDlpV2CreateJobTriggerRequest.jobTrigger.inspectJob
      .storageConfig.datastoreOptions)
  # Only rewrite the partition when Datastore options exist and --project
  # was explicitly provided.
  if args.project and options:
    options.partitionId.projectId = args.project
  return request
# Required because bigQueryOptions are created by a separate flag, so
# identifyingFields cannot be set until just before the request is sent.
def UpdateIdentifyingFields(ref, args, request):
  """Populate bigQueryOptions.identifyingFields from --identifying-fields.

  Args:
    ref: resource reference, unused.
    args: the parsed command-line arguments.
    request: the create-dlp-job request message to modify.

  Returns:
    The modified request.
  """
  del ref
  options = (
      request.googlePrivacyDlpV2CreateDlpJobRequest.inspectJob.storageConfig
      .bigQueryOptions)
  if options and args.identifying_fields:
    field_cls = _GetMessageClass('GooglePrivacyDlpV2FieldId')
    options.identifyingFields = [
        field_cls(name=field_name) for field_name in args.identifying_fields
    ]
  return request
def SetOrderByFromSortBy(ref, args, request):
  """Set the orderBy attribute on a request from the common --sort-by flag.

  Each field in args.sort_by is translated to API order-by syntax: a leading
  `~` marks descending order (`~field` -> `field desc`), otherwise ascending
  (`field` -> `field asc`).

  Args:
    ref: resource reference, unused.
    args: the parsed command-line arguments.
    request: the list request message to modify.

  Returns:
    The modified request.
  """
  del ref
  if args.sort_by:
    order_by_fields = []
    for field in args.sort_by:
      if field.startswith('~'):
        # Use a slice rather than lstrip('~'): lstrip strips *all* leading
        # tildes, not just the single descending-order prefix marker.
        field = field[1:] + ' desc'
      else:
        field += ' asc'
      order_by_fields.append(field)
    request.orderBy = ','.join(order_by_fields)
  return request
# Argument Processors
def ExtractBqTableFromInputConfig(value):
  """Extracts and returns BigQueryTable from parsed BigQueryOptions message."""
  # NOTE(review): presumably the consuming api_field expects the bare
  # GooglePrivacyDlpV2BigQueryTable rather than the whole options message --
  # confirm against the flag definition that uses this processor.
  return value.tableReference
def GetReplaceTextTransform(value):
  """Build a ReplaceValueConfig that substitutes findings with `value`."""
  config_cls = _GetMessageClass('GooglePrivacyDlpV2ReplaceValueConfig')
  value_cls = _GetMessageClass('GooglePrivacyDlpV2Value')
  return config_cls(newValue=value_cls(stringValue=value))
def GetInfoTypeTransform(value):
  """Build an empty ReplaceWithInfoTypeConfig; the flag value is ignored."""
  del value
  config_cls = _GetMessageClass(
      'GooglePrivacyDlpV2ReplaceWithInfoTypeConfig')
  return config_cls()
def GetRedactTransform(value):
  """Build an empty RedactConfig; the flag value is ignored."""
  del value
  config_cls = _GetMessageClass('GooglePrivacyDlpV2RedactConfig')
  return config_cls()
def GetImageFromFile(path):
  """Build a GooglePrivacyDlpV2ByteContentItem message from a file path.

  Attempts to derive message.type from the file extension (if present).

  Args:
    path: str, the image path arg given to the command.

  Raises:
    ImageFileError: if the image path does not exist or does not have a
      valid extension.

  Returns:
    GooglePrivacyDlpV2ByteContentItem: a message containing the image data
    for the API to analyze.
  """
  # 'n_a' is the sentinel key for "no extension" in VALID_IMAGE_EXTENSIONS.
  extension = os.path.splitext(path)[-1].lower() or 'n_a'
  item_cls = _GetMessageClass('GooglePrivacyDlpV2ByteContentItem')
  if not (os.path.isfile(path) and _ValidateExtension(extension)):
    raise ImageFileError(
        'The image path [{}] does not exist or has an invalid extension. '
        'Must be one of [jpg, jpeg, png, bmp or svg]. '
        'Please double-check your input and try again.'.format(path))
  type_enum = arg_utils.ChoiceToEnum(VALID_IMAGE_EXTENSIONS[extension],
                                     item_cls.TypeValueValuesEnum)
  return item_cls(data=files.ReadBinaryFileContents(path), type=type_enum)
def GetRedactColorFromString(color_string):
  """Convert a color string into a GooglePrivacyDlpV2Color message.

  Creates the color message used for image redaction.

  Args:
    color_string: str, red, green and blue saturation values as floats
      between 0.0 and 1.0, e.g. `black = 0,0,0`, `red = 1.0,0,0`,
      `white = 1.0,1.0,1.0`.

  Returns:
    GooglePrivacyDlpV2Color, the color message.

  Raises:
    RedactColorError: if color_string is improperly formatted.
  """
  red, green, blue = _ValidateAndParseColors(color_string)
  color_cls = _GetMessageClass('GooglePrivacyDlpV2Color')
  return color_cls(red=red, green=green, blue=blue)
def GetJobScheduleDurationString(value):
  """Render a duration in seconds in the API's `<seconds>s` string format."""
  return six.text_type(value) + 's'
# Additional Arguments Hook
def GetIdentifyingFieldsArg():
  """Build the --identifying-fields argument for BigQuery table scans."""
  identifying_fields_arg = base.Argument(
      '--identifying-fields',
      metavar='IDENTIFYING_FIELDS',
      type=arg_parsers.ArgList(),
      help=('Comma separated list of references to field names uniquely '
            'identifying rows within the BigQuery table. Nested fields '
            'in the format `person.birthdate.year` are allowed.'))
  return [identifying_fields_arg]
def _PossiblyWriteRedactedResponseToOutputFile(value, parsed_args):
  """Write redacted contents to --output-file, when one was given.

  Args:
    value: bytes, the redacted payload to write.
    parsed_args: the parsed command-line arguments; only output_file is read.
  """
  output_path = parsed_args.output_file
  if not output_path:
    return
  with files.BinaryFileWriter(output_path) as out_file:
    out_file.write(value)
  log.status.Print('The redacted contents can be viewed in [{}]'.format(
      output_path))
def PossiblyWriteRedactedTextResponseToOutputFile(response, parsed_args):
  """Write the contents of the redacted text file to parsed_args.output_file."""
  # Response hook: writes response.item.value when --output-file was given,
  # then passes the response through unchanged.
  _PossiblyWriteRedactedResponseToOutputFile(response.item.value, parsed_args)
  return response
def PossiblyWriteRedactedImageResponseToOutputFile(response, parsed_args):
  """Write the redacted image to parsed_args.output_file."""
  # Response hook: writes response.redactedImage when --output-file was
  # given, then passes the response through unchanged.
  _PossiblyWriteRedactedResponseToOutputFile(response.redactedImage,
                                             parsed_args)
  return response
def AddOutputFileFlag():
  """Build the --output-file argument for a redact command."""
  output_file_arg = base.Argument(
      '--output-file',
      help='Path to the file to write redacted contents to.')
  return [output_file_arg]

View File

@@ -0,0 +1,51 @@
project:
name: project
collection: dlp.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: The project ID.
content:
name: content
collection: dlp.projects.content
attributes:
- *project
image:
name: image
collection: dlp.projects.image
attributes:
- *project
location:
name: location
collection: dlp.projects.locations
attributes:
- *project
- parameter_name: locationsId
attribute_name: location
help: The location ID.
fallthroughs:
- value: "global"
hint: |
uses "global" by default
job:
name: job
collection: dlp.projects.dlpJobs
disable_auto_completers: false
attributes:
- parameter_name: dlpJobsId
attribute_name: job
help: The ID of the DLP job.
job_trigger:
name: job_trigger
collection: dlp.projects.jobTriggers
disable_auto_completers: false
attributes:
- parameter_name: jobTriggersId
attribute_name: job_trigger
help: The ID of the DLP job trigger.