feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,332 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource args for the Dataplex surface."""
from __future__ import absolute_import
from __future__ import annotations
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import parser_arguments
from googlecloudsdk.command_lib.dataplex import parsers as dataplex_parsers
from googlecloudsdk.command_lib.util.args import labels_util
def AddDiscoveryArgs(parser):
  """Adds Discovery Args to parser.

  Args:
    parser: The arg parser to add the discovery flags to.

  Returns:
    The argument group that holds all of the discovery settings.
  """
  discovery_spec = parser.add_group(
      help='Settings to manage the metadata discovery and publishing.'
  )
  discovery_spec.add_argument(
      '--discovery-enabled',
      action=arg_parsers.StoreTrueFalseAction,
      help='Whether discovery is enabled.',
  )
  discovery_spec.add_argument(
      '--discovery-include-patterns',
      default=[],
      type=arg_parsers.ArgList(),
      metavar='INCLUDE_PATTERNS',
      help="""The list of patterns to apply for selecting data to include
        during discovery if only a subset of the data should considered. For
        Cloud Storage bucket assets, these are interpreted as glob patterns
        used to match object names. For BigQuery dataset assets, these are
        interpreted as patterns to match table names.""",
  )
  discovery_spec.add_argument(
      '--discovery-exclude-patterns',
      default=[],
      type=arg_parsers.ArgList(),
      metavar='EXCLUDE_PATTERNS',
      help="""The list of patterns to apply for selecting data to exclude
        during discovery. For Cloud Storage bucket assets, these are interpreted
        as glob patterns used to match object names. For BigQuery dataset
        assets, these are interpreted as patterns to match table names.""",
  )
  # Discovery jobs run periodically on a cron schedule.
  trigger = discovery_spec.add_group(
      help='Determines when discovery jobs are triggered.'
  )
  trigger.add_argument(
      '--discovery-schedule',
      help="""[Cron schedule](https://en.wikipedia.org/wiki/Cron) for running
        discovery jobs periodically. Discovery jobs must be scheduled at
        least 30 minutes apart.""",
  )
  # Format-specific options (CSV / JSON) that guide how discovered data is
  # parsed and registered.
  discovery_prefix = discovery_spec.add_group(help='Describe data formats.')
  csv_option = discovery_prefix.add_group(
      help='Describe CSV and similar semi-structured data formats.'
  )
  csv_option.add_argument(
      '--csv-header-rows',
      type=int,
      help=(
          'The number of rows to interpret as header rows that should be'
          ' skipped when reading data rows.'
      ),
  )
  csv_option.add_argument(
      '--csv-delimiter',
      help="The delimiter being used to separate values. This defaults to ','.",
  )
  csv_option.add_argument(
      '--csv-encoding',
      help='The character encoding of the data. The default is UTF-8.',
  )
  csv_option.add_argument(
      '--csv-disable-type-inference',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          'Whether to disable the inference of data type for CSV data. If true,'
          ' all columns will be registered as strings.'
      ),
  )
  json_option = discovery_prefix.add_group(help='Describe JSON data format.')
  json_option.add_argument(
      '--json-encoding',
      help='The character encoding of the data. The default is UTF-8.',
  )
  json_option.add_argument(
      '--json-disable-type-inference',
      action=arg_parsers.StoreTrueFalseAction,
      help=(
          ' Whether to disable the inference of data type for Json data. If'
          ' true, all columns will be registered as their primitive types'
          ' (strings, number or boolean).'
      ),
  )
  # Returned so callers may extend the group with additional flags.
  return discovery_spec
# Dataplex Entries
def AddEntrySourceArgs(
    parser: parser_arguments.ArgumentInterceptor, for_update: bool
):
  """Add entry source update args.

  Args:
    parser: The arg parser to add flags to.
    for_update: If True, then indicates that arguments are intended for Update
      command. In such case for each clearable argument there will be also
      `--clear-...` flag added in a mutually exclusive group to support clearing
      the field.
  """
  entry_source = parser.add_group(
      help=(
          'Source system related information for an entry. If any of the entry'
          # gcloud help markdown renders ``X'' as code; the closing quote was
          # previously a single stray backtick.
          " source fields are specified, then ``--entry-source-update-time''"
          ' must be specified as well.'
      )
  )

  def AddArgument(name: str, **kwargs):
    """Adds --entry-source-NAME and, on update, --clear-entry-source-NAME."""
    parser_to_add = entry_source
    # Update command includes `--clear-...` flag, that should be in mutually
    # exclusive group, so either value is updated or cleared.
    if for_update:
      parser_to_add = entry_source.add_mutually_exclusive_group()
      parser_to_add.add_argument(
          '--clear-entry-source-' + name,
          action='store_true',
          help=(
              f"Clear the value for the {name.replace('-', '_')} field in the"
              ' Entry Source.'
          ),
      )
    parser_to_add.add_argument('--entry-source-' + name, **kwargs)

  AddArgument(
      'resource',
      help='The name of the resource in the source system.',
      metavar='RESOURCE',
  )
  AddArgument(
      'system',
      help='The name of the source system.',
      metavar='SYSTEM_NAME',
  )
  AddArgument(
      'platform',
      help='The platform containing the source system.',
      metavar='PLATFORM_NAME',
  )
  AddArgument(
      'display-name',
      help='User friendly display name.',
      metavar='DISPLAY_NAME',
  )
  AddArgument(
      'description',
      help='Description of the Entry.',
      metavar='DESCRIPTION',
  )
  AddArgument(
      'create-time',
      help='The creation date and time of the resource in the source system.',
      type=dataplex_parsers.IsoDateTime,
      metavar='DATE_TIME',
  )

  # Handle labels using built-in utils.
  entry_source_labels_container = entry_source
  if for_update:
    # Mirror the clear/update mutual exclusion used by AddArgument above.
    entry_source_labels_container = entry_source.add_mutually_exclusive_group()
    clear_flag = labels_util.GetClearLabelsFlag(
        labels_name='entry-source-labels'
    ).AddToParser(entry_source_labels_container)
    clear_flag.help = 'Clear the labels for the Entry Source.'
  labels_util.GetCreateLabelsFlag(
      labels_name='entry-source-labels'
  ).AddToParser(entry_source_labels_container)

  if not for_update:
    entry_source.add_argument(
        '--entry-source-ancestors',
        help='Information about individual items in the hierarchy of an Entry.',
        type=arg_parsers.ArgList(includes_json=True),
        metavar='ANCESTORS',
    )

  # Update time is marked as required and is on a level above from other flags.
  # If any other flag (e.g. `--entry-source-system`) will be specified, then
  # the user will have to provide update time as well.
  entry_source.add_argument(
      '--entry-source-update-time',
      help='The update date and time of the resource in the source system.',
      type=dataplex_parsers.IsoDateTime,
      required=for_update,
      metavar='DATE_TIME',
  )
def AddAspectFlags(
    parser: parser_arguments.ArgumentInterceptor,
    *,
    update_aspects_name: str | None = 'update-aspects',
    remove_aspects_name: str | None = 'remove-aspects',
    required: bool = False,
):
  """Adds flags for updating and removing Aspects.

  Args:
    parser: The arg parser to add flags to.
    update_aspects_name: Name of the flag to add for updating Aspects or None if
      no flag should be added.
    remove_aspects_name: Name of the flag to add for removing Aspects or None if
      no flag should be added.
    required: If True, then flags will be marked as required.
  """
  # Paragraph appended to both flags' help (only when both flags exist) to
  # document which flag wins when the same aspect key appears in both.
  combination_help_text = ''
  if update_aspects_name is not None and remove_aspects_name is not None:
    combination_help_text = f"""
        If both `--{update_aspects_name}` and `--{remove_aspects_name}` flags
        are specified, and the same aspect key is used in both flags, then
        `--{update_aspects_name}` takes precedence, and such an aspect will be
        updated and not removed.
        """
  if update_aspects_name is not None:
    parser.add_argument(
        f'--{update_aspects_name}',
        help="""
        Path to a YAML or JSON file containing Aspects to add or update.

        When this flag is specified, only Aspects referenced in the file are
        going to be added or updated. Specifying this flag does not remove any
        Aspects from the entry. In other words, specifying this flag will not
        lead to a full replacement of Aspects with a contents of the provided
        file.

        Content of the file contains a map, where keys are in the format
        ``ASPECT_TYPE@PATH'', or just ``ASPECT_TYPE'', if the Aspect is attached
        to an entry itself rather than to a specific column defined in the
        schema.

        Values in the map represent Aspect's content, which must conform to a
        template defined for a given ``ASPECT_TYPE''. Each Aspect will be replaced
        fully by the provided content. That means data in the Aspect will be
        replaced and not merged with existing contents of that Aspect in the Entry.

        ``ASPECT_TYPE'' is expected to be in a format
        ``PROJECT_ID.LOCATION.ASPECT_TYPE_ID''.

        ``PATH'' can be either empty (which means a 'root' path, such that Aspect
        is attached to the entry itself) or point to a specific column defined
        in the schema. For example: `Schema.some_column`.

        Example YAML format:

        ```
        project-id1.us-central1.my-aspect-type1:
          data:
            aspectField1: someValue
            aspectField2: someOtherValue
        project-id2.us-central1.my-aspect-type2@Schema.column1:
          data:
            aspectField3: someValue3
        ```

        Example JSON format:

        ```
        {
          "project-id1.us-central1.my-aspect-type1": {
            "data": {
              "aspectField1": "someValue",
              "aspectField2": "someOtherValue"
            }
          },
          "project-id2.us-central1.my-aspect-type2@Schema.column1": {
            "data": {
              "aspectField3": "someValue3"
            }
          }
        }
        ```
        """ + combination_help_text,
        # ParseAspects validates the file contents and converts them to the
        # AspectsValue proto message.
        type=dataplex_parsers.ParseAspects,
        metavar='YAML_OR_JSON_FILE',
        required=required,
    )
  if remove_aspects_name is not None:
    parser.add_argument(
        f'--{remove_aspects_name}',
        help="""
        List of Aspect keys, identifying Aspects to remove from the entry.

        Keys are in the format ``ASPECT_TYPE@PATH'', or just ``ASPECT_TYPE'', if
        the Aspect is attached to an entry itself rather than to a specific
        column defined in the schema.

        ``ASPECT_TYPE'' is expected to be in a format
        ``PROJECT_ID.LOCATION.ASPECT_TYPE_ID'' or a wildcard `*`, which targets
        all aspect types.

        ``PATH'' can be either empty (which means a 'root' path, such that
        Aspect is attached to the entry itself), point to a specific column
        defined in the schema (for example: `Schema.some_column`) or a wildcard
        `*` (target all paths).

        ``ASPECT_TYPE'' and ``PATH'' cannot be both specified as wildcards `*`."""
        + combination_help_text,
        type=arg_parsers.ArgList(),
        metavar='ASPECT_TYPE@PATH',
        required=required,
    )

View File

@@ -0,0 +1,70 @@
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
get_entry_view:
arg_name: view
api_field: view
help_text: Controls which parts of an entry are to be returned.
# Listing choices here explicitly to prevent "UNSPECIFIED" value appearing as a valid option.
choices:
- arg_value: basic
enum_value: BASIC
help_text: Returns entry only, without aspects.
- arg_value: full
enum_value: FULL
help_text: |
Default value. Returns all required aspects, as well as the keys of all non-required aspects.
- arg_value: custom
enum_value: CUSTOM
help_text: |
Returns aspects filtered based on `--aspect-types` AND `--paths` arguments specified.
When used, at least one of `--aspect-types` and `--paths` arguments must be specified.
If the number of aspects would exceed 100, the first 100 will be returned.
- arg_value: all
enum_value: ALL
help_text: |
Returns all aspects.
If the number of aspects would exceed 100, the first 100 will be returned.
aspect_types:
api_field: aspectTypes
arg_name: aspect-types
type: 'googlecloudsdk.calliope.arg_parsers:ArgList:'
help_text: |
Limits the aspects returned to the provided aspect types.
Only works if `--view=custom` is selected.
For example, if two aspect types are specified:
"projects/projectA/locations/us-central1/my-aspect-type,projects/projectB/locations/us/my-aspect-type2"
then only aspects matching these aspect types will be returned.
Can be further constrained by the `--paths` argument.
paths:
api_field: paths
arg_name: paths
type: 'googlecloudsdk.calliope.arg_parsers:ArgList:'
help_text: |
Limits the aspects returned to those associated with the provided paths within the Entry.
Only works if `--view=custom` is selected.
For example, if two paths are specified:
"--paths=property1,property2"
then only aspects on these paths will be returned.
To return aspects without any path, the empty (root) path can be specified. For this "." can be
used. For example, when "--paths=.,property1" are specified, then only aspects on the path
"property1" and on the entry itself will be returned.
Can be further constrained by `--aspect-types` argument.

View File

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Dataplex Entries commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
from typing import List
from googlecloudsdk.api_lib.dataplex import util as dataplex_util
from googlecloudsdk.api_lib.util import messages as messages_util
from googlecloudsdk.calliope import arg_parsers
# Module-level handle to the generated Dataplex proto message module.
dataplex_message = dataplex_util.GetMessageModule()


def IsoDateTime(datetime_str: str) -> str:
  """Parses and validates a datetime string, returning it in ISO format."""
  parsed = arg_parsers.Datetime.Parse(datetime_str)
  return parsed.isoformat()
def ParseAspects(
    aspects_file: str,
) -> dataplex_message.GoogleCloudDataplexV1Entry.AspectsValue:
  """Parse aspects from a YAML or JSON file.

  Perform a basic validation that aspects are provided in a correct format.

  Args:
    aspects_file: The path to the YAML/JSON file containing aspects.

  Returns:
    A list of aspects parsed to a proto message (AspectsValue).
  """
  raw_aspects = arg_parsers.YAMLFileContents()(aspects_file)
  # The top-level structure must be a map of aspect key -> aspect content.
  if not isinstance(raw_aspects, dict):
    raise arg_parsers.ArgumentTypeError(
        f"Invalid aspects file: {aspects_file}. It must contain a map with a"
        " key in the format `ASPECT_TYPE@PATH` (or just `ASPECT_TYPE` if"
        " attached to the root path). Values in the map represent Aspect's"
        " content, which must conform to a template defined for a given"
        " `ASPECT_TYPE`."
    )
  aspects_value_cls = dataplex_message.GoogleCloudDataplexV1Entry.AspectsValue
  properties = [
      aspects_value_cls.AdditionalProperty(
          key=aspect_key,
          value=messages_util.DictToMessageWithErrorCheck(
              aspect_content, dataplex_message.GoogleCloudDataplexV1Aspect
          ),
      )
      for aspect_key, aspect_content in raw_aspects.items()
  ]
  return aspects_value_cls(additionalProperties=properties)
def ParseEntrySourceAncestors(ancestors: List[str]):
  """Parse ancestors from a string.

  Args:
    ancestors: A list of strings containing the JSON representation of the
      Ancestors.

  Returns:
    A list of ancestors parsed to a proto message
    (GoogleCloudDataplexV1EntrySourceAncestor).
  """
  if ancestors is None:
    return []
  # Each element is a JSON object; decode it and convert to the proto message.
  return [
      messages_util.DictToMessageWithErrorCheck(
          json.loads(ancestor),
          dataplex_message.GoogleCloudDataplexV1EntrySourceAncestor,
      )
      for ancestor in ancestors
  ]

View File

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Data Catalog search commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def ProjectToSearchEntriesName(project):
  """Returns the SearchEntries parent name for the given project.

  The location is always global for SearchEntries.
  """
  return f'projects/{project}/locations/global'

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared request hooks for the Dataplex surface."""
from __future__ import annotations
from typing import Any
from googlecloudsdk.generated_clients.apis.dataplex.v1 import dataplex_v1_messages as messages
def TransformEntryRootPath(
    unused_ref: str,
    args: Any,
    request: (
        messages.DataplexProjectsLocationsLookupEntryRequest
        | messages.DataplexProjectsLocationsEntryGroupsEntriesGetRequest
    ),
):
  """Transforms the root path from the "." in CLI to empty string expected in API."""
  # isinstance already rejects None, so a separate None check is not needed.
  if isinstance(args.paths, list):
    # "." is the CLI spelling of the root path; the API expects "".
    # The set also removes duplicate paths.
    request.paths = list({'' if path == '.' else path for path in args.paths})
  return request

View File

@@ -0,0 +1,739 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource args for the Dataplex surface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.util.apis import yaml_data
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import properties
# Sentinel job ID used when the user does not supply one. It keeps the
# resource name well-formed; per MetadataJobAttributeConfig below, the value
# is removed before the request so the client can generate a real ID.
GENERATE_ID = '@%!#DATAPLEX_GENERATE_UUID@%!#'


def GetProjectSpec() -> concepts.ResourceSpec:
  """Gets Project spec."""
  return concepts.ResourceSpec(
      'dataplex.projects',
      resource_name='projects',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
  )
# Resource specs.
# Each Get*ResourceSpec function declares the URI structure of one Dataplex
# (or Dataplex organization-level) resource for calliope concept parsing.


def GetLakeResourceSpec() -> concepts.ResourceSpec:
  """Gets Lake resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes',
      resource_name='lakes',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
  )


def GetZoneResourceSpec() -> concepts.ResourceSpec:
  """Gets Zone resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes.zones',
      resource_name='zones',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
      zonesId=ZoneAttributeConfig(),
  )


def GetAssetResourceSpec() -> concepts.ResourceSpec:
  """Gets Asset resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes.zones.assets',
      resource_name='assets',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
      zonesId=ZoneAttributeConfig(),
      assetsId=AssetAttributeConfig(),
  )


def GetContentitemResourceSpec() -> concepts.ResourceSpec:
  """Gets Content resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes.contentitems',
      resource_name='content',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
      contentitemsId=ContentAttributeConfig(),
  )


def GetTaskResourceSpec() -> concepts.ResourceSpec:
  """Gets Task resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes.tasks',
      resource_name='tasks',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
      tasksId=TaskAttributeConfig(),
  )


def GetEnvironmentResourceSpec() -> concepts.ResourceSpec:
  """Gets Environment resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.lakes.environments',
      resource_name='environments',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      lakesId=LakeAttributeConfig(),
      environmentsId=EnvironmentAttributeConfig(),
  )


def GetDatascanResourceSpec() -> concepts.ResourceSpec:
  """Gets Datascan resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.dataScans',
      resource_name='datascan',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      dataScansId=DatascanAttributeConfig(),
  )


def GetDataTaxonomyResourceSpec() -> concepts.ResourceSpec:
  """Gets DataTaxonomy resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.dataTaxonomies',
      resource_name='data taxonomy',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      dataTaxonomiesId=DataTaxonomyAttributeConfig(),
  )


def GetDataAttributeBindingResourceSpec() -> concepts.ResourceSpec:
  """Gets DataAttributeBinding resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.dataAttributeBindings',
      resource_name='data attribute binding',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      dataAttributeBindingsId=DataAttributeBindingAttributeConfig(),
  )


def GetDataAttributeResourceSpec() -> concepts.ResourceSpec:
  """Gets Data Attribute resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.dataTaxonomies.attributes',
      resource_name='data attribute',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      dataTaxonomiesId=DataTaxonomyAttributeConfig(),
      attributesId=DataAttributeConfig(),
  )


def GetDataplexEntryGroupResourceSpec() -> concepts.ResourceSpec:
  """Gets Entry Group resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.entryGroups',
      resource_name='entry group',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      entryGroupsId=EntryGroupAttributeConfig(),
  )


def GetDataplexAspectTypeResourceSpec() -> concepts.ResourceSpec:
  """Gets Aspect Type resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.aspectTypes',
      resource_name='aspect type',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      aspectTypesId=AspectTypeAttributeConfig(),
  )


def GetDataplexEntryTypeResourceSpec() -> concepts.ResourceSpec:
  """Gets Entry Type resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.entryTypes',
      resource_name='entry type',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      entryTypesId=EntryTypeAttributeConfig(),
  )


# NOTE(review): uses EntryType-specific attribute configs (flag names prefixed
# with `entry-type-`), unlike GetDataplexEntryTypeResourceSpec above.
def GetEntryTypeResourceSpec() -> concepts.ResourceSpec:
  """Gets EntryType resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.entryTypes',
      resource_name='entry type',
      projectsId=EntryTypeProjectAttributeConfig(),
      locationsId=EntryTypeLocationAttributeConfig(),
      entryTypesId=EntryTypeConfig(),
  )


def GetDataplexEntryLinkResourceSpec() -> concepts.ResourceSpec:
  """Gets Entry Link resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.entryGroups.entryLinks',
      resource_name='entry link',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      entryGroupsId=EntryGroupAttributeConfig(),
      entryLinksId=EntryLinkAttributeConfig(),
  )


def GetGovernanceRuleResourceSpec() -> concepts.ResourceSpec:
  """Gets GovernanceRule resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.governanceRules',
      resource_name='governance rule',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      governanceRulesId=GovernanceRuleAttributeConfig(),
  )


def GetGlossaryResourceSpec() -> concepts.ResourceSpec:
  """Gets Glossary resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.glossaries',
      resource_name='glossary',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      glossariesId=GlossaryAttributeConfig(),
  )


def GetGlossaryCategoryResourceSpec() -> concepts.ResourceSpec:
  """Gets Glossary Category resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.glossaries.categories',
      resource_name='glossary category',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      glossariesId=GlossaryAttributeConfig(),
      categoriesId=GlossaryCategoryAttributeConfig(),
  )


def GetGlossaryTermResourceSpec() -> concepts.ResourceSpec:
  """Gets Glossary Term resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.glossaries.terms',
      resource_name='glossary term',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      glossariesId=GlossaryAttributeConfig(),
      termsId=GlossaryTermAttributeConfig(),
  )


def GetMetadataJobResourceSpec() -> concepts.ResourceSpec:
  """Gets Metadata Job resource spec."""
  return concepts.ResourceSpec(
      'dataplex.projects.locations.metadataJobs',
      resource_name='metadata job',
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      metadataJobsId=MetadataJobAttributeConfig(),
  )


# Organization-level resource (unlike the project-scoped specs above).
def GetEncryptionConfigResourceSpec() -> concepts.ResourceSpec:
  """Gets EncryptionConfig resource spec."""
  return concepts.ResourceSpec(
      'dataplex.organizations.locations.encryptionConfigs',
      resource_name='encryption config',
      organizationsId=OrganizationAttributeConfig(),
      locationsId=LocationAttributeConfig(),
      encryptionConfigsId=EncryptionConfigAttributeConfig(),
  )
# Attribute configs.
# Each function below defines a single attribute (flag) of a resource spec.


def EntryTypeProjectAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the attribute config for an EntryType project."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry-type-project',
      help_text='The project of the EntryType resource.',
  )


def EntryTypeLocationAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the attribute config for an EntryType location."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry-type-location',
      help_text='The location of the EntryType resource.',
  )


def LocationAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the location attribute config, defaulting to dataplex/location."""
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      # Falls back to the `dataplex/location` gcloud property when --location
      # is not provided on the command line.
      fallthroughs=[
          deps.PropertyFallthrough(properties.FromString('dataplex/location'))
      ],
      help_text='The location of the Dataplex resource.',
  )


def LakeAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the lake ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='lake', help_text='The identifier of the Dataplex lake resource.'
  )


def ZoneAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the zone ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='zone', help_text='The identifier of the Dataplex zone resource.'
  )


def AssetAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the asset ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='asset', help_text='The identifier of the Dataplex asset resource.'
  )


def ContentAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the content ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='content', help_text='The name of the {resource} to use.'
  )


def EnvironmentAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the environment ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='environment', help_text='The name of {resource} to use.'
  )


def DataTaxonomyAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the data taxonomy ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='data_taxonomy', help_text='The name of {resource} to use.'
  )


def DataAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the data attribute ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='data_attribute', help_text='The name of {resource} to use.'
  )


def DataAttributeBindingAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the data attribute binding ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='data_attribute_binding', help_text='The name of {resource} to use.'
  )


def EntryGroupAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the entry group ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry-group', help_text='The name of {resource} to use.'
  )


def AspectTypeAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the aspect type ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='aspect_type', help_text='The name of {resource} to use.'
  )


def EntryTypeAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the entry type ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry_type', help_text='The name of {resource} to use.'
  )


def EntryLinkAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the entry link ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry-link', help_text='The name of {resource} to use.'
  )


def DatascanAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the datascan ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='dataScans', help_text='The name of {resource} to use.'
  )


# NOTE(review): appears to duplicate EntryTypeAttributeConfig (same `name`);
# confirm both are referenced before consolidating.
def EntryTypeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the entry type ID attribute config (EntryType-spec variant)."""
  return concepts.ResourceParameterAttributeConfig(
      name='entry_type', help_text='The name of {resource} to use.'
  )


def GovernanceRuleAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the governance rule ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='governance_rule', help_text='The name of {resource} to use.'
  )


def GlossaryAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the glossary ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='glossary', help_text='The name of {resource} to use.'
  )


def GlossaryCategoryAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the glossary category ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='glossary_category', help_text='The name of {resource} to use.'
  )


def GlossaryTermAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the glossary term ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='glossary_term', help_text='The name of {resource} to use.'
  )


def MetadataJobAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the metadata job ID attribute config (job ID optional)."""
  return concepts.ResourceParameterAttributeConfig(
      name='metadata_job',
      # Adding invalid job_id to keep job resource in the right format,
      # this invalid value will be removed if no job_id is specified from
      # the input and the underlaying client would generate a valid one.
      fallthroughs=[
          deps.ValueFallthrough(
              GENERATE_ID,
              hint='job ID is optional and will be generated if not specified',
          )
      ],
      help_text='The name of {resource} to use.',
  )


def EncryptionConfigAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the encryption config ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='encryption_config', help_text='The name of {resource} to use.'
  )


def OrganizationAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the organization ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='organization', help_text='The name of {resource} to use.'
  )
def AddDatascanResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Datascan."""
  arg_name = 'datascan' if positional else '--datascan'
  group_help = f'Arguments and flags that define the Dataplex datascan you want {verb}'
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDatascanResourceSpec(),
      group_help,
      required=True,
  ).AddToParser(parser)
def AddProjectArg(parser, verb, positional=True):
  """Adds a resource argument for a project."""
  arg_name = 'project' if positional else '--project'
  group_help = f'Arguments and flags that define the project you want {verb}'
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetProjectSpec(),
      group_help,
      required=True,
  ).AddToParser(parser)
def TaskAttributeConfig() -> concepts.ResourceParameterAttributeConfig:
  """Gets the task ID attribute config."""
  return concepts.ResourceParameterAttributeConfig(
      name='task', help_text='The identifier of the Dataplex task resource.'
  )
def AddLakeResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Lake."""
  arg_name = 'lake' if positional else '--lake'
  group_help = f'Arguments and flags that define the Dataplex lake you want {verb}'
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetLakeResourceSpec(),
      group_help,
      required=True,
  ).AddToParser(parser)
def AddZoneResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Zone."""
  flag = 'zone' if positional else '--zone'
  description = (
      'Arguments and flags that define the Dataplex zone you want {}'.format(
          verb
      )
  )
  return concept_parsers.ConceptParser.ForResource(
      flag,
      GetZoneResourceSpec(),
      description,
      required=True,
  ).AddToParser(parser)
def AddAssetResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Asset."""
  flag = 'asset' if positional else '--asset'
  description = (
      'Arguments and flags that define the Dataplex asset you want {}'.format(
          verb
      )
  )
  return concept_parsers.ConceptParser.ForResource(
      flag,
      GetAssetResourceSpec(),
      description,
      required=True,
  ).AddToParser(parser)
def AddContentitemResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Content item."""
  if positional:
    arg_name = 'content'
  else:
    arg_name = '--content'
  content_concept = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetContentitemResourceSpec(),
      'The Content {}'.format(verb),
      required=True,
  )
  return content_concept.AddToParser(parser)
def AddTaskResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Task."""
  flag = 'task' if positional else '--task'
  description = (
      'Arguments and flags that define the Dataplex task you want {}'.format(
          verb
      )
  )
  return concept_parsers.ConceptParser.ForResource(
      flag,
      GetTaskResourceSpec(),
      description,
      required=True,
  ).AddToParser(parser)
def AddEnvironmentResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Environment."""
  if positional:
    arg_name = 'environment'
  else:
    arg_name = '--environment'
  environment_concept = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetEnvironmentResourceSpec(),
      'The Environment {}'.format(verb),
      required=True,
  )
  return environment_concept.AddToParser(parser)
def AddDataTaxonomyResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Data Taxonomy."""
  arg_name = 'data_taxonomy' if positional else '--data_taxonomy'
  taxonomy_concept = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataTaxonomyResourceSpec(),
      'The DataTaxonomy {}'.format(verb),
      required=True,
  )
  return taxonomy_concept.AddToParser(parser)
def AddAttributeResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Data Attribute."""
  arg_name = 'data_attribute' if positional else '--data_attribute'
  attribute_concept = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataAttributeResourceSpec(),
      'The DataAttribute {}'.format(verb),
      required=True,
  )
  return attribute_concept.AddToParser(parser)
def AddDataAttributeBindingResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex DataAttributeBinding."""
  if positional:
    arg_name = 'data_attribute_binding'
  else:
    arg_name = '--data_attribute_binding'
  binding_concept = concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataAttributeBindingResourceSpec(),
      'The DataAttributeBinding {}'.format(verb),
      required=True,
  )
  return binding_concept.AddToParser(parser)
def AddDataplexEntryGroupResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex EntryGroup."""
  arg_name = 'entry_group' if positional else '--entry-group'
  help_text = (
      'Arguments and flags that define the Dataplex entry group you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataplexEntryGroupResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddDataplexAspectTypeResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex AspectType."""
  arg_name = 'aspect_type' if positional else '--aspect_type'
  help_text = (
      'Arguments and flags that define the Dataplex aspect type you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataplexAspectTypeResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddDataplexEntryTypeResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex EntryType."""
  arg_name = 'entry_type' if positional else '--entry_type'
  help_text = (
      'Arguments and flags that define the Dataplex entry type you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataplexEntryTypeResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddEntryTypeResourceArg(parser):
  """Adds a required `--entry-type` resource argument for a Dataplex EntryType."""
  help_text = (
      'Arguments and flags that define the Dataplex EntryType you want to'
      ' reference.'
  )
  entry_type_concept = concept_parsers.ConceptParser.ForResource(
      '--entry-type',
      GetEntryTypeResourceSpec(),
      help_text,
      required=True,
  )
  return entry_type_concept.AddToParser(parser)
def AddEntryResourceArg(parser):
  """Adds a positional resource argument for a Dataplex Entry."""
  # The Entry resource spec is defined in YAML rather than built inline.
  entry_yaml = yaml_data.ResourceYAMLData.FromPath('dataplex.entry')
  entry_spec = concepts.ResourceSpec.FromYaml(
      entry_yaml.GetData(), is_positional=True
  )
  return concept_parsers.ConceptParser.ForResource(
      'entry',
      entry_spec,
      'Arguments and flags that define the Dataplex Entry you want to'
      ' reference.',
      required=True,
  ).AddToParser(parser)
def AddDataplexEntryLinkResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex EntryLink."""
  arg_name = 'entry_link' if positional else '--entry-link'
  help_text = (
      'Arguments and flags that define the Dataplex entry link you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetDataplexEntryLinkResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddGovernanceRuleResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex GovernanceRule."""
  arg_name = 'governance_rule' if positional else '--governance_rule'
  help_text = (
      'Arguments and flags that define the Dataplex governance rule you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetGovernanceRuleResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddGlossaryResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Glossary."""
  arg_name = 'glossary' if positional else '--glossary'
  help_text = (
      'Arguments and flags that define the Dataplex Glossary you want {}'
      .format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetGlossaryResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddGlossaryCategoryResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Glossary Category."""
  arg_name = 'glossary_category' if positional else '--glossary_category'
  help_text = (
      'Arguments and flags that define the Dataplex Glossary Category you'
      ' want {}'.format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetGlossaryCategoryResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddGlossaryTermResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex Glossary Term."""
  arg_name = 'glossary_term' if positional else '--glossary_term'
  help_text = (
      'Arguments and flags that define the Dataplex Glossary Term you'
      ' want {}'.format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetGlossaryTermResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddEncryptionConfigResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex EncryptionConfig."""
  if positional:
    arg_name = 'encryption_config'
  else:
    arg_name = '--encryption_config'
  help_text = (
      'Arguments and flags that define the Dataplex EncryptionConfig you'
      ' want {}'.format(verb)
  )
  return concept_parsers.ConceptParser.ForResource(
      arg_name,
      GetEncryptionConfigResourceSpec(),
      help_text,
      required=True,
  ).AddToParser(parser)
def AddParentEntryResourceArg(parser):
  """Adds an optional `--parent-entry` resource argument for a Dataplex Entry.

  The parent entry is specified by its entry ID only; its location and entry
  group are taken from the main Entry argument's `--location`/`--entry_group`
  flags rather than from dedicated flags.

  Args:
    parser: The argparse parser to add the resource argument to.

  Returns:
    The value returned by ConceptParser.AddToParser.
  """
  entry_data = yaml_data.ResourceYAMLData.FromPath('dataplex.entry')
  return concept_parsers.ConceptParser.ForResource(
      '--parent-entry',
      concepts.ResourceSpec.FromYaml(entry_data.GetData()),
      'Arguments and flags that define the parent Entry you want to reference.',
      # Parent Entry has to belong to the same EntryGroup as the Entry,
      # therefore we disable the location and entry_group flags for the parent
      # entry resource and set fallthroughs to use the same location and
      # entry_group as for the Entry.
      command_level_fallthroughs={
          'location': ['--location'],
          'entry_group': ['--entry_group'],
      },
      # Empty overrides suppress generation of per-resource
      # location/entry-group flags for the parent entry.
      flag_name_overrides={
          'location': '',
          'entry_group': '',
      },
  ).AddToParser(parser)
def AddMetadataJobResourceArg(parser, verb, positional=True):
  """Adds a resource argument for a Dataplex MetadataJob.

  Args:
    parser: The argparse parser to add the resource argument to.
    verb: Text describing the action, appended to the argument's help text.
    positional: If True the argument is positional (`metadata_job`); otherwise
      it is added as the `--metadata_job` flag.

  Returns:
    The value returned by ConceptParser.AddToParser.
  """
  name = 'metadata_job' if positional else '--metadata_job'
  return concept_parsers.ConceptParser.ForResource(
      name,
      GetMetadataJobResourceSpec(),
      # Help-text typo fixed: 'metdata' -> 'metadata'.
      'Arguments and flags that define the Dataplex metadata job you want {}'
      .format(verb),
      required=True,
  ).AddToParser(parser)

View File

@@ -0,0 +1,385 @@
project:
name: project
collection: dataplex.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: |
Name of the Cloud project to use.
property: core/project
organization:
name: organization
collection: dataplex.organizations
attributes:
- &organization
parameter_name: organizationsId
attribute_name: organization
help: |
Name of the Cloud organization to use.
location:
name: location
collection: dataplex.projects.locations
disable_auto_completers: false
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: |
Location of the Dataplex resource.
property: dataplex/location
lake:
name: lake
collection: dataplex.projects.locations.lakes
request_id_field: lakeId
disable_auto_completers: false
attributes:
- *project
- *location
- &lake
parameter_name: lakesId
attribute_name: lake
help: |
Identifier of the Dataplex lake resource.
zone:
name: zone
collection: dataplex.projects.locations.lakes.zones
request_id_field: zoneId
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- &zone
parameter_name: zonesId
attribute_name: zone
help: |
Identifier of the Dataplex zone resource.
asset:
name: asset
collection: dataplex.projects.locations.lakes.zones.assets
request_id_field: assetId
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- *zone
- &asset
parameter_name: assetsId
attribute_name: asset
help: |
Identifier of the Dataplex asset resource.
task:
name: task
collection: dataplex.projects.locations.lakes.tasks
request_id_field: taskId
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- &task
parameter_name: tasksId
attribute_name: task
help: |
Identifier of the Dataplex task resource.
job:
name: job
collection: dataplex.projects.locations.lakes.tasks.jobs
request_id_field: jobID
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- *task
- &job
parameter_name: jobsId
attribute_name: job
help: |
Job_id of the Job running a particular Task in Dataplex.
environment:
name: environment
collection: dataplex.projects.locations.lakes.environments
request_id_field: environmentID
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- &environment
parameter_name: environmentsId
attribute_name: environment
help: |
Environment_id of a particular Environment in Dataplex.
session:
name: session
collection: dataplex.projects.locations.lakes.environments.sessions
request_id_field: sessionID
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- *environment
- &session
parameter_name: sessionsId
attribute_name: session
help: |
Session_id of a particular Session of an Environment in Dataplex.
content:
name: content
collection: dataplex.projects.locations.lakes.contentitems
request_id_field: contentId
disable_auto_completers: false
attributes:
- *project
- *location
- *lake
- &content
parameter_name: contentitemsId
attribute_name: content
help: |
Content_id for the Content Resource of the Dataplex service.
data_taxonomy:
name: data taxonomy
collection: dataplex.projects.locations.dataTaxonomies
request_id_field: dataTaxonomyId
disable_auto_completers: false
attributes:
- *project
- *location
- &data_taxonomy
parameter_name: dataTaxonomiesId
attribute_name: data_taxonomy
help: |
Identifier of the Dataplex DataTaxonomy resource.
data_attribute:
name: data attribute
collection: dataplex.projects.locations.dataTaxonomies.attributes
request_id_field: dataAttributeId
disable_auto_completers: false
attributes:
- *project
- *location
- *data_taxonomy
- &data_attribute
parameter_name: attributesId
attribute_name: data_attribute
help: |
Identifier of the Dataplex task resource.
datascan:
name: datascan
collection: dataplex.projects.locations.dataScans
request_id_field: dataScansId
disable_auto_completers: false
attributes:
- *project
- *location
- &datascan
parameter_name: dataScansId
attribute_name: datascan
help: |
Datascan ID of the Dataplex datascan resource.
datascanjob:
name: job
collection: dataplex.projects.locations.dataScans.jobs
request_id_field: jobID
disable_auto_completers: false
attributes:
- *project
- *location
- *datascan
- &datascanjob
parameter_name: jobsId
attribute_name: job
help: |
Job ID of the Job running a particular Datascan in Dataplex.
data_attribute_binding:
name: data attribute binding
collection: dataplex.projects.locations.dataAttributeBindings
request_id_field: dataAttributeBindingId
disable_auto_completers: false
attributes:
- *project
- *location
- &data_attribute_binding
parameter_name: dataAttributeBindingsId
attribute_name: data_attribute_binding
help: |
Identifier of the Dataplex Data Attribute Binding
entry_group:
name: entry group
collection: dataplex.projects.locations.entryGroups
request_id_field: entryGroupId
disable_auto_completers: false
attributes:
- *project
- *location
- &entry_group
parameter_name: entryGroupsId
attribute_name: entry_group
help: |
Entry group containing Dataplex Entries.
aspect_type:
name: aspect type
collection: dataplex.projects.locations.aspectTypes
request_id_field: aspectTypeId
disable_auto_completers: false
attributes:
- *project
- *location
- &aspect_type
parameter_name: aspectTypesId
attribute_name: aspect_type
help: |
Identifier of the Dataplex Aspect Type resource.
entry_type:
name: entry type
collection: dataplex.projects.locations.entryTypes
request_id_field: entryTypeId
disable_auto_completers: false
attributes:
- *project
- *location
- &entry_type
parameter_name: entryTypesId
attribute_name: entry_type
help: |
Identifier of the Dataplex Entry Type resource.
entry:
name: entry
collection: dataplex.projects.locations.entryGroups.entries
request_id_field: entryId
disable_auto_completers: false
attributes:
- *project
- *location
- *entry_group
- &entry
parameter_name: entriesId
attribute_name: entry
help: |
Entry containing metadata about some resource.
governance_rule:
name: governance rule
collection: dataplex.projects.locations.governanceRules
request_id_field: governanceRuleId
disable_auto_completers: false
attributes:
- *project
- *location
- &governance_rule
parameter_name: governanceRulesId
attribute_name: governance_rule
help: |
Identifier of the Dataplex Governance Rule resource.
encryption_config:
name: encryption config
collection: dataplex.organizations.locations.encryptionConfigs
request_id_field: encryptionConfigId
disable_auto_completers: false
attributes:
- *organization
- *location
- &encryption_config
parameter_name: encryptionConfigsId
attribute_name: encryption_config
help: |
Identifier of the Dataplex Encryption Config resource.
glossary:
name: glossary
collection: dataplex.projects.locations.glossaries
request_id_field: glossaryId
disable_auto_completers: false
attributes:
- *project
- *location
- &glossary
parameter_name: glossariesId
attribute_name: glossary
help: |
Identifier of the Dataplex Glossary resource.
glossary_category:
name: glossary category
collection: dataplex.projects.locations.glossaries.categories
request_id_field: categoryId
disable_auto_completers: false
attributes:
- *project
- *location
- *glossary
- &glossary_category
parameter_name: categoriesId
attribute_name: glossary_category
help: |
Identifier of the Dataplex Glossary Category resource.
glossary_term:
name: glossary term
collection: dataplex.projects.locations.glossaries.terms
request_id_field: termId
disable_auto_completers: false
attributes:
- *project
- *location
- *glossary
- &glossary_term
parameter_name: termsId
attribute_name: glossary_term
help: |
Identifier of the Dataplex Glossary Term resource.
metadata_job:
name: metadata job
collection: dataplex.projects.locations.metadataJobs
request_id_field: metadataJobId
disable_auto_completers: false
attributes:
- *project
- *location
- &metadata_job
parameter_name: metadataJobsId
attribute_name: metadata_job
help: |
Identifier of the Dataplex Metadata Job resource.
entry_link:
name: entry link
collection: dataplex.projects.locations.entryGroups.entryLinks
request_id_field: entryLinkId
disable_auto_completers: false
attributes:
- *project
- *location
- *entry_group
- &entry_link
parameter_name: entryLinksId
attribute_name: entry_link
help: |
Entry Link between two entries.

View File

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Data Catalog search commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def ExtractEntryFromSearchEntriesResult(response, _):
  """Yields the `dataplexEntry` field of each row in a search response.

  Args:
    response: Iterable of search result rows, each exposing
      `get_assigned_value`.
    _: Unused; kept to match the response-hook calling convention.

  Yields:
    The value assigned to `dataplexEntry` on each row.
  """
  for search_result in response:
    yield search_result.get_assigned_value('dataplexEntry')