feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for Dataplex Lake Resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Lakes(base.Group):
  """Manage Dataplex Lake resources."""

  # Places this command group under the Data Analytics section of gcloud help.
  category = base.DATA_ANALYTICS_CATEGORY

View File

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for Dataplex Lake Resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Actions(base.Group):
  """Manage Dataplex lake resource actions."""

  # Places this command group under the Data Analytics section of gcloud help.
  category = base.DATA_ANALYTICS_CATEGORY

View File

@@ -0,0 +1,33 @@
# Declarative spec for `gcloud dataplex lakes actions list`.
# Fix: the CATEGORY column used `category: label=CATEGORY`; the space after the
# colon breaks the core.resource format expression (all columns must be
# `field:label=NAME`, as the sibling columns are).
- release_tracks: [ALPHA, GA]
  help_text:
    brief: |
      List Dataplex lake actions.
    description: |
      List all Dataplex Actions under a specific lake.
    examples: |
      To list all actions of a Dataplex Lake `test-lake` defined in location `us-central1` run:

        $ {command} --location=us-central1 --lake=test-lake
  command_type: LIST
  request:
    ALPHA:
      api_version: v1
    collection: dataplex.projects.locations.lakes.actions
  arguments:
    resource:
      help_text: |
        Arguments and flags that define the Dataplex Lake actions you want to list.
      override_resource_collection: true
      spec: !REF googlecloudsdk.command_lib.dataplex.resources:lake
  response:
    id_field: name
  output:
    format: |
      table(
        name.basename():label=NAME,
        data_locations:label=DATA_LOCATION,
        category:label=CATEGORY,
        issue:label=ISSUE
      )

View File

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to add-iam-policy-binding to a Dataplex lake resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.iam import iam_util
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class AddIamPolicyBinding(base.Command):
  """Add IAM policy binding to a Dataplex lake resource."""

  # Fix: the example prose referred to user `test-user@gmail.com` in location
  # `us-central`, but the command line used `user:foo@gmail.com` and
  # `--location=us-central1`; the two are now consistent.
  detailed_help = {
      'EXAMPLES':
          """\
          To add an IAM policy binding for the role of `roles/dataplex.viewer`
          for the user `test-user@gmail.com` to lake `test-lake` in location
          `us-central1`, run:

            $ {command} test-lake --location=us-central1 --role=roles/dataplex.viewer --member=user:test-user@gmail.com

          See https://cloud.google.com/dataplex/docs/iam-roles for details of
          policy role and member types.
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the lake resource argument and the --role/--member flags."""
    resource_args.AddLakeResourceArg(parser, 'to add IAM policy binding to.')
    iam_util.AddArgsForAddIamPolicyBinding(parser)

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Adds the binding and returns the updated IAM policy."""
    lake_ref = args.CONCEPTS.lake.Parse()
    return lake.AddIamPolicyBinding(lake_ref, args.member, args.role)

View File

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to authorize a service agent to manage other resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudresourcemanager import projects_api
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.projects import util as project_util
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Authorize(base.Command):
  """Authorize a service agent to manage resources.

  The service agent for the primary project will be granted an IAM role on a
  secondary project, a Cloud Storage bucket, or a BigQuery dataset.
  """

  detailed_help = {
      'EXAMPLES': """\
          To authorize the service agent in project `test-project` to manage
          resources in the project `test-project2`, run:

            $ {command} --project=test-project --project-resource=test-project2

          To authorize the service agent in project `test-project` to manage the
          Cloud Storage bucket `dataplex-storage-bucket`, run:

            $ {command} --project=test-project --storage-bucket-resource=dataplex-storage-bucket

          To authorize the service agent in project `test-project` to manage the
          BigQuery dataset `test-dataset` in project `test-project2`, run:

            $ {command} --project=test-project --bigquery-dataset-resource=test-dataset --secondary-project=test-project2
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the primary-project argument and the target-resource flags."""
    resource_args.AddProjectArg(
        parser, 'to grant a role to the service agent in.'
    )
    # Exactly one of the three target-resource choices below may be given
    # (mutex=True) and one is required.
    data_group = parser.add_group(
        mutex=True,
        required=True,
        help='The resource on which to grant a role to the service agent.',
    )
    data_group.add_argument(
        '--storage-bucket-resource',
        help="""The identifier of the Cloud Storage bucket that the service agent will manage.""",
    )
    data_group.add_argument(
        '--project-resource',
        help=(
            'The identifier of the project whose resources the service agent'
            ' will manage.'
        ),
    )
    # The BigQuery target needs two pieces of information (dataset + its
    # project), so it is a nested group inside the mutex group.
    dataset_group = data_group.add_group(
        help='Fields to identify the BigQuery dataset.'
    )
    dataset_group.add_argument(
        '--bigquery-dataset-resource',
        required=True,
        help=(
            'The identifier of the BigQuery dataset that the service agent will'
            ' manage.'
        ),
    )
    dataset_group.add_argument(
        '--secondary-project',
        required=True,
        help=(
            'The identifier of the project where the BigQuery dataset is'
            ' located.'
        ),
    )

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Grants roles/dataplex.serviceAgent to the project's service agent."""
    project_ref = args.CONCEPTS.project.Parse()
    # The Dataplex service agent address is derived from the project NUMBER,
    # not the project ID.
    service_account = 'service-' + str(
        project_util.GetProjectNumber(project_ref.projectsId)
    ) + '@gcp-sa-dataplex.iam.gserviceaccount.com'
    if args.IsSpecified('storage_bucket_resource'):
      return storage_api.StorageClient().AddIamPolicyBinding(
          storage_util.BucketReference(args.storage_bucket_resource),
          'serviceAccount:' + service_account, 'roles/dataplex.serviceAgent')
    if args.IsSpecified('bigquery_dataset_resource'):
      # Read-modify-write of the dataset ACL: fetch the dataset, append the
      # service-agent access entry in place, then patch the dataset back.
      get_dataset_request = apis.GetMessagesModule(
          'bigquery', 'v2').BigqueryDatasetsGetRequest(
              datasetId=args.bigquery_dataset_resource,
              projectId=args.secondary_project)
      dataset = apis.GetClientInstance(
          'bigquery', 'v2').datasets.Get(request=get_dataset_request)
      lake.AddServiceAccountToDatasetPolicy(
          apis.GetMessagesModule('bigquery', 'v2').Dataset.AccessValueListEntry,
          dataset, service_account, 'roles/dataplex.serviceAgent')
      return apis.GetClientInstance('bigquery', 'v2').datasets.Patch(
          apis.GetMessagesModule('bigquery', 'v2').BigqueryDatasetsPatchRequest(
              datasetId=args.bigquery_dataset_resource,
              projectId=args.secondary_project,
              dataset=dataset))
    if args.IsSpecified('project_resource'):
      return projects_api.AddIamPolicyBinding(
          project_util.ParseProject(args.project_resource),
          'serviceAccount:' + service_account, 'roles/dataplex.serviceAgent')

View File

@@ -0,0 +1,124 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to create a Dataplex lake resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.dataplex import util as dataplex_util
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.util.args import labels_util
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Create(base.Command):
  """Create a Dataplex lake resource.

  A lake is a centralized repository for managing data across the
  organization, where enterprise data is distributed across many cloud projects,
  and stored in a variety of storage services, such as Google Cloud Storage and
  BigQuery. A lake provides data admins with tools to organize, secure and
  manage their data at scale, and provides data scientists and data engineers an
  integrated experience to easily search, discover, analyze and transform data
  and associated metadata.

  The Lake ID will be used to generate names such as database and dataset names
  when publishing metadata to Hive Metastore and BigQuery.

  The Lake ID must follow these rules:

   * Must contain only lowercase letters, numbers, and hyphens.
   * Must start with a letter.
   * Must end with a number or a letter.
   * Must be between 1-63 characters.
   * Must be unique within the customer project / location.
  """

  # Fix: the example prose said location `us-central1` but the command lines
  # passed `--location=us-central`; both examples now use `us-central1`.
  detailed_help = {
      'EXAMPLES':
          """\
          To create a Dataplex lake with name `my-dataplex-lake` in location
          `us-central1`, run:

            $ {command} my-dataplex-lake --location=us-central1

          To create a Dataplex lake with name `my-dataplex-lake` in location
          `us-central1` with metastore service `service-123abc` attached, run:

            $ {command} my-dataplex-lake --location=us-central1 --metastore-service=projects/my-project/services/service-123abc
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the lake resource argument and the create flags."""
    resource_args.AddLakeResourceArg(parser, 'to create.')
    parser.add_argument(
        '--validate-only',
        action='store_true',
        default=False,
        help='Validate the create action, but don\'t actually perform it.')
    metastore = parser.add_group(
        help='Settings to manage metadata publishing to a Hive Metastore from a lake.'
    )
    metastore.add_argument(
        '--metastore-service',
        help=""" A relative reference to the Dataproc Metastore
        (https://cloud.google.com/dataproc-metastore/docs) service instance into
        which metadata will be published. This is of the form:
        `projects/{project_number}/locations/{location_id}/services/{service_id}`
        where the location matches the location of the lake.""")
    parser.add_argument('--description', help='Description of the lake.')
    parser.add_argument('--display-name', help='Display name of the lake.')
    base.ASYNC_FLAG.AddToParser(parser)
    labels_util.AddCreateLabelsFlags(parser)

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Issues the create request and handles validate-only/async modes."""
    lake_ref = args.CONCEPTS.lake.Parse()
    dataplex_client = dataplex_util.GetClientInstance()
    message = dataplex_util.GetMessageModule()
    # validateOnly is handled server-side: the request below performs only
    # validation when the flag is set.
    create_req_op = dataplex_client.projects_locations_lakes.Create(
        message.DataplexProjectsLocationsLakesCreateRequest(
            lakeId=lake_ref.Name(),
            parent=lake_ref.Parent().RelativeName(),
            validateOnly=args.validate_only,
            googleCloudDataplexV1Lake=message.GoogleCloudDataplexV1Lake(
                description=args.description,
                displayName=args.display_name,
                labels=dataplex_util.CreateLabels(
                    message.GoogleCloudDataplexV1Lake, args),
                metastore=message.GoogleCloudDataplexV1LakeMetastore(
                    service=args.metastore_service))))
    validate_only = getattr(args, 'validate_only', False)
    if validate_only:
      log.status.Print('Validation complete.')
      return
    async_ = getattr(args, 'async_', False)
    if not async_:
      # Synchronous path: block until the long-running operation finishes.
      lake.WaitForLongOperation(create_req_op)
      log.CreatedResource(
          lake_ref.Name(),
          details='Lake created in [{0}]'.format(
              lake_ref.Parent().RelativeName()))
      return
    log.status.Print('Creating [{0}] with operation [{1}].'.format(
        lake_ref, create_req_op.name))

View File

@@ -0,0 +1,125 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to deauthorize a service agent from managing resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudresourcemanager import projects_api
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.projects import util as project_util
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class DeauthorizeLake(base.Command):
  """Deauthorize a service agent from managing resources.

  The service agent for the primary project will have its IAM role revoked from
  a secondary project, a Cloud Storage bucket, or a BigQuery dataset.
  """

  detailed_help = {
      'EXAMPLES': """\
          To deauthorize the service agent in project `test-project` from
          managing resources in the project `test-project2`, run:

            $ {command} --project=test-project --project-resource=test-project2

          To deauthorize the service agent in project `test-project` from
          managing the Cloud Storage bucket `dataplex-storage-bucket`, run:

            $ {command} --project=test-project --storage-bucket-resource=dataplex-storage-bucket

          To deauthorize the service agent in project `test-project` from
          managing the BigQuery dataset `test-dataset` in project
          `test-project2`, run:

            $ {command} --project=test-project --bigquery-dataset-resource=test-dataset --secondary-project=test-project2
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the primary-project argument and the target-resource flags."""
    resource_args.AddProjectArg(
        parser,
        'to revoke a role from the service agent in.',
    )
    # Exactly one target resource must be chosen.
    target_group = parser.add_group(
        mutex=True,
        required=True,
        help="The resource for which the service agent's role will be revoked.",
    )
    target_group.add_argument(
        '--storage-bucket-resource',
        help="""The identifier of the Cloud Storage bucket that the service agent will no longer manage.""",
    )
    target_group.add_argument(
        '--project-resource',
        help=(
            'The identifier of the project whose resources the service agent'
            ' will no longer manage.'
        ),
    )
    # BigQuery targets need both a dataset id and the project that owns it.
    bq_group = target_group.add_group(
        help='Fields to identify the BigQuery dataset.'
    )
    bq_group.add_argument(
        '--bigquery-dataset-resource',
        required=True,
        help="""The identifier of the BigQuery dataset that the service agent will no longer manage.""",
    )
    bq_group.add_argument(
        '--secondary-project',
        required=True,
        help=(
            'The identifier of the project where the BigQuery dataset is'
            ' located.'
        ),
    )

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Revokes roles/dataplex.serviceAgent from the project's service agent."""
    project_ref = args.CONCEPTS.project.Parse()
    # The service agent address is derived from the project number.
    project_number = project_util.GetProjectNumber(project_ref.projectsId)
    service_account = ('service-' + str(project_number) +
                       '@gcp-sa-dataplex.iam.gserviceaccount.com')
    member = 'serviceAccount:' + service_account
    role = 'roles/dataplex.serviceAgent'
    if args.IsSpecified('storage_bucket_resource'):
      bucket_ref = storage_util.BucketReference(args.storage_bucket_resource)
      return lake.RemoveServiceAccountFromBucketPolicy(bucket_ref, member, role)
    if args.IsSpecified('bigquery_dataset_resource'):
      bq_messages = apis.GetMessagesModule('bigquery', 'v2')
      bq_client = apis.GetClientInstance('bigquery', 'v2')
      # Read-modify-write: fetch the dataset, strip the service-agent access
      # entry, then patch the dataset back.
      dataset = bq_client.datasets.Get(
          request=bq_messages.BigqueryDatasetsGetRequest(
              datasetId=args.bigquery_dataset_resource,
              projectId=args.secondary_project))
      lake.RemoveServiceAccountFromDatasetPolicy(dataset, service_account, role)
      return bq_client.datasets.Patch(
          bq_messages.BigqueryDatasetsPatchRequest(
              datasetId=args.bigquery_dataset_resource,
              projectId=args.secondary_project,
              dataset=dataset))
    if args.IsSpecified('project_resource'):
      return projects_api.RemoveIamPolicyBinding(
          project_util.ParseProject(args.project_resource), member, role)

View File

@@ -0,0 +1,21 @@
# Declarative spec for `gcloud dataplex lakes delete`.
- release_tracks: [ALPHA, GA]
  help_text:
    brief: |
      Delete a Dataplex lake resource.
    description: |
      Delete a Dataplex lake resource.
    examples: |
      To delete a Dataplex lake `test-lake` in location `us-central1`, run:

        $ {command} test-lake --location=us-central1
  request:
    ALPHA:
      api_version: v1
    collection: dataplex.projects.locations.lakes
  arguments:
    resource:
      help_text: |
        Arguments and flags that define the Dataplex lake you want to delete.
      spec: !REF googlecloudsdk.command_lib.dataplex.resources:lake
  # Delete is a long-running operation; it is polled via this collection.
  async:
    collection: dataplex.projects.locations.operations

View File

@@ -0,0 +1,20 @@
# Declarative spec for `gcloud dataplex lakes describe`.
- release_tracks: [ALPHA, GA]
  help_text:
    brief: |
      Describe a Dataplex lake resource.
    description: |
      Displays all details of a Dataplex lake resource given a valid lake ID.
    examples: |
      To describe a Dataplex lake `test-lake` in location `us-central1`, run:

        $ {command} test-lake --location=us-central1
  request:
    ALPHA:
      api_version: v1
    collection: dataplex.projects.locations.lakes
    method: get
  arguments:
    resource:
      help_text: |
        Arguments and flags that define the Dataplex lake you want to retrieve.
      spec: !REF googlecloudsdk.command_lib.dataplex.resources:lake

View File

@@ -0,0 +1,21 @@
# Declarative spec for `gcloud dataplex lakes get-iam-policy`.
- release_tracks: [ALPHA, GA]
  help_text:
    brief: |
      Get the IAM policy for a Dataplex lake resource.
    description: |
      Displays the IAM policy associated with a Dataplex lake resource.
      If formatted as JSON, the output can be edited and used as
      a policy file for *set-iam-policy*. The output includes an "etag"
      field identifying the version emitted and allowing detection of
      concurrent policy updates.
    examples: |
      To print the IAM policy for Dataplex lake `test-lake` in location `us-central1`, run:

        $ {command} test-lake --location=us-central1
  request:
    collection: dataplex.projects.locations.lakes
  arguments:
    resource:
      help_text: |
        Arguments and flags that define the Dataplex lake IAM policy you want to retrieve.
      spec: !REF googlecloudsdk.command_lib.dataplex.resources:lake

View File

@@ -0,0 +1,39 @@
# Declarative spec for `gcloud dataplex lakes list`.
# Fixes: "lake resource" -> "lake resources" (plural) in the description and
# resource help, and the first example's prose said `us-central` while the
# command line passed `--location=us-central1`.
- release_tracks: [ALPHA, GA]
  help_text:
    brief: |
      List Dataplex lake resources under a project.
    description: |
      List all Dataplex lake resources under a specific project and location.
    examples: |
      To list all Dataplex lake resources in location `us-central1`, run:

        $ {command} --project=test-project --location=us-central1

      To list all Dataplex lakes in all locations, run:

        $ {command} --project=test-project --location=-
  request:
    ALPHA:
      api_version: v1
    collection: dataplex.projects.locations.lakes
  arguments:
    resource:
      help_text: |
        Arguments and flags that define the Dataplex lakes you want to list.
      spec: !REF googlecloudsdk.command_lib.dataplex.resources:location
  response:
    id_field: name
  output:
    format: |
      table(
        name.basename():label=NAME,
        displayName:label=DISPLAY_NAME,
        state:label=LAKE_STATUS,
        metastoreStatus.state:label=METASTORE_STATUS,
        metastore.service.basename():label=METASTORE,
        securityStatus.state:label=SECURITY_STATUS,
        assetStatus.activeAssets:label=ACTIVE_ASSETS,
        labels:label=LABELS
      )

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to remove-iam-policy-binding from a Dataplex lake resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.iam import iam_util
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class RemoveIamPolicyBinding(base.Command):
  """Remove IAM policy binding from a Dataplex lake resource."""

  # Fix: the example prose used straight quotes ('us-central') and a location
  # that did not match the command line (`us-central1`), and named user
  # `test-user@gmail.com` while the command used `user:foo@gmail.com`.
  detailed_help = {
      'EXAMPLES':
          """\
          To remove an IAM policy binding for the role `roles/dataplex.viewer`
          for the user `test-user@gmail.com` from lake `test-lake` in location
          `us-central1`, run:

            $ {command} test-lake --location=us-central1 --role=roles/dataplex.viewer --member=user:test-user@gmail.com

          See https://cloud.google.com/dataplex/docs/iam-roles for details of
          policy role and member types.
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the lake resource argument and the --role/--member flags."""
    resource_args.AddLakeResourceArg(parser,
                                     'to remove IAM policy binding from.')
    iam_util.AddArgsForRemoveIamPolicyBinding(parser)

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Removes the binding and returns the updated IAM policy."""
    lake_ref = args.CONCEPTS.lake.Parse()
    return lake.RemoveIamPolicyBinding(lake_ref, args.member, args.role)

View File

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to set-iam-policy for a Dataplex lake resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.iam import iam_util
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class SetIamPolicy(base.Command):
  """Set the IAM policy to a Dataplex lake as defined in a JSON or YAML file.

  See https://cloud.google.com/iam/docs/managing-policies for details of
  the policy file format and contents.
  """

  # Fix: the example referred to the policy file as `policy.son`; the command
  # line (and the JSON wording) make clear it should be `policy.json`.
  detailed_help = {
      'EXAMPLES':
          """\
          The following command will read an IAM policy defined in a JSON file
          `policy.json` and set it for the Dataplex lake `test-lake` defined in
          location `us-central1`:

            $ {command} --location=us-central1 test-lake policy.json
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the lake resource argument and the policy-file argument."""
    resource_args.AddLakeResourceArg(parser, 'to set IAM policy binding to.')
    iam_util.AddArgForPolicyFile(parser)

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Reads the policy file and applies it to the lake; returns the policy."""
    lake_ref = args.CONCEPTS.lake.Parse()
    return lake.SetIamPolicyFromFile(lake_ref, args.policy_file)

View File

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to update a Dataplex lake resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.dataplex import lake
from googlecloudsdk.api_lib.dataplex import util as dataplex_util
from googlecloudsdk.api_lib.util import exceptions as gcloud_exception
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.dataplex import resource_args
from googlecloudsdk.command_lib.util.args import labels_util
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class Update(base.Command):
  """Update a Dataplex lake resource."""

  detailed_help = {
      'EXAMPLES':
          """\
          To update a Dataplex Lake `test-lake` in location `us-central1` to
          have the display name `first-dataplex-lake` and metastore service \
          `projects/test-lake/locations/us-central1/service/test-service`, run:

            $ {command} test-lake --location=us-central1 --display-name="first-dataplex-lake" --metastore-service="projects/test-lake/locations/us-central1/service/test-service"
          """,
  }

  @staticmethod
  def Args(parser):
    """Registers the lake resource argument and the update flags."""
    resource_args.AddLakeResourceArg(parser, 'to update.')
    parser.add_argument(
        '--validate-only',
        action='store_true',
        default=False,
        help='Validate the update action, but don\'t actually perform it.')
    # Help strings end with a period, matching the sibling create command.
    parser.add_argument('--description', help='Description of the lake.')
    parser.add_argument('--display-name', help='Display name of the lake.')
    metastore = parser.add_group(
        help='Settings to manage metadata publishing to a Hive Metastore from a lake.'
    )
    metastore.add_argument(
        '--metastore-service',
        help=""" A relative reference to the Dataproc Metastore
        (https://cloud.google.com/dataproc-metastore/docs) service instance into
        which metadata will be published. This is of the form:
        `projects/{project_number}/locations/{location_id}/services/{service_id}`
        where the location matches the location of the lake.""")
    base.ASYNC_FLAG.AddToParser(parser)
    labels_util.AddCreateLabelsFlags(parser)

  @gcloud_exception.CatchHTTPErrorRaiseHTTPException(
      'Status code: {status_code}. {status_message}.')
  def Run(self, args):
    """Sends the patch request and handles validate-only/async modes."""
    update_mask = lake.GenerateUpdateMask(args)
    if len(update_mask) < 1:
      raise exceptions.HttpException(
          'Update commands must specify at least one additional parameter to change.'
      )
    lake_ref = args.CONCEPTS.lake.Parse()
    dataplex_client = dataplex_util.GetClientInstance()
    message = dataplex_util.GetMessageModule()
    # validateOnly is handled server-side: the request below performs only
    # validation when the flag is set.
    update_req_op = dataplex_client.projects_locations_lakes.Patch(
        message.DataplexProjectsLocationsLakesPatchRequest(
            name=lake_ref.RelativeName(),
            validateOnly=args.validate_only,
            updateMask=','.join(update_mask),
            googleCloudDataplexV1Lake=message.GoogleCloudDataplexV1Lake(
                description=args.description,
                displayName=args.display_name,
                metastore=message.GoogleCloudDataplexV1LakeMetastore(
                    service=args.metastore_service),
                labels=dataplex_util.CreateLabels(
                    message.GoogleCloudDataplexV1Lake, args))))
    validate_only = getattr(args, 'validate_only', False)
    if validate_only:
      # Fix: previously printed 'Validation complete with errors:'
      # unconditionally, even when validation succeeded; use the same neutral
      # message as the create command.
      log.status.Print('Validation complete.')
      return update_req_op
    async_ = getattr(args, 'async_', False)
    if not async_:
      # Synchronous path: block until the long-running operation finishes.
      lake.WaitForOperation(update_req_op)
      # Fix: 'sucessful' -> 'successful'.
      log.UpdatedResource(lake_ref, details='Operation was successful.')
      return
    log.status.Print('Updating [{0}] with operation [{1}].'.format(
        lake_ref, update_req_op.name))