feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,118 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Pub/Sub Lite subscriptions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from concurrent import futures
import time
from typing import Optional
from google.cloud.pubsublite import cloudpubsub
from google.cloud.pubsublite import types
from google.pubsub_v1 import PubsubMessage
from googlecloudsdk.command_lib.pubsub import lite_util
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import gapic_util
from googlecloudsdk.core import log
from six.moves import queue
_MAX_INT64 = 0x7FFFFFFFFFFFFFFF
class SubscribeOperationException(exceptions.Error):
  """Error raised when the streaming pull future completes with an exception."""
def GetDefaultSubscriberClient():
  """Builds a Pub/Sub Lite subscriber client using gcloud's GAPIC credentials."""
  credentials = gapic_util.GetGapicCredentials()
  return cloudpubsub.SubscriberClient(credentials=credentials)
class SubscriberClient(object):
  """GCloud wrapper client for a Pub/Sub Lite subscriber.

  Intended to be used as a context manager: the underlying streaming pull is
  started in `__enter__` and torn down in `__exit__`. Messages received by the
  background subscriber are buffered in an internal queue and handed out one
  at a time by `Pull`.
  """

  def __init__(self,
               subscription_resource,
               partitions,
               max_messages,
               auto_ack,
               client=None):
    """Initializes the wrapper.

    Args:
      subscription_resource: The pubsub lite subscription resource to read
        from (must expose projectsId, locationsId and subscriptionsId).
      partitions: Iterable of partition numbers to subscribe to.
      max_messages (int): Maximum number of outstanding (unconsumed) messages.
      auto_ack (bool): Whether `Pull` should ack each message automatically.
      client: Optional subscriber client; defaults to
        GetDefaultSubscriberClient().
    """
    self._client = client or GetDefaultSubscriberClient()
    # Buffer filled by the background subscriber callback; drained by Pull().
    self._messages = queue.Queue()
    self._subscription = self._SubscriptionResourceToPath(subscription_resource)
    self._partitions = {types.Partition(partition) for partition in partitions}
    # Bound outstanding messages per partition by max_messages; effectively
    # leave outstanding bytes unbounded.
    self._flow_control_settings = types.FlowControlSettings(
        messages_outstanding=max_messages,
        bytes_outstanding=_MAX_INT64,
    )
    self._auto_ack = auto_ack
    # Set in __enter__; holds the streaming pull future while subscribed.
    self._pull_future = None

  def __enter__(self):
    self._client.__enter__()
    # Each received message is enqueued for later consumption by Pull().
    self._pull_future = self._client.subscribe(
        self._subscription,
        callback=self._messages.put,
        per_partition_flow_control_settings=self._flow_control_settings,
        fixed_partitions=self._partitions)
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    time.sleep(1)  # Wait 1 second to ensure all acks have been processed
    if not self._pull_future.done():
      try:
        # Cancel the streaming pull future and get the result to prevent
        # logging an abandoned future.
        self._pull_future.cancel()
        self._pull_future.result()
      except futures.CancelledError:
        pass
    self._client.__exit__(exc_type, exc_value, traceback)

  def _SubscriptionResourceToPath(self, resource):
    """Converts a subscription resource into a pubsublite SubscriptionPath."""
    return types.SubscriptionPath(
        project=lite_util.ProjectIdToProjectNumber(resource.projectsId),
        location=lite_util.LocationToZoneOrRegion(resource.locationsId),
        name=resource.subscriptionsId)

  def _RaiseIfFailed(self):
    """Raises if the background streaming pull has already terminated.

    Raises:
      SubscribeOperationException: if the pull future failed with an error.
      exceptions.InternalError: if the pull future completed without error,
        which is never expected for a streaming pull.
    """
    if self._pull_future.done():
      e = self._pull_future.exception()
      if e:
        raise SubscribeOperationException(
            'Subscribe operation failed with error: {error}'.format(error=e))
      log.debug('The streaming pull future completed unexpectedly without '
                'raising an exception.')
      raise exceptions.InternalError(
          'The subscribe stream terminated unexpectedly.')

  def Pull(self) -> Optional[PubsubMessage]:
    """Pulls and optionally acks a message from the provided subscription.

    Returns:
      A PubsubMessage pulled from the subscription, or None if no message
      became available within the 1-second wait.
    """
    self._RaiseIfFailed()
    try:
      message = self._messages.get(timeout=1)
      if self._auto_ack:
        message.ack()
      return message
    except queue.Empty:
      return None

View File

@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Pub/Sub Lite topics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from google.cloud.pubsublite import cloudpubsub
from google.cloud.pubsublite import types
from google.cloud.pubsublite.cloudpubsub import message_transforms
from googlecloudsdk.api_lib.pubsub import topics
from googlecloudsdk.command_lib.pubsub import lite_util
from googlecloudsdk.core import gapic_util
from googlecloudsdk.core.util import http_encoding
def GetDefaultPublisherClient():
  """Builds a Pub/Sub Lite publisher client using gcloud's GAPIC credentials."""
  credentials = gapic_util.GetGapicCredentials()
  return cloudpubsub.PublisherClient(credentials=credentials)
class PublisherClient(object):
  """Wrapper client for a Pub/Sub Lite publisher.

  Intended to be used as a context manager around the underlying
  cloudpubsub publisher client.
  """

  def __init__(self, client=None):
    """Initializes the wrapper with `client` or a default publisher client."""
    self._client = client or GetDefaultPublisherClient()

  def __enter__(self):
    self._client.__enter__()
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    self._client.__exit__(exc_type, exc_value, traceback)

  def _TopicResourceToPath(self, resource):
    """Converts a pubsub lite topic resource into a pubsublite TopicPath."""
    return types.TopicPath(
        project=lite_util.ProjectIdToProjectNumber(resource.projectsId),
        location=lite_util.LocationToZoneOrRegion(resource.locationsId),
        name=resource.topicsId)

  def Publish(self,
              topic_resource,
              message=None,
              ordering_key=None,
              attributes=None,
              event_time=None):
    """Publishes a message to the specified Pub/Sub Lite topic.

    Args:
      topic_resource: The pubsub.lite_topic resource to publish to.
      message: The string message to publish.
      ordering_key: The key for ordering delivery to subscribers.
      attributes: A dict of attributes to attach to the message. The caller's
        dict is never modified.
      event_time: A user-specified event timestamp.

    Raises:
      EmptyMessageException: if the message is empty.
      PublishOperationException: if the publish operation is not successful.

    Returns:
      The messageId of the published message, containing the Partition and
      Offset.
    """
    if not message and not attributes:
      raise topics.EmptyMessageException(
          'You cannot send an empty message. You must specify either a '
          'MESSAGE, one or more ATTRIBUTE, or both.')
    topic_path = self._TopicResourceToPath(topic_resource)
    # Copy before adding the event-time attribute: the original code wrote
    # into the caller-supplied dict, leaking the synthetic attribute back to
    # the caller.
    attributes = dict(attributes) if attributes else {}
    if event_time:
      attributes[message_transforms.PUBSUB_LITE_EVENT_TIME] = (
          message_transforms.encode_attribute_event_time(event_time))
    try:
      # blocks on result() so publish failures surface here.
      return types.MessageMetadata.decode(
          self._client.publish(topic_path, http_encoding.Encode(message),
                               ordering_key, **attributes).result())
    except Exception as e:  # pylint: disable=broad-except
      # Deliberately broad: any failure from the publisher surfaces as a
      # single user-facing operation error.
      raise topics.PublishOperationException(
          'Publish operation failed with error: {error}'.format(error=e))

View File

@@ -0,0 +1,163 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Pub/Sub Message Transforms API."""
from googlecloudsdk.api_lib.pubsub import utils
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import exceptions
class EmptyMessageException(exceptions.Error):
  """Error raised when neither a message body nor attributes were specified for a Test operation."""
class EmptyFilePathException(exceptions.Error):
  """Error raised when no message transforms file path was specified for a Validate operation."""
def GetClientInstance(no_http=False):
  """Returns a client instance for the Cloud Pub/Sub v1 API."""
  return apis.GetClientInstance('pubsub', 'v1', no_http=no_http)
def GetMessagesModule(client=None):
  """Returns the generated message classes for the Pub/Sub v1 API."""
  api_client = client or GetClientInstance()
  return api_client.MESSAGES_MODULE
class MessageTransformsClient(object):
  """Client for message transforms service in the Cloud Pub/Sub API."""

  def __init__(self, client=None, messages=None):
    self.client = client or GetClientInstance()
    # Note: the caller-supplied `client` (possibly None) is forwarded here;
    # GetMessagesModule falls back to its own default client when None.
    self.messages = messages or GetMessagesModule(client)
    self._service = self.client.projects

  def Validate(self, project_ref, message_transform_file=None):
    """Validates a message transform.

    Args:
      project_ref (Resource): Resource reference for the project.
      message_transform_file (str): The file path to the JSON or YAML file
        containing the message transform.

    Returns:
      ValidateMessageTransformResponse (success) if the message transform is
      valid, otherwise an error.

    Raises:
      EmptyFilePathException: If no message transform file was specified.
    """
    if not message_transform_file:
      raise EmptyFilePathException(
          'You need to specify a path to JSON or YAML file containing the'
          ' message transform to validate.'
      )
    try:
      message_transform = utils.GetMessageTransformFromFileForValidation(
          self.messages.MessageTransform, message_transform_file
      )
    except (
        utils.MessageTransformsInvalidFormatError,
        utils.MessageTransformsEmptyFileError,
        utils.MessageTransformsMissingFileError,
    ) as e:
      # Rewrite the exception text into a user-friendly message, then
      # re-raise the original exception type.
      e.args = (utils.GetErrorMessage(e),)
      raise
    validate_request = self.messages.PubsubProjectsValidateMessageTransformRequest(
        project=project_ref.RelativeName(),
        validateMessageTransformRequest=self.messages.ValidateMessageTransformRequest(
            messageTransform=message_transform,
        ),
    )
    return self._service.ValidateMessageTransform(validate_request)

  def Test(
      self,
      project_ref,
      message_body=None,
      attributes=None,
      message_transforms_file=None,
      topic_ref=None,
      subscription_ref=None,
  ):
    """Tests applying message transforms to a message.

    Args:
      project_ref (Resource): Resource reference for the project.
      message_body (bytes): The message to test.
      attributes (list[AdditionalProperty]): List of attributes to attach to the
        message.
      message_transforms_file (str): The file path to the JSON or YAML file
        containing the message transforms.
      topic_ref (Resource): The topic containing the message transforms to test
        against.
      subscription_ref (Resource): The subscription containing the message
        transforms to test against.

    Returns:
      TestMessageTransformsResponse which contains a list of TransformedMessage.

    Raises:
      EmptyMessageException: If no message body or attributes were specified.
      EmptyMessageTransformsException: If no message
        transforms file/topic/subscription were specified.
    """
    if not message_body and not attributes:
      raise EmptyMessageException(
          'You cannot send an empty message. You must specify either a '
          'MESSAGE, one or more ATTRIBUTE, or both.'
      )
    # NOTE(review): an AttributesValue wrapper is built even when `attributes`
    # is None — presumably the generated proto treats an empty wrapper the
    # same as an unset field; confirm against the generated API.
    message = self.messages.PubsubMessage(
        data=message_body,
        attributes=self.messages.PubsubMessage.AttributesValue(
            additionalProperties=attributes
        ),
    )
    message_transforms = None
    if message_transforms_file:
      try:
        message_transforms = utils.GetMessageTransformsFromFile(
            self.messages.MessageTransform,
            message_transforms_file,
            enable_vertex_ai_smt=False,
        )
      except (
          utils.MessageTransformsInvalidFormatError,
          utils.MessageTransformsEmptyFileError,
          utils.MessageTransformsMissingFileError,
      ) as e:
        # Same user-friendly rewrite as in Validate above.
        e.args = (utils.GetErrorMessage(e),)
        raise
    message_transforms_msg = (
        self.messages.MessageTransforms(messageTransforms=message_transforms)
        if message_transforms
        else None
    )
    test_request = self.messages.PubsubProjectsTestMessageTransformsRequest(
        project=project_ref.RelativeName(),
        testMessageTransformsRequest=self.messages.TestMessageTransformsRequest(
            message=message,
            messageTransforms=message_transforms_msg,
            topic=topic_ref.RelativeName() if topic_ref else None,
            subscription=subscription_ref.RelativeName()
            if subscription_ref
            else None,
        ),
    )
    return self._service.TestMessageTransforms(test_request)

View File

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Pub/Sub Schemas API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.pubsub.util import InvalidArgumentError
def NoRevisionIdSpecified():
  """Builds the error raised when a schema path lacks a revision-id suffix."""
  message = ('The schema name must include a revision-id of the format:'
             ' SCHEMA_NAME@REVISION_ID.')
  return InvalidArgumentError(message)
def CheckRevisionIdInSchemaPath(schema_ref):
  """Returns True if the schema path carries an @REVISION_ID suffix."""
  return '@' in schema_ref
def InvalidSchemaType():
  """Builds the error raised for an unrecognized schema type string."""
  message = 'The schema type must be either AVRO or PROTOCOL-BUFFER.'
  return InvalidArgumentError(message)
def ParseSchemaType(messages, schema_type):
  """Maps a user-supplied schema type string to the API enum value.

  Args:
    messages: The Pub/Sub messages module.
    schema_type: Case-insensitive type string ('avro', 'protocol-buffer' or
      'protocol_buffer').

  Returns:
    The corresponding Schema.TypeValueValuesEnum value.

  Raises:
    InvalidArgumentError: if the string matches neither supported type.
  """
  enum = messages.Schema.TypeValueValuesEnum
  normalized = schema_type.lower()
  if normalized in ('protocol-buffer', 'protocol_buffer'):
    return enum.PROTOCOL_BUFFER
  if normalized == 'avro':
    return enum.AVRO
  raise InvalidSchemaType()
def GetClientInstance(no_http=False):
  """Returns a client instance for the Cloud Pub/Sub v1 API."""
  return apis.GetClientInstance('pubsub', 'v1', no_http=no_http)
def GetMessagesModule(client=None):
  """Returns the generated message classes for the Pub/Sub v1 API."""
  api_client = client or GetClientInstance()
  return api_client.MESSAGES_MODULE
class SchemasClient(object):
  """Client for schemas service in the Cloud Pub/Sub API."""

  def __init__(self, client=None, messages=None):
    self.client = client or GetClientInstance()
    # Note: the caller-supplied `client` (possibly None) is forwarded here;
    # GetMessagesModule falls back to its own default client when None.
    self.messages = messages or GetMessagesModule(client)
    self._service = self.client.projects_schemas

  def Commit(self, schema_ref, schema_definition, schema_type):
    """Commits a revision for a Schema.

    Args:
      schema_ref: The full schema_path.
      schema_definition: The new schema definition to commit.
      schema_type: The type of the schema (avro or protocol-buffer).

    Returns:
      Schema: the committed Schema revision
    """
    schema = self.messages.Schema(
        name=schema_ref,
        type=ParseSchemaType(self.messages, schema_type),
        definition=schema_definition,
    )
    commit_req = self.messages.PubsubProjectsSchemasCommitRequest(
        commitSchemaRequest=self.messages.CommitSchemaRequest(schema=schema),
        name=schema_ref,
    )
    return self._service.Commit(commit_req)

  def Rollback(self, schema_ref, revision_id):
    """Rolls back to a previous schema revision.

    Args:
      schema_ref: The path of the schema to rollback.
      revision_id: The revision_id to rollback to.

    Returns:
      Schema: the new schema revision you have rolled back to.

    Raises:
      InvalidArgumentError: If no revision_id is provided.
    """
    # NOTE(review): despite the Raises section, no client-side check of
    # revision_id happens here — presumably it is validated at argument
    # parsing or by the server; confirm with callers.
    rollback_req = self.messages.PubsubProjectsSchemasRollbackRequest(
        rollbackSchemaRequest=self.messages.RollbackSchemaRequest(
            revisionId=revision_id
        ),
        name=schema_ref,
    )
    return self._service.Rollback(rollback_req)

  def DeleteRevision(self, schema_ref):
    """Deletes a schema revision.

    Args:
      schema_ref: The path of the schema, with the revision_id.

    Returns:
      Schema: the deleted schema revision.

    Raises:
      InvalidArgumentError: If schema_ref contains no @revision-id suffix.
    """
    if not CheckRevisionIdInSchemaPath(schema_ref):
      raise NoRevisionIdSpecified()
    delete_revision_req = (
        self.messages.PubsubProjectsSchemasDeleteRevisionRequest(
            name=schema_ref
        )
    )
    return self._service.DeleteRevision(delete_revision_req)

View File

@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Pub/Sub Snapshots API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import exceptions
def GetClientInstance(no_http=False):
  """Returns a client instance for the Cloud Pub/Sub v1 API."""
  return apis.GetClientInstance('pubsub', 'v1', no_http=no_http)
def GetMessagesModule(client=None):
  """Returns the generated message classes for the Pub/Sub v1 API."""
  api_client = client or GetClientInstance()
  return api_client.MESSAGES_MODULE
class NoFieldsSpecifiedError(exceptions.Error):
  """Error raised when no fields were specified for a Patch operation."""
class _SnapshotUpdateSetting(object):
  """Data container class for updating a snapshot."""

  def __init__(self, field_name, value):
    # Name of the Snapshot proto field to update, and the value to set;
    # a value of None means "not requested" (see SnapshotsClient.Patch).
    self.field_name = field_name
    self.value = value
class SnapshotsClient(object):
  """Client for snapshots service in the Cloud Pub/Sub API."""

  def __init__(self, client=None, messages=None):
    self.client = client or GetClientInstance()
    # Note: the caller-supplied `client` (possibly None) is forwarded here;
    # GetMessagesModule falls back to its own default client when None.
    self.messages = messages or GetMessagesModule(client)
    self._service = self.client.projects_snapshots

  def Create(self, snapshot_ref, subscription_ref, labels=None, tags=None):
    """Creates a Snapshot.

    Args:
      snapshot_ref (Resource): Resource reference for the snapshot to create.
      subscription_ref (Resource): The subscription whose backlog the
        snapshot captures.
      labels (LabelsValue): Cloud labels for the snapshot, if any.
      tags (TagsValue): Tags to bind to the snapshot, if any.

    Returns:
      Snapshot: the created snapshot.
    """
    create_req = self.messages.PubsubProjectsSnapshotsCreateRequest(
        createSnapshotRequest=self.messages.CreateSnapshotRequest(
            subscription=subscription_ref.RelativeName(),
            labels=labels,
            tags=tags),
        name=snapshot_ref.RelativeName())
    return self._service.Create(create_req)

  def Get(self, snapshot_ref):
    """Gets a Snapshot.

    Args:
      snapshot_ref (Resource): Resource reference to the Snapshot to get.

    Returns:
      Snapshot: The snapshot.
    """
    get_req = self.messages.PubsubProjectsSnapshotsGetRequest(
        snapshot=snapshot_ref.RelativeName())
    return self._service.Get(get_req)

  def Delete(self, snapshot_ref):
    """Deletes a Snapshot."""
    delete_req = self.messages.PubsubProjectsSnapshotsDeleteRequest(
        snapshot=snapshot_ref.RelativeName())
    return self._service.Delete(delete_req)

  def List(self, project_ref, page_size=100):
    """Lists Snapshots for a given project.

    Args:
      project_ref (Resource): Resource reference to Project to list
        Snapshots from.
      page_size (int): the number of entries in each batch (affects requests
        made, but not the yielded results).

    Returns:
      A generator of Snapshots in the Project.
    """
    list_req = self.messages.PubsubProjectsSnapshotsListRequest(
        project=project_ref.RelativeName(),
        pageSize=page_size
    )
    return list_pager.YieldFromList(
        self._service, list_req, batch_size=page_size,
        field='snapshots', batch_size_attribute='pageSize')

  def Patch(self, snapshot_ref, labels=None):
    """Updates a Snapshot.

    Args:
      snapshot_ref (Resource): Resource reference for the snapshot to be
        updated.
      labels (LabelsValue): The Cloud labels for the snapshot.

    Returns:
      Snapshot: The updated snapshot.

    Raises:
      NoFieldsSpecifiedError: if no fields were specified.
    """
    # Only 'labels' is currently updatable; the list form keeps the update
    # mask machinery ready for additional fields.
    update_settings = [_SnapshotUpdateSetting('labels', labels)]
    snapshot = self.messages.Snapshot(
        name=snapshot_ref.RelativeName())
    update_mask = []
    for update_setting in update_settings:
      if update_setting.value is not None:
        setattr(snapshot, update_setting.field_name, update_setting.value)
        update_mask.append(update_setting.field_name)
    if not update_mask:
      raise NoFieldsSpecifiedError('Must specify at least one field to update.')
    patch_req = self.messages.PubsubProjectsSnapshotsPatchRequest(
        updateSnapshotRequest=self.messages.UpdateSnapshotRequest(
            snapshot=snapshot,
            updateMask=','.join(update_mask)),
        name=snapshot_ref.RelativeName())
    return self._service.Patch(patch_req)

View File

@@ -0,0 +1,922 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Pub/Sub Subscriptions API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.pubsub import utils
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import exceptions
PULL_RPC_DEADLINE_SECONDS = '20'
SERVER_TIMEOUT_HEADER = 'X-Server-Timeout'
DEFAULT_MESSAGE_RETENTION_VALUE = 'default'
NEVER_EXPIRATION_PERIOD_VALUE = 'never'
CLEAR_DEAD_LETTER_VALUE = 'clear'
CLEAR_RETRY_VALUE = 'clear'
CLEAR_BIGQUERY_CONFIG_VALUE = 'clear'
CLEAR_CLOUD_STORAGE_CONFIG_VALUE = 'clear'
CLEAR_PUSH_NO_WRAPPER_CONFIG_VALUE = 'clear'
CLEAR_PUBSUB_EXPORT_CONFIG_VALUE = 'clear'
CLEAR_MESSAGE_TRANSFORMATIONS_VALUE = []
class NoFieldsSpecifiedError(exceptions.Error):
  """Error raised when no fields were specified for a Patch operation."""
def GetClientInstance(no_http=False):
  """Returns a client instance for the Cloud Pub/Sub v1 API."""
  return apis.GetClientInstance('pubsub', 'v1', no_http=no_http)
def GetMessagesModule(client=None):
  """Returns the generated message classes for the Pub/Sub v1 API."""
  api_client = client or GetClientInstance()
  return api_client.MESSAGES_MODULE
class _SubscriptionUpdateSetting(object):
  """Data container class for updating a subscription."""

  def __init__(self, field_name, value):
    # Name of the Subscription proto field to update, and the value to set.
    self.field_name = field_name
    self.value = value
class SubscriptionsClient(object):
"""Client for subscriptions service in the Cloud Pub/Sub API."""
  def __init__(self, client=None, messages=None):
    """Initializes with `client`/`messages` or API defaults."""
    self.client = client or GetClientInstance()
    # Note: the caller-supplied `client` (possibly None) is forwarded here;
    # GetMessagesModule falls back to its own default client when None.
    self.messages = messages or GetMessagesModule(client)
    self._service = self.client.projects_subscriptions
  def Ack(self, ack_ids, subscription_ref):
    """Acknowledges one or more messages for a Subscription.

    Args:
      ack_ids (list[str]): List of ack ids for the messages being ack'd.
      subscription_ref (Resource): Relative name of the subscription for which
        to ack messages for.

    Returns:
      None:
    """
    ack_req = self.messages.PubsubProjectsSubscriptionsAcknowledgeRequest(
        acknowledgeRequest=self.messages.AcknowledgeRequest(ackIds=ack_ids),
        subscription=subscription_ref.RelativeName(),
    )
    return self._service.Acknowledge(ack_req)
def Get(self, subscription_ref):
"""Gets a Subscription from the API.
Args:
subscription_ref (Resource): Relative name of the subscription to get.
Returns:
Subscription: the subscription.
"""
get_req = self.messages.PubsubProjectsSubscriptionsGetRequest(
subscription=subscription_ref.RelativeName()
)
return self._service.Get(get_req)
  def Create(
      self,
      subscription_ref,
      topic_ref,
      ack_deadline,
      push_config=None,
      retain_acked_messages=None,
      message_retention_duration=None,
      labels=None,
      no_expiration=False,
      expiration_period=None,
      enable_message_ordering=None,
      filter_string=None,
      dead_letter_topic=None,
      max_delivery_attempts=None,
      min_retry_delay=None,
      max_retry_delay=None,
      enable_exactly_once_delivery=None,
      bigquery_table=None,
      use_topic_schema=None,
      use_table_schema=None,
      write_metadata=None,
      drop_unknown_fields=None,
      bigquery_service_account_email=None,
      cloud_storage_bucket=None,
      cloud_storage_file_prefix=None,
      cloud_storage_file_suffix=None,
      cloud_storage_file_datetime_format=None,
      cloud_storage_max_bytes=None,
      cloud_storage_max_duration=None,
      cloud_storage_max_messages=None,
      cloud_storage_output_format=None,
      cloud_storage_use_topic_schema=None,
      cloud_storage_write_metadata=None,
      cloud_storage_service_account_email=None,
      pubsub_export_topic=None,
      pubsub_export_topic_region=None,
      message_transforms_file=None,
      tags=None,
      enable_vertex_ai_smt=False,
  ):
    """Creates a Subscription.

    Args:
      subscription_ref (Resource): Resource reference for subscription to be
        created.
      topic_ref (Resource): Resource reference for the associated topic for the
        subscriptions.
      ack_deadline (int): Number of seconds the system will wait for a
        subscriber to ack a message.
      push_config (Message): Message containing the push endpoint for the
        subscription.
      retain_acked_messages (bool): Whether or not to retain acked messages.
      message_retention_duration (int): How long to retained unacked messages.
      labels (Subscriptions.LabelsValue): The labels for the request.
      no_expiration (bool): Whether or not to set no expiration on subscription.
      expiration_period (str): TTL on expiration_policy.
      enable_message_ordering (bool): Whether or not to deliver messages with
        the same ordering key in order.
      filter_string (str): filter string in the Cloud Pub/Sub filter language.
      dead_letter_topic (str): Topic for publishing dead messages.
      max_delivery_attempts (int): Threshold of failed deliveries before sending
        message to the dead letter topic.
      min_retry_delay (str): The minimum delay between consecutive deliveries of
        a given message.
      max_retry_delay (str): The maximum delay between consecutive deliveries of
        a given message.
      enable_exactly_once_delivery (bool): Whether or not to set exactly once
        delivery on the subscription.
      bigquery_table (str): BigQuery table to which to write
      use_topic_schema (bool): Whether or not to use the topic schema when
        writing to BigQuery
      use_table_schema (bool): Whether or not to use the table schema when
        writing to BigQuery
      write_metadata (bool): Whether or not to write metadata fields when
        writing to BigQuery
      drop_unknown_fields (bool): Whether or not to drop fields that are only in
        the topic schema when writing to BigQuery
      bigquery_service_account_email (str): The service account to use when
        writing to BigQuery
      cloud_storage_bucket (str): The name for the Cloud Storage bucket.
      cloud_storage_file_prefix (str): The prefix for Cloud Storage filename.
      cloud_storage_file_suffix (str): The suffix for Cloud Storage filename.
      cloud_storage_file_datetime_format (str): The custom datetime format
        string for Cloud Storage filename.
      cloud_storage_max_bytes (int): The maximum bytes that can be written to a
        Cloud Storage file before a new file is created.
      cloud_storage_max_duration (str): The maximum duration that can elapse
        before a new Cloud Storage file is created.
      cloud_storage_max_messages (int): The maximum number of messages that can
        be written to a Cloud Storage file before a new file is created.
      cloud_storage_output_format (str): The output format for data written to
        Cloud Storage.
      cloud_storage_use_topic_schema (bool): Whether or not to use the topic
        schema when writing to Cloud Storage.
      cloud_storage_write_metadata (bool): Whether or not to write the
        subscription name and other metadata in the output.
      cloud_storage_service_account_email (str): The service account to use when
        writing to Cloud Storage
      pubsub_export_topic (str): The Pubsub topic to which to publish messages.
      pubsub_export_topic_region (str): The Cloud region to which to publish
        messages.
      message_transforms_file (str): The file path to the JSON or YAML file
        containing the message transforms.
      tags (TagsValue): The tags Keys/Values to be bound to the subscription.
      enable_vertex_ai_smt (bool): Whether or not to enable Vertex AI message
        transforms.

    Returns:
      Subscription: the created subscription
    """
    # The _*Policy/_*Config helpers return None when their inputs are unset,
    # leaving the corresponding proto fields unset.
    subscription = self.messages.Subscription(
        name=subscription_ref.RelativeName(),
        topic=topic_ref.RelativeName(),
        ackDeadlineSeconds=ack_deadline,
        pushConfig=push_config,
        retainAckedMessages=retain_acked_messages,
        labels=labels,
        messageRetentionDuration=message_retention_duration,
        expirationPolicy=self._ExpirationPolicy(
            no_expiration, expiration_period
        ),
        enableMessageOrdering=enable_message_ordering,
        filter=filter_string,
        deadLetterPolicy=self._DeadLetterPolicy(
            dead_letter_topic, max_delivery_attempts
        ),
        retryPolicy=self._RetryPolicy(min_retry_delay, max_retry_delay),
        enableExactlyOnceDelivery=enable_exactly_once_delivery,
        bigqueryConfig=self._BigQueryConfig(
            bigquery_table,
            use_topic_schema,
            use_table_schema,
            write_metadata,
            drop_unknown_fields,
            bigquery_service_account_email,
        ),
        cloudStorageConfig=self._CloudStorageConfig(
            cloud_storage_bucket,
            cloud_storage_file_prefix,
            cloud_storage_file_suffix,
            cloud_storage_file_datetime_format,
            cloud_storage_max_bytes,
            cloud_storage_max_duration,
            cloud_storage_max_messages,
            cloud_storage_output_format,
            cloud_storage_use_topic_schema,
            cloud_storage_write_metadata,
            cloud_storage_service_account_email,
        ),
        pubsubExportConfig=self._PubsubExportConfig(
            pubsub_export_topic, pubsub_export_topic_region
        ),
    )
    if message_transforms_file:
      try:
        subscription.messageTransforms = utils.GetMessageTransformsFromFile(
            self.messages.MessageTransform,
            message_transforms_file,
            enable_vertex_ai_smt,
        )
      except (
          utils.MessageTransformsInvalidFormatError,
          utils.MessageTransformsEmptyFileError,
          utils.MessageTransformsMissingFileError,
      ) as e:
        # Rewrite the exception text into a user-friendly message, then
        # re-raise the original exception type.
        e.args = (utils.GetErrorMessage(e),)
        raise
    if tags:
      subscription.tags = tags
    return self._service.Create(subscription)
def Delete(self, subscription_ref):
"""Deletes a Subscription.
Args:
subscription_ref (Resource): Resource reference for subscription to be
deleted.
Returns:
None:
"""
delete_req = self.messages.PubsubProjectsSubscriptionsDeleteRequest(
subscription=subscription_ref.RelativeName()
)
return self._service.Delete(delete_req)
def List(self, project_ref, page_size=100):
"""Lists Subscriptions for a given project.
Args:
project_ref (Resource): Resource reference to Project to list
subscriptions from.
page_size (int): the number of entries in each batch (affects requests
made, but not the yielded results).
Returns:
A generator of subscriptions in the project.
"""
list_req = self.messages.PubsubProjectsSubscriptionsListRequest(
project=project_ref.RelativeName(), pageSize=page_size
)
return list_pager.YieldFromList(
self._service,
list_req,
batch_size=page_size,
field='subscriptions',
batch_size_attribute='pageSize',
)
  def ModifyAckDeadline(self, subscription_ref, ack_ids, ack_deadline):
    """Modifies the ack deadline for messages for a Subscription.

    Args:
      subscription_ref (Resource): Resource reference for subscription to be
        modified.
      ack_ids (list[str]): List of ack ids to modify.
      ack_deadline (int): The new ack deadline, in seconds, for the messages.

    Returns:
      None:
    """
    mod_req = self.messages.PubsubProjectsSubscriptionsModifyAckDeadlineRequest(
        modifyAckDeadlineRequest=self.messages.ModifyAckDeadlineRequest(
            ackDeadlineSeconds=ack_deadline, ackIds=ack_ids
        ),
        subscription=subscription_ref.RelativeName(),
    )
    return self._service.ModifyAckDeadline(mod_req)
def ModifyPushConfig(self, subscription_ref, push_config):
"""Modifies the push endpoint for a Subscription.
Args:
subscription_ref (Resource): Resource reference for subscription to be
modified.
push_config (Message): The new push endpoint for the Subscription.
Returns:
None:
"""
mod_req = self.messages.PubsubProjectsSubscriptionsModifyPushConfigRequest(
modifyPushConfigRequest=self.messages.ModifyPushConfigRequest(
pushConfig=push_config
),
subscription=subscription_ref.RelativeName(),
)
return self._service.ModifyPushConfig(mod_req)
def Pull(self, subscription_ref, max_messages, return_immediately=True):
"""Pulls one or more messages from a Subscription.
Args:
subscription_ref (Resource): Resource reference for subscription to be
pulled from.
max_messages (int): The maximum number of messages to retrieve.
return_immediately (bool): Whether or not to return immediately without
waiting for a new message for a bounded amount of time if there is
nothing to pull right now.
Returns:
PullResponse: proto containing the received messages.
"""
pull_req = self.messages.PubsubProjectsSubscriptionsPullRequest(
pullRequest=self.messages.PullRequest(
maxMessages=max_messages, returnImmediately=return_immediately
),
subscription=subscription_ref.RelativeName(),
)
self.client.additional_http_headers[SERVER_TIMEOUT_HEADER] = (
PULL_RPC_DEADLINE_SECONDS
)
pull_resp = self._service.Pull(pull_req)
del self.client.additional_http_headers[SERVER_TIMEOUT_HEADER]
return pull_resp
def Seek(self, subscription_ref, time=None, snapshot_ref=None):
  """Resets a Subscription's backlog to point to a given time or snapshot.

  Args:
    subscription_ref (Resource): Resource reference for subscription to be
      seeked on.
    time (str): The time to reset to.
    snapshot_ref (Resource): Resource reference to a snapshot.

  Returns:
    None:
  """
  snapshot_name = snapshot_ref.RelativeName() if snapshot_ref else snapshot_ref
  request = self.messages.PubsubProjectsSubscriptionsSeekRequest(
      subscription=subscription_ref.RelativeName(),
      seekRequest=self.messages.SeekRequest(snapshot=snapshot_name, time=time),
  )
  return self._service.Seek(request)
def _ExpirationPolicy(self, no_expiration, expiration_period):
"""Build ExpirationPolicy message from argument values.
Args:
no_expiration (bool): Whether or not to set no expiration on subscription.
expiration_period (str): TTL on expiration_policy.
Returns:
ExpirationPolicy message or None.
"""
if no_expiration:
return self.messages.ExpirationPolicy(ttl=None)
if expiration_period:
return self.messages.ExpirationPolicy(ttl=expiration_period)
return None
def _DeadLetterPolicy(self, dead_letter_topic, max_delivery_attempts):
"""Builds DeadLetterPolicy message from argument values.
Args:
dead_letter_topic (str): Topic for publishing dead messages.
max_delivery_attempts (int): Threshold of failed deliveries before sending
message to the dead letter topic.
Returns:
DeadLetterPolicy message or None.
"""
if dead_letter_topic:
return self.messages.DeadLetterPolicy(
deadLetterTopic=dead_letter_topic,
maxDeliveryAttempts=max_delivery_attempts,
)
return None
def _RetryPolicy(self, min_retry_delay, max_retry_delay):
"""Builds RetryPolicy message from argument values.
Args:
min_retry_delay (str): The minimum delay between consecutive deliveries of
a given message.
max_retry_delay (str): The maximum delay between consecutive deliveries of
a given message.
Returns:
DeadLetterPolicy message or None.
"""
if min_retry_delay or max_retry_delay:
return self.messages.RetryPolicy(
minimumBackoff=min_retry_delay, maximumBackoff=max_retry_delay
)
return None
def _BigQueryConfig(
self,
table,
use_topic_schema,
use_table_schema,
write_metadata,
drop_unknown_fields,
service_account_email,
):
"""Builds BigQueryConfig message from argument values.
Args:
table (str): The name of the table
use_topic_schema (bool): Whether or not to use the topic schema
use_table_schema (bool): Whether or not to use the table schema
write_metadata (bool): Whether or not to write metadata fields
drop_unknown_fields (bool): Whether or not to drop fields that are only in
the topic schema
service_account_email(str): The service account to use
Returns:
BigQueryConfig message or None
"""
if table:
return self.messages.BigQueryConfig(
table=table,
useTopicSchema=use_topic_schema,
useTableSchema=use_table_schema,
writeMetadata=write_metadata,
dropUnknownFields=drop_unknown_fields,
serviceAccountEmail=service_account_email,
)
return None
def _CloudStorageConfig(
self,
bucket,
file_prefix,
file_suffix,
file_datetime_format,
max_bytes,
max_duration,
max_messages,
output_format,
use_topic_schema,
write_metadata,
service_account_email,
):
"""Builds CloudStorageConfig message from argument values.
Args:
bucket (str): The name for the Cloud Storage bucket.
file_prefix (str): The prefix for Cloud Storage filename.
file_suffix (str): The suffix for Cloud Storage filename.
file_datetime_format (str): The custom datetime format string for Cloud
Storage filename.
max_bytes (int): The maximum bytes that can be written to a Cloud Storage
file before a new file is created.
max_duration (str): The maximum duration that can elapse before a new
Cloud Storage file is created.
max_messages (int): The maximum number of messages that can be written to
a Cloud Storage file before a new file is created.
output_format (str): The output format for data written to Cloud Storage.
use_topic_schema (bool): Whether or not to use the topic schema when
writing to Cloud Storage.
write_metadata (bool): Whether or not to write the subscription name and
other metadata in the output.
service_account_email(str): The service account to use
Returns:
CloudStorageConfig message or None
"""
if bucket:
cloud_storage_config = self.messages.CloudStorageConfig(
bucket=bucket,
filenamePrefix=file_prefix,
filenameSuffix=file_suffix,
filenameDatetimeFormat=file_datetime_format,
maxBytes=max_bytes,
maxDuration=max_duration,
maxMessages=max_messages,
serviceAccountEmail=service_account_email,
)
if output_format == 'text':
cloud_storage_config.textConfig = self.messages.TextConfig()
# TODO(b/318394291) Propagate error should avro fields be populated.
elif output_format == 'avro':
cloud_storage_config.avroConfig = self.messages.AvroConfig(
writeMetadata=write_metadata if write_metadata else False,
# TODO(b/318394291) set use_topic_schema else False when promoting
# to GA.
useTopicSchema=use_topic_schema if use_topic_schema else None,
)
return cloud_storage_config
return None
def _PubsubExportConfig(self, topic, region):
"""Builds PubsubExportConfig message from argument values.
Args:
topic (str): The Pubsub topic to which to publish messages.
region (str): The Cloud region to which to publish messages.
Returns:
PubsubExportConfig message or None
"""
if topic:
return self.messages.PubSubExportConfig(topic=topic, region=region)
return None
def _HandleMessageRetentionUpdate(self, update_setting):
  """Maps the retention DEFAULT sentinel to an explicit None value."""
  reset_requested = update_setting.value == DEFAULT_MESSAGE_RETENTION_VALUE
  if reset_requested:
    update_setting.value = None
def _HandleDeadLetterPolicyUpdate(self, update_setting):
  """Maps the dead-letter CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_DEAD_LETTER_VALUE
  if clearing:
    update_setting.value = None
def _HandleRetryPolicyUpdate(self, update_setting):
  """Maps the retry-policy CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_RETRY_VALUE
  if clearing:
    update_setting.value = None
def _HandleBigQueryConfigUpdate(self, update_setting):
  """Maps the BigQuery-config CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_BIGQUERY_CONFIG_VALUE
  if clearing:
    update_setting.value = None
def _HandleCloudStorageConfigUpdate(self, update_setting):
  """Maps the Cloud Storage-config CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_CLOUD_STORAGE_CONFIG_VALUE
  if clearing:
    update_setting.value = None
def _HandlePushNoWrapperUpdate(self, update_setting):
  """Maps the push no-wrapper CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_PUSH_NO_WRAPPER_CONFIG_VALUE
  if clearing:
    update_setting.value = None
def _HandlePubsubExportConfigUpdate(self, update_setting):
  """Maps the Pubsub-export-config CLEAR sentinel to an explicit None value."""
  clearing = update_setting.value == CLEAR_PUBSUB_EXPORT_CONFIG_VALUE
  if clearing:
    update_setting.value = None
def Patch(
    self,
    subscription_ref,
    ack_deadline=None,
    push_config=None,
    retain_acked_messages=None,
    message_retention_duration=None,
    labels=None,
    no_expiration=False,
    expiration_period=None,
    dead_letter_topic=None,
    max_delivery_attempts=None,
    clear_dead_letter_policy=False,
    min_retry_delay=None,
    max_retry_delay=None,
    clear_retry_policy=False,
    enable_exactly_once_delivery=None,
    bigquery_table=None,
    use_topic_schema=None,
    use_table_schema=None,
    write_metadata=None,
    drop_unknown_fields=None,
    bigquery_service_account_email=None,
    clear_bigquery_config=False,
    cloud_storage_bucket=None,
    cloud_storage_file_prefix=None,
    cloud_storage_file_suffix=None,
    cloud_storage_file_datetime_format=None,
    cloud_storage_max_bytes=None,
    cloud_storage_max_duration=None,
    cloud_storage_max_messages=None,
    cloud_storage_output_format=None,
    cloud_storage_use_topic_schema=None,
    cloud_storage_write_metadata=None,
    cloud_storage_service_account_email=None,
    clear_cloud_storage_config=False,
    clear_push_no_wrapper_config=False,
    pubsub_export_topic=None,
    pubsub_export_topic_region=None,
    clear_pubsub_export_config=False,
    message_transforms_file=None,
    clear_message_transforms=False,
    enable_vertex_ai_smt=False,
):
  """Updates a Subscription.

  Args:
    subscription_ref (Resource): Resource reference for subscription to be
      updated.
    ack_deadline (int): Number of seconds the system will wait for a
      subscriber to ack a message.
    push_config (Message): Message containing the push endpoint for the
      subscription.
    retain_acked_messages (bool): Whether or not to retain acked messages.
    message_retention_duration (str): How long to retained unacked messages.
    labels (LabelsValue): The Cloud labels for the subscription.
    no_expiration (bool): Whether or not to set no expiration on
      subscription.
    expiration_period (str): TTL on expiration_policy.
    dead_letter_topic (str): Topic for publishing dead messages.
    max_delivery_attempts (int): Threshold of failed deliveries before
      sending message to the dead letter topic.
    clear_dead_letter_policy (bool): If set, clear the dead letter policy
      from the subscription.
    min_retry_delay (str): The minimum delay between consecutive deliveries
      of a given message.
    max_retry_delay (str): The maximum delay between consecutive deliveries
      of a given message.
    clear_retry_policy (bool): If set, clear the retry policy from the
      subscription.
    enable_exactly_once_delivery (bool): Whether or not to set exactly once
      delivery on the subscription.
    bigquery_table (str): BigQuery table to which to write
    use_topic_schema (bool): Whether or not to use the topic schema when
      writing to BigQuery
    use_table_schema (bool): Whether or not to use the table schema when
      writing to BigQuery
    write_metadata (bool): Whether or not to write metadata fields when
      writing to BigQuery
    drop_unknown_fields (bool): Whether or not to drop fields that are only
      in the topic schema when writing to BigQuery
    bigquery_service_account_email (str): The service account to use when
      writing to BigQuery
    clear_bigquery_config (bool): If set, clear the BigQuery config from the
      subscription
    cloud_storage_bucket (bool): The name for the Cloud Storage bucket.
    cloud_storage_file_prefix (str): The prefix for Cloud Storage filename.
    cloud_storage_file_suffix (str): The suffix for Cloud Storage filename.
    cloud_storage_file_datetime_format (str): The custom datetime format
      string for Cloud Storage filename.
    cloud_storage_max_bytes (int): The maximum bytes that can be written to
      a Cloud Storage file before a new file is created.
    cloud_storage_max_duration (str): The maximum duration that can elapse
      before a new Cloud Storage file is created.
    cloud_storage_max_messages (int): The maximum number of messages that
      can be written to a Cloud Storage file before a new file is created.
    cloud_storage_output_format (str): The output format for data written to
      Cloud Storage.
    cloud_storage_use_topic_schema (bool): Whether or not to use the topic
      schema when writing to Cloud Storage.
    cloud_storage_write_metadata (bool): Whether or not to write the
      subscription name and other metadata in the output.
    cloud_storage_service_account_email (str): The service account to use
      when writing to Cloud Storage
    clear_cloud_storage_config (bool): If set, clear the Cloud Storage
      config from the subscription.
    clear_push_no_wrapper_config (bool): If set, clear the Push No Wrapper
      config from the subscription.
    pubsub_export_topic (str): The Pubsub topic to which to publish
      messages.
    pubsub_export_topic_region (str): The Cloud region to which to publish
      messages.
    clear_pubsub_export_config (bool): If set, clear the Pubsub export
      config from the subscription.
    message_transforms_file (str): The file path to the JSON or YAML file
      containing the message transforms.
    clear_message_transforms (bool): If set, clears all message transforms
      from the subscription.
    enable_vertex_ai_smt (bool): If set, enables Vertex AI message
      transforms.

  Returns:
    Subscription: The updated subscription.

  Raises:
    NoFieldsSpecifiedError: if no fields were specified.
  """
  # Each "clear" flag takes precedence over the value-building arguments for
  # the same field: the CLEAR_* sentinel is later translated into an
  # explicit None by the matching _Handle*Update helper so the field still
  # appears in the update mask, which clears it server-side.
  if clear_cloud_storage_config:
    cloud_storage_config_settings = CLEAR_CLOUD_STORAGE_CONFIG_VALUE
  else:
    cloud_storage_config_settings = self._CloudStorageConfig(
        cloud_storage_bucket,
        cloud_storage_file_prefix,
        cloud_storage_file_suffix,
        cloud_storage_file_datetime_format,
        cloud_storage_max_bytes,
        cloud_storage_max_duration,
        cloud_storage_max_messages,
        cloud_storage_output_format,
        cloud_storage_use_topic_schema,
        cloud_storage_write_metadata,
        cloud_storage_service_account_email,
    )
  if clear_dead_letter_policy:
    dead_letter_policy = CLEAR_DEAD_LETTER_VALUE
  else:
    dead_letter_policy = self._DeadLetterPolicy(
        dead_letter_topic, max_delivery_attempts
    )
  if clear_retry_policy:
    retry_policy = CLEAR_RETRY_VALUE
  else:
    retry_policy = self._RetryPolicy(min_retry_delay, max_retry_delay)
  if clear_bigquery_config:
    bigquery_config = CLEAR_BIGQUERY_CONFIG_VALUE
  else:
    bigquery_config = self._BigQueryConfig(
        bigquery_table,
        use_topic_schema,
        use_table_schema,
        write_metadata,
        drop_unknown_fields,
        bigquery_service_account_email,
    )
  if clear_pubsub_export_config:
    pubsub_export_config = CLEAR_PUBSUB_EXPORT_CONFIG_VALUE
  else:
    pubsub_export_config = self._PubsubExportConfig(
        pubsub_export_topic, pubsub_export_topic_region
    )
  if clear_push_no_wrapper_config:
    push_config_no_wrapper = CLEAR_PUSH_NO_WRAPPER_CONFIG_VALUE
  else:
    push_config_no_wrapper = None
  if message_transforms_file:
    try:
      message_transforms = utils.GetMessageTransformsFromFile(
          self.messages.MessageTransform,
          message_transforms_file,
          enable_vertex_ai_smt,
      )
    except (
        utils.MessageTransformsInvalidFormatError,
        utils.MessageTransformsEmptyFileError,
        utils.MessageTransformsMissingFileError,
    ) as e:
      # Re-raise the same exception type with a friendlier, user-facing
      # message while preserving the original traceback.
      e.args = (utils.GetErrorMessage(e),)
      raise
  else:
    message_transforms = None
  if clear_message_transforms:
    clear_messages = CLEAR_MESSAGE_TRANSFORMATIONS_VALUE
  else:
    clear_messages = None
  # Pair each updatable field name (as used in the update mask) with its
  # candidate value; entries whose value is None are skipped below.
  update_settings = [
      _SubscriptionUpdateSetting('ackDeadlineSeconds', ack_deadline),
      _SubscriptionUpdateSetting('pushConfig', push_config),
      _SubscriptionUpdateSetting(
          'retainAckedMessages', retain_acked_messages
      ),
      _SubscriptionUpdateSetting(
          'enableExactlyOnceDelivery', enable_exactly_once_delivery
      ),
      _SubscriptionUpdateSetting(
          'messageRetentionDuration', message_retention_duration
      ),
      _SubscriptionUpdateSetting('labels', labels),
      _SubscriptionUpdateSetting(
          'expirationPolicy',
          self._ExpirationPolicy(no_expiration, expiration_period),
      ),
      _SubscriptionUpdateSetting('deadLetterPolicy', dead_letter_policy),
      _SubscriptionUpdateSetting('retryPolicy', retry_policy),
      _SubscriptionUpdateSetting('bigqueryConfig', bigquery_config),
      _SubscriptionUpdateSetting(
          'cloudStorageConfig', cloud_storage_config_settings
      ),
      _SubscriptionUpdateSetting(
          'pushConfig.noWrapper', push_config_no_wrapper
      ),
      _SubscriptionUpdateSetting('pubsubExportConfig', pubsub_export_config),
      # NOTE(review): 'messageTransforms' appears twice (new transforms and
      # the clear sentinel). At most one is expected to be non-None per
      # call; if both were set, the field would be listed twice in the mask
      # and the clear value would win the setattr — confirm the CLI flags
      # are mutually exclusive.
      _SubscriptionUpdateSetting('messageTransforms', message_transforms),
      _SubscriptionUpdateSetting('messageTransforms', clear_messages),
  ]
  subscription = self.messages.Subscription(
      name=subscription_ref.RelativeName()
  )
  update_mask = []
  for update_setting in update_settings:
    if update_setting.value is not None:
      # The _Handle*Update helpers translate CLEAR/DEFAULT sentinels into
      # an explicit None value while keeping the field in the mask.
      if update_setting.field_name == 'messageRetentionDuration':
        self._HandleMessageRetentionUpdate(update_setting)
      if update_setting.field_name == 'deadLetterPolicy':
        self._HandleDeadLetterPolicyUpdate(update_setting)
      if update_setting.field_name == 'retryPolicy':
        self._HandleRetryPolicyUpdate(update_setting)
      if update_setting.field_name == 'bigqueryConfig':
        self._HandleBigQueryConfigUpdate(update_setting)
      if update_setting.field_name == 'cloudStorageConfig':
        self._HandleCloudStorageConfigUpdate(update_setting)
      if update_setting.field_name == 'pubsubExportConfig':
        self._HandlePubsubExportConfigUpdate(update_setting)
      if update_setting.field_name == 'pushConfig.noWrapper':
        self._HandlePushNoWrapperUpdate(update_setting)
        if push_config is None:
          # 'pushConfig.noWrapper' is a sub-field of pushConfig; when the
          # top-level pushConfig is not itself being replaced, only record
          # the mask entry — there is no attribute of that dotted name to
          # set on the Subscription message.
          # NOTE(review): when push_config IS provided together with a
          # noWrapper update, execution falls through to setattr with the
          # dotted name — verify that path is reachable/intended.
          update_mask.append(update_setting.field_name)
          continue
      setattr(subscription, update_setting.field_name, update_setting.value)
      update_mask.append(update_setting.field_name)
  if not update_mask:
    raise NoFieldsSpecifiedError('Must specify at least one field to update.')
  patch_req = self.messages.PubsubProjectsSubscriptionsPatchRequest(
      updateSubscriptionRequest=self.messages.UpdateSubscriptionRequest(
          subscription=subscription, updateMask=','.join(update_mask)
      ),
      name=subscription_ref.RelativeName(),
  )
  return self._service.Patch(patch_req)
def SetIamPolicy(self, subscription_ref, policy):
  """Applies an IAM policy to a Subscription.

  Args:
    subscription_ref (Resource): Resource reference for subscription to set
      IAM policy on.
    policy (Policy): The policy to be added to the Subscription.

  Returns:
    Policy: the policy which was set.
  """
  inner_request = self.messages.SetIamPolicyRequest(policy=policy)
  request = self.messages.PubsubProjectsSubscriptionsSetIamPolicyRequest(
      resource=subscription_ref.RelativeName(),
      setIamPolicyRequest=inner_request,
  )
  return self._service.SetIamPolicy(request)
def GetIamPolicy(self, subscription_ref):
  """Fetches the IAM policy of a Subscription.

  Args:
    subscription_ref (Resource): Resource reference for subscription to get
      the IAM policy of.

  Returns:
    Policy: the policy for the Subscription.
  """
  target = subscription_ref.RelativeName()
  request = self.messages.PubsubProjectsSubscriptionsGetIamPolicyRequest(
      resource=target
  )
  return self._service.GetIamPolicy(request)
def AddIamPolicyBinding(self, subscription_ref, member, role):
  """Adds an IAM policy binding to a Subscription.

  Fetches the current policy, adds the binding locally, and writes the
  modified policy back.

  Args:
    subscription_ref (Resource): Resource reference for subscription to add
      IAM policy binding to.
    member (str): The member to add.
    role (str): The role to assign to the member.

  Returns:
    Policy: the updated policy.

  Raises:
    api_exception.HttpException: If either of the requests failed.
  """
  current_policy = self.GetIamPolicy(subscription_ref)
  iam_util.AddBindingToIamPolicy(
      self.messages.Binding, current_policy, member, role
  )
  return self.SetIamPolicy(subscription_ref, current_policy)
def RemoveIamPolicyBinding(self, subscription_ref, member, role):
  """Removes an IAM policy binding from a Subscription.

  Fetches the current policy, removes the binding locally, and writes the
  modified policy back.

  Args:
    subscription_ref (Resource): Resource reference for subscription to
      remove IAM policy binding from.
    member (str): The member whose binding is removed.
    role (str): The role the member was assigned.

  Returns:
    Policy: the updated policy.

  Raises:
    api_exception.HttpException: If either of the requests failed.
  """
  current_policy = self.GetIamPolicy(subscription_ref)
  iam_util.RemoveBindingFromIamPolicy(current_policy, member, role)
  return self.SetIamPolicy(subscription_ref, current_policy)

View File

@@ -0,0 +1,957 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Cloud Pub/Sub Topics API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.pubsub import utils
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import exceptions
# Sentinel assigned to the messageTransforms field to clear all transforms on
# update. NOTE(review): this is a mutable module-level list used as a
# sentinel; it must never be mutated.
CLEAR_MESSAGE_TRANSFORMS_VALUE = []
class PublishOperationException(exceptions.Error):
  """Error raised when a publish operation fails."""
class EmptyMessageException(exceptions.Error):
  """Error raised when no message was specified for a Publish operation."""
class NoFieldsSpecifiedError(exceptions.Error):
  """Error raised when no fields were specified for a Patch operation."""
class InvalidSchemaSettingsException(exceptions.Error):
  """Error raised when the topic schema settings are invalid."""
class ConflictingIngestionSettingsException(exceptions.Error):
  """Error raised when the topic ingestion settings are inconsistent."""
class _TopicUpdateSetting(object):
  """Data container class for updating a topic.

  Pairs a Topic field name (as used in a patch update mask) with the value
  to set for that field.
  """

  def __init__(self, field_name, value):
    # Field name as it appears in the patch request's update mask.
    self.field_name = field_name
    # New value to apply to the field.
    self.value = value
def GetClientInstance(no_http=False):
  """Returns an apitools client instance for the Cloud Pub/Sub v1 API.

  Args:
    no_http (bool): Forwarded to apis.GetClientInstance; disables the HTTP
      transport when True.
  """
  return apis.GetClientInstance('pubsub', 'v1', no_http=no_http)
def GetMessagesModule(client=None):
  """Returns the generated messages module for the Pub/Sub v1 API.

  Args:
    client: Optional API client; a default client is created when omitted.
  """
  api_client = client or GetClientInstance()
  return api_client.MESSAGES_MODULE
def ParseMessageEncoding(messages, message_encoding):
  """Maps a user-supplied encoding string to a SchemaSettings enum value.

  Args:
    messages: The Pub/Sub messages module.
    message_encoding (str): Encoding name, case-insensitive.

  Raises:
    InvalidSchemaSettingsException: if the encoding is not JSON or BINARY.
  """
  normalized = message_encoding.lower()
  encodings = messages.SchemaSettings.EncodingValueValuesEnum
  if normalized == 'json':
    return encodings.JSON
  if normalized == 'binary':
    return encodings.BINARY
  raise InvalidSchemaSettingsException(
      'Unknown message encoding. Options are JSON or BINARY.'
  )
class TopicsClient(object):
"""Client for topics service in the Cloud Pub/Sub API."""
def __init__(self, client=None, messages=None):
self.client = client or GetClientInstance()
self.messages = messages or GetMessagesModule(client)
self._service = self.client.projects_topics
self._subscriptions_service = self.client.projects_subscriptions
def _ParseIngestionPlatformLogsSettings(self, ingestion_log_severity):
if ingestion_log_severity:
return self.messages.PlatformLogsSettings(
severity=self.messages.PlatformLogsSettings.SeverityValueValuesEnum(
ingestion_log_severity
)
)
return None
def _ParseIngestionDataSourceSettings(
    self,
    kinesis_ingestion_stream_arn=None,
    kinesis_ingestion_consumer_arn=None,
    kinesis_ingestion_role_arn=None,
    kinesis_ingestion_service_account=None,
    cloud_storage_ingestion_bucket=None,
    cloud_storage_ingestion_input_format=None,
    cloud_storage_ingestion_text_delimiter=None,
    cloud_storage_ingestion_minimum_object_create_time=None,
    cloud_storage_ingestion_match_glob=None,
    azure_event_hubs_ingestion_resource_group=None,
    azure_event_hubs_ingestion_namespace=None,
    azure_event_hubs_ingestion_event_hub=None,
    azure_event_hubs_ingestion_client_id=None,
    azure_event_hubs_ingestion_tenant_id=None,
    azure_event_hubs_ingestion_subscription_id=None,
    azure_event_hubs_ingestion_service_account=None,
    aws_msk_ingestion_cluster_arn=None,
    aws_msk_ingestion_topic=None,
    aws_msk_ingestion_aws_role_arn=None,
    aws_msk_ingestion_service_account=None,
    confluent_cloud_ingestion_bootstrap_server=None,
    confluent_cloud_ingestion_cluster_id=None,
    confluent_cloud_ingestion_topic=None,
    confluent_cloud_ingestion_identity_pool_id=None,
    confluent_cloud_ingestion_service_account=None,
    ingestion_log_severity=None,
):
  """Returns an IngestionDataSourceSettings message from the provided args.

  A source type is considered "requested" only when ALL of its required
  arguments are non-None. The first requested source (checked in the order:
  Kinesis, Cloud Storage, Azure Event Hubs, AWS MSK, Confluent Cloud) is
  built and returned.

  Returns:
    IngestionDataSourceSettings message, or None when no source type has a
    complete set of arguments.

  Raises:
    ConflictingIngestionSettingsException: if ingestion_log_severity is set
      but no ingestion source is configured.
  """
  # For each datasource type, check if all required flags are passed, and
  # conditionally construct the source and return the first datasource type
  # which is present. We let the argument parser enforce mutual exclusion of
  # argument groups.
  is_kinesis = (
      (kinesis_ingestion_stream_arn is not None)
      and (kinesis_ingestion_consumer_arn is not None)
      and (kinesis_ingestion_role_arn is not None)
      and (kinesis_ingestion_service_account is not None)
  )
  # Only bucket and input format are required; delimiter, create time and
  # match glob are optional refinements.
  is_cloud_storage = (cloud_storage_ingestion_bucket is not None) and (
      cloud_storage_ingestion_input_format is not None
  )
  is_azure_event_hubs = (
      (azure_event_hubs_ingestion_resource_group is not None)
      and (azure_event_hubs_ingestion_namespace is not None)
      and (azure_event_hubs_ingestion_event_hub is not None)
      and (azure_event_hubs_ingestion_client_id is not None)
      and (azure_event_hubs_ingestion_tenant_id is not None)
      and (azure_event_hubs_ingestion_subscription_id is not None)
      and (azure_event_hubs_ingestion_service_account is not None)
  )
  is_msk = (
      (aws_msk_ingestion_cluster_arn is not None)
      and (aws_msk_ingestion_topic is not None)
      and (aws_msk_ingestion_aws_role_arn is not None)
      and (aws_msk_ingestion_service_account is not None)
  )
  is_confluent_cloud = (
      (confluent_cloud_ingestion_bootstrap_server is not None)
      and (confluent_cloud_ingestion_cluster_id is not None)
      and (confluent_cloud_ingestion_topic is not None)
      and (confluent_cloud_ingestion_identity_pool_id is not None)
      and (confluent_cloud_ingestion_service_account is not None)
  )
  if is_kinesis:
    kinesis_source = self.messages.AwsKinesis(
        streamArn=kinesis_ingestion_stream_arn,
        consumerArn=kinesis_ingestion_consumer_arn,
        awsRoleArn=kinesis_ingestion_role_arn,
        gcpServiceAccount=kinesis_ingestion_service_account,
    )
    return self.messages.IngestionDataSourceSettings(
        awsKinesis=kinesis_source,
        platformLogsSettings=self._ParseIngestionPlatformLogsSettings(
            ingestion_log_severity
        ),
    )
  elif is_cloud_storage:
    cloud_storage_source = self.messages.CloudStorage(
        bucket=cloud_storage_ingestion_bucket,
        minimumObjectCreateTime=cloud_storage_ingestion_minimum_object_create_time,
        matchGlob=cloud_storage_ingestion_match_glob,
    )
    # The input format selects which one-of format message is attached;
    # the text delimiter only applies to the 'text' format.
    if cloud_storage_ingestion_input_format == 'text':
      cloud_storage_source.textFormat = self.messages.TextFormat(
          delimiter=cloud_storage_ingestion_text_delimiter
      )
    elif cloud_storage_ingestion_input_format == 'avro':
      cloud_storage_source.avroFormat = self.messages.AvroFormat()
    elif cloud_storage_ingestion_input_format == 'pubsub_avro':
      cloud_storage_source.pubsubAvroFormat = self.messages.PubSubAvroFormat()
    return self.messages.IngestionDataSourceSettings(
        cloudStorage=cloud_storage_source,
        platformLogsSettings=self._ParseIngestionPlatformLogsSettings(
            ingestion_log_severity
        ),
    )
  elif is_azure_event_hubs:
    azure_event_hubs_source = self.messages.AzureEventHubs(
        resourceGroup=azure_event_hubs_ingestion_resource_group,
        namespace=azure_event_hubs_ingestion_namespace,
        eventHub=azure_event_hubs_ingestion_event_hub,
        clientId=azure_event_hubs_ingestion_client_id,
        tenantId=azure_event_hubs_ingestion_tenant_id,
        subscriptionId=azure_event_hubs_ingestion_subscription_id,
        gcpServiceAccount=azure_event_hubs_ingestion_service_account,
    )
    return self.messages.IngestionDataSourceSettings(
        azureEventHubs=azure_event_hubs_source,
        platformLogsSettings=self._ParseIngestionPlatformLogsSettings(
            ingestion_log_severity
        ),
    )
  elif is_msk:
    msk_source = self.messages.AwsMsk(
        clusterArn=aws_msk_ingestion_cluster_arn,
        topic=aws_msk_ingestion_topic,
        awsRoleArn=aws_msk_ingestion_aws_role_arn,
        gcpServiceAccount=aws_msk_ingestion_service_account,
    )
    return self.messages.IngestionDataSourceSettings(
        awsMsk=msk_source,
        platformLogsSettings=self._ParseIngestionPlatformLogsSettings(
            ingestion_log_severity
        ),
    )
  elif is_confluent_cloud:
    confluent_cloud_source = self.messages.ConfluentCloud(
        bootstrapServer=confluent_cloud_ingestion_bootstrap_server,
        clusterId=confluent_cloud_ingestion_cluster_id,
        topic=confluent_cloud_ingestion_topic,
        identityPoolId=confluent_cloud_ingestion_identity_pool_id,
        gcpServiceAccount=confluent_cloud_ingestion_service_account,
    )
    return self.messages.IngestionDataSourceSettings(
        confluentCloud=confluent_cloud_source,
        platformLogsSettings=self._ParseIngestionPlatformLogsSettings(
            ingestion_log_severity
        ),
    )
  elif ingestion_log_severity:
    # Log severity without a configured source is meaningless; reject it.
    raise ConflictingIngestionSettingsException(
        'Must set ingestion settings with log severity.'
    )
  return None
def Create(
self,
topic_ref,
labels=None,
kms_key=None,
message_retention_duration=None,
message_storage_policy_allowed_regions=None,
message_storage_policy_enforce_in_transit=False,
schema=None,
message_encoding=None,
first_revision_id=None,
last_revision_id=None,
kinesis_ingestion_stream_arn=None,
kinesis_ingestion_consumer_arn=None,
kinesis_ingestion_role_arn=None,
kinesis_ingestion_service_account=None,
cloud_storage_ingestion_bucket=None,
cloud_storage_ingestion_input_format=None,
cloud_storage_ingestion_text_delimiter=None,
cloud_storage_ingestion_minimum_object_create_time=None,
cloud_storage_ingestion_match_glob=None,
azure_event_hubs_ingestion_resource_group=None,
azure_event_hubs_ingestion_namespace=None,
azure_event_hubs_ingestion_event_hub=None,
azure_event_hubs_ingestion_client_id=None,
azure_event_hubs_ingestion_tenant_id=None,
azure_event_hubs_ingestion_subscription_id=None,
azure_event_hubs_ingestion_service_account=None,
aws_msk_ingestion_cluster_arn=None,
aws_msk_ingestion_topic=None,
aws_msk_ingestion_aws_role_arn=None,
aws_msk_ingestion_service_account=None,
confluent_cloud_ingestion_bootstrap_server=None,
confluent_cloud_ingestion_cluster_id=None,
confluent_cloud_ingestion_topic=None,
confluent_cloud_ingestion_identity_pool_id=None,
confluent_cloud_ingestion_service_account=None,
ingestion_log_severity=None,
message_transforms_file=None,
tags=None,
enable_vertex_ai_smt=False,
):
"""Creates a Topic.
Args:
topic_ref (Resource): Resource reference to the Topic to create.
labels (LabelsValue): Labels for the topic to create.
kms_key (str): Full resource name of kms_key to set on Topic or None.
message_retention_duration (str): How long to retain messages published to
the Topic.
message_storage_policy_allowed_regions (list[str]): List of Cloud regions
in which messages are allowed to be stored at rest.
message_storage_policy_enforce_in_transit (bool): Whether or not to
enforce in-transit guarantees for this topic using the allowed regions.
schema (Resource): Full resource name of schema used to validate messages
published on Topic.
message_encoding (str): If a schema is set, the message encoding of
incoming messages to be validated against the schema.
first_revision_id (str): If a schema is set, the revision id of the oldest
revision allowed for validation.
last_revision_id (str): If a schema is set, the revision id of the newest
revision allowed for validation.
kinesis_ingestion_stream_arn (str): The Kinesis data stream ARN to ingest
data from.
kinesis_ingestion_consumer_arn (str): The Kinesis data streams consumer
ARN to use for ingestion.
kinesis_ingestion_role_arn (str): AWS role ARN to be used for Federated
Identity authentication with Kinesis.
kinesis_ingestion_service_account (str): The GCP service account to be
used for Federated Identity authentication with Kinesis
cloud_storage_ingestion_bucket (str): The Cloud Storage bucket to ingest
data from.
cloud_storage_ingestion_input_format (str): the format of the data in the
Cloud Storage bucket ('text', 'avro', or 'pubsub_avro').
cloud_storage_ingestion_text_delimiter (optional[str]): delimiter to use
with text format when partioning the object.
cloud_storage_ingestion_minimum_object_create_time (optional[str]): only
Cloud Storage objects with a larger or equal creation timestamp will be
ingested.
cloud_storage_ingestion_match_glob (optional[str]): glob pattern used to
match Cloud Storage objects that will be ingested. If unset, all objects
will be ingested.
azure_event_hubs_ingestion_resource_group (str): The name of the resource
group within an Azure subscription.
azure_event_hubs_ingestion_namespace (str): The name of the Azure Event
Hubs namespace.
azure_event_hubs_ingestion_event_hub (str): The name of the Azure event
hub.
azure_event_hubs_ingestion_client_id (str): The client id of the Azure
Event Hubs application used to authenticate Pub/Sub.
azure_event_hubs_ingestion_tenant_id (str): The tenant id of the Azure
Event Hubs application used to authenticate Pub/Sub.
azure_event_hubs_ingestion_subscription_id (str): The id of the Azure
Event Hubs subscription.
azure_event_hubs_ingestion_service_account (str): The GCP service account
to be used for Federated Identity authentication with Azure Event Hubs.
aws_msk_ingestion_cluster_arn (str): The ARN that uniquely identifies the
MSK cluster.
aws_msk_ingestion_topic (str): The name of the MSK topic that Pub/Sub will
import from.
aws_msk_ingestion_aws_role_arn (str): AWS role ARN to be used for
Federated Identity authentication with MSK.
aws_msk_ingestion_service_account (str): The GCP service account to be
used for Federated Identity authentication with MSK.
confluent_cloud_ingestion_bootstrap_server (str): The address of the
Confluent Cloud bootstrap server. The format is url:port.
confluent_cloud_ingestion_cluster_id (str): The id of the Confluent Cloud
cluster.
confluent_cloud_ingestion_topic (str): The name of the Confluent Cloud
topic that Pub/Sub will import from.
confluent_cloud_ingestion_identity_pool_id (str): The id of the identity
pool to be used for Federated Identity authentication with Confluent
Cloud.
confluent_cloud_ingestion_service_account (str): The GCP service account
to be used for Federated Identity authentication with Confluent Cloud.
ingestion_log_severity (optional[str]): The log severity to use for
ingestion.
message_transforms_file (str): The file path to the JSON or YAML file
containing the message transforms.
tags (TagsValue): The tag Keys/Values to be bound to the topic.
enable_vertex_ai_smt (bool): Whether or not to enable Vertex AI message
transforms.
Returns:
Topic: The created topic.
Raises:
InvalidSchemaSettingsException: If an invalid --schema,
        --message-encoding flag combination is specified,
or if the --first_revision_id revision is newer than
the --last_revision_id specified.
"""
topic = self.messages.Topic(
name=topic_ref.RelativeName(),
labels=labels,
messageRetentionDuration=message_retention_duration,
)
if kms_key:
topic.kmsKeyName = kms_key
if message_storage_policy_allowed_regions:
message_storage_policy = self.messages.MessageStoragePolicy(
allowedPersistenceRegions=message_storage_policy_allowed_regions
)
if message_storage_policy_enforce_in_transit:
message_storage_policy.enforceInTransit = (
message_storage_policy_enforce_in_transit
)
topic.messageStoragePolicy = message_storage_policy
if schema and message_encoding:
encoding_enum = ParseMessageEncoding(self.messages, message_encoding)
topic.schemaSettings = self.messages.SchemaSettings(
schema=schema,
encoding=encoding_enum,
firstRevisionId=first_revision_id,
lastRevisionId=last_revision_id,
)
topic.ingestionDataSourceSettings = self._ParseIngestionDataSourceSettings(
kinesis_ingestion_stream_arn=kinesis_ingestion_stream_arn,
kinesis_ingestion_consumer_arn=kinesis_ingestion_consumer_arn,
kinesis_ingestion_role_arn=kinesis_ingestion_role_arn,
kinesis_ingestion_service_account=kinesis_ingestion_service_account,
cloud_storage_ingestion_bucket=cloud_storage_ingestion_bucket,
cloud_storage_ingestion_input_format=cloud_storage_ingestion_input_format,
cloud_storage_ingestion_text_delimiter=cloud_storage_ingestion_text_delimiter,
cloud_storage_ingestion_minimum_object_create_time=cloud_storage_ingestion_minimum_object_create_time,
cloud_storage_ingestion_match_glob=cloud_storage_ingestion_match_glob,
azure_event_hubs_ingestion_resource_group=azure_event_hubs_ingestion_resource_group,
azure_event_hubs_ingestion_namespace=azure_event_hubs_ingestion_namespace,
azure_event_hubs_ingestion_event_hub=azure_event_hubs_ingestion_event_hub,
azure_event_hubs_ingestion_client_id=azure_event_hubs_ingestion_client_id,
azure_event_hubs_ingestion_tenant_id=azure_event_hubs_ingestion_tenant_id,
azure_event_hubs_ingestion_subscription_id=azure_event_hubs_ingestion_subscription_id,
azure_event_hubs_ingestion_service_account=azure_event_hubs_ingestion_service_account,
aws_msk_ingestion_cluster_arn=aws_msk_ingestion_cluster_arn,
aws_msk_ingestion_topic=aws_msk_ingestion_topic,
aws_msk_ingestion_aws_role_arn=aws_msk_ingestion_aws_role_arn,
aws_msk_ingestion_service_account=aws_msk_ingestion_service_account,
confluent_cloud_ingestion_bootstrap_server=confluent_cloud_ingestion_bootstrap_server,
confluent_cloud_ingestion_cluster_id=confluent_cloud_ingestion_cluster_id,
confluent_cloud_ingestion_topic=confluent_cloud_ingestion_topic,
confluent_cloud_ingestion_identity_pool_id=confluent_cloud_ingestion_identity_pool_id,
confluent_cloud_ingestion_service_account=confluent_cloud_ingestion_service_account,
ingestion_log_severity=ingestion_log_severity,
)
if message_transforms_file:
try:
topic.messageTransforms = utils.GetMessageTransformsFromFile(
self.messages.MessageTransform,
message_transforms_file,
enable_vertex_ai_smt,
)
except (
utils.MessageTransformsInvalidFormatError,
utils.MessageTransformsEmptyFileError,
utils.MessageTransformsMissingFileError,
) as e:
e.args = (utils.GetErrorMessage(e),)
raise
if tags:
topic.tags = tags
return self._service.Create(topic)
def Get(self, topic_ref):
"""Gets a Topic.
Args:
topic_ref (Resource): Resource reference to the Topic to get.
Returns:
Topic: The topic.
"""
get_req = self.messages.PubsubProjectsTopicsGetRequest(
topic=topic_ref.RelativeName()
)
return self._service.Get(get_req)
def Delete(self, topic_ref):
"""Deletes a Topic.
Args:
topic_ref (Resource): Resource reference to the Topic to delete.
Returns:
Empty: An empty response message.
"""
delete_req = self.messages.PubsubProjectsTopicsDeleteRequest(
topic=topic_ref.RelativeName()
)
return self._service.Delete(delete_req)
def DetachSubscription(self, subscription_ref):
"""Detaches the subscription from its topic.
Args:
subscription_ref (Resource): Resource reference to the Subscription to
detach.
Returns:
Empty: An empty response message.
"""
detach_req = self.messages.PubsubProjectsSubscriptionsDetachRequest(
subscription=subscription_ref.RelativeName()
)
return self._subscriptions_service.Detach(detach_req)
def List(self, project_ref, page_size=100):
"""Lists Topics for a given project.
Args:
project_ref (Resource): Resource reference to Project to list Topics from.
page_size (int): the number of entries in each batch (affects requests
made, but not the yielded results).
Returns:
A generator of Topics in the Project.
"""
list_req = self.messages.PubsubProjectsTopicsListRequest(
project=project_ref.RelativeName(), pageSize=page_size
)
return list_pager.YieldFromList(
self._service,
list_req,
batch_size=page_size,
field='topics',
batch_size_attribute='pageSize',
)
def ListSnapshots(self, topic_ref, page_size=100):
"""Lists Snapshots for a given topic.
Args:
topic_ref (Resource): Resource reference to Topic to list snapshots from.
page_size (int): the number of entries in each batch (affects requests
made, but not the yielded results).
Returns:
A generator of Snapshots for the Topic.
"""
list_req = self.messages.PubsubProjectsTopicsSnapshotsListRequest(
topic=topic_ref.RelativeName(), pageSize=page_size
)
list_snaps_service = self.client.projects_topics_snapshots
return list_pager.YieldFromList(
list_snaps_service,
list_req,
batch_size=page_size,
field='snapshots',
batch_size_attribute='pageSize',
)
def ListSubscriptions(self, topic_ref, page_size=100):
"""Lists Subscriptions for a given topic.
Args:
topic_ref (Resource): Resource reference to Topic to list subscriptions
from.
page_size (int): the number of entries in each batch (affects requests
made, but not the yielded results).
Returns:
A generator of Subscriptions for the Topic..
"""
list_req = self.messages.PubsubProjectsTopicsSubscriptionsListRequest(
topic=topic_ref.RelativeName(), pageSize=page_size
)
list_subs_service = self.client.projects_topics_subscriptions
return list_pager.YieldFromList(
list_subs_service,
list_req,
batch_size=page_size,
field='subscriptions',
batch_size_attribute='pageSize',
)
def Publish(
self, topic_ref, message_body=None, attributes=None, ordering_key=None
):
"""Publishes a message to the given topic.
Args:
topic_ref (Resource): Resource reference to Topic to publish to.
message_body (bytes): Message to send.
attributes (list[AdditionalProperty]): List of attributes to attach to the
message.
ordering_key (string): The ordering key to associate with this message.
Returns:
PublishResponse: Response message with message ids from the API.
Raises:
EmptyMessageException: If neither message nor attributes is
specified.
PublishOperationException: When something went wrong with the publish
operation.
"""
if not message_body and not attributes:
raise EmptyMessageException(
'You cannot send an empty message. You must specify either a '
'MESSAGE, one or more ATTRIBUTE, or both.'
)
message = self.messages.PubsubMessage(
data=message_body,
attributes=self.messages.PubsubMessage.AttributesValue(
additionalProperties=attributes
),
orderingKey=ordering_key,
)
publish_req = self.messages.PubsubProjectsTopicsPublishRequest(
publishRequest=self.messages.PublishRequest(messages=[message]),
topic=topic_ref.RelativeName(),
)
result = self._service.Publish(publish_req)
if not result.messageIds:
# If we got a result with empty messageIds, then we've got a problem.
raise PublishOperationException(
'Publish operation failed with Unknown error.'
)
return result
def SetIamPolicy(self, topic_ref, policy):
"""Sets an IAM policy on a Topic.
Args:
topic_ref (Resource): Resource reference for topic to set IAM policy on.
policy (Policy): The policy to be added to the Topic.
Returns:
Policy: the policy which was set.
"""
request = self.messages.PubsubProjectsTopicsSetIamPolicyRequest(
resource=topic_ref.RelativeName(),
setIamPolicyRequest=self.messages.SetIamPolicyRequest(policy=policy),
)
return self._service.SetIamPolicy(request)
def GetIamPolicy(self, topic_ref):
"""Gets the IAM policy for a Topic.
Args:
topic_ref (Resource): Resource reference for topic to get the IAM policy
of.
Returns:
Policy: the policy for the Topic.
"""
request = self.messages.PubsubProjectsTopicsGetIamPolicyRequest(
resource=topic_ref.RelativeName()
)
return self._service.GetIamPolicy(request)
def AddIamPolicyBinding(self, topic_ref, member, role):
"""Adds an IAM Policy binding to a Topic.
Args:
topic_ref (Resource): Resource reference for subscription to add IAM
policy binding to.
member (str): The member to add.
role (str): The role to assign to the member.
Returns:
Policy: the updated policy.
Raises:
api_exception.HttpException: If either of the requests failed.
"""
policy = self.GetIamPolicy(topic_ref)
iam_util.AddBindingToIamPolicy(self.messages.Binding, policy, member, role)
return self.SetIamPolicy(topic_ref, policy)
def RemoveIamPolicyBinding(self, topic_ref, member, role):
"""Removes an IAM Policy binding from a Topic.
Args:
topic_ref (Resource): Resource reference for subscription to remove IAM
policy binding from.
member (str): The member to remove.
role (str): The role to remove the member from.
Returns:
Policy: the updated policy.
Raises:
api_exception.HttpException: If either of the requests failed.
"""
policy = self.GetIamPolicy(topic_ref)
iam_util.RemoveBindingFromIamPolicy(policy, member, role)
return self.SetIamPolicy(topic_ref, policy)
  def Patch(
      self,
      topic_ref,
      labels=None,
      kms_key_name=None,
      message_retention_duration=None,
      clear_message_retention_duration=False,
      recompute_message_storage_policy=False,
      message_storage_policy_allowed_regions=None,
      message_storage_policy_enforce_in_transit=False,
      schema=None,
      message_encoding=None,
      first_revision_id=None,
      last_revision_id=None,
      clear_schema_settings=None,
      clear_ingestion_data_source_settings=False,
      kinesis_ingestion_stream_arn=None,
      kinesis_ingestion_consumer_arn=None,
      kinesis_ingestion_role_arn=None,
      kinesis_ingestion_service_account=None,
      cloud_storage_ingestion_bucket=None,
      cloud_storage_ingestion_input_format=None,
      cloud_storage_ingestion_text_delimiter=None,
      cloud_storage_ingestion_minimum_object_create_time=None,
      cloud_storage_ingestion_match_glob=None,
      azure_event_hubs_ingestion_resource_group=None,
      azure_event_hubs_ingestion_namespace=None,
      azure_event_hubs_ingestion_event_hub=None,
      azure_event_hubs_ingestion_client_id=None,
      azure_event_hubs_ingestion_tenant_id=None,
      azure_event_hubs_ingestion_subscription_id=None,
      azure_event_hubs_ingestion_service_account=None,
      aws_msk_ingestion_cluster_arn=None,
      aws_msk_ingestion_topic=None,
      aws_msk_ingestion_aws_role_arn=None,
      aws_msk_ingestion_service_account=None,
      confluent_cloud_ingestion_bootstrap_server=None,
      confluent_cloud_ingestion_cluster_id=None,
      confluent_cloud_ingestion_topic=None,
      confluent_cloud_ingestion_identity_pool_id=None,
      confluent_cloud_ingestion_service_account=None,
      ingestion_log_severity=None,
      message_transforms_file=None,
      clear_message_transforms=False,
      enable_vertex_ai_smt=False,
  ):
    """Updates a Topic.

    Builds an update mask from the arguments that were actually supplied and
    issues a single Patch request containing only those fields.

    Args:
      topic_ref (Resource): Resource reference for the topic to be updated.
      labels (LabelsValue): The Cloud labels for the topic.
      kms_key_name (str): The full resource name of the Cloud KMS key to
        associate with the topic, or None.
      message_retention_duration (str): How long to retain messages.
      clear_message_retention_duration (bool): If set, remove retention from
        the topic.
      recompute_message_storage_policy (bool): True to have the API
        recalculate the message storage policy.
      message_storage_policy_allowed_regions (list[str]): List of Cloud
        regions in which messages are allowed to be stored at rest.
      message_storage_policy_enforce_in_transit (bool): Whether or not to
        enforce in-transit guarantees for this topic using the allowed
        regions.
      schema (Resource): Full resource name of schema used to validate
        messages published on Topic.
      message_encoding (str): If a schema is set, the message encoding of
        incoming messages to be validated against the schema.
      first_revision_id (str): If a schema is set, the revision id of the
        oldest revision allowed for validation.
      last_revision_id (str): If a schema is set, the revision id of the
        newest revision allowed for validation.
      clear_schema_settings (bool): If set, clear schema settings from the
        topic.
      clear_ingestion_data_source_settings (bool): If set, clear
        IngestionDataSourceSettings from the topic.
      kinesis_ingestion_stream_arn (str): The Kinesis data stream ARN to
        ingest data from.
      kinesis_ingestion_consumer_arn (str): The Kinesis data streams consumer
        ARN to use for ingestion.
      kinesis_ingestion_role_arn (str): AWS role ARN to be used for Federated
        Identity authentication with Kinesis.
      kinesis_ingestion_service_account (str): The GCP service account to be
        used for Federated Identity authentication with Kinesis.
      cloud_storage_ingestion_bucket (str): The Cloud Storage bucket to ingest
        data from.
      cloud_storage_ingestion_input_format (str): the format of the data in
        the Cloud Storage bucket ('text', 'avro', or 'pubsub_avro').
      cloud_storage_ingestion_text_delimiter (optional[str]): delimiter to use
        with text format when partitioning the object.
      cloud_storage_ingestion_minimum_object_create_time (optional[str]): only
        Cloud Storage objects with a larger or equal creation timestamp will
        be ingested.
      cloud_storage_ingestion_match_glob (optional[str]): glob pattern used to
        match Cloud Storage objects that will be ingested. If unset, all
        objects will be ingested.
      azure_event_hubs_ingestion_resource_group (str): The name of the
        resource group within an Azure subscription.
      azure_event_hubs_ingestion_namespace (str): The name of the Azure Event
        Hubs namespace.
      azure_event_hubs_ingestion_event_hub (str): The name of the Azure event
        hub.
      azure_event_hubs_ingestion_client_id (str): The client id of the Azure
        Event Hubs application used to authenticate Pub/Sub.
      azure_event_hubs_ingestion_tenant_id (str): The tenant id of the Azure
        Event Hubs application used to authenticate Pub/Sub.
      azure_event_hubs_ingestion_subscription_id (str): The id of the Azure
        Event Hubs subscription.
      azure_event_hubs_ingestion_service_account (str): The GCP service
        account to be used for Federated Identity authentication with Azure
        Event Hubs.
      aws_msk_ingestion_cluster_arn (str): The ARN that uniquely identifies
        the MSK cluster.
      aws_msk_ingestion_topic (str): The name of the MSK topic that Pub/Sub
        will import from.
      aws_msk_ingestion_aws_role_arn (str): AWS role ARN to be used for
        Federated Identity authentication with MSK.
      aws_msk_ingestion_service_account (str): The GCP service account to be
        used for Federated Identity authentication with MSK.
      confluent_cloud_ingestion_bootstrap_server (str): The address of the
        Confluent Cloud bootstrap server. The format is url:port.
      confluent_cloud_ingestion_cluster_id (str): The id of the Confluent
        Cloud cluster.
      confluent_cloud_ingestion_topic (str): The name of the Confluent Cloud
        topic that Pub/Sub will import from.
      confluent_cloud_ingestion_identity_pool_id (str): The id of the identity
        pool to be used for Federated Identity authentication with Confluent
        Cloud.
      confluent_cloud_ingestion_service_account (str): The GCP service account
        to be used for Federated Identity authentication with Confluent
        Cloud.
      ingestion_log_severity (optional[str]): The log severity to use for
        ingestion.
      message_transforms_file (str): The file path to the JSON or YAML file
        containing the message transforms.
      clear_message_transforms (bool): If set, clears all message transforms
        from the topic.
      enable_vertex_ai_smt (bool): If set, enables Vertex AI message
        transforms.

    Returns:
      Topic: The updated topic.

    Raises:
      NoFieldsSpecifiedError: if no fields were specified.
      PatchConflictingArgumentsError: if conflicting arguments were provided
      InvalidSchemaSettingsException: If an invalid --schema,
        --message-encoding flag combination is specified,
        or if the --first_revision_id revision is newer than
        the --last_revision_id specified.
    """
    # Accumulate (field_name, value) pairs; each one both sets the field on
    # the Topic proto and contributes an entry to the update mask.
    update_settings = []
    if labels:
      update_settings.append(_TopicUpdateSetting('labels', labels))
    if kms_key_name:
      update_settings.append(_TopicUpdateSetting('kmsKeyName', kms_key_name))
    if message_retention_duration:
      update_settings.append(
          _TopicUpdateSetting(
              'messageRetentionDuration', message_retention_duration
          )
      )
    if clear_message_retention_duration:
      # A None value with the field in the update mask clears it server-side.
      update_settings.append(
          _TopicUpdateSetting('messageRetentionDuration', None)
      )
    if recompute_message_storage_policy:
      # Clearing the policy makes the API recompute it automatically.
      update_settings.append(_TopicUpdateSetting('messageStoragePolicy', None))
    elif message_storage_policy_allowed_regions:
      message_storage_policy = self.messages.MessageStoragePolicy(
          allowedPersistenceRegions=message_storage_policy_allowed_regions
      )
      if message_storage_policy_enforce_in_transit:
        message_storage_policy.enforceInTransit = (
            message_storage_policy_enforce_in_transit
        )
      update_settings.append(
          _TopicUpdateSetting('messageStoragePolicy', message_storage_policy)
      )
    if clear_schema_settings:
      update_settings.append(_TopicUpdateSetting('schemaSettings', None))
    elif schema and message_encoding:
      encoding_enum = ParseMessageEncoding(self.messages, message_encoding)
      update_settings.append(
          _TopicUpdateSetting(
              'schemaSettings',
              self.messages.SchemaSettings(
                  schema=schema,
                  encoding=encoding_enum,
                  firstRevisionId=first_revision_id,
                  lastRevisionId=last_revision_id,
              ),
          )
      )
    if clear_ingestion_data_source_settings:
      update_settings.append(
          _TopicUpdateSetting('ingestionDataSourceSettings', None)
      )
    else:
      # Returns None when no ingestion-related argument was provided, in
      # which case the field is left out of the update mask entirely.
      new_settings = self._ParseIngestionDataSourceSettings(
          kinesis_ingestion_stream_arn=kinesis_ingestion_stream_arn,
          kinesis_ingestion_consumer_arn=kinesis_ingestion_consumer_arn,
          kinesis_ingestion_role_arn=kinesis_ingestion_role_arn,
          kinesis_ingestion_service_account=kinesis_ingestion_service_account,
          cloud_storage_ingestion_bucket=cloud_storage_ingestion_bucket,
          cloud_storage_ingestion_input_format=cloud_storage_ingestion_input_format,
          cloud_storage_ingestion_text_delimiter=cloud_storage_ingestion_text_delimiter,
          cloud_storage_ingestion_minimum_object_create_time=cloud_storage_ingestion_minimum_object_create_time,
          cloud_storage_ingestion_match_glob=cloud_storage_ingestion_match_glob,
          azure_event_hubs_ingestion_resource_group=azure_event_hubs_ingestion_resource_group,
          azure_event_hubs_ingestion_namespace=azure_event_hubs_ingestion_namespace,
          azure_event_hubs_ingestion_event_hub=azure_event_hubs_ingestion_event_hub,
          azure_event_hubs_ingestion_client_id=azure_event_hubs_ingestion_client_id,
          azure_event_hubs_ingestion_tenant_id=azure_event_hubs_ingestion_tenant_id,
          azure_event_hubs_ingestion_subscription_id=azure_event_hubs_ingestion_subscription_id,
          azure_event_hubs_ingestion_service_account=azure_event_hubs_ingestion_service_account,
          aws_msk_ingestion_cluster_arn=aws_msk_ingestion_cluster_arn,
          aws_msk_ingestion_topic=aws_msk_ingestion_topic,
          aws_msk_ingestion_aws_role_arn=aws_msk_ingestion_aws_role_arn,
          aws_msk_ingestion_service_account=aws_msk_ingestion_service_account,
          confluent_cloud_ingestion_bootstrap_server=confluent_cloud_ingestion_bootstrap_server,
          confluent_cloud_ingestion_cluster_id=confluent_cloud_ingestion_cluster_id,
          confluent_cloud_ingestion_topic=confluent_cloud_ingestion_topic,
          confluent_cloud_ingestion_identity_pool_id=confluent_cloud_ingestion_identity_pool_id,
          confluent_cloud_ingestion_service_account=confluent_cloud_ingestion_service_account,
          ingestion_log_severity=ingestion_log_severity,
      )
      if new_settings is not None:
        update_settings.append(
            _TopicUpdateSetting('ingestionDataSourceSettings', new_settings)
        )
    if message_transforms_file:
      try:
        update_settings.append(
            _TopicUpdateSetting(
                'messageTransforms',
                utils.GetMessageTransformsFromFile(
                    self.messages.MessageTransform,
                    message_transforms_file,
                    enable_vertex_ai_smt=enable_vertex_ai_smt,
                ),
            )
        )
      except (
          utils.MessageTransformsInvalidFormatError,
          utils.MessageTransformsEmptyFileError,
          utils.MessageTransformsMissingFileError,
      ) as e:
        # Rewrap with a user-friendly message before propagating.
        e.args = (utils.GetErrorMessage(e),)
        raise
    if clear_message_transforms:
      update_settings.append(
          _TopicUpdateSetting(
              'messageTransforms', CLEAR_MESSAGE_TRANSFORMS_VALUE
          )
      )
    topic = self.messages.Topic(name=topic_ref.RelativeName())
    update_mask = []
    for update_setting in update_settings:
      setattr(topic, update_setting.field_name, update_setting.value)
      update_mask.append(update_setting.field_name)
    if not update_mask:
      raise NoFieldsSpecifiedError('Must specify at least one field to update.')
    patch_req = self.messages.PubsubProjectsTopicsPatchRequest(
        updateTopicRequest=self.messages.UpdateTopicRequest(
            topic=topic, updateMask=','.join(update_mask)
        ),
        name=topic_ref.RelativeName(),
    )
    return self._service.Patch(patch_req)

View File

@@ -0,0 +1,235 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Pub/Sub."""
import enum
from apitools.base.py import encoding
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import yaml
from googlecloudsdk.core.util import files
import six
class ErrorCause(enum.Enum):
  """Reason a message transforms file failed to parse or validate."""
  # File content did not parse to a list of transforms.
  LIST = 'list'
  # File was not valid YAML or JSON.
  YAML_OR_JSON = 'yaml_or_json'
  # Parsed transform contained fields unknown to the message schema.
  UNRECOGNIZED_FIELDS = 'unrecognized_fields'
  # Validation path expected a single transform but got a list of several.
  MULTIPLE_SMTS_VALIDATE = 'multiple_smts_validate'
class MessageTransformsMissingFileError(exceptions.Error):
  """Raised when the message transforms file is missing."""

  def __init__(self, message, path):
    # Keep the offending path so GetErrorMessage can format a useful error.
    self.path = path
    super().__init__(message)
class MessageTransformsEmptyFileError(exceptions.Error):
  """Raised when the message transforms file is empty."""

  def __init__(self, path, message=''):
    # Keep the offending path so GetErrorMessage can format a useful error.
    self.path = path
    super().__init__(message)
class MessageTransformsInvalidFormatError(exceptions.Error):
  """Raised when the message transforms file has an invalid format."""

  def __init__(self, path, error_cause, message=''):
    # path and error_cause let GetErrorMessage pick the right wording.
    self.path = path
    self.error_cause = error_cause
    super().__init__(message)
def GetErrorMessage(err):
  """Returns the formatted error string for an error type.

  Args:
    err: Error raised during the GetMessageTransformsFromFile execution.

  Returns:
    Formatted error message as a string.
  """
  if isinstance(err, MessageTransformsMissingFileError):
    return 'Message transforms file [{0}] is missing or does not exist'.format(
        err.path)
  if isinstance(err, MessageTransformsEmptyFileError):
    return 'Empty message transforms file [{0}]'.format(err.path)
  if not isinstance(err, MessageTransformsInvalidFormatError):
    # Unknown error type: fall back to its own string representation.
    return str(err)
  cause = err.error_cause
  if cause == ErrorCause.LIST:
    return (
        'Message transforms file [{0}] not properly formatted as a list'
        .format(err.path)
    )
  if cause == ErrorCause.YAML_OR_JSON:
    return (
        'Message transforms file [{0}] is not properly formatted in YAML'
        ' or JSON due to [{1}]'.format(err.path, six.text_type(err))
    )
  if cause == ErrorCause.MULTIPLE_SMTS_VALIDATE:
    return (
        'Message transform file [{0}] contains a list of message transforms'
        ' instead of a single (1) message transform. Please edit your'
        ' message-transform-file to contain a single element.'.format(
            err.path
        )
    )
  # Remaining cause is UNRECOGNIZED_FIELDS.
  return (
      'Message transforms file [{0}] contains unrecognized fields: [{1}]'
      .format(err.path, six.text_type(err))
  )
def ValidateMessageTransformMessage(message, path, enable_vertex_ai_smt=False):
  """Validates a parsed message transform message.

  Args:
    message: The parsed message transform message to validate.
    path: Path of the source file, used only for error reporting.
    enable_vertex_ai_smt: Whether the aiInference transform is allowed.

  Raises:
    MessageTransformsInvalidFormatError: If the message carries fields the
      schema does not recognize, or uses aiInference when it is disabled.
  """
  # Collect dotted paths for every field the proto schema did not recognize.
  unrecognized_field_paths = [
      '{}.{}'.format(
          '.'.join(six.text_type(edge) for edge in edges_to_message),
          field_name,
      )
      for edges_to_message, field_names in encoding.UnrecognizedFieldIter(
          message
      )
      for field_name in field_names
  ]
  if unrecognized_field_paths:
    raise MessageTransformsInvalidFormatError(
        path,
        ErrorCause.UNRECOGNIZED_FIELDS,
        '\n'.join(unrecognized_field_paths),
    )
  # aiInference is only legal when Vertex AI transforms are enabled.
  if not enable_vertex_ai_smt and message.aiInference:
    raise MessageTransformsInvalidFormatError(
        path,
        ErrorCause.UNRECOGNIZED_FIELDS,
        '.aiInference',
    )
def ReadFileFromPath(path):
  """Reads a file from a local path.

  Args:
    path: A local path to an object specification in YAML or JSON format.

  Returns:
    The contents of the file as a string.

  Raises:
    MessageTransformsMissingFileError: If file is missing.
    MessageTransformsEmptyFileError: If file is empty.
  """
  try:
    file_contents = files.ReadFileContents(path)
  except files.MissingFileError as err:
    raise MessageTransformsMissingFileError(err, path)
  if file_contents:
    return file_contents
  raise MessageTransformsEmptyFileError(path=path)
def GetMessageTransformsFromFile(message, path, enable_vertex_ai_smt=False):
  """Reads a YAML or JSON list of message transforms from a local path.

  Args:
    message: The message type to be parsed from the file.
    path: A local path to an object specification in YAML or JSON format.
    enable_vertex_ai_smt: Whether or not to enable Vertex AI message
      transforms.

  Returns:
    Sequence of objects of type message, if successful.

  Raises:
    MessageTransformsMissingFileError: If file is missing.
    MessageTransformsEmptyFileError: If file is empty.
    MessageTransformsInvalidFormat: If file's format is invalid.
  """
  contents = ReadFileFromPath(path)
  try:
    # yaml.load() handles both YAML and JSON input.
    parsed = yaml.load(contents)
    if not isinstance(parsed, list):
      raise MessageTransformsInvalidFormatError(
          path=path, error_cause=ErrorCause.LIST
      )
    result = []
    for entry in parsed:
      transform = encoding.PyValueToMessage(message, entry)
      ValidateMessageTransformMessage(transform, path, enable_vertex_ai_smt)
      result.append(transform)
  except (
      TypeError,
      ValueError,
      AttributeError,
      yaml.YAMLParseError,
  ) as e:
    # Any parse/conversion failure is surfaced as a format error.
    raise MessageTransformsInvalidFormatError(path, ErrorCause.YAML_OR_JSON, e)
  return result
def GetMessageTransformFromFileForValidation(
    message, path, enable_vertex_ai_smt=False
):
  """Reads a single YAML or JSON message transform from a local path.

  Unlike GetMessageTransformsFromFile, this accepts either a bare transform
  or a one-element list, and rejects lists with more than one element.

  Args:
    message: The message type to be parsed from the file.
    path: A local path to an object specification in YAML or JSON format.
    enable_vertex_ai_smt: Whether or not to enable Vertex AI message
      transforms.

  Returns:
    Object of type message, if successful.

  Raises:
    MessageTransformsMissingFileError: If file is missing.
    MessageTransformsEmptyFileError: If file is empty.
    MessageTransformsInvalidFormat: If file's format is invalid.
  """
  contents = ReadFileFromPath(path)
  try:
    # yaml.load() handles both YAML and JSON input.
    parsed = yaml.load(contents)
    if isinstance(parsed, list):
      if len(parsed) != 1:
        raise MessageTransformsInvalidFormatError(
            path, ErrorCause.MULTIPLE_SMTS_VALIDATE
        )
      # Unwrap the single transform from its enclosing list.
      parsed = parsed[0]
    result = encoding.PyValueToMessage(message, parsed)
    ValidateMessageTransformMessage(result, path, enable_vertex_ai_smt)
  except (
      TypeError,
      ValueError,
      AttributeError,
      yaml.YAMLParseError,
  ) as e:
    # Any parse/conversion failure is surfaced as a format error.
    raise MessageTransformsInvalidFormatError(path, ErrorCause.YAML_OR_JSON, e)
  return result