feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,554 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from googlecloudsdk.calliope import actions
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
def AddTypeFlag(parser):
  """Registers the mandatory --type flag on the given parser."""
  parser.add_argument(
      '--type',
      required=True,
      help="""Type can be MYSQL, ORACLE, POSTGRESQL, SQLSERVER, SALESFORCE, GOOGLE-CLOUD-STORAGE or BIGQUERY""")
def AddDisplayNameFlag(parser, required=True):
  """Registers the --display-name flag on the given parser.

  Args:
    parser: the parser for the command.
    required: whether the flag must be supplied by the caller.
  """
  parser.add_argument(
      '--display-name',
      required=required,
      help="""Friendly name for the connection profile.""")
def AddMysqlProfileGroup(parser, required=True):
  """Adds necessary mysql profile flags to the given parser.

  Registers the connection flags (host, port, user), a mutually exclusive
  password group and an optional SSL configuration group.

  Args:
    parser: the parser for the command.
    required: whether the connection flags are required.
  """
  group = parser.add_group()
  group.add_argument(
      '--mysql-hostname',
      required=required,
      help="""IP or hostname of the MySQL source database.""")
  group.add_argument(
      '--mysql-port',
      required=required,
      type=int,
      help="""Network port of the MySQL source database.""")
  group.add_argument(
      '--mysql-username',
      required=required,
      help="""Username Datastream will use to connect to the database.""")
  # Exactly one way of supplying the password may be used.
  passwords = group.add_group(required=required, mutex=True)
  passwords.add_argument(
      '--mysql-password',
      default='',
      help="""\
          Password for the user that Datastream will be using to
          connect to the database.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""")
  passwords.add_argument(
      '--mysql-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to the database.')
  passwords.add_argument(
      '--mysql-secret-manager-stored-password',
      default='',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to the database.'
      ))
  ssl = group.add_group()
  ssl.add_argument(
      '--ca-certificate',
      required=required,
      help="""\
          x509 PEM-encoded certificate of the CA that signed the source database
          server's certificate. The replica will use this certificate to verify
          it's connecting to the right host.""")
  ssl.add_argument(
      '--client-certificate',
      required=required,
      help="""\
          x509 PEM-encoded certificate that will be used by the replica to
          authenticate against the source database server.""")
  ssl.add_argument(
      '--client-key',
      required=required,
      help="""\
          Unencrypted PKCS#1 or PKCS#8 PEM-encoded private key associated with
          the Client Certificate.""")
def AddOracleProfileGroup(parser, required=True):
  """Adds necessary oracle profile flags to the given parser.

  Registers the connection flags (host, port, user, service), a mutually
  exclusive password group and an optional SSL configuration group.

  Args:
    parser: the parser for the command.
    required: whether the connection flags are required.
  """
  group = parser.add_group()
  group.add_argument(
      '--oracle-hostname',
      required=required,
      help="""IP or hostname of the oracle source database.""")
  group.add_argument(
      '--oracle-port',
      required=required,
      type=int,
      help="""Network port of the oracle source database.""")
  group.add_argument(
      '--oracle-username',
      required=required,
      help="""Username Datastream will use to connect to the database.""")
  group.add_argument(
      '--database-service',
      required=required,
      help="""Database service for the Oracle connection.""")
  # Exactly one way of supplying the password may be used.
  passwords = group.add_group(required=required, mutex=True)
  passwords.add_argument(
      '--oracle-password',
      default='',
      help="""\
          Password for the user that Datastream will be using to
          connect to the database.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""")
  passwords.add_argument(
      '--oracle-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to the database.')
  passwords.add_argument(
      '--oracle-secret-manager-stored-password',
      default='',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to the database.'
      ))
  ssl = group.add_group()
  ssl.add_argument(
      '--oracle-ca-certificate',
      default='',
      help="""\
          PEM-encoded certificate of the CA that signed the source database
          server's certificate.""")
  ssl.add_argument(
      '--oracle-server-certificate-distinguished-name',
      default='',
      help="""\
          The distinguished name (DN) mentioned in the server
          certificate. This corresponds to the SSL_SERVER_CERT_DN sqlnet parameter.
          If this field is not provided, the DN matching is not enforced.""")
def AddPostgresqlProfileGroup(parser, required=True):
  """Adds necessary postgresql profile flags to the given parser.

  Registers the connection flags (host, port, user, database), a mutually
  exclusive password group and an SSL group with a nested client-cert group.

  Args:
    parser: the parser for the command.
    required: whether the connection flags are required.
  """
  group = parser.add_group()
  group.add_argument(
      '--postgresql-hostname',
      required=required,
      help="""IP or hostname of the PostgreSQL source database.""")
  group.add_argument(
      '--postgresql-port',
      required=required,
      type=int,
      help="""Network port of the PostgreSQL source database.""")
  group.add_argument(
      '--postgresql-username',
      required=required,
      help="""Username Datastream will use to connect to the database.""")
  group.add_argument(
      '--postgresql-database',
      required=required,
      help="""Database service for the PostgreSQL connection.""")
  # Exactly one way of supplying the password may be used.
  passwords = group.add_group(required=required, mutex=True)
  passwords.add_argument(
      '--postgresql-password',
      default='',
      help="""\
          Password for the user that Datastream will be using to
          connect to the database.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""")
  passwords.add_argument(
      '--postgresql-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to the database.')
  passwords.add_argument(
      '--postgresql-secret-manager-stored-password',
      default='',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to the database.'
      ))
  ssl = group.add_group()
  ssl.add_argument(
      '--postgresql-ca-certificate',
      required=required,
      help="""\
          x509 PEM-encoded certificate of the CA that signed the source database
          server's certificate. The replica will use this certificate to verify
          it's connecting to the right host.""")
  ssl.add_argument(
      '--postgresql-server-certificate-hostname',
      default='',
      help="""\
          The hostname mentioned in the Subject or SAN extension of the server
          certificate. If this field is not provided, the hostname in the
          server certificate is not validated.""")
  # Client certificate and key must be provided together (nested group).
  client_ssl = ssl.add_group()
  client_ssl.add_argument(
      '--postgresql-client-certificate',
      required=required,
      help="""\
          x509 PEM-encoded certificate that will be used by the replica to
          authenticate against the source database server.""")
  client_ssl.add_argument(
      '--postgresql-client-key',
      required=required,
      help="""\
          Unencrypted PKCS#1 or PKCS#8 PEM-encoded private key associated with
          the Client Certificate.""")
def AddSqlServerProfileGroup(parser, required=True):
  """Adds necessary sqlserver profile flags to the given parser.

  Registers the connection flags (host, port, user, database) and a
  mutually exclusive password group.

  Args:
    parser: the parser for the command.
    required: whether the connection flags are required.
  """
  group = parser.add_group()
  group.add_argument(
      '--sqlserver-hostname',
      required=required,
      help="""IP or hostname of the SQL Server source database.""")
  group.add_argument(
      '--sqlserver-port',
      required=required,
      type=int,
      help="""Network port of the SQL Server source database.""")
  group.add_argument(
      '--sqlserver-username',
      required=required,
      help="""Username Datastream will use to connect to the database.""")
  group.add_argument(
      '--sqlserver-database',
      required=required,
      help="""Database service for the SQL Server connection.""")
  # Exactly one way of supplying the password may be used.
  passwords = group.add_group(required=required, mutex=True)
  passwords.add_argument(
      '--sqlserver-password',
      default='',
      help="""\
          Password for the user that Datastream will be using to
          connect to the database.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""")
  passwords.add_argument(
      '--sqlserver-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to the database.')
  passwords.add_argument(
      '--sqlserver-secret-manager-stored-password',
      default='',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to the database.'
      ))
def AddSalesforceProfileGroup(parser, required=True):
  """Adds necessary salesforce profile flags to the given parser.

  Two mutually exclusive login methods are registered: user/password plus
  security token, or an OAuth 2.0 client id/secret pair. Within each method
  the secret material may be given inline, via prompt, or via Secret Manager
  (mutually exclusive).

  Args:
    parser: The parser for the command line flags.
    required: Whether or not the flags are required.
  """
  group = parser.add_group()
  group.add_argument(
      '--salesforce-domain',
      required=required,
      help="""Domain of the Salesforce organization. For example, 'myorg.my.salesforce.com'""")
  login = group.add_group(required=required, mutex=True)
  # --- Login method 1: username + password + security token. ---
  user_login = login.add_group()
  user_login.add_argument(
      '--salesforce-username',
      required=required,
      help="""Username Datastream will use to connect to the database.""")
  passwords = user_login.add_group(required=required, mutex=True)
  passwords.add_argument(
      '--salesforce-password',
      default='',
      help="""\
          Password for the user that Datastream will be using to
          connect to Salesforce.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""")
  passwords.add_argument(
      '--salesforce-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to Salesforce.')
  passwords.add_argument(
      '--salesforce-secret-manager-stored-password',
      default='',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to Salesforce.'
      ))
  tokens = user_login.add_group(required=required, mutex=True)
  tokens.add_argument(
      '--salesforce-security-token',
      default='',
      help="""\
          Security token for the user that Datastream will be using to
          connect to Salesforce.""")
  tokens.add_argument(
      '--salesforce-prompt-for-security-token',
      action='store_true',
      help='Prompt for the security token used to connect to Salesforce.')
  tokens.add_argument(
      '--salesforce-secret-manager-stored-security-token',
      default='',
      help=(
          'Path to secret manager, storing the security token used to connect'
          ' to Salesforce.'
      ))
  # --- Login method 2: OAuth 2.0 client id + secret. ---
  oauth2_login = login.add_group()
  oauth2_login.add_argument(
      '--salesforce-oauth2-client-id',
      required=required,
      help="""OAuth 2.0 Client ID used to connect to Salesforce.""")
  secrets_group = oauth2_login.add_group(required=required, mutex=True)
  secrets_group.add_argument(
      '--salesforce-oauth2-client-secret',
      default='',
      help="""\
          OAuth 2.0 Client secret used to connect to Salesforce.""")
  secrets_group.add_argument(
      '--salesforce-prompt-for-oauth2-client-secret',
      action='store_true',
      help=(
          'Prompt for the OAuth 2.0 Client secret used to connect to'
          ' Salesforce.'
      ))
  secrets_group.add_argument(
      '--salesforce-secret-manager-stored-oauth2-client-secret',
      default='',
      help=(
          'Path to secret manager, storing the OAuth 2.0 Client secret used to'
          ' connect to Salesforce.'
      ))
def AddGcsProfileGroup(parser, release_track, required=True):
  """Adds necessary GCS profile flags to the given parser.

  Args:
    parser: the parser for the command.
    release_track: the BETA surface uses --bucket-name; GA uses --bucket.
    required: whether the bucket flag is required.
  """
  group = parser.add_group()
  bucket_flag = (
      '--bucket-name'
      if release_track == base.ReleaseTrack.BETA else '--bucket')
  group.add_argument(
      bucket_flag,
      required=required,
      help="""The full project and resource path for Cloud Storage
      bucket including the name.""")
  group.add_argument(
      '--root-path',
      required=False,
      help="""The root path inside the Cloud Storage bucket.""")
def AddMongodbProfileGroup(parser, required=True):
  """Adds necessary mongodb profile flags to the given parser.

  Registers host/replica-set/user connection flags, a mutually exclusive
  password group, a mutually exclusive connection-format group (SRV vs
  standard), a direct-connection toggle and an SSL configuration group.

  Args:
    parser: the parser for the command.
    required: whether the connection flags are required.
  """
  mongodb_profile = parser.add_group()
  mongodb_profile.add_argument(
      '--mongodb-host-addresses',
      help="""IP or hostname and port of the MongoDB source database.""",
      type=arg_parsers.ArgList(min_length=1),
      metavar='IPv4_ADDRESS_OR_HOSTNAME:PORT',
      required=required,
  )
  mongodb_profile.add_argument(
      '--mongodb-replica-set',
      help="""Replica set of the MongoDB source database.""",
  )
  mongodb_profile.add_argument(
      '--mongodb-username',
      help="""Username Datastream will use to connect to the database.""",
      required=required,
  )
  # Exactly one way of supplying the password may be used.
  password_group = mongodb_profile.add_group(required=required, mutex=True)
  password_group.add_argument(
      '--mongodb-password',
      help="""\
          Password for the user that Datastream will be using to
          connect to the database.
          This field is not returned on request, and the value is encrypted
          when stored in Datastream.""",
      default='',
  )
  password_group.add_argument(
      '--mongodb-prompt-for-password',
      action='store_true',
      help='Prompt for the password used to connect to the database.',
  )
  password_group.add_argument(
      '--mongodb-secret-manager-stored-password',
      help=(
          'Path to secret manager, storing the password for the user used to'
          ' connect to the database.'
      ),
      default='',
  )
  # SRV and standard connection formats are mutually exclusive.
  connection_format_group = mongodb_profile.add_group(
      required=required, mutex=True
  )
  connection_format_group.add_argument(
      '--mongodb-srv-connection-format',
      help="""SRV Connection format for the MongoDB source database.""",
      action='store_true',
      default=False,
  )
  connection_format_group.add_argument(
      '--mongodb-standard-connection-format',
      help="""Standard connection format for the MongoDB source database.""",
      action='store_true',
      default=False,
  )
  mongodb_profile.add_argument(
      '--mongodb-direct-connection',
      help="""Connect to the mongodb hosts directly and do not try to resolve
      any of the replicas from the replica set.""",
      action='store_true',
      default=False,
  )
  ssl_config = mongodb_profile.add_group()
  # Fixed typo in user-visible help: "monogodb" -> "mongodb".
  ssl_config.add_argument(
      '--mongodb-tls',
      help="""Enable Transport Layer Security for the mongodb connection.""",
      action='store_true',
      default=False)
  ssl_config.add_argument(
      '--mongodb-ca-certificate',
      help="""\
          x509 PEM-encoded certificate of the CA that signed the source database
          server's certificate. The replica will use this certificate to verify
          it's connecting to the right host.""")
def AddDepthGroup(parser):
  """Adds the mutually exclusive, deprecated depth flags for discover.

  Both flags are kept for backward compatibility and emit deprecation
  warnings pointing at --full-hierarchy / --hierarchy-depth.
  """
  group = parser.add_group(mutex=True)
  group.add_argument(
      '--recursive',
      action=actions.DeprecationAction(
          '--recursive',
          warn=(
              'The {flag_name} option is deprecated; use `--full-hierarchy`'
              ' instead.'
          ),
          removed=False,
          action='store_true',
      ),
      help="""Whether to retrieve the full hierarchy of data objects (TRUE) or only the current level (FALSE).""",
  )
  group.add_argument(
      '--recursive-depth',
      action=actions.DeprecationAction(
          '--recursive-depth',
          warn=(
              'The {flag_name} option is deprecated; use `--hierarchy-depth`'
              ' instead.'
          ),
          removed=False,
      ),
      help="""The number of hierarchy levels below the current level to be retrieved.""",
  )
def AddHierarchyGroup(parser):
  """Adds necessary hierarchy flags for discover command parser.

  --full-hierarchy and --hierarchy-depth are mutually exclusive.
  """
  group = parser.add_group(mutex=True)
  group.add_argument(
      '--full-hierarchy',
      action='store_true',
      help="""Whether to retrieve the full hierarchy of data objects (TRUE) or only the current level (FALSE).""")
  group.add_argument(
      '--hierarchy-depth',
      help="""The number of hierarchy levels below the current level to be retrieved.""")
def AddRdbmsGroup(parser):
  """Adds necessary RDBMS params for discover command parser.

  Exactly one of the per-engine RDBMS file flags may be supplied.
  """
  group = parser.add_group(mutex=True)
  # NOTE: the MySQL help text below deliberately matches the original,
  # including its trailing space.
  group.add_argument(
      '--mysql-rdbms-file',
      help="""Path to a YAML (or JSON) file containing the MySQL RDBMS to enrich with child data objects and metadata. If you pass - as the value of the flag the file content will be read from stdin. """)
  group.add_argument(
      '--oracle-rdbms-file',
      help="""Path to a YAML (or JSON) file containing the Oracle RDBMS to enrich with child data objects and metadata. If you pass - as the value of the flag the file content will be read from stdin.""")
  group.add_argument(
      '--postgresql-rdbms-file',
      help="""Path to a YAML (or JSON) file containing the PostgreSQL RDBMS to enrich with child data objects and metadata. If you pass - as the value of the flag the file content will be read from stdin.""")
  group.add_argument(
      '--sqlserver-rdbms-file',
      help="""Path to a YAML (or JSON) file containing the SQL Server RDBMS to enrich with child data objects and metadata. If you pass - as the value of the flag the file content will be read from stdin.""")
def AddValidationGroup(parser, verb):
  """Adds a --force flag to the given parser.

  Args:
    parser: the parser for the command.
    verb: verb describing the action, interpolated into the help text.
  """
  help_text = """%s the connection profile without validating it.""" % verb
  group = parser.add_group(mutex=True)
  group.add_argument(
      '--force', action='store_true', default=False, help=help_text)

View File

@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.util.args import labels_util
def AddLabelsCreateFlags(parser):
  """Adds flags related to creating labels.

  Thin wrapper that delegates to the shared labels_util helper so all
  surfaces register the same `--labels` create flags.
  """
  labels_util.AddCreateLabelsFlags(parser)
def AddLabelsUpdateFlags(parser):
  """Adds flags related to updating labels.

  Thin wrapper that delegates to the shared labels_util helper so all
  surfaces register the same update/remove/clear label flags.
  """
  labels_util.AddUpdateLabelsFlags(parser)

View File

@@ -0,0 +1,79 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def AddMysqlObjectIdentifier(parser):
  """Adds a --mysql-database & --mysql-table flags to the given parser."""
  group = parser.add_group()
  group.add_argument(
      '--mysql-database',
      required=True,
      help="""Mysql database for the object.""")
  group.add_argument(
      '--mysql-table',
      required=True,
      help="""Mysql table for the object.""")
def AddOracleObjectIdentifier(parser):
  """Adds a --oracle-schema & --oracle-table flags to the given parser."""
  group = parser.add_group()
  group.add_argument(
      '--oracle-schema',
      required=True,
      help="""Oracle schema for the object.""")
  group.add_argument(
      '--oracle-table',
      required=True,
      help="""Oracle table for the object.""")
def AddPostgresqlObjectIdentifier(parser):
  """Adds a --postgresql-schema & --postgresql-table flags to the given parser."""
  group = parser.add_group()
  group.add_argument(
      '--postgresql-schema',
      required=True,
      help="""PostgreSQL schema for the object.""")
  group.add_argument(
      '--postgresql-table',
      required=True,
      help="""PostgreSQL table for the object.""")
def AddSqlServerObjectIdentifier(parser):
  """Adds a --sqlserver-schema & --sqlserver-table flags to the given parser."""
  group = parser.add_group()
  group.add_argument(
      '--sqlserver-schema',
      required=True,
      help="""SQL Server schema for the object.""")
  group.add_argument(
      '--sqlserver-table',
      required=True,
      help="""SQL Server table for the object.""")
def AddSalesforceObjectIdentifier(parser):
  """Adds a --salesforce-object-name flag to the given parser."""
  group = parser.add_group()
  group.add_argument(
      '--salesforce-object-name',
      required=True,
      help="""Salesforce object name.""")

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def AddDisplayNameFlag(parser):
  """Registers the mandatory --display-name flag on the given parser."""
  parser.add_argument(
      '--display-name',
      required=True,
      help="""Friendly name for the private connection.""")
def AddNetworkAttachmentFlag(parser):
  """Adds the `--network-attachment` flag to the parser.

  Args:
    parser: the parser for the command.
  """
  # The original implicit string concatenation was missing separators,
  # rendering as "connect to.For example ... form:`network-attachment=...`".
  parser.add_argument(
      '--network-attachment',
      required=True,
      type=str,
      help=(
          'Full URI of the network attachment that datastream will connect'
          ' to. For example, this would be of the form:'
          ' `network-attachment=projects/test-project/regions/us-central1/networkAttachments/my-na`'
      ),
  )
def AddValidateOnlyFlag(parser):
  """Adds the `--validate-only` flag to the parser.

  Args:
    parser: the parser for the command.
  """
  # The original concatenation produced a double space ("in the  network").
  parser.add_argument(
      '--validate-only',
      required=False,
      action='store_true',
      help=(
          'If set, the request will retrieve the project id to allow in the'
          ' network attachment Datastream will connect to.'
      ),
  )

View File

@@ -0,0 +1,791 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource flags for Datastream commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.command_lib.util.concepts import presentation_specs
_MYSQL_SOURCE_CONFIG_HELP_TEXT_BETA = """\
Path to a YAML (or JSON) file containing the configuration for MySQL Source Config.
The JSON file is formatted as follows, with snake_case field naming:
```
{
"allowlist": {},
"rejectlist": {
"mysql_databases": [
{
"database_name":"sample_database",
"mysql_tables": [
{
"table_name": "sample_table",
"mysql_columns": [
{
"column_name": "sample_column",
}
]
}
]
}
]
}
}
```
"""
_MYSQL_SOURCE_CONFIG_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the configuration for MySQL Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"mysqlDatabases": [
{
"database":"sample_database",
"mysqlTables": [
{
"table": "sample_table",
"mysqlColumns": [
{
"column": "sample_column",
}
]
}
]
}
]
}
}
```
"""
_ORACLE_SOURCE_CONFIG_HELP_TEXT_BETA = """\
Path to a YAML (or JSON) file containing the configuration for Oracle Source Config.
The JSON file is formatted as follows, with snake_case field naming:
```
{
"allowlist": {},
"rejectlist": {
"oracle_schemas": [
{
"schema_name": "SAMPLE",
"oracle_tables": [
{
"table_name": "SAMPLE_TABLE",
"oracle_columns": [
{
"column_name": "COL",
}
]
}
]
}
]
}
}
```
"""
_ORACLE_SOURCE_CONFIG_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the configuration for Oracle Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"oracleSchemas": [
{
"schema": "SAMPLE",
"oracleTables": [
{
"table": "SAMPLE_TABLE",
"oracleColumns": [
{
"column": "COL",
}
]
}
]
}
]
}
}
```
"""
_POSTGRESQL_CREATE_SOURCE_CONFIG_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the configuration for PostgreSQL Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"postgresqlSchemas": [
{
"schema": "SAMPLE",
"postgresqlTables": [
{
"table": "SAMPLE_TABLE",
"postgresqlColumns": [
{
"column": "COL",
}
]
}
]
}
]
},
"replicationSlot": "SAMPLE_REPLICATION_SLOT",
"publication": "SAMPLE_PUBLICATION"
}
```
"""
_POSTGRESQL_UPDATE_SOURCE_CONFIG_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the configuration for PostgreSQL Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"postgresqlSchemas": [
{
"schema": "SAMPLE",
"postgresqlTables": [
{
"table": "SAMPLE_TABLE",
"postgresqlColumns": [
{
"column": "COL",
}
]
}
]
}
]
},
"replicationSlot": "SAMPLE_REPLICATION_SLOT",
"publication": "SAMPLE_PUBLICATION"
}
```
"""
_SQLSERVER_CREATE_SOURCE_CONFIG_HELP_TEXT = """
Path to a YAML (or JSON) file containing the configuration for SQL Server Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"schemas": [
{
"schema": "SAMPLE",
"tables": [
{
"table": "SAMPLE_TABLE",
"columns": [
{
"column": "COL",
}
]
}
]
}
]
},
"maxConcurrentCdcTasks": 2,
"maxConcurrentBackfillTasks": 10,
"transactionLogs": {} # Or changeTables
}
```
"""
_SQLSERVER_UPDATE_SOURCE_CONFIG_HELP_TEXT = """
Path to a YAML (or JSON) file containing the configuration for SQL Server Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"includeObjects": {},
"excludeObjects": {
"schemas": [
{
"schema": "SAMPLE",
"tables": [
{
"table": "SAMPLE_TABLE",
"columns": [
{
"column": "COL",
}
]
}
]
}
]
},
"maxConcurrentCdcTasks": 2,
"maxConcurrentBackfillTasks": 10,
"transactionLogs": {} # Or changeTables
}
```
"""
_SALESFORCE_CREATE_SOURCE_CONFIG_HELP_TEXT = """
Path to a YAML (or JSON) file containing the configuration for Salesforce Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"pollingInterval": "3000s",
"includeObjects": {},
"excludeObjects": {
"objects": [
{
"objectName": "SAMPLE",
"fields": [
{
"fieldName": "SAMPLE_FIELD",
}
]
}
]
}
}
```
"""
_SALESFORCE_UPDATE_SOURCE_CONFIG_HELP_TEXT = """
Path to a YAML (or JSON) file containing the configuration for Salesforce Source Config.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"pollingInterval": "3000s",
"includeObjects": {},
"excludeObjects": {
"objects": [
{
"objectName": "SAMPLE",
"fields": [
{
"fieldName": "SAMPLE_FIELD",
}
]
}
]
}
}
```
"""
# Fixed: the example below uses camelCase keys (includeObjects, ...), but the
# text previously claimed "snake_case field naming".
_MONGODB_SOURCE_CONFIG_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the configuration for MongoDB Source Config.
The JSON file is formatted as follows, with camelCase field naming:

```
  {
    "includeObjects": {},
    "excludeObjects": {
      "databases": [
          {
            "database": "sampleDb",
            "collections": [
              {
                "collection": "sampleCollection",
                "fields": [
                  {
                    "field": "SAMPLE_FIELD",
                  }
                ]
              }
            ]
          }
        ]
      }
  }
```
"""
def ConnectionProfileAttributeConfig(name='connection_profile'):
  """Builds the resource attribute config for a connection profile."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_id_field='id',
      completion_request_params={'fieldMask': 'name'},
      help_text='The connection profile of the {resource}.')
def PrivateConnectionAttributeConfig(name='private_connection'):
  """Builds the resource attribute config for a private connection."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_id_field='id',
      completion_request_params={'fieldMask': 'name'},
      help_text='The private connection of the {resource}.')
def StreamAttributeConfig(name='stream'):
  """Builds the resource attribute config for a stream."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_id_field='id',
      completion_request_params={'fieldMask': 'name'},
      help_text='The stream of the {resource}.')
def RouteAttributeConfig(name='route'):
  """Builds the resource attribute config for a route."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_id_field='id',
      completion_request_params={'fieldMask': 'name'},
      help_text='The route of the {resource}.')
def LocationAttributeConfig():
  """Builds the resource attribute config for a Cloud location."""
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      help_text='The Cloud location for the {resource}.')
def GetLocationResourceSpec(resource_name='location'):
  """Returns the resource spec for a Datastream location."""
  return concepts.ResourceSpec(
      'datastream.projects.locations',
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      disable_auto_completers=True)
def GetConnectionProfileResourceSpec(resource_name='connection_profile'):
  """Returns the resource spec for a Datastream connection profile."""
  return concepts.ResourceSpec(
      'datastream.projects.locations.connectionProfiles',
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      connectionProfilesId=ConnectionProfileAttributeConfig(name=resource_name),
      disable_auto_completers=True)
def GetPrivateConnectionResourceSpec(resource_name='private_connection'):
  """Returns the resource spec for a Datastream private connection."""
  return concepts.ResourceSpec(
      'datastream.projects.locations.privateConnections',
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      privateConnectionsId=PrivateConnectionAttributeConfig(name=resource_name),
      disable_auto_completers=True)
def GetStreamResourceSpec(resource_name='stream'):
  """Returns the resource spec for a Datastream stream."""
  return concepts.ResourceSpec(
      'datastream.projects.locations.streams',
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      streamsId=StreamAttributeConfig(name=resource_name),
      disable_auto_completers=True)
def GetRouteResourceSpec(resource_name='route'):
  """Returns the resource spec for a Datastream route.

  Routes are nested under private connections, so the spec carries a fixed
  'private-connection' parent attribute in addition to the route itself.
  """
  return concepts.ResourceSpec(
      'datastream.projects.locations.privateConnections.routes',
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=LocationAttributeConfig(),
      privateConnectionsId=PrivateConnectionAttributeConfig(
          'private-connection'),
      routesId=RouteAttributeConfig(name=resource_name),
      disable_auto_completers=True)
def AddConnectionProfileResourceArg(parser,
                                    verb,
                                    release_track,
                                    positional=True,
                                    required=True):
  """Add a resource argument for a Datastream connection profile.

  Args:
    parser: the parser for the command.
    verb: str, the verb to describe the resource, such as 'to update'.
    release_track: Some arguments are added based on the command release
      track.
    positional: bool, if True, means that the resource is a positional rather
      than a flag.
    required: bool, if True, means that a flag is required.
  """
  if positional:
    name = 'connection_profile'
  else:
    name = '--connection-profile'
  # Connectivity methods are mutually exclusive: static IP, (BETA only)
  # no connectivity, a forward-SSH tunnel, or a private connection.
  connectivity_parser = parser.add_group(mutex=True)
  connectivity_parser.add_argument(
      '--static-ip-connectivity',
      action='store_true',
      help="""use static ip connectivity""")
  if release_track == base.ReleaseTrack.BETA:
    connectivity_parser.add_argument(
        '--no-connectivity', action='store_true', help="""no connectivity""")
  forward_ssh_parser = connectivity_parser.add_group()
  forward_ssh_parser.add_argument(
      '--forward-ssh-hostname',
      help="""Hostname for the SSH tunnel.""",
      required=required)
  forward_ssh_parser.add_argument(
      '--forward-ssh-username',
      help="""Username for the SSH tunnel.""",
      required=required)
  forward_ssh_parser.add_argument(
      '--forward-ssh-port',
      help="""Port for the SSH tunnel, default value is 22.""",
      type=int,
      default=22)
  # Exactly one of password or private key authenticates the SSH tunnel.
  password_group = forward_ssh_parser.add_group(required=required, mutex=True)
  password_group.add_argument(
      '--forward-ssh-password', help="""\
          SSH password.
          """)
  # Fixed help-text typo: was 'SSH private key..' (double period).
  password_group.add_argument(
      '--forward-ssh-private-key', help='SSH private key.')

  # TODO(b/207467120): deprecate BETA client.
  # The BETA surface spells the private connection flag differently.
  private_connection_flag_name = 'private-connection'
  if release_track == base.ReleaseTrack.BETA:
    private_connection_flag_name = 'private-connection-name'

  resource_specs = [
      presentation_specs.ResourcePresentationSpec(
          name,
          GetConnectionProfileResourceSpec(),
          'The connection profile {}.'.format(verb),
          required=True),
      presentation_specs.ResourcePresentationSpec(
          '--%s' % private_connection_flag_name,
          GetPrivateConnectionResourceSpec(),
          'Resource ID of the private connection.',
          # Suppress the private connection's own location flag; it falls
          # through to the command-level --location below.
          flag_name_overrides={'location': ''},
          group=connectivity_parser)
  ]
  concept_parsers.ConceptParser(
      resource_specs,
      command_level_fallthroughs={
          '--%s.location' % private_connection_flag_name: ['--location'],
      }).AddToParser(parser)
def AddConnectionProfileDiscoverResourceArg(parser):
  """Add a resource argument for a Datastream connection profile discover command.

  Args:
    parser: the parser for the command.
  """
  # Exactly one of: an inline object file, or an existing profile resource.
  discover_group = parser.add_group(mutex=True, required=True)
  discover_group.add_argument(
      '--connection-profile-object-file',
      help="""Path to a YAML (or JSON) file containing the configuration
      for a connection profile object. If you pass - as the value of the
      flag the file content will be read from stdin."""
  )
  profile_spec = presentation_specs.ResourcePresentationSpec(
      '--connection-profile-name',
      GetConnectionProfileResourceSpec(),
      'Resource ID of the connection profile.',
      flag_name_overrides={'location': ''},
      group=discover_group)
  concept_parsers.ConceptParser(
      [profile_spec],
      command_level_fallthroughs={
          '--connection-profile-name.location': ['--location'],
      }).AddToParser(parser)
def GetVpcResourceSpec():
  """Constructs and returns the Resource specification for VPC."""
  # Single-use attribute config; a local is clearer than a nested function.
  vpc_attribute_config = concepts.ResourceParameterAttributeConfig(
      name='vpc',
      help_text="""fully qualified name of the VPC Datastream will peer to."""
  )
  return concepts.ResourceSpec(
      'compute.networks',
      resource_name='vpc',
      network=vpc_attribute_config,
      project=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
def AddPrivateConnectionResourceArg(parser,
                                    verb,
                                    positional=True):
  """Add a resource argument for a Datastream private connection.

  Args:
    parser: the parser for the command.
    verb: str, the verb to describe the resource, such as 'to update'.
    positional: bool, if True, means that the resource is a positional rather
      than a flag.
  """
  name = 'private_connection' if positional else '--private-connection'
  spec = presentation_specs.ResourcePresentationSpec(
      name,
      GetPrivateConnectionResourceSpec(),
      'The private connection {}.'.format(verb),
      required=True)
  concept_parsers.ConceptParser([spec]).AddToParser(parser)
def AddStreamResourceArg(parser, verb, release_track, required=True):
  """Add resource arguments for creating/updating a stream.

  Args:
    parser: argparse.ArgumentParser, the parser for the command.
    verb: str, the verb to describe the resource, such as 'to update'.
    release_track: base.ReleaseTrack, some arguments are added based on the
      command release track.
    required: bool, if True, means that a flag is required.
  """
  # Source side: exactly one of the per-database source-config files.
  source_parser = parser.add_group(required=required)
  source_config_parser_group = source_parser.add_group(
      required=required, mutex=True)
  # BETA uses a different help text for Oracle/MySQL source configs.
  source_config_parser_group.add_argument(
      '--oracle-source-config',
      help=_ORACLE_SOURCE_CONFIG_HELP_TEXT_BETA if release_track
      == base.ReleaseTrack.BETA else _ORACLE_SOURCE_CONFIG_HELP_TEXT)
  source_config_parser_group.add_argument(
      '--mysql-source-config',
      help=_MYSQL_SOURCE_CONFIG_HELP_TEXT_BETA if release_track
      == base.ReleaseTrack.BETA else _MYSQL_SOURCE_CONFIG_HELP_TEXT)
  # PostgreSQL/SQL Server/Salesforce help differs between create and update.
  source_config_parser_group.add_argument(
      '--postgresql-source-config',
      help=_POSTGRESQL_UPDATE_SOURCE_CONFIG_HELP_TEXT
      if verb == 'update'
      else _POSTGRESQL_CREATE_SOURCE_CONFIG_HELP_TEXT,
  )
  source_config_parser_group.add_argument(
      '--sqlserver-source-config',
      help=_SQLSERVER_UPDATE_SOURCE_CONFIG_HELP_TEXT
      if verb == 'update'
      else _SQLSERVER_CREATE_SOURCE_CONFIG_HELP_TEXT,
  )
  source_config_parser_group.add_argument(
      '--salesforce-source-config',
      help=_SALESFORCE_UPDATE_SOURCE_CONFIG_HELP_TEXT
      if verb == 'update'
      else _SALESFORCE_CREATE_SOURCE_CONFIG_HELP_TEXT,
  )
  source_config_parser_group.add_argument(
      '--mongodb-source-config',
      help=_MONGODB_SOURCE_CONFIG_HELP_TEXT,
  )
  # Destination side: exactly one of GCS or BigQuery destination configs.
  destination_parser = parser.add_group(required=required)
  destination_config_parser_group = destination_parser.add_group(
      required=required, mutex=True)
  destination_config_parser_group.add_argument(
      '--gcs-destination-config',
      help="""\
Path to a YAML (or JSON) file containing the configuration for Google Cloud Storage Destination Config.
The JSON file is formatted as follows:
```
 {
 "path": "some/path",
 "fileRotationMb":5,
 "fileRotationInterval":"15s",
 "avroFileFormat": {}
 }
```
        """,
  )
  destination_config_parser_group.add_argument(
      '--bigquery-destination-config',
      help="""\
Path to a YAML (or JSON) file containing the configuration for Google BigQuery Destination Config.
The YAML (or JSON) file should be formatted as follows:
BigQuery configuration with source hierarchy datasets and merge mode (merge mode is by default):
```
{
  "sourceHierarchyDatasets": {
    "datasetTemplate": {
      "location": "us-central1",
      "datasetIdPrefix": "my_prefix",
      "kmsKeyName": "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}"
    }
  },
  "merge": {}
  "dataFreshness": "3600s"
}
```
BigQuery configuration with source hierarchy datasets and append only mode:
```
{
  "sourceHierarchyDatasets": {
    "datasetTemplate": {
      "location": "us-central1",
      "datasetIdPrefix": "my_prefix",
      "kmsKeyName": "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}"
    }
  },
  "appendOnly": {}
}
```
BigQuery configuration with single target dataset and merge mode:
```
{
  "singleTargetDataset": {
    "datasetId": "projectId:my_dataset"
  },
  "merge": {}
  "dataFreshness": "3600s"
}
```
BigQuery configuration with Big Lake table configuration:
```
{
  "singleTargetDataset": {
    "datasetId": "projectId:datasetId"
  },
  "appendOnly": {},
  "blmtConfig": {
    "bucket": "bucketName",
    "tableFormat": "ICEBERG",
    "fileFormat": "PARQUET",
    "connectionName": "projectId.region.connectionName",
    "rootPath": "/root"
  }
}
```
        """,
  )
  # TODO(b/207467120)-style BETA difference: the flag names for the source
  # and destination connection profiles are suffixed with '-name'.
  source_field = 'source'
  destination_field = 'destination'
  if release_track == base.ReleaseTrack.BETA:
    source_field = 'source-name'
    destination_field = 'destination-name'
  resource_specs = [
      presentation_specs.ResourcePresentationSpec(
          'stream',
          GetStreamResourceSpec(),
          'The stream to {}.'.format(verb),
          required=True),
      presentation_specs.ResourcePresentationSpec(
          '--%s' % source_field,
          GetConnectionProfileResourceSpec(),
          'Resource ID of the source connection profile.',
          required=required,
          # Location falls through to the command-level --location flag.
          flag_name_overrides={'location': ''},
          group=source_parser),
      presentation_specs.ResourcePresentationSpec(
          '--%s' % destination_field,
          GetConnectionProfileResourceSpec(),
          'Resource ID of the destination connection profile.',
          required=required,
          flag_name_overrides={'location': ''},
          group=destination_parser)
  ]
  concept_parsers.ConceptParser(
      resource_specs,
      command_level_fallthroughs={
          '--%s.location' % source_field: ['--location'],
          '--%s.location' % destination_field: ['--location']
      }).AddToParser(parser)
def AddStreamObjectResourceArg(parser):
  """Add a resource argument for a Datastream stream object.

  Args:
    parser: the parser for the command.
  """
  stream_spec = presentation_specs.ResourcePresentationSpec(
      '--stream',
      GetStreamResourceSpec(),
      'The stream to list objects for.',
      required=True)
  concept_parsers.ConceptParser(
      [stream_spec],
      command_level_fallthroughs={
          '--stream.location': ['--location'],
      }).AddToParser(parser)
def AddRouteResourceArg(parser, verb, positional=True):
  """Add a resource argument for a Datastream route.

  Args:
    parser: the parser for the command.
    verb: str, the verb to describe the resource, such as 'to create'.
    positional: bool, if True, means that the resource is a positional rather
      than a flag.
  """
  name = 'route' if positional else '--route'
  route_spec = presentation_specs.ResourcePresentationSpec(
      name,
      GetRouteResourceSpec(),
      'The route {}.'.format(verb),
      required=True)
  concept_parsers.ConceptParser([route_spec]).AddToParser(parser)

View File

@@ -0,0 +1,133 @@
project:
name: project
collection: datastream.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: The project ID.
property: core/project
location:
name: location
collection: datastream.projects.locations
request_id_field: location.name
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: The location of the resources.
connection_profile:
name: connection_profile
collection: datastream.projects.locations.connectionProfiles
request_id_field: connection_profile.name
attributes:
- *project
- *location
- &connection_profile
parameter_name: connectionProfilesId
attribute_name: connection_profile
help: The connection profile name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false
stream:
name: stream
collection: datastream.projects.locations.streams
request_id_field: stream.name
attributes:
- *location
- &stream
parameter_name: streamsId
attribute_name: stream
help: The stream name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false
stream_object:
name: stream_object
collection: datastream.projects.locations.streams.objects
request_id_field: stream_object.name
attributes:
- *project
- *location
- *stream
- &stream_object
parameter_name: objectsId
attribute_name: object
help: The Stream object name.
disable_auto_completers: false
operation:
name: operation
collection: datastream.projects.locations.operations
request_id_field: operation.name
attributes:
- *project
- *location
- &operation
parameter_name: operationsId
attribute_name: operation
help: The operation name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false
# Gcloud disallows positional arguments with dashes (-) but the resource is expected to be referred
# to with dashes when used in non-positional manner. To avoid the conflict, we define two
# near-identical resources (only differing by attribute_name).
positional_private_connection:
name: private_connection
collection: datastream.projects.locations.privateConnections
request_id_field: private_connection.name
attributes:
- *project
- *location
- &positional_private_connection
parameter_name: privateConnectionsId
attribute_name: private_connection
help: The private connection name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false
private_connection:
name: private_connection
collection: datastream.projects.locations.privateConnections
request_id_field: private_connection.name
attributes:
- *project
- *location
- &private_connection
parameter_name: privateConnectionsId
attribute_name: private-connection
help: The private connection name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false
route:
name: route
collection: datastream.projects.locations.privateConnections.routes
request_id_field: route.name
attributes:
- *project
- *location
- *private_connection
- &route
parameter_name: routesId
attribute_name: route
help: The route name.
completion_request_params:
- fieldName: fieldMask
value: name
disable_auto_completers: false

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
def AddDisplayNameFlag(parser):
  """Adds a --display-name flag to the given parser."""
  parser.add_argument(
      '--display-name',
      required=True,
      help="""Friendly name for the route.""")
def AddDestinationAddressFlag(parser):
  """Adds a --destination-address flag to the given parser."""
  # Fixed docstring typo: was '--destination-addresss'.
  help_text = """Destination address for connection."""
  parser.add_argument('--destination-address', help=help_text, required=True)
def AddDestinationPortFlag(parser):
  """Adds a --destination-port flag to the given parser."""
  # Fixed copy-paste docstring: previously claimed to add --display-name.
  help_text = """Destination port for connection."""
  parser.add_argument(
      '--destination-port', help=help_text, type=int)

View File

@@ -0,0 +1,297 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags and helpers for the Datastream related commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
_MYSQL_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the MySQL data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"mysqlDatabases": [
{
"database":"sample_database",
"mysqlTables": [
{
"table": "sample_table",
"mysqlColumns": [
{
"column": "sample_column",
}
]
}
]
}
]
}
```
"""
_ORACLE_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the Oracle data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"oracleSchemas": [
{
"schema": "SAMPLE",
"oracleTables": [
{
"table": "SAMPLE_TABLE",
"oracleColumns": [
{
"column": "COL",
}
]
}
]
}
]
}
```
"""
_POSTGRESQL_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the PostgreSQL data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"postgresqlSchemas": [
{
"schema": "SAMPLE",
"postgresqlTables": [
{
"table": "SAMPLE_TABLE",
"postgresqlColumns": [
{
"column": "COL",
}
]
}
]
}
]
}
```
"""
_SQLSERVER_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the SQL Server data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"schemas": [
{
"schema": "SAMPLE",
"tables": [
{
"table": "SAMPLE_TABLE",
"columns": [
{
"column": "COL",
}
]
}
]
}
]
}
```
"""
_SALESFORCE_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the Salesforce data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"objects": [
{
"objectName": "SAMPLE",
},
{
"objectName": "SAMPLE2",
}
]
}
```
"""
_MONGODB_EXCLUDED_OBJECTS_HELP_TEXT = """\
Path to a YAML (or JSON) file containing the MongoDB data sources to avoid backfilling.
The JSON file is formatted as follows, with camelCase field naming:
```
{
"databases": [
{
"database":"sample_database",
"collections": [
{
"collection": "sample_collection",
"fields": [
{
"field": "sample_field",
}
]
}
]
}
]
}
```
"""
def AddRuleSetsFlag(parser):
  """Adds a --rule-sets flag to the given parser."""
  rule_sets_help = """Path to a JSON file containing a list of rule sets
  to be applied to the stream.
  The JSON file is formatted as follows, with camelCase field naming:
  ```
  [
    {
      "objectFilter": {
        "sourceObjectIdentifier": {
          "oracleIdentifier": {
            "schema": "schema1",
            "table": "table1"
          }
        }
      },
      "customizationRules": [
        {
          "bigqueryClustering": {
            "columns": ["COL_A"]
          }
        }
      ]
    },
    {
      "objectFilter": {
        "sourceObjectIdentifier": {
          "oracleIdentifier": {
            "schema": "schema2",
            "table": "table2"
          }
        }
      },
      "customizationRules": [
        {
          "bigqueryPartitioning": {
            "timeUnitPartition": {
              "column": "TIME_COL",
              "partitioningTimeGranularity": "PARTITIONING_TIME_GRANULARITY_DAY"
            }
          }
        }
      ]
    }
  ]
  ```
  """
  parser.add_argument('--rule-sets', help=rule_sets_help)
def AddDisplayNameFlag(parser, required=True):
  """Adds a --display-name flag to the given parser."""
  parser.add_argument(
      '--display-name',
      required=required,
      help="""Friendly name for the stream.""")
def AddUpdateMaskFlag(parser):
  """Adds a --update-mask flag to the given parser."""
  parser.add_argument(
      '--update-mask',
      help="""Used to specify the fields to be overwritten in the stream resource by the update.
      If the update mask is used, then a field will be overwritten only if it is in the mask. If the user does not provide a mask then all fields will be overwritten.
      This is a comma-separated list of fully qualified names of fields, written as snake_case or camelCase. Example: "display_name, source_config.oracle_source_config".""")
def AddStateFlag(parser):
  """Adds a --state flag to the given parser."""
  parser.add_argument(
      '--state',
      help="""Stream state, can be set to: "RUNNING" or "PAUSED".""")
def AddValidationGroup(parser, verb):
  """Adds a --validate-only or --force flag to the given parser."""
  # The two flags are mutually exclusive: either dry-run validation only,
  # or skip validation entirely.
  group = parser.add_group(mutex=True)
  group.add_argument(
      '--validate-only',
      action='store_true',
      default=False,
      help="""Only validate the stream, but do not %s any resources.
      The default is false.""" % verb.lower())
  group.add_argument(
      '--force',
      action='store_true',
      default=False,
      help="""%s the stream without validating it.""" % verb)
def AddBackfillStrategyGroup(parser, required=True):
  """Adds a --backfill-all or --backfill-none flag to the given parser."""
  # Backfill strategies are mutually exclusive: none, or all-with-exclusions.
  backfill_group = parser.add_group(required=required, mutex=True)
  backfill_group.add_argument(
      '--backfill-none',
      help="""Do not automatically backfill any objects. This flag is equivalent
      to selecting the Manual backfill type in the Google Cloud console.""",
      action='store_true')
  backfill_all_group = backfill_group.add_group()
  backfill_all_group.add_argument(
      '--backfill-all',
      help="""Automatically backfill objects included in the stream source
      configuration. Specific objects can be excluded. This flag is equivalent
      to selecting the Automatic backfill type in the Google Cloud console.""",
      action='store_true')
  # At most one per-source exclusion file may accompany --backfill-all.
  backfill_all_excluded_objects = backfill_all_group.add_group(mutex=True)
  backfill_all_excluded_objects.add_argument(
      '--oracle-excluded-objects', help=_ORACLE_EXCLUDED_OBJECTS_HELP_TEXT
  )
  backfill_all_excluded_objects.add_argument(
      '--mysql-excluded-objects', help=_MYSQL_EXCLUDED_OBJECTS_HELP_TEXT
  )
  backfill_all_excluded_objects.add_argument(
      '--postgresql-excluded-objects',
      help=_POSTGRESQL_EXCLUDED_OBJECTS_HELP_TEXT,
  )
  backfill_all_excluded_objects.add_argument(
      '--sqlserver-excluded-objects',
      help=_SQLSERVER_EXCLUDED_OBJECTS_HELP_TEXT,
  )
  backfill_all_excluded_objects.add_argument(
      '--salesforce-excluded-objects',
      help=_SALESFORCE_EXCLUDED_OBJECTS_HELP_TEXT,
  )
  backfill_all_excluded_objects.add_argument(
      '--mongodb-excluded-objects',
      help=_MONGODB_EXCLUDED_OBJECTS_HELP_TEXT,
  )