feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for managng common agent pool flags.
Tested more through command surface tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from googlecloudsdk.calliope import arg_parsers
def setup_parser(parser):
  """Adds flags to agent-pools create and agent-pools update commands.

  Args:
    parser (arg_parse.Parser): The parser for the command.
  """
  parser.add_argument(
      'name', help='A unique, permanent identifier for this pool.')
  parser.add_argument(
      '--display-name',
      help='A modifiable name to help you identify this pool. You can include'
      " details that might not fit in the pool's unique full resource name.")
  parser.add_argument(
      '--bandwidth-limit',
      # unlimited=True lets users pass the literal 'unlimited' to clear it.
      type=arg_parsers.BoundedInt(1, sys.maxsize, unlimited=True),
      help="Set how much of your bandwidth to make available to this pool's"
      ' agents. A bandwidth limit applies to all agents in a pool and can'
      " help prevent the pool's transfer workload from disrupting other"
      " operations that share your bandwidth. For example, enter '50' to set"
      # Fixed: the original help text repeated "this flag unspecified, this".
      ' a bandwidth limit of 50 MB/s. By leaving this flag unspecified,'
      " this pool's agents will use all bandwidth available to them.")

View File

@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and functions for STS transfer agents."""
import enum
class ContainerManager(enum.Enum):
  """Container manager to use for the agent."""

  DOCKER = 'docker'
  PODMAN = 'podman'

  @classmethod
  def from_args(cls, args, flag_name='container_manager'):
    """Returns the container manager selected in the parsed args.

    Args:
      args: The parsed command-line arguments namespace.
      flag_name (str): Name of the attribute holding the manager value.

    Returns:
      ContainerManager: The matching member; DOCKER when the flag is absent.
    """
    selected = getattr(args, flag_name, cls.DOCKER.value)
    return cls(selected)

View File

@@ -0,0 +1,195 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for transfer appliances commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.command_lib.transfer.appliances import regions
# Message module for the transfer appliance API; used to enumerate the valid
# appliance models and appliance/order states below.
_MESSAGES = apis.GetMessagesModule('transferappliance', 'v1alpha1')
APPLIANCE_MODEL_ENUM = _MESSAGES.Appliance.ModelValueValuesEnum
_APPLIANCE_STATE_ENUM = _MESSAGES.Appliance.StateValueValuesEnum
_ORDER_STATE_ENUM = _MESSAGES.Order.StateValueValuesEnum
# We skip the first enum value, TYPE_UNSPECIFIED, in each case.
_APPLIANCE_MODELS = [e.name for e in APPLIANCE_MODEL_ENUM][1:]
_APPLIANCE_STATES = [e.name for e in _APPLIANCE_STATE_ENUM][1:]
_ORDER_STATES = [e.name for e in _ORDER_STATE_ENUM][1:]
# Help text for the --address ArgDict flag (gcloud markdown format).
_ADDRESS_HELP = """\
Address where the appliance will be shipped. All fields (or list items)
have a maximum of 80 characters. For more information see
https://support.google.com/business/answer/6397478.
*lines*::: Line of the postal address that doesn't fit in the other
fields. For most countries/regions, the first line will include a street
number and street name. You can have up to 5 address lines.
*locality*::: Generally refers to the city/town portion of the address.
*administrative_area*::: The state or province where the business is
located. Enter the full name (e.g. "California"), common postal
abbreviation (e.g. "CA"), or subdivision (ISO 3166-2) code
(e.g. "US-CA").
*postal_code*::: The postal code of the address.
"""
# Shared help text for the --order-contact and --shipping-contact flags.
_CONTACT_HELP = """\
*business*::: Name of the business, if applicable.
*name*::: Name of the primary contact.
*phone*::: The phone number of the primary contact. Should be given in E.164
format consisting of the country calling code (1 to 3 digits) and the
subscriber number, with no additional spaces or formatting, e.g.
`15552220123`.
*emails*::: The email of the primary contact along with any additional email
addresses to include with all correspondence.
"""
# Help text for the --offline-export ArgDict flag.
_OFFLINE_EXPORT_HELP = """\
Configuration for an offline export transfer, where data is downloaded onto
the appliance at Google and copied from the appliance at the customer site.
*source*::: The Cloud Storage bucket or folder where the data is located,
in the form of `gs://my-bucket/path/to/folder/`.
*manifest*::: Specifies the path to the manifest in Cloud Storage.
An example path is `gs://bucket_name/path/manifest.csv`. The paths in
the manifest file are relative to bucketname. For example, to export
`SOURCE_PATH/object1.pdf`, manifest will have `object1.pdf` in the first
column, followed by object version (optional). For more information see
https://cloud.google.com/storage-transfer/docs/manifest#object_storage_transfers.
"""
def add_appliance_settings(parser, for_create_command=True):
  """Adds appliance flags for appliances orders create/update commands.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    for_create_command (bool): When True, --model is required and the
      create-only --internet-enabled flag is added.
  """
  group = parser.add_group(category='APPLIANCE')
  group.add_argument(
      '--model',
      help='Model of the appliance to order.',
      choices=_APPLIANCE_MODELS,
      type=str.upper,
      required=for_create_command,
  )
  group.add_argument(
      '--display-name',
      help='A mutable, user-settable name for both the appliance and the order.'
  )
  if for_create_command:
    group.add_argument(
        '--internet-enabled',
        action='store_true',
        help=(
            'Gives the option to put the appliance into online mode,'
            ' allowing it to transfer data and/or remotely report progress to'
            ' the cloud over the network. When disabled only offline'
            ' transfers are possible.'
        ),
    )
  group.add_argument(
      '--cmek',
      help=(
          'Customer-managed key which will add additional layer of security.'
          ' By default data is encrypted with a Google-managed key.'
      ),
  )
  group.add_argument(
      '--online-import',
      help=(
          'Destination for a online import, where data is loaded onto the'
          ' appliance and automatically transferred to Cloud Storage whenever'
          ' it has an internet connection. Should be in the form of'
          ' `gs://my-bucket/path/to/folder/`.'
      ),
  )
  group.add_argument(
      '--offline-import',
      help=(
          'Destination for a offline import, where data is loaded onto the'
          ' appliance at the customer site and ingested at Google. Should be in'
          ' the form of `gs://my-bucket/path/to/folder/`.'
      ),
  )
  group.add_argument(
      '--offline-export',
      type=arg_parsers.ArgDict(spec={'source': str, 'manifest': str}),
      help=_OFFLINE_EXPORT_HELP,
  )
def add_delivery_information(parser, for_create_command=True):
  """Adds delivery flags for appliances orders create/update commands.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    for_create_command (bool): When True, also adds the immutable --country
      flag, which can only be set at creation time.
  """
  if for_create_command:
    parser.add_argument(
        '--country',
        choices=regions.APPROVED_COUNTRIES,
        required=True,
        help=(
            'Country where the appliance will be shipped. Note that this cannot'
            ' be changed. To ship the appliance to a different country, clone'
            ' the order instead and set a different country and delivery '
            ' address. To view a complete list of country codes and names see'
            ' https://support.google.com/business/answer/6270107.'
        ),
    )
  group = parser.add_group(category='DELIVERY')
  group.add_argument(
      '--address',
      type=arg_parsers.ArgDict(
          spec={
              'lines': arg_parsers.ArgList(max_length=5),
              'locality': str,
              'administrative-area': str,
              'postal-code': str,
          },
          allow_key_only=True,
          required_keys=['lines'],
      ),
      help=_ADDRESS_HELP,
  )
  group.add_argument(
      '--delivery-notes',
      help=(
          'Add any additional details about your order, such as site details'
          ' and a preference date when the appliance should be delivered.'
      ),
  )
  # Both contact flags share the same ArgDict schema.
  contact_type = arg_parsers.ArgDict(
      spec={
          'business': str,
          'name': str,
          'phone': str,
          'emails': arg_parsers.ArgList(),
      },
      allow_key_only=True,
      required_keys=['name', 'phone', 'emails'],
  )
  group.add_argument('--order-contact', type=contact_type, help=_CONTACT_HELP)
  group.add_argument(
      '--shipping-contact', type=contact_type, help=_CONTACT_HELP)

View File

@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for transfer appliances commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.transfer.appliances import flags
def apply_args_to_appliance(appliance_resource, args):
  """Maps command arguments to appliance resource values.

  Mutates appliance_resource in place with any values set in args.

  Args:
    appliance_resource (messages.Appliance): The target appliance resource.
    args (parser_extensions.Namespace): The args from the command.

  Returns:
    str: A comma-delimited string of the names of the updated fields,
      suitable for use as an update mask.
  """
  update_mask = []
  if args.model is not None:
    appliance_resource.model = getattr(flags.APPLIANCE_MODEL_ENUM, args.model)
    update_mask.append('model')
  # Using IsSpecified here ensures we can clear these fields with an empty
  # string.
  if args.IsSpecified('display_name'):
    appliance_resource.displayName = args.display_name
    update_mask.append('displayName')
  if args.IsSpecified('cmek'):
    appliance_resource.customerManagedKey = args.cmek
    update_mask.append('customerManagedKey')
  # We use hasattr() because the --internet-enabled flag is only available to
  # the create command.
  # NOTE(review): internetEnabled is intentionally(?) absent from the update
  # mask — confirm that only create uses this field.
  if hasattr(args, 'internet_enabled'):
    appliance_resource.internetEnabled = args.internet_enabled
  if args.offline_import is not None:
    destination = _get_gcs_destination_from_url_string(args.offline_import)
    appliance_resource.offlineImportFeature = destination
    update_mask.append('offlineImportFeature')
  if args.online_import is not None:
    destination = _get_gcs_destination_from_url_string(args.online_import)
    appliance_resource.onlineImportFeature = destination
    update_mask.append('onlineImportFeature')
  if args.offline_export is not None:
    offline_export = {'source': []}
    source = args.offline_export.get('source', None)
    manifest = args.offline_export.get('manifest', None)
    if source is not None:
      # The API expects 'bucket' to hold the bucket plus the object path.
      bucket, path = _get_bucket_folder_from_url_string(source)
      offline_export['source'].append({'bucket': '{}/{}'.format(bucket, path)})
    if manifest is not None:
      offline_export['transferManifest'] = {'location': manifest}
    appliance_resource.offlineExportFeature = offline_export
    update_mask.append('offlineExportFeature')
  return ','.join(update_mask)
def _apply_args_to_order_contact(contact_field):
"""Maps command arguments to order contact values."""
emails = contact_field.get('emails', [])
return {
'email': emails.pop(0),
'additionalEmails': emails,
'business': contact_field.get('business', None),
'contactName': contact_field.get('name', None),
'phone': contact_field.get('phone', None)
}
def apply_args_to_order(order_resource, args, appliance_name=None):
  """Maps command arguments to order resource values.

  Mutates order_resource in place with any values set in args.

  Args:
    order_resource (messages.Order): The target order resource.
    args (parser_extensions.Namespace): The args from the command.
    appliance_name (str): The name of the appliance associated with the order.

  Returns:
    str: A comma-delimited string of the names of the updated fields,
      suitable for use as an update mask.
  """
  update_mask = []
  # Using IsSpecified here ensures we can clear these fields.
  if args.IsSpecified('delivery_notes'):
    order_resource.deliveryNotes = args.delivery_notes
    update_mask.append('deliveryNotes')
  if args.IsSpecified('display_name'):
    order_resource.displayName = args.display_name
    update_mask.append('displayName')
  # NOTE(review): 'appliances' is not added to the update mask — confirm this
  # path is only exercised at creation time.
  if appliance_name is not None:
    order_resource.appliances = [appliance_name]
  if args.address is not None:
    order_resource.address = {
        'addressLines': args.address.get('lines', None),
        'locality': args.address.get('locality', None),
        'administrativeArea': args.address.get('administrative-area', None),
        'postalCode': args.address.get('postal-code', None),
        'regionCode': _get_region_code(order_resource, args),
    }
    update_mask.append('address')
  if args.order_contact is not None:
    order_resource.orderContact = _apply_args_to_order_contact(
        args.order_contact)
    update_mask.append('orderContact')
  if args.shipping_contact is not None:
    order_resource.shippingContact = _apply_args_to_order_contact(
        args.shipping_contact)
    update_mask.append('shippingContact')
  return ','.join(update_mask)
def _get_region_code(order_resource, args):
"""Get region code either from the country arg or the previous value."""
# The create command requires country, but its immutable and therefore isn't
# available on the update command.
if hasattr(args, 'country'):
return args.country
# The update command will be able to use the code from the previous address.
return order_resource.address.regionCode
def _get_bucket_folder_from_url_string(url_string):
  """Takes a storage_url string and returns a tuple of bucket and folder."""
  url = storage_url.storage_url_from_string(url_string)
  folder = url.resource_name
  # Normalize a non-empty folder to always end with a trailing slash.
  if folder is not None and not folder.endswith('/'):
    folder = '{}/'.format(folder)
  return url.bucket_name, folder
def _get_gcs_destination_from_url_string(url_string):
  """Takes a storage_url string and returns a GcsDestination."""
  bucket, folder = _get_bucket_folder_from_url_string(url_string)
  destination = {
      'outputBucket': bucket,
      'outputPath': folder,
  }
  return {'destination': destination}

View File

@@ -0,0 +1,216 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Appliance offline import feature printer."""
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core.resource import custom_printer_base
from googlecloudsdk.core.resource import flattened_printer
from googlecloudsdk.core.resource import resource_transform
# Format key used to select this printer for describe output.
OFFLINE_IMPORT_PRINTER_FORMAT = "offlineImportFeature"


class OfflineImportPrinter(custom_printer_base.CustomPrinterBase):
  """Prints Offline Import feature fields in customized format."""

  # Message module used to resolve the feature's state enum values.
  MESSAGES = apis.GetMessagesModule("transferappliance", "v1alpha1")

  def Transform(self, resp):
    """Transforms Offline Import feature data into a customized format.

    Args:
      resp: Response object containing data for the offline import feature,
        including its status, bytes transferred, objects transferred,
        destination, end time, and any missing files.

    Example output:
      Status : Completed with errors
      Bytes transferred : 1.8 MiB of 2.5 MiB
      Objects transferred : 8 objects of 10 objects
      Destination : example-bucket
      Start time : June 10, 2024, 06:47 PM UTC
      End time : March 12, 2024, 04:30 PM UTC
      Found Files : gs://example-bucket/logs/found_files.log
      Missing Files : gs://example-bucket/logs/failed_transfers.log
    """
    feature = resp.offlineImportFeature
    # Each record prints as one 'label : value' line, in this order.
    records = [
        {"Status ": self._get_status_message(feature)},
        {"Bytes prepared ": self._get_bytes_prepared(feature)},
        {"Bytes transferred ": self._get_bytes_transferred(feature)},
        {"Objects transferred ": self._get_objects_transferred(feature)},
        {"Destination ": self._get_destination(feature)},
        {"Start time ": self._get_start_time(feature)},
        {"End time ": self._get_end_time(feature)},
        {"Found Files ": self._get_found_files(feature)},
        {"Missing Files ": self._get_missing_files(feature)},
    ]
    printer = flattened_printer.FlattenedPrinter()
    for record in records:
      printer.AddRecord(record, delimit=False)

  def _get_status_message(self, offline_import_feature):
    """Returns a human-readable status string for the feature state."""
    state = self._get_value(offline_import_feature, "state")
    if state is None:
      return "-"
    state_enum = self.MESSAGES.OfflineImportFeature.StateValueValuesEnum
    # A COMPLETED transfer may still have skipped objects, so inspect the
    # transfer results before reporting success.
    if state == state_enum.COMPLETED:
      return self._check_completion_status(offline_import_feature)
    status_messages = {
        state_enum.STATE_UNSPECIFIED: "State unspecified",
        state_enum.DRAFT: "Draft",
        state_enum.ACTIVE: "Not Yet Started",
        state_enum.PREPARING: "Preparing data for transfer",
        state_enum.TRANSFERRING: "Transferring data to customer bucket",
        state_enum.VERIFYING: "Verifying Transferred data",
        state_enum.CANCELLED: "Cancelled",
    }
    return status_messages.get(state, "-")

  def _check_completion_status(self, offline_import_feature):
    """Distinguishes full from partial success for a completed transfer."""
    objects_found = self._get_value(
        offline_import_feature, "transferResults.objectsFoundCount"
    )
    objects_copied = self._get_value(
        offline_import_feature, "transferResults.objectsCopiedCount"
    )
    if objects_found is None or objects_copied is None:
      return "-"
    return (
        "Completed with errors"
        if objects_found > objects_copied
        else "Successfully Completed"
    )

  def _get_bytes_prepared(self, offline_import_feature):
    """Returns 'prepared of allocated' sizes, or '-' if counts are missing."""
    bytes_prepared = self._get_value(
        offline_import_feature, "preparedBytesCount"
    )
    bytes_allocated = self._get_value(
        offline_import_feature, "allocatedBytesCount"
    )
    if bytes_prepared is None or bytes_allocated is None:
      return "-"
    return (
        f"{resource_transform.TransformSize(bytes_prepared)}"
        f" of {resource_transform.TransformSize(bytes_allocated)}"
    )

  def _get_bytes_transferred(self, offline_import_feature):
    """Returns 'copied of found' sizes, or '-' if counts are missing."""
    bytes_copied = self._get_value(
        offline_import_feature, "transferResults.bytesCopiedCount"
    )
    bytes_found = self._get_value(
        offline_import_feature, "transferResults.bytesFoundCount"
    )
    if bytes_copied is None or bytes_found is None:
      return "-"
    # Use the values resolved above instead of re-reading the nested
    # attributes (consistent with _get_bytes_prepared).
    return (
        f"{resource_transform.TransformSize(bytes_copied)}"
        f" of {resource_transform.TransformSize(bytes_found)}"
    )

  def _get_objects_transferred(self, offline_import_feature):
    """Returns 'copied of found' object counts, or '-' if missing."""
    objects_copied = self._get_value(
        offline_import_feature, "transferResults.objectsCopiedCount"
    )
    objects_found = self._get_value(
        offline_import_feature, "transferResults.objectsFoundCount"
    )
    if objects_copied is None or objects_found is None:
      return "-"
    return f"{objects_copied} of {objects_found} objects"

  def _get_destination(self, offline_import_feature):
    """Returns the destination bucket, or '-' if unset."""
    destination = self._get_value(
        offline_import_feature, "destination.outputBucket"
    )
    return destination if destination else "-"

  def _get_start_time(self, offline_import_feature):
    """Returns the formatted transfer start time, or '-' if unset."""
    start_time = self._get_value(
        offline_import_feature, "transferResults.startTime"
    )
    if start_time:
      return resource_transform.TransformDate(
          start_time, format="%B %d, %Y, %I:%M %p %Z"
      )
    return "-"

  def _get_end_time(self, offline_import_feature):
    """Returns the formatted transfer end time, or '-' if unset."""
    end_time = self._get_value(
        offline_import_feature, "transferResults.endTime"
    )
    if end_time:
      return resource_transform.TransformDate(
          end_time, format="%B %d, %Y, %I:%M %p %Z"
      )
    return "-"

  def _get_found_files(self, offline_import_feature):
    """Returns the URI of the found-files log, or '-' if unset."""
    found_files = self._get_value(
        offline_import_feature, "transferResults.applianceFilesInfoUri"
    )
    return found_files if found_files else "-"

  def _get_missing_files(self, offline_import_feature):
    """Returns the URI of the failed-transfers error log, or '-' if unset."""
    missing_files = self._get_value(
        offline_import_feature, "transferResults.errorLog"
    )
    return missing_files if missing_files else "-"

  def _get_value(self, obj, attribute):
    """Returns a possibly dotted attribute of obj, or None if any link is missing."""
    for part in attribute.split("."):
      obj = getattr(obj, part, None)
      if obj is None:
        return None
    return obj

View File

@@ -0,0 +1,106 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for transfer appliances commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
class ApprovedCountries(enum.Enum):
  """Countries where Transfer Appliances are approved for use.

  Values are ISO 3166-1 alpha-2 country codes.
  """
  US = 'US'  # United States
  # Beginning of European Union countries
  AT = 'AT'
  BE = 'BE'
  BG = 'BG'
  HR = 'HR'
  CY = 'CY'
  CZ = 'CZ'
  DK = 'DK'
  EE = 'EE'
  FI = 'FI'
  FR = 'FR'
  DE = 'DE'
  GR = 'GR'
  HU = 'HU'
  IE = 'IE'
  IT = 'IT'
  LV = 'LV'
  LT = 'LT'
  LU = 'LU'
  MT = 'MT'
  NL = 'NL'
  PL = 'PL'
  PT = 'PT'
  RO = 'RO'
  SK = 'SK'
  SI = 'SI'
  ES = 'ES'
  SE = 'SE'
  GB = 'GB'  # United Kingdom
  # End of European Union countries
  CA = 'CA'  # Canada
  NO = 'NO'  # Norway
  CH = 'CH'  # Switzerland
  SG = 'SG'  # Singapore
class CloudRegions(enum.Enum):
  """Cloud regions used as locations for transfer appliance resources."""
  US_CENTRAL1 = 'us-central1'
  EUROPE_WEST1 = 'europe-west1'
  ASIA_SOUTHEAST1 = 'asia-southeast1'
# Flat value lists, used for flag "choices".
APPROVED_COUNTRIES = [e.value for e in ApprovedCountries]
CLOUD_REGIONS = [e.value for e in CloudRegions]
# Maps each approved country code to its nearest supported cloud region.
COUNTRY_TO_LOCATION_MAP = {
    ApprovedCountries.US.value: CloudRegions.US_CENTRAL1.value,  # United States
    ApprovedCountries.CA.value: CloudRegions.US_CENTRAL1.value,  # Canada
    # Beginning of European Union countries
    ApprovedCountries.AT.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.BE.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.BG.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.HR.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.CY.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.CZ.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.DK.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.EE.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.FI.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.FR.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.DE.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.GR.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.HU.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.IE.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.IT.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.LV.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.LT.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.LU.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.MT.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.NL.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.PL.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.PT.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.RO.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.SK.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.SI.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.ES.value: CloudRegions.EUROPE_WEST1.value,
    ApprovedCountries.SE.value: CloudRegions.EUROPE_WEST1.value,
    # Fixed: GB (United Kingdom) previously mapped to ASIA_SOUTHEAST1, which
    # contradicts its placement in the European group above.
    ApprovedCountries.GB.value: CloudRegions.EUROPE_WEST1.value,
    # End of European Union countries
    ApprovedCountries.NO.value: CloudRegions.EUROPE_WEST1.value,  # Norway
    ApprovedCountries.CH.value: CloudRegions.EUROPE_WEST1.value,  # Switzerland
    ApprovedCountries.SG.value: CloudRegions.ASIA_SOUTHEAST1.value,  # Singapore
}

View File

@@ -0,0 +1,274 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource flags for Transfer Appliance commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.transfer.appliances import regions
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.command_lib.util.concepts import presentation_specs
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
# Resource collection paths for the transferappliance API.
APPLIANCES_COLLECTION = 'transferappliance.projects.locations.appliances'
ORDERS_COLLECTION = 'transferappliance.projects.locations.orders'


class ResourceVerb(enum.Enum):
  """Actions taken on a resource; the value is interpolated into help text."""
  DELETE = 'delete'
  DESCRIBE = 'describe'
  LIST = 'list'
  UPDATE = 'update'
def appliance_attribute_config(name='appliance'):
  """Builds the attribute config for an appliance resource argument."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name',
      help_text='The appliance affiliated with the {resource}.')
def order_attribute_config(name='order'):
  """Builds the attribute config for an order resource argument."""
  return concepts.ResourceParameterAttributeConfig(
      name=name,
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name',
      help_text='The order affiliated with the {resource}.')
def region_attribute_config():
  """Builds the region attribute config, falling back to the --region flag."""
  fallthroughs = [deps.ArgFallthrough('--region')]
  return concepts.ResourceParameterAttributeConfig(
      name='region',
      help_text='The region affiliated with the {resource}.',
      fallthroughs=fallthroughs)
def get_appliance_resource_spec(resource_name='appliance'):
  """Returns the resource spec for a transfer appliance."""
  return concepts.ResourceSpec(
      APPLIANCES_COLLECTION,
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=region_attribute_config(),
      appliancesId=appliance_attribute_config(name=resource_name),
      disable_auto_completers=False)
def get_order_resource_spec(resource_name='order'):
  """Returns the resource spec for a transfer appliance order."""
  return concepts.ResourceSpec(
      ORDERS_COLLECTION,
      resource_name=resource_name,
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
      locationsId=region_attribute_config(),
      ordersId=order_attribute_config(name=resource_name),
      disable_auto_completers=False)
def _add_region_flag(parser, verb):
  """Adds a --region flag for appliances/orders.

  Normally we'd rely on the argument output by region_attribute_config() but
  adding the flag directly lets us set "choices" and lowercase the value.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    verb (ResourceVerb): The action taken on the resource, such as 'update'.
  """
  help_text = 'The location affiliated with the appliance order to {}.'.format(
      verb.value)
  parser.add_argument(
      '--region',
      choices=regions.CLOUD_REGIONS,
      type=str.lower,
      help=help_text)
def add_appliance_resource_arg(parser, verb):
  """Add a resource argument for a transfer appliance.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    verb (ResourceVerb): The action taken on the resource, such as 'update'.
  """
  presentation = concept_parsers.ConceptParser.ForResource(
      'appliance',
      get_appliance_resource_spec(),
      'The appliance to {}.'.format(verb.value),
      flag_name_overrides={'region': ''},
      prefixes=True,
      required=True)
  presentation.AddToParser(parser)
  _add_region_flag(parser, verb)
def add_order_resource_arg(parser, verb):
  """Add a resource argument for a transfer appliance order.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    verb (ResourceVerb): The action taken on the resource, such as 'update'.
  """
  presentation = concept_parsers.ConceptParser.ForResource(
      'order',
      get_order_resource_spec(),
      'The order to {}.'.format(verb.value),
      flag_name_overrides={'region': ''},
      prefixes=True,
      required=True)
  presentation.AddToParser(parser)
  _add_region_flag(parser, verb)
def add_clone_resource_arg(parser):
  """Add a resource argument for cloning a transfer appliance.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser (arg_parse.Parser): The parser for the command.
  """
  presentation = concept_parsers.ConceptParser.ForResource(
      '--clone',
      get_order_resource_spec(),
      'The order to clone.',
      required=False,
      prefixes=True)
  presentation.AddToParser(parser)
def _get_appliance_uri(appliance):
  """Returns the self-link URI for the given appliance resource."""
  ref = resources.REGISTRY.Parse(
      appliance.name,
      collection=APPLIANCES_COLLECTION,
      params={'projectsId': properties.VALUES.core.project.Get()})
  return ref.SelfLink()
def _get_order_uri(order):
  """Returns the self-link URI for the given order resource."""
  ref = resources.REGISTRY.Parse(
      order.name,
      collection=ORDERS_COLLECTION,
      params={'projectsId': properties.VALUES.core.project.Get()})
  return ref.SelfLink()
def add_list_resource_args(parser, listing_orders=True):
  """Add both order and appliance resource arguments for list commands.

  Args:
    parser (arg_parse.Parser): The parser for the command.
    listing_orders (bool): Toggles the help text phrasing to match either
      orders or appliances being the resource being listed.
  """
  verb = ResourceVerb.LIST
  if listing_orders:
    orders_help = 'The orders to {}.'.format(verb.value)
    appliances_help = 'The appliances associated with the orders to {}.'.format(
        verb.value)
    parser.display_info.AddUriFunc(_get_order_uri)
  else:
    appliances_help = 'The appliances to {}.'.format(verb.value)
    orders_help = 'The orders associated with the appliances to {}.'.format(
        verb.value)
    parser.display_info.AddUriFunc(_get_appliance_uri)
  appliances_spec = presentation_specs.ResourcePresentationSpec(
      '--appliances',
      get_appliance_resource_spec('appliances'),
      appliances_help,
      flag_name_overrides={'region': ''},
      plural=True,
      prefixes=False)
  orders_spec = presentation_specs.ResourcePresentationSpec(
      '--orders',
      get_order_resource_spec('orders'),
      orders_help,
      flag_name_overrides={'region': ''},
      plural=True,
      prefixes=True)
  concept_parsers.ConceptParser(
      [appliances_spec, orders_spec]).AddToParser(parser)
  _add_region_flag(parser, verb)
def _get_filter_clause_from_resources(filter_key, resource_refs):
if not resource_refs:
return ''
filter_list = [
'{}:{}'.format(filter_key, ref.RelativeName()) for ref in resource_refs
]
resource_list = ' OR '.join(filter_list)
return '({})'.format(resource_list)
def parse_list_resource_args_as_filter_string(args, listing_orders=True):
  """Parses list resource args as a filter string.

  Args:
    args (parser_extensions.Namespace): the parsed arguments for the command.
    listing_orders (bool): Toggles the appropriate keys for order and appliance
      depending on which resource is primarily being listed.

  Returns:
    A filter string.
  """
  # Start from the user's own --filter expression, if any.
  filter_list = [args.filter] if args.filter else []
  if args.IsSpecified('orders'):
    order_refs = args.CONCEPTS.orders.Parse()
    if order_refs:
      # The primary resource filters on its own 'name'; the secondary resource
      # filters on the field that references it.
      filter_key = 'name' if listing_orders else 'order'
      filter_list.append(_get_filter_clause_from_resources(
          filter_key, order_refs))
  if args.IsSpecified('appliances'):
    appliance_refs = args.CONCEPTS.appliances.Parse()
    if appliance_refs:
      filter_key = 'appliances' if listing_orders else 'name'
      filter_list.append(_get_filter_clause_from_resources(
          filter_key, appliance_refs))
  return ' AND '.join(filter_list)
def get_parent_string(region):
  """Returns a presentation string for list and create calls, given a region."""
  project_id = properties.VALUES.core.project.Get()
  # Fall back to '-' when no region is given.
  if region:
    return 'projects/{}/locations/{}'.format(project_id, region)
  return 'projects/{}/locations/-'.format(project_id)
def get_appliance_name(locations_id, appliances_id):
  """Returns a full appliance resource name from location and appliance IDs."""
  return resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          APPLIANCES_COLLECTION,
          appliancesId=appliances_id,
          locationsId=locations_id,
          projectsId=properties.VALUES.core.project.Get()))
def get_order_name(locations_id, orders_id):
  """Returns a full order resource name from location and order IDs."""
  return resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          ORDERS_COLLECTION,
          ordersId=orders_id,
          locationsId=locations_id,
          projectsId=properties.VALUES.core.project.Get()))

View File

@@ -0,0 +1,135 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for handing transfer credentials."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import os
import boto3
from googlecloudsdk.core.resource import resource_property
from googlecloudsdk.core.util import files
from six.moves import configparser
def _assign_with_error_on_duplicate(key, value, result_dict):
"""Assigns value to results_dict and raises error on duplicate key."""
if key in result_dict:
raise KeyError('Duplicate key in file: {}'.format(key))
result_dict[key] = value
def _extract_keys(keys, search_dict, result_dict):
  """Converts key to multiple cases and attempts to extract from search_dict.

  For each requested key, an exact match wins; otherwise camelCase and
  snake_case variants of the key are tried, in that order. Matches are always
  stored under the originally requested key name.
  """
  for requested_key in keys:
    if requested_key in search_dict:
      _assign_with_error_on_duplicate(requested_key,
                                      search_dict[requested_key],
                                      result_dict)
      continue
    # Can error if both camel and snake case matches are present.
    # Note: The below conversion utils don't work all the time.
    # For example, they cannot handle kebab-case.
    candidate_keys = (
        resource_property.ConvertToCamelCase(requested_key),
        resource_property.ConvertToSnakeCase(requested_key),
    )
    for candidate_key in candidate_keys:
      if candidate_key in search_dict:
        _assign_with_error_on_duplicate(requested_key,
                                        search_dict[candidate_key],
                                        result_dict)
def get_values_for_keys_from_file(file_path, keys):
  """Reads JSON or INI file and returns dict with values for requested keys.

  JSON file keys should be top level.
  INI file sections will be flattened.

  Args:
    file_path (str): Path of JSON or INI file to read.
    keys (list[str]): Search for these keys to return from file.

  Returns:
    Dict[cred_key: cred_value].

  Raises:
    ValueError: The file was the incorrect format.
    KeyError: Duplicate key found.
  """
  result = {}
  # Expand '~' and resolve symlinks before reading.
  real_path = os.path.realpath(os.path.expanduser(file_path))
  with files.FileReader(real_path) as file_reader:
    try:
      # Try JSON first; fall back to INI if the content does not parse.
      file_dict = json.loads(file_reader.read())
      _extract_keys(keys, file_dict, result)
    except json.JSONDecodeError:
      # More file formats to try before raising error.
      config = configparser.ConfigParser()
      try:
        config.read(real_path)
      except configparser.ParsingError:
        raise ValueError('Source creds file must be JSON or INI format.')
      # Parse all sections of INI file into dict.
      for section in config:
        section_dict = dict(config[section])
        _extract_keys(keys, section_dict, result)
  return result
def get_aws_creds_from_file(file_path):
  """Scans file for AWS credentials keys.

  Key fields prefixed with "aws" take precedence.

  Args:
    file_path (str): Path to creds file.

  Returns:
    Tuple of (access_key_id, secret_access_key, role_arn).
    Each tuple entry can be a string or None.
  """
  creds_dict = get_values_for_keys_from_file(file_path, [
      'aws_access_key_id', 'aws_secret_access_key', 'access_key_id',
      'secret_access_key', 'role_arn'
  ])
  # "aws"-prefixed keys win over their unprefixed equivalents.
  access_key_id = creds_dict.get('aws_access_key_id',
                                 creds_dict.get('access_key_id', None))
  secret_access_key = creds_dict.get('aws_secret_access_key',
                                     creds_dict.get('secret_access_key', None))
  role_arn = creds_dict.get('role_arn', None)
  return access_key_id, secret_access_key, role_arn
def get_default_aws_creds():
  """Returns creds from common AWS config file paths.

  Currently does not return "role_arn" because there is no way to extract
  this data from a boto3 Session object.

  Returns:
    Tuple of (access_key_id, secret_access_key).
    Each tuple entry can be a string or None.
  """
  credentials = boto3.session.Session().get_credentials()
  if credentials:
    return credentials.access_key, credentials.secret_key
  return None, None

View File

@@ -0,0 +1,778 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for managng the many transfer job flags.
Tested through surface/transfer/jobs/create_test.py.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
from googlecloudsdk.command_lib.storage import errors
from googlecloudsdk.command_lib.storage import storage_url
from googlecloudsdk.command_lib.transfer import creds_util
from googlecloudsdk.command_lib.transfer import jobs_flag_util
from googlecloudsdk.command_lib.transfer import name_util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import times
# Job fields that are always eligible for update via PATCH.
UPDATE_FIELD_MASK = [
    'description',
    'logging_config',
    'notification_config',
    'status',
]
# Field mask used when the job carries a transfer spec.
UPDATE_FIELD_MASK_WITH_TRANSFER_SPEC = ','.join(
    UPDATE_FIELD_MASK + ['schedule', 'transfer_spec']
)
# Field mask used when the job carries a replication spec.
UPDATE_FIELD_MASK_WITH_REPLICATION_SPEC = ','.join(
    UPDATE_FIELD_MASK + ['replication_spec']
)
# Schemes accepted as both transfer sources and destinations.
COMMON_VALID_TRANSFER_SCHEMES = (
    storage_url.ProviderPrefix.POSIX,
    storage_url.ProviderPrefix.GCS,
    storage_url.ProviderPrefix.S3,
    storage_url.ProviderPrefix.HTTP,
    storage_url.ProviderPrefix.HTTPS,
)
# HDFS is accepted as a source only.
VALID_SOURCE_TRANSFER_SCHEMES = COMMON_VALID_TRANSFER_SCHEMES + (
    storage_url.ProviderPrefix.HDFS,
)
VALID_DESTINATION_TRANSFER_SCHEMES = COMMON_VALID_TRANSFER_SCHEMES
# Replication jobs support GCS on both ends only.
VALID_REPLICATON_SCHEMES = [storage_url.ProviderPrefix.GCS]
def _prompt_user_and_add_valid_scheme(url, valid_schemes):
  """Has user select a valid scheme from a list and returns new URL.

  Args:
    url (storage_url.StorageUrl): Parsed URL; scheme-less input parses to the
      FILE scheme, which triggers the prompt.
    valid_schemes: Iterable of ProviderPrefix values offered to the user.

  Returns:
    The original URL if its scheme was already valid, otherwise a copy of the
    URL with the user-selected scheme.

  Raises:
    errors.InvalidUrlError: A file URL was given but prompting is disabled.
  """
  # Prompt user if the provided URL lacks a scheme.
  if url.scheme is storage_url.ProviderPrefix.FILE:
    if not console_io.CanPrompt():
      raise errors.InvalidUrlError(
          'Did you mean "posix://{}"'.format(url.resource_name)
      )
    scheme_index = console_io.PromptChoice(
        [scheme.value + '://' for scheme in valid_schemes],
        cancel_option=True,
        message=(
            'Storage Transfer does not support direct file URLs: {}\n'
            'Did you mean to use "posix://"?\n'
            'Run this command with "--help" for more info,\n'
            'or select a valid scheme below.'
        ).format(url),
    )
    new_scheme = valid_schemes[scheme_index]
    return storage_url.switch_scheme(url, new_scheme)
  return url
def add_source_url(specs, args, messages, source_url):
  """Adds source url to transfer or replication spec.

  Args:
    specs:
      a submessage, must be one of [job.transferSpec, job.replicationSpec].
    args: argparse.namespace, the parsed arguments from the command line.
    messages: storagetransfer_v1_message instance.
    source_url:
      An instance of the storage_url variable specifying the source
      location for the data transfer.
  """
  # Exactly one data-source field is populated, chosen by the URL's scheme.
  if source_url.scheme is storage_url.ProviderPrefix.HDFS:
    specs.hdfsDataSource = messages.HdfsData(
        path=source_url.resource_name)
  elif source_url.scheme is storage_url.ProviderPrefix.POSIX:
    specs.posixDataSource = messages.PosixFilesystem(
        rootDirectory=source_url.resource_name)
  elif source_url.scheme is storage_url.ProviderPrefix.GCS:
    specs.gcsDataSource = messages.GcsData(
        bucketName=source_url.bucket_name,
        path=source_url.resource_name,
    )
  elif source_url.scheme is storage_url.ProviderPrefix.S3:
    # --source-endpoint signals an S3-compatible provider rather than AWS S3.
    if args.source_endpoint:
      specs.awsS3CompatibleDataSource = (
          messages.AwsS3CompatibleData(
              bucketName=source_url.bucket_name,
              endpoint=args.source_endpoint,
              path=source_url.resource_name,
              region=args.source_signing_region,
              s3Metadata=_get_s3_compatible_metadata(args, messages)))
    else:
      specs.awsS3DataSource = messages.AwsS3Data(
          bucketName=source_url.bucket_name,
          path=source_url.resource_name,
      )
  elif isinstance(source_url, storage_url.AzureUrl):
    specs.azureBlobStorageDataSource = (
        messages.AzureBlobStorageData(
            container=source_url.bucket_name,
            path=source_url.resource_name,
            storageAccount=source_url.account,
        ))
def add_destination_url(specs, messages, destination_url):
  """Records the destination URL on a transfer or replication spec.

  Args:
    specs:
      a submessage, must be one of [job.transferSpec, job.replicationSpec]
    messages: storagetransfer_v1_message instance.
    destination_url:
      An instance of the storage_url variable specifying the destination
      location for the data transfer.
  """
  scheme = destination_url.scheme
  if scheme is storage_url.ProviderPrefix.POSIX:
    specs.posixDataSink = messages.PosixFilesystem(
        rootDirectory=destination_url.resource_name)
  elif scheme is storage_url.ProviderPrefix.GCS:
    specs.gcsDataSink = messages.GcsData(
        bucketName=destination_url.bucket_name,
        path=destination_url.resource_name,
    )
def validate_and_add_source_url(
    specs, args, messages, source_url, valid_schemes
):
  """Validates the source URL, then records it on the given spec.

  If no URL scheme is provided, the user is prompted to add a valid one
  (e.g., 'gs://').

  Args:
    specs:
      a submessage, must be one of [job.transferSpec, job.replicationSpec].
    args: argparse.namespace, the parsed arguments from the command line.
    messages: storagetransfer_v1_message instance.
    source_url:
      An instance of the storage_url variable specifying the source
      location for the data transfer.
    valid_schemes: the schemes supported by the specs.
  """
  validated_url = _prompt_user_and_add_valid_scheme(source_url, valid_schemes)
  add_source_url(specs, args, messages, validated_url)
def validate_and_add_destination_url(
    specs, messages, destination_url, valid_schemes
):
  """Validates the destination URL, then records it on the given spec.

  If no URL scheme is provided, the user is prompted to add a valid one
  (e.g., 'gs://').

  Args:
    specs:
      a submessage, must be one of [job.transferSpec, job.replicationSpec]
    messages: storagetransfer_v1_message instance.
    destination_url:
      An instance of the storage_url variable specifying the destination
      location for the data transfer.
    valid_schemes: the schemes supported by the specs.
  """
  validated_url = _prompt_user_and_add_valid_scheme(
      destination_url, valid_schemes
  )
  add_destination_url(specs, messages, validated_url)
def _create_or_modify_transfer_options(transfer_spec, args, messages):
  """Creates or modifies TransferOptions object based on args."""
  # Bail out early if no transfer-option flag was supplied at all.
  if not (getattr(args, 'overwrite_when', None) or getattr(
      args, 'delete_from', None) or getattr(args, 'preserve_metadata', None) or
          getattr(args, 'custom_storage_class', None)):
    return
  if not transfer_spec.transferOptions:
    transfer_spec.transferOptions = messages.TransferOptions()
  overwrite_when_argument = getattr(args, 'overwrite_when', None)
  if overwrite_when_argument:
    # Flag values map directly to enum names (e.g. 'different' -> DIFFERENT).
    transfer_spec.transferOptions.overwriteWhen = getattr(
        messages.TransferOptions.OverwriteWhenValueValuesEnum,
        overwrite_when_argument.upper())
  if getattr(args, 'delete_from', None):
    delete_option = jobs_flag_util.DeleteOption(args.delete_from)
    if delete_option is jobs_flag_util.DeleteOption.SOURCE_AFTER_TRANSFER:
      transfer_spec.transferOptions.deleteObjectsFromSourceAfterTransfer = True
    elif delete_option is jobs_flag_util.DeleteOption.DESTINATION_IF_UNIQUE:
      transfer_spec.transferOptions.deleteObjectsUniqueInSink = True
  # Accumulate metadata options; only attached if any field was actually set.
  metadata_options = messages.MetadataOptions()
  if getattr(args, 'preserve_metadata', None):
    for field_value in args.preserve_metadata:
      field_key = jobs_flag_util.PreserveMetadataField(field_value)
      if field_key == jobs_flag_util.PreserveMetadataField.ACL:
        metadata_options.acl = (
            messages.MetadataOptions.AclValueValuesEnum.ACL_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.GID:
        metadata_options.gid = (
            messages.MetadataOptions.GidValueValuesEnum.GID_NUMBER)
      elif field_key == jobs_flag_util.PreserveMetadataField.UID:
        metadata_options.uid = (
            messages.MetadataOptions.UidValueValuesEnum.UID_NUMBER)
      elif field_key == jobs_flag_util.PreserveMetadataField.KMS_KEY:
        metadata_options.kmsKey = (
            messages.MetadataOptions.KmsKeyValueValuesEnum.KMS_KEY_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.MODE:
        metadata_options.mode = (
            messages.MetadataOptions.ModeValueValuesEnum.MODE_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.STORAGE_CLASS:
        metadata_options.storageClass = (
            messages.MetadataOptions.StorageClassValueValuesEnum
            .STORAGE_CLASS_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.SYMLINK:
        metadata_options.symlink = (
            messages.MetadataOptions.SymlinkValueValuesEnum.SYMLINK_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.TEMPORARY_HOLD:
        metadata_options.temporaryHold = (
            messages.MetadataOptions.TemporaryHoldValueValuesEnum
            .TEMPORARY_HOLD_PRESERVE)
      elif field_key == jobs_flag_util.PreserveMetadataField.TIME_CREATED:
        metadata_options.timeCreated = (
            messages.MetadataOptions.TimeCreatedValueValuesEnum.TIME_CREATED_PRESERVE_AS_CUSTOM_TIME
        )
  if getattr(args, 'custom_storage_class', None):
    # --custom-storage-class overrides any STORAGE_CLASS_PRESERVE set above.
    metadata_options.storageClass = getattr(
        messages.MetadataOptions.StorageClassValueValuesEnum,
        'STORAGE_CLASS_' + args.custom_storage_class.upper())
  if metadata_options != messages.MetadataOptions():
    transfer_spec.transferOptions.metadataOptions = metadata_options
def _create_or_modify_object_conditions(transfer_spec, args, messages):
  """Creates or modifies ObjectConditions based on args."""
  # Bail out early if no object-condition flag was supplied at all.
  if not (getattr(args, 'include_prefixes', None) or
          getattr(args, 'exclude_prefixes', None) or
          getattr(args, 'include_modified_before_absolute', None) or
          getattr(args, 'include_modified_after_absolute', None) or
          getattr(args, 'include_modified_before_relative', None) or
          getattr(args, 'include_modified_after_relative', None)):
    return
  if not transfer_spec.objectConditions:
    transfer_spec.objectConditions = messages.ObjectConditions()
  if getattr(args, 'include_prefixes', None):
    transfer_spec.objectConditions.includePrefixes = args.include_prefixes
  if getattr(args, 'exclude_prefixes', None):
    transfer_spec.objectConditions.excludePrefixes = args.exclude_prefixes
  if getattr(args, 'include_modified_before_absolute', None):
    # Normalize absolute timestamps to UTC ISO-8601 strings before sending.
    modified_before_datetime_string = (
        args.include_modified_before_absolute.astimezone(times.UTC).isoformat())
    transfer_spec.objectConditions.lastModifiedBefore = modified_before_datetime_string
  if getattr(args, 'include_modified_after_absolute', None):
    modified_after_datetime_string = (
        args.include_modified_after_absolute.astimezone(times.UTC).isoformat())
    transfer_spec.objectConditions.lastModifiedSince = modified_after_datetime_string
  if getattr(args, 'include_modified_before_relative', None):
    # Relative times are sent as second-denominated durations, e.g. '3600s'.
    transfer_spec.objectConditions.minTimeElapsedSinceLastModification = '{}s'.format(
        args.include_modified_before_relative)
  if getattr(args, 'include_modified_after_relative', None):
    transfer_spec.objectConditions.maxTimeElapsedSinceLastModification = '{}s'.format(
        args.include_modified_after_relative)
def _create_or_modify_creds(transfer_spec, args, messages):
  """Creates or modifies TransferSpec source creds based on args."""
  if transfer_spec.awsS3DataSource:
    if getattr(args, 'source_creds_file', None):
      access_key_id, secret_access_key, role_arn = (
          creds_util.get_aws_creds_from_file(args.source_creds_file))
    else:
      # Fall back to the default boto3 credential chain on this machine.
      log.warning('No --source-creds-file flag. Checking system config files'
                  ' for AWS credentials.')
      access_key_id, secret_access_key = creds_util.get_default_aws_creds()
      role_arn = None
    # Either a key pair or a role ARN is enough; warn (don't fail) otherwise.
    if not ((access_key_id and secret_access_key) or role_arn):
      log.warning('Missing AWS source creds.')
    transfer_spec.awsS3DataSource.awsAccessKey = messages.AwsAccessKey(
        accessKeyId=access_key_id, secretAccessKey=secret_access_key)
    transfer_spec.awsS3DataSource.roleArn = role_arn
  elif transfer_spec.azureBlobStorageDataSource:
    if getattr(args, 'source_creds_file', None):
      sas_token = creds_util.get_values_for_keys_from_file(
          args.source_creds_file, ['sasToken'])['sasToken']
    else:
      log.warning('No Azure source creds set. Consider adding'
                  ' --source-creds-file flag.')
      sas_token = None
    transfer_spec.azureBlobStorageDataSource.azureCredentials = (
        messages.AzureCredentials(sasToken=sas_token))
def _get_s3_compatible_metadata(args, messages):
  """Generates advanced settings for S3-compatible providers.

  Returns:
    A populated messages.S3CompatibleMetadata, or None when no related flag
    was supplied.
  """
  if not (args.source_auth_method or args.source_list_api or
          args.source_network_protocol or args.source_request_model):
    return None
  s3_compatible_metadata = messages.S3CompatibleMetadata()
  # Flag values map to enum names, with per-field prefixes where the API's
  # enum constants carry one.
  if args.source_auth_method:
    s3_compatible_metadata.authMethod = getattr(
        messages.S3CompatibleMetadata.AuthMethodValueValuesEnum,
        'AUTH_METHOD_' + args.source_auth_method)
  if args.source_list_api:
    s3_compatible_metadata.listApi = getattr(
        messages.S3CompatibleMetadata.ListApiValueValuesEnum,
        args.source_list_api)
  if args.source_network_protocol:
    s3_compatible_metadata.protocol = getattr(
        messages.S3CompatibleMetadata.ProtocolValueValuesEnum,
        'NETWORK_PROTOCOL_' + args.source_network_protocol)
  if args.source_request_model:
    s3_compatible_metadata.requestModel = getattr(
        messages.S3CompatibleMetadata.RequestModelValueValuesEnum,
        'REQUEST_MODEL_' + args.source_request_model)
  return s3_compatible_metadata
def _add_additional_s3_source_options(transfer_spec, args):
"""Adds additional options for S3 source."""
if args.s3_cloudfront_domain:
transfer_spec.awsS3DataSource.cloudfrontDomain = args.s3_cloudfront_domain
def _create_or_modify_transfer_spec(job, args, messages):
  """Creates or modifies TransferSpec based on args."""
  if not job.transferSpec:
    job.transferSpec = messages.TransferSpec()
  if getattr(args, 'source', None):
    # Clear any existing source to make space for new one.
    job.transferSpec.httpDataSource = None
    job.transferSpec.posixDataSource = None
    job.transferSpec.gcsDataSource = None
    job.transferSpec.awsS3CompatibleDataSource = None
    job.transferSpec.awsS3DataSource = None
    job.transferSpec.azureBlobStorageDataSource = None
    job.transferSpec.hdfsDataSource = None
    try:
      source_url = storage_url.storage_url_from_string(args.source)
    except errors.InvalidUrlError:
      # http(s) sources are URL lists, not storage URLs, so they fail the
      # regular parse and are handled here.
      if args.source.startswith(storage_url.ProviderPrefix.HTTP.value):
        job.transferSpec.httpDataSource = messages.HttpData(listUrl=args.source)
        source_url = None
      else:
        raise
    else:
      validate_and_add_source_url(
          job.transferSpec,
          args,
          messages,
          source_url,
          VALID_SOURCE_TRANSFER_SCHEMES,
      )
      # If additional options are specified for S3 source, add them here.
      if job.transferSpec.awsS3DataSource:
        _add_additional_s3_source_options(job.transferSpec, args)
  if getattr(args, 'destination', None):
    # Clear any existing destination to make space for new one.
    job.transferSpec.posixDataSink = None
    job.transferSpec.gcsDataSink = None
    destination_url = storage_url.storage_url_from_string(args.destination)
    validate_and_add_destination_url(
        job.transferSpec,
        messages,
        destination_url,
        VALID_DESTINATION_TRANSFER_SCHEMES,
    )
  if getattr(args, 'destination_agent_pool', None):
    job.transferSpec.sinkAgentPoolName = name_util.add_agent_pool_prefix(
        args.destination_agent_pool)
  if getattr(args, 'source_agent_pool', None):
    job.transferSpec.sourceAgentPoolName = name_util.add_agent_pool_prefix(
        args.source_agent_pool)
  if getattr(args, 'intermediate_storage_path', None):
    intermediate_storage_url = storage_url.storage_url_from_string(
        args.intermediate_storage_path)
    job.transferSpec.gcsIntermediateDataLocation = messages.GcsData(
        bucketName=intermediate_storage_url.bucket_name,
        path=intermediate_storage_url.resource_name)
  if getattr(args, 'manifest_file', None):
    job.transferSpec.transferManifest = messages.TransferManifest(
        location=args.manifest_file)
  # Sub-configs each no-op when their corresponding flags are absent.
  _create_or_modify_creds(job.transferSpec, args, messages)
  _create_or_modify_object_conditions(job.transferSpec, args, messages)
  _create_or_modify_transfer_options(job.transferSpec, args, messages)
def _create_or_modify_event_stream_configuration(job, args, messages):
"""Creates or modifies event stream config. Returns if flag present."""
event_stream_name = getattr(args, 'event_stream_name', None)
event_stream_start = getattr(args, 'event_stream_starts', None)
event_stream_expire = getattr(args, 'event_stream_expires', None)
if not (event_stream_name or event_stream_start or event_stream_expire):
# Nothing needs modification.
return False
if not job.eventStream:
job.eventStream = messages.EventStream()
job.eventStream.name = event_stream_name
job.eventStream.eventStreamStartTime = event_stream_start
job.eventStream.eventStreamExpirationTime = event_stream_expire
return True
def _create_or_modify_schedule(
    job, args, messages, is_update, is_event_driven_transfer=False
):
  """Creates or modifies transfer Schedule object based on args.

  Args:
    job: TransferJob message being built or updated.
    args: argparse.namespace, the parsed arguments from the command line.
    messages: storagetransfer_v1_messages instance.
    is_update (bool): True for job update, False for job create.
    is_event_driven_transfer (bool): Event-driven jobs cannot be scheduled.

  Raises:
    ValueError: Conflicting schedule flags were supplied.
  """
  schedule_starts = getattr(args, 'schedule_starts', None)
  schedule_repeats_every = getattr(args, 'schedule_repeats_every', None)
  schedule_repeats_until = getattr(args, 'schedule_repeats_until', None)
  has_schedule_flag = (
      schedule_starts or schedule_repeats_every or schedule_repeats_until
  )
  if has_schedule_flag:
    if not is_update and args.do_not_run:
      raise ValueError('Cannot set schedule and do-not-run flag.')
  if is_event_driven_transfer and (
      has_schedule_flag or getattr(args, 'do_not_run', False)
  ):
    raise ValueError('Cannot set schedule on event-driven transfer.')
  if (
      (not is_update and args.do_not_run)
      or is_event_driven_transfer
      or (is_update and not has_schedule_flag)
  ):
    # (1) Cannot have schedule for non-running job.
    # (2) Cannot have schedule on event-driven transfer.
    # (3) Nothing needs updating.
    return
  if not job.schedule:
    job.schedule = messages.Schedule()
  if schedule_starts:
    # Split the user's datetime into the API's separate date/time-of-day
    # fields, normalized to UTC.
    start = schedule_starts.astimezone(times.UTC)
    job.schedule.scheduleStartDate = messages.Date(
        day=start.day,
        month=start.month,
        year=start.year,
    )
    job.schedule.startTimeOfDay = messages.TimeOfDay(
        hours=start.hour,
        minutes=start.minute,
        seconds=start.second,
    )
  elif not is_update:
    # By default, run job immediately on create.
    today_date = datetime.date.today()
    job.schedule.scheduleStartDate = messages.Date(
        day=today_date.day, month=today_date.month, year=today_date.year)
  if schedule_repeats_every:
    job.schedule.repeatInterval = '{}s'.format(schedule_repeats_every)
  # Default behavior of running job every 24 hours if field not set will be
  # blocked by schedule_repeats_until handling.
  if schedule_repeats_until:
    if not job.schedule.repeatInterval:
      raise ValueError(
          'Scheduling a job end time requires setting a frequency with'
          ' --schedule-repeats-every. If no job end time is set, the job will'
          ' run one time.')
    end = schedule_repeats_until.astimezone(times.UTC)
    job.schedule.scheduleEndDate = messages.Date(
        day=end.day,
        month=end.month,
        year=end.year,
    )
    job.schedule.endTimeOfDay = messages.TimeOfDay(
        hours=end.hour,
        minutes=end.minute,
        seconds=end.second,
    )
  elif not is_update and not job.schedule.repeatInterval:
    # By default, run operation once on create.
    # If job frequency set, allow operation to repeat endlessly.
    job.schedule.scheduleEndDate = job.schedule.scheduleStartDate
def _create_or_modify_notification_config(job, args, messages, is_update=False):
  """Creates or modifies transfer NotificationConfig object based on args.

  Raises:
    ValueError: Event types or payload format were given without a Pub/Sub
      topic (new or pre-existing) to attach them to.
  """
  notification_pubsub_topic = getattr(args, 'notification_pubsub_topic', None)
  notification_event_types = getattr(args, 'notification_event_types', None)
  notification_payload_format = getattr(args, 'notification_payload_format',
                                        None)
  if not (notification_pubsub_topic or notification_event_types or
          notification_payload_format):
    # Nothing to modify with.
    return
  if notification_pubsub_topic:
    if not job.notificationConfig:
      # Create config with required PubSub topic.
      job.notificationConfig = messages.NotificationConfig(
          pubsubTopic=notification_pubsub_topic)
    else:
      job.notificationConfig.pubsubTopic = notification_pubsub_topic
  if (notification_event_types or
      notification_payload_format) and not job.notificationConfig:
    raise ValueError('Cannot set notification config without'
                     ' --notification-pubsub-topic.')
  if notification_payload_format:
    payload_format_key = notification_payload_format.upper()
    job.notificationConfig.payloadFormat = getattr(
        messages.NotificationConfig.PayloadFormatValueValuesEnum,
        payload_format_key)
  elif not is_update:
    # New job default.
    job.notificationConfig.payloadFormat = (
        messages.NotificationConfig.PayloadFormatValueValuesEnum.JSON)
  if notification_event_types:
    event_types = []
    for event_type_arg in notification_event_types:
      # Flag values map to enum names, e.g. 'success' ->
      # TRANSFER_OPERATION_SUCCESS.
      event_type_key = 'TRANSFER_OPERATION_' + event_type_arg.upper()
      event_type = getattr(
          messages.NotificationConfig.EventTypesValueListEntryValuesEnum,
          event_type_key)
      event_types.append(event_type)
    job.notificationConfig.eventTypes = event_types
  elif not is_update:
    # New job default.
    job.notificationConfig.eventTypes = [
        (messages.NotificationConfig.EventTypesValueListEntryValuesEnum
         .TRANSFER_OPERATION_SUCCESS),
        (messages.NotificationConfig.EventTypesValueListEntryValuesEnum
         .TRANSFER_OPERATION_FAILED),
        (messages.NotificationConfig.EventTypesValueListEntryValuesEnum
         .TRANSFER_OPERATION_ABORTED)
    ]
def _enable_onprem_gcs_transfer_logs(job, args, is_update):
"""Sets enableOnpremGcsTransferLogs boolean."""
enable_posix_transfer_logs = getattr(args, 'enable_posix_transfer_logs', None)
# GCS transfer logs only supported for POSIX.
if job.replicationSpec or not (
job.transferSpec.posixDataSource or job.transferSpec.posixDataSink
):
job.loggingConfig.enableOnpremGcsTransferLogs = False
# Caller has specifically enabled or disabled logs.
elif enable_posix_transfer_logs is not None:
job.loggingConfig.enableOnpremGcsTransferLogs = enable_posix_transfer_logs
# Default to creating new POSIX transfers with GCS transfer logs enabled.
elif not is_update:
job.loggingConfig.enableOnpremGcsTransferLogs = True
# Avoid modifying existing POSIX transfers on UpdateTransferJob.
else:
pass
return
def _create_or_modify_logging_config(job, args, messages, is_update):
  """Creates or modifies transfer LoggingConfig object based on args.

  Raises:
    ValueError: Only one of --log-actions / --log-action-states was supplied
      and the job has no existing value for the other.
  """
  if not job.loggingConfig:
    job.loggingConfig = messages.LoggingConfig()
  # TODO(b/322289474): enable-posix-transfer-logs logic can be cleaned up once
  # POSIX logs are deprecated.
  _enable_onprem_gcs_transfer_logs(job, args, is_update)
  log_actions = getattr(args, 'log_actions', None)
  log_action_states = getattr(args, 'log_action_states', None)
  if not (log_actions or log_action_states):
    # Nothing remaining to modify with.
    return
  existing_log_actions = job.loggingConfig and job.loggingConfig.logActions
  existing_log_action_states = (
      job.loggingConfig and job.loggingConfig.logActionStates)
  # A complete config needs both fields; one flag alone is acceptable only
  # when the job already has a value for the other field.
  if (not (log_actions and log_action_states) and
      ((log_actions and not existing_log_action_states) or
       (log_action_states and not existing_log_actions))):
    raise ValueError('Both --log-actions and --log-action-states are required'
                     ' for a complete log config.')
  if log_actions:
    actions = []
    for action in log_actions:
      actions.append(
          getattr(job.loggingConfig.LogActionsValueListEntryValuesEnum,
                  action.upper()))
    job.loggingConfig.logActions = actions
  if log_action_states:
    action_states = []
    for action_state in log_action_states:
      action_states.append(
          getattr(job.loggingConfig.LogActionStatesValueListEntryValuesEnum,
                  action_state.upper()))
    job.loggingConfig.logActionStates = action_states
def generate_patch_transfer_job_message(messages, job, field_mask):
  """Generates Apitools patch message for transfer jobs.

  Note: Mutates `job` in place (clears projectId and possibly schedule).

  Args:
    messages: storagetransfer_v1_messages instance.
    job: the TransferJob message to submit as the patch payload.
    field_mask (str): comma-separated job fields to update.

  Returns:
    A StoragetransferTransferJobsPatchRequest wrapping the job.
  """
  # For patch calls, projectId belongs on the request wrapper, not the job.
  project_id = job.projectId
  job.projectId = None
  if job.schedule == messages.Schedule():
    # Jobs returned by API are populated with their user-set schedule or an
    # empty schedule. Empty schedules cannot be re-submitted to the API.
    job.schedule = None
  return messages.StoragetransferTransferJobsPatchRequest(
      jobName=job.name,
      updateTransferJobRequest=messages.UpdateTransferJobRequest(
          projectId=project_id,
          transferJob=job,
          updateTransferJobFieldMask=field_mask,
      ))
def _create_or_modify_replication_spec(
    job, args, messages, has_event_stream_flag=False
):
  """Adds/Updates the replication spec to transfer job.

  Raises:
    ValueError: Event stream flags were set on a replication job.
    errors.Error: A non-GCS source or destination was given.
  """
  if has_event_stream_flag:
    raise ValueError(
        'Not allowed to set event stream flags on replication jobs.'
    )
  if not job.replicationSpec:
    job.replicationSpec = messages.ReplicationSpec()
  if getattr(args, 'source', None):
    # Clear any existing source to make space for new one.
    job.replicationSpec.gcsDataSource = None
    source_url = storage_url.storage_url_from_string(args.source)
    if source_url.scheme not in VALID_REPLICATON_SCHEMES:
      raise errors.Error(
          'Replication feature is currently available for Google Cloud Storage'
          ' buckets only.'
      )
    validate_and_add_source_url(
        job.replicationSpec,
        args,
        messages,
        source_url,
        VALID_REPLICATON_SCHEMES,
    )
  if getattr(args, 'destination', None):
    # Clear any existing destination to make space for new one.
    job.replicationSpec.gcsDataSink = None
    destination_url = storage_url.storage_url_from_string(args.destination)
    if destination_url.scheme not in VALID_REPLICATON_SCHEMES:
      raise errors.Error(
          'Replication feature is currently available for Google Cloud Storage'
          ' buckets only.'
      )
    validate_and_add_destination_url(
        job.replicationSpec, messages, destination_url, VALID_REPLICATON_SCHEMES
    )
  # These helpers no-op when their corresponding flags are absent.
  _create_or_modify_object_conditions(job.replicationSpec, args, messages)
  _create_or_modify_transfer_options(job.replicationSpec, args, messages)
def generate_transfer_job_message(args, messages, existing_job=None):
  """Generates Apitools transfer message based on command arguments.

  Args:
    args (argparse.Namespace): Flag values from the jobs create/update command.
    messages (module): Generated Apitools messages module for the
      storagetransfer API.
    existing_job: If set, the TransferJob fetched from the backend is updated
      in place (update flow); otherwise a new TransferJob is built (create
      flow).

  Returns:
    A messages.StoragetransferTransferJobsPatchRequest for updates, or the
    populated messages.TransferJob for creates.
  """
  if existing_job:
    job = existing_job
  else:
    job = messages.TransferJob()
  if not job.projectId:
    job.projectId = properties.VALUES.core.project.Get()
  if getattr(args, 'name', None):
    job.name = name_util.add_job_prefix(args.name)
  if getattr(args, 'description', None):
    job.description = args.description
  if existing_job:
    # Is job update instead of create.
    if getattr(args, 'status', None):
      status_key = args.status.upper()
      job.status = getattr(
          messages.TransferJob.StatusValueValuesEnum, status_key
      )
  else:
    # New jobs default to ENABLED.
    job.status = messages.TransferJob.StatusValueValuesEnum.ENABLED
  has_event_stream_flag = _create_or_modify_event_stream_configuration(
      job, args, messages
  )
  is_transfer_job = (
      (
          not existing_job and not getattr(args, 'replication', None)
      )  # In case of create, replication flag shouldn't be there.
      or job.transferSpec  # In case of update, transferSpec should exists.
  )
  if is_transfer_job:
    _create_or_modify_transfer_spec(job, args, messages)
  else:
    # Replication jobs reject event stream flags; the helper raises if any
    # were passed.
    _create_or_modify_replication_spec(
        job, args, messages, has_event_stream_flag=has_event_stream_flag
    )
  is_event_driven_transfer = job.eventStream or job.replicationSpec
  _create_or_modify_schedule(
      job,
      args,
      messages,
      is_update=bool(existing_job),
      is_event_driven_transfer=is_event_driven_transfer,
  )
  _create_or_modify_notification_config(
      job, args, messages, is_update=bool(existing_job)
  )
  _create_or_modify_logging_config(
      job, args, messages, is_update=bool(existing_job)
  )
  if existing_job:
    # Updates go through the patch API with a field mask matching the spec
    # type actually present on the job.
    update_mask = (
        UPDATE_FIELD_MASK_WITH_TRANSFER_SPEC
        if is_transfer_job
        else UPDATE_FIELD_MASK_WITH_REPLICATION_SPEC
    )
    return generate_patch_transfer_job_message(
        messages,
        job,
        update_mask
    )
  return job

View File

@@ -0,0 +1,634 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for managng the many transfer job flags.
Tested more through command surface tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import enum
from googlecloudsdk.calliope import arg_parsers
# Reusable help fragment describing POSIX filesystem paths; embedded in both
# the source and destination help text below.
_POSIX_SOURCE_OR_DESTINATION_HELP_TEXT = (
    'POSIX filesystem - Specify the `posix://` scheme followed by the absolute'
    ' path to the desired directory, starting from the root of the host machine'
    ' (denoted by a leading slash). For example:\n'
    '* posix:///path/directory/\n\n'
    'A file transfer agent must be installed on the POSIX filesystem, and you'
    ' need an agent pool flag on this `jobs` command to activate the agent.')
# Help fragment for HDFS sources (agent-based transfers; source only).
_HDFS_SOURCE_HELP_TEXT = (
    'Hadoop Distributed File System (HDFS) - Specify the `hdfs://` scheme'
    ' followed by the absolute path to the desired directory, starting from the'
    ' root of the file system (denoted by a leading slash). For example:\n'
    '* hdfs:///path/directory/\n\n'
    'Namenode details should not be included in the path specification, as they'
    ' are required separately during the agent installation process.\n\n'
    'A file transfer agent must be installed, and you need an agent pool flag'
    ' on this `jobs` command to activate the agent.')
# Help text for the 'source' positional argument and '--source' flag.
_SOURCE_HELP_TEXT = (
    'The source of your data. Available sources and formatting information:\n\n'
    'Public clouds -\n'
    '* [Google Cloud Storage] gs://example-bucket/example-folder/\n'
    '* [Amazon S3] s3://examplebucket/example-folder\n'
    '* [Azure Blob Storage or Data Lake Storage] http://examplestorageaccount.'
    'blob.core.windows.net/examplecontainer/examplefolder\n\n'
    '{}\n\n'
    '{}\n\n'
    'Publicly-accessible objects - Specify the URL of a TSV file containing a'
    ' list of URLs of publicly-accessible objects. For example:\n'
    '* http://example.com/tsvfile'
).format(_POSIX_SOURCE_OR_DESTINATION_HELP_TEXT, _HDFS_SOURCE_HELP_TEXT)
# Help text for the 'destination' positional argument and '--destination' flag.
_DESTINATION_HELP_TEXT = (
    'The destination of your transferred data. Available destinations and '
    ' formatting information:\n\n'
    'Google Cloud Storage - Specify the `gs://` scheme; name of the bucket;'
    ' and, if transferring to a folder, the path to the folder. For example:\n'
    '* gs://example-bucket/example-folder/\n\n'
    '{}'
).format(_POSIX_SOURCE_OR_DESTINATION_HELP_TEXT)
class AuthMethod(enum.Enum):
  """Signing processes accepted by --source-auth-method."""
  AWS_SIGNATURE_V2 = 'AWS_SIGNATURE_V2'
  AWS_SIGNATURE_V4 = 'AWS_SIGNATURE_V4'
class DeleteOption(enum.Enum):
  """Deletion behaviors accepted by --delete-from."""
  DESTINATION_IF_UNIQUE = 'destination-if-unique'
  SOURCE_AFTER_TRANSFER = 'source-after-transfer'
class JobStatus(enum.Enum):
  """Job states accepted by the update command's --status flag."""
  DELETED = 'deleted'
  DISABLED = 'disabled'
  ENABLED = 'enabled'
class LogAction(enum.Enum):
  """Transfer operation actions accepted by --log-actions."""
  COPY = 'copy'
  DELETE = 'delete'
  FIND = 'find'
class LogActionState(enum.Enum):
  """Action states accepted by --log-action-states."""
  FAILED = 'failed'
  SUCCEEDED = 'succeeded'
  SKIPPED = 'skipped'
class PreserveMetadataField(enum.Enum):
  """Metadata fields accepted by --preserve-metadata."""
  ACL = 'acl'
  GID = 'gid'
  KMS_KEY = 'kms-key'
  MODE = 'mode'
  STORAGE_CLASS = 'storage-class'
  SYMLINK = 'symlink'
  TEMPORARY_HOLD = 'temporary-hold'
  TIME_CREATED = 'time-created'
  UID = 'uid'
class ListApi(enum.Enum):
  """S3 listing API versions accepted by --source-list-api."""
  LIST_OBJECTS = 'LIST_OBJECTS'
  LIST_OBJECTS_V2 = 'LIST_OBJECTS_V2'
class NetworkProtocol(enum.Enum):
  """Network protocols accepted by --source-network-protocol."""
  HTTP = 'HTTP'
  HTTPS = 'HTTPS'
class OverwriteOption(enum.Enum):
  """Overwrite behaviors accepted by --overwrite-when."""
  ALWAYS = 'always'
  DIFFERENT = 'different'
  NEVER = 'never'
class RequestModel(enum.Enum):
  """S3 addressing styles accepted by --source-request-model."""
  PATH_STYLE = 'PATH_STYLE'
  VIRTUAL_HOSTED_STYLE = 'VIRTUAL_HOSTED_STYLE'
def add_source_creds_flag(parser):
  """Adds the --source-creds-file flag to the given parser/arg group."""
  parser.add_argument(
      '--source-creds-file',
      help='Path to a local file on your machine that includes credentials'
      ' for an Amazon S3 or Azure Blob Storage source (not required for'
      ' Google Cloud Storage sources). If not specified for an S3 source,'
      ' gcloud will check your system for an AWS config file. However, this'
      ' flag must be specified to use AWS\'s "role_arn" auth service. For'
      ' formatting, see:\n\n'
      'S3: https://cloud.google.com/storage-transfer/docs/reference/'
      'rest/v1/TransferSpec#AwsAccessKey\n'
      'Note: Be sure to put quotations around the JSON value strings.\n\n'
      'Azure: https://cloud.google.com/storage-transfer/docs/reference/rest/'
      'v1/TransferSpec#AzureCredentials\n\n')
def setup_parser(parser, is_update=False, release_track=None):
  """Adds flags to job create and job update commands.

  Args:
    parser (argparse.ArgumentParser): The parser to add flags to.
    is_update (bool): True for `jobs update` (adds `--clear-*` flags and makes
      source/destination optional flags); False for `jobs create`.
    release_track: Unused; kept for interface compatibility.
  """
  # Flags and arg groups appear in help text in the order they are added here.
  # The order was designed by UX, so please do not modify.
  del release_track  # Unused.
  parser.SetSortArgs(False)
  if is_update:
    parser.add_argument(
        'name', help="Name of the transfer job you'd like to update.")
  else:
    parser.add_argument('source', help=_SOURCE_HELP_TEXT)
    parser.add_argument('destination', help=_DESTINATION_HELP_TEXT)
  job_information = parser.add_group(help='JOB INFORMATION', sort_args=False)
  if is_update:
    job_information.add_argument(
        '--status',
        choices=sorted(status.value for status in JobStatus),
        help='Specify this flag to change the status of the job. Options'
        " include 'enabled', 'disabled', 'deleted'.")
    job_information.add_argument('--source', help=_SOURCE_HELP_TEXT)
    job_information.add_argument('--destination', help=_DESTINATION_HELP_TEXT)
    job_information.add_argument(
        '--clear-description',
        action='store_true',
        help='Remove the description from the transfer job.')
    job_information.add_argument(
        '--clear-source-creds-file',
        action='store_true',
        help='Remove the source creds file from the transfer job.')
    job_information.add_argument(
        '--clear-source-agent-pool',
        action='store_true',
        help='Remove the source agent pool from the transfer job.')
    job_information.add_argument(
        '--clear-destination-agent-pool',
        action='store_true',
        help='Remove the destination agent pool from the transfer job.')
    job_information.add_argument(
        '--clear-intermediate-storage-path',
        action='store_true',
        help='Remove the intermediate storage path from the transfer job.')
    job_information.add_argument(
        '--clear-manifest-file',
        action='store_true',
        help='Remove the manifest file from the transfer job.')
  else:
    job_information.add_argument(
        '--name',
        help='A unique identifier for the job. Referring to your source and'
        ' destination is recommended. If left blank, the name is'
        ' auto-generated upon submission of the job.')
  job_information.add_argument(
      '--description',
      help='An optional description to help identify the job using details'
      " that don't fit in its name.")
  add_source_creds_flag(job_information)
  job_information.add_argument(
      '--source-agent-pool',
      help='If using a POSIX filesystem source, specify the ID of the agent'
      ' pool associated with source filesystem.')
  job_information.add_argument(
      '--destination-agent-pool',
      help='If using a POSIX filesystem destination, specify the ID of the'
      ' agent pool associated with destination filesystem.')
  job_information.add_argument(
      '--intermediate-storage-path',
      help='If transferring between filesystems, specify the path to a folder'
      ' in a Google Cloud Storage bucket (gs://example-bucket/example-folder)'
      ' to use as intermediary storage. Recommended: Use an empty folder'
      " reserved for this transfer job to ensure transferred data doesn't"
      ' interact with any of your existing Cloud Storage data.')
  job_information.add_argument(
      '--manifest-file',
      help='Path to a .csv file containing a list of files to transfer from'
      ' your source. For manifest files in Cloud Storage, specify the absolute'
      ' path (e.g., `gs://mybucket/manifest.csv`). For manifest files stored in'
      ' a source or destination POSIX file system, provide the relative path'
      # Fixed typo: 'manfest.csv' -> 'manifest.csv'.
      ' (e.g., `source://path/to/manifest.csv` or'
      ' `destination://path/to/manifest.csv`). For manifest file formatting,'
      ' see https://cloud.google.com/storage-transfer/docs/manifest.')
  if not is_update:
    replication_group = parser.add_group(help='REPLICATION OPTIONS')
    replication_group.add_argument(
        '--replication',
        action='store_true',
        help=(
            'Enable replication to automatically copy all new and existing'
            ' objects from the source to the destination. Note: Objects'
            ' deleted from the source bucket will not be deleted from the'
            ' destination bucket. Please note that it is an event-driven'
            ' transfer.'
        ),
    )
  event_stream = parser.add_group(
      help=('EVENT STREAM\n\nConfigure an event stream to transfer data'
            ' whenever it is added or changed at your source, enabling you to'
            ' act on the data in near real time. This event-driven transfer'
            ' execution mode is available for transfers from Google Cloud'
            ' Storage and Amazon S3. For formatting information, see'
            ' https://cloud.google.com/sdk/gcloud/reference/topic/datetimes.'),
      sort_args=False)
  event_stream.add_argument(
      '--event-stream-name',
      help=('Specify an event stream that Storage Transfer Service can use to'
            ' listen for when objects are created or updated. For Google Cloud'
            ' Storage sources, specify a Cloud Pub/Sub subscription, using'
            ' format "projects/yourproject/subscriptions/yoursubscription". For'
            ' Amazon S3 sources, specify the Amazon Resource Name (ARN) of an'
            ' Amazon Simple Queue Service (SQS) queue using format'
            ' "arn:aws:sqs:region:account_id:queue_name".'))
  event_stream.add_argument(
      '--event-stream-starts',
      help=('Set when to start listening for events UTC using the'
            ' %Y-%m-%dT%H:%M:%S%z datetime format (e.g.,'
            ' 2020-04-12T06:42:12+04:00). If not set, the job will start'
            ' running and listening for events upon the successful submission'
            ' of the create job command.'))
  event_stream.add_argument(
      '--event-stream-expires',
      help=('Set when to stop listening for events UTC using the'
            ' %Y-%m-%dT%H:%M:%S%z datetime format (e.g.,'
            ' 2020-04-12T06:42:12+04:00). If not set, the job will continue'
            ' running and listening for events indefinitely.'))
  if is_update:
    event_stream.add_argument(
        '--clear-event-stream',
        action='store_true',
        # Fixed garbled copy-paste from the schedule help ("by clearing all
        # scheduling all event stream flags") and typo 'configuratin'.
        help=(
            "Remove the job's entire event stream configuration by clearing"
            ' all event stream flags. The job will no longer listen for'
            ' events unless a new configuration is specified.'))
  schedule = parser.add_group(
      help=("SCHEDULE\n\nA job's schedule determines when and how often the job"
            ' will run. For formatting information, see'
            ' https://cloud.google.com/sdk/gcloud/reference/topic/datetimes.'),
      sort_args=False)
  if is_update:
    schedule.add_argument(
        '--clear-schedule',
        action='store_true',
        help=("Remove the job's entire schedule by clearing all scheduling"
              ' flags. The job will no longer run unless an operation is'
              ' manually started or a new schedule is specified.'))
  else:
    schedule.add_argument(
        '--do-not-run',
        action='store_true',
        help='Disable default Transfer Service behavior of running job upon'
        ' creation if no schedule is set. If this flag is specified, the job'
        " won't run until an operation is manually started or a schedule is"
        ' added.')
  schedule.add_argument(
      '--schedule-starts',
      type=arg_parsers.Datetime.Parse,
      help='Set when the job will start using the %Y-%m-%dT%H:%M:%S%z'
      ' datetime format (e.g., 2020-04-12T06:42:12+04:00). If not set,'
      ' the job will run upon the successful submission of the create'
      ' job command unless the --do-not-run flag is included.')
  schedule.add_argument(
      '--schedule-repeats-every',
      type=arg_parsers.Duration(),
      help='Set the frequency of the job using the absolute duration'
      ' format (e.g., 1 month is p1m; 1 hour 30 minutes is 1h30m). If'
      ' not set, the job will run once.')
  schedule.add_argument(
      '--schedule-repeats-until',
      type=arg_parsers.Datetime.Parse,
      help='Set when the job will stop recurring using the'
      ' %Y-%m-%dT%H:%M:%S%z datetime format (e.g.,'
      ' 2020-04-12T06:42:12+04:00). If specified, you must also include a'
      ' value for the --schedule-repeats-every flag. If not specified, the'
      ' job will continue to repeat as specified in its repeat-every field'
      ' unless the job is manually disabled or you add this field later.')
  object_conditions = parser.add_group(
      help=(
          'OBJECT CONDITIONS\n\nA set of conditions to determine which objects'
          ' are transferred. For time-based object condition formatting tips,'
          ' see https://cloud.google.com/sdk/gcloud/reference/topic/datetimes.'
          ' Note: If you specify multiple conditions, objects must have at'
          " least one of the specified 'include' prefixes and all of the"
          " specified time conditions. If an object has an 'exclude' prefix, it"
          ' will be excluded even if it matches other conditions.'),
      sort_args=False)
  if is_update:
    object_conditions.add_argument(
        '--clear-include-prefixes',
        action='store_true',
        help='Remove the list of object prefixes to include from the'
        ' object conditions.')
    object_conditions.add_argument(
        '--clear-exclude-prefixes',
        action='store_true',
        help='Remove the list of object prefixes to exclude from the'
        ' object conditions.')
    object_conditions.add_argument(
        '--clear-include-modified-before-absolute',
        action='store_true',
        help='Remove the maximum modification datetime from the'
        ' object conditions.')
    object_conditions.add_argument(
        '--clear-include-modified-after-absolute',
        action='store_true',
        help='Remove the minimum modification datetime from the'
        ' object conditions.')
    object_conditions.add_argument(
        '--clear-include-modified-before-relative',
        action='store_true',
        help='Remove the maximum duration since modification from the'
        ' object conditions.')
    object_conditions.add_argument(
        '--clear-include-modified-after-relative',
        action='store_true',
        help='Remove the minimum duration since modification from the'
        ' object conditions.')
  object_conditions.add_argument(
      '--include-prefixes',
      type=arg_parsers.ArgList(),
      metavar='INCLUDED_PREFIXES',
      help='Include only objects that start with the specified prefix(es).'
      ' Separate multiple prefixes with commas, omitting spaces after'
      ' the commas (e.g., --include-prefixes=foo,bar).')
  object_conditions.add_argument(
      '--exclude-prefixes',
      type=arg_parsers.ArgList(),
      metavar='EXCLUDED_PREFIXES',
      help='Exclude any objects that start with the prefix(es) entered.'
      ' Separate multiple prefixes with commas, omitting spaces after'
      ' the commas (e.g., --exclude-prefixes=foo,bar).')
  object_conditions.add_argument(
      '--include-modified-before-absolute',
      type=arg_parsers.Datetime.Parse,
      help='Include objects last modified before an absolute date/time. Ex.'
      " by specifying '2020-01-01', the transfer would include objects"
      ' last modified before January 1, 2020. Use the'
      ' %Y-%m-%dT%H:%M:%S%z datetime format.')
  object_conditions.add_argument(
      '--include-modified-after-absolute',
      type=arg_parsers.Datetime.Parse,
      help='Include objects last modified after an absolute date/time. Ex.'
      " by specifying '2020-01-01', the transfer would include objects"
      ' last modified after January 1, 2020. Use the'
      ' %Y-%m-%dT%H:%M:%S%z datetime format.')
  object_conditions.add_argument(
      '--include-modified-before-relative',
      type=arg_parsers.Duration(),
      help='Include objects that were modified before a relative date/time in'
      " the past. Ex. by specifying a duration of '10d', the transfer"
      ' would include objects last modified *more than* 10 days before'
      ' its start time. Use the absolute duration format (ex. 1m for 1'
      ' month; 1h30m for 1 hour 30 minutes).')
  object_conditions.add_argument(
      '--include-modified-after-relative',
      type=arg_parsers.Duration(),
      help='Include objects that were modified after a relative date/time in'
      " the past. Ex. by specifying a duration of '10d', the transfer"
      ' would include objects last modified *less than* 10 days before'
      ' its start time. Use the absolute duration format (ex. 1m for 1'
      ' month; 1h30m for 1 hour 30 minutes).')
  transfer_options = parser.add_group(help='TRANSFER OPTIONS', sort_args=False)
  if is_update:
    transfer_options.add_argument(
        '--clear-delete-from',
        action='store_true',
        # Fixed double space after 'If'.
        help='Remove a specified deletion option from the transfer job. If'
        " this flag is specified, the transfer job won't delete any data from"
        ' your source or destination.')
    transfer_options.add_argument(
        '--clear-preserve-metadata',
        action='store_true',
        help='Skips preserving optional metadata fields of objects being'
        ' transferred.')
    transfer_options.add_argument(
        '--clear-custom-storage-class',
        action='store_true',
        help='Reverts to using destination default storage class.')
  transfer_options.add_argument(
      '--overwrite-when',
      choices=sorted(option.value for option in OverwriteOption),
      help='Determine when destination objects are overwritten by source'
      ' objects. Options include:\n'
      " - 'different' - Overwrites files with the same name if the contents"
      " are different (e.g., if etags or checksums don't match)\n"
      " - 'always' - Overwrite destination file whenever source file has the"
      " same name -- even if they're identical\n"
      " - 'never' - Never overwrite destination file when source file has the"
      ' same name')
  transfer_options.add_argument(
      '--delete-from',
      choices=sorted(option.value for option in DeleteOption),
      help="By default, transfer jobs won't delete any data from your source"
      ' or destination. These options enable you to delete data if'
      ' needed for your use case. Options include:\n'
      " - 'destination-if-unique' - Delete files from destination if they're"
      ' not also at source. Use to sync destination to source (i.e., make'
      ' destination match source exactly)\n'
      " - 'source-after-transfer' - Delete files from source after they're"
      ' transferred')
  transfer_options.add_argument(
      '--preserve-metadata',
      type=arg_parsers.ArgList(
          choices=sorted(field.value for field in PreserveMetadataField)),
      metavar='METADATA_FIELDS',
      help='Specify object metadata values that can optionally be preserved.'
      ' Example: --preserve-metadata=storage-class,uid\n\n'
      'For more info, see: https://cloud.google.com/storage-transfer/docs/'
      'metadata-preservation\n\n')
  transfer_options.add_argument(
      '--custom-storage-class',
      help='Specifies the storage class to set on objects being transferred to'
      " Cloud Storage buckets. If unspecified, the objects' storage class is"
      ' set to the destination bucket default.'
      ' Valid values are:\n\n'
      ' - Any of the values listed in the Cloud Storage documentation:'
      ' [Available storage classes](https://cloud.google.com/storage/docs/storage-classes#classes).\n'
      " - `preserve` - Preserves each object's original storage class. Only"
      ' supported for transfers between Cloud Storage buckets.\n'
      ' \nCustom storage class settings are ignored if the destination bucket'
      ' is'
      ' [Autoclass-enabled](https://cloud.google.com/storage/docs/autoclass).'
      ' Objects transferred into Autoclass-enabled buckets are initially'
      ' set to the `STANDARD` storage class.')
  notification_config = parser.add_group(
      # Fixed missing space between concatenated string literals
      # ('of' + 'transfer' previously rendered as "oftransfer").
      help=(
          'NOTIFICATION CONFIG\n\nA configuration for receiving notifications'
          ' of transfer operation status changes via Cloud Pub/Sub.'),
      sort_args=False)
  if is_update:
    notification_config.add_argument(
        '--clear-notification-config',
        action='store_true',
        help="Remove the job's full notification configuration to no"
        ' longer receive notifications via Cloud Pub/Sub.')
    notification_config.add_argument(
        '--clear-notification-event-types',
        action='store_true',
        help='Remove the event types from the notification config.')
  notification_config.add_argument(
      '--notification-pubsub-topic',
      help='Pub/Sub topic used for notifications.')
  notification_config.add_argument(
      '--notification-event-types',
      type=arg_parsers.ArgList(choices=['success', 'failed', 'aborted']),
      metavar='EVENT_TYPES',
      help='Define which change of transfer operation status will trigger'
      " Pub/Sub notifications. Choices include 'success', 'failed',"
      " 'aborted'. To trigger notifications for all three status changes,"
      " you can leave this flag unspecified as long as you've specified"
      ' a topic for the --notification-pubsub-topic flag.')
  notification_config.add_argument(
      '--notification-payload-format',
      choices=['json', 'none'],
      help="If 'none', no transfer operation details are included with"
      " notifications. If 'json', a json representation of the relevant"
      ' transfer operation is included in notification messages (e.g., to'
      ' see errors after an operation fails).')
  logging_config = parser.add_group(
      help=('LOGGING CONFIG\n\nConfigure which transfer actions and action'
            ' states are reported when logs are generated for this job. Logs'
            ' can be viewed by running the following command:\n'
            'gcloud logging read "resource.type=storage_transfer_job"'),
      sort_args=False)
  if is_update:
    logging_config.add_argument(
        '--clear-log-config',
        action='store_true',
        help="Remove the job's full logging config.")
  logging_config.add_argument(
      '--enable-posix-transfer-logs',
      action=arg_parsers.StoreTrueFalseAction,
      help='Sets whether to generate logs for transfers with a POSIX'
      ' filesystem source. This setting will later be merged with other log'
      ' configurations.')
  logging_config.add_argument(
      '--log-actions',
      type=arg_parsers.ArgList(
          choices=sorted(option.value for option in LogAction)),
      metavar='LOG_ACTIONS',
      help='Define the transfer operation actions to report in logs. Separate'
      ' multiple actions with commas, omitting spaces after the commas'
      ' (e.g., --log-actions=find,copy).')
  logging_config.add_argument(
      '--log-action-states',
      type=arg_parsers.ArgList(
          choices=sorted(option.value for option in LogActionState)),
      metavar='LOG_ACTION_STATES',
      help='The states in which the actions specified in --log-actions are'
      ' logged. Separate multiple states with a comma, omitting the space after'
      ' the comma (e.g., --log-action-states=succeeded,failed).')
  additional_options = parser.add_group(
      help='ADDITIONAL OPTIONS', sort_args=False)
  additional_options.add_argument(
      '--source-endpoint',
      help='For transfers from S3-compatible sources, specify your storage'
      " system's endpoint. Check with your provider for formatting (ex."
      ' s3.us-east-1.amazonaws.com for Amazon S3).')
  additional_options.add_argument(
      '--source-signing-region',
      help='For transfers from S3-compatible sources, specify a region for'
      ' signing requests. You can leave this unspecified if your storage'
      " provider doesn't require a signing region.")
  additional_options.add_argument(
      '--source-auth-method',
      choices=sorted(option.value for option in AuthMethod),
      help='For transfers from S3-compatible sources, choose a process for'
      " adding authentication information to S3 API requests. Refer to AWS's"
      ' SigV4 (https://docs.aws.amazon.com/general/latest/gr/signature-version'
      '-4.html) and SigV2 (https://docs.aws.amazon.com/general/latest/gr/'
      'signature-version-2.html) documentation for more information.')
  additional_options.add_argument(
      '--source-list-api',
      choices=sorted(option.value for option in ListApi),
      help='For transfers from S3-compatible sources, choose the version of the'
      " S3 listing API for returning objects from the bucket. Refer to AWS's"
      ' ListObjectsV2 (https://docs.aws.amazon.com/AmazonS3/latest/API/'
      'API_ListObjectsV2.html) and ListObjects (https://docs.aws.amazon.com/'
      'AmazonS3/latest/API/API_ListObjects.html) documentation for more'
      ' information.')
  additional_options.add_argument(
      '--source-network-protocol',
      choices=sorted(option.value for option in NetworkProtocol),
      help='For transfers from S3-compatible sources, choose the network'
      ' protocol agents should use for this job.')
  additional_options.add_argument(
      '--source-request-model',
      choices=sorted(option.value for option in RequestModel),
      # Fixed stray 'Ex.' in the middle of the sentence.
      help='For transfers from S3-compatible sources, choose which addressing'
      ' style to use. Determines if the bucket name is in the hostname or part'
      ' of the URL. For example, https://s3.region.amazonaws.com/bucket-name'
      '/key-name for path style and https://bucket-name.s3.region.'
      'amazonaws.com/key-name for virtual-hosted style.')
  additional_options.add_argument(
      '--s3-cloudfront-domain',
      help=(
          'For transfers from S3, optionally route egress traffic through a'
          ' CloudFront instance. Supply the endpoint of the CloudFront'
          ' instance: https://example.cloudfront.net. See documentation'
          ' (https://cloud.google.com/storage-transfer/docs/s3-cloudfront)'
          ' for more information.'
      ),
  )
  if is_update:
    additional_options.add_argument(
        '--clear-source-endpoint',
        action='store_true',
        help='Removes source endpoint.')
    additional_options.add_argument(
        '--clear-source-signing-region',
        action='store_true',
        help='Removes source signing region.')
    additional_options.add_argument(
        '--clear-source-auth-method',
        action='store_true',
        help='Removes source auth method.')
    additional_options.add_argument(
        '--clear-source-list-api',
        action='store_true',
        help='Removes source list API.')
    additional_options.add_argument(
        '--clear-source-network-protocol',
        action='store_true',
        help='Removes source network protocol.')
    additional_options.add_argument(
        '--clear-source-request-model',
        action='store_true',
        help='Removes source request model.')
    additional_options.add_argument(
        '--clear-s3-cloudfront-domain',
        action='store_true',
        help='Removes S3 CloudFront domain.',
    )
  if not is_update:
    execution_options = parser.add_group(
        help='EXECUTION OPTIONS', sort_args=False)
    execution_options.add_argument(
        '--no-async',
        action='store_true',
        help='For jobs set to run upon creation, this flag blocks other tasks'
        " in your terminal until the job's initial, immediate transfer"
        ' operation has completed. If not included, tasks will run'
        ' asynchronously.')

View File

@@ -0,0 +1,78 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for transfer list commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.core import log
# Backend default.
_TRANSFER_LIST_PAGE_SIZE = 256
def add_common_list_flags(parser):
  """Inheriting from ListCommand adds flags transfer needs to modify.

  Args:
    parser (argparse.ArgumentParser): Parser of a transfer list command.
  """
  parser.add_argument(
      '--limit',
      type=arg_parsers.BoundedInt(1, sys.maxsize, unlimited=True),
      help='Return the first items from the API up to this limit.')
  parser.add_argument(
      '--page-size',
      type=arg_parsers.BoundedInt(1, sys.maxsize, unlimited=True),
      default=_TRANSFER_LIST_PAGE_SIZE,
      help='Retrieve batches of this many items from the API.')
def print_transfer_resources_iterator(resource_iterator,
                                      command_display_function, command_args):
  """Displays transfer resources in fixed-size batches.

  Gcloud's built-in display logic has issues with enormous lists, so batches
  of _TRANSFER_LIST_PAGE_SIZE items are rendered as separate tables.

  Args:
    resource_iterator (iterable): Likely an instance of Apitools
      list_pager.YieldFromList but can also be a List.
    command_display_function (func): The self.Display function built into
      classes inheriting from base.Command.
    command_args (argparse.Namespace): The args object passed to self.Display
      and self.Run of commands inheriting from base.Command.
  """
  # Output looks like a table header repeated for each batch, e.g.:
  # NAME       STATUS
  # resource1  ENABLED
  # ...
  # (blank line, then the next batch with its own header)
  batch = []
  for resource in resource_iterator:
    batch.append(resource)
    if len(batch) < _TRANSFER_LIST_PAGE_SIZE:
      continue
    log.status.Print()
    command_display_function(command_args, batch)
    batch = []
  if batch:
    log.status.Print()
    command_display_function(command_args, batch)
  # Prevents command base class from trying to handle custom format after.
  command_args.format = None

View File

@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for manipulating transfer resource names."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from googlecloudsdk.core import properties
_JOBS_PREFIX_REGEX = r'^transferJobs/.+'
_OPERATIONS_PREFIX_REGEX = r'^transferOperations/.+'
_AGENT_POOLS_PREFIX_REGEX = r'^projects\/(.+)\/agentPools\/(.+)'
_JOBS_PREFIX_STRING = 'transferJobs/'
_OPERATIONS_PREFIX_STRING = 'transferOperations/'
def _add_single_transfer_prefix(prefix_to_check, prefix_to_add,
resource_string):
"""Adds prefix to one resource string if necessary."""
if re.match(prefix_to_check, resource_string):
return resource_string
return prefix_to_add + resource_string
def _add_transfer_prefix(prefix_to_check, prefix_to_add,
                         resource_string_or_list):
  """Adds prefix to one resource string or list of strings if necessary."""
  if isinstance(resource_string_or_list, list):
    # Prefix each element independently.
    return [
        _add_single_transfer_prefix(prefix_to_check, prefix_to_add, item)
        for item in resource_string_or_list
    ]
  if isinstance(resource_string_or_list, str):
    return _add_single_transfer_prefix(prefix_to_check, prefix_to_add,
                                       resource_string_or_list)
  raise ValueError('Argument must be string or list of strings.')
def add_job_prefix(job_name_string_or_list):
  """Ensures transfer job name(s) carry the 'transferJobs/' prefix."""
  return _add_transfer_prefix(
      _JOBS_PREFIX_REGEX, _JOBS_PREFIX_STRING, job_name_string_or_list)
def add_operation_prefix(job_operation_string_or_list):
  """Ensures transfer operation name(s) carry the 'transferOperations/' prefix.

  Args:
    job_operation_string_or_list (str|list[str]): Operation name(s), short or
      full.

  Returns:
    str|list[str]: Fully-qualified operation name(s).
  """
  return _add_transfer_prefix(
      _OPERATIONS_PREFIX_REGEX, _OPERATIONS_PREFIX_STRING,
      job_operation_string_or_list)
def add_agent_pool_prefix(agent_pool_string_or_list):
  """Ensures agent pool name(s) are fully qualified with a project prefix.

  Short pool names are expanded to "projects/[project]/agentPools/[name]"
  using the gcloud-wide project setting.

  Args:
    agent_pool_string_or_list (str|list[str]): Pool name(s), short or full.

  Returns:
    str|list[str]: Fully-qualified agent pool name(s).

  Raises:
    ValueError: If a short name needed expansion but no gcloud-wide project
      is set.
  """
  project_id = properties.VALUES.core.project.Get()
  qualified = _add_transfer_prefix(
      _AGENT_POOLS_PREFIX_REGEX,
      'projects/{}/agentPools/'.format(project_id),
      agent_pool_string_or_list)
  # A difference means at least one short name was expanded, which is only
  # valid when a real project ID was available to build the prefix.
  prefix_was_added = qualified != agent_pool_string_or_list
  if prefix_was_added and not project_id:
    raise ValueError(
        'Project ID not found. Please set a gcloud-wide project, or use full'
        ' agent pool names (e.g. "projects/[your project ID]/agentPools/[your'
        ' agent pool name]").')
  return qualified
def remove_job_prefix(job_string):
  """Strips the 'transferJobs/' prefix from a job name if present.

  Args:
    job_string (str): A job name, with or without its prefix.

  Returns:
    str: The short job name.
  """
  has_prefix = job_string.startswith(_JOBS_PREFIX_STRING)
  return job_string[len(_JOBS_PREFIX_STRING):] if has_prefix else job_string
def remove_operation_prefix(operation_string):
  """Strips the 'transferOperations/' prefix from an operation name if present.

  Args:
    operation_string (str): An operation name, with or without its prefix.

  Returns:
    str: The short operation name.
  """
  has_prefix = operation_string.startswith(_OPERATIONS_PREFIX_STRING)
  return (operation_string[len(_OPERATIONS_PREFIX_STRING):]
          if has_prefix else operation_string)
def remove_agent_pool_prefix(agent_pool_string):
  """Strips a "projects/.../agentPools/" prefix from a pool name if present.

  Args:
    agent_pool_string (str): An agent pool name, short or fully qualified.

  Returns:
    str: The short agent pool name.
  """
  match = re.search(_AGENT_POOLS_PREFIX_REGEX, agent_pool_string)
  # Group 2 of the regex captures the bare pool name.
  return match.group(2) if match else agent_pool_string
def get_agent_pool_project_from_string(agent_pool_string):
  """Extracts the project ID from a fully-qualified agent pool name.

  Args:
    agent_pool_string (str): Name in the form
      "projects/[project ID]/agentPools/[pool name]".

  Returns:
    str: The project ID portion of the name.

  Raises:
    ValueError: If the string is not a fully-qualified agent pool name.
  """
  match = re.search(_AGENT_POOLS_PREFIX_REGEX, agent_pool_string)
  if match is None:
    raise ValueError(
        'Full agent pool prefix required to extract project from string'
        ' (e.g. "projects/[project ID]/agentPools/[pool name]).')
  # Group 1 of the regex captures the project ID.
  return match.group(1)

View File

@@ -0,0 +1,8 @@
transfer_job:
name: job
collection: storagetransfer.transferJobs
attributes:
- &job
parameter_name: transferJobsId
attribute_name: job
help: The name of the storage transfer job.