feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Backup plan associations client."""
from __future__ import absolute_import
from __future__ import annotations
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.command_lib.backupdr import util as command_util
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.generated_clients.apis.backupdr.v1 import backupdr_v1_messages
class BackupPlanAssociationsClient(util.BackupDrClientBase):
  """Cloud Backup and DR Backup plan associations client."""

  def __init__(self):
    super(BackupPlanAssociationsClient, self).__init__()
    self.service = self.client.projects_locations_backupPlanAssociations

  def Create(
      self, bpa_resource, backup_plan, workload_resource, resource_type=""
  ):
    """Creates a backup plan association for the given workload resource."""
    association = self.messages.BackupPlanAssociation(
        backupPlan=backup_plan.RelativeName(),
        resource=workload_resource,
        resourceType=resource_type,
    )
    create_request = self.messages.BackupdrProjectsLocationsBackupPlanAssociationsCreateRequest(
        parent=bpa_resource.Parent().RelativeName(),
        backupPlanAssociation=association,
        backupPlanAssociationId=bpa_resource.Name(),
    )
    return self.service.Create(create_request)

  def ParseUpdate(self, backup_plan):
    """Builds the partial association message used by an update request."""
    patch_body = self.messages.BackupPlanAssociation()
    if backup_plan is not None:
      patch_body.backupPlan = backup_plan.RelativeName()
    return patch_body

  def Update(self, bpa_resource, bpa, update_mask):
    """Patches the backup plan association using the given update mask."""
    patch_request = self.messages.BackupdrProjectsLocationsBackupPlanAssociationsPatchRequest(
        backupPlanAssociation=bpa,
        name=bpa_resource.RelativeName(),
        requestId=command_util.GenerateRequestId(),
        updateMask=update_mask,
    )
    return self.service.Patch(patch_request)

  def Delete(self, resource):
    """Deletes the given backup plan association."""
    delete_request = self.messages.BackupdrProjectsLocationsBackupPlanAssociationsDeleteRequest(
        name=resource.RelativeName()
    )
    return self.service.Delete(delete_request)

  def TriggerBackup(
      self,
      resource: resources.Resource,
      backup_rule: str,
      custom_retention_days: int | None = None,
      labels: dict[str, str] | None = None,
  ) -> backupdr_v1_messages.Operation:
    """Triggers an on demand backup according to the given backup rule.

    Args:
      resource: The backup plan association resource.
      backup_rule: The backup rule to be used for the adhoc backup
      custom_retention_days: The custom retention days to be used for the adhoc
        backup
      labels: The labels to be applied to the backup.

    Returns:
      A long running operation
    """
    labels_value = None
    if labels:
      # Map-typed proto fields are expressed as repeated key/value entries.
      entries = []
      for label_key, label_val in labels.items():
        entries.append(
            self.messages.TriggerBackupRequest.LabelsValue.AdditionalProperty(
                key=label_key, value=label_val
            )
        )
      labels_value = self.messages.TriggerBackupRequest.LabelsValue(
          additionalProperties=entries
      )
    trigger_request = self.messages.BackupdrProjectsLocationsBackupPlanAssociationsTriggerBackupRequest(
        name=resource.RelativeName(),
        triggerBackupRequest=self.messages.TriggerBackupRequest(
            ruleId=backup_rule,
            customRetentionDays=custom_retention_days,
            labels=labels_value,
        ),
    )
    return self.service.TriggerBackup(trigger_request)

  def FetchForResourceType(
      self,
      location,
      resource_type,
      filter_expression=None,
      page_size=None,
      order_by=None,
  ):
    """Fetches associations for a resource type in the given location."""
    project = properties.VALUES.core.project.GetOrFail()
    fetch_request = self.messages.BackupdrProjectsLocationsBackupPlanAssociationsFetchForResourceTypeRequest(
        parent=f"projects/{project}/locations/{location}",
        resourceType=resource_type,
        pageSize=page_size,
        filter=filter_expression,
        orderBy=order_by,
    )
    return self.service.FetchForResourceType(fetch_request)

View File

@@ -0,0 +1,299 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup Plans client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.backupdr import util as command_util
# TODO: b/416214401 - Add type annotations.
class BackupPlansClient(util.BackupDrClientBase):
  """Cloud Backup Plans client."""

  def __init__(self):
    super(BackupPlansClient, self).__init__()
    self.service = self.client.projects_locations_backupPlans

  def _ParseBackupRules(self, backup_rules):
    """Converts backup-rule flag dicts into BackupRule messages.

    Args:
      backup_rules: A list of dicts, one per rule, keyed by the flag's
        sub-arguments ('rule-id', 'retention-days', 'recurrence',
        'backup-window-start', 'backup-window-end', plus the optional
        schedule keys handled below).

    Returns:
      A list of BackupRule messages, each carrying a StandardSchedule.
    """
    backup_rules_message = []
    for backup_rule in backup_rules:
      standard_schedule = self.messages.StandardSchedule()
      # Time zone defaults to UTC when the rule does not specify one.
      standard_schedule.timeZone = (
          'UTC' if 'time-zone' not in backup_rule else backup_rule['time-zone']
      )
      standard_schedule.backupWindow = self.messages.BackupWindow(
          startHourOfDay=backup_rule['backup-window-start'],
          endHourOfDay=backup_rule['backup-window-end'],
      )
      standard_schedule.recurrenceType = (
          self.messages.StandardSchedule.RecurrenceTypeValueValuesEnum(
              backup_rule['recurrence']
          )
      )
      # The remaining schedule fields are only set when the corresponding
      # key was supplied with the rule.
      if 'hourly-frequency' in backup_rule:
        standard_schedule.hourlyFrequency = backup_rule['hourly-frequency']
      if 'days-of-week' in backup_rule:
        standard_schedule.daysOfWeek = [
            self.messages.StandardSchedule.DaysOfWeekValueListEntryValuesEnum(
                day
            )
            for day in backup_rule['days-of-week']
        ]
      if 'week-day-of-month' in backup_rule:
        # Assumes the value is '<WEEK>-<DAY>' and only the first two
        # '-'-separated parts are meaningful — TODO confirm against the
        # flag's validation.
        week_day_of_month = backup_rule['week-day-of-month'].split('-')
        standard_schedule.weekDayOfMonth = self.messages.WeekDayOfMonth(
            weekOfMonth=self.messages.WeekDayOfMonth.WeekOfMonthValueValuesEnum(
                week_day_of_month[0]
            ),
            dayOfWeek=self.messages.WeekDayOfMonth.DayOfWeekValueValuesEnum(
                week_day_of_month[1]
            ),
        )
      if 'days-of-month' in backup_rule:
        standard_schedule.daysOfMonth = backup_rule['days-of-month']
      if 'months' in backup_rule:
        standard_schedule.months = [
            self.messages.StandardSchedule.MonthsValueListEntryValuesEnum(month)
            for month in backup_rule['months']
        ]
      backup_rule_message = self.messages.BackupRule(
          ruleId=backup_rule['rule-id'],
          backupRetentionDays=backup_rule['retention-days'],
          standardSchedule=standard_schedule,
      )
      backup_rules_message.append(backup_rule_message)
    return backup_rules_message

  def Create(
      self,
      resource,
      backup_vault,
      resource_type,
      backup_rules,
      log_retention_days,
      description,
      labels,
      max_custom_on_demand_retention_days,
  ):
    """Creates a Backup Plan.

    Args:
      resource: The Backup Plan resource.
      backup_vault: The Backup Vault resource.
      resource_type: The resource type of the Backup Plan.
      backup_rules: The backup rules of the Backup Plan.
      log_retention_days: The log retention days of the Backup Plan.
      description: The description of the Backup Plan.
      labels: The labels of the Backup Plan.
      max_custom_on_demand_retention_days: The custom on demand retention days
        limit of the Backup Plan.

    Returns:
      The created Backup Plan.
    """
    parent = resource.Parent().RelativeName()
    backup_plan_id = resource.Name()
    backup_plan = self.messages.BackupPlan(
        resourceType=resource_type,
        backupVault=backup_vault,
    )
    if description is not None:
      backup_plan.description = description
    if labels is not None:
      backup_plan.labels = self.messages.BackupPlan.LabelsValue(
          additionalProperties=[
              self.messages.BackupPlan.LabelsValue.AdditionalProperty(
                  key=key, value=value
              )
              for key, value in labels.items()
          ]
      )
    backup_plan.backupRules = self._ParseBackupRules(backup_rules)
    if log_retention_days is not None:
      backup_plan.logRetentionDays = log_retention_days
    if max_custom_on_demand_retention_days is not None:
      backup_plan.maxCustomOnDemandRetentionDays = (
          int(max_custom_on_demand_retention_days)
      )
    request = self.messages.BackupdrProjectsLocationsBackupPlansCreateRequest(
        parent=parent,
        backupPlan=backup_plan,
        backupPlanId=backup_plan_id,
    )
    return self.service.Create(request)

  def Describe(self, resource):
    """Describes a Backup Plan.

    Args:
      resource: The Backup Plan resource.

    Returns:
      The described Backup Plan.
    """
    request = self.messages.BackupdrProjectsLocationsBackupPlansGetRequest(
        name=resource.RelativeName()
    )
    return self.service.Get(request)

  def ParseUpdate(
      self,
      description,
      new_backup_rules_from_file,
      update_backup_rules,
      add_backup_rules,
      remove_backup_rules,
      current_backup_plan,
      log_retention_days,
      max_custom_on_demand_retention_days,
  ):
    """Parses the update request for a Backup Plan.

    Args:
      description: The description of the Backup Plan.
      new_backup_rules_from_file: The backup rules to update from file in the
        Backup Plan.
      update_backup_rules: The backup rules to update in the Backup Plan.
      add_backup_rules: The backup rules to add to the Backup Plan.
      remove_backup_rules: The backup rules to remove from the Backup Plan.
      current_backup_plan: The current Backup Plan.
      log_retention_days: The log retention days of the Backup Plan.
      max_custom_on_demand_retention_days: The custom on demand retention days
        limit of the Backup Plan.

    Returns:
      The updated Backup Plan.

    Raises:
      InvalidArgumentException: If the backup rules are invalid.
      ValueError: If the backup plan is not found.
    """
    if current_backup_plan is None:
      raise ValueError('Could not find the backup plan.')
    updated_backup_plan = self.messages.BackupPlan(
        resourceType=current_backup_plan.resourceType
    )
    if description is not None:
      updated_backup_plan.description = description
    if log_retention_days is not None:
      updated_backup_plan.logRetentionDays = log_retention_days
    if max_custom_on_demand_retention_days is not None:
      updated_backup_plan.maxCustomOnDemandRetentionDays = (
          int(max_custom_on_demand_retention_days)
      )
    current_rule_ids = {rule.ruleId for rule in current_backup_plan.backupRules}
    # A rules file replaces the whole rule set and short-circuits the
    # per-rule update/add/remove handling below.
    if new_backup_rules_from_file is not None:
      updated_backup_plan.backupRules = self._ParseBackupRules(
          new_backup_rules_from_file
      )
      return updated_backup_plan
    if update_backup_rules is not None:
      # Reject the same rule-id appearing in more than one --backup-rule.
      rule_ids = collections.Counter(
          [rule['rule-id'] for rule in update_backup_rules]
      )
      duplicate_rule_ids = [
          rule_id for rule_id, count in rule_ids.items() if count > 1
      ]
      if duplicate_rule_ids:
        raise exceptions.InvalidArgumentException(
            'rule-id',
            f'Rules {duplicate_rule_ids} found in more than one'
            ' --backup-rule flag.',
        )
      # Updates may only target rules that already exist on the plan.
      not_found_rule_ids = list(set([
          rule['rule-id']
          for rule in update_backup_rules
          if rule['rule-id'] not in current_rule_ids
      ]))
      if not_found_rule_ids:
        raise exceptions.InvalidArgumentException(
            'rule-id',
            f'Rules {not_found_rule_ids} not found in the backup plan.'
            ' The --backup-rule flag can only be used to modify existing'
            ' rules.',
        )
      # Keep the untouched rules, then append the re-parsed updated rules.
      update_rule_ids = [rule['rule-id'] for rule in update_backup_rules]
      updated_backup_plan.backupRules = [
          rule
          for rule in current_backup_plan.backupRules
          if rule.ruleId not in update_rule_ids
      ]
      updated_backup_plan.backupRules.extend(
          self._ParseBackupRules(update_backup_rules)
      )
    else:
      # NOTE(review): this aliases the current plan's rule list rather than
      # copying it; the extend/rebuild below then operates on that list.
      updated_backup_plan.backupRules = current_backup_plan.backupRules
    if add_backup_rules is not None:
      updated_backup_plan.backupRules.extend(
          self._ParseBackupRules(add_backup_rules)
      )
    if remove_backup_rules is not None:
      not_found_rule_ids = list(set([
          rule_id
          for rule_id in remove_backup_rules
          if rule_id not in current_rule_ids
      ]))
      if not_found_rule_ids:
        raise exceptions.InvalidArgumentException(
            'rule-id',
            f'Rules {not_found_rule_ids} not found in the backup plan.',
        )
      updated_backup_plan.backupRules = [
          rule for rule in updated_backup_plan.backupRules
          if rule.ruleId not in remove_backup_rules
      ]
    return updated_backup_plan

  def Update(self, resource, backup_plan, update_mask):
    """Updates a Backup Plan.

    Args:
      resource: The Backup Plan resource.
      backup_plan: The updated Backup Plan.
      update_mask: The update mask to edit the Backup Plan.

    Returns:
      The updated Backup Plan.
    """
    request_id = command_util.GenerateRequestId()
    request = self.messages.BackupdrProjectsLocationsBackupPlansPatchRequest(
        backupPlan=backup_plan,
        name=resource.RelativeName(),
        requestId=request_id,
        updateMask=update_mask,
    )
    return self.service.Patch(request)

  def Delete(self, resource):
    """Deletes a Backup Plan.

    Args:
      resource: The Backup Plan resource.

    Returns:
      The deleted Backup Plan.
    """
    request = self.messages.BackupdrProjectsLocationsBackupPlansDeleteRequest(
        name=resource.RelativeName()
    )
    return self.service.Delete(request)

View File

@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Backup Vaults client."""
import enum
from typing import Any, Optional, Mapping, Sequence
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.command_lib.backupdr import util as command_util
from googlecloudsdk.generated_clients.apis.backupdr.v1 import backupdr_v1_messages
class AccessRestriction(enum.Enum):
  """Flag values accepted for a backup vault's access restriction."""
  WITHIN_PROJECT = 'within-project'
  WITHIN_ORGANIZATION = 'within-org'
  UNRESTRICTED = 'unrestricted'
  WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA = 'within-org-but-unrestricted-for-ba'
class BackupVaultsClient(util.BackupDrClientBase):
  """Cloud Backup and DR Backup Vault client."""

  def __init__(self):
    super(BackupVaultsClient, self).__init__()
    self.service = self.client.projects_locations_backupVaults

  def Create(
      self,
      resource,
      backup_min_enforced_retention: str,
      description: Optional[str],
      labels: Mapping[str, str],
      effective_time: Optional[str],
      access_restriction: Optional[str],
      backup_retention_inheritance: Optional[str],
      encryption_config: Optional[backupdr_v1_messages.EncryptionConfig],
  ) -> Any:
    """Creates a Backup Vault.

    Args:
      resource: The backup vault resource; its parent resolves to the
        location under which the vault is created.
      backup_min_enforced_retention: The minimum enforced retention duration
        for backups stored in the vault.
      description: Optional human-readable description.
      labels: Labels to attach to the vault.
      effective_time: Optional effective time for the vault configuration.
      access_restriction: One of the AccessRestriction flag values, or None
        to use the default (within-org).
      backup_retention_inheritance: 'inherit-vault-retention',
        'match-backup-expire-time', or None for unspecified.
      encryption_config: Optional encryption configuration message.

    Returns:
      A long running operation.
    """
    parent = resource.Parent().RelativeName()
    backup_vault_id = resource.Name()
    backup_vault = self.messages.BackupVault(
        backupMinimumEnforcedRetentionDuration=backup_min_enforced_retention,
        description=description,
        labels=labels,
        effectiveTime=effective_time,
        accessRestriction=self.ParseAccessRestrictionEnum(access_restriction),
        encryptionConfig=encryption_config,
    )
    backup_vault.backupRetentionInheritance = (
        self.ParseBackupRetentionInheritanceEnum(backup_retention_inheritance)
    )
    request_id = command_util.GenerateRequestId()
    request = self.messages.BackupdrProjectsLocationsBackupVaultsCreateRequest(
        backupVault=backup_vault,
        backupVaultId=backup_vault_id,
        parent=parent,
        requestId=request_id,
    )
    return self.service.Create(request)

  def ParseBackupRetentionInheritanceEnum(
      self, backup_retention_inheritance_str: Optional[str]
  ):
    """Maps a flag string to the BackupRetentionInheritance enum value.

    Args:
      backup_retention_inheritance_str: The flag value, or None.

    Returns:
      The matching BackupRetentionInheritanceValueValuesEnum value
      (UNSPECIFIED when the input is None).

    Raises:
      ValueError: If the string is not a recognized flag value.
    """
    if backup_retention_inheritance_str is None:
      return (
          self.messages.BackupVault.BackupRetentionInheritanceValueValuesEnum.BACKUP_RETENTION_INHERITANCE_UNSPECIFIED
      )
    if backup_retention_inheritance_str == 'inherit-vault-retention':
      return (
          self.messages.BackupVault.BackupRetentionInheritanceValueValuesEnum.INHERIT_VAULT_RETENTION
      )
    if backup_retention_inheritance_str == 'match-backup-expire-time':
      return (
          self.messages.BackupVault.BackupRetentionInheritanceValueValuesEnum.MATCH_BACKUP_EXPIRE_TIME
      )
    # Previously an unrecognized value silently fell through and returned
    # None; fail loudly instead, mirroring ParseAccessRestrictionEnum.
    raise ValueError(
        'Invalid backup retention inheritance:'
        f' {backup_retention_inheritance_str}'
    )

  def ParseAccessRestrictionEnum(self, access_restriction_str: Optional[str]):
    """Maps a flag string to the AccessRestriction enum value.

    Args:
      access_restriction_str: The flag value, or None.

    Returns:
      The matching AccessRestrictionValueValuesEnum value
      (WITHIN_ORGANIZATION when the input is None).

    Raises:
      ValueError: If the string is not a recognized flag value.
    """
    if access_restriction_str is None:
      # Default access restriction when the flag is not supplied.
      return (
          self.messages.BackupVault.AccessRestrictionValueValuesEnum.WITHIN_ORGANIZATION
      )
    # AccessRestriction(...) itself raises ValueError for unknown strings.
    access_restriction = AccessRestriction(access_restriction_str)
    if access_restriction == AccessRestriction.WITHIN_PROJECT:
      return (
          self.messages.BackupVault.AccessRestrictionValueValuesEnum.WITHIN_PROJECT
      )
    elif access_restriction == AccessRestriction.WITHIN_ORGANIZATION:
      return (
          self.messages.BackupVault.AccessRestrictionValueValuesEnum.WITHIN_ORGANIZATION
      )
    elif access_restriction == AccessRestriction.UNRESTRICTED:
      return (
          self.messages.BackupVault.AccessRestrictionValueValuesEnum.UNRESTRICTED
      )
    elif (
        access_restriction
        == AccessRestriction.WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA
    ):
      return (
          self.messages.BackupVault.AccessRestrictionValueValuesEnum.WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA
      )
    else:
      raise ValueError(f'Invalid access restriction: {access_restriction_str}')

  def Delete(
      self,
      resource,
      ignore_inactive_datasources: bool,
      ignore_backup_plan_references: bool,
      allow_missing: bool,
  ) -> Any:
    """Deletes a Backup Vault.

    Args:
      resource: The backup vault resource to delete.
      ignore_inactive_datasources: Force deletion despite inactive data
        sources (maps to the request's `force` field).
      ignore_backup_plan_references: Delete even when backup plans still
        reference the vault.
      allow_missing: Succeed even if the vault does not exist.

    Returns:
      A long running operation.
    """
    request_id = command_util.GenerateRequestId()
    request = self.messages.BackupdrProjectsLocationsBackupVaultsDeleteRequest(
        name=resource.RelativeName(),
        force=ignore_inactive_datasources,
        ignoreBackupPlanReferences=ignore_backup_plan_references,
        allowMissing=allow_missing,
        requestId=request_id,
    )
    return self.service.Delete(request)

  def List(
      self,
      parent_ref,
      limit=None,
      page_size: int = 100,
  ) -> Sequence[Any]:
    """Lists Backup Vaults under the given parent, paging automatically.

    Args:
      parent_ref: The parent (location) resource reference.
      limit: Optional maximum number of vaults to yield.
      page_size: Number of vaults requested per page.

    Returns:
      A generator of BackupVault messages.
    """
    request = self.messages.BackupdrProjectsLocationsBackupVaultsListRequest(
        parent=parent_ref.RelativeName()
    )
    return list_pager.YieldFromList(
        self.service,
        request,
        batch_size_attribute='pageSize',
        batch_size=page_size,
        limit=limit,
        field='backupVaults',
    )

  def ParseUpdate(
      self,
      description: Optional[str],
      effective_time: Optional[str],
      backup_min_enforced_retention: Optional[str],
      access_restriction: Optional[str],
  ):
    """Builds the partial BackupVault message used by an update request.

    Args:
      description: New description, or None to leave it unchanged.
      effective_time: New effective time, or None to leave it unchanged.
      backup_min_enforced_retention: New enforced retention duration string.
      access_restriction: New access restriction flag value, or None.

    Returns:
      A BackupVault message carrying only the fields to update.
    """
    updated_bv = self.messages.BackupVault()
    if description is not None:
      updated_bv.description = description
    if effective_time is not None:
      updated_bv.effectiveTime = effective_time
    # 'Nones' is the sentinel an unset retention becomes when the duration
    # is stringified with an 's' suffix (str(None) + 's') — presumably by
    # the calling command's duration conversion; TODO confirm against
    # command_util.
    if backup_min_enforced_retention != 'Nones':
      updated_bv.backupMinimumEnforcedRetentionDuration = (
          backup_min_enforced_retention
      )
    if access_restriction is not None:
      access_restriction_enum = self.ParseAccessRestrictionEnum(
          access_restriction
      )
      updated_bv.accessRestriction = access_restriction_enum
    return updated_bv

  def Update(
      self,
      resource,
      backup_vault,
      force_update: bool,
      force_update_access_restriction: bool,
      update_mask: Optional[str],
  ) -> Any:
    """Patches a Backup Vault with the given update mask.

    Args:
      resource: The backup vault resource to update.
      backup_vault: The partial BackupVault message (see ParseUpdate).
      force_update: Force the update (maps to the request's `force` field).
      force_update_access_restriction: Force an access restriction change.
      update_mask: Comma-separated field mask of fields to update.

    Returns:
      A long running operation.
    """
    request_id = command_util.GenerateRequestId()
    request = self.messages.BackupdrProjectsLocationsBackupVaultsPatchRequest(
        backupVault=backup_vault,
        name=resource.RelativeName(),
        updateMask=update_mask,
        requestId=request_id,
        force=force_update,
        forceUpdateAccessRestriction=force_update_access_restriction,
    )
    return self.service.Patch(request)

  def Describe(self, resource) -> Any:
    """Fetches a single Backup Vault by resource name."""
    request = self.messages.BackupdrProjectsLocationsBackupVaultsGetRequest(
        name=resource.RelativeName(),
    )
    return self.service.Get(request)

View File

@@ -0,0 +1,618 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Backups client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.api_lib.backupdr.restore_util import ComputeUtil
from googlecloudsdk.api_lib.backupdr.restore_util import DiskUtil
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.backupdr import util as command_util
from googlecloudsdk.core import resources
from googlecloudsdk.generated_clients.apis.backupdr.v1 import backupdr_v1_messages
class ComputeRestoreConfig(util.RestrictedDict):
  """Restricted key/value configuration for Compute instance restores."""

  def __init__(self, *args, **kwargs):
    # Keys callers may set on this config; the RestrictedDict base class
    # receives the allowed-key list as its first argument.
    allowed_keys = [
        "Name",
        "TargetZone",
        "TargetProject",
        "NetworkInterfaces",
        "ServiceAccount",
        "Scopes",
        "NoScopes",
        "CreateDisks",
        "Description",
        "Metadata",
        "Labels",
        "Tags",
        "MachineType",
        "Hostname",
        "EnableUefiNetworking",
        "ThreadsPerCore",
        "VisibleCoreCount",
        "Accelerator",
        "MinCpuPlatform",
        "MaintenancePolicy",
        "Preemptible",
        "RestartOnFailure",
        "MinNodeCpus",
        "ProvisioningModel",
        "InstanceTerminationAction",
        "LocalSsdRecoveryTimeout",
        "NodeAffinityFile",
        "ReservationAffinity",
        "Reservation",
        "EnableDisplayDevice",
        "CanIpForward",
        "PrivateIpv6GoogleAccessType",
        "NetworkPerformanceConfigs",
        "ConfidentialCompute",
        "DeletionProtection",
        "ResourceManagerTags",
        "ResourcePolicies",
        "KeyRevocationActionType",
        "InstanceKmsKey",
        "ClearOverridesFieldMask",
    ]
    super().__init__(allowed_keys, *args, **kwargs)
class DiskRestoreConfig(util.RestrictedDict):
  """Restricted key/value configuration for disk restores."""

  def __init__(self, *args, **kwargs):
    # Keys callers may set on this config; the RestrictedDict base class
    # receives the allowed-key list as its first argument.
    allowed_keys = [
        "Name",
        "TargetZone",
        "TargetRegion",
        "TargetProject",
        "ReplicaZones",
        "Description",
        "Labels",
        "Licenses",
        "GuestOsFeatures",
        "ConfidentialCompute",
        "Type",
        "AccessMode",
        "ResourcePolicies",
        "ProvisionedIops",
        "KmsKey",
        "Architecture",
        "Size",
        "ProvisionedThroughput",
        "StoragePool",
        "ClearOverridesFieldMask",
    ]
    super().__init__(allowed_keys, *args, **kwargs)
class BackupsClient(util.BackupDrClientBase):
  """Cloud Backup and DR Backups client."""

  def __init__(self):
    super().__init__()
    # Backups live under backupVaults/*/dataSources/* in the API surface.
    self.service = (
        self.client.projects_locations_backupVaults_dataSources_backups
    )
def Delete(
self, resource: resources.Resource
) -> backupdr_v1_messages.Operation:
"""Deletes the given backup.
Args:
resource: The backup to be deleted.
Returns:
A long running operation
"""
request_id = command_util.GenerateRequestId()
request = self.messages.BackupdrProjectsLocationsBackupVaultsDataSourcesBackupsDeleteRequest(
name=resource.RelativeName(), requestId=request_id
)
return self.service.Delete(request)
  def RestoreCompute(self, resource, restore_config: ComputeRestoreConfig):
    """Restores the given backup as a Compute Engine instance.

    Builds a RestoreBackupRequest from the entries present in
    restore_config; only keys that are present produce fields on the
    request, so unset flags leave the corresponding properties empty.

    Args:
      resource: The backup to be restored.
      restore_config: Restore configuration (see ComputeRestoreConfig for
        the supported keys).

    Returns:
      A long running operation
    """
    restore_request = self.messages.RestoreBackupRequest()
    restore_request.computeInstanceRestoreProperties = (
        self.messages.ComputeInstanceRestoreProperties(
            name=restore_config["Name"],
        )
    )
    restore_request.computeInstanceTargetEnvironment = (
        self.messages.ComputeInstanceTargetEnvironment(
            zone=restore_config["TargetZone"],
            project=restore_config["TargetProject"],
        )
    )
    # Network Interface
    if "NetworkInterfaces" in restore_config:
      network_interfaces_message = ComputeUtil.ParserNetworkInterface(
          self.messages, restore_config["NetworkInterfaces"]
      )
      if network_interfaces_message:
        restore_request.computeInstanceRestoreProperties.networkInterfaces.extend(
            network_interfaces_message
        )
    # Service Account & Scopes
    # NoScopes maps to an explicit empty scope list, which is distinct from
    # leaving Scopes unset (None).
    service_accounts_message = ComputeUtil.ParserServiceAccount(
        self.messages,
        restore_config.get("ServiceAccount", None),
        restore_config.get(
            "Scopes", [] if restore_config.get("NoScopes", False) else None
        ),
    )
    if service_accounts_message:
      restore_request.computeInstanceRestoreProperties.serviceAccounts = (
          service_accounts_message
      )
    # Create Disks
    if "CreateDisks" in restore_config:
      disks_message = ComputeUtil.ParserDisks(
          self.messages, restore_config["CreateDisks"]
      )
      if disks_message:
        restore_request.computeInstanceRestoreProperties.disks.extend(
            disks_message
        )
    # Description
    if "Description" in restore_config:
      restore_request.computeInstanceRestoreProperties.description = (
          restore_config["Description"]
      )
    # Metadata
    if "Metadata" in restore_config:
      metadata_message = ComputeUtil.ParseMetadata(
          self.messages, restore_config["Metadata"]
      )
      if metadata_message:
        restore_request.computeInstanceRestoreProperties.metadata = (
            metadata_message
        )
    # Labels
    if "Labels" in restore_config:
      labels_message = ComputeUtil.ParseLabels(
          self.messages, restore_config["Labels"]
      )
      if labels_message:
        restore_request.computeInstanceRestoreProperties.labels = labels_message
    # Tags
    if "Tags" in restore_config:
      tags_message = self.messages.Tags(items=restore_config["Tags"])
      if tags_message:
        restore_request.computeInstanceRestoreProperties.tags = tags_message
    # Machine Type
    if "MachineType" in restore_config:
      restore_request.computeInstanceRestoreProperties.machineType = (
          restore_config["MachineType"]
      )
    # Hostname
    if "Hostname" in restore_config:
      restore_request.computeInstanceRestoreProperties.hostname = (
          restore_config["Hostname"]
      )
    # AdvancedMachineFeatures
    # EnableUefiNetworking, ThreadsPerCore, VisibleCoreCount
    advanced_machine_features_message = (
        ComputeUtil.ParseAdvancedMachineFeatures(
            self.messages,
            restore_config.get("EnableUefiNetworking", None),
            restore_config.get("ThreadsPerCore", None),
            restore_config.get("VisibleCoreCount", None),
        )
    )
    if advanced_machine_features_message:
      restore_request.computeInstanceRestoreProperties.advancedMachineFeatures = (
          advanced_machine_features_message
      )
    # Accelerator
    if "Accelerator" in restore_config:
      accelerators_message = ComputeUtil.ParseAccelerator(
          self.messages, restore_config["Accelerator"]
      )
      if accelerators_message:
        restore_request.computeInstanceRestoreProperties.guestAccelerators = (
            accelerators_message
        )
    # MinCpuPlatform
    if "MinCpuPlatform" in restore_config:
      restore_request.computeInstanceRestoreProperties.minCpuPlatform = (
          restore_config["MinCpuPlatform"]
      )
    # Scheduling Flags
    # A Scheduling message is only attached when at least one scheduling
    # flag is present; the per-flag blocks below then fill it in.
    if any(
        flag in restore_config
        for flag in [
            "MaintenancePolicy",
            "Preemptible",
            "RestartOnFailure",
            "MinNodeCpus",
            "ProvisioningModel",
            "InstanceTerminationAction",
            "LocalSsdRecoveryTimeout",
            "NodeAffinityFile",
        ]
    ):
      restore_request.computeInstanceRestoreProperties.scheduling = (
          self.messages.Scheduling()
      )
      # MaintenancePolicy
      if "MaintenancePolicy" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.onHostMaintenance = self.messages.Scheduling.OnHostMaintenanceValueValuesEnum(
            restore_config["MaintenancePolicy"]
        )
      # Preemptible
      if "Preemptible" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.preemptible = restore_config[
            "Preemptible"
        ]
      # RestartOnFailure
      if "RestartOnFailure" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.automaticRestart = restore_config[
            "RestartOnFailure"
        ]
      # MinNodeCpus
      if "MinNodeCpus" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.minNodeCpus = restore_config[
            "MinNodeCpus"
        ]
      # ProvisioningModel
      if "ProvisioningModel" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.provisioningModel = self.messages.Scheduling.ProvisioningModelValueValuesEnum(
            restore_config["ProvisioningModel"]
        )
      # InstanceTerminationAction
      if "InstanceTerminationAction" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.instanceTerminationAction = self.messages.Scheduling.InstanceTerminationActionValueValuesEnum(
            restore_config["InstanceTerminationAction"]
        )
      # LocalSsdRecoveryTimeout
      if "LocalSsdRecoveryTimeout" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.localSsdRecoveryTimeout = self.messages.SchedulingDuration(
            seconds=restore_config["LocalSsdRecoveryTimeout"]
        )
      # NodeAffinityFile
      if "NodeAffinityFile" in restore_config:
        restore_request.computeInstanceRestoreProperties.scheduling.nodeAffinities = ComputeUtil.GetNodeAffinitiesFromFile(
            self.messages, restore_config["NodeAffinityFile"]
        )
    # ReservationAffinity & Reservation
    if "ReservationAffinity" in restore_config:
      restore_request.computeInstanceRestoreProperties.reservationAffinity = (
          ComputeUtil.ParseReservationAffinity(
              self.messages,
              restore_config["ReservationAffinity"],
              restore_config.get("Reservation", None),
          )
      )
    # EnableDisplayDevice
    if "EnableDisplayDevice" in restore_config:
      restore_request.computeInstanceRestoreProperties.displayDevice = (
          self.messages.DisplayDevice(
              enableDisplay=restore_config["EnableDisplayDevice"]
          )
      )
    # CanIpForward
    if "CanIpForward" in restore_config:
      restore_request.computeInstanceRestoreProperties.canIpForward = (
          restore_config["CanIpForward"]
      )
    # PrivateIpv6GoogleAccess
    if "PrivateIpv6GoogleAccessType" in restore_config:
      restore_request.computeInstanceRestoreProperties.privateIpv6GoogleAccess = self.messages.ComputeInstanceRestoreProperties.PrivateIpv6GoogleAccessValueValuesEnum(
          restore_config["PrivateIpv6GoogleAccessType"]
      )
    # NetworkPerformanceConfigs
    if "NetworkPerformanceConfigs" in restore_config:
      network_performance_configs = self.messages.NetworkPerformanceConfig()
      if (
          "total-egress-bandwidth-tier"
          in restore_config["NetworkPerformanceConfigs"]
      ):
        network_performance_configs.totalEgressBandwidthTier = self.messages.NetworkPerformanceConfig.TotalEgressBandwidthTierValueValuesEnum(
            restore_config["NetworkPerformanceConfigs"][
                "total-egress-bandwidth-tier"
            ]
        )
      restore_request.computeInstanceRestoreProperties.networkPerformanceConfig = (
          network_performance_configs
      )
    # ConfidentialCompute
    if "ConfidentialCompute" in restore_config:
      restore_request.computeInstanceRestoreProperties.confidentialInstanceConfig = self.messages.ConfidentialInstanceConfig(
          enableConfidentialCompute=restore_config["ConfidentialCompute"]
      )
    # DeletionProtection
    if "DeletionProtection" in restore_config:
      restore_request.computeInstanceRestoreProperties.deletionProtection = (
          restore_config["DeletionProtection"]
      )
    # ResourceManagerTags
    if "ResourceManagerTags" in restore_config:
      restore_request.computeInstanceRestoreProperties.params = self.messages.InstanceParams(
          resourceManagerTags=self.messages.InstanceParams.ResourceManagerTagsValue(
              additionalProperties=[
                  self.messages.InstanceParams.ResourceManagerTagsValue.AdditionalProperty(
                      key=key, value=value
                  )
                  for key, value in restore_config[
                      "ResourceManagerTags"
                  ].items()
              ]
          )
      )
    # ResourcePolicies
    if "ResourcePolicies" in restore_config:
      restore_request.computeInstanceRestoreProperties.resourcePolicies = (
          restore_config["ResourcePolicies"]
      )
    # KeyRevocationActionType
    if "KeyRevocationActionType" in restore_config:
      restore_request.computeInstanceRestoreProperties.keyRevocationActionType = self.messages.ComputeInstanceRestoreProperties.KeyRevocationActionTypeValueValuesEnum(
          restore_config["KeyRevocationActionType"]
      )
    # InstanceKmsKey
    if "InstanceKmsKey" in restore_config:
      restore_request.computeInstanceRestoreProperties.instanceEncryptionKey = (
          self.messages.CustomerEncryptionKey(
              kmsKeyName=restore_config["InstanceKmsKey"],
          )
      )
    # ClearOverridesFieldMask
    if "ClearOverridesFieldMask" in restore_config:
      restore_request.clearOverridesFieldMask = restore_config[
          "ClearOverridesFieldMask"
      ]
    request = self.messages.BackupdrProjectsLocationsBackupVaultsDataSourcesBackupsRestoreRequest(
        name=resource.RelativeName(), restoreBackupRequest=restore_request
    )
    return self.service.Restore(request)
  def RestoreDisk(self, resource, restore_config: DiskRestoreConfig):
    """Restores the given backup.

    Args:
      resource: The backup to be restored.
      restore_config: Restore configuration.

    Returns:
      A long running operation

    Raises:
      exceptions.InvalidArgumentException: If neither or both of
        ``TargetZone`` and ``TargetRegion`` are present in restore_config.
    """
    restore_request = self.messages.RestoreBackupRequest()
    restore_request.diskRestoreProperties = self.messages.DiskRestoreProperties(
        name=restore_config["Name"],
    )
    target_zone = restore_config.get("TargetZone", None)
    target_region = restore_config.get("TargetRegion", None)
    # Exactly one of target zone (zonal disk) or target region (regional
    # disk) must be supplied.
    if target_zone is None and target_region is None:
      raise exceptions.InvalidArgumentException(
          "target_zone",
          "Target zone or target region is required for disk restore",
      )
    if target_zone is not None and target_region is not None:
      raise exceptions.InvalidArgumentException(
          "target_zone",
          "Both Target zone and target region cannot be specified for disk"
          " restore",
      )
    if target_zone is not None:
      restore_request.diskTargetEnvironment = (
          self.messages.DiskTargetEnvironment(
              zone=restore_config["TargetZone"],
              project=restore_config["TargetProject"],
          )
      )
    elif target_region is not None:
      # Regional disks additionally require replica zones.
      restore_request.regionDiskTargetEnvironment = (
          self.messages.RegionDiskTargetEnvironment(
              region=restore_config["TargetRegion"],
              project=restore_config["TargetProject"],
              replicaZones=restore_config["ReplicaZones"],
          )
      )
    # Each optional key below maps 1:1 onto a DiskRestoreProperties field;
    # absent keys leave the corresponding field unset.
    # Description
    if "Description" in restore_config:
      restore_request.diskRestoreProperties.description = restore_config[
          "Description"
      ]
    # Labels
    if "Labels" in restore_config:
      labels_message = DiskUtil.ParseLabels(
          self.messages, restore_config["Labels"]
      )
      if labels_message:
        restore_request.diskRestoreProperties.labels = labels_message
    # Licenses
    if "Licenses" in restore_config:
      restore_request.diskRestoreProperties.licenses = restore_config[
          "Licenses"
      ]
    # ConfidentialCompute
    if "ConfidentialCompute" in restore_config:
      restore_request.diskRestoreProperties.enableConfidentialCompute = (
          restore_config["ConfidentialCompute"]
      )
    # Type
    if "Type" in restore_config:
      restore_request.diskRestoreProperties.type = restore_config["Type"]
    # Size
    if "Size" in restore_config:
      restore_request.diskRestoreProperties.sizeGb = restore_config["Size"]
    # StoragePool
    if "StoragePool" in restore_config:
      restore_request.diskRestoreProperties.storagePool = restore_config[
          "StoragePool"
      ]
    # Architecture
    if "Architecture" in restore_config:
      restore_request.diskRestoreProperties.architecture = (
          self.messages.DiskRestoreProperties.ArchitectureValueValuesEnum(
              restore_config["Architecture"]
          )
      )
    # AccessMode
    if "AccessMode" in restore_config:
      restore_request.diskRestoreProperties.accessMode = (
          self.messages.DiskRestoreProperties.AccessModeValueValuesEnum(
              restore_config["AccessMode"]
          )
      )
    # ResourcePolicies
    if "ResourcePolicies" in restore_config:
      restore_request.diskRestoreProperties.resourcePolicy = restore_config[
          "ResourcePolicies"
      ]
    # ProvisionedIops
    if "ProvisionedIops" in restore_config:
      restore_request.diskRestoreProperties.provisionedIops = restore_config[
          "ProvisionedIops"
      ]
    # ProvisionedThroughput
    if "ProvisionedThroughput" in restore_config:
      restore_request.diskRestoreProperties.provisionedThroughput = (
          restore_config["ProvisionedThroughput"]
      )
    # KmsKey
    if "KmsKey" in restore_config:
      restore_request.diskRestoreProperties.diskEncryptionKey = (
          self.messages.CustomerEncryptionKey(
              kmsKeyName=restore_config["KmsKey"],
          )
      )
    # ClearOverridesFieldMask
    if "ClearOverridesFieldMask" in restore_config:
      restore_request.clearOverridesFieldMask = restore_config[
          "ClearOverridesFieldMask"
      ]
    # GuestOsFeatures
    if "GuestOsFeatures" in restore_config:
      guest_os_features = []
      for feature in restore_config["GuestOsFeatures"]:
        guest_os_features.append(
            self.messages.GuestOsFeature(
                type=self.messages.GuestOsFeature.TypeValueValuesEnum(feature)
            )
        )
      restore_request.diskRestoreProperties.guestOsFeature = guest_os_features
    request = self.messages.BackupdrProjectsLocationsBackupVaultsDataSourcesBackupsRestoreRequest(
        name=resource.RelativeName(), restoreBackupRequest=restore_request
    )
    return self.service.Restore(request)
def ParseUpdate(self, enforced_retention, expire_time):
updated_backup = self.messages.Backup()
if enforced_retention is not None:
updated_backup.enforcedRetentionEndTime = enforced_retention
if expire_time is not None:
updated_backup.expireTime = expire_time
return updated_backup
def Update(self, resource, backup, update_mask):
request_id = command_util.GenerateRequestId()
request = self.messages.BackupdrProjectsLocationsBackupVaultsDataSourcesBackupsPatchRequest(
backup=backup,
name=resource.RelativeName(),
updateMask=update_mask,
requestId=request_id,
)
return self.service.Patch(request)
def FetchForResourceType(
self,
resource,
resource_type,
filter_expression=None,
page_size=None,
order_by=None,
):
request = self.messages.BackupdrProjectsLocationsBackupVaultsDataSourcesBackupsFetchForResourceTypeRequest(
parent=resource.RelativeName(),
resourceType=resource_type,
pageSize=page_size,
filter=filter_expression,
orderBy=order_by,
)
return self.service.FetchForResourceType(request)

View File

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Backup plan associations client."""
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.core import properties
class DataSourceReferencesClient(util.BackupDrClientBase):
  """Cloud Backup and DR Data Source References client."""

  def __init__(self):
    super(DataSourceReferencesClient, self).__init__()
    self.service = self.client.projects_locations_dataSourceReferences

  def FetchForResourceType(
      self,
      location,
      resource_type,
      filter_expression=None,
      page_size=None,
      order_by=None,
  ):
    """Fetches data source references in a location for a resource type.

    Args:
      location: The location to list data source references in.
      resource_type: The resource type to fetch references for.
      filter_expression: Optional server-side filter string.
      page_size: Optional maximum number of results per page.
      order_by: Optional sort order.

    Returns:
      The FetchForResourceType API response.
    """
    # The parent is scoped to the currently configured project.
    project_id = properties.VALUES.core.project.GetOrFail()
    fetch_request = self.messages.BackupdrProjectsLocationsDataSourceReferencesFetchForResourceTypeRequest(
        parent=f'projects/{project_id}/locations/{location}',
        resourceType=resource_type,
        pageSize=page_size,
        filter=filter_expression,
        orderBy=order_by,
    )
    return self.service.FetchForResourceType(fetch_request)

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Management Servers client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.backupdr import util
class ManagementServersClient(util.BackupDrClientBase):
  """Cloud Backup and DR Management client."""

  def __init__(self):
    super(ManagementServersClient, self).__init__()
    self.service = self.client.projects_locations_managementServers

  def Create(self, resource, network=None):
    """Creates a BACKUP_RESTORE management server.

    Args:
      resource: Resource reference for the management server to create.
      network: Optional network to attach; omitted from the request when
        None.

    Returns:
      A long running operation.
    """
    server_kwargs = {
        'type': (
            self.messages.ManagementServer.TypeValueValuesEnum.BACKUP_RESTORE
        ),
    }
    if network is not None:
      server_kwargs['networks'] = [
          self.messages.NetworkConfig(network=network)
      ]
    create_request = (
        self.messages.BackupdrProjectsLocationsManagementServersCreateRequest(
            parent=resource.Parent().RelativeName(),
            managementServer=self.messages.ManagementServer(**server_kwargs),
            managementServerId=resource.Name(),
        )
    )
    return self.service.Create(create_request)

  def Delete(self, resource):
    """Deletes the given management server.

    Args:
      resource: Resource reference for the management server to delete.

    Returns:
      A long running operation.
    """
    delete_request = (
        self.messages.BackupdrProjectsLocationsManagementServersDeleteRequest(
            name=resource.RelativeName()
        )
    )
    return self.service.Delete(delete_request)

View File

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for list filter parameter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import string
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core.resource import resource_expr_rewrite
class ListFilterRewrite(resource_expr_rewrite.Backend):
  """Limit filter expressions to those supported by the ProtectionSummary API backend."""

  # Maps camelCase client-side keys to the snake_case names the backend
  # expects.
  _VALID_FILTER_MAPPING = {
      'targetResourceDisplayName': 'target_resource_display_name',
      'targetResourceType': 'target_resource_type',
      'backupConfigured': 'backup_configured',
      'vaulted': 'vaulted',
      'backupConfigsDetails.backupConfigSourceDisplayName':
          'backup_configs_details.backup_config_source_display_name',
      'backupConfigsDetails.type': 'backup_configs_details.type',
  }
  # Server-side filter fields and their value types. bool-typed fields are
  # emitted unquoted; everything else is quoted. (The original used the
  # `string` *module* as a marker for string fields; the builtin `str` is
  # the correct marker and behaves identically in the comparisons below.)
  _VALID_SERVER_FILTERS = {
      'target_resource_display_name': str,
      'target_resource_type': str,
      'backup_configured': bool,
      'vaulted': bool,
      'backup_configs_details.backup_config_source_display_name': str,
      'backup_configs_details.type': str,
  }

  def RewriteTerm(self, key, op, operand, key_type):
    """Rewrites a <key op operand> term of a filter expression.

    Args:
      key: The key, a string.
      op: The operator, a string.
      operand: The operand, a string or list of strings.
      key_type: The key type, unknown if None.

    Returns:
      the new term, as a string.
    """
    key = self._RewriteKey(key)
    op = self._RewriteOp(key, op)
    operand = self._RewriteOperand(key, operand)
    return f'{key}{op}{operand}'

  def Parenthesize(self, expression):
    # Override parenthesize to not parenthesize AND/OR.
    return expression

  def _RewriteKey(self, key):
    """Maps a client key to its server name, raising on unknown keys."""
    if key in self._VALID_FILTER_MAPPING:
      return self._VALID_FILTER_MAPPING[key]
    if key in self._VALID_SERVER_FILTERS:
      return key
    else:
      raise exceptions.InvalidArgumentException(
          'filter',
          'Invalid filter key: %s. Valid filters are: %s'
          % (key, ', '.join(self._VALID_SERVER_FILTERS.keys()))
      )

  # _RewriteOp replaces the EQ operator with HAS for member fields such as:
  # backup_configs_details.backup_config_source_display_name,
  # backup_configs_details.type
  def _RewriteOp(self, key, op):
    if '.' not in key:
      return op
    if op == '=':
      return ':'
    return op

  def _RewriteOperand(self, key, operand):
    # If the key is a boolean field then do not quote the operand.
    if self._VALID_SERVER_FILTERS[key] is bool:
      return operand
    return self.QuoteOperand(operand, always=True)

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource Backup Configs API Client for Protection Summary."""
from apitools.base.py import exceptions as apitools_exceptions
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.backupdr import util
from googlecloudsdk.calliope import exceptions
class ResourceBackupConfigClient(util.BackupDrClientBase):
  """Resource Backup Configs API Client for Protection Summary."""

  def __init__(self):
    super(ResourceBackupConfigClient, self).__init__()
    self.service = self.client.projects_locations_resourceBackupConfigs

  def List(self, parent, filters, page_size=None, limit=None, order_by=None):
    """Yields resource backup configs under the given parent.

    Args:
      parent: Parent resource name to list under.
      filters: Server-side filter string.
      page_size: Optional maximum number of results per page.
      limit: Optional overall cap on yielded results.
      order_by: Optional sort order.

    Yields:
      ResourceBackupConfig messages.

    Raises:
      exceptions.HttpException: On an HTTP error from the API.
    """
    list_request = (
        self.messages.BackupdrProjectsLocationsResourceBackupConfigsListRequest(
            parent=parent,
            filter=filters,
            pageSize=page_size,
            orderBy=order_by,
        )
    )
    try:
      yield from list_pager.YieldFromList(
          self.service,
          list_request,
          batch_size_attribute='pageSize',
          batch_size=page_size,
          limit=limit,
          field='resourceBackupConfigs',
      )
    except apitools_exceptions.HttpError as e:
      raise exceptions.HttpException(e, util.HTTP_ERROR_FORMAT)

  def Get(self, name):
    """Gets a single resource backup config by name."""
    get_request = (
        self.messages.BackupdrProjectsLocationsResourceBackupConfigsGetRequest(
            name=name
        )
    )
    return self.service.Get(get_request)

View File

@@ -0,0 +1,441 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Backup and DR restore command apis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import types
from typing import Any, Dict, List
from apitools.base.py import encoding
from googlecloudsdk.api_lib.compute import alias_ip_range_utils
from googlecloudsdk.api_lib.compute import constants as compute_constants
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import yaml
from googlecloudsdk.core.util import files
class ComputeUtil(object):
  """Util class for Restoring Compute Engine Instance."""

  @staticmethod
  def _HasIpV6AccessConfig(network_interface: Dict[str, Any]) -> bool:
    # An external IPv6 address implies an IPv6 access config was requested.
    return "external-ipv6-address" in network_interface

  @staticmethod
  def _HasIpV4AccessConfig(network_interface: Dict[str, Any]) -> bool:
    # A NAT IP address implies an IPv4 access config was requested.
    return "address" in network_interface

  @staticmethod
  def ParserNetworkInterface(
      client_messages: types.ModuleType, network_interfaces
  ):
    """Parses the network interface data into client messages.

    Args:
      client_messages: The generated API message module.
      network_interfaces: A dictionary containing the network interface data

    Returns:
      List of parsed client messages for Network Interface

    Raises:
      InvalidArgumentException: If an IPv6 address is supplied without its
        prefix length.
    """
    if network_interfaces is None:
      return None
    messages = list()
    for network_interface in network_interfaces:
      message = client_messages.NetworkInterface()
      access_config = client_messages.AccessConfig()
      access_config_ipv6 = client_messages.AccessConfig()
      if "network" in network_interface:
        message.network = network_interface["network"]
      if "subnet" in network_interface:
        message.subnetwork = network_interface["subnet"]
      if "private-network-ip" in network_interface:
        message.networkIP = network_interface["private-network-ip"]
      if "internal-ipv6-address" in network_interface:
        message.ipv6Address = network_interface["internal-ipv6-address"]
        # An internal IPv6 address must always come with its prefix length.
        if "internal-ipv6-prefix-length" in network_interface:
          message.internalIpv6PrefixLength = network_interface[
              "internal-ipv6-prefix-length"
          ]
        else:
          raise exceptions.InvalidArgumentException(
              "internal-ipv6-prefix-length",
              "Prefix length of the provided IPv6 address is expected but not"
              " found",
          )
      if "address" in network_interface:
        access_config.natIP = network_interface["address"]
      if "external-ipv6-address" in network_interface:
        access_config_ipv6.externalIpv6 = network_interface[
            "external-ipv6-address"
        ]
        # Likewise, an external IPv6 address requires its prefix length.
        if "external-ipv6-prefix-length" in network_interface:
          access_config_ipv6.externalIpv6PrefixLength = network_interface[
              "external-ipv6-prefix-length"
          ]
        else:
          raise exceptions.InvalidArgumentException(
              "external-ipv6-prefix-length",
              "Prefix length of the provided IPv6 address is expected but not"
              " found",
          )
      if "network-tier" in network_interface:
        # The same tier applies to both the IPv4 and IPv6 access configs.
        access_config.networkTier = (
            client_messages.AccessConfig.NetworkTierValueValuesEnum(
                network_interface["network-tier"]
            )
        )
        access_config_ipv6.networkTier = (
            client_messages.AccessConfig.NetworkTierValueValuesEnum(
                network_interface["network-tier"]
            )
        )
      if "aliases" in network_interface:
        message.aliasIpRanges = (
            alias_ip_range_utils.CreateAliasIpRangeMessagesFromString(
                client_messages,
                True,
                network_interface["aliases"],
            )
        )
      if "stack-type" in network_interface:
        message.stackType = (
            client_messages.NetworkInterface.StackTypeValueValuesEnum(
                network_interface["stack-type"]
            )
        )
      if "queue-count" in network_interface:
        message.queueCount = network_interface["queue-count"]
      if "nic-type" in network_interface:
        message.nicType = (
            client_messages.NetworkInterface.NicTypeValueValuesEnum(
                network_interface["nic-type"]
            )
        )
      if "network-attachment" in network_interface:
        message.networkAttachment = network_interface["network-attachment"]
      # Only one of IPv4 Access config and IPv6 Access Config can exist.
      if ComputeUtil._HasIpV6AccessConfig(network_interface):
        access_config_ipv6.type = (
            client_messages.AccessConfig.TypeValueValuesEnum.DIRECT_IPV6
        )
        message.ipv6AccessConfigs.extend([access_config_ipv6])
      elif ComputeUtil._HasIpV4AccessConfig(network_interface):
        access_config.type = (
            client_messages.AccessConfig.TypeValueValuesEnum.ONE_TO_ONE_NAT
        )
        message.accessConfigs.extend([access_config])
      messages.append(message)
    return messages

  @staticmethod
  def ParserServiceAccount(
      client_messages: types.ModuleType, service_account: str, scopes: List[str]
  ):
    """Parses the service account data into client messages.

    Args:
      client_messages: The generated API message module.
      service_account: An email id of the service account
      scopes: A list containing the scopes

    Returns:
      List of parsed client messages for Service Account
    """

    def _ConvertAliasToScopes(scopes):
      # Expands gcloud scope aliases to their full URIs; entries with no
      # alias mapping pass through unchanged.
      converted_scopes = list()
      for scope in scopes:
        scope = compute_constants.SCOPES.get(scope, [scope])
        converted_scopes.extend(scope)
      return converted_scopes

    # Fall back to the default service account and default scopes when
    # either is not provided.
    if service_account is None:
      service_account = "default"
    if scopes is None:
      scopes = compute_constants.DEFAULT_SCOPES
    return [
        client_messages.ServiceAccount(
            email=service_account, scopes=_ConvertAliasToScopes(scopes)
        )
    ]

  @staticmethod
  def ParserDisks(
      client_messages: types.ModuleType, disks: List[Dict[str, Any]]
  ):
    """Parses the disk data into client messages.

    Args:
      client_messages: The generated API message module.
      disks: A list of dictionaries containing the disk data

    Returns:
      List of parsed client messages for Disk
    """
    if disks is None:
      return None
    messages = list()
    for disk in disks:
      message = client_messages.AttachedDisk()
      message.initializeParams = client_messages.InitializeParams()
      if "device-name" in disk:
        message.deviceName = disk["device-name"]
      if "name" in disk:
        message.initializeParams.diskName = disk["name"]
      if "replica-zones" in disk:
        message.initializeParams.replicaZones = disk["replica-zones"]
      if "kms-key" in disk:
        message.diskEncryptionKey = (
            client_messages.CustomerEncryptionKey(
                kmsKeyName=disk["kms-key"],
            )
        )
      messages.append(message)
    return messages

  @staticmethod
  def ParseMetadata(
      client_messages: types.ModuleType, metadata: Dict[str, Any]
  ):
    """Parses the metadata data into client messages.

    Args:
      client_messages: The generated API message module.
      metadata: A dictionary containing the metadata

    Returns:
      List of parsed client messages for Metadata
    """
    return client_messages.Metadata(
        items=[
            client_messages.Entry(key=key, value=value)
            for key, value in metadata.items()
        ]
    )

  @staticmethod
  def ParseLabels(client_messages: types.ModuleType, labels: Dict[str, Any]):
    """Parses the labels data into client messages.

    Args:
      client_messages: The generated API message module.
      labels: A dictionary containing the labels

    Returns:
      List of parsed client messages for Labels
    """
    return client_messages.ComputeInstanceRestoreProperties.LabelsValue(
        additionalProperties=[
            client_messages.ComputeInstanceRestoreProperties.LabelsValue.AdditionalProperty(
                key=key, value=value
            )
            for key, value in labels.items()
        ]
    )

  @staticmethod
  def ParseAdvancedMachineFeatures(
      client_messages: types.ModuleType,
      enable_uefi_networking: bool,
      threads_per_core: int,
      visible_core_count: int,
  ):
    """Parses the advanced machine features data into client messages.

    Args:
      client_messages: The generated API message module.
      enable_uefi_networking: Whether to enable UEFI networking, or None.
      threads_per_core: Threads per physical core, or None.
      visible_core_count: Number of visible cores, or None.

    Returns:
      List of parsed client messages for AdvancedMachineFeatures, or None
      when no feature was requested.
    """
    if (
        enable_uefi_networking is None
        and threads_per_core is None
        and visible_core_count is None
    ):
      return None
    message = client_messages.AdvancedMachineFeatures()
    if enable_uefi_networking is not None:
      message.enableUefiNetworking = enable_uefi_networking
    if threads_per_core is not None:
      message.threadsPerCore = threads_per_core
    if visible_core_count is not None:
      message.visibleCoreCount = visible_core_count
    return message

  @staticmethod
  def ParseAccelerator(
      client_messages: types.ModuleType, accelerator: Dict[str, Any]
  ):
    """Parses the accelerator data into client messages.

    Args:
      client_messages: The generated API message module.
      accelerator: A dictionaries containing the accelerator data

    Returns:
      List of parsed client messages for Accelerator, or None when no
      accelerator type was requested.
    """
    if accelerator is None or "type" not in accelerator:
      return None
    return [
        client_messages.AcceleratorConfig(
            acceleratorType=accelerator["type"],
            # The accelerator count defaults to 1 when not specified.
            acceleratorCount=accelerator.get("count", 1),
        )
    ]

  class NodeAffinityFileParseError(core_exceptions.Error):
    """Error raised when node affinity file cannot be parsed."""

  @staticmethod
  def GetNodeAffinitiesFromFile(
      client_messages: types.ModuleType, file_path: str
  ):
    """Parses the node affinity data from file into client messages.

    Args:
      client_messages: The generated API message module.
      file_path: A path to the file containing the node affinity data.

    Returns:
      List of parsed client messages for NodeAffinity

    Raises:
      NodeAffinityFileParseError: If the file is empty, contains an empty
        entry, fails to parse, is missing a key, or has unrecognized
        fields.
    """
    if file_path is None:
      return None
    node_affinities_file = files.ReadFileContents(file_path)
    affinities_yaml = yaml.load(node_affinities_file)
    if not affinities_yaml:
      raise ComputeUtil.NodeAffinityFileParseError(
          "No node affinity labels specified. You must specify at least one "
          "label to create a sole tenancy instance."
      )
    node_affinities = []
    for affinity in affinities_yaml:
      if not affinity:
        raise ComputeUtil.NodeAffinityFileParseError(
            "Empty list item in JSON/YAML file."
        )
      try:
        node_affinity = encoding.PyValueToMessage(
            client_messages.NodeAffinity, affinity
        )
      except Exception as e:  # pylint: disable=broad-except
        raise ComputeUtil.NodeAffinityFileParseError(
            "Failed to parse node affinity values from the file {}.".format(
                file_path
            )
        ) from e
      if not node_affinity.key:
        raise ComputeUtil.NodeAffinityFileParseError(
            "A key must be specified for every node affinity label."
        )
      if node_affinity.all_unrecognized_fields():
        raise ComputeUtil.NodeAffinityFileParseError(
            "Key [{0}] has invalid field formats for: {1}".format(
                node_affinity.key, node_affinity.all_unrecognized_fields()
            )
        )
      node_affinities.append(node_affinity)
    return node_affinities

  # Affinity key used to pin an instance to a specific named reservation.
  RESERVATION_AFFINITY_KEY = "compute.googleapis.com/reservation-name"

  @staticmethod
  def ParseReservationAffinity(
      client_messages: types.ModuleType,
      reservation_affinity: str,
      reservation: str,
  ):
    """Parses the reservation affinity data into client messages.

    Args:
      client_messages: The generated API message module.
      reservation_affinity: type of reservation affinity
      reservation: name of the specific reservation

    Returns:
      List of parsed client messages for ReservationAffinity

    Raises:
      InvalidArgumentException: If affinity is "specific" but no
        reservation name was given.
    """
    if reservation_affinity is None:
      return None
    if reservation_affinity == "any":
      return client_messages.AllocationAffinity(
          consumeReservationType=client_messages.AllocationAffinity.ConsumeReservationTypeValueValuesEnum.ANY_RESERVATION
      )
    if reservation_affinity == "none":
      return client_messages.AllocationAffinity(
          consumeReservationType=client_messages.AllocationAffinity.ConsumeReservationTypeValueValuesEnum.NO_RESERVATION
      )
    if reservation_affinity == "specific":
      if reservation is None:
        raise exceptions.InvalidArgumentException(
            "reservation",
            "Reservation is required for specific reservation affinity",
        )
      return client_messages.AllocationAffinity(
          consumeReservationType=client_messages.AllocationAffinity.ConsumeReservationTypeValueValuesEnum.SPECIFIC_RESERVATION,
          key=ComputeUtil.RESERVATION_AFFINITY_KEY,
          values=[reservation],
      )
    # Unknown affinity values are silently ignored.
    return None
class DiskUtil(object):
  """Util class for Restoring Disk."""

  @staticmethod
  def ParseLabels(client_messages: types.ModuleType, labels: Dict[str, Any]):
    """Converts a labels dict into the DiskRestoreProperties labels message.

    Args:
      client_messages: The generated API message module.
      labels: A dictionary containing the labels

    Returns:
      A DiskRestoreProperties.LabelsValue message holding one
      AdditionalProperty per label.
    """
    labels_value_cls = client_messages.DiskRestoreProperties.LabelsValue
    entries = []
    for label_key, label_value in labels.items():
      entries.append(
          labels_value_cls.AdditionalProperty(key=label_key, value=label_value)
      )
    return labels_value_cls(additionalProperties=entries)

View File

@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR Service Config client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.backupdr import util
class ServiceConfigClient(util.BackupDrClientBase):
  """Cloud Backup and DR Service Config client."""

  def __init__(self):
    super(ServiceConfigClient, self).__init__()
    self.service = self.client.projects_locations_serviceConfig

  def Init(self, location, resource_type):
    """Calls the Backup and DR Initialize service.

    Args:
      location: location of the service config.
      resource_type: resource type for which the service config is being
        initialized.

    Returns:
      A long running operation
    """
    init_body = self.messages.InitializeServiceRequest(
        resourceType=resource_type,
    )
    init_request = (
        self.messages.BackupdrProjectsLocationsServiceConfigInitializeRequest(
            # There is a single service config per location.
            name=f'{location}/serviceConfig',
            initializeServiceRequest=init_body,
        )
    )
    return self.service.Initialize(init_request)

View File

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Backup and DR API utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.core import resources
# API name and version used by default for all Backup and DR clients.
DEFAULT_API_NAME = 'backupdr'
DEFAULT_API_VERSION = 'v1'

# Format string used to surface HTTP errors from the backupdr API.
HTTP_ERROR_FORMAT = (
    'ResponseError: code={status_code}, message={status_message}'
)

# Hint printed for async operations; the placeholder is the operation name.
ASYNC_OPERATION_MESSAGE = (
    'Run [backup-dr operations describe {}]'
    ' to check the status of this operation.'
)
class BackupDrClientBase(object):
  """Base class for Backup and DR API client wrappers."""

  def __init__(self, api_version=DEFAULT_API_VERSION):
    self._client = apis.GetClientInstance('backupdr', api_version)
    self._messages = apis.GetMessagesModule('backupdr', api_version)
    # Subclasses set this to the concrete generated service they wrap.
    self.service = None
    self.operations_service = self.client.projects_locations_operations

  @property
  def client(self):
    """The generated API client instance."""
    return self._client

  @property
  def messages(self):
    """The generated API messages module."""
    return self._messages

  def GetOperationRef(self, operation):
    """Converts an Operation to a Resource that can be used with `waiter.WaitFor`."""
    if operation.name is None:
      # NOTE(review): returns the literal string 'None' instead of a
      # resource reference; presumably callers treat it as a sentinel —
      # confirm before changing.
      return 'None'
    return resources.REGISTRY.ParseRelativeName(
        operation.name, collection='backupdr.projects.locations.operations'
    )

  def WaitForOperation(
      self,
      operation_ref,
      message,
      has_result=True,
      max_wait=datetime.timedelta(seconds=3600),
  ):
    """Waits for an operation to complete.

    Polls the Backup and DR Operation service until the operation completes,
    fails, or max_wait elapses.

    Args:
      operation_ref: a Resource created by GetOperationRef describing the
        operation.
      message: the message to display to the user while they wait.
      has_result: if True, the function will return the target of the
        operation when it completes. If False, nothing will be returned
        (useful for Delete operations)
      max_wait: The time to wait for the operation to succeed before
        returning.

    Returns:
      if has_result = True, a Backup and DR entity.
      Otherwise, None.
    """
    if has_result:
      poller = waiter.CloudOperationPoller(
          self.service, self.operations_service
      )
    else:
      poller = waiter.CloudOperationPollerNoResources(self.operations_service)
    # Use total_seconds() rather than .seconds: the latter only returns the
    # sub-day remainder of a timedelta, so a max_wait of a day or more would
    # previously collapse to (almost) zero.
    return waiter.WaitFor(
        poller,
        operation_ref,
        message,
        max_wait_ms=int(max_wait.total_seconds() * 1000),
    )
class RestrictedDict(dict):
  """Restricted dict only allows specific keys, useful in creating a config object.

  Keys set via __setitem__ or update() are validated against allowed_keys;
  a KeyError is raised for any other key.
  """

  def __init__(self, allowed_keys, *args, **kwargs):
    # NOTE(review): items passed directly to the constructor bypass
    # validation (dict.__init__ does not call __setitem__) — confirm this
    # is intentional before tightening.
    super().__init__(*args, **kwargs)
    self.allowed_keys = allowed_keys

  def __setitem__(self, key, value):
    if key not in self.allowed_keys:
      raise KeyError(
          f"The Key '{key}' is not one of [{', '.join(self.allowed_keys)}]"
      )
    super().__setitem__(key, value)

  def update(self, other=None, **kwargs):
    """Updates the dict, validating every incoming key.

    Args:
      other: Optional mapping or iterable of key/value pairs.
      **kwargs: Additional key/value pairs.

    Raises:
      KeyError: If any incoming key is not in allowed_keys.
    """
    # Normalize all inputs into one dict so mappings, pair iterables and
    # keyword arguments are validated uniformly. This also fixes two bugs
    # in the previous version: update(**kwargs) with no positional argument
    # crashed (dict.update(None) raises TypeError), and keyword-argument
    # keys were never validated at all.
    items = dict(other) if other is not None else {}
    items.update(kwargs)
    invalid_keys = set(items) - set(self.allowed_keys)
    if invalid_keys:
      raise KeyError(
          f'The Keys {invalid_keys} are not part of '
          f' [{",".join(self.allowed_keys)}]'
      )
    super().update(items)