feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Attachment utils for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.command_lib.artifacts import docker_util
from googlecloudsdk.command_lib.artifacts import requests
def GetAttachmentToDownload(args):
  """Resolves the Artifact Registry attachment to download.

  If --oci-version-name was not provided, the attachment resource parsed from
  the command line is fetched directly. Otherwise the OCI version name is used
  to look up the single attachment that references that version.

  Args:
    args: parsed command-line arguments.

  Returns:
    The Attachment message for the requested attachment.

  Raises:
    ar_exceptions.InvalidInputValueError: if no attachment, or more than one
      attachment, matches the given OCI version name.
  """
  if not args.oci_version_name:
    return GetAttachment(args.CONCEPTS.attachment.Parse())
  oci_version = docker_util.ParseDockerVersionStr(args.oci_version_name)
  client = requests.GetClient()
  messages = requests.GetMessages()
  list_request = (
      messages.ArtifactregistryProjectsLocationsRepositoriesAttachmentsListRequest(
          parent=oci_version.image.docker_repo.GetRepositoryName(),
      )
  )
  list_request.filter = 'oci_version_name="{name}"'.format(
      name=oci_version.GetVersionName()
  )
  list_response = client.projects_locations_repositories_attachments.List(
      list_request
  )
  attachments = list_response.attachments
  if not attachments:
    raise ar_exceptions.InvalidInputValueError(
        'OCI version name {} is not found in repository {}.'.format(
            oci_version.GetVersionName(),
            oci_version.image.docker_repo.GetRepositoryName(),
        )
    )
  if len(attachments) != 1:
    raise ar_exceptions.InvalidInputValueError(
        'OCI version name {} points to more than one attachment.'.format(
            oci_version.GetVersionName()
        )
    )
  return attachments[0]
def GetAttachment(attachment_ref):
  """Fetches a single Artifact Registry Attachment by resource reference.

  Args:
    attachment_ref: resource reference for the attachment.

  Returns:
    The Attachment message returned by the API.
  """
  get_request = (
      requests.GetMessages()
      .ArtifactregistryProjectsLocationsRepositoriesAttachmentsGetRequest(
          name=attachment_ref.RelativeName()
      )
  )
  return requests.GetClient().projects_locations_repositories_attachments.Get(
      get_request
  )

View File

@@ -0,0 +1,147 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming Artifact Registry requests around cleanup policies."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
from apitools.base.py import encoding as apitools_encoding
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import times
import six
def ParseCleanupPolicy(path):
  """Reads a cleanup policy from a JSON formatted file.

  Args:
    path: str, path to the policy file.

  Returns:
    A dict mapping policy name to a dict describing that cleanup policy,
    matching the proto description.

  Raises:
    InvalidInputValueError: The JSON file could not be parsed or the data does
      not follow the correct schema.
  """
  content = console_io.ReadFromFileOrStdin(path, binary=False)
  try:
    file_policies = json.loads(encoding.Decode(content))
  except ValueError as e:
    raise apitools_exceptions.InvalidUserInputError(
        'Could not read JSON file {}: {}'.format(path, e))
  if not isinstance(file_policies, list):
    raise apitools_exceptions.InvalidUserInputError(
        'Policy file must contain a list of policies.'
    )
  policies = dict()
  for i, policy in enumerate(file_policies):
    if not isinstance(policy, dict):
      raise apitools_exceptions.InvalidUserInputError(
          'Invalid policy at index {}.'.format(i)
      )
    name = policy.get('name')
    if name is None:
      raise ar_exceptions.InvalidInputValueError(
          'Key "name" not found in policy.'
      )
    if name in policies:
      raise ar_exceptions.InvalidInputValueError(
          'Duplicate key "{}" in policy list.'.format(name)
      )
    action = policy.get('action')
    if action is None:
      raise ar_exceptions.InvalidInputValueError(
          'Key "action" not found in policy "{}".'.format(name)
      )
    try:
      # The action is expected to be a dict like {'type': 'Delete'}.
      action = action.get('type', '')
    except AttributeError as error:
      six.raise_from(
          ar_exceptions.InvalidInputValueError(
              'Invalid action "{}" in policy "{}".'.format(action, name)
          ),
          error,
      )
    condition = policy.get('condition')
    if condition is not None:
      if not isinstance(condition, dict):
        raise ar_exceptions.InvalidInputValueError(
            'Invalid value for "condition" in policy "{}".'.format(name)
        )
      # Normalize human-readable durations (e.g. "30d") into the API's
      # seconds-with-suffix format (e.g. "2592000s").
      for duration_key in ['versionAge', 'olderThan', 'newerThan']:
        if duration_key in condition:
          seconds = times.ParseDuration(condition[duration_key])
          condition[duration_key] = six.text_type(seconds.total_seconds) + 's'
    most_recent_versions = policy.get('mostRecentVersions')
    if 'condition' not in policy and 'mostRecentVersions' not in policy:
      # Bug fix: previously this formatted the literal string 'name' instead
      # of the policy's actual name.
      raise ar_exceptions.InvalidInputValueError(
          'Key "condition" or "mostRecentVersions" not found in policy "{}".'
          .format(name)
      )
    if 'condition' in policy and 'mostRecentVersions' in policy:
      raise ar_exceptions.InvalidInputValueError(
          'Only one of "condition" or "mostRecentVersions" '
          'allowed in policy "{}".'.format(name)
      )
    policies[name] = {
        'id': name,
        'action': action,
        'condition': condition,
        'mostRecentVersions': most_recent_versions,
    }
  return policies
def SetDeleteCleanupPolicyUpdateMask(unused_ref, unused_args, request):
  """Restricts the repository update to the cleanup-policies field only."""
  request.updateMask = 'cleanup_policies'
  return request
def RepositoryToCleanupPoliciesResponse(response, unused_args):
  """Formats cleanup policies for display and reports dry-run status."""
  if response.cleanupPolicyDryRun:
    log.status.Print('Dry run is enabled.')
  else:
    log.status.Print('Dry run is disabled.')
  if not response.cleanupPolicies:
    return []
  policies_by_id = apitools_encoding.MessageToDict(response.cleanupPolicies)
  formatted = sorted(policies_by_id.values(), key=lambda entry: entry['id'])
  for entry in formatted:
    # Rename 'id' back to 'name' and re-wrap the action string as a dict so
    # the output mirrors the policy-file schema.
    entry['name'] = entry.pop('id')
    entry['action'] = {'type': entry['action']}
  return formatted
def DeleteCleanupPolicyFields(unused_ref, args, request):
  """Removes the named cleanup policies from the repository update request."""
  names_to_remove = set(args.policynames.split(','))
  if request.repository.cleanupPolicies:
    kept = [
        p
        for p in request.repository.cleanupPolicies.additionalProperties
        if p.key not in names_to_remove
    ]
    request.repository.cleanupPolicies.additionalProperties = kept
  request.updateMask = None
  return request

View File

@@ -0,0 +1,702 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for interacting with containeranalysis API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import re
from googlecloudsdk.api_lib.containeranalysis import filter_util
from googlecloudsdk.api_lib.containeranalysis import requests as ca_requests
from googlecloudsdk.api_lib.services import enable_api
from googlecloudsdk.api_lib.services import exceptions as serviceusage_exceptions
import six
class ContainerAnalysisMetadata:
  """ContainerAnalysisMetadata defines metadata retrieved from containeranalysis API.
  """

  def __init__(self):
    # One summary object per occurrence kind; AddOccurrence dispatches into
    # these and the *View methods aggregate them for display.
    self.vulnerability = PackageVulnerabilitySummary()
    self.image = ImageBasisSummary()
    self.discovery = DiscoverySummary()
    self.deployment = DeploymentSummary()
    self.build = BuildSummary()
    self.provenance = ProvenanceSummary()
    self.package = PackageSummary()
    self.attestation = AttestationSummary()
    self.upgrade = UpgradeSummary()
    self.compliance = ComplianceSummary()
    self.dsse_attestation = DsseAttestaionSummary()
    self.sbom_reference = SbomReferenceSummary()

  def AddOccurrence(self, occ, include_build=True):
    """Adds occurrences retrieved from containeranalysis API.

    Generally we have a 1-1 correspondence between type and summary it's added
    to. The exceptions (due to backwards compatibility issues) are:
    BUILD: If you pass in --show-provenance, there will be a provenance
    section (for intoto builds) but no build section. If you pass in
    --show-all-metadata or --show-build-details, there will be a provenance
    section (for intoto builds) and a builds section (for every build). That
    does mean an occurrence may be in both provenance_summary and build_summary.
    DSSE_ATTESTATION: We always return it in both the DSSE_ATTESTATION section
    and the provenance section.

    Args:
      occ: the occurrence retrieved from the API.
      include_build: whether build-kind occurrences should be added to build.
    """
    messages = ca_requests.GetMessages()
    if occ.kind == messages.Occurrence.KindValueValuesEnum.VULNERABILITY:
      self.vulnerability.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.IMAGE:
      self.image.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.DEPLOYMENT:
      self.deployment.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.DISCOVERY:
      self.discovery.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.DSSE_ATTESTATION:
      # DSSE attestations always count as provenance; they are additionally
      # recorded in the dedicated dsse_attestation summary below.
      self.provenance.AddOccurrence(occ)
    elif (
        occ.kind == messages.Occurrence.KindValueValuesEnum.BUILD
        and occ.build
        and (occ.build.intotoStatement or occ.build.inTotoSlsaProvenanceV1)
    ):
      # Only builds carrying an intoto/SLSA payload go into provenance.
      self.provenance.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.PACKAGE:
      self.package.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.ATTESTATION:
      self.attestation.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.UPGRADE:
      self.upgrade.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.COMPLIANCE:
      self.compliance.AddOccurrence(occ)
    elif occ.kind == messages.Occurrence.KindValueValuesEnum.SBOM_REFERENCE:
      self.sbom_reference.AddOccurrence(occ)
    # DSSEAttestation should also have its own section, even if it was already
    # added to the provenance section, as a user can make a non-provenance dsse.
    if occ.kind == messages.Occurrence.KindValueValuesEnum.DSSE_ATTESTATION:
      self.dsse_attestation.AddOccurrence(occ)
    # BUILD should also have its own section, even if it was already
    # added to the provenance section.
    if (
        occ.kind == messages.Occurrence.KindValueValuesEnum.BUILD
        and include_build
    ):
      self.build.AddOccurrence(occ)

  def ImagesListView(self):
    """Returns a dictionary representing the metadata.

    The returned dictionary is used by artifacts docker images list command.
    """
    # Only non-empty summaries are included, so absent kinds produce no keys.
    view = {}
    if self.image.base_images:
      view['IMAGE'] = self.image.base_images
    if self.deployment.deployments:
      view['DEPLOYMENT'] = self.deployment.deployments
    if self.discovery.discovery:
      view['DISCOVERY'] = self.discovery.discovery
    if self.build.build_details:
      view['BUILD'] = self.build.build_details
    if self.package.packages:
      view['PACKAGE'] = self.package.packages
    if self.attestation.attestations:
      view['ATTESTATION'] = self.attestation.attestations
    if self.upgrade.upgrades:
      view['UPGRADE'] = self.upgrade.upgrades
    if self.compliance.compliances:
      view['COMPLIANCE'] = self.compliance.compliances
    if self.dsse_attestation.dsse_attestations:
      view['DSSE_ATTESTATION'] = self.dsse_attestation.dsse_attestations
    if self.sbom_reference.sbom_references:
      view['SBOM_REFERENCE'] = self.sbom_reference.sbom_references
    # Vulnerability data contributes its own keys (e.g. severity counts).
    view.update(self.vulnerability.ImagesListView())
    return view

  def ArtifactsDescribeView(self):
    """Returns a dictionary representing the metadata.

    The returned dictionary is used by artifacts docker images describe command.
    """
    view = {}
    if self.image.base_images:
      view['image_basis_summary'] = self.image
    if self.deployment.deployments:
      view['deployment_summary'] = self.deployment
    if self.discovery.discovery:
      view['discovery_summary'] = self.discovery
    if self.build.build_details:
      view['build_details_summary'] = self.build
    vuln = self.vulnerability.ArtifactsDescribeView()
    if vuln:
      view['package_vulnerability_summary'] = vuln
    if self.provenance.provenance:
      view['provenance_summary'] = self.provenance
    if self.package.packages:
      view['package_summary'] = self.package
    if self.attestation.attestations:
      view['attestation_summary'] = self.attestation
    if self.upgrade.upgrades:
      view['upgrade_summary'] = self.upgrade
    if self.compliance.compliances:
      view['compliance_summary'] = self.compliance
    if self.dsse_attestation.dsse_attestations:
      view['dsse_attestation_summary'] = self.dsse_attestation
    if self.sbom_reference.sbom_references:
      view['sbom_summary'] = self.sbom_reference
    return view

  def SLSABuildLevel(self):
    """Returns SLSA build level 0-3 or unknown."""
    if self.provenance.provenance:
      return _ComputeSLSABuildLevel(self.provenance.provenance)
    return 'unknown'

  def SbomLocations(self):
    # Extracts the storage location from each SBOM reference payload.
    return [
        sbom_ref.sbomReference.payload.predicate.location
        for sbom_ref in self.sbom_reference.sbom_references
    ]
class PackageVulnerabilitySummary:
  """Aggregates package vulnerability occurrences and severity counts."""

  def __init__(self):
    # Maps severity string -> list of vulnerability occurrences.
    self.vulnerabilities = {}
    # FixableTotalByDigest counts from the vulnerability summary API.
    self.counts = []

  def AddOccurrence(self, occ):
    severity = six.text_type(occ.vulnerability.effectiveSeverity)
    self.vulnerabilities.setdefault(severity, []).append(occ)

  def AddSummary(self, summary):
    self.counts.extend(summary.counts)

  def AddCount(self, count):
    self.counts.append(count)

  def ArtifactsDescribeView(self):
    """Returns a dictionary representing package vulnerability metadata.

    The returned dictionary is used by artifacts docker images describe command.
    """
    messages = ca_requests.GetMessages()
    unspecified = (
        messages.FixableTotalByDigest.SeverityValueValuesEnum
        .SEVERITY_UNSPECIFIED)
    view = {}
    if self.vulnerabilities:
      view['vulnerabilities'] = self.vulnerabilities
    for count in self.counts:
      # SEVERITY_UNSPECIFIED carries totals summed across all severities.
      if count.severity == unspecified:
        view['not_fixed_vulnerability_count'] = (
            count.totalCount - count.fixableCount)
        view['total_vulnerability_count'] = count.totalCount
        break
    return view

  def ImagesListView(self):
    """Returns a dictionary representing package vulnerability metadata.

    The returned dictionary is used by artifacts docker images list command.
    """
    messages = ca_requests.GetMessages()
    unspecified = (
        messages.FixableTotalByDigest.SeverityValueValuesEnum
        .SEVERITY_UNSPECIFIED)
    view = {}
    if self.vulnerabilities:
      view['PACKAGE_VULNERABILITY'] = self.vulnerabilities
    severity_totals = {}
    for count in self.counts:
      severity = count.severity
      # Skip the all-severities total; only per-severity counts are shown.
      if severity and severity != unspecified:
        severity_totals[severity] = (
            severity_totals.get(severity, 0) + count.totalCount)
    if severity_totals:
      view['vuln_counts'] = severity_totals
    return view
class ImageBasisSummary:
  """Collects IMAGE occurrences describing an image's base images."""

  def __init__(self):
    self.base_images = []

  def AddOccurrence(self, occ):
    """Records one image-basis occurrence."""
    self.base_images.append(occ)
class BuildSummary:
  """Collects BUILD occurrences describing how an image was built."""

  def __init__(self):
    self.build_details = []

  def AddOccurrence(self, occ):
    """Records one build occurrence."""
    self.build_details.append(occ)
class DeploymentSummary:
  """Collects DEPLOYMENT occurrences for an image."""

  def __init__(self):
    self.deployments = []

  def AddOccurrence(self, occ):
    """Records one deployment occurrence."""
    self.deployments.append(occ)
class DiscoverySummary:
  """Collects DISCOVERY occurrences (vulnerability scan status)."""

  def __init__(self):
    self.discovery = []

  def AddOccurrence(self, occ):
    """Records one discovery occurrence."""
    self.discovery.append(occ)
class ProvenanceSummary:
  """Collects occurrences that carry build provenance information."""

  def __init__(self):
    self.provenance = []

  def AddOccurrence(self, occ):
    """Records one provenance-bearing occurrence."""
    self.provenance.append(occ)
class PackageSummary:
  """Collects PACKAGE occurrences for an image."""

  def __init__(self):
    self.packages = []

  def AddOccurrence(self, occ):
    """Records one package occurrence."""
    self.packages.append(occ)
class AttestationSummary:
  """Collects ATTESTATION occurrences for an image."""

  def __init__(self):
    self.attestations = []

  def AddOccurrence(self, occ):
    """Records one attestation occurrence."""
    self.attestations.append(occ)
class UpgradeSummary:
  """Collects UPGRADE occurrences for an image."""

  def __init__(self):
    self.upgrades = []

  def AddOccurrence(self, occ):
    """Records one upgrade occurrence."""
    self.upgrades.append(occ)
class ComplianceSummary:
  """Collects COMPLIANCE occurrences for an image."""

  def __init__(self):
    self.compliances = []

  def AddOccurrence(self, occ):
    """Records one compliance occurrence."""
    self.compliances.append(occ)
class DsseAttestaionSummary:
  """Collects DSSE_ATTESTATION occurrences for an image.

  NOTE(review): the class name misspells "Attestation"; renaming would break
  existing references, so the spelling is kept.
  """

  def __init__(self):
    self.dsse_attestations = []

  def AddOccurrence(self, occ):
    """Records one DSSE attestation occurrence."""
    self.dsse_attestations.append(occ)
class SbomReferenceSummary:
  """Collects SBOM_REFERENCE occurrences for an image."""

  def __init__(self):
    self.sbom_references = []

  def AddOccurrence(self, occ):
    """Records one SBOM reference occurrence."""
    self.sbom_references.append(occ)
def GetContainerAnalysisMetadata(docker_version, args):
  """Retrieves metadata for a docker image."""
  metadata = ContainerAnalysisMetadata()
  image_str = docker_version.GetDockerString()
  # Occurrences may reference the image with or without the https scheme.
  docker_urls = ['https://{}'.format(image_str), image_str]
  occ_filter = _CreateFilterFromImagesDescribeArgs(docker_urls, args)
  if occ_filter is None:
    # No metadata was requested; skip the API call entirely.
    return metadata
  include_build = bool(
      args.show_build_details or args.show_all_metadata or args.metadata_filter
  )
  for occ in ca_requests.ListOccurrences(docker_version.project, occ_filter):
    metadata.AddOccurrence(occ, include_build)
  if metadata.vulnerability.vulnerabilities:
    summary_filter = (
        filter_util.ContainerAnalysisFilter()
        .WithResources(docker_urls)
        .GetFilter()
    )
    vuln_summary = ca_requests.GetVulnerabilitySummary(
        docker_version.project, summary_filter)
    metadata.vulnerability.AddSummary(vuln_summary)
  return metadata
def GetImageSummaryMetadata(docker_version):
  """Retrieves build and SBOM metadata for a docker image.

  This function is used only for SLSA build level computation and retrieving
  SBOM locations. If the containeranalysis API is disabled for the project, no
  request will be sent and it returns empty metadata resulting in 'unknown'
  SLSA level.

  Args:
    docker_version: docker info about image and project.

  Returns:
    The build and SBOM metadata for the given image.
  """
  metadata = ContainerAnalysisMetadata()
  try:
    ca_enabled = enable_api.IsServiceEnabled(
        docker_version.project, 'containeranalysis.googleapis.com'
    )
  except serviceusage_exceptions.GetServicePermissionDeniedException:
    # Treat "cannot check" as disabled rather than failing the command.
    ca_enabled = False
  if not ca_enabled:
    return metadata
  image_str = docker_version.GetDockerString()
  docker_urls = ['https://{}'.format(image_str), image_str]
  occ_filter = _CreateFilterForImageSummaryOccurrences(docker_urls)
  for occ in ca_requests.ListOccurrences(docker_version.project, occ_filter):
    metadata.AddOccurrence(occ, include_build=False)
  return metadata
def GetArtifactOccurrences(project, artifact_resource):
  """Retrieves vulnerability and discovery occurrences for a Maven artifact."""
  metadata = ContainerAnalysisMetadata()
  occurrences = ca_requests.ListOccurrences(
      project, _CreateFilterForMaven(artifact_resource))
  for occ in occurrences:
    metadata.AddOccurrence(occ, include_build=False)
  return metadata
def GetContainerAnalysisMetadataForImages(repo_or_image, occurrence_filter,
                                          images):
  """Retrieves metadata for all images with a given path prefix.

  The prefix may initially be used to resolve to a list of images if
  --show-occurrences-from is used.

  To account for cases where there is or isn't a list of images,
  this always filters on both prefix and the list of images. In both of
  those cases, the lookup is for both the case where there is and isn't
  an https prefix, in both the prefixes and in the images list.

  Args:
    repo_or_image: The repository originally given by the user.
    occurrence_filter: A user-provided filter string applied to occurrences.
    images: The list of images that matched the prefix, without https prepended.

  Returns:
    A dict mapping image resource URL to the metadata for that image.
  """
  # defaultdict creates the per-image metadata entry on first access, so no
  # setdefault (which needlessly allocated a fresh object per call) is needed.
  metadata = collections.defaultdict(ContainerAnalysisMetadata)
  prefixes = [
      'https://{}'.format(repo_or_image.GetDockerString()),
      repo_or_image.GetDockerString()
  ]
  image_urls = images + ['https://{}'.format(img) for img in images]
  occ_filters = _CreateFilterForImages(prefixes, occurrence_filter, image_urls)
  occurrences = ca_requests.ListOccurrencesWithFilters(repo_or_image.project,
                                                       occ_filters)
  for occ in occurrences:
    metadata[occ.resourceUri].AddOccurrence(occ)
  summary_filters = filter_util.ContainerAnalysisFilter().WithResourcePrefixes(
      prefixes).WithResources(image_urls).GetChunkifiedFilters()
  summaries = ca_requests.GetVulnerabilitySummaryWithFilters(
      repo_or_image.project, summary_filters)
  for summary in summaries:
    for count in summary.counts:
      metadata[count.resourceUri].vulnerability.AddCount(count)
  return metadata
def _CreateFilterForMaven(maven_resource):
  """Builds a containeranalysis occurrence filter for one Maven artifact."""
  return (
      filter_util.ContainerAnalysisFilter()
      .WithKinds(['VULNERABILITY', 'DISCOVERY'])
      .WithResources([maven_resource])
      .GetFilter()
  )
def _CreateFilterForImageSummaryOccurrences(images):
  """Builds a filter matching BUILD and SBOM_REFERENCE occurrences."""
  return (
      filter_util.ContainerAnalysisFilter()
      .WithKinds(['BUILD', 'SBOM_REFERENCE'])
      .WithResources(images)
      .GetFilter()
  )
def _CreateFilterFromImagesDescribeArgs(images, args):
  r"""Parses `docker images describe` arguments into a filter to send to containeranalysis API.

  The returned filter will combine the user-provided filter specified by
  the --metadata-filter flag and occurrence kind filters specified by flags
  such as --show-package-vulnerability.

  Returns None if there is no information to fetch from containeranalysis API.

  Args:
    images: list, the fully-qualified path of docker images.
    args: user provided command line arguments.

  Returns:
    A filter string to send to the containeranalysis API.

  For example, given a user input:
  gcloud docker images describe \
    us-west1-docker.pkg.dev/my-project/my-repo/ubuntu@sha256:abc \
    --show-package-vulnerability \
    --show-image-basis \
    --metadata-filter='createTime>"2019-04-10T"'
  this method will create a filter:
  '''
  ((kind="VULNERABILITY") OR (kind="IMAGE")) AND
  (createTime>"2019-04-10T") AND
  (resourceUrl=us-west1-docker.pkg.dev/my-project/my-repo/ubuntu@sha256:abc' OR
  resourceUrl=https://us-west1-docker.pkg.dev/my-project/my-repo/ubuntu@sha256:abc'))
  '''
  """
  filter_kinds = []
  # We don't need to filter on kinds when showing all metadata.
  if not args.show_all_metadata:
    # Map each --show-* flag to the occurrence kinds it requests, in the
    # same order the flags were previously checked.
    flag_to_kinds = (
        (args.show_build_details, ['BUILD']),
        (args.show_package_vulnerability, ['VULNERABILITY', 'DISCOVERY']),
        (args.show_image_basis, ['IMAGE']),
        (args.show_deployment, ['DEPLOYMENT']),
        (args.show_provenance, ['DSSE_ATTESTATION', 'BUILD']),
        (args.show_sbom_references, ['SBOM_REFERENCE']),
    )
    for flag, kinds in flag_to_kinds:
      if flag:
        filter_kinds.extend(kinds)
  # If args include none of the occurrence types, there's no need to call the
  # containeranalysis API — unless the user supplied a custom filter.
  if not filter_kinds and not args.metadata_filter:
    return None
  occ_filter = filter_util.ContainerAnalysisFilter()
  occ_filter.WithKinds(filter_kinds)
  occ_filter.WithCustomFilter(args.metadata_filter)
  occ_filter.WithResources(images)
  return occ_filter.GetFilter()
def _CreateFilterForImages(prefixes, custom_filter, images):
  """Creates a list of filters from a docker image prefix, a custom filter and fully-qualified image URLs.

  Args:
    prefixes: URL prefixes. Only metadata of images with any of these prefixes
      will be retrieved.
    custom_filter: user provided filter string.
    images: fully-qualified docker image URLs. Only metadata of these images
      will be retrieved.

  Returns:
    A filter string to send to the containeranalysis API.
  """
  return (
      filter_util.ContainerAnalysisFilter()
      .WithResourcePrefixes(prefixes)
      .WithResources(images)
      .WithCustomFilter(custom_filter)
      .GetChunkifiedFilters()
  )
def _ComputeSLSABuildLevel(provenance):
  """Computes SLSA build level from a build provenance.

  Determines SLSA Level based on a list of occurrences,
  preferring data from SLSA v1.0 occurrences over others.

  Args:
    provenance: build provenance list containing build occurrences.

  Returns:
    A string `unknown` if build provenance doesn't exist, otherwise
    an integer from 0 to 3 indicating SLSA build level.
  """
  if not provenance:
    return 'unknown'
  builder_id_v1 = 'https://cloudbuild.googleapis.com/GoogleHostedWorker'
  # Occurrences carrying SLSA v1.0 provenance take precedence.
  builds_v1 = [
      p for p in provenance if p.build and p.build.inTotoSlsaProvenanceV1
  ]
  for build_v1 in builds_v1:
    provenance_v1 = build_v1.build.inTotoSlsaProvenanceV1
    # GCB Build Occurrences that populate SLSA v1.0 data
    # always have SLSA Level 3.
    if (
        provenance_v1.predicate
        and provenance_v1.predicate.runDetails
        and provenance_v1.predicate.runDetails.builder
        and provenance_v1.predicate.runDetails.builder.id
        and provenance_v1.predicate.runDetails.builder.id == builder_id_v1
    ):
      return 3
  # No SLSA v1.0 data was found, just compute the SLSA level from
  # the first occurrence found with defined slsaProvenance.
  builds_v0_1 = [
      p for p in provenance if p.build and p.build.intotoStatement
  ]
  if not builds_v0_1:
    return 'unknown'
  provenance = builds_v0_1[0]
  intoto = provenance.build.intotoStatement
  # Levels accumulate: build steps present -> >= 1, signed by the verified
  # builder key -> >= 2, builder version >= 0.3 -> 3.
  if _HasSteps(intoto):
    if _HasValidKey(provenance):
      if _HasLevel3BuildVersion(intoto):
        return 3
      return 2
    return 1
  return 0
def _HasSteps(intoto):
  """Check whether a build provenance contains build steps.

  Args:
    intoto: intoto statement in build occurrence.

  Returns:
    A boolean value indicating whether intoto contains build steps.
  """
  # Walk the optional attribute chain defensively; any missing link means
  # there is nothing to inspect.
  has_properties = (
      intoto
      and hasattr(intoto, 'slsaProvenance')
      and hasattr(intoto.slsaProvenance, 'recipe')
      and hasattr(intoto.slsaProvenance.recipe, 'arguments')
      and hasattr(
          intoto.slsaProvenance.recipe.arguments, 'additionalProperties'
      )
  )
  if not has_properties:
    return False
  properties = intoto.slsaProvenance.recipe.arguments.additionalProperties
  return any(p.key == 'steps' and p.value for p in properties)
def _HasValidKey(build):
  """Check whether a build provenance contains valid signature and key id.

  Args:
    build: container analysis build occurrence.

  Returns:
    A boolean value indicating whether build occurrence contains valid
    signature and key id.
  """
  if not (
      build
      and hasattr(build, 'envelope')
      and hasattr(build.envelope, 'signatures')
      and build.envelope.signatures
  ):
    return False
  key_id_pattern = '^projects/verified-builder/locations/.+/keyRings/attestor/cryptoKeys/builtByGCB/cryptoKeyVersions/1$'

  def CheckSignature(signature):
    return (hasattr(signature, 'sig') and
            signature.sig and
            hasattr(signature, 'keyid') and
            re.match(key_id_pattern, signature.keyid))

  # Bug fix: the previous code tested `if filter(...)`, but in Python 3 a
  # filter object is always truthy, so any non-empty signature list passed
  # regardless of signature validity. `any` consumes the predicate properly.
  return any(CheckSignature(s) for s in build.envelope.signatures)
def _HasLevel3BuildVersion(intoto):
  """Check whether a build provenance contains level 3 build version.

  Args:
    intoto: intoto statement in build occurrence.

  Returns:
    A boolean value indicating whether intoto contains level 3 build version.
  """
  if not (
      intoto
      and hasattr(intoto, 'slsaProvenance')
      and hasattr(intoto.slsaProvenance, 'builder')
      and hasattr(intoto.slsaProvenance.builder, 'id')
      and intoto.slsaProvenance.builder.id
  ):
    return False
  # Builder ids look like '<uri>@v<major>.<minor>'. Previously a malformed id
  # (missing '@v', or a version not of the form '<major>.<minor>') raised
  # ValueError from list unpacking; treat such ids as not level 3 instead.
  uri, sep, version = intoto.slsaProvenance.builder.id.partition('@v')
  if not sep or uri != 'https://cloudbuild.googleapis.com/GoogleHostedWorker':
    return False
  parts = version.split('.')
  if len(parts) != 2:
    return False
  try:
    major_version, minor_version = int(parts[0]), int(parts[1])
  except ValueError:
    return False
  return major_version > 0 or minor_version >= 3

View File

@@ -0,0 +1,183 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Download utils for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from concurrent import futures
import json
import os
import sys
import threading
from apitools.base.py import transfer
from googlecloudsdk.command_lib.artifacts import requests
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import transports
from googlecloudsdk.core.util import files
def Download(
    dest_path: str,
    file_res_name: str,
    file_name: str,
    allow_overwrite: bool,
    chunk_size: int,
    parallelism: int = 1,
) -> None:
  """Downloads a file to a local path.

  The file is first written to `<dest_path>.tmp`, pre-sized and filled by
  `parallelism` threads each downloading one byte range, then renamed to
  `dest_path` on success.

  Args:
    dest_path: local destination path; `~` is expanded.
    file_res_name: full Artifact Registry resource name of the file.
    file_name: display name used in the progress-bar label.
    allow_overwrite: whether an existing file at dest_path may be replaced.
    chunk_size: chunk size in bytes for each streamed HTTP download.
    parallelism: number of concurrent range-download threads.
  """
  client = requests.GetClient()
  # call expanduser so that `~` can be used to represent the home directory.
  dest_path = os.path.expanduser(dest_path)
  # Only move the file to the user specified path if overwrites are allowed.
  if os.path.exists(dest_path) and not allow_overwrite:
    log.error(f'File {dest_path} already exists.')
    sys.exit(1)
  m = requests.GetMessages()
  file_req = m.ArtifactregistryProjectsLocationsRepositoriesFilesGetRequest(
      name=file_res_name
  )
  file_res = client.projects_locations_repositories_files.Get(file_req)
  # Create the placeholder file so we can do parallel seek and write later.
  temp_dest_path = dest_path + '.tmp'
  try:
    with files.BinaryFileWriter(temp_dest_path) as f:
      f.truncate(file_res.sizeBytes)
  except FileExistsError:
    log.error(
        f'Temporary file {temp_dest_path} already exists (likely from a'
        ' previous failed download attempt). Please remove it and try again.'
    )
    sys.exit(1)
  # For some reason, there is no "v1" in the base URL even though it's supposed
  # to have the API version already in the client.
  download_url = f'{client.url}v1/{file_res_name}:download?alt=media'
  # Unlikely, but when there are fewer bytes than threads, fall back to a
  # single-threaded download.
  if file_res.sizeBytes < parallelism:
    parallelism = 1
  # Split [0, sizeBytes) into `parallelism` contiguous inclusive byte ranges;
  # the final range absorbs any remainder from the integer division.
  range_size = file_res.sizeBytes // parallelism
  ranges = []
  for i in range(0, parallelism):
    if i < parallelism - 1:
      ranges.append((i * range_size, (i + 1) * range_size - 1))
    else:
      ranges.append((i * range_size, file_res.sizeBytes - 1))
  with SharedProgressBar(
      label=f'Downloading {file_name}',
      parallelism=parallelism,
      total=file_res.sizeBytes,
  ) as progress_bar:
    with futures.ThreadPoolExecutor(max_workers=parallelism) as executor:
      fs = [
          executor.submit(
              _DownloadRange,
              i,
              temp_dest_path,
              download_url,
              start,
              end,
              chunk_size,
              progress_bar,
              client,
          )
          for i, (start, end) in enumerate(ranges)
      ]
      # Propagate the first worker failure instead of silently continuing.
      for future in futures.as_completed(fs):
        try:
          future.result()
        except Exception as exc:  # pylint: disable=broad-except
          raise exc
  if allow_overwrite and os.path.exists(dest_path):
    os.remove(dest_path)
  # Rename the temp file to the final destination path
  os.rename(temp_dest_path, dest_path)
def _DownloadRange(
    thread_index,
    temp_dest_path,
    download_url,
    start,
    end,
    chunk_size,
    progress_bar,
    client,
):
  """Downloads the byte range [start, end] into the placeholder file.

  Runs on a worker thread: each thread opens its own handle on the pre-sized
  placeholder file and seeks to its assigned offset before streaming.

  Args:
    thread_index: int, index of this worker, used for progress reporting.
    temp_dest_path: str, path of the pre-sized placeholder file to write into.
    download_url: str, media-download URL for the file.
    start: int, first byte (inclusive) of the range to download.
    end: int, last byte (inclusive) of the range to download.
    chunk_size: chunk size to use for the apitools transfer.
    progress_bar: SharedProgressBar, receives per-thread progress updates.
    client: the Artifact Registry API client (supplies the base URL/auth).
  """
  # Rehydrate an apitools Download from its JSON serialization so we can
  # start mid-file: 'progress' is the starting offset and 'total_size' is
  # end + 1 (end is inclusive) — presumably the exclusive upper bound of the
  # stream; TODO confirm against apitools transfer semantics.
  ser_dict = {
      'auto_transfer': True,
      'total_size': end + 1,
      'progress': start,
      'url': download_url,
  }
  json_data = json.dumps(ser_dict)
  with files.BinaryFileWriter(temp_dest_path) as f:
    # Position the handle so this range's bytes land at the right offset.
    f.seek(start)
    d = transfer.Download.FromData(
        f,
        json_data,
        chunksize=chunk_size,
        client=client,
    )
    d.bytes_http = transports.GetApitoolsTransport(response_encoding=None)
    try:
      # dl.progress is an absolute file offset; subtract `start` so the
      # progress bar sees only the bytes completed by this thread.
      d.StreamMedia(
          callback=lambda _, dl: progress_bar.SetProgress(
              thread_index,
              dl.progress - start,
          )
      )
    finally:
      d.stream.close()
class SharedProgressBar(object):
  """A thread-safe progress bar that aggregates incremental progress.

  Wraps console_io.ProgressBar so that multiple worker threads can each
  report their own completed byte count; the displayed fraction is the sum
  across threads divided by the expected total.
  """

  def __init__(self, parallelism, total, *args, **kwargs):
    # completed_per_thread[i] is the byte count last reported by thread i.
    self.completed_per_thread = [0] * parallelism
    # Total number of bytes expected across all threads.
    self.total = total
    self._progress_bar = console_io.ProgressBar(*args, **kwargs)
    # Serializes concurrent SetProgress calls from worker threads.
    self._lock = threading.Lock()

  def __enter__(self):
    self._progress_bar.__enter__()
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    self._progress_bar.__exit__(exc_type, exc_value, traceback)

  def SetProgress(self, thread_index, p):
    """Records that thread `thread_index` has completed `p` bytes so far."""
    with self._lock:
      self.completed_per_thread[thread_index] = p
      self._progress_bar.SetProgress(
          sum(self.completed_per_thread) / self.total
      )

View File

@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utilities for operating on different endpoints."""
import contextlib
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.core import properties
# Rerouted regions are regions that don't have a regional endpoint
# and must be redirected to global region.
# go/rep-deployment-roadmap, go/rep-dashboards
_CONTAINER_ANALYSIS_REROUTED_LOCATIONS = frozenset([
# TODO(b/445909332): REP is available. Turn up CA API RSLB.
# go/keep-sorted start
"eu",
"europe-north2",
"europe-west12",
"northamerica-south1",
"us",
"us-west8",
# go/keep-sorted end
# No REP available.
# go/keep-sorted start
"asia",
"asia-southeast3",
"europe-west15",
"global"
# go/keep-sorted end
])
# Direct locations are regions and multi-regions that have regional endpoints.
_CONTAINER_ANALYSIS_DIRECT_LOCATIONS = frozenset([
# go/keep-sorted start
"africa-south1",
"asia-east1",
"asia-east2",
"asia-northeast1",
"asia-northeast2",
"asia-northeast3",
"asia-south1",
"asia-south2",
"asia-southeast1",
"asia-southeast2",
"australia-southeast1",
"australia-southeast2",
"europe-central2",
"europe-north1",
"europe-southwest1",
"europe-west1",
"europe-west10",
"europe-west2",
"europe-west3",
"europe-west4",
"europe-west6",
"europe-west8",
"europe-west9",
"me-central1",
"me-central2",
"me-west1",
"northamerica-northeast1",
"northamerica-northeast2",
"southamerica-east1",
"southamerica-west1",
"us-central1",
"us-central2",
"us-east1",
"us-east4",
"us-east5",
"us-east7",
"us-south1",
"us-west1",
"us-west2",
"us-west3",
"us-west4",
# go/keep-sorted end
])
_CONTAINER_ANALYSIS_REP_STRUCTURE = "https://containeranalysis.{}.rep.{}/"
_ARTIFACT_REGISTRY_FACADE_STRUCTURE = "{protocol}{prefix}{location}-{format}.{domain}"
_ARTIFACT_REGISTRY_FACADE_REP_STRUCTURE = "{protocol}{prefix}{format}.{location}.rep.{domain}"
def _GetRegionalEndpoint(region):
  """Builds the Container Analysis regional (REP) endpoint for `region`."""
  domain = properties.VALUES.core.universe_domain.Get()
  return _CONTAINER_ANALYSIS_REP_STRUCTURE.format(region, domain)
@contextlib.contextmanager
def WithRegion(region=None):
  """WithRegion overrides artifact analysis endpoint with endpoint of region.

  A call to WithRegion should be done in a with clause.
  If an existing override exists, this command does not do anything.
  If a rerouted region is passed in, this command does not do anything.
  An error is raised if an invalid location is passed in.

  Args:
    region: str, location

  Raises:
    ar_exceptions.UnsupportedLocationError: if location provided is invalid.

  Yields:
    None
  """
  # Capture any user-configured override so it can be restored on exit.
  override = properties.VALUES.api_endpoint_overrides.containeranalysis.Get()
  if region is None:
    # No region requested; leave the endpoint untouched.
    pass
  elif (
      region not in _CONTAINER_ANALYSIS_DIRECT_LOCATIONS
      and region not in _CONTAINER_ANALYSIS_REROUTED_LOCATIONS
  ):
    raise ar_exceptions.UnsupportedLocationError()
  elif (
      override is None and region not in _CONTAINER_ANALYSIS_REROUTED_LOCATIONS
  ):
    # Only override when the user has not set one themselves and the region
    # has a regional endpoint (rerouted regions use the global endpoint).
    regional_endpoint = _GetRegionalEndpoint(region)
    properties.VALUES.api_endpoint_overrides.containeranalysis.Set(
        regional_endpoint
    )
  try:
    yield
  finally:
    # Restore the original override (possibly None) even on error.
    properties.VALUES.api_endpoint_overrides.containeranalysis.Set(override)
def ArtifactRegistryDomainEndpoint(
    location, repo_format, protocol="", rep=False,
):
  """Returns the Artifact Registry domain endpoint for the given region.

  Args:
    location: str, the repository location (e.g. "us-central1").
    repo_format: str, the repository format (e.g. "docker").
    protocol: str, optional; "https" or "http". When empty, the returned
      endpoint carries no scheme.
    rep: bool, whether to return the regional-endpoint (REP) form.

  Raises:
    ar_exceptions.ArtifactRegistryError: if protocol is neither https nor
      http.

  Returns:
    str, the facade endpoint, e.g. "us-central1-docker.pkg.dev".
  """
  # TODO(b/399155579): read from universe descriptor once AR is added.
  domain = "pkg.dev"
  prefix = properties.VALUES.artifacts.registry_endpoint_prefix.Get()
  if protocol:
    # Membership test instead of a chained inequality comparison.
    if protocol not in ("https", "http"):
      raise ar_exceptions.ArtifactRegistryError(
          "Invalid protocol: {}, must be https or http".format(protocol)
      )
    protocol = protocol + "://"
  template = (
      _ARTIFACT_REGISTRY_FACADE_REP_STRUCTURE
      if rep
      else _ARTIFACT_REGISTRY_FACADE_STRUCTURE
  )
  return template.format(
      protocol=protocol,
      prefix=prefix,
      location=location,
      format=repo_format,
      domain=domain,
  )

View File

@@ -0,0 +1,285 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""File utils for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import re
from apitools.base.protorpclite import protojson
from googlecloudsdk.api_lib.artifacts import exceptions
from googlecloudsdk.api_lib.artifacts import filter_rewriter
from googlecloudsdk.api_lib.util import common_args
from googlecloudsdk.command_lib.artifacts import requests
from googlecloudsdk.command_lib.artifacts import util
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
def EscapeFileName(ref):
  """Escapes slashes, pluses and hats from request names.

  Args:
    ref: resources.Resource, a file resource whose filesId may contain
      characters that must be percent-encoded ("/", "+", "^").

  Returns:
    resources.Resource, an equivalent file resource with filesId escaped.
  """
  return resources.REGISTRY.Create(
      "artifactregistry.projects.locations.repositories.files",
      projectsId=ref.projectsId,
      locationsId=ref.locationsId,
      repositoriesId=ref.repositoriesId,
      filesId=ref.filesId.replace("/", "%2F")
      .replace("+", "%2B")
      .replace("^", "%5E"),
  )
def EscapeFileNameHook(ref, unused_args, req):
  """Escapes slashes, pluses and hats from request names.

  Args:
    ref: resources.Resource, the file resource reference.
    unused_args: argparse.Namespace, unused.
    req: the request message to mutate.

  Returns:
    The request, with req.name set to the escaped relative resource name.
  """
  # Delegate to EscapeFileName so the escaping rules live in one place
  # instead of being duplicated here.
  req.name = EscapeFileName(ref).RelativeName()
  return req
def EscapeFileNameFromIDs(project_id, location_id, repo_id, file_id):
  """Builds a file resource with "/", "+" and "^" percent-encoded in its ID."""
  escaped_file_id = (
      file_id.replace("/", "%2F").replace("+", "%2B").replace("^", "%5E")
  )
  return resources.REGISTRY.Create(
      "artifactregistry.projects.locations.repositories.files",
      projectsId=project_id,
      locationsId=location_id,
      repositoriesId=repo_id,
      filesId=escaped_file_id,
  )
def ConvertFilesHashes(files):
  """Converts the hashes of every file in the list to hex strings."""
  return list(ConvertFileHashes(file_msg, None) for file_msg in files)
def ConvertFileHashes(response, unused_args):
  """Convert file hashes to hex strings.

  Args:
    response: the File message returned by the API.
    unused_args: argparse.Namespace, unused.

  Returns:
    dict, a JSON-compatible copy of the file with hex-encoded hashes and
    annotations flattened into a plain dict.
  """
  # File hashes are "bytes", and if it's returned directly, it will be
  # automatically encoded with base64.
  # We want to display them as hex strings instead.
  # The returned file obj restricts the field type, so we can't simply update
  # the "bytes" field to a "string" field.
  # Convert it to a json object and then update the field as a workaround.
  json_obj = json.loads(protojson.encode_message(response))
  hashes = []
  for h in response.hashes:
    hashes.append({
        "type": h.type,
        "value": h.value.hex(),
    })
  if hashes:
    json_obj["hashes"] = hashes
  # Proto map fields are converted into type "AnnotationsValue" in the response,
  # which contains a list of key-value pairs as "additionalProperties".
  # We want to convert this back to a dict.
  annotations = {}
  if response.annotations:
    for p in response.annotations.additionalProperties:
      annotations[p.key] = p.value
  if annotations:
    json_obj["annotations"] = annotations
  return json_obj
def ListGenericFiles(args):
  """Lists the Generic Files stored.

  Args:
    args: argparse.Namespace, expects project/location/repository settings
      plus `package` and `version`.

  Returns:
    The list of File messages owned by the given package version.
  """
  client = requests.GetClient()
  messages = requests.GetMessages()
  project = util.GetProject(args)
  location = util.GetLocation(args)
  repo = util.GetRepo(args)
  package = args.package
  version = args.version
  # Files are linked to their owning version, so filter on "owner".
  version_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories.packages.versions",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
          packagesId=package,
          versionsId=version,
      )
  )
  arg_filters = 'owner="{}"'.format(version_path)
  repo_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
      )
  )
  files = requests.ListFiles(client, messages, repo_path, arg_filters)
  return files
def ListFiles(args):
  """Lists files in a given project.

  Args:
    args: User input arguments.

  Returns:
    List of files, with hashes converted to hex strings.
  """
  client = requests.GetClient()
  messages = requests.GetMessages()
  project = util.GetProject(args)
  location = args.location or properties.VALUES.artifacts.location.Get()
  repo = util.GetRepo(args)
  package = args.package
  version = args.version
  tag = args.tag
  page_size = args.page_size
  order_by = common_args.ParseSortByArg(args.sort_by)
  _, server_filter = filter_rewriter.Rewriter().Rewrite(args.filter)
  if order_by is not None:
    if "," in order_by:
      # Multi-ordering is not supported yet on backend, fall back to
      # client-side sort-by.
      order_by = None
    if package or version or tag:
      # Cannot use server-side sort-by with --package, --version or --tag,
      # fall back to client-side sort-by.
      order_by = None
  if args.limit is not None and args.filter is not None:
    if server_filter is not None:
      # Apply limit to server-side page_size to improve performance when
      # server-side filter is used.
      page_size = args.limit
    else:
      # Fall back to client-side paging with client-side filtering.
      page_size = None
  if server_filter:
    if package or version or tag:
      # Cannot use server-side filter with --package, --version or --tag,
      # fallback to client-side filter.
      server_filter = None
  # Parse fully qualified path in package argument
  if package:
    if re.match(
        r"projects\/.*\/locations\/.*\/repositories\/.*\/packages\/.*", package
    ):
      params = (
          package.replace("projects/", "", 1)
          .replace("/locations/", " ", 1)
          .replace("/repositories/", " ", 1)
          .replace("/packages/", " ", 1)
          .split(" ")
      )
      # After the replacements above, params is exactly
      # [project, location, repo, package]; unpack it directly instead of
      # rebuilding the list element by element.
      project, location, repo, package = params
  # Escape slashes, pluses and carets in package name
  if package:
    package = package.replace("/", "%2F").replace("+", "%2B")
    package = package.replace("^", "%5E")
  # Retrieve version from tag name
  if version and tag:
    raise exceptions.InvalidInputValueError(
        "Specify either --version or --tag with --package argument."
    )
  if package and tag:
    tag_path = resources.Resource.RelativeName(
        resources.REGISTRY.Create(
            "artifactregistry.projects.locations.repositories.packages.tags",
            projectsId=project,
            locationsId=location,
            repositoriesId=repo,
            packagesId=package,
            tagsId=tag,
        )
    )
    version = requests.GetVersionFromTag(client, messages, tag_path)
  if package and version:
    version_path = resources.Resource.RelativeName(
        resources.REGISTRY.Create(
            "artifactregistry.projects.locations.repositories.packages.versions",
            projectsId=project,
            locationsId=location,
            repositoriesId=repo,
            packagesId=package,
            versionsId=version,
        )
    )
    server_filter = 'owner="{}"'.format(version_path)
  elif package:
    package_path = resources.Resource.RelativeName(
        resources.REGISTRY.Create(
            "artifactregistry.projects.locations.repositories.packages",
            projectsId=project,
            locationsId=location,
            repositoriesId=repo,
            packagesId=package,
        )
    )
    server_filter = 'owner="{}"'.format(package_path)
  elif version or tag:
    raise exceptions.InvalidInputValueError(
        "Package name is required when specifying version or tag."
    )
  repo_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
      )
  )
  server_args = {
      "client": client,
      "messages": messages,
      "repo": repo_path,
      "server_filter": server_filter,
      "page_size": page_size,
      "order_by": order_by,
  }
  server_args_skipped, lfiles = util.RetryOnInvalidArguments(
      requests.ListFiles, **server_args
  )
  if not server_args_skipped:
    # If server-side filter or sort-by is parsed correctly and the request
    # succeeds, remove the client-side filter and sort-by.
    if server_filter and server_filter == args.filter:
      args.filter = None
    if order_by:
      args.sort_by = None
  return ConvertFilesHashes(lfiles)

View File

@@ -0,0 +1,646 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common flags for artifacts print-settings commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
import textwrap
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.calliope import actions
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import properties
_PACKAGE_TYPE_CHOICES = {
'MAVEN': 'Maven package.',
'GO': 'Go standard library and third party packages.',
'NPM': 'NPM package.',
'PYTHON': 'Python package.',
'RUST': 'Rust package.',
'RUBYGEMS': 'RubyGems package.',
'COMPOSER': 'PHP Composer package.',
'NUGET': 'NuGet package.',
}
_EXPERIMENTAL_PACKAGE_TYPE_CHOICES = {}
def RepoAttributeConfig():
fts = [deps.PropertyFallthrough(properties.VALUES.artifacts.repository)]
return concepts.ResourceParameterAttributeConfig(
name='repository',
help_text='Repository of the {resource}.',
fallthroughs=fts)
def LocationAttributeConfig():
fts = [deps.PropertyFallthrough(properties.VALUES.artifacts.location)]
return concepts.ResourceParameterAttributeConfig(
name='location',
help_text='Location of the {resource}.',
fallthroughs=fts)
def PackageAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='package',
help_text='Package of the {resource}.')
def GetRepoResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories',
resource_name='repository',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig())
def GetBetaRepoResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories',
resource_name='repository',
api_version='v1beta1',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig())
def GetLocationResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations',
resource_name='location',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig())
def GetFileResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories.files',
resource_name='file',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig())
def GetAttachmentResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories.attachments',
resource_name='attachment',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig(),
)
def GetPackageResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories.packages',
resource_name='package',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig(),
)
def GetVersionResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories.packages.versions',
resource_name='version',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig(),
packagesId=PackageAttributeConfig(),
)
def GetTagResourceSpec():
return concepts.ResourceSpec(
'artifactregistry.projects.locations.repositories.packages.tags',
resource_name='tag',
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
locationsId=LocationAttributeConfig(),
repositoriesId=RepoAttributeConfig(),
packagesId=PackageAttributeConfig(),
)
def GetScopeFlag():
  """Returns the --scope flag."""
  help_text = ('The scope to associate with the Artifact Registry registry. '
               'If not specified, Artifact Registry is set as the default '
               'registry.')
  return base.Argument('--scope', help=help_text)
def GetImagePathOptionalArg():
"""Gets IMAGE_PATH optional positional argument."""
help_txt = textwrap.dedent("""\
An Artifact Registry repository or a container image.
If not specified, default config values are used.
A valid docker repository has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID
A valid image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE_PATH
""")
return base.Argument('IMAGE_PATH', help=help_txt, nargs='?')
def GetImageRequiredArg():
"""Gets IMAGE required positional argument."""
help_txt = textwrap.dedent("""\
A container image.
A valid container image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE
A valid container image that can be referenced by tag or digest, has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
""")
return base.Argument('IMAGE', help=help_txt)
def GetDockerImageRequiredArg():
help_txt = textwrap.dedent("""\
Docker image - The container image that you want to tag.
A valid container image can be referenced by tag or digest, has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
""")
return base.Argument('DOCKER_IMAGE', help=help_txt)
def GetTagRequiredArg():
help_txt = textwrap.dedent("""\
Image tag - The container image tag.
A valid Docker tag has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
""")
return base.Argument('DOCKER_TAG', help=help_txt)
def GetRepoFlag():
return concept_parsers.ConceptParser.ForResource(
'--repository',
GetRepoResourceSpec(),
('The Artifact Registry repository. If not specified, '
'the current artifacts/repository is used.'),
required=False)
def GetRequiredRepoFlag():
return concept_parsers.ConceptParser.ForResource(
'--repository',
GetRepoResourceSpec(),
('The Artifact Registry repository. If not specified, '
'the current artifacts/repository is used.'),
required=True)
def GetLocationFlag():
return concept_parsers.ConceptParser.ForResource(
'--location',
GetLocationResourceSpec(),
('The Artifact Registry repository location. If not specified, '
'the current artifacts/location is used.'),
required=True)
def GetRequiredFileFlag():
return concept_parsers.ConceptParser.ForResource(
'file',
GetFileResourceSpec(),
'The Artifact Registry file name.',
required=True,
)
def GetRequiredAttachmentFlag():
return concept_parsers.ConceptParser.ForResource(
'attachment',
GetAttachmentResourceSpec(),
'The Artifact Registry attachment name.',
required=True,
)
def GetOptionalAttachmentFlag():
return concept_parsers.ConceptParser.ForResource(
'attachment',
GetAttachmentResourceSpec(),
'The Artifact Registry attachment name.',
required=False,
)
def GetRequiredVersionFlag():
return concept_parsers.ConceptParser.ForResource(
'version',
GetVersionResourceSpec(),
'The Artifact Registry version name.',
required=True,
)
def GetRequiredTagFlag():
return concept_parsers.ConceptParser.ForResource(
'tag',
GetTagResourceSpec(),
'The Artifact Registry tag name.',
required=True,
)
def GetAllowOverwriteFlag():
  """Returns the boolean --allow-overwrite flag."""
  flag_kwargs = {
      'action': 'store_true',
      'default': False,
      'help': 'If specified, the command overwrites an existing file',
  }
  return base.Argument('--allow-overwrite', **flag_kwargs)
def GetRepoArg():
return concept_parsers.ConceptParser.ForResource(
'repository',
GetRepoResourceSpec(),
('The Artifact Registry repository. If not specified, '
'the current artifacts/repository is used.'),
required=True)
def GetRepoArgFromBeta():
return concept_parsers.ConceptParser.ForResource(
'repository',
GetBetaRepoResourceSpec(),
('The Artifact Registry repository. If not specified, '
'the current artifacts/repository is used.'),
required=True)
def GetOptionalLocationFlag():
return concept_parsers.ConceptParser.ForResource(
'--location',
GetLocationResourceSpec(),
('The Artifact Registry repository location. You can also set '
'--location=all to list repositories across all locations. '
'If you omit this flag, the default location is used if you set the '
'artifacts/location property. Otherwise, omitting this flag '
'lists repositories across all locations.'),
required=False)
def GetOptionalAALocationFlag():
return base.Argument(
'--location',
help=('If specified, all requests to Artifact Analysis for occurrences'
' will go to location specified'),
required=False,
)
def GetIncludeTagsFlag():
return base.Argument(
'--include-tags',
help=(
'If specified, tags associated with each image digest are displayed'
' up to a maximum of 100 tags per version.'
),
action='store_true',
required=False,
)
def GetDeleteTagsFlag():
return base.Argument(
'--delete-tags',
help='If specified, all tags associated with the image are deleted.',
action='store_true',
required=False)
def GetGCRDomainArg():
return base.Argument(
'DOMAIN',
help=(
'A Container Registry domain. Valid values are: [gcr.io, asia.gcr.io,'
' eu.gcr.io, us.gcr.io]'
),
)
def GetJsonKeyFlag(tool):
  """Gets Json Key Flag text based on specified tool.

  Args:
    tool: str, the tool the credentials will be used with.

  Raises:
    ar_exceptions.ArtifactRegistryError: if the tool is unsupported.

  Returns:
    base.Argument, the --json-key flag with tool-specific help text.
  """
  # Use membership tests for both branches for consistency, instead of a
  # chained `== or ==` comparison.
  if tool in ('pypi', 'python'):
    return base.Argument(
        '--json-key',
        help=('Path to service account JSON key. If not specified, '
              'output returns either credentials for an active service account '
              'or a placeholder for the current user account.'))
  elif tool in ('gradle', 'maven', 'npm'):
    return base.Argument(
        '--json-key',
        help=('Path to service account JSON key. If not specified, '
              'current active service account credentials or a placeholder for '
              'gcloud credentials is used.'))
  else:
    raise ar_exceptions.ArtifactRegistryError(
        'Invalid tool type: {}'.format(tool))
def GetShowAllMetadataFlag():
return base.Argument(
'--show-all-metadata',
action='store_true',
help='Include all metadata in the output. Metadata will be grouped by '
'Grafeas kind, with an additional section for intoto provenance '
'metadata.')
def GetShowDeploymentFlag():
return base.Argument(
'--show-deployment',
action='store_true',
help='Include deployment metadata in the output.')
def GetShowImageBasisFlag():
return base.Argument(
'--show-image-basis',
action='store_true',
help='Include base image metadata in the output.')
def GetShowPackageVulnerabilityFlag():
return base.Argument(
'--show-package-vulnerability',
action='store_true',
help='Include vulnerability metadata in the output.')
def GetShowBuildDetailsFlag():
return base.Argument(
'--show-build-details',
action='store_true',
help='Include build metadata in the output.')
def GetShowSbomReferencesFlag():
return base.Argument(
'--show-sbom-references',
action='store_true',
help='Include SBOM metadata in the output.')
def GetMetadataFilterFlag():
return base.Argument(
'--metadata-filter',
help=('Additional filter to fetch metadata for a given '
'qualified image reference.'))
def GetShowOccurrencesFlag():
return base.Argument(
'--show-occurrences',
action='store_true',
help='Show summaries of the various occurrence types.')
def GetShowOccurrencesFromFlag():
return base.Argument(
'--show-occurrences-from',
type=arg_parsers.BoundedInt(1, sys.maxsize, unlimited=True),
default=10,
help=('The number of the most recent images for which to '
'summarize occurrences.'))
def GetOccurrenceFilterFlag():
return base.Argument(
'--occurrence-filter',
default=(
'kind="BUILD" OR kind="IMAGE" OR kind="DISCOVERY" OR'
' kind="SBOM_REFERENCE"'
),
help='A filter for the occurrences which will be summarized.')
def GetVulnerabilitiesOccurrenceFilterFlag():
return base.Argument(
'--occurrence-filter',
help='A filter for the occurrences which will be summarized. See link for '
'officially supported filters: '
'https://cloud.google.com/container-analysis/docs/os-scanning-automatically#filtering')
def GetShowProvenanceFlag():
return base.Argument(
'--show-provenance',
action='store_true',
help='Include intoto provenance metadata in the output, in the '
'provenance_summary section. To see all build metadata in the output, '
'use --show-all-metadata or --show-build-details.')
def GetResourceURIArg():
"""Gets RESOURCE_URI required positional argument."""
return base.Argument(
'RESOURCE_URI',
help=('A container image in a Google Cloud registry (Artifact Registry '
'or Container Registry), or a local container image.'))
def GetListURIArg():
"""Gets list uri required positional argument."""
return base.Argument(
'URI',
help=('An URI identifying a container image or package in '
'Artifact Registry or Google Cloud Registry.'))
def GetRemoteFlag():
return base.Argument(
'--remote',
action='store_true',
default=False,
help=('Whether the container image is located remotely or '
'on your local machine.'))
def GetOnDemandScanningLocationFlag():
return base.Argument(
'--location',
choices={
'us': 'Perform analysis in the US',
'europe': 'Perform analysis in Europe',
'asia': 'Perform analysis in Asia',
},
default='us',
help=('The API location in which to perform package analysis. Consider '
'choosing a location closest to where you are located. Proximity '
'to the container image does not affect response time.'))
def GetOnDemandScanningFakeExtractionFlag():
return base.Argument(
'--fake-extraction',
action='store_true',
default=False,
hidden=True,
help=('Whether to use fake packages/versions instead of performing '
'extraction. This flag is for test purposes only.'))
def GetAdditionalPackageTypesFlag():
return base.Argument(
'--additional-package-types',
action=actions.DeprecationAction(
'--additional-package-types',
warn=(
'This flag is deprecated as scanning for all package types is '
'now the default. To skip scanning for specific package types, '
'use --skip-package-types.'
),
),
type=arg_parsers.ArgList(
choices=_PACKAGE_TYPE_CHOICES,
element_type=lambda package_type: package_type.upper(),
),
metavar='ADDITIONAL_PACKAGE_TYPES',
help=(
'A comma-separated list of package types to scan in addition to OS'
' packages.'
),
)
def GetExperimentalPackageTypesFlag():
return base.Argument(
'--experimental-package-types',
action=actions.DeprecationAction(
'--experimental-package-types',
warn=(
'This flag is deprecated as scanning for all package types is '
'now the default. To skip scanning for specific package types, '
'use --skip-package-types.'
),
),
type=arg_parsers.ArgList(
choices=_EXPERIMENTAL_PACKAGE_TYPE_CHOICES,
element_type=lambda package_type: package_type.upper(),
),
hidden=True,
metavar='EXPERIMENTAL_PACKAGE_TYPES',
help=(
'A comma-separated list of experimental package types to scan in'
' addition to OS packages and officially supported third party'
' packages.'
),
)
def GetSkipPackageTypesFlag():
return base.Argument(
'--skip-package-types',
type=arg_parsers.ArgList(
choices=_PACKAGE_TYPE_CHOICES,
element_type=lambda package_type: package_type.upper(),
),
metavar='SKIP_PACKAGE_TYPES',
help='A comma-separated list of package types to skip when scanning.',
)
def GetVerboseErrorsFlag():
return base.Argument(
'--verbose-errors',
action='store_true',
default=False,
hidden=True,
help=('Log internal errors.'))
def GetSkipExistingFlag():
return base.Argument(
'--skip-existing',
action='store_true',
default=False,
help=(
'If specified, skip uploading files that already exist in the'
' repository, and continue to upload the remaining files.'
),
)
def GetChunkSize():
  """Returns the --chunk-size flag.

  NOTE(review): no `type=` is set, so argparse yields the value as a string;
  confirm that downstream consumers convert it to an int before use.
  """
  return base.Argument(
      '--chunk-size',
      help=(
          'If specified, the chunk size (bytes) to use for downloading the'
          ' package.'
      ),
  )
def GetPlainRepoFlag():
"""Gets a simple --repository flag."""
return base.Argument(
'--repository',
help=(
'The Artifact Registry repository. If not specified, the current'
' artifacts/repository is used.'
),
)
def GetPlainLocationFlag():
"""Gets a simple --location flag."""
return base.Argument(
'--location',
help=(
'The Artifact Registry repository location. If not specified, the'
' current artifacts/location is used.'
),
)
def GetSeverityFlag():
  """Gets the --severity flag for platform logs."""
  return base.Argument(
      '--severity',
      type=lambda x: x.upper(),  # Ensure value is uppercase for enum
      choices=[
          'INFO',
          'ERROR',
      ],
      help=(
          'The minimum severity level of logs to generate. This flag is only '
          # Trailing space added: the original concatenation rendered as
          # "case-insensitive.If omitted" in help output.
          'applicable when using --enable. Values are case-insensitive. '
          'If omitted when --enable is true, it defaults to logging all '
          'severities.'
      ),
  )

View File

@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatting strings for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
BUILD_GIT_SHA_FORMAT = ("BUILD_DETAILS.buildDetails.provenance."
"sourceProvenance.context.cloudRepo.revisionId"
".notnull().list().slice(:8).join(''):optional:label"
"=GIT_SHA")
BUILD_FORMAT = ("BUILD_DETAILS.buildDetails.provenance.id.notnull().list()"
":optional:label=BUILD")
VULNERABILITY_FORMAT = "vuln_counts.list():optional:label=VULNERABILITIES"
IMAGE_BASIS_FORMAT = ("IMAGE_BASIS.derivedImage.sort(distance).map()"
".extract(baseResourceUrl).slice(:1).map().list().list()"
".split('//').slice(1:).list().split('@').slice(:1)"
".list():optional:label=FROM")
DISCOVERY_FORMAT = ("DISCOVERY[0].discovered.analysisStatus:optional:label"
"=VULNERABILITY_SCAN_STATUS")
CONTAINER_ANALYSIS_METADATA_FORMAT = """
{},
{},
{},
{},
{}
""".format(BUILD_GIT_SHA_FORMAT, VULNERABILITY_FORMAT, IMAGE_BASIS_FORMAT,
BUILD_FORMAT, DISCOVERY_FORMAT)

View File

@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Golang related utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
from google.auth import exceptions as ga_exceptions
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.command_lib.artifacts.print_settings import credentials
from googlecloudsdk.command_lib.util.anthos import binary_operations
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import requests
from googlecloudsdk.core.credentials import creds as c_creds
from googlecloudsdk.core.credentials import store
class PackOperation(binary_operations.BinaryBackedOperation):
  """Wrapper around the package-go-module binary."""

  def __init__(self, **kwargs):
    super(PackOperation, self).__init__(binary='package-go-module', **kwargs)

  def _ParseArgsForCommand(self, module_path, version, source, output,
                           **kwargs):
    """Translates the operation inputs into package-go-module CLI flags."""
    return [
        '--module_path={}'.format(module_path),
        '--version={}'.format(version),
        '--source={}'.format(source),
        '--output={}'.format(output),
    ]
def _GetAdcToken():
  """Refreshes application default credentials and returns their token."""
  adc_creds, _ = c_creds.GetGoogleAuthDefault().default()
  adc_creds.refresh(requests.GoogleAuthRequest())
  return adc_creds.token
def AuthorizationHeader(json_key):
  """Builds an HTTP Authorization header line for Artifact Registry.

  Credential sources are tried in order: the --json-key service account file,
  application default credentials, then gcloud user credentials.

  Args:
    json_key: str or None, path to a service account JSON key file.

  Returns:
    str, a complete 'Authorization: ...' header line.

  Raises:
    ar_exceptions.NoCredentialsError: if every credential source fails; carries
      the per-source errors.
  """
  # 1. Service account key supplied via --json-key.
  try:
    sa_creds = credentials.GetServiceAccountCreds(json_key)
    if sa_creds:
      return _BasicAuthHeader('_json_key_base64', sa_creds)
    json_key_err = ar_exceptions.NoJsonKeyCredentialsError(
        '--json-key unspecified'
    )
  except core_exceptions.Error as e:
    json_key_err = ar_exceptions.NoJsonKeyCredentialsError(e)
  # 2. Application default credentials.
  try:
    return _BearerAuthHeader(_GetAdcToken())
  except (ga_exceptions.DefaultCredentialsError, core_exceptions.Error) as e:
    default_creds_err = ar_exceptions.NoDefaultCredentialsError(e)
  # 3. gcloud user credentials.
  try:
    return _BearerAuthHeader(store.GetAccessToken())
  except core_exceptions.Error as e:
    user_creds_err = ar_exceptions.NoUserCredentialsError(e)
  # Every source failed; surface all three errors.
  raise ar_exceptions.NoCredentialsError(
      json_key_err, default_creds_err, user_creds_err
  )
def _BasicAuthHeader(username, password):
creds = base64.b64encode(
f'{username}:{password}'.encode('utf-8')
).decode('utf-8')
return f'Authorization: Basic {creds}'
def _BearerAuthHeader(token):
return f'Authorization: Bearer {token}'

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for interacting with containeranalysis API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.ondemandscanning import util as ods_util
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.core import exceptions as core_exceptions
from googlecloudsdk.core import resources
class UnsupportedOS(core_exceptions.Error):
  """Raised when the user attempts to scan from an unsupported operating system.

  Note that this is not the same error as when a user initiates a scan on a
  container image, but that image itself has an unsupported OS. In this case,
  the gcloud command itself is running on an unsupported operating system.
  """
class ExtractionFailedError(core_exceptions.Error):
  """Raised when extraction fails."""
def WaitForOperation(operation, version):
  """Silently waits for the given google.longrunning.Operation to complete.

  Args:
    operation: The operation to poll.
    version: The ODS API version endpoints to use to talk to the Operations
      service.

  Raises:
    apitools.base.py.HttpError: if the request returns an HTTP error

  Returns:
    The response field of the completed operation.
  """
  op_ref = resources.REGISTRY.ParseRelativeName(
      operation.name,
      collection='ondemandscanning.projects.locations.operations',
  )
  operations = ods_util.GetClient(version).projects_locations_operations
  # No per-resource progress to report, so use the resource-less poller.
  return waiter.PollUntilDone(
      waiter.CloudOperationPollerNoResources(operations), op_ref
  )

View File

@@ -0,0 +1,100 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package utils for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.artifacts import filter_rewriter
from googlecloudsdk.api_lib.util import common_args
from googlecloudsdk.command_lib.artifacts import requests
from googlecloudsdk.command_lib.artifacts import util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
def ListPackages(args):
  """Lists packages in a given project.

  Args:
    args: User input arguments.

  Returns:
    List of packages.
  """
  client = requests.GetClient()
  messages = requests.GetMessages()
  repo = util.GetRepo(args)
  project = util.GetProject(args)
  # Fall back to the artifacts/location property when --location is not given.
  location = args.location or properties.VALUES.artifacts.location.Get()
  page_size = args.page_size
  order_by = common_args.ParseSortByArg(args.sort_by)
  # Attempt to rewrite the user filter into a server-side filter expression;
  # server_filter is None when it cannot be evaluated server-side.
  _, server_filter = filter_rewriter.Rewriter().Rewrite(args.filter)
  limit = args.limit
  if order_by is not None:
    if "," in order_by:
      # Multi-ordering is not supported yet on backend, fall back to client-side
      # sort-by.
      order_by = None
  if args.limit is not None and args.filter is not None:
    if server_filter is not None:
      # Apply limit to server-side page_size to improve performance when
      # server-side filter is used.
      page_size = args.limit
    else:
      # Fall back to client-side paging with client-side filtering.
      page_size = None
      limit = None
  # Fully-qualified repository resource name used as the list parent.
  repo_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
      )
  )
  server_args = {
      "client": client,
      "messages": messages,
      "repo": repo_path,
      "server_filter": server_filter,
      "page_size": page_size,
      "order_by": order_by,
      "limit": limit,
  }
  # NOTE(review): presumably retries without the optional server-side args when
  # the backend rejects them, reporting that via server_args_skipped — confirm
  # against util.RetryOnInvalidArguments.
  server_args_skipped, lpkgs = util.RetryOnInvalidArguments(
      requests.ListPackages, **server_args
  )
  if not server_args_skipped:
    # If server-side filter or sort-by is parsed correctly and the request
    # succeeds, remove the client-side filter and sort-by.
    if server_filter and server_filter == args.filter:
      args.filter = None
    if order_by:
      args.sort_by = None
  log.status.Print(
      "Listing items under project {}, location {}, repository {}.\n".format(
          project, location, repo
      )
  )
  return util.UnescapePackageName(lpkgs, None)

View File

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for platform logs."""
from googlecloudsdk.command_lib.artifacts import util
from googlecloudsdk.core import resources
def _GetClientAndResourceName(args, client):
  """Resolves the service client and platform logs config resource name.

  Args:
    args: User input args.
    client: The API service client.

  Returns:
    A (service, config_name, is_repository) tuple: service is the
    repository- or location-level service client, config_name is the full
    resource name of the platform logs config, and is_repository is True
    when --repository was specified.
  """
  project = util.GetProject(args)
  location = util.GetLocation(args)
  is_repository = args.IsSpecified('repository')
  if is_repository:
    service = client.projects_locations_repositories
    parent = resources.REGISTRY.Parse(
        util.GetRepo(args),
        params={'projectsId': project, 'locationsId': location},
        collection='artifactregistry.projects.locations.repositories',
    )
  else:
    service = client.projects_locations
    parent = resources.REGISTRY.Parse(
        location,
        params={'projectsId': project},
        collection='artifactregistry.projects.locations',
    )
  config_name = '{}/platformLogsConfig'.format(parent.RelativeName())
  return service, config_name, is_repository
def GetPlatformLogsConfig(args, client, messages):
  """Gets the platform logs config.

  Args:
    args: User input args.
    client: The API service client.
    messages: The API messages module.

  Returns:
    The retrieved platform logs config.
  """
  service, config_name, is_repository = _GetClientAndResourceName(args, client)
  # The request message class differs between repository- and location-level
  # configs; the call is otherwise identical.
  if is_repository:
    request_cls = (
        messages.ArtifactregistryProjectsLocationsRepositoriesGetPlatformLogsConfigRequest
    )
  else:
    request_cls = (
        messages.ArtifactregistryProjectsLocationsGetPlatformLogsConfigRequest
    )
  return service.GetPlatformLogsConfig(request_cls(name=config_name))
def UpdatePlatformLogsConfig(args, client, messages, platform_logs_config):
  """Updates the platform logs config.

  Args:
    args: User input args.
    client: The API service client.
    messages: The API messages module.
    platform_logs_config: The platform logs config to update.

  Returns:
    The updated platform logs config.
  """
  service, config_name, is_repository = _GetClientAndResourceName(args, client)
  # The request message class differs between repository- and location-level
  # configs; the call is otherwise identical.
  if is_repository:
    request_cls = (
        messages.ArtifactregistryProjectsLocationsRepositoriesUpdatePlatformLogsConfigRequest
    )
  else:
    request_cls = (
        messages.ArtifactregistryProjectsLocationsUpdatePlatformLogsConfigRequest
    )
  platform_logs_config.name = config_name
  return service.UpdatePlatformLogsConfig(
      request_cls(name=config_name, platformLogsConfig=platform_logs_config)
  )

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for Apt."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# Apt setup snippet for repositories that require authentication. Uses the
# "ar+https" transport scheme (presumably handled by the apt Artifact Registry
# plugin referenced in the linked prepare-apt docs — confirm).
DEFAULT_TEMPLATE = """\
# To configure your package manager with this repository, do the following:
# Prepare your VM to access the repository using the following instructions:
# https://cloud.google.com/artifact-registry/docs/os-packages/debian/configure#prepare-apt
# Configure your VM to access Artifact Registry packages using the following
# command:
echo "deb ar+https://{location}-apt.pkg.dev/projects/{project} {repo} main" | sudo tee -a /etc/apt/sources.list.d/artifact-registry.list
# Update Apt:
sudo apt update
# For complete setup information, see
# https://cloud.google.com/artifact-registry/docs/os-packages/debian/configure
"""
# Apt setup snippet for repositories reachable without authentication: plain
# https scheme, no prepare-apt step.
PUBLIC_TEMPLATE = """\
# To configure your package manager with this repository:
# Configure your VM to access Artifact Registry packages using the following
# command:
echo "deb https://{location}-apt.pkg.dev/projects/{project} {repo} main" | sudo tee -a /etc/apt/sources.list.d/artifact-registry.list
# Update Apt:
sudo apt update
# For complete setup information, see
# https://cloud.google.com/artifact-registry/docs/os-packages/debian/configure
"""

View File

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*- #
# Copyright 2025 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for Artifacts Registry repositories."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
import json
from googlecloudsdk.api_lib.auth import service_account
from googlecloudsdk.core import config
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import creds
from googlecloudsdk.core.credentials import exceptions as creds_exceptions
from googlecloudsdk.core.credentials import store
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import files
def _LoadJsonFile(filename):
  """Checks and validates if given filename is a proper JSON file.

  Args:
    filename: str, path to JSON file (or '-' for stdin).

  Returns:
    bytes, the content of the file.

  Raises:
    service_account.BadCredentialFileException: if the content is not valid
      JSON.
  """
  content = console_io.ReadFromFileOrStdin(filename, binary=True)
  try:
    json.loads(encoding.Decode(content))
  except ValueError as e:
    # Only mention the parse error when the file at least claims to be JSON.
    if filename.endswith(".json"):
      raise service_account.BadCredentialFileException(
          "Could not read JSON file {0}: {1}".format(filename, e))
    raise service_account.BadCredentialFileException(
        "Unsupported credential file: {0}".format(filename))
  return content
def GetServiceAccountCreds(json_key):
  """Gets service account credentials from given file path or default if any.

  Args:
    json_key: str, path to JSON key file.

  Returns:
    str, base64-encoded service account credentials, or "" when the active
    account does not use service account credentials.

  Raises:
    creds_exceptions.NoActiveAccountException: if no account is active.
    store.NoCredentialsForAccountException: if the active account has no
      stored credentials.
  """
  if json_key:
    return base64.b64encode(_LoadJsonFile(json_key)).decode("utf-8")
  account = properties.VALUES.core.account.Get()
  if not account:
    raise creds_exceptions.NoActiveAccountException()
  loaded = store.Load(account, prevent_refresh=True, use_google_auth=True)
  if not loaded:
    raise store.NoCredentialsForAccountException(account)
  if not _IsServiceAccountCredentials(loaded):
    return ""
  # Read the legacy ADC JSON written for this account and base64-encode it.
  adc_json = files.ReadFileContents(
      config.Paths().LegacyCredentialsAdcPath(account))
  return base64.b64encode(adc_json.encode("utf-8")).decode("utf-8")
def _IsServiceAccountCredentials(cred):
  """Returns True if cred is service account credentials."""
  # oauth2client and google-auth credentials expose their type differently.
  if creds.IsOauth2ClientCredentials(cred):
    cred_type = creds.CredentialType.FromCredentials(cred)
    return cred_type == creds.CredentialType.SERVICE_ACCOUNT
  cred_type = creds.CredentialTypeGoogleAuth.FromCredentials(cred)
  return cred_type == creds.CredentialTypeGoogleAuth.SERVICE_ACCOUNT

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for gradle."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
SERVICE_ACCOUNT_TEMPLATE = """\
// Move the secret to ~/.gradle.properties
def artifactRegistryMavenSecret = "{password}"
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
}}
publishing {{
repositories {{
maven {{
url "https://{location}-maven.pkg.dev/{repo_path}"
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
}}
}}
}}
repositories {{
maven {{
url "https://{location}-maven.pkg.dev/{repo_path}"
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
authentication {{
basic(BasicAuthentication)
}}
}}
}}
"""
SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE = """\
// Move the secret to ~/.gradle.properties
def artifactRegistryMavenSecret = "{password}"
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
}}
publishing {{
repositories {{
maven {{
def snapshotURL = "https://{location}-maven.pkg.dev/{repo_path}"
def releaseURL = "<Paste release URL here>"
url version.endsWith('SNAPSHOT') ? snapshotURL : releaseURL
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
}}
}}
}}
repositories {{
maven {{
url "https://{location}-maven.pkg.dev/{repo_path}"
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
authentication {{
basic(BasicAuthentication)
}}
}}
}}
"""
SERVICE_ACCOUNT_RELEASE_TEMPLATE = """\
// Move the secret to ~/.gradle.properties
def artifactRegistryMavenSecret = "{password}"
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
}}
publishing {{
repositories {{
maven {{
def snapshotURL = "<Paste snapshot URL here>"
def releaseURL = "https://{location}-maven.pkg.dev/{repo_path}"
url version.endsWith('SNAPSHOT') ? snapshotURL : releaseURL
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
}}
}}
}}
repositories {{
maven {{
url "https://{location}-maven.pkg.dev/{repo_path}"
credentials {{
username = "{username}"
password = "$artifactRegistryMavenSecret"
}}
authentication {{
basic(BasicAuthentication)
}}
}}
}}
"""
NO_SERVICE_ACCOUNT_TEMPLATE = """\
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
id "com.google.cloud.artifactregistry.gradle-plugin" version "{extension_version}"
}}
publishing {{
repositories {{
maven {{
url "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
}}
}}
}}
repositories {{
maven {{
url "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
}}
}}
"""
NO_SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE = """\
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
id "com.google.cloud.artifactregistry.gradle-plugin" version "{extension_version}"
}}
publishing {{
repositories {{
maven {{
def snapshotURL = "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
def releaseURL = "<Paste release URL here>"
url version.endsWith('SNAPSHOT') ? snapshotURL : releaseURL
}}
}}
}}
repositories {{
maven {{
url "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
}}
}}
"""
NO_SERVICE_ACCOUNT_RELEASE_TEMPLATE = """\
// Insert following snippet into your build.gradle
// see docs.gradle.org/current/userguide/publishing_maven.html
plugins {{
id "maven-publish"
id "com.google.cloud.artifactregistry.gradle-plugin" version "{extension_version}"
}}
publishing {{
repositories {{
maven {{
def snapshotURL = "<Paste snapshot URL here>"
def releaseURL = "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
url version.endsWith('SNAPSHOT') ? snapshotURL : releaseURL
}}
}}
}}
repositories {{
maven {{
url "artifactregistry://{location}-maven.pkg.dev/{repo_path}"
}}
}}
"""

View File

@@ -0,0 +1,303 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for maven."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
SERVICE_ACCOUNT_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<snapshotRepository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</snapshotRepository>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</repository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
</project>
<!-- Insert following snippet into your settings.xml -->
<settings>
<servers>
<server>
<id>{server_id}</id>
<configuration>
<httpConfiguration>
<get>
<usePreemptive>true</usePreemptive>
</get>
<head>
<usePreemptive>true</usePreemptive>
</head>
<put>
<params>
<property>
<name>http.protocol.expect-continue</name>
<value>false</value>
</property>
</params>
</put>
</httpConfiguration>
</configuration>
<username>{username}</username>
<password>{password}</password>
</server>
</servers>
</settings>
"""
NO_SERVICE_ACCOUNT_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<snapshotRepository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</snapshotRepository>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</repository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<build>
<extensions>
<extension>
<groupId>com.google.cloud.artifactregistry</groupId>
<artifactId>artifactregistry-maven-wagon</artifactId>
<version>{extension_version}</version>
</extension>
</extensions>
</build>
</project>
"""
NO_SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<snapshotRepository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</snapshotRepository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<build>
<extensions>
<extension>
<groupId>com.google.cloud.artifactregistry</groupId>
<artifactId>artifactregistry-maven-wagon</artifactId>
<version>{extension_version}</version>
</extension>
</extensions>
</build>
</project>
"""
NO_SERVICE_ACCOUNT_RELEASE_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</repository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
<build>
<extensions>
<extension>
<groupId>com.google.cloud.artifactregistry</groupId>
<artifactId>artifactregistry-maven-wagon</artifactId>
<version>{extension_version}</version>
</extension>
</extensions>
</build>
</project>
"""
SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<snapshotRepository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</snapshotRepository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
</project>
<!-- Insert following snippet into your settings.xml -->
<settings>
<servers>
<server>
<id>{server_id}</id>
<configuration>
<httpConfiguration>
<get>
<usePreemptive>true</usePreemptive>
</get>
<head>
<usePreemptive>true</usePreemptive>
</head>
<put>
<params>
<property>
<name>http.protocol.expect-continue</name>
<value>false</value>
</property>
</params>
</put>
</httpConfiguration>
</configuration>
<username>{username}</username>
<password>{password}</password>
</server>
</servers>
</settings>
"""
SERVICE_ACCOUNT_RELEASE_TEMPLATE = """\
<!-- Insert following snippet into your pom.xml -->
<project>
<distributionManagement>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
</repository>
</distributionManagement>
<repositories>
<repository>
<id>{server_id}</id>
<url>{scheme}://{location}-maven.pkg.dev/{repo_path}</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
</project>
<!-- Insert following snippet into your settings.xml -->
<settings>
<servers>
<server>
<id>{server_id}</id>
<configuration>
<httpConfiguration>
<get>
<usePreemptive>true</usePreemptive>
</get>
<head>
<usePreemptive>true</usePreemptive>
</head>
<put>
<params>
<property>
<name>http.protocol.expect-continue</name>
<value>false</value>
</property>
</params>
</put>
</httpConfiguration>
</configuration>
<username>{username}</username>
<password>{password}</password>
</server>
</servers>
</settings>
"""

View File

@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for npm."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# .npmrc snippets printed by `gcloud artifacts print-settings npm`. The
# service-account variant adds per-registry credential lines for the user
# .npmrc; the other relies on externally-managed auth.
SERVICE_ACCOUNT_TEMPLATE = """\
# Insert the following snippet into your project .npmrc
{configured_registry}=https://{registry_path}
//{registry_path}:always-auth=true
# Insert the following snippet into your user .npmrc
//{registry_path}:_password="{password}"
//{registry_path}:username=_json_key_base64
//{registry_path}:email=not.valid@email.com
"""
NO_SERVICE_ACCOUNT_TEMPLATE = """\
# Insert the following snippet into your project .npmrc
{configured_registry}=https://{registry_path}
//{registry_path}:always-auth=true
"""

View File

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for python."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
SERVICE_ACCOUNT_SETTING_TEMPLATE = """\
# Insert the following snippet into your .pypirc
[distutils]
index-servers =
{repo}
[{repo}]
repository: https://{location}-python.pkg.dev/{repo_path}/
username: _json_key_base64
password: {password}
# Insert the following snippet into your pip.conf
[global]
extra-index-url = https://_json_key_base64:{password}@{location}-python.pkg.dev/{repo_path}/simple/
"""
NO_SERVICE_ACCOUNT_SETTING_TEMPLATE = """\
# Insert the following snippet into your .pypirc
[distutils]
index-servers =
{repo}
[{repo}]
repository: https://{location}-python.pkg.dev/{repo_path}/
# Insert the following snippet into your pip.conf
[global]
extra-index-url = https://{location}-python.pkg.dev/{repo_path}/simple/
"""

View File

@@ -0,0 +1,364 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for Artifacts Registry repositories."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.command_lib.artifacts import requests as ar_requests
from googlecloudsdk.command_lib.artifacts import util as ar_util
from googlecloudsdk.command_lib.artifacts.print_settings import apt
from googlecloudsdk.command_lib.artifacts.print_settings import credentials
from googlecloudsdk.command_lib.artifacts.print_settings import gradle
from googlecloudsdk.command_lib.artifacts.print_settings import mvn
from googlecloudsdk.command_lib.artifacts.print_settings import npm
from googlecloudsdk.command_lib.artifacts.print_settings import python
from googlecloudsdk.command_lib.artifacts.print_settings import yum
from googlecloudsdk.core import properties
# Version string interpolated into maven/gradle snippets as
# {extension_version} when no service-account key is embedded.
_EXT_VERSION = "2.2.0"
# Error messages shown when a required resource attribute cannot be resolved
# from either command-line flags or gcloud properties.
_PROJECT_NOT_FOUND_ERROR = """\
Failed to find attribute [project]. \
The attribute can be set in the following ways:
- provide the argument [--project] on the command line
- set the property [core/project]"""
_REPO_NOT_FOUND_ERROR = """\
Failed to find attribute [repository]. \
The attribute can be set in the following ways:
- provide the argument [--repository] on the command line
- set the property [artifacts/repository]"""
_LOCATION_NOT_FOUND_ERROR = """\
Failed to find attribute [location]. \
The attribute can be set in the following ways:
- provide the argument [--location] on the command line
- set the property [artifacts/location]"""
def _GetRequiredProjectValue(args):
  """Returns the project from flags/properties, raising if unresolvable."""
  has_project = args.project or properties.VALUES.core.project.Get()
  if not has_project:
    raise ar_exceptions.InvalidInputValueError(_PROJECT_NOT_FOUND_ERROR)
  return ar_util.GetProject(args)
def _GetRequiredRepoValue(args):
  """Returns the repository from flags/properties, raising if unresolvable."""
  has_repo = args.repository or properties.VALUES.artifacts.repository.Get()
  if not has_repo:
    raise ar_exceptions.InvalidInputValueError(_REPO_NOT_FOUND_ERROR)
  return ar_util.GetRepo(args)
def _GetRequiredLocationValue(args):
  """Returns the location from flags/properties, raising if unresolvable."""
  has_location = args.location or properties.VALUES.artifacts.location.Get()
  if not has_location:
    raise ar_exceptions.InvalidInputValueError(_LOCATION_NOT_FOUND_ERROR)
  return ar_util.GetLocation(args)
def _GetLocationAndRepoPath(args, repo_format):
  """Resolves (location, "project/repo") and validates the repo's format.

  Args:
    args: an argparse namespace for the command invocation.
    repo_format: the Repository.FormatValueValuesEnum value expected.

  Returns:
    (location, repo_path) where repo_path is "<project>/<repo>".

  Raises:
    ar_exceptions.InvalidInputValueError: if the repository has a different
      format than repo_format.
  """
  repo_id = _GetRequiredRepoValue(args)
  project = _GetRequiredProjectValue(args)
  location = _GetRequiredLocationValue(args)
  repo_path = "{}/{}".format(project, repo_id)
  repo_resource = ar_requests.GetRepository(
      "projects/{}/locations/{}/repositories/{}".format(
          project, location, repo_id))
  if repo_resource.format != repo_format:
    raise ar_exceptions.InvalidInputValueError(
        "Invalid repository type {}. Valid type is {}.".format(
            repo_resource.format, repo_format))
  return location, repo_path
def _GetLocationRepoPathAndMavenConfig(args, repo_format):
  """Resolves (location, "project/repo", mavenConfig), validating the format.

  Args:
    args: an argparse namespace for the command invocation.
    repo_format: the Repository.FormatValueValuesEnum value expected.

  Returns:
    (location, repo_path, mavenConfig) for the resolved repository.

  Raises:
    ar_exceptions.InvalidInputValueError: if the repository has a different
      format than repo_format.
  """
  repo_id = _GetRequiredRepoValue(args)
  project = _GetRequiredProjectValue(args)
  location = _GetRequiredLocationValue(args)
  repo_path = "{}/{}".format(project, repo_id)
  repo_resource = ar_requests.GetRepository(
      "projects/{}/locations/{}/repositories/{}".format(
          project, location, repo_id))
  if repo_resource.format != repo_format:
    raise ar_exceptions.InvalidInputValueError(
        "Invalid repository type {}. Valid type is {}.".format(
            repo_resource.format, repo_format))
  return location, repo_path, repo_resource.mavenConfig
def IsPublicRepo(project, location, repo):
  """Determine if a repository is public.

  Args:
    project: Project name.
    location: Repository location.
    repo: Repository name.

  Returns:
    bool, True if repository is public.
  """
  resource_name = "projects/{}/locations/{}/repositories/{}".format(
      project, location, repo)
  iam_policy = ar_requests.GetIamPolicy(resource_name)
  # Public == some binding grants an artifactregistry.reader role to allUsers.
  for binding in getattr(iam_policy, "bindings", []):
    grants_all_users = "allUsers" in binding.members
    is_reader_role = "artifactregistry.reader" in binding.role
    if grants_all_users and is_reader_role:
      return True
  return False
def GetAptSettingsSnippet(args):
  """Forms an apt settings snippet to add to the sources.list.d directory.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    An apt settings snippet.
  """
  messages = ar_requests.GetMessages()
  location, repo_path = _GetLocationAndRepoPath(
      args, messages.Repository.FormatValueValuesEnum.APT)
  repo = _GetRequiredRepoValue(args)
  project = _GetRequiredProjectValue(args)
  # Public repos get a snippet without the credential-preparation steps.
  if IsPublicRepo(project, location, repo):
    template = apt.PUBLIC_TEMPLATE
  else:
    template = apt.DEFAULT_TEMPLATE
  return template.format(
      location=location, project=project, repo=repo, repo_path=repo_path)
def GetYumSettingsSnippet(args):
  """Forms a Yum settings snippet to add to the yum.repos.d directory.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    A yum settings snippet.
  """
  messages = ar_requests.GetMessages()
  location, repo_path = _GetLocationAndRepoPath(
      args, messages.Repository.FormatValueValuesEnum.YUM)
  repo = _GetRequiredRepoValue(args)
  project = _GetRequiredProjectValue(args)
  # Public repos get a snippet without the credential-preparation steps.
  is_public = IsPublicRepo(project, location, repo)
  template = yum.PUBLIC_TEMPLATE if is_public else yum.DEFAULT_TEMPLATE
  return template.format(location=location, repo=repo, repo_path=repo_path)
def GetNpmSettingsSnippet(args):
  """Forms an npm settings snippet to add to the .npmrc file.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    An npm settings snippet.
  """
  messages = ar_requests.GetMessages()
  location, repo_path = _GetLocationAndRepoPath(
      args, messages.Repository.FormatValueValuesEnum.NPM)
  registry_path = "{}-npm.pkg.dev/{}/".format(location, repo_path)
  configured_registry = "registry"
  if args.scope:
    # A scope must look like "@name" (at least one char after "@").
    if not args.scope.startswith("@") or len(args.scope) <= 1:
      raise ar_exceptions.InvalidInputValueError(
          'Scope name must start with "@" and be longer than 1 character.')
    configured_registry = "{}:{}".format(args.scope, configured_registry)
  data = {
      "configured_registry": configured_registry,
      "registry_path": registry_path,
      "repo_path": repo_path,
  }
  sa_creds = credentials.GetServiceAccountCreds(args.json_key)
  if not sa_creds:
    return npm.NO_SERVICE_ACCOUNT_TEMPLATE.format(**data)
  # npm expects the service-account key base64-encoded in the auth token.
  data["password"] = base64.b64encode(
      sa_creds.encode("utf-8")).decode("utf-8")
  return npm.SERVICE_ACCOUNT_TEMPLATE.format(**data)
def GetMavenSnippet(args):
  """Forms a maven snippet to add to the pom.xml file.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    str, a maven snippet to add to the pom.xml file.
  """
  messages = ar_requests.GetMessages()
  location, repo_path, maven_cfg = _GetLocationRepoPathAndMavenConfig(
      args, messages.Repository.FormatValueValuesEnum.MAVEN)
  sa_creds = credentials.GetServiceAccountCreds(args.json_key)
  mvn_template = GetMavenTemplate(messages, maven_cfg, sa_creds)
  data = {
      "scheme": "artifactregistry",
      "location": location,
      "server_id": "artifact-registry",
      "repo_path": repo_path,
  }
  if sa_creds:
    # NOTE(review): unlike the npm snippet, the key is not base64-encoded
    # here even though the username is "_json_key_base64" — confirm that
    # GetServiceAccountCreds already returns base64-encoded content.
    data.update(scheme="https", username="_json_key_base64",
                password=sa_creds)
  else:
    data.update(extension_version=_EXT_VERSION)
  return mvn_template.format(**data)
def GetMavenTemplate(messages, maven_cfg, sa_creds):
  """Selects the maven template for the repo version policy and credentials.

  Args:
    messages: Module, the messages module for the API.
    maven_cfg: MavenRepositoryConfig, the maven configuration proto that
      contains the version policy.
    sa_creds: str, service account credentials.

  Returns:
    str, a maven template to add to pom.xml.
  """
  policies = messages.MavenRepositoryConfig.VersionPolicyValueValuesEnum
  policy = maven_cfg.versionPolicy if maven_cfg else None
  if policy == policies.SNAPSHOT:
    if sa_creds:
      return mvn.SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE
    return mvn.NO_SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE
  if policy == policies.RELEASE:
    if sa_creds:
      return mvn.SERVICE_ACCOUNT_RELEASE_TEMPLATE
    return mvn.NO_SERVICE_ACCOUNT_RELEASE_TEMPLATE
  if sa_creds:
    return mvn.SERVICE_ACCOUNT_TEMPLATE
  return mvn.NO_SERVICE_ACCOUNT_TEMPLATE
def GetGradleSnippet(args):
  """Forms a gradle snippet to add to the build.gradle file.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    str, a gradle snippet to add to build.gradle.
  """
  messages = ar_requests.GetMessages()
  location, repo_path, maven_cfg = _GetLocationRepoPathAndMavenConfig(
      args, messages.Repository.FormatValueValuesEnum.MAVEN)
  sa_creds = credentials.GetServiceAccountCreds(args.json_key)
  gradle_template = GetGradleTemplate(messages, maven_cfg, sa_creds)
  data = {"location": location, "repo_path": repo_path}
  if sa_creds:
    data.update(username="_json_key_base64", password=sa_creds)
  else:
    data.update(extension_version=_EXT_VERSION)
  return gradle_template.format(**data)
def GetGradleTemplate(messages, maven_cfg, sa_creds):
  """Selects the gradle template for the repo version policy and credentials.

  Args:
    messages: Module, the messages module for the API.
    maven_cfg: MavenRepositoryConfig, the maven configuration proto that
      contains the version policy.
    sa_creds: str, service account credentials.

  Returns:
    str, a gradle template to add to build.gradle.
  """
  policies = messages.MavenRepositoryConfig.VersionPolicyValueValuesEnum
  policy = maven_cfg.versionPolicy if maven_cfg else None
  if policy == policies.SNAPSHOT:
    if sa_creds:
      return gradle.SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE
    return gradle.NO_SERVICE_ACCOUNT_SNAPSHOT_TEMPLATE
  if policy == policies.RELEASE:
    if sa_creds:
      return gradle.SERVICE_ACCOUNT_RELEASE_TEMPLATE
    return gradle.NO_SERVICE_ACCOUNT_RELEASE_TEMPLATE
  if sa_creds:
    return gradle.SERVICE_ACCOUNT_TEMPLATE
  return gradle.NO_SERVICE_ACCOUNT_TEMPLATE
def GetPythonSettingsSnippet(args):
  """Forms a Python snippet for .pypirc file (twine) and pip.conf file.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    A python snippet.
  """
  messages = ar_requests.GetMessages()
  location, repo_path = _GetLocationAndRepoPath(
      args, messages.Repository.FormatValueValuesEnum.PYTHON)
  repo = _GetRequiredRepoValue(args)
  data = {"location": location, "repo_path": repo_path, "repo": repo}
  sa_creds = credentials.GetServiceAccountCreds(args.json_key)
  if not sa_creds:
    return python.NO_SERVICE_ACCOUNT_SETTING_TEMPLATE.format(**data)
  # NOTE(review): the npm snippet base64-encodes sa_creds while this one
  # passes it through, despite the "_json_key_base64" username in the
  # template — confirm GetServiceAccountCreds output encoding.
  data["password"] = sa_creds
  return python.SERVICE_ACCOUNT_SETTING_TEMPLATE.format(**data)

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for forming settings for Apt."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
DEFAULT_TEMPLATE = """\
# To configure your package manager with this repository, do the following:
# Prepare you VM to access the repository using the following instructions:
# https://cloud.google.com/artifact-registry/docs/os-packages/rpm/configure#prepare-yum
# Configure your VM to access Artifact Registry packages using the following
# command:
sudo tee -a /etc/yum.repos.d/artifact-registry.repo << EOF
[{repo}]
name={repo}
baseurl=https://{location}-yum.pkg.dev/projects/{repo_path}
enabled=1
repo_gpgcheck=0
gpgcheck=0
EOF
# Update Yum:
sudo yum makecache
# For complete setup information, see
# https://cloud.google.com/artifact-registry/docs/os-packages/rpm/configure
"""
# Snippet printed for repositories readable by allUsers: no credential
# preparation step is needed before adding the yum repo definition.
PUBLIC_TEMPLATE = """\
# To configure your package manager with this repository:
# Configure your VM to access Artifact Registry packages using the following
# command:
sudo tee -a /etc/yum.repos.d/artifact-registry.repo << EOF
[{repo}]
name={repo}
baseurl=https://{location}-yum.pkg.dev/projects/{repo_path}
enabled=1
repo_gpgcheck=0
gpgcheck=0
EOF
# Update Yum:
sudo yum makecache
# For complete setup information, see
# https://cloud.google.com/artifact-registry/docs/os-packages/rpm/configure
"""

View File

@@ -0,0 +1,483 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote repo utils for Artifact Registry repository commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from typing import List
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.artifacts import requests as ar_requests
from googlecloudsdk.command_lib.util.apis import arg_utils
GITHUB_URI = "https://github.com"
GOOGLE_MODULE_PROXY = re.compile(
r"(http(|s))://proxy\.golang\.org(|/)"
)
def Args():
  """Adds the remote-<facade>-repo flags."""
  # We need to do this because these flags need to be able to accept either a
  # PublicRepository enum or a string registry URI.
  return [
      base.Argument(
          "--remote-mvn-repo",
          help=_RemoteRepoHelpText(facade="Maven", hide_custom_remotes=False),
      ),
      base.Argument(
          "--remote-docker-repo",
          help=_RemoteRepoHelpText(facade="Docker", hide_custom_remotes=False),
      ),
      base.Argument(
          "--remote-npm-repo",
          help=_RemoteRepoHelpText(facade="Npm", hide_custom_remotes=False),
      ),
      base.Argument(
          "--remote-python-repo",
          help=_RemoteRepoHelpText(facade="Python", hide_custom_remotes=False),
      ),
      # OS-package facades only advertise well-known enum upstreams in help.
      base.Argument(
          "--remote-apt-repo",
          help=_OsPackageRemoteRepoHelpText(
              facade="Apt", hide_custom_remotes=True
          ),
      ),
      base.Argument(
          "--remote-yum-repo",
          help=_OsPackageRemoteRepoHelpText(
              facade="Yum", hide_custom_remotes=True
          ),
      ),
      base.Argument(
          "--remote-go-repo", help=_GoRemoteRepoHelpText()
      ),
      # Optional username/password credentials for the upstream registry.
      base.Argument(
          "--remote-username",
          help="Remote Repository upstream registry username.",
      ),
      base.Argument(
          "--remote-password-secret-version",
          help="""\
          Secret Manager secret version that contains password for the
          remote repository upstream.
          """,
      ),
      # Hidden flags below are not surfaced in public help output.
      base.Argument(
          "--service-directory-config", help="""\
          Service Directory config link for using Private Networks. Format:
          projects/<project>/locations/<location>/namespaces/<namespace>/services/<service>
          """, hidden=True
      ),
      base.Argument(
          "--remote-repo",
          help=_CommonRemoteRepoHelpText(), hidden=True
      ),
  ]
def IsRemoteRepoRequest(repo_args) -> bool:
  """Returns whether or not the repo mode specifies a remote repository."""
  if not hasattr(repo_args, "mode"):
    return False
  return arg_utils.ChoiceToEnumName(repo_args.mode) == "REMOTE_REPOSITORY"
def AppendRemoteRepoConfigToRequest(messages, repo_args, request):
  """Adds remote repository config to CreateRepositoryRequest or UpdateRepositoryRequest.

  Args:
    messages: Module, the messages module for the API.
    repo_args: argparse namespace carrying the --remote-* flags.
    request: the Create/UpdateRepositoryRequest to mutate.

  Returns:
    The request with request.repository.remoteRepositoryConfig populated —
    except when --remote-repo is set to a non-URI value, in which case the
    request is returned unchanged.

  Raises:
    ar_exceptions.InvalidInputValueError: if the upstream value is invalid
      for the selected facade.
  """
  remote_cfg = messages.RemoteRepositoryConfig()
  remote_cfg.description = repo_args.remote_repo_config_desc
  # Credentials
  username = repo_args.remote_username
  secret = repo_args.remote_password_secret_version
  if username or secret:
    creds = messages.UpstreamCredentials()
    creds.usernamePasswordCredentials = messages.UsernamePasswordCredentials()
    if username:
      creds.usernamePasswordCredentials.username = username
    if secret:
      creds.usernamePasswordCredentials.passwordSecretVersion = secret
    remote_cfg.upstreamCredentials = creds
  # Disable Remote Validation
  if repo_args.disable_remote_validation:
    remote_cfg.disableUpstreamValidation = True
  # Enable/Disable Ingestion Attestation
  if repo_args.enable_ingestion_attestation:
    remote_cfg.enableIngestionAttestation = True
  else:
    remote_cfg.enableIngestionAttestation = False
  # Service Directory config for Private networks
  sd_config = repo_args.service_directory_config
  if sd_config:
    remote_cfg.serviceDirectoryConfig = messages.ServiceDirectoryConfig()
    remote_cfg.serviceDirectoryConfig.service = sd_config
  # Each facade branch accepts a well-known PublicRepository enum, a custom
  # http(s) registry URI, or (except Go) an Artifact Registry repo name.
  # MAVEN
  if repo_args.remote_mvn_repo:
    facade, remote_input = "Maven", repo_args.remote_mvn_repo
    enum_message = _ChoiceToRemoteEnum(facade, remote_input)
    if enum_message: # input is PublicRepository
      remote_cfg.mavenRepository = messages.MavenRepository()
      remote_cfg.mavenRepository.publicRepository = enum_message
    elif _IsRemoteURI(remote_input): # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    elif _IsARRemote(remote_input): # input is ArtifactRegistryRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_input)
  # DOCKER
  elif repo_args.remote_docker_repo:
    facade, remote_input = "Docker", repo_args.remote_docker_repo
    enum_message = _ChoiceToRemoteEnum(facade, remote_input)
    if enum_message: # input is PublicRepository
      remote_cfg.dockerRepository = messages.DockerRepository()
      remote_cfg.dockerRepository.publicRepository = enum_message
    elif _IsRemoteURI(remote_input): # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    elif _IsARRemote(remote_input): # input is ArtifactRegistryRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_input)
  # NPM
  elif repo_args.remote_npm_repo:
    facade, remote_input = "Npm", repo_args.remote_npm_repo
    enum_message = _ChoiceToRemoteEnum(facade, remote_input)
    if enum_message: # input is PublicRepository
      remote_cfg.npmRepository = messages.NpmRepository()
      remote_cfg.npmRepository.publicRepository = enum_message
    elif _IsRemoteURI(remote_input): # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    elif _IsARRemote(remote_input): # input is ArtifactRegistryRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_input)
  # PYTHON
  elif repo_args.remote_python_repo:
    facade, remote_input = "Python", repo_args.remote_python_repo
    enum_message = _ChoiceToRemoteEnum(facade, remote_input)
    if enum_message: # input is PublicRepository
      remote_cfg.pythonRepository = messages.PythonRepository()
      remote_cfg.pythonRepository.publicRepository = enum_message
    elif _IsRemoteURI(remote_input): # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    elif _IsARRemote(remote_input): # input is ArtifactRegistryRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_input)
  # APT
  elif repo_args.remote_apt_repo:
    facade, remote_base, remote_path = (
        "Apt",
        repo_args.remote_apt_repo,
        repo_args.remote_apt_repo_path,
    )
    enum_message = _ChoiceToRemoteEnum(facade, remote_base)
    if enum_message: # input is PublicRepository
      remote_cfg.aptRepository = messages.AptRepository()
      remote_cfg.aptRepository.publicRepository = (
          messages.GoogleDevtoolsArtifactregistryV1RemoteRepositoryConfigAptRepositoryPublicRepository()
      )
      remote_cfg.aptRepository.publicRepository.repositoryBase = enum_message
      remote_cfg.aptRepository.publicRepository.repositoryPath = remote_path
    elif _IsRemoteURI(_OsPackageUri(remote_base, remote_path)):
      # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = _OsPackageUri(remote_base, remote_path)
    elif _IsARRemote(remote_base): # input is ArtifactRegistryRepository
      if remote_path:
        raise ar_exceptions.InvalidInputValueError(
            "--remote-apt-repo-path is not supported for Artifact Registry"
            " Repository upstream."
        )
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_base
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_base)
  # YUM
  elif repo_args.remote_yum_repo:
    facade, remote_base, remote_path = (
        "Yum",
        repo_args.remote_yum_repo,
        repo_args.remote_yum_repo_path,
    )
    enum_message = _ChoiceToRemoteEnum(facade, remote_base)
    if enum_message: # input is PublicRepository
      remote_cfg.yumRepository = messages.YumRepository()
      remote_cfg.yumRepository.publicRepository = (
          messages.GoogleDevtoolsArtifactregistryV1RemoteRepositoryConfigYumRepositoryPublicRepository()
      )
      remote_cfg.yumRepository.publicRepository.repositoryBase = enum_message
      remote_cfg.yumRepository.publicRepository.repositoryPath = remote_path
    elif _IsRemoteURI(_OsPackageUri(remote_base, remote_path)):
      # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = _OsPackageUri(remote_base, remote_path)
    elif _IsARRemote(remote_base): # input is ArtifactRegistryRepository
      if remote_path:
        raise ar_exceptions.InvalidInputValueError(
            "--remote-yum-repo-path is not supported for Artifact Registry"
            " Repository upstream."
        )
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_base
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_base)
  # GO
  elif repo_args.remote_go_repo:
    facade, remote_input = "Go", repo_args.remote_go_repo
    # Go does not have Public enums
    if _IsRemoteURI(remote_input): # input is CustomRepository
      if remote_input[-1] == "/":
        remote_input = remote_input[:-1]
      # Only GitHub or the Google Go module proxy are accepted for Go.
      if remote_input != GITHUB_URI and not GOOGLE_MODULE_PROXY.match(
          remote_input
      ):
        _RaiseCustomUpstreamUnsupportedError(
            facade, remote_input, ["https://proxy.golang.org"]
        )
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    elif _IsARRemote(remote_input): # input is ArtifactRegistryRepository
      _RaiseArtifactRegistryUpstreamUnsupportedError(facade)
    else: # raise error
      _RaiseRemoteRepoUpstreamError(facade, remote_input)
  # COMMON
  elif repo_args.remote_repo:
    remote_input = repo_args.remote_repo
    if _IsRemoteURI(remote_input): # input is CustomRepository
      remote_cfg.commonRepository = messages.CommonRemoteRepository()
      remote_cfg.commonRepository.uri = remote_input
    else:
      # NOTE(review): unlike the facade-specific branches, an invalid
      # --remote-repo value is silently ignored (no error raised) —
      # confirm this is intentional for the hidden flag.
      return request
  request.repository.remoteRepositoryConfig = remote_cfg
  return request
def _RemoteRepoHelpText(facade: str, hide_custom_remotes: bool) -> str:
  """Builds the --remote-<facade>-repo flag help text.

  When hide_custom_remotes is True, only the well-known enum upstreams are
  advertised; otherwise custom http/https registry URIs are documented too.
  """
  if hide_custom_remotes:
    return """\
({facade} only) Repo upstream for {facade_lower} remote repository.
REMOTE_{command}_REPO must be one of: [{enums}].
""".format(
        facade=facade,
        facade_lower=facade.lower(),
        command=_LanguagePackageCommandName(facade),
        enums=_EnumsStrForFacade(facade),
    )
  return """\
({facade} only) Repo upstream for {facade_lower} remote repository.
REMOTE_{command}_REPO can be either:
- one of the following enums: [{enums}].
- an http/https custom registry uri (ex: https://my.{facade_lower}.registry)
""".format(
      facade=facade,
      facade_lower=facade.lower(),
      command=_LanguagePackageCommandName(facade),
      enums=_EnumsStrForFacade(facade),
  )
def _GoRemoteRepoHelpText() -> str:
return (
'(Go only) Repo upstream for Go remote repository. '
'"https://proxy.golang.org/" is the only valid value.'
)
def _CommonRemoteRepoHelpText() -> str:
return (
'An upstream for a given remote repository. Ex: "https://github.com"'
', "https://docker.io/v2/" are valid values for their given formats of'
' Go and Docker respectively.'
)
def _OsPackageRemoteRepoHelpText(facade: str, hide_custom_remotes: bool) -> str:
  """Builds the --remote-apt-repo / --remote-yum-repo flag help text.

  Mirrors _RemoteRepoHelpText but names the metavar REMOTE_<FACADE>_REPO
  and describes the value as a repository *base*.
  """
  if hide_custom_remotes:
    return """\
({facade} only) Repository base for {facade_lower} remote repository.
REMOTE_{facade_upper}_REPO must be one of: [{enums}].
""".format(
        facade=facade,
        facade_lower=facade.lower(),
        facade_upper=facade.upper(),
        enums=_EnumsStrForFacade(facade),
    )
  return """\
({facade} only) Repository base for {facade_lower} remote repository.
REMOTE_{facade_upper}_REPO can be either:
- one of the following enums: [{enums}].
- an http/https custom registry uri (ex: https://my.{facade_lower}.registry)
""".format(
      facade=facade,
      facade_lower=facade.lower(),
      facade_upper=facade.upper(),
      enums=_EnumsStrForFacade(facade),
  )
def _LanguagePackageCommandName(facade: str) -> str:
if facade == "Maven":
return "MVN"
return facade.upper()
def _ChoiceToRemoteEnum(facade: str, remote_input: str):
  """Converts the remote repo input to a PublicRepository Enum message or None."""
  candidate_name = arg_utils.ChoiceToEnumName(remote_input)
  enum_type = _EnumsMessageForFacade(facade)
  try:
    return enum_type.lookup_by_name(candidate_name)
  except KeyError:
    # Not a well-known upstream for this facade.
    return None
def _EnumsMessageForFacade(facade: str):
  """Returns the PublicRepository enum messages for a facade.

  Returns None for facades without a mapping (e.g. "Go").
  """
  # Language facades expose PublicRepositoryValueValuesEnum; OS-package
  # facades (Apt/Yum) expose RepositoryBaseValueValuesEnum instead.
  facade_to_enum = {
      "Maven": (
          ar_requests.GetMessages()
          .MavenRepository()
          .PublicRepositoryValueValuesEnum
      ),
      "Docker": (
          ar_requests.GetMessages()
          .DockerRepository()
          .PublicRepositoryValueValuesEnum
      ),
      "Npm": (
          ar_requests.GetMessages()
          .NpmRepository()
          .PublicRepositoryValueValuesEnum
      ),
      "Python": (
          ar_requests.GetMessages()
          .PythonRepository()
          .PublicRepositoryValueValuesEnum
      ),
      "Apt": (
          ar_requests.GetMessages()
          .GoogleDevtoolsArtifactregistryV1RemoteRepositoryConfigAptRepositoryPublicRepository()
          .RepositoryBaseValueValuesEnum
      ),
      "Yum": (
          ar_requests.GetMessages()
          .GoogleDevtoolsArtifactregistryV1RemoteRepositoryConfigYumRepositoryPublicRepository()
          .RepositoryBaseValueValuesEnum
      ),
      "Ruby": (
          # NOTE(review): unlike the other entries this is a message
          # *instance*, not an enum type; lookup_by_name/to_dict would fail
          # if "Ruby" were ever passed here — confirm intended.
          ar_requests.GetMessages()
          .CommonRemoteRepository()
      ),
  }
  if facade not in facade_to_enum:
    return None
  return facade_to_enum[facade]
def _EnumsStrForFacade(facade: str) -> str:
  """Returns the human-readable PublicRepository enum strings for a facade."""
  enum_type = _EnumsMessageForFacade(facade)
  return _EnumsMessageToStr(enum_type)
def _EnumsMessageToStr(enums) -> str:
"""Returns the human-readable PublicRepository enum strings."""
if enums is None:
return ""
return ", ".join(
arg_utils.EnumNameToChoice(name)
for name, number in sorted(enums.to_dict().items())
if number != 0 # Ignore UNSPECIFIED enum values.
)
def _OsPackageUri(remote_base, remote_path):
# Don't concatenate if remote_path not given.
if not remote_path:
return remote_base
# Add '/' to end of remote_base if not already present.
if remote_base[-1] != "/":
remote_base = remote_base + "/"
return remote_base + remote_path
def _IsRemoteURI(remote_input: str) -> bool:
return remote_input.startswith("https://") or remote_input.startswith(
"http://"
)
def _IsARRemote(remote_input: str) -> bool:
return remote_input.startswith("projects/")
def _RaiseRemoteRepoUpstreamError(facade: str, remote_input: str):
  """Raises InvalidInputValueError describing valid upstreams for the facade.

  Args:
    facade: str, the repository format (e.g. "Maven", "Docker").
    remote_input: str, the rejected user-supplied upstream value.

  Raises:
    ar_exceptions.InvalidInputValueError: always.
  """
  well_known_enum_requirement = ""
  enums_str = _EnumsStrForFacade(facade)
  if enums_str:
    # Bug fix: the message previously interpolated the enum *type* returned
    # by _EnumsMessageForFacade, so users saw a class repr instead of the
    # readable choice list used in the flag help text.
    well_known_enum_requirement = (
        " If you intended to enter a well known upstream repo, valid choices"
        f" are: [{enums_str}]."
    )
  custom_uri_requirement = (
      " If you intended to enter a custom upstream URI, this value must start"
      " with 'https://' or 'http://'."
  )
  raise ar_exceptions.InvalidInputValueError(
      "Invalid repo upstream for remote repository:"
      f" '{remote_input}'.{well_known_enum_requirement}{custom_uri_requirement}"
  )
def _RaiseArtifactRegistryUpstreamUnsupportedError(facade: str):
  """Raises when an AR-repository upstream is given for an unsupported facade."""
  message = f"Artifact Registry upstream is not supported for {facade}."
  raise ar_exceptions.InvalidInputValueError(message)
def _RaiseCustomUpstreamUnsupportedError(
    facade: str, remote_input: str, allowed: List[str]
):
  """Raises when a custom upstream URI is outside the facade's allow-list."""
  allowed_choices = ", ".join(allowed)
  message = (
      f"Custom upstream {remote_input} is not supported for {facade}. Valid"
      f" choices are [{allowed_choices}].\n"
  )
  raise ar_exceptions.InvalidInputValueError(message)

View File

@@ -0,0 +1,504 @@
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for making API calls."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from apitools.base.py import http_wrapper
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.api_lib.cloudkms import iam as kms_iam
from googlecloudsdk.api_lib.iam import util as iam_api
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import resources
ARTIFACTREGISTRY_API_NAME = "artifactregistry"
ARTIFACTREGISTRY_API_VERSION = "v1"
STORAGE_API_NAME = "storage"
STORAGE_API_VERSION = "v1"
_GCR_PERMISSION = "storage.objects.list"
CRYPTO_KEY_COLLECTION = "cloudkms.projects.locations.keyRings.cryptoKeys"
def GetStorageClient():
  """Returns a Cloud Storage v1 API client instance."""
  return apis.GetClientInstance(STORAGE_API_NAME, STORAGE_API_VERSION)
def GetStorageMessages():
  """Returns the Cloud Storage v1 messages module."""
  return apis.GetMessagesModule(STORAGE_API_NAME, STORAGE_API_VERSION)
def SkipRetryOn500Errors(response):
  """Wrap http_wrapper.CheckResponse to skip retry on 5xx (>= 500) errors.

  Raising HttpError here short-circuits apitools' retry handling —
  presumably to avoid re-issuing non-idempotent mutations after a server
  error (TODO: confirm). Non-5xx responses fall through to the default
  CheckResponse behavior. (Docstring previously said "501"; the code
  treats every status >= 500 this way.)
  """
  if response.status_code >= 500:
    raise apitools_exceptions.HttpError.FromResponse(response)
  return http_wrapper.CheckResponse(response)
def GetClient(skip_activation_prompt=False):
  """Returns an Artifact Registry v1 client that does not retry 5xx errors."""
  ar_client = apis.GetClientInstance(
      ARTIFACTREGISTRY_API_NAME,
      ARTIFACTREGISTRY_API_VERSION,
      skip_activation_prompt=skip_activation_prompt,
  )
  # Surface 5xx responses immediately instead of letting apitools retry.
  ar_client.check_response_func = SkipRetryOn500Errors
  return ar_client
def GetMessages():
  """Returns the Artifact Registry v1 messages module."""
  return apis.GetMessagesModule(
      ARTIFACTREGISTRY_API_NAME, ARTIFACTREGISTRY_API_VERSION)
def GetClientV1beta2():
  """Returns an Artifact Registry client for the v1beta2 API surface."""
  return apis.GetClientInstance(ARTIFACTREGISTRY_API_NAME, "v1beta2")
def GetMessagesV1beta2():
  """Returns the Artifact Registry v1beta2 messages module."""
  return apis.GetMessagesModule(ARTIFACTREGISTRY_API_NAME, "v1beta2")
def DeleteTag(client, messages, tag):
  """Deletes a tag by its name.

  Args:
    client: the Artifact Registry API client.
    messages: the Artifact Registry API messages module.
    tag: str, the full resource name of the tag.

  Raises:
    ar_exceptions.ArtifactRegistryError: if the delete response is not Empty.
  """
  request = (
      messages
      .ArtifactregistryProjectsLocationsRepositoriesPackagesTagsDeleteRequest(
          name=tag))
  err = client.projects_locations_repositories_packages_tags.Delete(request)
  if not isinstance(err, messages.Empty):
    raise ar_exceptions.ArtifactRegistryError(
        "Failed to delete tag {}: {}".format(tag, err))
def CreateDockerTag(client, messages, docker_tag, docker_version):
  """Creates a tag associated with the given docker version.

  Args:
    client: the Artifact Registry API client.
    messages: the Artifact Registry API messages module.
    docker_tag: object exposing GetTagName()/GetPackageName() and a .tag id.
    docker_version: object exposing GetVersionName().

  Returns:
    The created Tag resource.
  """
  new_tag = messages.Tag(
      name=docker_tag.GetTagName(),
      version=docker_version.GetVersionName())
  request = (
      messages
      .ArtifactregistryProjectsLocationsRepositoriesPackagesTagsCreateRequest(
          parent=docker_tag.GetPackageName(),
          tag=new_tag,
          tagId=docker_tag.tag))
  return client.projects_locations_repositories_packages_tags.Create(request)
def GetTag(client, messages, tag):
  """Gets a tag by its full resource name."""
  request = (
      messages
      .ArtifactregistryProjectsLocationsRepositoriesPackagesTagsGetRequest(
          name=tag))
  return client.projects_locations_repositories_packages_tags.Get(request)
def DeleteVersion(client, messages, version):
  """Deletes a version by its full resource name."""
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesVersionsDeleteRequest(
      name=version)
  return client.projects_locations_repositories_packages_versions.Delete(
      request)
def DeletePackage(client, messages, package):
  """Deletes a package by its full resource name."""
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesDeleteRequest(
      name=package)
  return client.projects_locations_repositories_packages.Delete(request)
def GetVersion(client, messages, version):
  """Gets a version by its full resource name.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    version: str, the full resource name of the version.

  Returns:
    The requested Version resource.
  """
  # Bug fix: use the Versions service with the caller-supplied client and
  # messages. The previous implementation shadowed both parameters with
  # freshly-constructed instances and queried the *Tags* service, so it
  # returned a tag (or an error) instead of the requested version.
  get_ver_req = (
      messages
      .ArtifactregistryProjectsLocationsRepositoriesPackagesVersionsGetRequest(
          name=version))
  return client.projects_locations_repositories_packages_versions.Get(
      get_ver_req)
def GetVersionFromTag(client, messages, tag):
  """Resolves a tag name to the short ID of the version it points at.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    tag: str, the full resource name of the tag.

  Returns:
    str, the final path component of the version resource name.

  Raises:
    ar_exceptions.ArtifactRegistryError: If the tag's version field is
      missing or malformed.
  """
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesTagsGetRequest(
      name=tag)
  tag_resource = client.projects_locations_repositories_packages_tags.Get(
      request)
  version_name = tag_resource.version
  # A well-formed version resource name has exactly 10 "/"-separated parts.
  if not version_name or len(version_name.split("/")) != 10:
    raise ar_exceptions.ArtifactRegistryError(
        "Internal error. Corrupted tag: {}".format(tag))
  return version_name.split("/")[-1]
def ListTags(client, messages, package, page_size=None, server_filter=None):
  """Lists all tags under a package with the given package name.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    package: str, the full resource name of the parent package.
    page_size: int, optional page size for paging.
    server_filter: str, optional server-side filter expression.

  Returns:
    list of Tag resources.
  """
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesTagsListRequest(
      parent=package, filter=server_filter)
  tags = list_pager.YieldFromList(
      client.projects_locations_repositories_packages_tags,
      request,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="tags")
  return list(tags)
def ListVersionTags(client, messages, package, version, page_size=None):
  """Lists tags associated with the given version."""
  version_filter = "version=\"{}\"".format(version)
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesTagsListRequest(
      parent=package, filter=version_filter)
  tags = list_pager.YieldFromList(
      client.projects_locations_repositories_packages_tags,
      request,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="tags")
  return list(tags)
def ListPackages(client, messages, repo, page_size=None,
                 order_by=None, limit=None, server_filter=None):
  """Lists all packages under a repository.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    repo: str, the full resource name of the repository.
    page_size: int, optional page size for paging.
    order_by: str, optional server-side ordering expression.
    limit: int, optional maximum number of packages to return.
    server_filter: str, optional server-side filter expression.

  Returns:
    list of Package resources.
  """
  request = (
      messages.ArtifactregistryProjectsLocationsRepositoriesPackagesListRequest(
          parent=repo, orderBy=order_by, filter=server_filter))
  packages = list_pager.YieldFromList(
      client.projects_locations_repositories_packages,
      request,
      limit=limit,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="packages")
  return list(packages)
def ListVersions(client, messages, pkg, version_view=None,
                 page_size=None, order_by=None, limit=None, server_filter=None):
  """Lists all versions under a package.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    pkg: str, the full resource name of the parent package.
    version_view: optional view enum controlling returned detail.
    page_size: int, optional page size for paging.
    order_by: str, optional server-side ordering expression.
    limit: int, optional maximum number of versions to return.
    server_filter: str, optional server-side filter expression.

  Returns:
    list of Version resources.
  """
  # Use the smaller of limit and page_size as the per-request batch size.
  batch = limit
  if limit is None or (page_size is not None and page_size < limit):
    batch = page_size
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesVersionsListRequest(
      parent=pkg, view=version_view, orderBy=order_by, filter=server_filter)
  versions = list_pager.YieldFromList(
      client.projects_locations_repositories_packages_versions,
      request,
      limit=limit,
      batch_size=batch,
      batch_size_attribute="pageSize",
      field="versions")
  return list(versions)
def ListRepositories(project, page_size=None,
                     order_by=None, server_filter=None):
  """Lists all repositories under a project.

  Args:
    project: str, the parent resource name ("projects/.../locations/...").
    page_size: int, optional page size for paging.
    order_by: str, optional server-side ordering expression.
    server_filter: str, optional server-side filter expression.

  Returns:
    list of Repository resources.
  """
  client = GetClient()
  messages = GetMessages()
  request = (
      messages.ArtifactregistryProjectsLocationsRepositoriesListRequest(
          parent=project, orderBy=order_by, filter=server_filter))
  repositories = list_pager.YieldFromList(
      client.projects_locations_repositories,
      request,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="repositories")
  return list(repositories)
def ListFiles(client, messages, repo, server_filter=None,
              page_size=None, order_by=None):
  """Lists all files under a repository.

  Args:
    client: The Artifact Registry API client.
    messages: The Artifact Registry messages module.
    repo: str, the full resource name of the repository.
    server_filter: str, optional server-side filter expression.
    page_size: int, optional page size for paging.
    order_by: str, optional server-side ordering expression.

  Returns:
    list of File resources.
  """
  request = (
      messages.ArtifactregistryProjectsLocationsRepositoriesFilesListRequest(
          parent=repo, filter=server_filter, orderBy=order_by))
  files = list_pager.YieldFromList(
      client.projects_locations_repositories_files,
      request,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="files")
  return list(files)
def GetRepository(repo, skip_activation_prompt=False):
  """Gets the repository given its full resource name."""
  client = GetClient(skip_activation_prompt)
  messages = GetMessages()
  request = messages.ArtifactregistryProjectsLocationsRepositoriesGetRequest(
      name=repo)
  return client.projects_locations_repositories.Get(request)
def GetIamPolicy(repo_res):
  """Gets the IAM policy for the specified repository."""
  client = GetClient()
  messages = GetMessages()
  request = messages.ArtifactregistryProjectsLocationsRepositoriesGetIamPolicyRequest(
      resource=repo_res)
  return client.projects_locations_repositories.GetIamPolicy(request)
def SetIamPolicy(repo_res, policy):
  """Sets the IAM policy for the specified repository."""
  client = GetClient()
  messages = GetMessages()
  # pylint: disable=line-too-long
  request = messages.ArtifactregistryProjectsLocationsRepositoriesSetIamPolicyRequest(
      resource=repo_res,
      setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy),
  )
  return client.projects_locations_repositories.SetIamPolicy(request)
def CreateRepository(
    project, location, repository, skip_activation_prompt=False
):
  """Creates the repository under the given project and location.

  Args:
    project: str: The project to create the repository in.
    location: str: The region to create the repository in.
    repository: messages.Repository to create.
    skip_activation_prompt: bool: If true, do not prompt for service activation

  Returns:
    The resulting operation from the create request.
  """
  client = GetClient(skip_activation_prompt)
  messages = GetMessages()
  # The repository ID is the final component of the repository's name.
  repository_id = repository.name.split("/")[-1]
  parent = "projects/{}/locations/{}".format(project, location)
  request = messages.ArtifactregistryProjectsLocationsRepositoriesCreateRequest(
      parent=parent,
      repositoryId=repository_id,
      repository=repository)
  return client.projects_locations_repositories.Create(request)
def GetPackage(package):
  """Gets the package given its full resource name."""
  client = GetClient()
  messages = GetMessages()
  request = messages.ArtifactregistryProjectsLocationsRepositoriesPackagesGetRequest(
      name=package)
  return client.projects_locations_repositories_packages.Get(request)
def ListLocations(project_id, page_size=None):
  """Returns the sorted list of location IDs available to a project."""
  client = GetClientV1beta2()
  messages = GetMessagesV1beta2()
  request = messages.ArtifactregistryProjectsLocationsListRequest(
      name="projects/" + project_id)
  location_ids = [
      loc.locationId
      for loc in list_pager.YieldFromList(
          client.projects_locations,
          request,
          batch_size=page_size,
          batch_size_attribute="pageSize",
          field="locations")
  ]
  return sorted(location_ids)
def TestStorageIAMPermission(bucket, project):
  """Tests storage IAM permission for a given bucket for the user project."""
  storage_client = GetStorageClient()
  storage_messages = GetStorageMessages()
  request = storage_messages.StorageBucketsTestIamPermissionsRequest(
      bucket=bucket, permissions=_GCR_PERMISSION, userProject=project)
  return storage_client.buckets.TestIamPermissions(request)
def GetCryptoKeyPolicy(kms_key):
  """Gets the IAM policy for a given crypto key."""
  key_ref = resources.REGISTRY.ParseRelativeName(
      relative_name=kms_key, collection=CRYPTO_KEY_COLLECTION)
  return kms_iam.GetCryptoKeyIamPolicy(key_ref)
def AddCryptoKeyPermission(kms_key, service_account):
  """Grants the Encrypter/Decrypter role to the given service account."""
  key_ref = resources.REGISTRY.ParseRelativeName(
      relative_name=kms_key, collection=CRYPTO_KEY_COLLECTION)
  return kms_iam.AddPolicyBindingToCryptoKey(
      key_ref, service_account,
      "roles/cloudkms.cryptoKeyEncrypterDecrypter")
def GetServiceAccount(service_account):
  """Gets the service account resource given its email."""
  iam_client, iam_messages = iam_api.GetClientAndMessages()
  request = iam_messages.IamProjectsServiceAccountsGetRequest(
      name=iam_util.EmailToAccountResourceName(service_account))
  return iam_client.projects_serviceAccounts.Get(request)
def GetProjectSettings(project_id):
  """Returns the Artifact Registry project settings for a project."""
  client = GetClient()
  messages = GetMessages()
  request = messages.ArtifactregistryProjectsGetProjectSettingsRequest(
      name="projects/" + project_id + "/projectSettings")
  return client.projects.GetProjectSettings(request)
def GetVPCSCConfig(project_id, location_id):
  """Gets VPC SC Config on the project and location."""
  client = GetClient()
  messages = GetMessages()
  config_name = ("projects/" + project_id + "/locations/" + location_id +
                 "/vpcscConfig")
  request = messages.ArtifactregistryProjectsLocationsGetVpcscConfigRequest(
      name=config_name)
  return client.projects_locations.GetVpcscConfig(request)
def AllowVPCSCConfig(project_id, location_id):
  """Allows requests in Remote Repository inside VPC SC perimeter."""
  client = GetClient()
  messages = GetMessages()
  config_name = ("projects/" + project_id + "/locations/" + location_id +
                 "/vpcscConfig")
  config = messages.VPCSCConfig(
      name=config_name,
      vpcscPolicy=messages.VPCSCConfig.VpcscPolicyValueValuesEnum.ALLOW)
  update_request = messages.ArtifactregistryProjectsLocationsUpdateVpcscConfigRequest(
      name=config_name, vPCSCConfig=config)
  return client.projects_locations.UpdateVpcscConfig(update_request)
def DenyVPCSCConfig(project_id, location_id):
  """Denies requests in Remote Repository inside VPC SC perimeter."""
  client = GetClient()
  messages = GetMessages()
  config_name = ("projects/" + project_id + "/locations/" + location_id +
                 "/vpcscConfig")
  config = messages.VPCSCConfig(
      name=config_name,
      vpcscPolicy=messages.VPCSCConfig.VpcscPolicyValueValuesEnum.DENY)
  update_request = messages.ArtifactregistryProjectsLocationsUpdateVpcscConfigRequest(
      name=config_name, vPCSCConfig=config)
  return client.projects_locations.UpdateVpcscConfig(update_request)
def EnableUpgradeRedirection(project_id):
  """Enables redirection from gcr.io to Artifact Registry for a project."""
  states = GetMessages().ProjectSettings.LegacyRedirectionStateValueValuesEnum
  return SetUpgradeRedirectionState(
      project_id, states.REDIRECTION_FROM_GCR_IO_ENABLED)
def DisableUpgradeRedirection(project_id):
  """Disables redirection from gcr.io to Artifact Registry for a project."""
  states = GetMessages().ProjectSettings.LegacyRedirectionStateValueValuesEnum
  return SetUpgradeRedirectionState(
      project_id, states.REDIRECTION_FROM_GCR_IO_DISABLED)
def FinalizeUpgradeRedirection(project_id):
  """Finalizes redirection from gcr.io to Artifact Registry for a project."""
  states = GetMessages().ProjectSettings.LegacyRedirectionStateValueValuesEnum
  return SetUpgradeRedirectionState(
      project_id, states.REDIRECTION_FROM_GCR_IO_FINALIZED)
def SetUpgradeRedirectionState(
    project_id, redirection_state, pull_percent=None
):
  """Sets the upgrade redirection state for the supplied project.

  Args:
    project_id: str, the project whose settings are updated.
    redirection_state: LegacyRedirectionStateValueValuesEnum, new state.
    pull_percent: int, optional percentage of pulls to redirect.

  Returns:
    The updated ProjectSettings resource.
  """
  client = GetClient()
  messages = GetMessages()
  project_settings = messages.ProjectSettings(
      legacyRedirectionState=redirection_state)
  update_mask = "legacy_redirection_state"
  if pull_percent:
    project_settings.pullPercent = pull_percent
    # Bug fix: pull_percent must also appear in the update mask, otherwise
    # the server ignores the pullPercent field on the patched settings.
    update_mask += ",pull_percent"
  update_settings_req = (
      messages.ArtifactregistryProjectsUpdateProjectSettingsRequest(
          name="projects/" + project_id + "/projectSettings",
          projectSettings=project_settings,
          updateMask=update_mask,
      )
  )
  return client.projects.UpdateProjectSettings(update_settings_req)
# TODO(b/339473586): If possible annotate list DockerImage output.
def ListDockerImages(parent: str, page_size: int, limit: int):
  """Lists all docker images under a repository.

  Args:
    parent: The full resource name of the repository.
    page_size: Page size for paging.
    limit: Maximum number of images to return.

  Returns:
    list of DockerImage resources.
  """
  client = GetClient()
  messages = GetMessages()
  request = messages.ArtifactregistryProjectsLocationsRepositoriesDockerImagesListRequest(
      parent=parent
  )
  images = list_pager.YieldFromList(
      client.projects_locations_repositories_dockerImages,
      request,
      batch_size=page_size,
      batch_size_attribute="pageSize",
      field="dockerImages",
      limit=limit,
  )
  return list(images)
def CopyRepository(source_repo, dest_repo_name):
  """Copies the contents of one repository into another.

  Args:
    source_repo: str, full resource name of the source repository.
    dest_repo_name: str, full resource name of the destination repository.

  Returns:
    The operation from the copy request.
  """
  client = GetClient()
  messages = GetMessages()
  copy_request = messages.CopyRepositoryRequest(sourceRepository=source_repo)
  request = messages.ArtifactregistryProjectsLocationsRepositoriesCopyRepositoryRequest(
      destinationRepository=dest_repo_name,
      copyRepositoryRequest=copy_request,
  )
  return client.projects_locations_repositories.CopyRepository(request)
def ExportArtifact(version, tag, gcs_destination):
  """Exports an artifact, identified by version or tag, to a GCS path.

  Args:
    version: version resource reference, or None.
    tag: tag resource reference, or None.
    gcs_destination: str, the GCS path to export to.

  Returns:
    The operation from the export request.

  Raises:
    ValueError: If neither version nor tag is specified.
  """
  client = GetClient()
  messages = GetMessages()
  if version:
    source_ref = version
    export_request = messages.ExportArtifactRequest(
        gcsPath=gcs_destination,
        sourceVersion=version.RelativeName(),
    )
  elif tag:
    source_ref = tag
    export_request = messages.ExportArtifactRequest(
        gcsPath=gcs_destination,
        sourceTag=tag.RelativeName(),
    )
  else:
    raise ValueError("Either version or tag must be specified.")
  # The repository is the grandparent of both version and tag resources.
  request = messages.ArtifactregistryProjectsLocationsRepositoriesExportArtifactRequest(
      repository=source_ref.Parent().Parent().RelativeName(),
      exportArtifactRequest=export_request,
  )
  return client.projects_locations_repositories.ExportArtifact(request)

View File

@@ -0,0 +1,158 @@
project:
name: project
collection: artifactregistry.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: |
Cloud project for the {resource}.
property: core/project
location:
name: location
collection: artifactregistry.projects.locations
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: |
Location of the {resource}. Overrides the default artifacts/location property value
for this command invocation. To configure the default location, use the command:
gcloud config set artifacts/location.
property: artifacts/location
repository:
name: repository
collection: artifactregistry.projects.locations.repositories
request_id_field: repository.name
attributes:
- *project
- *location
- &repository
parameter_name: repositoriesId
attribute_name: repository
help: |
The repository associated with the {resource}. Overrides the default artifacts/repository property value
for this command invocation. To configure the default repository, use the command:
gcloud config set artifacts/repository.
property: artifacts/repository
repository_without_property:
name: repository
collection: artifactregistry.projects.locations.repositories
request_id_field: repository.name
attributes:
- *project
- *location
- &repository_without_property
parameter_name: repositoriesId
attribute_name: repository
help: |
The repository associated with the {resource}.
mavenArtifact:
name: mavenArtifact
collection: artifactregistry.projects.locations.repositories.mavenArtifacts
attributes:
- *project
- *location
- *repository
- &mavenArtifact
parameter_name: mavenArtifactsId
attribute_name: mavenArtifact
help: |
The maven artifact associated with the {resource}.
package:
name: package
collection: artifactregistry.projects.locations.repositories.packages
attributes:
- *project
- *location
- *repository
- &package
parameter_name: packagesId
attribute_name: package
help: |
The package associated with the {resource}.
version:
name: version
collection: artifactregistry.projects.locations.repositories.packages.versions
attributes:
- *project
- *location
- *repository
- *package
- &version
parameter_name: versionsId
attribute_name: version
help: |
The version associated with the {resource}.
tag:
name: tag
collection: artifactregistry.projects.locations.repositories.packages.tags
attributes:
- *project
- *location
- *repository
- *package
- &tag
parameter_name: tagsId
attribute_name: tag
help: |
The tag associated with the {resource}.
operation:
name: operation
collection: artifactregistry.projects.locations.operations
attributes:
- *project
- *location
- &operation
parameter_name: operationsId
attribute_name: operation
help: |
An Artifact Registry operation.
file:
name: file
collection: artifactregistry.projects.locations.repositories.files
attributes:
- *project
- *location
- *repository
- &file
parameter_name: filesId
attribute_name: file
help: |
The file associated with the {resource}.
rule:
name: rule
collection: artifactregistry.projects.locations.repositories.rules
attributes:
- *project
- *location
- *repository
- &rule
parameter_name: rulesId
attribute_name: rule
help: |
The rule associated with the {resource}.
attachment:
name: attachment
collection: artifactregistry.projects.locations.repositories.attachments
attributes:
- *project
- *location
- *repository
- &attachment
parameter_name: attachmentsId
attribute_name: attachment
help: |
Attachment associated with the {resource}.

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Artifacts SBOM reference specific printer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from googlecloudsdk.core.resource import custom_printer_base as cp
from googlecloudsdk.core.resource import flattened_printer as fp
SBOM_PRINTER_FORMAT = "sbom"
# pylint: disable=line-too-long
def _GenerateSignedBy(signatures):
sig = (", ").join(sig.keyid for sig in signatures)
if sig == "projects/goog-analysis/locations/global/keyRings/sbomAttestor/cryptoKeys/generatedByArtifactAnalysis/cryptoKeyVersions/1":
return "Artifact Analysis"
if sig == "projects/goog-analysis-dev/locations/global/keyRings/sbomAttestor/cryptoKeys/generatedByArtifactAnalysis/cryptoKeyVersions/1":
return "Artifact Analysis Dev"
return sig
class SbomPrinter(cp.CustomPrinterBase):
  """Prints SBOM reference fields with customized labels in customized order."""

  def Transform(self, sbom_ref):
    """Emits the fields of sbom_ref through a flattened printer, in order."""
    printer = fp.FlattenedPrinter()
    occ = sbom_ref.occ
    # Fixed ordering: resource, location, reference, then optional fields.
    records = [
        {"resource_uri": occ.resourceUri},
        {"location": occ.sbomReference.payload.predicate.location},
        {"reference": occ.name},
    ]
    sig = _GenerateSignedBy(occ.sbomReference.signatures)
    if sig:
      records.append({"signed_by": sig})
    if "exists" in sbom_ref.file_info:
      records.append({"file_exists": sbom_ref.file_info["exists"]})
    if "err_msg" in sbom_ref.file_info:
      records.append({"file_err_msg": sbom_ref.file_info["err_msg"]})
    for record in records:
      printer.AddRecord(record, delimit=False)

View File

@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*- #
# Copyright 2024 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tag utils for Artifact Registry commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.artifacts import filter_rewriter
from googlecloudsdk.command_lib.artifacts import requests
from googlecloudsdk.command_lib.artifacts import util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
def ListTags(args):
  """Lists package tags in a given package.

  Args:
    args: User input arguments.

  Returns:
    List of package tags.
  """
  client = requests.GetClient()
  messages = requests.GetMessages()
  repo = util.GetRepo(args)
  project = util.GetProject(args)
  location = args.location or properties.VALUES.artifacts.location.Get()
  package = args.package
  # Separator characters in package names must be URL-escaped before being
  # embedded in a resource name.
  escaped_pkg = (
      package.replace("/", "%2F").replace("+", "%2B").replace("^", "%5E"))
  _, server_filter = filter_rewriter.Rewriter().Rewrite(args.filter)
  pkg_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories.packages",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
          packagesId=escaped_pkg,
      )
  )
  server_args_skipped, ltags = util.RetryOnInvalidArguments(
      requests.ListTags,
      client=client,
      messages=messages,
      package=pkg_path,
      server_filter=server_filter,
      page_size=args.page_size,
  )
  if not server_args_skipped:
    # The server accepted the rewritten filter, so drop the client-side
    # filter to avoid filtering the results a second time.
    if server_filter and server_filter == args.filter:
      args.filter = None
  log.status.Print(
      "Listing items under project {}, location {}, repository {}, "
      "package {}.\n".format(project, location, repo, package)
  )
  return ltags

View File

@@ -0,0 +1,478 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for interacting with `artifacts docker upgrade` command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import functools
from apitools.base.py import exceptions as apitools_exceptions
import frozendict
from google.api_core.exceptions import ResourceExhausted
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.api_lib.asset import client_util as asset
from googlecloudsdk.api_lib.cloudresourcemanager import organizations
from googlecloudsdk.api_lib.cloudresourcemanager import projects_api as crm
from googlecloudsdk.api_lib.resource_manager import folders
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.artifacts import requests as artifacts
from googlecloudsdk.command_lib.projects import util as projects_util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_attr
# Maps a gcr.io registry domain to the prefix of its backing GCS bucket name.
_DOMAIN_TO_BUCKET_PREFIX = frozendict.frozendict({
    "gcr.io": "",
    "us.gcr.io": "us.",
    "asia.gcr.io": "asia.",
    "eu.gcr.io": "eu.",
})
# Artifact Registry roles used when translating GCR/GCS access into AR access.
_REPO_ADMIN = "roles/artifactregistry.repoAdmin"
_WRITER = "roles/artifactregistry.writer"
_READER = "roles/artifactregistry.reader"
# In order of most to least privilege, so we can grant the most privileged role.
_AR_ROLES = (_REPO_ADMIN, _WRITER, _READER)
# Set of GCS permissions for GCR that are relevant to AR.
_PERMISSIONS = (
    "storage.objects.get",
    "storage.objects.list",
    "storage.objects.create",
    "storage.objects.delete",
)
# Set of AR permissions that could be used over the gcr.io endpoint.
_AR_PERMISSIONS = (
    "artifactregistry.repositories.downloadArtifacts",
    "artifactregistry.repositories.uploadArtifacts",
    "artifactregistry.repositories.deleteArtifacts",
)
# Maps a GCS permission for GCR to an equivalent AR role.
_PERMISSION_TO_ROLE = frozendict.frozendict({
    "storage.objects.get": _READER,
    "storage.objects.list": _READER,
    "storage.objects.create": _WRITER,
    "storage.objects.delete": _REPO_ADMIN,
})
# (AR permission, AR role) pairs, ordered least to most privileged.
_AR_PERMISSIONS_TO_ROLES = [
    ("artifactregistry.repositories.downloadArtifacts", _READER),
    ("artifactregistry.repositories.uploadArtifacts", _WRITER),
    ("artifactregistry.repositories.deleteArtifacts", _REPO_ADMIN),
]
# Error raised when AnalyzeIamPolicy could not explore all relevant policies.
_ANALYSIS_NOT_FULLY_EXPLORED = (
    "Too many IAM policies. Analysis cannot be fully completed."
)
def bucket_suffix(project):
  """Returns the GCS bucket-name suffix of a project's GCR bucket.

  Domain-scoped projects ("example.com:proj") map to
  "proj.example.com.a.appspot.com"; regular projects map to
  "<project>.appspot.com".
  """
  parts = project.split(":", 1)
  if len(parts) == 2:
    scope_domain, project_name = parts
    return "{0}.{1}.a.appspot.com".format(project_name, scope_domain)
  return project + ".appspot.com"
def bucket_resource_name(domain, project):
  """Returns the full asset resource name of the GCR bucket for a registry."""
  # gcloud-disable-gdu-domain
  return "//storage.googleapis.com/{0}artifacts.{1}".format(
      _DOMAIN_TO_BUCKET_PREFIX[domain], bucket_suffix(project))
def bucket_url(domain, project):
  """Returns the gs:// URL of the GCR bucket for a registry domain+project."""
  return "gs://" + _DOMAIN_TO_BUCKET_PREFIX[domain] + "artifacts." + bucket_suffix(project)
def project_resource_name(project):
  """Returns the full asset resource name of a project."""
  # gcloud-disable-gdu-domain
  return f"//cloudresourcemanager.googleapis.com/projects/{project}"
def iam_policy(domain, project, use_analyze=True):
  """Generates an AR-equivalent IAM policy for a GCR registry.

  Args:
    domain: The domain of the GCR registry.
    project: The project of the GCR registry.
    use_analyze: If true, use AnalyzeIamPolicy to generate the policy

  Returns:
    An iam.Policy.

  Raises:
    Exception: A problem was encountered while generating the policy.
  """
  role_map, _ = iam_map(
      domain,
      project,
      skip_bucket=False,
      from_ar_permissions=False,
      use_analyze=use_analyze,
  )
  # Wrap the role->members map in an iam.Policy so gcloud can format it.
  return policy_from_map(role_map)
def map_from_policy(policy):
  """Converts an iam.Policy object to a map of roles to sets of members.

  Args:
    policy: An iam.Policy object

  Returns:
    A map of roles to sets of members
  """
  members_by_role = collections.defaultdict(set)
  for binding in policy.bindings:
    for member in binding.members:
      members_by_role[binding.role].add(member)
  return members_by_role
def policy_from_map(role_to_members):
  """Converts a map of roles to sets of members into an iam.Policy object.

  Args:
    role_to_members: A map of roles to sets of members.

  Returns:
    An iam.Policy with one binding per role, deterministically ordered.
  """
  messages = artifacts.GetMessages()
  bindings = [
      messages.Binding(role=role, members=tuple(sorted(members)))
      for role, members in role_to_members.items()
  ]
  # Sort bindings by role so the output ordering is stable.
  bindings.sort(key=lambda binding: binding.role)
  return messages.Policy(bindings=bindings)
# NOTE(review): the unbounded lru_cache is keyed on a small set of
# (domain, project, flags) tuples per invocation, so growth is bounded in
# practice — confirm if this is ever called with many distinct projects.
@functools.lru_cache(maxsize=None)
def iam_map(
    domain,
    project,
    skip_bucket,
    from_ar_permissions,
    best_effort=False,
    use_analyze=True,
):
  """Generates an AR-equivalent IAM mapping for a GCR registry.

  Args:
    domain: The domain of the GCR registry.
    project: The project of the GCR registry.
    skip_bucket: If true, get iam policy for project instead of bucket. This can
      be useful when the bucket doesn't exist.
    from_ar_permissions: If true, use AR permissions to generate roles that
      would not need to be added to AR since user already has equivalent access
      for docker commands
    best_effort: If true, lower the scope when encountering auth errors
    use_analyze: If true, use AnalyzeIamPolicy to generate the policy

  Returns:
    (map, failures) where map is a map of roles to sets of users and
    failures is a list of scopes that failed

  Raises:
    Exception: A problem was encountered while generating the policy.
  """
  perm_to_members = None
  failures = []
  # Step 1: collect permission->members, either via Cloud Asset Inventory's
  # AnalyzeIamPolicy or by walking the project's ancestor IAM policies.
  if use_analyze:
    if skip_bucket:
      resource = project_resource_name(project)
    else:
      resource = bucket_resource_name(domain, project)
    perm_to_members, failures = get_permissions_using_analyze(
        project, resource, from_ar_permissions, best_effort
    )
  else:
    if from_ar_permissions:
      perm_to_members, failures = get_permissions_with_ancestors(
          project, _AR_PERMISSIONS, best_effort=best_effort
      )
    else:
      if skip_bucket:
        perm_to_members, failures = get_permissions_with_ancestors(
            project, _PERMISSIONS, best_effort=best_effort
        )
      else:
        gcs_bucket = bucket_url(domain, project)
        perm_to_members, failures = get_permissions_with_ancestors(
            project, _PERMISSIONS, gcs_bucket, best_effort=best_effort
        )
  # Permission collection failed entirely; propagate the failed scopes.
  if perm_to_members is None:
    return None, failures
  # Step 2: fold permissions into AR roles.
  role_to_members = collections.defaultdict(set)
  if from_ar_permissions:
    # For AR roles, provide all roles that the user has every *Artifacts
    # permission for
    members = perm_to_members[_AR_PERMISSIONS_TO_ROLES[0][0]]
    for needed_perm, role in _AR_PERMISSIONS_TO_ROLES:
      # Members keep accumulating intersection requirements: a role is only
      # granted to members holding all permissions up to (and including) it.
      members = members.intersection(perm_to_members[needed_perm])
      for member in members:
        role_to_members[role].add(member)
    return role_to_members, failures
  # For GCR roles, provide the smallest set of roles required to grant all
  # permissions
  for perm, members in perm_to_members.items():
    role = _PERMISSION_TO_ROLE[perm]
    role_to_members[role].update(members)
  # Grant the most privileged role to a member.
  upgraded_members = set()
  final_map = collections.defaultdict(set)
  # _AR_ROLES is ordered most->least privileged, so each member lands only
  # in the first (highest) role that matched them.
  for role in _AR_ROLES:
    members = role_to_members[role]
    # Don't return deleted members. They show up in the old policies but we
    # can't copy them.
    members = {m for m in members if not m.startswith("deleted:")}
    members.difference_update(upgraded_members)
    if not members:
      continue
    upgraded_members.update(members)
    final_map[role].update(members)
  return final_map, failures
def get_permissions_using_analyze(
    project, resource, from_ar_permissions, best_effort
):
  """Returns a map of permissions to members using AnalyzeIamPolicy.

  Args:
    project: The project whose ancestry determines the analysis scopes.
    resource: Full resource name analyzed for access (bucket or project).
    from_ar_permissions: If true, analyze AR permissions instead of GCS ones.
    best_effort: If true, narrow the scope on auth errors and warn instead
      of failing on incomplete analyses.

  Returns:
    (perm_to_members, failures): a map of permission -> set of members, and
    the list of scopes that could not be analyzed. (None, failures) if every
    scope failed.
  """
  ancestry = crm.GetAncestry(project_id=project)
  failures = []
  analysis = None
  # Reverse the order so we go from org->project
  for num, ancestor in enumerate(reversed(ancestry.ancestor)):
    scope = resource_from_ancestor(ancestor)
    try:
      if from_ar_permissions:
        analysis = analyze_iam_policy(_AR_PERMISSIONS, resource, scope)
      else:
        analysis = analyze_iam_policy(_PERMISSIONS, resource, scope)
      # The first scope that succeeds is used; wider scopes are preferred.
      break
    except apitools_exceptions.HttpForbiddenError:
      failures.append(scope)
      if not best_effort:
        raise
      # Even the narrowest scope (the project itself) failed; give up.
      if num == len(ancestry.ancestor) - 1:
        return None, failures
  # If we see any false fullyExplored, that indicates that AnalyzeIamPolicy is
  # returning incomplete information, so the generated policy might be wrong,
  # so we conservatively bail out in that case.
  if not analysis.fullyExplored or not analysis.mainAnalysis.fullyExplored:
    errors = list(err.cause for err in analysis.mainAnalysis.nonCriticalErrors)
    error_msg = "\n".join(errors)
    if not best_effort:
      raise ar_exceptions.ArtifactRegistryError(error_msg)
    warning_msg = (
        "Encountered errors when analyzing IAM policy. This may result in"
        f" incomplete bindings: {error_msg}"
    )
    con = console_attr.GetConsoleAttr()
    log.status.Print(f"{con.Colorize('Warning:','red')} {warning_msg}")
  perm_to_members = collections.defaultdict(set)
  for result in analysis.mainAnalysis.analysisResults:
    if not result.fullyExplored:
      raise ar_exceptions.ArtifactRegistryError(_ANALYSIS_NOT_FULLY_EXPLORED)
    if result.iamBinding.condition is not None and not best_effort:
      # AR doesn't support IAM conditions.
      raise ar_exceptions.ArtifactRegistryError(
          "Conditional IAM binding is not supported."
      )
    members = set()
    for member in result.iamBinding.members:
      if is_convenience(member):
        # convenience values are GCR legacy. They are not needed in AR.
        continue
      members.add(member)
    # Attribute this binding's members to every permission it grants.
    for acl in result.accessControlLists:
      for access in acl.accesses:
        perm = access.permission
        perm_to_members[perm].update(members)
  return perm_to_members, failures
def is_convenience(s):
  """Returns True if *s* is a legacy GCS convenience member (projectOwner/Editor/Viewer)."""
  return s.startswith(("projectOwner:", "projectEditor:", "projectViewer:"))
def get_permissions_with_ancestors(
    project_id, permissions, gcs_bucket=None, best_effort=True
):
  """Returns (perm->members map, failures) from project/ancestor IAM policies."""
  role_map, role_failures = recursive_get_roles(
      project_id, best_effort, gcs_bucket)
  perm_map, perm_failures = get_permissions(permissions, role_map, best_effort)
  return perm_map, role_failures + perm_failures
def recursive_get_roles(project_id, best_effort, gcs_bucket=None):
  """Returns a map of roles to members for the given project + ancestors (and bucket if provided)."""
  ancestry = crm.GetAncestry(project_id=project_id)
  role_to_members = collections.defaultdict(set)
  if gcs_bucket:
    # Include the bucket's own IAM bindings alongside the ancestry chain.
    bucket_ref = storage_util.BucketReference.FromUrl(gcs_bucket)
    bucket_policy = storage_api.StorageClient().GetIamPolicy(bucket_ref)
    for bucket_binding in bucket_policy.bindings:
      role_to_members[bucket_binding.role].update(bucket_binding.members)
  failures = []
  # Walk from the organization down to the project itself.
  for resource in reversed(ancestry.ancestor):
    res_type = resource.resourceId.type
    res_id = resource.resourceId.id
    try:
      if res_type == "project":
        policy_bindings = crm.GetIamPolicy(
            projects_util.ParseProject(project_id)
        ).bindings
      elif res_type == "folder":
        policy_bindings = folders.GetIamPolicy(res_id).bindings
      elif res_type == "organization":
        policy_bindings = (
            organizations.Client().GetIamPolicy(res_id).bindings
        )
      else:
        policy_bindings = []
      for binding in policy_bindings:
        role_to_members[binding.role].update(binding.members)
    except apitools_exceptions.HttpForbiddenError:
      failures.append("{}s/{}".format(res_type, res_id))
      if not best_effort:
        raise
      if res_type == "project":
        # Without the project's own policy the result would be incomplete,
        # so signal that no usable role map could be built.
        return None, failures
  return role_to_members, failures
def get_permissions(permissions, role_map, best_effort=True):
  """Returns a map of permissions to members for the given roles.

  Args:
    permissions: The permissions to look for. All other permissions are ignored.
    role_map: A map of roles to members.
    best_effort: If true, warn instead of failing on auth errors.

  Returns:
    (map, failures) where map is a map of permissions to members and failures
    is a list of roles that failed
  """
  failures = []
  permission_map = collections.defaultdict(set)
  iam_messages = apis.GetMessagesModule("iam", "v1")
  # The client is loop-invariant; look it up once instead of per role.
  iam_client = apis.GetClientInstance("iam", "v1")
  for role, members in role_map.items():
    # Convenience members are GCR legacy; they are never written to AR
    # policies, but the role is still queried so failures are reported.
    members = [m for m in members if not is_convenience(m)]
    request = iam_messages.IamRolesGetRequest(name=role)
    try:
      role_permissions = set(iam_client.roles.Get(request).includedPermissions)
    except apitools_exceptions.HttpForbiddenError:
      failures.append(role)
      if not best_effort:
        # Bare raise preserves the original traceback.
        raise
      continue
    for p in permissions:
      if p in role_permissions:
        permission_map[p].update(members)
  return permission_map, failures
def analyze_iam_policy(permissions, resource, scope):
  """Calls AnalyzeIamPolicy for the given resource.

  Args:
    permissions: for the access selector
    resource: for the resource selector
    scope: for the scope

  Returns:
    An CloudassetAnalyzeIamPolicyResponse.

  Raises:
    ar_exceptions.ArtifactRegistryError: If the request fails due to
      analyzeIamPolicy quota (HTTP 429 or ResourceExhausted).
  """
  # Single source of truth for the quota message (previously duplicated).
  quota_msg = (
      "Insufficient quota for AnalyzeIamPolicy. Use --no-use-analyze-iam to"
      " generate IAM policies without using AnalyzeIamPolicy."
  )
  client = asset.GetClient()
  service = client.v1
  messages = asset.GetMessages()
  try:
    return service.AnalyzeIamPolicy(
        messages.CloudassetAnalyzeIamPolicyRequest(
            analysisQuery_accessSelector_permissions=permissions,
            analysisQuery_resourceSelector_fullResourceName=resource,
            scope=scope,
        )
    )
  except apitools_exceptions.HttpError as e:
    # HTTP 429 is the REST surface of quota exhaustion.
    if e.status_code == 429:
      raise ar_exceptions.ArtifactRegistryError(quota_msg)
    raise
  except ResourceExhausted:
    # Presumably the RPC-side surface of the same quota condition — handled
    # identically to HTTP 429.
    raise ar_exceptions.ArtifactRegistryError(quota_msg)
def resource_from_ancestor(ancestor):
  """Converts an ancestor to a resource name.

  Args:
    ancestor: an ancestor proto return from GetAncestry

  Returns:
    The resource name of the ancestor, or None for an unrecognized type.
  """
  # Table-driven dispatch over the known CRM resource types.
  templates = {
      "organization": "organizations/{0}",
      "folder": "folders/{0}",
      "project": "projects/{0}",
  }
  template = templates.get(ancestor.resourceId.type)
  if template is None:
    return None
  return template.format(ancestor.resourceId.id)

View File

@@ -0,0 +1,289 @@
# -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for parsing Artifact Registry versions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
import json
from apitools.base.protorpclite import protojson
from googlecloudsdk.api_lib.artifacts import filter_rewriter
from googlecloudsdk.api_lib.util import common_args
from googlecloudsdk.command_lib.artifacts import containeranalysis_util as ca_util
from googlecloudsdk.command_lib.artifacts import requests
from googlecloudsdk.command_lib.artifacts import util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
def ShortenRelatedTags(response, unused_args):
  """Convert the tag resources into tag IDs."""
  # Reduce each full tag resource name to its trailing tag ID.
  tag_ids = [
      resources.REGISTRY.ParseRelativeName(
          t.name,
          "artifactregistry.projects.locations.repositories.packages.tags",
      ).tagsId
      for t in response.relatedTags
  ]
  json_obj = json.loads(protojson.encode_message(response))
  json_obj.pop("relatedTags", None)
  if tag_ids:
    json_obj["relatedTags"] = tag_ids
  # Restore the display format of `metadata` after json conversion.
  if response.metadata is not None:
    json_obj["metadata"] = {
        prop.key: prop.value.string_value
        for prop in response.metadata.additionalProperties
    }
  return json_obj
def ListOccurrences(response, args):
  """Call CA APIs for vulnerabilities if --show-package-vulnerability is set.

  Args:
    response: dict, the serialized version resource being displayed.
    args: parsed command-line arguments.

  Returns:
    The response dict, augmented in place with Container Analysis occurrence
    data, or with a "no vulnerability data" summary, when
    --show-package-vulnerability is set; otherwise the response unchanged.
  """
  if not args.show_package_vulnerability:
    return response
  # Parse the version resource name out of the serialized response.
  resource = resources.REGISTRY.ParseRelativeName(
      response["name"],
      "artifactregistry.projects.locations.repositories.packages.versions",
  )
  # Rebuild the parent repository reference to look up its format.
  repo_resource = resources.REGISTRY.Parse(
      resource.repositoriesId,
      params={
          "projectsId": resource.projectsId,
          "locationsId": (
              resource.locationsId
          ),
      },
      collection="artifactregistry.projects.locations.repositories",
  )
  messages = requests.GetMessages()
  repository = requests.GetRepository(repo_resource.RelativeName())
  if not repository or not repository.format:
    # Without a known format the per-format resource name used by Container
    # Analysis cannot be built, so skip the scan gracefully.
    log.warning(
        "Could not determine repository format, so cannot show vulnerability"
        " scan."
    )
    return response
  # Each supported format encodes the package/version pair differently.
  if repository.format == messages.Repository.FormatValueValuesEnum.MAVEN:
    project, resource = _GenerateMavenResourceFromResponse(resource)
  elif repository.format == messages.Repository.FormatValueValuesEnum.NPM:
    project, resource = _GenerateNPMPackageResourceFromResponse(resource)
  elif repository.format == messages.Repository.FormatValueValuesEnum.PYTHON:
    project, resource = _GeneratePythonPackageResourceFromResponse(resource)
  else:
    log.warning(
        "Unsupported repository format. Skipping showing vulnerability scan."
    )
    return response
  metadata = ca_util.GetArtifactOccurrences(project, resource)
  if metadata.ArtifactsDescribeView():
    # Merge occurrence details directly into the displayed response.
    response.update(metadata.ArtifactsDescribeView())
  else:
    response.update(
        {"package_vulnerability_summary": "No vulnerability data found."}
    )
  return response
def ConvertFingerprint(response, unused_args):
  """Convert fingerprint and annotations to a dict."""
  if hasattr(response, "check_initialized"):
    # It's a protorpc message.
    resource = json.loads(protojson.encode_message(response))
  else:
    # It's a json already.
    resource = response
  for fingerprint in resource.get("fingerprints") or []:
    value = fingerprint.get("value")
    if isinstance(value, str):
      # In dicts from tests, the value is base64 encoded string.
      fingerprint["value"] = base64.b64decode(value).hex()
  annotations = resource.get("annotations")
  if annotations:
    # The value from scenario test is a dict, not a message.
    if "additionalProperties" in annotations:
      resource["annotations"] = {
          p["key"]: p["value"]
          for p in annotations.get("additionalProperties", [])
      }
  return resource
def _GenerateMavenResourceFromResponse(resource):
  """Generates the maven artifact resource from the version resource name.

  Args:
    resource: The version resource name.

  Returns:
    The project ID and the maven artifact package resource name.
  """
  registry = resources.REGISTRY.Clone()
  registry.RegisterApiByName("artifactregistry", "v1")
  # Maven artifacts are addressed as "<package>:<version>".
  artifact_ref = registry.Create(
      "artifactregistry.projects.locations.repositories.mavenArtifacts",
      projectsId=resource.projectsId,
      locationsId=resource.locationsId,
      repositoriesId=resource.repositoriesId,
      mavenArtifactsId="{}:{}".format(resource.packagesId, resource.versionsId),
  )
  return resource.projectsId, resources.Resource.RelativeName(artifact_ref)
def _GenerateNPMPackageResourceFromResponse(resource):
  """Generates the npm package resource from the version resource name.

  Args:
    resource: The version resource name.

  Returns:
    The project ID and the npm package resource name.
  """
  registry = resources.REGISTRY.Clone()
  registry.RegisterApiByName("artifactregistry", "v1")
  # NPM packages are addressed as "<package>:<version>".
  package_ref = registry.Create(
      "artifactregistry.projects.locations.repositories.npmPackages",
      projectsId=resource.projectsId,
      locationsId=resource.locationsId,
      repositoriesId=resource.repositoriesId,
      npmPackagesId="{}:{}".format(resource.packagesId, resource.versionsId),
  )
  return resource.projectsId, resources.Resource.RelativeName(package_ref)
def _GeneratePythonPackageResourceFromResponse(resource):
  """Generates the python package resource from the version resource name.

  Args:
    resource: The version resource name.

  Returns:
    The project ID and the python package resource name.
  """
  registry = resources.REGISTRY.Clone()
  registry.RegisterApiByName("artifactregistry", "v1")
  # Python packages are addressed as "<package>:<version>".
  package_ref = registry.Create(
      "artifactregistry.projects.locations.repositories.pythonPackages",
      projectsId=resource.projectsId,
      locationsId=resource.locationsId,
      repositoriesId=resource.repositoriesId,
      pythonPackagesId="{}:{}".format(resource.packagesId, resource.versionsId),
  )
  return resource.projectsId, resources.Resource.RelativeName(package_ref)
def ListVersions(args):
  """Lists package versions in a given package.

  Args:
    args: User input arguments.

  Returns:
    List of package versions.
  """
  client = requests.GetClient()
  messages = requests.GetMessages()
  page_size = args.page_size
  repo = util.GetRepo(args)
  project = util.GetProject(args)
  location = args.location or properties.VALUES.artifacts.location.Get()
  package = args.package
  # URL-escape characters that are significant inside package IDs.
  escaped_pkg = (
      package.replace("/", "%2F").replace("+", "%2B").replace("^", "%5E")
  )
  order_by = common_args.ParseSortByArg(args.sort_by)
  limit = args.limit
  _, server_filter = filter_rewriter.Rewriter().Rewrite(args.filter)
  if order_by is not None and "," in order_by:
    # Multi-ordering is not supported yet on backend, fall back to client-side
    # sort-by.
    order_by = None
  if args.limit is not None and args.filter is not None:
    if server_filter is not None:
      # Apply limit to server-side page_size to improve performance when
      # server-side filter is used.
      page_size = args.limit
    else:
      # Fall back to client-side paging with client-side filtering.
      page_size = None
      limit = None
  pkg_path = resources.Resource.RelativeName(
      resources.REGISTRY.Create(
          "artifactregistry.projects.locations.repositories.packages",
          projectsId=project,
          locationsId=location,
          repositoriesId=repo,
          packagesId=escaped_pkg,
      )
  )
  server_args = {
      "client": client,
      "messages": messages,
      "pkg": pkg_path,
      "server_filter": server_filter,
      "page_size": page_size,
      "order_by": order_by,
      "limit": limit,
  }
  server_args_skipped, lversions = util.RetryOnInvalidArguments(
      requests.ListVersions, **server_args
  )
  if not server_args_skipped:
    # If server-side filter or sort-by is parsed correctly and the request
    # succeeds, remove the client-side filter and sort-by.
    if server_filter and server_filter == args.filter:
      args.filter = None
    if order_by:
      args.sort_by = None
  log.status.Print(
      "Listing items under project {}, location {}, repository {}, "
      "package {}.\n".format(project, location, repo, package)
  )
  return lversions

View File

@@ -0,0 +1,407 @@
# -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for interacting with vex command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import hashlib
import json
import re
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.api_lib.container.images import util as gcr_util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.command_lib.artifacts import docker_util
from googlecloudsdk.core import log
from googlecloudsdk.core.util.files import FileReader
# CSAF flag labels accepted as VEX justifications for a
# "known_not_affected" product status (validated in _ValidateVulnerability).
POSSIBLE_JUSTIFICATION_FLAGS = [
    'component_not_present',
    'vulnerable_code_not_present',
    'vulnerable_code_cannot_be_controlled_by_adversary',
    'vulnerable_code_not_in_execute_path',
    'inline_mitigations_already_exist',
]
# Allowed keys of a vulnerability's "product_status" map in a CSAF document.
POSSIBLE_PRODUCT_STATUS = ['known_affected',
                           'known_not_affected',
                           'fixed',
                           'under_investigation']
# Allowed "category" values for a vulnerability's remediation entries.
POSSIBLE_REMEDIATION_CATEGORIES = [
    'mitigation',
    'no_fix_planned',
    'none_available',
    'vendor_fix',
    'workaround']
# Matches an image reference containing no ':' (tag), '@' (digest), or '/',
# i.e. a whole-image name rather than a specific version.
WHOLE_IMAGE_REGEX = r'^[^:@\/]+$'
def ParseVexFile(filename, image_uri, version_uri):
  """Reads a vex file and extracts notes.

  Args:
    filename: str, path to the vex file.
    image_uri: uri of the whole image
    version_uri: uri of a specific version

  Returns:
    (notes, generic_uri) where notes is a list of note additional-property
    messages and generic_uri is the 'https://'-prefixed URI recorded on
    matching products (the bare URI if no product matched).

  Raises:
    ar_exceptions.InvalidInputValueError if user input is invalid.
  """
  ca_messages = apis.GetMessagesModule('containeranalysis', 'v1')
  try:
    with FileReader(filename) as file:
      vex = json.load(file)
  except ValueError:
    raise ar_exceptions.InvalidInputValueError(
        'Reading json file has failed'
    )
  _Validate(vex)
  name = ''
  namespace = ''
  # NOTE(review): _Validate does not require the `document` section, but
  # _MakeNote reads document['title'] — confirm documents always carry it.
  document = vex.get('document')
  if document is not None:
    publisher = document.get('publisher')
    if publisher is not None:
      name = publisher.get('name')
      namespace = publisher.get('namespace')
  publisher = ca_messages.Publisher(
      name=name,
      publisherNamespace=namespace,
  )
  base_uri = version_uri if version_uri else image_uri
  generic_uri = base_uri
  productid_to_product_proto_map = {}
  for product_info in vex['product_tree']['branches']:
    artifact_uri = RemoveHTTPS(product_info['name'])
    if image_uri != artifact_uri:
      continue
    product = product_info['product']
    product_id = product['product_id']
    # Bug fix: derive the https:// form from the unprefixed base URI so a
    # second matching product no longer yields "https://https://...".
    generic_uri = 'https://{}'.format(base_uri)
    product_proto = ca_messages.Product(
        name=product['name'],
        id=product_id,
        genericUri=generic_uri,
    )
    productid_to_product_proto_map[product_id] = product_proto
  notes = []
  for vuln in vex['vulnerabilities']:
    for status in vuln['product_status']:
      for product_id in vuln['product_status'][status]:
        product = productid_to_product_proto_map.get(product_id)
        if product is None:
          # Statement does not apply to the requested image; skip it.
          continue
        noteid, note = _MakeNote(
            vuln, status, product, publisher, document, ca_messages
        )
        if version_uri is None:
          # Image-level (version-less) notes get a distinguishing prefix.
          noteid = 'image-{}'.format(noteid)
        note = (
            ca_messages.BatchCreateNotesRequest.NotesValue.AdditionalProperty(
                key=noteid, value=note
            )
        )
        notes.append(note)
  return notes, generic_uri
def _Validate(vex):
  """Validates vex file has all needed fields.

  Args:
    vex: json representing a vex document

  Raises:
    ar_exceptions.InvalidInputValueError if user input is invalid.
  """
  product_tree = vex.get('product_tree')
  if product_tree is None:
    raise ar_exceptions.InvalidInputValueError(
        'product_tree is required in csaf document'
    )
  branches = product_tree.get('branches')
  if branches is None:
    raise ar_exceptions.InvalidInputValueError(
        'branches are required in product tree in csaf document'
    )
  if not branches:
    raise ar_exceptions.InvalidInputValueError(
        'at least one branch is expected in product tree in csaf document'
    )
  for branch in branches:
    branch_name = branch.get('name')
    if branch_name is None:
      raise ar_exceptions.InvalidInputValueError(
          'name is required in product tree in csaf document'
      )
    # The name must look like "<project>/<repository>/<package-or-image>".
    if len(branch_name.split('/')) < 3:
      raise ar_exceptions.InvalidInputValueError(
          'name of product should be artifact path, showing repository,'
          ' project, and package/image'
      )
  vulnerabilities = vex.get('vulnerabilities')
  if vulnerabilities is None:
    raise ar_exceptions.InvalidInputValueError(
        'vulnerabilities are required in csaf document'
    )
  if not vulnerabilities:
    # An empty list is tolerated but produces no notes, so warn only.
    log.warning('at least one vulnerability is expected in csaf document')
  for vuln in vulnerabilities:
    _ValidateVulnerability(vuln)
def _ValidateVulnerability(vuln):
  """Validates vulnerability is structured correctly.

  Args:
    vuln: a vulnerability from vex document

  Raises:
    ar_exceptions.InvalidInputValueError if user input is invalid.
  """
  if vuln.get('cve') is None:
    raise ar_exceptions.InvalidInputValueError(
        'cve is required in all vulnerabilities in csaf document'
    )
  product_status = vuln.get('product_status')
  if product_status is None:
    raise ar_exceptions.InvalidInputValueError(
        'product_status is required in all vulnerabilities in csaf document'
    )
  if not product_status:
    raise ar_exceptions.InvalidInputValueError(
        'at least one status is expected in each vulnerability'
    )
  for status in product_status:
    if status not in POSSIBLE_PRODUCT_STATUS:
      raise ar_exceptions.InvalidInputValueError(
          'Invalid product status passed in {}. Product status should be one'
          ' of {}'.format(status, POSSIBLE_PRODUCT_STATUS)
      )
  # "flags" and "remediations" are optional sections; validate when present.
  for flag in vuln.get('flags') or []:
    label = flag.get('label')
    if label not in POSSIBLE_JUSTIFICATION_FLAGS:
      raise ar_exceptions.InvalidInputValueError(
          'Invalid flag label passed in {}. Label should be one of {}'
          .format(label, POSSIBLE_JUSTIFICATION_FLAGS)
      )
  for remediation in vuln.get('remediations') or []:
    category = remediation.get('category')
    if category not in POSSIBLE_REMEDIATION_CATEGORIES:
      raise ar_exceptions.InvalidInputValueError(
          'Invalid remediation category passed in {}. Label should be one'
          ' of {}'.format(category, POSSIBLE_REMEDIATION_CATEGORIES)
      )
def _MakeNote(vuln, status, product, publisher, document, msgs):
  """Makes a note.

  Args:
    vuln: dict, a vulnerability entry from the CSAF document.
    status: string of status of vulnerability (one of
      POSSIBLE_PRODUCT_STATUS).
    product: product proto the assessment applies to.
    publisher: publisher proto.
    document: document proto; its 'title' is copied onto the note.
    msgs: container analysis messages

  Returns:
    noteid, and note — noteid is a deterministic hex digest, note is the
    assembled Note proto.
  """
  state = None
  remediations = []
  desc_note = None
  justification = None
  # Pick a note with category "description" (the last one wins) to supply
  # the short/long descriptions.
  notes = vuln.get('notes')
  if notes is not None:
    for note in notes:
      if note['category'] == 'description':
        desc_note = note
  # Map the CSAF product status onto the assessment state; only the affected
  # / not-affected states carry extra details (remediations/justification).
  if status == 'known_affected':
    state = msgs.Assessment.StateValueValuesEnum.AFFECTED
    remediations = _GetRemediations(vuln, product, msgs)
  elif status == 'known_not_affected':
    state = msgs.Assessment.StateValueValuesEnum.NOT_AFFECTED
    justification = _GetJustifications(vuln, product, msgs)
  elif status == 'fixed':
    state = msgs.Assessment.StateValueValuesEnum.FIXED
  elif status == 'under_investigation':
    state = msgs.Assessment.StateValueValuesEnum.UNDER_INVESTIGATION
  # NOTE(review): assumes `document` is not None and has a 'title';
  # validation does not currently enforce this — confirm upstream.
  note = msgs.Note(
      vulnerabilityAssessment=msgs.VulnerabilityAssessmentNote(
          title=document['title'],
          publisher=publisher,
          product=product,
          assessment=msgs.Assessment(
              vulnerabilityId=vuln['cve'],
              shortDescription=desc_note['title']
              if desc_note is not None
              else None,
              longDescription=desc_note['text']
              if desc_note is not None
              else None,
              state=state,
              remediations=remediations,
              justification=justification,
          ),
      ),
  )
  # Derive a deterministic note id from product URI + CVE (md5 used as a
  # fingerprint here, not for security).
  key = (
      note.vulnerabilityAssessment.product.genericUri
      + note.vulnerabilityAssessment.assessment.vulnerabilityId
  )
  result = hashlib.md5(key.encode())
  noteid = result.hexdigest()
  return noteid, note
def _GetRemediations(vuln, product, msgs):
  """Get remediations.

  Args:
    vuln: vulnerability proto
    product: product proto
    msgs: container analysis messages

  Returns:
    remediations proto
  """
  results = []
  # "remediations" is optional; absence yields an empty list.
  for entry in vuln.get('remediations') or []:
    detail = entry['details']
    remediation_enum = (
        msgs.Remediation.RemediationTypeValueValuesEnum.lookup_by_name(
            entry['category'].upper()
        )
    )
    # Keep only the remediations that reference this product.
    for product_id in entry['product_ids']:
      if product_id == product.id:
        results.append(
            msgs.Remediation(
                remediationType=remediation_enum, details=detail
            )
        )
  return results
def _GetJustifications(vuln, product, msgs):
  """Get justifications.

  Args:
    vuln: vulnerability proto
    product: product proto
    msgs: container analysis messages

  Returns:
    justification proto
  """
  justification_type_as_string = 'justification_type_unspecified'
  flags = vuln.get('flags')
  if flags is None:
    return msgs.Justification()
  # Find the label of the last flag that references this product.
  for flag in flags:
    label = flag.get('label')
    for product_id in flag.get('product_ids'):
      if product_id == product.id:
        justification_type_as_string = label
  # Bug fix: convert after the scan. Previously `justification_type` was
  # only bound inside the match branch, so flags that referenced other
  # products crashed with NameError; now they fall back to
  # JUSTIFICATION_TYPE_UNSPECIFIED.
  enum_dict = msgs.Justification.JustificationTypeValueValuesEnum.to_dict()
  number = enum_dict[justification_type_as_string.upper()]
  justification_type = msgs.Justification.JustificationTypeValueValuesEnum(
      number
  )
  return msgs.Justification(justificationType=justification_type)
def ParseGCRUrl(url):
  """Parse GCR URL.

  Args:
    url: gcr url for version, tag or whole image

  Returns:
    strings of project, image url and version url

  Raises:
    ar_exceptions.InvalidInputValueError: If user input is invalid.
  """
  # Map each GCR hostname to its multi-regional AR location.
  gcr_locations = {
      'us.gcr.io': 'us',
      'gcr.io': 'us',
      'eu.gcr.io': 'europe',
      'asia.gcr.io': 'asia',
  }
  location = None
  project = None
  image = None
  plain_match = re.match(docker_util.GCR_DOCKER_REPO_REGEX, url)
  if plain_match:
    location = gcr_locations[plain_match.group('repo')]
    project = plain_match.group('project')
    image = plain_match.group('image')
  # Domain-scoped projects ("example.com/project") win if both forms match.
  scoped_match = re.match(docker_util.GCR_DOCKER_DOMAIN_SCOPED_REPO_REGEX, url)
  if scoped_match:
    location = gcr_locations[scoped_match.group('repo')]
    project = scoped_match.group('project').replace('/', ':', 1)
    image = scoped_match.group('image')
  if not project or not location or not image:
    raise ar_exceptions.InvalidInputValueError(
        'Failed to parse the GCR image.'
    )
  if re.match(WHOLE_IMAGE_REGEX, image):
    # No tag or digest present: the URL names the whole image.
    return project, url, None
  try:
    docker_digest = gcr_util.GetDigestFromName(url)
  except gcr_util.InvalidImageNameError as e:
    raise ar_exceptions.InvalidInputValueError(
        'Failed to resolve digest of the GCR image'
    ) from e
  # Calls the parent class's __str__ — presumably to render the image path
  # without the digest suffix; confirm against the digest type's hierarchy.
  image_url = super(type(docker_digest), docker_digest).__str__()
  return project, image_url, str(docker_digest)
def RemoveHTTPS(uri):
  """Strips a leading 'https://' scheme from uri, if present."""
  scheme = 'https://'
  return uri[len(scheme):] if uri.startswith(scheme) else uri