feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for App Engine apps for `gcloud tasks` commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.app import appengine_api_client as app_engine_api
from googlecloudsdk.api_lib.tasks import GetApiAdapter
from googlecloudsdk.calliope import base as calliope_base
from googlecloudsdk.command_lib.tasks import constants
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
class RegionResolvingError(exceptions.Error):
  """Error for when the app's region cannot be ultimately determined.

  Raised by ResolveAppLocation when no location can be derived for the
  project and the caller must supply one explicitly via the location flag.
  """
def AppEngineAppExists():
  """Returns whether an AppEngine app exists for the current project.

  Previously we were relying on the output of ListLocations for Cloud Tasks &
  Cloud Scheduler to determine if an AppEngine exists. Previous behaviour was
  to return only one location which would be the AppEngine app location and an
  empty list otherwise if no app existed. Now with AppEngine dependency removal,
  ListLocations will return an actual list of valid regions. If an AppEngine app
  does exist, that location will be returned indexed at 0 in the result list.

  Note: We also return False if the user does not have the necessary permissions
  to determine if the project has an AppEngine app or not.

  Returns:
    Boolean representing whether an app exists or not.
  """
  client = app_engine_api.GetApiClientForTrack(calliope_base.ReleaseTrack.GA)
  try:
    # GetApplication raises (e.g. NotFoundError) when no app exists; a
    # permission failure is also treated as "no app" on purpose.
    client.GetApplication()
  except Exception:  # pylint: disable=broad-except
    return False
  return True
def ResolveAppLocation(project_ref, locations_client=None):
  """Gets the default location from the Cloud Tasks API.

  If an AppEngine app exists, the default location is the location where the
  app exists.

  Args:
    project_ref: The project resource to look up the location for.
    locations_client: The project resource used to look up locations.

  Returns:
    The location. Some examples: 'us-central1', 'us-east4'

  Raises:
    RegionResolvingError: If we are unable to determine a default location
      for the given project.
  """
  client = locations_client or GetApiAdapter(
      calliope_base.ReleaseTrack.GA).locations
  found = list(client.List(project_ref))
  # Without a list entry and an existing App Engine app there is no sensible
  # default; the user must pass the location flag themselves.
  if not found or not AppEngineAppExists():
    raise RegionResolvingError(
        'Please use the location flag to manually specify a location.')
  # The App Engine app's location is indexed at 0 in the result list.
  app_location = found[0].labels.additionalProperties[0].value
  if len(found) > 1:
    log.warning(
        constants.APP_ENGINE_DEFAULT_LOCATION_WARNING.format(app_location))
  return app_location

View File

@@ -0,0 +1,123 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants for `gcloud tasks` and `gcloud app deploy` commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import frozendict
# Resource collection names used for Cloud Tasks resource parsing.
PROJECTS_COLLECTION = 'cloudtasks.projects'
LOCATIONS_COLLECTION = 'cloudtasks.projects.locations'
QUEUES_COLLECTION = 'cloudtasks.projects.locations.queues'
TASKS_COLLECTION = 'cloudtasks.projects.locations.queues.tasks'
# Environment variable exposing the currently running gcloud command name.
GCLOUD_COMMAND_ENV_KEY = 'CLOUDSDK_METRICS_COMMAND_NAME'
# Commands that require an App Engine app to exist in the project.
COMMANDS_THAT_NEED_APPENGINE = frozenset([
    'gcloud.scheduler.jobs.create.app-engine',
    'gcloud.alpha.scheduler.jobs.create.app-engine',
    'gcloud.beta.scheduler.jobs.create.app-engine',
    'gcloud.scheduler.jobs.update.app-engine',
    'gcloud.alpha.scheduler.jobs.update.app-engine',
    'gcloud.beta.scheduler.jobs.update.app-engine',
])
# Queue type identifiers (values accepted by the --type flag).
PULL_QUEUE = 'pull'
PUSH_QUEUE = 'push'
VALID_QUEUE_TYPES = (PULL_QUEUE, PUSH_QUEUE)
# Task type identifiers.
PULL_TASK = 'pull'
APP_ENGINE_TASK = 'app-engine'
HTTP_TASK = 'http'
# Valid keys for the --routing / --routing-override ArgDict flags.
APP_ENGINE_ROUTING_KEYS = ('service', 'version', 'instance')
# Valid keys for the --http-uri-override ArgDict flag.
HTTP_URI_OVERIDE_KEYS = ('scheme', 'host', 'port', 'path', 'query', 'mode')
APP_ENGINE_DEFAULT_LOCATION_WARNING = (
    'We are using the App Engine app location ({}) as the default location. '
    'Please use the "--location" flag if you want to use a different location.')
QUEUE_MANAGEMENT_WARNING = (
    'You are managing queues with gcloud, do not use queue.yaml or queue.xml '
    'in the future. More details at: '
    'https://cloud.google.com/tasks/docs/queue-yaml.')
MAX_RATE = 500
MAX_BUCKET_SIZE = 500
# The maximum amount of time that a task will remain in a queue without being
# executed. We use this value to have consistent behaviour with superapps's
# implementation which would have an infinite TTL as they were instead tracking
# quota usage by memory used by tasks in BigTable. The current TTL set for a
# legacy queue equals '315576000000.999999999s'.
MAX_TASK_TTL = '315360000s'  # 10 years
# The maximum amount of time that a task's name will be reserved after deletion.
# We use this value to have consistent behaviour with the legacy superapps
# implementation.
MAX_TASK_TOMBSTONE_TTL = '777600s'  # 9 days
# Multipliers for converting time-unit suffixes to seconds.
TIME_IN_SECONDS = frozendict.frozendict({
    's': 1,
    'm': 60,
    'h': 3600,
    'd': 86400,
})
# Maps queue.yaml attribute names to their Cloud Tasks API equivalents.
APP_TO_TASKS_ATTRIBUTES_MAPPING = frozendict.frozendict({
    'bucket_size': 'max_burst_size',
    'max_concurrent_requests': 'max_concurrent_dispatches',
    'mode': 'type',
    'name': 'name',
    'rate': 'max_dispatches_per_second',
    'retry_parameters.min_backoff_seconds': 'min_backoff',
    'retry_parameters.max_backoff_seconds': 'max_backoff',
    'retry_parameters.max_doublings': 'max_doublings',
    'retry_parameters.task_age_limit': 'max_retry_duration',
    'retry_parameters.task_retry_limit': 'max_attempts',
    'target': 'routing_override',
    # Not supported and need to deprecate if possible. See go/remove-tq-quotas
    # 'total_storage_limit': 'total_storage_limit'
})
# Defaults applied to push queues deployed via `gcloud app deploy queue.yaml`.
PUSH_QUEUES_APP_DEPLOY_DEFAULT_VALUES = frozendict.frozendict({
    'max_attempts': -1,  # Translates as 'unlimited' in CT-FE
    'max_backoff': '3600s',
    'max_doublings': 16,
    'max_burst_size': 5,
    # The previous behavior when max_concurrent_dispatches was not present in
    # the YAML file was to NOT set it at all which would show up as 0 in the
    # UI. However, functionally it is no different from using the default value
    # of 1000 and this is more or less a UI fix.
    'max_concurrent_dispatches': 1000,
    'max_retry_duration': '0s',  # Translates as 'unlimited' in CT-FE
    'min_backoff': '0.100s',
})
# Legacy retry defaults applied to cron jobs.
CRON_JOB_LEGACY_DEFAULT_VALUES = frozendict.frozendict({
    'max_backoff': 3600,
    'max_doublings': 16,
    'max_retry_duration': '0s',
    'min_backoff': 0.1,
})
# Note currently CT APIs do not support modifying any pull-queue attributes
# except max_attempts and max_retry_duration while queue.yaml does not support
# max_retry_duration.
PULL_QUEUES_APP_DEPLOY_DEFAULT_VALUES = frozendict.frozendict({
    'max_attempts': -1,  # Translates as 'unlimited' in CT-FE
})

View File

@@ -0,0 +1,726 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for flags for `gcloud tasks` commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from googlecloudsdk.api_lib import tasks as tasks_api_lib
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.tasks import constants
from googlecloudsdk.command_lib.util.apis import arg_utils
def AddCmekConfigResourceFlag(parser):
  """Add flags for CMEK Update.

  Adds a standalone --location flag plus one mutually exclusive group with two
  alternatives: updating the CMEK key (--kms-key-name, optionally
  --kms-keyring / --kms-project) or clearing it (--clear-kms-key).

  Args:
    parser: The argparse parser to add the flags to.
  """
  kms_key_name_arg = base.Argument(
      '--kms-key-name',
      help=(
          'Fully qualified identifier for the key or just the key ID. The'
          ' latter requires that the --kms-keyring and --kms-project flags be'
          ' provided too.'
      ),
      required=True,
  )
  kms_keyring_arg = base.Argument(
      '--kms-keyring',
      help="""\
      KMS keyring of the KMS key.
      """,
  )
  kms_location_arg = base.Argument(
      '--location',
      help="""\
      Google Cloud location for the KMS key.
      """,
  )
  kms_project_arg = base.Argument(
      '--kms-project',
      help="""\
      Google Cloud project for the KMS key.
      """,
  )
  # UPDATE: --kms-key-name is required within this group; keyring/project are
  # only needed when the key name is not fully qualified.
  cmek_update_group = base.ArgumentGroup(
      help='Flags for Updating CMEK Resource key',
  )
  cmek_update_group.AddArgument(kms_key_name_arg)
  cmek_update_group.AddArgument(kms_keyring_arg)
  cmek_update_group.AddArgument(kms_project_arg)
  # CLEAR
  clear_kms_key_name_flag = base.Argument(
      '--clear-kms-key',
      action='store_true',
      required=True,
      help=(
          'Disables CMEK for Cloud Tasks in the specified location by clearing'
          ' the Cloud KMS cryptokey from the Cloud Tasks project and CMEK'
          ' configuration.'
      ),
  )
  cmek_clear_group = base.ArgumentGroup(
      help='Flags for clearing CMEK Resource key.',
  )
  cmek_clear_group.AddArgument(clear_kms_key_name_flag)
  # UPDATE AND CLEAR GROUP. mutex=True makes clear and update exclusive.
  cmek_clear_update_group = base.ArgumentGroup(
      help='Flags for Clearing or Updating CMEK Resource', mutex=True
  )
  cmek_clear_update_group.AddArgument(cmek_clear_group)
  cmek_clear_update_group.AddArgument(cmek_update_group)
  kms_location_arg.AddToParser(parser)
  cmek_clear_update_group.AddToParser(parser)
def DescribeCmekConfigResourceFlag(parser):
  """Add flags for CMEK Describe."""
  # Only the location is needed to describe the CMEK configuration.
  base.Argument(
      '--location',
      required=True,
      help="""\
      Google Cloud location for the KMS key.
      """,
  ).AddToParser(parser)
def AddQueueResourceArg(parser, verb):
  """Adds the positional 'queue' argument; verb completes the help text."""
  base.Argument('queue', help='The queue {}.\n\n'.format(verb)).AddToParser(
      parser
  )


def AddQueueResourceFlag(parser, required=True, plural_tasks=False):
  """Adds the --queue flag naming the queue a task (or tasks) belongs to."""
  description = ('The queue the tasks belong to.'
                 if plural_tasks else 'The queue the task belongs to.')
  argument = base.Argument('--queue', help=description, required=required)
  argument.AddToParser(parser)


def AddTaskIdFlag(parser):
  """Adds the optional --task-id flag used when creating a task."""
  description = ('The task ID for the task being created.')
  argument = base.Argument('--task-id', help=description, required=False)
  argument.AddToParser(parser)


def AddTaskResourceArgs(parser, verb):
  """Adds the positional 'task' argument plus an optional --queue flag."""
  base.Argument(
      'task', help='The task {}.\n\n'.format(verb)).AddToParser(parser)
  AddQueueResourceFlag(parser, required=False)


def AddLocationFlag(parser, required=False, helptext=None):
  """Adds the --location flag, with default help text if none is supplied."""
  if helptext is None:
    helptext = (
        'The location where we want to manage the queue or task. If not '
        "specified, uses the location of the current project's App Engine "
        'app if there is an associated app.')
  argument = base.Argument(
      '--location', hidden=False, help=helptext, required=required)
  argument.AddToParser(parser)
def AddCreatePullQueueFlags(parser):
  """Adds the flags needed to create a pull queue."""
  for flag in _PullQueueFlags():
    flag.AddToParser(parser)


def AddCreatePushQueueFlags(
    parser,
    release_track=base.ReleaseTrack.GA,
    app_engine_queue=False,
    http_queue=True,
):
  """Creates flags related to Push queues.

  Args:
    parser: The argparse parser to add the flags to.
    release_track: Release track; ALPHA uses different flag names than BETA/GA.
    app_engine_queue: If True, skips adding --type on the BETA track.
    http_queue: If True, also adds HTTP-target queue and auth override flags.
  """
  if release_track == base.ReleaseTrack.ALPHA:
    flags = _AlphaPushQueueFlags()
  else:
    flags = _PushQueueFlags(release_track)
  if release_track == base.ReleaseTrack.BETA:
    if not app_engine_queue:
      AddQueueTypeFlag(parser)
  # HTTP Queues can be enabled for all ALPHA, BETA, and GA tracks.
  if http_queue:
    flags += _HttpPushQueueFlags()
    # On create, the service-account email is required when auth is used.
    _AddHttpTargetAuthFlags(parser, is_email_required=True)
  for flag in flags:
    flag.AddToParser(parser)
def AddUpdatePullQueueFlags(parser):
  """Adds pull-queue flags plus their --clear-* counterparts for updates."""
  for flag in _PullQueueFlags():
    _AddFlagAndItsClearEquivalent(flag, parser)


def AddUpdatePushQueueFlags(
    parser,
    release_track=base.ReleaseTrack.GA,
    app_engine_queue=False,
    http_queue=True,
):
  """Updates flags related to Push queues.

  Each flag is added together with a mutually exclusive --clear-* variant.

  Args:
    parser: The argparse parser to add the flags to.
    release_track: Release track; ALPHA uses different flag names than BETA/GA.
    app_engine_queue: If True, skips adding --type on the BETA track.
    http_queue: If True, also adds HTTP-target queue and auth override flags.
  """
  if release_track == base.ReleaseTrack.ALPHA:
    flags = _AlphaPushQueueFlags()
  else:
    flags = _PushQueueFlags(release_track)
  if release_track == base.ReleaseTrack.BETA:
    if not app_engine_queue:
      AddQueueTypeFlag(parser)
  # HTTP Queues can be enabled for all ALPHA, BETA, and GA tracks.
  if http_queue:
    # Auth override flags are returned (parser=None) so they also get
    # --clear-* equivalents below.
    flags += _HttpPushQueueFlags() + _AddHttpTargetAuthFlags()
  for flag in flags:
    _AddFlagAndItsClearEquivalent(flag, parser)
def _AddFlagAndItsClearEquivalent(flag, parser):
  """Adds a mutex group pairing the flag with its --clear-* counterpart."""
  group = base.ArgumentGroup(mutex=True)
  for member in (flag, _EquivalentClearFlag(flag)):
    group.AddArgument(member)
  group.AddToParser(parser)


def _EquivalentClearFlag(flag):
  """Builds the boolean --clear-* flag mirroring the given flag."""
  clear_name = flag.name.replace('--', '--clear-')
  return base.Argument(
      clear_name,
      action='store_true',
      help="""\
      Clear the field corresponding to `{}`.""".format(flag.name))
def AddPolicyFileFlag(parser):
  """Adds the positional policy_file argument for IAM policy commands."""
  base.Argument('policy_file', help="""\
      JSON or YAML file containing the IAM policy.""").AddToParser(parser)


def AddTaskLeaseScheduleTimeFlag(parser, verb):
  """Adds the required --schedule-time flag; verb completes the help text."""
  base.Argument(
      '--schedule-time', required=True,
      help="""\
      The task's current schedule time. This restriction is to check that the
      caller is {} the correct task.
      """.format(verb)).AddToParser(parser)


def AddTaskLeaseDurationFlag(parser, helptext=None):
  """Adds the required integer --lease-duration flag (in seconds)."""
  if helptext is None:
    helptext = ('The number of seconds for the desired new lease duration, '
                'starting from now. The maximum lease duration is 1 week.')
  base.Argument('--lease-duration', required=True, type=int,
                help=helptext).AddToParser(parser)


def AddMaxTasksToLeaseFlag(parser):
  """Adds a --limit flag bounding how many tasks may be leased at once."""
  # Default help for base.LIMIT_FLAG is inaccurate and confusing in this case
  base.Argument(
      '--limit', type=int, default=1000, category=base.LIST_COMMAND_FLAGS,
      help="""\
      The maximum number of tasks to lease. The maximum that can be requested is
      1000.
      """).AddToParser(parser)
def AddQueueTypeFlag(parser):
  """Adds the --type flag selecting between 'push' and 'pull' queues."""
  base.Argument(
      '--type',
      type=_GetQueueTypeArgValidator(),
      default='push',
      help="""\
      Specifies the type of queue. Only available options are 'push' and
      'pull'. The default option is 'push'.
      """).AddToParser(parser)


def AddFilterLeasedTasksFlag(parser):
  """Adds mutually exclusive --tag / --oldest-tag lease filtering flags."""
  tag_filter_group = parser.add_mutually_exclusive_group()
  tag_filter_group.add_argument('--tag', help="""\
      A tag to filter each task to be leased. If a task has the tag and the
      task is available to be leased, then it is listed and leased.
      """)
  tag_filter_group.add_argument('--oldest-tag', action='store_true', help="""\
      Only lease tasks which have the same tag as the task with the oldest
      schedule time.
      """)
def AddCreatePullTaskFlags(parser):
  """Add flags needed for creating a pull task to the parser."""
  AddQueueResourceFlag(parser, required=True)
  _GetTaskIdFlag().AddToParser(parser)
  for flag in _PullTaskFlags():
    flag.AddToParser(parser)
  # Pull tasks always use the alpha-style --payload-* flags.
  _AddPayloadFlags(parser, True)


def AddCreateAppEngineTaskFlags(parser, is_alpha=False):
  """Add flags needed for creating a App Engine task to the parser."""
  AddQueueResourceFlag(parser, required=True)
  _GetTaskIdFlag().AddToParser(parser)
  # Alpha uses --url/--payload-*; GA/beta use --relative-uri/--body-*.
  flags = _AlphaAppEngineTaskFlags() if is_alpha else _AppEngineTaskFlags()
  for flag in flags:
    flag.AddToParser(parser)
  _AddPayloadFlags(parser, is_alpha)


def AddCreateHttpTaskFlags(parser):
  """Add flags needed for creating a HTTP task to the parser."""
  AddQueueResourceFlag(parser, required=True)
  _GetTaskIdFlag().AddToParser(parser)
  for flag in _HttpTaskFlags():
    flag.AddToParser(parser)
  _AddPayloadFlags(parser)
  # HTTP tasks additionally support OIDC/OAuth request authentication.
  _AddAuthFlags(parser)
def _PullQueueFlags():
  """Returns the flag list shared by pull queues (reused by push queues)."""
  return [
      base.Argument(
          '--max-attempts',
          type=arg_parsers.BoundedInt(-1, sys.maxsize, unlimited=True),
          help="""\
          The maximum number of attempts per task in the queue.
          """),
      # This is actually a push-queue and not a pull-queue flag. However, the
      # way this argument is being currently used does not impact
      # functionality.
      base.Argument(
          '--max-retry-duration',
          help="""\
          The time limit for retrying a failed task, measured from when the task
          was first run. Once the `--max-retry-duration` time has passed and the
          task has been attempted --max-attempts times, no further attempts will
          be made and the task will be deleted.
          Must be a string that ends in 's', such as "5s".
          """),
  ]
def _BasePushQueueFlags():
  """Returns flags common to all push queues, extending the pull-queue set."""
  return _PullQueueFlags() + [
      base.Argument(
          '--max-doublings',
          type=int,
          help="""\
          The time between retries will double maxDoublings times.
          A tasks retry interval starts at minBackoff, then doubles maxDoublings
          times, then increases linearly, and finally retries retries at
          intervals of maxBackoff up to maxAttempts times.
          For example, if minBackoff is 10s, maxBackoff is 300s, and
          maxDoublings is 3, then the a task will first be retried in 10s. The
          retry interval will double three times, and then increase linearly by
          2^3 * 10s. Finally, the task will retry at intervals of maxBackoff
          until the task has been attempted maxAttempts times. Thus, the
          requests will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s.
          """),
      base.Argument(
          '--min-backoff',
          help="""\
          The minimum amount of time to wait before retrying a task after it
          fails. Must be a string that ends in 's', such as "5s".
          """),
      base.Argument(
          '--max-backoff',
          help="""\
          The maximum amount of time to wait before retrying a task after it
          fails. Must be a string that ends in 's', such as "5s".
          """),
      base.Argument(
          '--routing-override',
          # ArgDict restricted to the service/version/instance routing keys.
          type=arg_parsers.ArgDict(
              key_type=_GetAppEngineRoutingKeysValidator(),
              min_length=1,
              max_length=3,
              operators={':': None}),
          metavar='KEY:VALUE',
          help="""\
          If provided, the specified App Engine route is used for all tasks
          in the queue, no matter what is set is at the task-level.
          KEY must be at least one of: [{}]. Any missing keys will use the
          default.
          """.format(', '.join(constants.APP_ENGINE_ROUTING_KEYS))),
  ]
def _AlphaPushQueueFlags():
  """Returns push-queue flags using the ALPHA-track flag names."""
  return _BasePushQueueFlags() + [
      base.Argument(
          '--max-tasks-dispatched-per-second',
          type=float,
          help="""\
          The maximum rate at which tasks are dispatched from this queue.
          """),
      base.Argument(
          '--max-concurrent-tasks',
          type=int,
          help="""\
          The maximum number of concurrent tasks that Cloud Tasks allows to
          be dispatched for this queue. After this threshold has been reached,
          Cloud Tasks stops dispatching tasks until the number of outstanding
          requests decreases.
          """),
  ]
def _HttpPushQueueFlags():
  """Returns the queue-level HTTP target override flags."""
  return [
      base.Argument(
          '--http-uri-override',
          # ArgDict restricted to scheme/host/port/path/query/mode keys.
          type=arg_parsers.ArgDict(
              key_type=_GetHttpUriOverrideKeysValidator(),
              min_length=1,
              max_length=6,
              operators={':': None}),
          metavar='KEY:VALUE',
          help="""\
          If provided, the specified HTTP target URI override is used for all
          tasks in the queue depending on what is set as the mode.
          Allowed values for mode are: ALWAYS, IF_NOT_EXISTS. If not set, mode
          defaults to ALWAYS.
          KEY must be at least one of: [{}]. Any missing keys will use the
          default.
          """.format(', '.join(constants.HTTP_URI_OVERIDE_KEYS))),
      base.Argument(
          '--http-method-override',
          help="""\
          If provided, the specified HTTP method type override is used for
          all tasks in the queue, no matter what is set at the task-level.
          """),
      base.Argument(
          '--http-header-override',
          metavar='HEADER_FIELD: HEADER_VALUE',
          action='append',
          type=_GetHeaderArgValidator(),
          help="""\
          If provided, the specified HTTP headers override the existing
          headers for all tasks in the queue.
          If a task has a header with the same Key as a queue-level header
          override, then the value of the task header will be overriden with
          the value of the queue-level header. Otherwise, the queue-level
          header will be added to the task headers.
          Header values can contain commas. This flag can be repeated.
          Repeated header fields will have their values overridden.
          """),
  ]
def _PushQueueFlags(release_track=base.ReleaseTrack.GA):
  """Returns flags needed by push queues."""
  flags = _BasePushQueueFlags()
  flags.append(
      base.Argument(
          '--max-dispatches-per-second',
          type=float,
          help="""\
          The maximum rate at which tasks are dispatched from this queue.
          """))
  flags.append(
      base.Argument(
          '--max-concurrent-dispatches',
          type=int,
          help="""\
          The maximum number of concurrent tasks that Cloud Tasks allows to
          be dispatched for this queue. After this threshold has been reached,
          Cloud Tasks stops dispatching tasks until the number of outstanding
          requests decreases.
          """))
  # Log sampling is only exposed on the BETA and GA tracks.
  if release_track in (base.ReleaseTrack.BETA, base.ReleaseTrack.GA):
    flags.append(
        base.Argument(
            '--log-sampling-ratio',
            type=float,
            help="""\
            Specifies the fraction of operations to write to Cloud Logging.
            This field may contain any value between 0.0 and 1.0, inclusive. 0.0 is
            the default and means that no operations are logged.
            """))
  return flags
def _PullTaskFlags():
  """Returns flags for creating pull tasks."""
  return _CommonTaskFlags() + [
      base.Argument('--tag', help="""\
          An optional label used to group similar tasks.
          """),
  ]


def _BasePushTaskFlags():
  """Returns flags shared by HTTP and App Engine (push) tasks."""
  return _CommonTaskFlags() + [
      base.Argument('--method', help="""\
          The HTTP method to use for the request. If not specified, "POST" will
          be used.
          """),
      base.Argument('--header', metavar='HEADER_FIELD: HEADER_VALUE',
                    action='append', type=_GetHeaderArgValidator(),
                    help="""\
          An HTTP request header. Header values can contain commas. This flag
          can be repeated. Repeated header fields will have their values
          overridden.
          """),
  ]


def _HttpTaskFlags():
  """Returns flags for creating HTTP tasks (requires a full --url)."""
  return _BasePushTaskFlags() + [
      base.Argument('--url', required=True, help="""\
          The full URL path that the request will be sent to. This string must
          begin with either "http://" or "https://".
          """),
  ]
def _BaseAppEngineTaskFlags():
  """Returns flags shared by App Engine tasks across release tracks."""
  return _BasePushTaskFlags() + [
      base.Argument(
          '--routing',
          # ArgDict restricted to the service/version/instance routing keys.
          type=arg_parsers.ArgDict(key_type=_GetAppEngineRoutingKeysValidator(),
                                   min_length=1, max_length=3,
                                   operators={':': None}),
          metavar='KEY:VALUE',
          help="""\
          The route to be used for this task. KEY must be at least one of:
          [{}]. Any missing keys will use the default.
          Routing can be overridden by the queue-level `--routing-override`
          flag.
          """.format(', '.join(constants.APP_ENGINE_ROUTING_KEYS))),
  ]


def _AlphaAppEngineTaskFlags():
  """Returns App Engine task flags for the ALPHA track (uses --url)."""
  return _BaseAppEngineTaskFlags() + [
      base.Argument('--url', help="""\
          The relative URL of the request. Must begin with "/" and must be a
          valid HTTP relative URL. It can contain a path and query string
          arguments. If not specified, then the root path "/" will be used.
          """),
  ]


def _AppEngineTaskFlags():
  """Returns App Engine task flags for BETA/GA (uses --relative-uri)."""
  return _BaseAppEngineTaskFlags() + [
      base.Argument('--relative-uri', help="""\
          The relative URI of the request. Must begin with "/" and must be a
          valid HTTP relative URI. It can contain a path and query string
          arguments. If not specified, then the root path "/" will be used.
          """),
  ]
def _GetTaskIdFlag():
  """Returns the optional positional TASK_ID argument for task creation."""
  return base.Argument(
      'task',
      metavar='TASK_ID',
      nargs='?',
      help="""\
      The task to create.
      If not specified then the system will generate a random unique task
      ID. Explicitly specifying a task ID enables task de-duplication. If a
      task's ID is identical to that of an existing task or a task that was
      deleted or completed recently then the call will fail.
      Because there is an extra lookup cost to identify duplicate task
      names, tasks created with IDs have significantly increased latency.
      Using hashed strings for the task ID or for the prefix of the task ID
      is recommended.
      """)


def _CommonTaskFlags():
  """Returns flags common to all task types."""
  return [
      base.Argument('--schedule-time', help="""\
          The time when the task is scheduled to be first attempted. Defaults to
          "now" if not specified.
          """)
  ]
def _AddPayloadFlags(parser, is_alpha=False):
  """Adds either payload or body flags.

  Args:
    parser: The argparse parser to add the flags to.
    is_alpha: If True, adds the alpha-style --payload-* flags; otherwise the
      --body-* flags. Within each pair, content and file are mutually
      exclusive.
  """
  payload_group = parser.add_mutually_exclusive_group()
  if is_alpha:
    payload_group.add_argument('--payload-content', help="""\
        Data payload used by the task worker to process the task.
        """)
    payload_group.add_argument('--payload-file', help="""\
        File containing data payload used by the task worker to process the
        task.
        """)
  else:
    payload_group.add_argument('--body-content', help="""\
        HTTP Body data sent to the task worker processing the task.
        """)
    payload_group.add_argument('--body-file', help="""\
        File containing HTTP body data sent to the task worker processing
        the task.
        """)
def _AddAuthFlags(parser):
  """Add flags for http auth.

  Adds a mutually exclusive group with two alternatives: OIDC flags or OAuth2
  flags. In each alternative the service-account email is required.

  Args:
    parser: The argparse parser to add the flags to.
  """
  auth_group = parser.add_mutually_exclusive_group(help="""\
      How the request sent to the target when executing the task should be
      authenticated.
      """)
  oidc_group = auth_group.add_argument_group(help='OpenId Connect')
  oidc_group.add_argument('--oidc-service-account-email', required=True,
                          help="""\
      The service account email to be used for generating an OpenID
      Connect token to be included in the request sent to the target when
      executing the task. The service account must be within the same
      project as the queue. The caller must have
      'iam.serviceAccounts.actAs' permission for the service account.
      """)
  oidc_group.add_argument('--oidc-token-audience', help="""\
      The audience to be used when generating an OpenID Connect token to
      be included in the request sent to the target when executing the
      task. If not specified, the URI specified in the target will be
      used.
      """)
  oauth_group = auth_group.add_argument_group(help='OAuth2')
  oauth_group.add_argument('--oauth-service-account-email', required=True,
                           help="""\
      The service account email to be used for generating an OAuth2 access
      token to be included in the request sent to the target when
      executing the task. The service account must be within the same
      project as the queue. The caller must have
      'iam.serviceAccounts.actAs' permission for the service account.
      """)
  oauth_group.add_argument('--oauth-token-scope', help="""\
      The scope to be used when generating an OAuth2 access token to be
      included in the request sent to the target when executing the task.
      If not specified, 'https://www.googleapis.com/auth/cloud-platform'
      will be used.
      """)
def _AddHttpTargetAuthFlags(parser=None, is_email_required=False):
  """Add flags for http auth.

  Builds the queue-level Authorization override flags (OIDC or OAuth2, as a
  mutex group).

  Args:
    parser: If not None, the flag group is added to this parser; if None, the
      flags are only built and returned (used by the update path, which wraps
      each one with a --clear-* equivalent).
    is_email_required: Whether the service-account email flags are required.

  Returns:
    The list of the four auth override Argument objects created.
  """
  auth_group = base.ArgumentGroup(
      mutex=True,
      help="""\
      If specified, all `Authorization` headers in the HttpRequest.headers
      field will be overridden for any tasks executed on this queue.
      """)
  oidc_group = base.ArgumentGroup(help='OpenId Connect')
  oidc_email_arg = base.Argument(
      '--http-oidc-service-account-email-override',
      required=is_email_required,
      help="""\
      The service account email to be used for generating an OpenID
      Connect token to be included in the request sent to the target when
      executing the task. The service account must be within the same
      project as the queue. The caller must have
      'iam.serviceAccounts.actAs' permission for the service account.
      """)
  oidc_group.AddArgument(oidc_email_arg)
  oidc_token_arg = base.Argument(
      '--http-oidc-token-audience-override',
      help="""\
      The audience to be used when generating an OpenID Connect token to
      be included in the request sent to the target when executing the
      task. If not specified, the URI specified in the target will be
      used.
      """)
  oidc_group.AddArgument(oidc_token_arg)
  oauth_group = base.ArgumentGroup(help='OAuth2')
  oauth_email_arg = base.Argument(
      '--http-oauth-service-account-email-override',
      required=is_email_required,
      help="""\
      The service account email to be used for generating an OAuth2 access
      token to be included in the request sent to the target when
      executing the task. The service account must be within the same
      project as the queue. The caller must have
      'iam.serviceAccounts.actAs' permission for the service account.
      """)
  oauth_group.AddArgument(oauth_email_arg)
  oauth_scope_arg = base.Argument(
      '--http-oauth-token-scope-override',
      help="""\
      The scope to be used when generating an OAuth2 access token to be
      included in the request sent to the target when executing the task.
      If not specified, 'https://www.googleapis.com/auth/cloud-platform'
      will be used.
      """)
  oauth_group.AddArgument(oauth_scope_arg)
  auth_group.AddArgument(oidc_group)
  auth_group.AddArgument(oauth_group)
  if parser is not None:
    auth_group.AddToParser(parser)
  return [oidc_email_arg, oidc_token_arg, oauth_email_arg, oauth_scope_arg]
def _GetAppEngineRoutingKeysValidator():
  """Validator accepting only App Engine routing keys."""
  valid_keys = constants.APP_ENGINE_ROUTING_KEYS
  return arg_parsers.CustomFunctionValidator(
      valid_keys.__contains__,
      'Only the following keys are valid for routing: [{}].'.format(
          ', '.join(valid_keys)))


def _GetHttpUriOverrideKeysValidator():
  """Validator accepting only HTTP URI override keys."""
  valid_keys = constants.HTTP_URI_OVERIDE_KEYS
  return arg_parsers.CustomFunctionValidator(
      valid_keys.__contains__,
      'Only the following keys are valid for routing: [{}].'.format(
          ', '.join(valid_keys)))


def _GetQueueTypeArgValidator():
  """Validator accepting only the supported queue types."""
  valid_types = constants.VALID_QUEUE_TYPES
  return arg_parsers.CustomFunctionValidator(
      valid_types.__contains__,
      'Only the following queue types are valid: [{}].'.format(
          ', '.join(valid_types)))


def _GetHeaderArgValidator():
  """Validator enforcing the 'HEADER_FIELD: HEADER_VALUE' header syntax."""
  return arg_parsers.RegexpValidator(
      r'^(\S+):(.+)$', 'Must be of the form: "HEADER_FIELD: HEADER_VALUE".')
def GetTaskResponseViewMapper(release_track):
  """Returns a --response-view enum mapper for the given release track."""
  return arg_utils.ChoiceEnumMapper(
      '--response-view',
      # Resolve the response-view enum from the messages module matching the
      # release track's API version.
      apis.GetMessagesModule(
          tasks_api_lib.API_NAME,
          tasks_api_lib.ApiVersionFromReleaseTrack(
              release_track)).CloudtasksProjectsLocationsQueuesTasksGetRequest
      .ResponseViewValueValuesEnum,
      default='basic',
      help_str='Task response view.')

View File

@@ -0,0 +1,141 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List command formats and transforms for `gcloud tasks`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.tasks import constants
from googlecloudsdk.command_lib.tasks import parsers
# pylint: disable=line-too-long
_ALPHA_QUEUE_LIST_FORMAT = '''table(
name.basename():label="QUEUE_NAME",
queuetype():label=TYPE,
state,
rateLimits.maxConcurrentTasks.yesno(no="unlimited").format("{0}").sub("-1", "unlimited"):label="MAX_NUM_OF_TASKS",
rateLimits.maxTasksDispatchedPerSecond.yesno(no="unlimited"):label="MAX_RATE (/sec)",
retryConfig.maxAttempts.yesno(no="unlimited"):label="MAX_ATTEMPTS")'''
_BETA_QUEUE_LIST_FORMAT = '''table(
name.basename():label="QUEUE_NAME",
queuetype():label=TYPE,
state,
rateLimits.maxConcurrentDispatches.yesno(no="unlimited").format("{0}").sub("-1", "unlimited"):label="MAX_NUM_OF_TASKS",
rateLimits.maxDispatchesPerSecond.yesno(no="unlimited"):label="MAX_RATE (/sec)",
retryConfig.maxAttempts.yesno(no="unlimited").format("{0}").sub("-1", "unlimited"):label="MAX_ATTEMPTS")'''
_QUEUE_LIST_FORMAT = '''table(
name.basename():label="QUEUE_NAME",
state,
rateLimits.maxConcurrentDispatches.yesno(no="unlimited").format("{0}").sub("-1", "unlimited"):label="MAX_NUM_OF_TASKS",
rateLimits.maxDispatchesPerSecond.yesno(no="unlimited"):label="MAX_RATE (/sec)",
retryConfig.maxAttempts.yesno(no="unlimited").format("{0}").sub("-1", "unlimited"):label="MAX_ATTEMPTS")'''
_ALPHA_TASK_LIST_FORMAT = '''table(
name.basename():label="TASK_NAME",
tasktype():label=TYPE,
createTime,
scheduleTime,
status.attemptDispatchCount.yesno(no="0"):label="DISPATCH_ATTEMPTS",
status.attemptResponseCount.yesno(no="0"):label="RESPONSE_ATTEMPTS",
status.lastAttemptStatus.responseStatus.message.yesno(no="Unknown")
:label="LAST_ATTEMPT_STATUS")'''
_TASK_LIST_FORMAT = '''table(
name.basename():label="TASK_NAME",
tasktype():label=TYPE,
createTime,
scheduleTime,
dispatchCount.yesno(no="0"):label="DISPATCH_ATTEMPTS",
responseCount.yesno(no="0"):label="RESPONSE_ATTEMPTS",
lastAttempt.responseStatus.message.yesno(no="Unknown")
:label="LAST_ATTEMPT_STATUS")'''
_LOCATION_LIST_FORMAT = '''table(
locationId:label="NAME",
name:label="FULL_NAME")'''
# pylint: enable=line-too-long
def AddListQueuesFormats(parser, version=base.ReleaseTrack.GA):
  """Registers the queue list format, transforms, and URI func on parser."""
  if version == base.ReleaseTrack.ALPHA:
    queue_format = _ALPHA_QUEUE_LIST_FORMAT
  elif version == base.ReleaseTrack.BETA:
    queue_format = _BETA_QUEUE_LIST_FORMAT
  else:
    queue_format = _QUEUE_LIST_FORMAT
  # Only the alpha/beta formats reference the queuetype() transform.
  if version in (base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA):
    parser.display_info.AddTransforms({'queuetype': _TransformQueueType})
  parser.display_info.AddFormat(queue_format)
  parser.display_info.AddUriFunc(parsers.QueuesUriFunc)
def AddListTasksFormats(parser, is_alpha=False):
  """Registers the task list format, transforms, and URI func on parser."""
  task_format = _ALPHA_TASK_LIST_FORMAT if is_alpha else _TASK_LIST_FORMAT
  parser.display_info.AddTransforms({'tasktype': _TransformTaskType})
  parser.display_info.AddFormat(task_format)
  parser.display_info.AddUriFunc(parsers.TasksUriFunc)
def AddListLocationsFormats(parser):
  """Registers the location list format and URI func on parser."""
  display_info = parser.display_info
  display_info.AddFormat(_LOCATION_LIST_FORMAT)
  display_info.AddUriFunc(parsers.LocationsUriFunc)
def _IsPullQueue(r):
return 'pullTarget' in r or ('type' in r and r['type'] == 'PULL')
def _IsPushQueue(r):
# appEngineHttpTarget is used in the v2beta2 version of the API but will be
# deprecated soon.
return ('appEngineHttpTarget' in r or 'appEngineHttpQueue' in r or
'appEngineRoutingOverride' in r or
('type' in r and r['type'] == 'PUSH'))
def _IsPullTask(r):
  # Serialized pull tasks carry a pullMessage payload.
  return 'pullMessage' in r
def _IsAppEngineTask(r):
  # Serialized App Engine tasks carry an appEngineHttpRequest.
  return 'appEngineHttpRequest' in r
def _IsHttpTask(r):
  # Serialized HTTP tasks carry an httpRequest.
  return 'httpRequest' in r
def _TransformQueueType(r):
  """Display transform producing the TYPE column for a queue row."""
  if _IsPullQueue(r):
    return constants.PULL_QUEUE
  return constants.PUSH_QUEUE if _IsPushQueue(r) else None
def _TransformTaskType(r):
  """Display transform producing the TYPE column for a task row."""
  # Check order matches the original pull -> app-engine -> http precedence.
  checks = ((_IsPullTask, constants.PULL_QUEUE),
            (_IsAppEngineTask, 'app-engine'),
            (_IsHttpTask, 'http'))
  for predicate, label in checks:
    if predicate(r):
      return label
  return None

View File

@@ -0,0 +1,981 @@
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for parsing arguments to `gcloud tasks` commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from apitools.base.py import encoding
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import parser_errors
from googlecloudsdk.command_lib.tasks import app
from googlecloudsdk.command_lib.tasks import constants
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.util import http_encoding
import six # pylint: disable=unused-import
from six.moves import filter # pylint:disable=redefined-builtin
from six.moves import map # pylint:disable=redefined-builtin
# Callable returning the active project id (raises when unset). Kept as a
# callable so resource parsing defers evaluation until actually needed.
_PROJECT = properties.VALUES.core.project.GetOrFail
class NoFieldsSpecifiedError(exceptions.Error):
  """Error for when calling an update method with no fields specified."""
class FullTaskUnspecifiedError(exceptions.Error):
  """Error parsing a task without specifying the queue or the full path."""
class NoFieldsSpecifiedForHttpQueueError(exceptions.Error):
  """Error for calling a create-http-queue method with no override field
  specified.
  """
class QueueUpdatableConfiguration(object):
  """Data Class for queue configuration updates.

  Maps CLI argument names to the corresponding Cloud Tasks API field names,
  grouped by queue sub-message, plus the update-mask prefix for each group.
  """

  # Arg -> API field mappings that were previously duplicated verbatim in
  # every release-track branch of FromQueueTypeAndReleaseTrack; factored out
  # so the branches only state what actually differs between tracks.
  _PULL_RETRY_FIELDS = {
      'max_attempts': 'maxAttempts',
      'max_retry_duration': 'maxRetryDuration',
  }
  _PUSH_RETRY_FIELDS = {
      'max_attempts': 'maxAttempts',
      'max_retry_duration': 'maxRetryDuration',
      'max_doublings': 'maxDoublings',
      'min_backoff': 'minBackoff',
      'max_backoff': 'maxBackoff',
  }
  _HTTP_TARGET_FIELDS = {
      'http_uri_override': 'uriOverride',
      'http_method_override': 'httpMethod',
      'http_header_override': 'headerOverrides',
      'http_oauth_service_account_email_override':
          'oauthToken.serviceAccountEmail',
      'http_oauth_token_scope_override': 'oauthToken.scope',
      'http_oidc_service_account_email_override':
          'oidcToken.serviceAccountEmail',
      'http_oidc_token_audience_override': 'oidcToken.audience',
  }
  _ROUTING_OVERRIDE_FIELDS = {
      'routing_override': 'appEngineRoutingOverride',
  }
  _STACKDRIVER_FIELDS = {
      'log_sampling_ratio': 'samplingRatio',
  }

  @classmethod
  def FromQueueTypeAndReleaseTrack(cls,
                                   queue_type,
                                   release_track=base.ReleaseTrack.GA):
    """Creates QueueUpdatableConfiguration from the given parameters.

    Args:
      queue_type: constants.PULL_QUEUE or constants.PUSH_QUEUE.
      release_track: The calliope release track; selects which rate-limit
        fields exist and where the routing override mask is rooted.

    Returns:
      A populated QueueUpdatableConfiguration instance.
    """
    config = cls()
    config.retry_config = {}
    config.rate_limits = {}
    config.app_engine_routing_override = {}
    config.http_target = {}
    config.stackdriver_logging_config = {}
    config.retry_config_mask_prefix = None
    config.rate_limits_mask_prefix = None
    config.app_engine_routing_override_mask_prefix = None
    config.http_target_mask_prefix = None
    config.stackdriver_logging_config_mask_prefix = None

    if queue_type == constants.PULL_QUEUE:
      config.retry_config = dict(cls._PULL_RETRY_FIELDS)
      config.retry_config_mask_prefix = 'retryConfig'
    elif queue_type == constants.PUSH_QUEUE:
      config.retry_config = dict(cls._PUSH_RETRY_FIELDS)
      config.app_engine_routing_override = dict(cls._ROUTING_OVERRIDE_FIELDS)
      config.http_target = dict(cls._HTTP_TARGET_FIELDS)
      config.retry_config_mask_prefix = 'retryConfig'
      config.rate_limits_mask_prefix = 'rateLimits'
      config.http_target_mask_prefix = 'httpTarget'
      if release_track == base.ReleaseTrack.ALPHA:
        # Alpha uses the older rate-limit field names and roots the routing
        # override under appEngineHttpTarget; it has no stackdriver config.
        config.rate_limits = {
            'max_tasks_dispatched_per_second': 'maxTasksDispatchedPerSecond',
            'max_concurrent_tasks': 'maxConcurrentTasks',
        }
        config.app_engine_routing_override_mask_prefix = 'appEngineHttpTarget'
      else:
        config.rate_limits = {
            'max_dispatches_per_second': 'maxDispatchesPerSecond',
            'max_concurrent_dispatches': 'maxConcurrentDispatches',
        }
        config.stackdriver_logging_config = dict(cls._STACKDRIVER_FIELDS)
        config.stackdriver_logging_config_mask_prefix = (
            'stackdriverLoggingConfig')
        if release_track == base.ReleaseTrack.BETA:
          # max_burst_size is only updatable on the beta surface.
          config.rate_limits['max_burst_size'] = 'maxBurstSize'
          config.app_engine_routing_override_mask_prefix = 'appEngineHttpQueue'
        else:
          # GA: the routing override sits at the queue's top level, so its
          # mask entries carry no prefix.
          config.app_engine_routing_override_mask_prefix = ''
    return config

  def _InitializedConfigsAndPrefixTuples(self):
    """Returns the initialized configs as a list of (config, prefix) tuples."""
    all_configs_and_prefixes = [
        (self.retry_config, self.retry_config_mask_prefix),
        (self.rate_limits, self.rate_limits_mask_prefix),
        (self.app_engine_routing_override,
         self.app_engine_routing_override_mask_prefix),
        (self.http_target, self.http_target_mask_prefix),
        (self.stackdriver_logging_config,
         self.stackdriver_logging_config_mask_prefix),
    ]
    return [(config, prefix)
            for (config, prefix) in all_configs_and_prefixes
            if config]

  def _GetSingleConfigToMaskMapping(self, config, prefix):
    """Build a map from each arg and its clear_ counterpart to a mask field."""
    fields_to_mask = dict()
    for field in config.keys():
      output_field = config[field]
      if prefix:
        fields_to_mask[field] = '{}.{}'.format(prefix, output_field)
      else:
        fields_to_mask[field] = output_field
      # clear_<arg> maps to the same mask path as <arg>.
      fields_to_mask[_EquivalentClearArg(field)] = fields_to_mask[field]
    return fields_to_mask

  def GetConfigToUpdateMaskMapping(self):
    """Builds mapping from config fields to corresponding update mask fields."""
    config_to_mask = dict()
    for (config, prefix) in self._InitializedConfigsAndPrefixTuples():
      config_to_mask.update(self._GetSingleConfigToMaskMapping(config, prefix))
    return config_to_mask

  def AllConfigs(self):
    """Returns every updatable argument name across all config groups."""
    return (list(self.retry_config.keys()) + list(self.rate_limits.keys()) +
            list(self.app_engine_routing_override.keys()) +
            list(self.http_target.keys()) +
            list(self.stackdriver_logging_config.keys()))
def ParseProject():
  """Returns a resource reference for the active core project."""
  project_ref = resources.REGISTRY.Parse(
      _PROJECT(), collection=constants.PROJECTS_COLLECTION)
  return project_ref
def ParseLocation(location):
  """Returns a location resource reference under the active project."""
  return resources.REGISTRY.Parse(
      location,
      collection=constants.LOCATIONS_COLLECTION,
      params={'projectsId': _PROJECT})
def GetConsolePromptString(queue_string):
  """Parses a full queue reference and returns an abridged version.

  Args:
    queue_string: A full qualifying path for a queue which includes project and
      location, e.g. projects/PROJECT/locations/LOCATION/queues/QUEUE

  Returns:
    A shortened string for the full queue ref which has only the location and
    the queue (LOCATION/QUEUE). For example:
      'projects/myproject/location/us-east1/queue/myqueue' => 'us-east1/myqueue'
  """
  pattern = r'projects\/.*\/locations\/(?P<location>.*)\/queues\/(?P<queue>.*)'
  match = re.match(pattern, queue_string)
  if not match:
    # Not a fully-qualified queue path; return the input unchanged.
    return queue_string
  return '/'.join((match.group('location'), match.group('queue')))
def ParseQueue(queue, location=None):
  """Parses an id or uri for a queue.

  Args:
    queue: An id, self-link, or relative path of a queue resource.
    location: The location of the app associated with the active project.

  Returns:
    A queue resource reference, or None if passed-in queue is Falsy.
  """
  if not queue:
    return None
  try:
    return resources.REGISTRY.Parse(
        queue, collection=constants.QUEUES_COLLECTION)
  except resources.RequiredFieldOmittedException:
    # A bare queue id was given; resolve the app's location to fill in the
    # missing path segments.
    app_location = location or app.ResolveAppLocation(ParseProject())
    location_ref = ParseLocation(app_location)
    return resources.REGISTRY.Parse(
        queue,
        params={
            'projectsId': location_ref.projectsId,
            'locationsId': location_ref.locationsId,
        },
        collection=constants.QUEUES_COLLECTION)
def ParseTask(task, queue_ref=None):
  """Parses an id or uri for a task.

  Raises:
    FullTaskUnspecifiedError: If a bare task id is given without a queue.
  """
  if queue_ref:
    parse_params = queue_ref.AsDict()
  else:
    parse_params = None
  try:
    return resources.REGISTRY.Parse(
        task, params=parse_params, collection=constants.TASKS_COLLECTION)
  except resources.RequiredFieldOmittedException:
    raise FullTaskUnspecifiedError(
        'Must specify either the fully qualified task path or the queue flag.')
def ParseTaskId(args):
  """Returns the task id from args, or None when absent or empty."""
  return args.task_id or None
def ParseFullKmsKeyName(kms_key_name):
  """Parses and retrieves the segments of a full KMS key name.

  Args:
    kms_key_name: A string that may be a full KMS key resource name.

  Returns:
    [project, location, keyring, key] when the name is fully qualified,
    otherwise None.
  """
  if not kms_key_name:
    return None
  pattern = (r'projects\/(?P<project>.*)\/locations\/(?P<location>.*)'
             r'\/keyRings\/(?P<keyring>.*)\/cryptoKeys\/(?P<key>.*)')
  match = re.match(pattern, kms_key_name)
  if not match:
    return None
  return [match.group(part) for part in ('project', 'location', 'keyring',
                                         'key')]
def ParseKmsUpdateArgs(args):
  """Parses KMS key value.

  Args:
    args: Parsed CLI args; reads kms_key_name, kms_keyring, kms_project and
      location.

  Returns:
    A (project_id, full_kms_key_name, location_id) tuple. full_kms_key_name is
    None when the flags are insufficient to construct one.
  """
  location_id = args.location if args.location else None
  full_kms_key_name = None
  parse_result = ParseFullKmsKeyName(args.kms_key_name)
  # Either a full kms-key-name is provided, or a short name along with other
  # params should be provided. If there is parse_result, then it is a full
  # name. If not, the user must provide all parts.
  if parse_result is not None:
    # The location embedded in the full key name wins over --location.
    location_id = parse_result[1]
    full_kms_key_name = args.kms_key_name
  elif (
      args.kms_key_name
      and args.kms_keyring
      and args.location
  ):
    full_kms_key_name = 'projects/{kms_project_id}/locations/{location_id}/keyRings/{kms_keyring}/cryptoKeys/{kms_key_name}'.format(
        kms_project_id=args.kms_project if args.kms_project else _PROJECT(),
        location_id=location_id,
        kms_keyring=args.kms_keyring,
        kms_key_name=args.kms_key_name,  # short key name
    )
  return _PROJECT(), full_kms_key_name, location_id
def ParseKmsDescribeArgs(args):
  """Parses KMS describe args into a (project_id, location_id) tuple."""
  return _PROJECT(), (args.location or None)
def ParseKmsClearArgs(args):
  """Parses KMS clear args into a (project_id, location_id) tuple."""
  return _PROJECT(), (args.location or None)
def ExtractLocationRefFromQueueRef(queue_ref):
  """Returns the location resource reference containing queue_ref."""
  # Reuse the queue's resolved params minus the queue id to address the
  # parent location.
  params = queue_ref.AsDict()
  del params['queuesId']
  location_ref = resources.REGISTRY.Parse(
      None, params=params, collection=constants.LOCATIONS_COLLECTION)
  return location_ref
def ParseCreateOrUpdateQueueArgs(
    args,
    queue_type,
    messages,
    is_update=False,
    release_track=base.ReleaseTrack.GA,
    http_queue=True,
):
  """Parses queue level args.

  Args:
    args: Parsed CLI arguments.
    queue_type: constants.PULL_QUEUE or constants.PUSH_QUEUE.
    messages: The Cloud Tasks messages module for the chosen API version.
    is_update: True when building a queue for update rather than create.
    release_track: Release track; selects which Queue message shape to build.
    http_queue: Whether HTTP target flags should be parsed at all.

  Returns:
    A messages.Queue populated from the relevant args.
  """
  if release_track == base.ReleaseTrack.ALPHA:
    # Alpha queues use appEngineHttpTarget and pullTarget sub-messages.
    app_engine_http_target = _ParseAppEngineHttpTargetArgs(
        args, queue_type, messages
    )
    http_target = (
        _ParseHttpTargetArgs(args, queue_type, messages) if http_queue else None
    )
    return messages.Queue(
        retryConfig=_ParseRetryConfigArgs(
            args, queue_type, messages, is_update, is_alpha=True
        ),
        rateLimits=_ParseAlphaRateLimitsArgs(
            args, queue_type, messages, is_update
        ),
        pullTarget=_ParsePullTargetArgs(args, queue_type, messages, is_update),
        appEngineHttpTarget=app_engine_http_target,
        httpTarget=http_target,
    )
  elif release_track == base.ReleaseTrack.BETA:
    # Beta queues use appEngineHttpQueue and carry an explicit type field.
    http_target = (
        _ParseHttpTargetArgs(args, queue_type, messages) if http_queue else None
    )
    return messages.Queue(
        retryConfig=_ParseRetryConfigArgs(
            args, queue_type, messages, is_update, is_alpha=False
        ),
        rateLimits=_ParseRateLimitsArgs(args, queue_type, messages, is_update),
        stackdriverLoggingConfig=_ParseStackdriverLoggingConfigArgs(
            args, queue_type, messages, is_update
        ),
        appEngineHttpQueue=_ParseAppEngineHttpQueueArgs(
            args, queue_type, messages
        ),
        httpTarget=http_target,
        type=_ParseQueueType(args, queue_type, messages, is_update),
    )
  else:
    # GA queues hold the routing override at the queue's top level.
    http_target = (
        _ParseHttpTargetArgs(args, queue_type, messages) if http_queue else None
    )
    return messages.Queue(
        retryConfig=_ParseRetryConfigArgs(
            args, queue_type, messages, is_update, is_alpha=False
        ),
        rateLimits=_ParseRateLimitsArgs(args, queue_type, messages, is_update),
        stackdriverLoggingConfig=_ParseStackdriverLoggingConfigArgs(
            args, queue_type, messages, is_update
        ),
        appEngineRoutingOverride=_ParseAppEngineRoutingOverrideArgs(
            args, queue_type, messages
        ),
        httpTarget=http_target,
    )
def GetHttpTargetArgs(queue_config):
  """Returns a pair of each http target attribute and its value in the queue.

  Args:
    queue_config: A queue message; its httpTarget (and the nested oauthToken /
      oidcToken messages) may each be None.

  Returns:
    A dict mapping each http-target argument name to the corresponding value
    from queue_config, or None wherever the value (or any parent message in
    its path) is unset.
  """
  def _Get(*path):
    # Walks attribute names starting from httpTarget, returning None as soon
    # as any link in the chain is missing. Replaces the previous seven
    # hand-written None-guard chains.
    value = queue_config.httpTarget
    for attr in path:
      if value is None:
        return None
      value = getattr(value, attr)
    return value

  return {
      'http_uri_override': _Get('uriOverride'),
      'http_method_override': _Get('httpMethod'),
      'http_header_override': _Get('headerOverrides'),
      'http_oauth_email_override': _Get('oauthToken', 'serviceAccountEmail'),
      'http_oauth_scope_override': _Get('oauthToken', 'scope'),
      'http_oidc_email_override': _Get('oidcToken', 'serviceAccountEmail'),
      'http_oidc_audience_override': _Get('oidcToken', 'audience'),
  }
def ExtractTargetFromAppEngineHostUrl(job, project):
  """Extracts any target (service) if it exists in the appEngineRouting field.

  Args:
    job: An instance of job fetched from the backend.
    project: The base name of the project.

  Returns:
    The target if it exists in the URL, or if it is present in the service
    attribute of the appEngineRouting field, returns None otherwise.
    Some examples are:
      'alpha.some_project.uk.r.appspot.com' => 'alpha'
      'some_project.uk.r.appspot.com' => None
  """
  routing = getattr(
      getattr(job, 'appEngineHttpTarget', None), 'appEngineRouting', None)
  # For cron jobs created with the new scheduler FE API, target is stored as
  # a service attribute in the appEngineRouting field.
  service = getattr(routing, 'service', None)
  if service:
    return service
  # For cron jobs created using admin-console-hr, target is prepended to the
  # host url.
  host_url = getattr(routing, 'host', None)
  if not host_url:
    return None
  delimiter = '.{}.'.format(project)
  if delimiter not in host_url:
    return None
  return host_url.split(delimiter, 1)[0]
def ParseCreateTaskArgs(args, task_type, messages,
                        release_track=base.ReleaseTrack.GA):
  """Parses task level args into a messages.Task for the given track."""
  if release_track == base.ReleaseTrack.ALPHA:
    # Alpha tasks may carry a pullMessage and use the alpha request shape.
    return messages.Task(
        scheduleTime=args.schedule_time,
        pullMessage=_ParsePullMessageArgs(args, task_type, messages),
        appEngineHttpRequest=_ParseAlphaAppEngineHttpRequestArgs(
            args, task_type, messages))
  return messages.Task(
      scheduleTime=args.schedule_time,
      appEngineHttpRequest=_ParseAppEngineHttpRequestArgs(
          args, task_type, messages),
      httpRequest=_ParseHttpRequestArgs(args, task_type, messages))
def CheckUpdateArgsSpecified(args, queue_type,
                             release_track=base.ReleaseTrack.GA):
  """Verifies that args are valid for updating a queue.

  Raises:
    NoFieldsSpecifiedError: If no updatable field was specified.
  """
  updatable_config = QueueUpdatableConfiguration.FromQueueTypeAndReleaseTrack(
      queue_type, release_track)
  if not _AnyArgsSpecified(args, updatable_config.AllConfigs(),
                           clear_args=True):
    raise NoFieldsSpecifiedError('Must specify at least one field to update.')
def GetSpecifiedFieldsMask(args, queue_type,
                           release_track=base.ReleaseTrack.GA):
  """Returns the sorted update-mask fields for the specified args."""
  updatable_config = QueueUpdatableConfiguration.FromQueueTypeAndReleaseTrack(
      queue_type, release_track)
  specified_args = _SpecifiedArgs(
      args, updatable_config.AllConfigs(), clear_args=True)
  args_to_mask = updatable_config.GetConfigToUpdateMaskMapping()
  mask_fields = {args_to_mask[arg] for arg in specified_args}
  # Converting a queue to/from pull requires the type field in the mask.
  if getattr(args, 'type', None) == constants.PULL_TASK:
    mask_fields.add('type')
  return sorted(mask_fields)
def _SpecifiedArgs(specified_args_object, args_list, clear_args=False):
  """Returns the list of known arguments in the specified list.

  NOTE: the return value is a lazy iterator (six.moves.filter); callers wrap
  it in any()/list().
  """
  def _IsSpecifiedWrapper(arg):
    """Wrapper function for Namespace.IsSpecified function.

    We need this function to support modifying certain queue attributes
    internally using `gcloud app deploy queue.yaml` without exposing the same
    functionality via `gcloud tasks queues create/update`.

    Args:
      arg: The argument we are trying to check if specified.

    Returns:
      True if the argument was specified at CLI invocation, False otherwise.
    """
    # HTTP queue overrides should be ignored when running 'app deploy'
    http_queue_args = [
        'http_uri_override',
        'http_method_override',
        'http_header_override',
        'http_oauth_service_account_email_override',
        'http_oauth_token_scope_override',
        'http_oidc_service_account_email_override',
        'http_oidc_token_audience_override',
    ]
    try:
      return specified_args_object.IsSpecified(arg)
    except parser_errors.UnknownDestinationException:
      # Flags not registered on this surface (queue.yaml deploys) count as
      # unspecified instead of raising.
      if arg in ('max_burst_size', 'clear_max_burst_size') or any(
          flag in arg for flag in http_queue_args
      ):
        return False
      raise

  clear_args_list = []
  if clear_args:
    clear_args_list = [_EquivalentClearArg(a) for a in args_list]
  return filter(_IsSpecifiedWrapper, args_list + clear_args_list)
def _AnyArgsSpecified(specified_args_object, args_list, clear_args=False):
  """Returns whether there are known arguments in the specified list."""
  specified = _SpecifiedArgs(specified_args_object, args_list, clear_args)
  return any(specified)
def _EquivalentClearArg(arg):
return 'clear_{}'.format(arg)
def _ParseRetryConfigArgs(args, queue_type, messages, is_update,
                          is_alpha=False):
  """Parses the attributes of 'args' for Queue.retryConfig.

  Returns None (implicitly) unless at least one retry flag relevant to the
  queue type was specified (including clear_* variants on update).
  """
  if (queue_type == constants.PULL_QUEUE and
      _AnyArgsSpecified(args, ['max_attempts', 'max_retry_duration'],
                        clear_args=is_update)):
    retry_config = messages.RetryConfig(
        maxRetryDuration=args.max_retry_duration)
    _AddMaxAttemptsFieldsFromArgs(args, retry_config, is_alpha)
    return retry_config
  if (queue_type == constants.PUSH_QUEUE and
      _AnyArgsSpecified(args, ['max_attempts', 'max_retry_duration',
                               'max_doublings', 'min_backoff', 'max_backoff'],
                        clear_args=is_update)):
    retry_config = messages.RetryConfig(
        maxRetryDuration=args.max_retry_duration,
        maxDoublings=args.max_doublings, minBackoff=args.min_backoff,
        maxBackoff=args.max_backoff)
    _AddMaxAttemptsFieldsFromArgs(args, retry_config, is_alpha)
    return retry_config
def _AddMaxAttemptsFieldsFromArgs(args, config_object, is_alpha=False):
if args.IsSpecified('max_attempts'):
# args.max_attempts is a BoundedInt and so None means unlimited
if args.max_attempts is None:
if is_alpha:
config_object.unlimitedAttempts = True
else:
config_object.maxAttempts = -1
else:
config_object.maxAttempts = args.max_attempts
def _ParseAlphaRateLimitsArgs(args, queue_type, messages, is_update):
  """Parses the attributes of 'args' for Queue.rateLimits (alpha surface)."""
  rate_limit_flags = ['max_tasks_dispatched_per_second',
                      'max_concurrent_tasks']
  if queue_type != constants.PUSH_QUEUE:
    return None
  if not _AnyArgsSpecified(args, rate_limit_flags, clear_args=is_update):
    return None
  return messages.RateLimits(
      maxTasksDispatchedPerSecond=args.max_tasks_dispatched_per_second,
      maxConcurrentTasks=args.max_concurrent_tasks)
def _ParseRateLimitsArgs(args, queue_type, messages, is_update):
  """Parses the attributes of 'args' for Queue.rateLimits."""
  rate_limit_flags = ['max_dispatches_per_second',
                      'max_concurrent_dispatches', 'max_burst_size']
  if queue_type != constants.PUSH_QUEUE:
    return None
  if not _AnyArgsSpecified(args, rate_limit_flags, clear_args=is_update):
    return None
  # max_burst_size is not registered on every surface (e.g. app deploy).
  max_burst_size = getattr(args, 'max_burst_size', None)
  return messages.RateLimits(
      maxDispatchesPerSecond=args.max_dispatches_per_second,
      maxConcurrentDispatches=args.max_concurrent_dispatches,
      maxBurstSize=max_burst_size)
def _ParseStackdriverLoggingConfigArgs(args, queue_type, messages,
                                       is_update):
  """Parses the attributes of 'args' for Queue.stackdriverLoggingConfig."""
  if queue_type == constants.PULL_QUEUE:
    return None
  if not _AnyArgsSpecified(args, ['log_sampling_ratio'],
                           clear_args=is_update):
    return None
  return messages.StackdriverLoggingConfig(
      samplingRatio=args.log_sampling_ratio)
def _ParsePullTargetArgs(unused_args, queue_type, messages, is_update):
  """Returns an empty PullTarget when creating a pull queue, else None."""
  is_pull_create = queue_type == constants.PULL_QUEUE and not is_update
  return messages.PullTarget() if is_pull_create else None
def _ParseQueueType(args, queue_type, messages, is_update):
  """Parses the attributes of 'args' for Queue.type."""
  explicitly_pull = getattr(args, 'type', None) == constants.PULL_QUEUE
  creating_pull = queue_type == constants.PULL_QUEUE and not is_update
  if explicitly_pull or creating_pull:
    return messages.Queue.TypeValueValuesEnum.PULL
  return messages.Queue.TypeValueValuesEnum.PUSH
def _ParseAppEngineHttpTargetArgs(args, queue_type, messages):
  """Parses the attributes of 'args' for Queue.appEngineHttpTarget."""
  if queue_type != constants.PUSH_QUEUE:
    return None
  routing_override = _ParseAppEngineRoutingOverrideArgs(
      args, queue_type, messages)
  if routing_override is None:
    return None
  return messages.AppEngineHttpTarget(
      appEngineRoutingOverride=routing_override)
def _ParseHttpTargetArgs(args, queue_type, messages):
  """Parses the attributes of 'args' for Queue.HttpTarget.

  Returns None (implicitly) for non-push queues, and None when no URI,
  method, OAuth, or OIDC override is present.
  """
  if queue_type == constants.PUSH_QUEUE:
    uri_override = _ParseHttpRoutingOverrideArgs(args, messages)
    http_method = (
        messages.HttpTarget.HttpMethodValueValuesEnum(
            args.http_method_override.upper())
        if args.IsSpecified('http_method_override') else None)
    oauth_token = _ParseHttpTargetOAuthArgs(args, messages)
    oidc_token = _ParseHttpTargetOidcArgs(args, messages)
    # Header overrides alone do not trigger building an HttpTarget; they are
    # only attached once another override is present.
    if (
        uri_override is None
        and http_method is None
        and oauth_token is None
        and oidc_token is None
    ):
      return None
    return messages.HttpTarget(
        uriOverride=uri_override,
        headerOverrides=_ParseHttpTargetHeaderArg(args, messages),
        httpMethod=http_method,
        oauthToken=oauth_token,
        oidcToken=oidc_token)
def _ParseAppEngineHttpQueueArgs(args, queue_type, messages):
  """Parses the attributes of 'args' for Queue.appEngineHttpQueue."""
  if queue_type != constants.PUSH_QUEUE:
    return None
  return messages.AppEngineHttpQueue(
      appEngineRoutingOverride=_ParseAppEngineRoutingOverrideArgs(
          args, queue_type, messages))
def _ParseAppEngineRoutingOverrideArgs(args, queue_type, messages):
  """Parses the attributes of 'args' for AppEngineRouting."""
  is_push = queue_type == constants.PUSH_QUEUE
  if is_push and args.IsSpecified('routing_override'):
    return messages.AppEngineRouting(**args.routing_override)
  return None
def _ParseHttpRoutingOverrideArgs(args, messages):
  """Parses the attributes of 'args' for HTTP Routing."""
  if not args.IsSpecified('http_uri_override'):
    return None
  return _ParseUriOverride(messages=messages, **args.http_uri_override)
def _ParseUriOverride(messages,
                      scheme=None,
                      host=None,
                      port=None,
                      path=None,
                      query=None,
                      mode=None):
  """Parses the attributes of 'args' for URI Override.

  Args:
    messages: The Cloud Tasks messages module.
    scheme: Optional scheme name; upper-cased into the scheme enum.
    host: Optional host override string.
    port: Optional port; coerced to int when truthy.
    path: Optional path override string.
    query: Optional query-params override string.
    mode: Optional enforce-mode name; upper-cased into the mode enum.

  Returns:
    A messages.UriOverride message.
  """
  scheme = (
      messages.UriOverride.SchemeValueValuesEnum(scheme.upper())
      if scheme else None)
  port = int(port) if port else None
  uri_override_enforce_mode = (
      messages.UriOverride.UriOverrideEnforceModeValueValuesEnum(mode.upper())
      if mode else None)
  # Path/query overrides are always wrapped, even when their value is None.
  return messages.UriOverride(
      scheme=scheme,
      host=host,
      port=port,
      pathOverride=messages.PathOverride(path=path),
      queryOverride=messages.QueryOverride(queryParams=query),
      uriOverrideEnforceMode=uri_override_enforce_mode)
def _ParsePullMessageArgs(args, task_type, messages):
  """Builds a PullMessage for pull tasks; None for other task types."""
  if task_type != constants.PULL_TASK:
    return None
  return messages.PullMessage(payload=_ParsePayloadArgs(args), tag=args.tag)
def _ParseAlphaAppEngineHttpRequestArgs(args, task_type, messages):
  """Parses the attributes of 'args' for Task.appEngineHttpRequest.

  Alpha variant: uses the payload/relativeUrl field names.
  """
  if task_type == constants.APP_ENGINE_TASK:
    routing = (
        messages.AppEngineRouting(**args.routing) if args.routing else None)
    http_method = (messages.AppEngineHttpRequest.HttpMethodValueValuesEnum(
        args.method.upper()) if args.IsSpecified('method') else None)
    return messages.AppEngineHttpRequest(
        appEngineRouting=routing, httpMethod=http_method,
        payload=_ParsePayloadArgs(args), relativeUrl=args.url,
        headers=_ParseHeaderArg(args,
                                messages.AppEngineHttpRequest.HeadersValue))
def _ParsePayloadArgs(args):
  """Reads the task payload from --payload-file or --payload-content."""
  if args.IsSpecified('payload_file'):
    payload = console_io.ReadFromFileOrStdin(args.payload_file, binary=False)
    return http_encoding.Encode(payload)
  if args.IsSpecified('payload_content'):
    return http_encoding.Encode(args.payload_content)
  return None
def _ParseAppEngineHttpRequestArgs(args, task_type, messages):
  """Parses the attributes of 'args' for Task.appEngineHttpRequest.

  Beta/GA variant: uses the body/relativeUri field names.
  """
  if task_type == constants.APP_ENGINE_TASK:
    routing = (
        messages.AppEngineRouting(**args.routing) if args.routing else None)
    http_method = (messages.AppEngineHttpRequest.HttpMethodValueValuesEnum(
        args.method.upper()) if args.IsSpecified('method') else None)
    return messages.AppEngineHttpRequest(
        appEngineRouting=routing, httpMethod=http_method,
        body=_ParseBodyArgs(args), relativeUri=args.relative_uri,
        headers=_ParseHeaderArg(args,
                                messages.AppEngineHttpRequest.HeadersValue))
def _ParseHttpRequestArgs(args, task_type, messages):
  """Parses the attributes of 'args' for Task.httpRequest.

  Returns None (implicitly) for non-HTTP task types.
  """
  if task_type == constants.HTTP_TASK:
    http_method = (messages.HttpRequest.HttpMethodValueValuesEnum(
        args.method.upper()) if args.IsSpecified('method') else None)
    return messages.HttpRequest(
        headers=_ParseHeaderArg(args, messages.HttpRequest.HeadersValue),
        httpMethod=http_method, body=_ParseBodyArgs(args), url=args.url,
        oauthToken=_ParseOAuthArgs(args, messages),
        oidcToken=_ParseOidcArgs(args, messages))
def _ParseBodyArgs(args):
  """Reads the task body from --body-file or --body-content."""
  if args.IsSpecified('body_file'):
    body = console_io.ReadFromFileOrStdin(args.body_file, binary=False)
    return http_encoding.Encode(body)
  if args.IsSpecified('body_content'):
    return http_encoding.Encode(args.body_content)
  return None
def _ParseOAuthArgs(args, messages):
  """Builds an OAuthToken when an OAuth service account email was given."""
  if not args.IsSpecified('oauth_service_account_email'):
    return None
  return messages.OAuthToken(
      serviceAccountEmail=args.oauth_service_account_email,
      scope=args.oauth_token_scope)
def _ParseOidcArgs(args, messages):
  """Builds an OidcToken when an OIDC service account email was given."""
  if not args.IsSpecified('oidc_service_account_email'):
    return None
  return messages.OidcToken(
      serviceAccountEmail=args.oidc_service_account_email,
      audience=args.oidc_token_audience)
def _ParseHttpTargetOAuthArgs(args, messages):
  """Builds an OAuthToken from the http-target OAuth override flags."""
  if not args.IsSpecified('http_oauth_service_account_email_override'):
    return None
  return messages.OAuthToken(
      serviceAccountEmail=args.http_oauth_service_account_email_override,
      scope=args.http_oauth_token_scope_override)
def _ParseHttpTargetOidcArgs(args, messages):
if args.IsSpecified('http_oidc_service_account_email_override'):
return messages.OidcToken(
serviceAccountEmail=args.http_oidc_service_account_email_override,
audience=args.http_oidc_token_audience_override)
else:
return None
def _ParseHeaderArg(args, headers_value):
  """Converts repeated --header flag values into a headers message.

  Args:
    args: The argparse namespace; args.header is a list of 'NAME: VALUE'
      strings (or falsy when the flag was not given).
    headers_value: The additional-properties message class for headers.

  Returns:
    A headers_value message, or None when no headers were given.
  """
  if not args.header:
    return None
  parsed = dict(_SplitHeaderArgValue(entry) for entry in args.header)
  return encoding.DictToAdditionalPropertyMessage(parsed, headers_value)
def _SplitHeaderArgValue(header_arg_value):
key, value = header_arg_value.split(':', 1)
return key, value.lstrip()
def _ParseHttpTargetHeaderArg(args, messages):
  """Converts --http-header-override values into HeaderOverride messages.

  Args:
    args: The argparse namespace for the command.
    messages: The Cloud Tasks API messages module.

  Returns:
    A list of HeaderOverride messages sorted by header name; empty when the
    flag was not specified.
  """
  overrides = []
  if args.IsSpecified('http_header_override'):
    parsed = dict(
        _SplitHeaderArgValue(entry) for entry in args.http_header_override)
    # Sort by header name so the request payload is deterministic.
    for name in sorted(parsed):
      overrides.append(
          messages.HeaderOverride(
              header=messages.Header(
                  key=name.encode(), value=parsed[name].encode())))
  return overrides
def FormatLeaseDuration(lease_duration):
  """Formats a lease duration in seconds as an API Duration string, e.g. '20s'."""
  return str(lease_duration) + 's'
def ParseTasksLeaseFilterFlags(args):
  """Builds the lease filter expression from the --oldest-tag/--tag flags.

  Returns:
    The filter string, or None when neither flag was given.
  """
  if args.oldest_tag:
    return 'tag_function=oldest_tag()'
  return 'tag="{}"'.format(args.tag) if args.IsSpecified('tag') else None
def QueuesUriFunc(queue):
  """Returns the REST self-link URI for the given queue resource."""
  queue_ref = resources.REGISTRY.Parse(
      queue.name,
      params={'projectsId': _PROJECT},
      collection=constants.QUEUES_COLLECTION)
  return queue_ref.SelfLink()
def TasksUriFunc(task):
  """Returns the REST self-link URI for the given task resource."""
  task_ref = resources.REGISTRY.Parse(
      task.name,
      params={'projectsId': _PROJECT},
      collection=constants.TASKS_COLLECTION)
  return task_ref.SelfLink()
def LocationsUriFunc(task):
  """Returns the REST self-link URI for the given location resource."""
  # NOTE(review): the parameter is named 'task' but receives a location
  # message; only its .name field is used.
  location_ref = resources.REGISTRY.Parse(
      task.name,
      params={'projectsId': _PROJECT},
      collection=constants.LOCATIONS_COLLECTION)
  return location_ref.SelfLink()

# ---- Begin separate file: cloudtasks resource-argument YAML spec ----
project:
name: project
collection: cloudtasks.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: The project name.
location:
name: location
collection: cloudtasks.projects.locations
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: The location name.
queue:
name: queue
collection: cloudtasks.projects.locations.queues
attributes:
- *project
- *location
- parameter_name: queuesId
attribute_name: queue
help: |
The name of the queue.