feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command group for ai-platform local."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Local(base.Group):
  """AI Platform Local commands."""
  # No body needed beyond the docstring: calliope discovers the group's
  # subcommands from the surrounding package layout, so the redundant
  # `pass` statement is omitted.

View File

@@ -0,0 +1,125 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ai-platform local predict command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.ml_engine import flags
from googlecloudsdk.command_lib.ml_engine import local_utils
from googlecloudsdk.command_lib.ml_engine import predict_utilities
from googlecloudsdk.core import log
def _AddLocalPredictArgs(parser):
  """Register the flags for `gcloud ai-platform local predict`.

  Args:
    parser: the argparse parser the flags are added to.
  """
  json_request_help = """\
Path to a local file containing the body of JSON request.
An example of a JSON request:
{
"instances": [
{"x": [1, 2], "y": [3, 4]},
{"x": [-1, -2], "y": [-3, -4]}
]
}
This flag accepts "-" for stdin.
"""
  json_instances_help = """\
Path to a local file from which instances are read.
Instances are in JSON format; newline delimited.
An example of the JSON instances file:
{"images": [0.0, ..., 0.1], "key": 3}
{"images": [0.0, ..., 0.1], "key": 2}
...
This flag accepts "-" for stdin.
"""
  text_instances_help = """\
Path to a local file from which instances are read.
Instances are in UTF-8 encoded text format; newline delimited.
An example of the text instances file:
107,4.9,2.5,4.5,1.7
100,5.7,2.8,4.1,1.3
...
This flag accepts "-" for stdin.
"""
  parser.add_argument('--model-dir', required=True, help='Path to the model.')
  flags.FRAMEWORK_MAPPER.choice_arg.AddToParser(parser)
  # Exactly one source of prediction instances must be supplied.
  instance_group = parser.add_mutually_exclusive_group(required=True)
  for flag_name, flag_help in (('--json-request', json_request_help),
                               ('--json-instances', json_instances_help),
                               ('--text-instances', text_instances_help)):
    instance_group.add_argument(flag_name, help=flag_help)
  flags.SIGNATURE_NAME.AddToParser(parser)
class Predict(base.Command):
  """Run prediction locally."""

  @staticmethod
  def Args(parser):
    # All flags for this command live in one helper.
    _AddLocalPredictArgs(parser)

  def Run(self, args):
    # Resolve the user's --framework choice; default to TensorFlow when the
    # flag was not supplied.
    framework_enum = flags.FRAMEWORK_MAPPER.GetEnumForChoice(args.framework)
    framework_name = (
        'tensorflow' if framework_enum is None else framework_enum.name.lower())
    if args.signature_name is None:
      # Best-effort hint only; the local runtime decides whether it matters.
      log.status.Print(
          'If the signature defined in the model is not serving_default '
          'then you must specify it via --signature-name flag, '
          'otherwise the command may fail.')
    results = local_utils.RunPredict(
        args.model_dir,
        json_request=args.json_request,
        json_instances=args.json_instances,
        text_instances=args.text_instances,
        framework=framework_name,
        signature_name=args.signature_name)
    if not args.IsSpecified('format'):
      # Default output format is derived from the shape of the response.
      predictions = (
          results if isinstance(results, list) else results.get('predictions'))
      args.format = predict_utilities.GetDefaultFormat(predictions)
    return results
# Long-form help rendered by `gcloud ai-platform local predict --help`.
# Attached to the class after its definition so the literal stays close to
# the command it documents; {command} is expanded by the help generator.
_DETAILED_HELP = {
    'DESCRIPTION':
        """\
*{command}* performs prediction locally with the given instances. It requires the
[TensorFlow SDK](https://www.tensorflow.org/install) be installed locally. The
output format mirrors `gcloud ai-platform predict` (online prediction).
You cannot use this command with custom prediction routines.
"""
}
Predict.detailed_help = _DETAILED_HELP

View File

@@ -0,0 +1,121 @@
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ai-platform local train command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.ml_engine import flags
from googlecloudsdk.command_lib.ml_engine import local_train
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
# Warning template used when cluster-shape flags (e.g. --worker-count) are
# given without --distributed; {flag} is filled in with the flag name.
_BAD_FLAGS_WARNING_MESSAGE = """\
{flag} is ignored if --distributed is not provided.
Did you mean to run distributed training?\
"""
class RunLocal(base.Command):
  r"""Run an AI Platform training job locally.

  This command runs the specified module in an environment
  similar to that of a live AI Platform Training Job.

  This is especially useful in the case of testing distributed models,
  as it allows you to validate that you are properly interacting with the
  AI Platform cluster configuration. If your model expects a specific
  number of parameter servers or workers (i.e. you expect to use the CUSTOM
  machine type), use the --parameter-server-count and --worker-count flags to
  further specify the desired cluster configuration, just as you would in
  your cloud training job configuration:

      $ {command} --module-name trainer.task \
          --package-path /path/to/my/code/trainer \
          --distributed \
          --parameter-server-count 4 \
          --worker-count 8

  Unlike submitting a training job, the --package-path parameter can be
  omitted, and will use your current working directory.

  AI Platform Training sets a TF_CONFIG environment variable on each VM in
  your training job. You can use TF_CONFIG to access the cluster description
  and the task description for each VM.

  Learn more about TF_CONFIG:
  https://cloud.google.com/ai-platform/training/docs/distributed-training-details.
  """

  @staticmethod
  def Args(parser):
    """Register flags for this command."""
    flags.PACKAGE_PATH.AddToParser(parser)
    flags.GetModuleNameFlag().AddToParser(parser)
    flags.DISTRIBUTED.AddToParser(parser)
    flags.EVALUATORS.AddToParser(parser)
    flags.PARAM_SERVERS.AddToParser(parser)
    flags.GetJobDirFlag(upload_help=False, allow_local=True).AddToParser(parser)
    flags.WORKERS.AddToParser(parser)
    flags.START_PORT.AddToParser(parser)
    flags.GetUserArgs(local=True).AddToParser(parser)

  def Run(self, args):
    """Run the training module in local processes.

    Args:
      args: an argparse namespace. All the arguments that were provided to
        this command invocation.

    Returns:
      None. The training subprocess's exit status is propagated through
      ``self.exit_code`` instead of a return value or an exception.
    """
    package_path = args.package_path or files.GetCWD()
    # Mimic behavior of ai-platform jobs submit training
    package_root = os.path.dirname(os.path.abspath(package_path))
    # Copy before extending: `args.user_args or []` would alias the list
    # stored on the parsed namespace, and extending it in place would leak
    # '--job-dir' into any later reads of args.user_args.
    user_args = list(args.user_args or [])
    if args.job_dir:
      user_args.extend(('--job-dir', args.job_dir))
    # Unset cluster-shape flags default to a 2-worker / 2-parameter-server
    # cluster; explicit values (including 0) are respected.
    worker_count = 2 if args.worker_count is None else args.worker_count
    ps_count = (2 if args.parameter_server_count is None
                else args.parameter_server_count)
    if args.distributed:
      retval = local_train.RunDistributed(
          args.module_name,
          package_root,
          ps_count,
          worker_count,
          args.evaluator_count or 0,
          args.start_port,
          user_args=user_args)
    else:
      # Cluster-shape flags are meaningless without --distributed; warn
      # rather than fail so existing scripts keep working.
      if args.parameter_server_count:
        log.warning(_BAD_FLAGS_WARNING_MESSAGE.format(
            flag='--parameter-server-count'))
      if args.worker_count:
        log.warning(_BAD_FLAGS_WARNING_MESSAGE.format(flag='--worker-count'))
      retval = local_train.MakeProcess(
          args.module_name,
          package_root,
          args=user_args,
          task_type=local_train.GetPrimaryNodeName())
    # Don't raise an exception because the users will already see the message.
    # We want this to mimic calling the script directly as much as possible.
    self.exit_code = retval