feat: Add new gcloud commands, API clients, and third-party libraries across various services.

This commit is contained in:
2026-01-01 20:26:35 +01:00
parent 5e23cbece0
commit a19e592eb7
25221 changed files with 8324611 additions and 0 deletions

View File

@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains test case base classes for gsutil tests."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from gslib.tests.testcase.integration_testcase import GsUtilIntegrationTestCase
from gslib.tests.util import KmsTestingResources
from gslib.tests.testcase.unit_testcase import GsUtilUnitTestCase
from gslib.tests.testcase.shim_unit_test_base import ShimUnitTestBase

View File

@@ -0,0 +1,263 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base test case class for unit and integration tests."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from functools import wraps
import os.path
import random
import re
import shutil
import tempfile
import six
import boto
import gslib.tests.util as util
from gslib.tests.util import unittest
from gslib.utils.constants import UTF8
from gslib.utils.posix_util import NA_ID
from gslib.utils.posix_util import NA_MODE
MAX_BUCKET_LENGTH = 63
def NotParallelizable(test):
  """Marks a test as unsafe to run in parallel and returns it unchanged."""
  # Test runners look for this flag to force serial execution.
  test.is_parallelizable = False
  return test
def RequiresIsolation(func):
  """Decorator marking a test case as requiring its own separate process."""

  @wraps(func)
  def wrapper(*args, **kwargs):
    # Pure pass-through; the runner inspects the flag below, not the body.
    return func(*args, **kwargs)

  # Test runners check this attribute to isolate the case in a subprocess.
  wrapper.requires_isolation = True
  return wrapper
class GsUtilTestCase(unittest.TestCase):
  """Base test case class for unit and integration tests."""

  def setUp(self):
    # Py2's unittest lacks assertRegex/assertNotRegex; alias the deprecated
    # spellings so tests can use the Py3 names unconditionally.
    if six.PY2:
      self.assertRegex = self.assertRegexpMatches
      self.assertNotRegex = self.assertNotRegexpMatches
    if util.RUN_S3_TESTS:
      # S3 tests always use the XML API.
      self.test_api = 'XML'
      self.default_provider = 's3'
      self.provider_custom_meta = 'amz'
    else:
      # GCS: honor the boto "prefer_api" setting, defaulting to JSON.
      self.test_api = boto.config.get('GSUtil', 'prefer_api', 'JSON').upper()
      self.default_provider = 'gs'
      self.provider_custom_meta = 'goog'
    # Directories created via CreateTempDir; removed in tearDown.
    self.tempdirs = []

  def tearDown(self):
    # Best-effort removal of every temp dir created during the test.
    while self.tempdirs:
      tmpdir = self.tempdirs.pop()
      shutil.rmtree(tmpdir, ignore_errors=True)

  def assertNumLines(self, text, numlines):
    # Counts '\n' characters, so text is expected to end with a newline for
    # the count to equal the number of lines.
    self.assertEqual(text.count('\n'), numlines)

  def GetTestMethodName(self):
    """Returns the name of the currently running test method."""
    return self._testMethodName

  def MakeRandomTestString(self):
    """Creates a random string of hex characters 8 characters long."""
    return '%08x' % random.randrange(256**4)

  def MakeTempName(self, kind, prefix='', suffix=''):
    """Creates a temporary name that is most-likely unique.

    Args:
      kind (str): A string indicating what kind of test name this is.
      prefix (str): Prefix prepended to the temporary name.
      suffix (str): Suffix string appended to end of temporary name.

    Returns:
      (str) The temporary name. If `kind` was "bucket", the temporary name may
      have coerced this string, including the supplied `prefix`, such that it
      contains only characters that are valid across all supported storage
      providers (e.g. replacing "_" with "-", converting uppercase letters to
      lowercase, etc.).
    """
    name = '{prefix}gsutil-test-{method}-{kind}'.format(
        prefix=prefix, method=self.GetTestMethodName(), kind=kind)
    # Truncate so the "-" plus 8-char random string appended below still
    # leaves the name within the 63-char bucket-name limit.
    name = name[:MAX_BUCKET_LENGTH - 13]
    name = '{name}-{rand}'.format(name=name, rand=self.MakeRandomTestString())
    total_name_len = len(name) + len(suffix)
    if suffix:
      # The suffix is appended verbatim (not truncated), so a too-long
      # bucket name is a hard failure rather than a silent coercion.
      if kind == 'bucket' and total_name_len > MAX_BUCKET_LENGTH:
        self.fail(
            'Tried to create a psuedo-random bucket name with a specific '
            'suffix, but the generated name was too long and there was not '
            'enough room for the suffix. Please use shorter strings or perform '
            'name randomization manually.\nRequested name: ' + name + suffix)
      name += suffix
    if kind == 'bucket':
      name = util.MakeBucketNameValid(name)
    return name

  # TODO: Convert tests to use this for object names.
  def MakeTempUnicodeName(self, kind, prefix=''):
    # NOTE(review): appending '' is a no-op, so this currently returns the
    # same value as MakeTempName; a non-ASCII suffix was presumably intended
    # here -- confirm against upstream before relying on this method.
    return self.MakeTempName(kind, prefix=prefix) + ''

  def CreateTempDir(self, test_files=0, contents=None):
    """Creates a temporary directory on disk.

    The directory and all of its contents will be deleted after the test.

    Args:
      test_files: The number of test files to place in the directory or a list
          of test file names.
      contents: The contents for each generated test file.

    Returns:
      The path to the new temporary directory.
    """
    tmpdir = tempfile.mkdtemp(prefix=self.MakeTempName('directory'))
    # Registered for deletion in tearDown.
    self.tempdirs.append(tmpdir)
    try:
      # test_files may be an iterable of names or an int count; an int is
      # not iterable, so TypeError selects the "generate names" branch.
      iter(test_files)
    except TypeError:
      test_files = [self.MakeTempName('file') for _ in range(test_files)]
    for i, name in enumerate(test_files):
      contents_file = contents
      if contents_file is None:
        # Default per-file contents must be bytes (files are opened 'wb').
        contents_file = ('test %d' % i).encode('ascii')
      self.CreateTempFile(tmpdir=tmpdir, file_name=name, contents=contents_file)
    return tmpdir

  def CreateTempFifo(self, tmpdir=None, file_name=None):
    """Creates a temporary fifo file on disk. Should not be used on Windows.

    Args:
      tmpdir: The temporary directory to place the file in. If not specified, a
          new temporary directory is created.
      file_name: The name to use for the file. If not specified, a temporary
          test file name is constructed. This can also be a tuple, where
          ('dir', 'foo') means to create a file named 'foo' inside a
          subdirectory named 'dir'.

    Returns:
      The path to the new temporary fifo.
    """
    tmpdir = tmpdir or self.CreateTempDir()
    file_name = file_name or self.MakeTempName('fifo')
    if isinstance(file_name, six.string_types):
      fpath = os.path.join(tmpdir, file_name)
    else:
      # Tuple form: treat elements as nested path components.
      fpath = os.path.join(tmpdir, *file_name)
    # os.mkfifo does not exist on Windows, hence the warning above.
    os.mkfifo(fpath)
    return fpath

  def CreateTempFile(self,
                     tmpdir=None,
                     contents=None,
                     file_name=None,
                     mtime=None,
                     mode=NA_MODE,
                     uid=NA_ID,
                     gid=NA_ID):
    """Creates a temporary file on disk.

    Note: if mode, uid, or gid are present, they must be validated by
    ValidateFilePermissionAccess and ValidatePOSIXMode before calling this
    function.

    Args:
      tmpdir: The temporary directory to place the file in. If not specified, a
          new temporary directory is created.
      contents: The contents to write to the file. If not specified, a test
          string is constructed and written to the file. Since the file
          is opened 'wb', the contents must be bytes.
      file_name: The name to use for the file. If not specified, a temporary
          test file name is constructed. This can also be a tuple, where
          ('dir', 'foo') means to create a file named 'foo' inside a
          subdirectory named 'dir'.
      mtime: The modification time of the file in POSIX time (seconds since
          UTC 1970-01-01). If not specified, this defaults to the current
          system time.
      mode: The POSIX mode for the file. Must be a base-8 3-digit integer
          represented as a string.
      uid: A POSIX user ID.
      gid: A POSIX group ID.

    Returns:
      The path to the new temporary file.
    """
    tmpdir = six.ensure_str(tmpdir or self.CreateTempDir())
    file_name = file_name or self.MakeTempName(str('file'))
    if isinstance(file_name, (six.text_type, six.binary_type)):
      fpath = os.path.join(tmpdir, six.ensure_str(file_name))
    else:
      # Tuple form: coerce each component to str, then join as nested path.
      file_name = map(six.ensure_str, file_name)
      fpath = os.path.join(tmpdir, *file_name)
    # Create any intermediate subdirectories implied by the tuple form.
    if not os.path.isdir(os.path.dirname(fpath)):
      os.makedirs(os.path.dirname(fpath))
    if isinstance(fpath, six.binary_type):
      fpath = fpath.decode(UTF8)
    with open(fpath, 'wb') as f:
      contents = (contents if contents is not None else self.MakeTempName(
          str('contents')))
      if isinstance(contents, bytearray):
        contents = bytes(contents)
      else:
        # File is opened 'wb', so text contents must be encoded first.
        contents = six.ensure_binary(contents)
      f.write(contents)
    if mtime is not None:
      # Set the atime and mtime to be the same.
      os.utime(fpath, (mtime, mtime))
    # Only chown/chmod when a real (non-NA) value was supplied; gid may
    # arrive as a string, hence the int() coercion.
    if uid != NA_ID or int(gid) != NA_ID:
      os.chown(fpath, uid, int(gid))
    if int(mode) != NA_MODE:
      # mode is an octal string per the docstring, e.g. '644'.
      os.chmod(fpath, int(mode, 8))
    return fpath

  def assertRegexpMatchesWithFlags(self, text, pattern, msg=None, flags=0):
    """Like assertRegexpMatches, but allows specifying additional re flags.

    Args:
      text: The text in which to search for pattern.
      pattern: The pattern to search for; should be either a string or compiled
          regex returned from re.compile().
      msg: The message to be displayed if pattern is not found in text. The
          values for pattern and text will be included after this message.
      flags: Additional flags from the re module to be included when compiling
          pattern. If pattern is a regex that was compiled with existing flags,
          these flags will be added via a bitwise-or.
    """
    if isinstance(pattern, six.string_types):
      pattern = re.compile(pattern, flags=flags)
    else:  # It's most likely an already-compiled pattern.
      # Recompile so the caller's extra flags are merged with the originals.
      pattern = re.compile(pattern.pattern, flags=pattern.flags | flags)
    if not pattern.search(text):
      failure_msg = msg or 'Regex didn\'t match'
      failure_msg = '%s: %r not found in %r' % (failure_msg, pattern.pattern,
                                                text)
      raise self.failureException(failure_msg)

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains base class to be used for shim unit tests."""
import subprocess
from unittest import mock
from gslib.tests.testcase.unit_testcase import GsUtilUnitTestCase
class ShimUnitTestBase(GsUtilUnitTestCase):
  """Base class for unit testing shim behavior.

  Every shim operation shells out via `subprocess.run()` to ask gcloud
  whether an active account is configured, so that probe is mocked here
  once for all subclasses.
  """

  def setUp(self):
    super().setUp()
    # The translator runs `gcloud config get account` through
    # subprocess.run() to check for an active account. Most shim tests
    # don't care about that probe, so patch it globally here; dedicated
    # tests verify the call itself is made.
    self._subprocess_run_patcher = mock.patch.object(
        subprocess, 'run', autospec=True)
    self._mock_subprocess_run = self._subprocess_run_patcher.start()
    # Simulate a successful gcloud exit (account configured).
    self._mock_subprocess_run.return_value.returncode = 0

  def tearDown(self):
    patcher = self._subprocess_run_patcher
    if patcher is not None:
      patcher.stop()
    super().tearDown()

View File

@@ -0,0 +1,455 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains gsutil base unit test case class."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import logging
import os
import sys
import tempfile
import six
import boto
from boto.utils import get_utf8able_str
from gslib import project_id
from gslib import wildcard_iterator
from gslib.boto_translation import BotoTranslation
from gslib.cloud_api_delegator import CloudApiDelegator
from gslib.command_runner import CommandRunner
from gslib.cs_api_map import ApiMapConstants
from gslib.cs_api_map import ApiSelector
from gslib.discard_messages_queue import DiscardMessagesQueue
from gslib.gcs_json_api import GcsJsonApi
from gslib.tests.mock_logging_handler import MockLoggingHandler
from gslib.tests.testcase import base
import gslib.tests.util as util
from gslib.tests.util import unittest
from gslib.tests.util import WorkingDirectory
from gslib.utils.constants import UTF8
from gslib.utils.text_util import print_to_fd
def _AttemptToCloseSysFd(fd):
"""Suppress IOError when closing sys.stdout or sys.stderr in tearDown."""
# In PY2, if another sibling thread/process tried closing it at the same
# time we did, it succeeded either way, so we just continue. This approach
# was taken from https://github.com/pytest-dev/pytest/pull/3305.
if not six.PY2: # This doesn't happen in PY3, AFAICT.
fd.close()
return
try:
fd.close()
except IOError:
pass
class GsutilApiUnitTestClassMapFactory(object):
  """Class map factory for use in unit tests.

  BotoTranslation backs every API selector so that GSMockBucketStorageUri
  can be used to communicate with the mock XML service.
  """

  @classmethod
  def GetClassMap(cls):
    """Returns a class map for use in unit tests."""
    # TODO: JSON should map to GcsJsonApi rather than BotoTranslation.
    # Refer Issue https://github.com/GoogleCloudPlatform/gsutil/issues/970
    gs_class_map = {
        ApiSelector.XML: BotoTranslation,
        ApiSelector.JSON: BotoTranslation,
    }
    s3_class_map = {ApiSelector.XML: BotoTranslation}
    return {'gs': gs_class_map, 's3': s3_class_map}
@unittest.skipUnless(util.RUN_UNIT_TESTS, 'Not running integration tests.')
class GsUtilUnitTestCase(base.GsUtilTestCase):
"""Base class for gsutil unit tests."""
@classmethod
def setUpClass(cls):
  """One-time class setup: installs mock storage and a mock-backed runner."""
  base.GsUtilTestCase.setUpClass()
  # All storage URIs and API calls in unit tests route through mocks.
  cls.mock_bucket_storage_uri = util.GSMockBucketStorageUri
  cls.mock_gsutil_api_class_map_factory = GsutilApiUnitTestClassMapFactory
  cls.logger = logging.getLogger()
  cls.command_runner = CommandRunner(
      bucket_storage_uri_class=cls.mock_bucket_storage_uri,
      gsutil_api_class_map_factory=cls.mock_gsutil_api_class_map_factory)
  # Ensure unit tests don't fail if no default_project_id is defined in the
  # boto config file.
  project_id.UNIT_TEST_PROJECT_ID = 'mock-project-id-for-unit-tests'
def setUp(self):
  """Redirects stdout/stderr and root logging to per-test temp files."""
  super(GsUtilUnitTestCase, self).setUp()
  self.bucket_uris = []
  # Save the real streams so tearDown can restore them.
  self.stdout_save = sys.stdout
  self.stderr_save = sys.stderr
  fd, self.stdout_file = tempfile.mkstemp()
  # Specify the encoding explicitly to ensure Windows uses 'utf-8' instead of
  # the default of 'cp1252'.
  if six.PY2:
    sys.stdout = os.fdopen(fd, 'w+')
  else:
    sys.stdout = os.fdopen(fd, 'w+', encoding='utf-8')
  fd, self.stderr_file = tempfile.mkstemp()
  # do not set sys.stderr to be 'wb+' - it will blow up the logger
  if six.PY2:
    sys.stderr = os.fdopen(fd, 'w+')
  else:
    sys.stderr = os.fdopen(fd, 'w+', encoding='utf-8')
  # Output captured from earlier RunCommand calls within the same test.
  self.accumulated_stdout = []
  self.accumulated_stderr = []
  self.root_logger = logging.getLogger()
  # When the root logger is at DEBUG, tearDown echoes all captured output.
  self.is_debugging = self.root_logger.isEnabledFor(logging.DEBUG)
  # Swap all root handlers for one that writes to a temp file so log output
  # can be read back (and optionally echoed) in tearDown.
  self.log_handlers_save = self.root_logger.handlers
  fd, self.log_handler_file = tempfile.mkstemp()
  self.log_handler_stream = os.fdopen(fd, 'w+')
  self.temp_log_handler = logging.StreamHandler(self.log_handler_stream)
  self.root_logger.handlers = [self.temp_log_handler]
def tearDown(self):
  """Restores streams/logging and echoes captured output when debugging.

  Collects everything the test wrote to the redirected stdout/stderr and to
  the temporary log handler, restores the saved sys streams and root-logger
  handlers, removes the temp files, and, if the root logger was at DEBUG
  level when setUp ran, prints the captured output to the real stderr.
  """
  super(GsUtilUnitTestCase, self).tearDown()
  self.root_logger.handlers = self.log_handlers_save
  self.temp_log_handler.flush()
  self.temp_log_handler.close()
  self.log_handler_stream.seek(0)
  log_output = self.log_handler_stream.read()
  self.log_handler_stream.close()
  os.unlink(self.log_handler_file)
  sys.stdout.seek(0)
  sys.stderr.seek(0)
  if six.PY2:
    stdout = sys.stdout.read()
    stderr = sys.stderr.read()
  else:
    try:
      stdout = sys.stdout.read()
      stderr = sys.stderr.read()
    except UnicodeDecodeError:
      # Captured output was not valid UTF-8; fall back to raw bytes.
      sys.stdout.seek(0)
      sys.stderr.seek(0)
      stdout = sys.stdout.buffer.read()
      stderr = sys.stderr.buffer.read()
  # Bug fix: these conversions were previously written as bare list
  # comprehensions whose results were discarded (a no-op). Assign the
  # converted text back so the ''.join calls below cannot mix bytes and str.
  self.accumulated_stderr = [
      six.ensure_text(string) for string in self.accumulated_stderr
  ]
  self.accumulated_stdout = [
      six.ensure_text(string) for string in self.accumulated_stdout
  ]
  stdout = six.ensure_text(get_utf8able_str(stdout))
  stderr = six.ensure_text(get_utf8able_str(stderr))
  stdout += ''.join(self.accumulated_stdout)
  stderr += ''.join(self.accumulated_stderr)
  _AttemptToCloseSysFd(sys.stdout)
  _AttemptToCloseSysFd(sys.stderr)
  sys.stdout = self.stdout_save
  sys.stderr = self.stderr_save
  os.unlink(self.stdout_file)
  os.unlink(self.stderr_file)
  _id = six.ensure_text(self.id())
  if self.is_debugging and stdout:
    print_to_fd('==== stdout {} ====\n'.format(_id), file=sys.stderr)
    print_to_fd(stdout, file=sys.stderr)
    print_to_fd('==== end stdout ====\n', file=sys.stderr)
  if self.is_debugging and stderr:
    print_to_fd('==== stderr {} ====\n'.format(_id), file=sys.stderr)
    print_to_fd(stderr, file=sys.stderr)
    print_to_fd('==== end stderr ====\n', file=sys.stderr)
  if self.is_debugging and log_output:
    print_to_fd('==== log output {} ====\n'.format(_id), file=sys.stderr)
    print_to_fd(log_output, file=sys.stderr)
    print_to_fd('==== end log output ====\n', file=sys.stderr)
def RunCommand(self,
               command_name,
               args=None,
               headers=None,
               debug=0,
               return_stdout=False,
               return_stderr=False,
               return_log_handler=False,
               cwd=None):
  """Method for calling gslib.command_runner.CommandRunner.

  Passes parallel_operations=False for all tests, optionally saving/returning
  stdout output. We run all tests multi-threaded, to exercise those more
  complicated code paths.
  TODO: Change to run with parallel_operations=True for all tests. At
  present when you do this it causes many test failures.

  Args:
    command_name: The name of the command being run.
    args: Command-line args (arg0 = actual arg, not command name ala bash).
    headers: Dictionary containing optional HTTP headers to pass to boto.
    debug: Debug level to pass in to boto connection (range 0..3).
    return_stdout: If True, will save and return stdout produced by command.
    return_stderr: If True, will save and return stderr produced by command.
    return_log_handler: If True, will return a MockLoggingHandler instance
        that was attached to the command's logger while running.
    cwd: The working directory that should be switched to before running the
        command. The working directory will be reset back to its original
        value after running the command. If not specified, the working
        directory is left unchanged.

  Returns:
    One or a tuple of requested return values, depending on whether
    return_stdout, return_stderr, and/or return_log_handler were specified.
    Return Types:
      stdout - str (binary in Py2, text in Py3)
      stderr - str (binary in Py2, text in Py3)
      log_handler - MockLoggingHandler
  """
  args = args or []
  command_line = six.ensure_text(' '.join([command_name] + args))
  if self.is_debugging:
    print_to_fd('\nRunCommand of {}\n'.format(command_line),
                file=self.stderr_save)
  # Save and truncate stdout and stderr for the lifetime of RunCommand. This
  # way, we can return just the stdout and stderr that was output during the
  # RunNamedCommand call below.
  sys.stdout.seek(0)
  sys.stderr.seek(0)
  stdout = sys.stdout.read()
  stderr = sys.stderr.read()
  if stdout:
    self.accumulated_stdout.append(stdout)
  if stderr:
    self.accumulated_stderr.append(stderr)
  sys.stdout.seek(0)
  sys.stderr.seek(0)
  sys.stdout.truncate()
  sys.stderr.truncate()
  # Attach a mock handler to the command's logger so callers can assert on
  # the records it emitted.
  mock_log_handler = MockLoggingHandler()
  logging.getLogger(command_name).addHandler(mock_log_handler)
  if debug:
    logging.getLogger(command_name).setLevel(logging.DEBUG)
  try:
    with WorkingDirectory(cwd):
      self.command_runner.RunNamedCommand(command_name,
                                          args=args,
                                          headers=headers,
                                          debug=debug,
                                          parallel_operations=False,
                                          do_shutdown=False)
  finally:
    # Capture the command's output (and detach the mock handler) even if
    # RunNamedCommand raised.
    sys.stdout.seek(0)
    sys.stderr.seek(0)
    if six.PY2:
      stdout = sys.stdout.read()
      stderr = sys.stderr.read()
    else:
      try:
        stdout = sys.stdout.read()
        stderr = sys.stderr.read()
      except UnicodeDecodeError:
        # Output was not valid UTF-8; re-read as raw bytes.
        sys.stdout.seek(0)
        sys.stderr.seek(0)
        stdout = sys.stdout.buffer.read()
        stderr = sys.stderr.buffer.read()
    logging.getLogger(command_name).removeHandler(mock_log_handler)
    mock_log_handler.close()
    # Flatten captured log records into "LEVEL:\n record..." sections.
    log_output = '\n'.join(
        '%s:\n ' % level + '\n '.join(records)
        for level, records in six.iteritems(mock_log_handler.messages)
        if records)
    _id = six.ensure_text(self.id())
    if self.is_debugging and log_output:
      print_to_fd('==== logging RunCommand {} {} ====\n'.format(
          _id, command_line),
                  file=self.stderr_save)
      print_to_fd(log_output, file=self.stderr_save)
      print_to_fd('\n==== end logging ====\n', file=self.stderr_save)
    if self.is_debugging and stdout:
      print_to_fd('==== stdout RunCommand {} {} ====\n'.format(
          _id, command_line),
                  file=self.stderr_save)
      print_to_fd(stdout, file=self.stderr_save)
      print_to_fd('==== end stdout ====\n', file=self.stderr_save)
    if self.is_debugging and stderr:
      print_to_fd('==== stderr RunCommand {} {} ====\n'.format(
          _id, command_line),
                  file=self.stderr_save)
      print_to_fd(stderr, file=self.stderr_save)
      print_to_fd('==== end stderr ====\n', file=self.stderr_save)
    # Reset stdout and stderr files, so that we won't print them out again
    # in tearDown if debugging is enabled.
    sys.stdout.seek(0)
    sys.stderr.seek(0)
    sys.stdout.truncate()
    sys.stderr.truncate()
  to_return = []
  if return_stdout:
    to_return.append(stdout)
  if return_stderr:
    to_return.append(stderr)
  if return_log_handler:
    to_return.append(mock_log_handler)
  # A single requested value is returned bare, not as a 1-tuple.
  if len(to_return) == 1:
    return to_return[0]
  return tuple(to_return)
@classmethod
def MakeGsUtilApi(cls, debug=0):
  """Builds a CloudApiDelegator wired to the unit-test mock class map."""
  support_map = {
      'gs': [ApiSelector.XML, ApiSelector.JSON],
      's3': [ApiSelector.XML],
  }
  default_map = {
      'gs': ApiSelector.JSON,
      's3': ApiSelector.XML,
  }
  gsutil_api_map = {
      ApiMapConstants.API_MAP:
          cls.mock_gsutil_api_class_map_factory.GetClassMap(),
      ApiMapConstants.SUPPORT_MAP: support_map,
      ApiMapConstants.DEFAULT_MAP: default_map,
  }
  return CloudApiDelegator(cls.mock_bucket_storage_uri,
                           gsutil_api_map,
                           cls.logger,
                           DiscardMessagesQueue(),
                           debug=debug)
@classmethod
def _test_wildcard_iterator(cls, uri_or_str, exclude_tuple=None, debug=0):
  """Convenience factory for a WildcardIterator set up for testing.

  Saves callers from supplying all the class parameters (such as
  bucket_storage_uri_class=mock_storage_service.MockBucketStorageUri) and
  makes it explicit in test code that the iterator is test-configured.

  Args are the same as for wildcard_iterator.wildcard_iterator(), minus the
  class args for bucket_storage_uri_class / gsutil_api_class.

  Args:
    uri_or_str: StorageUri or string representing the wildcard string.
    exclude_tuple: (base_url, exclude_pattern), where base_url is
                   top-level URL to list; exclude_pattern is a regex
                   of paths to ignore during iteration.
    debug: debug level to pass to the underlying connection (0..3)

  Returns:
    WildcardIterator, over which caller can iterate.
  """
  # TODO: Remove when tests no longer pass StorageUri arguments.
  # StorageUri objects expose .uri; plain strings are used as-is.
  uri_string = getattr(uri_or_str, 'uri', uri_or_str)
  return wildcard_iterator.CreateWildcardIterator(
      uri_string, cls.MakeGsUtilApi(debug), exclude_tuple=exclude_tuple)
@staticmethod
def _test_storage_uri(uri_str, default_scheme='file', debug=0, validate=True):
  """Convenience factory for a StorageUri set up for testing.

  Saves callers from supplying
  bucket_storage_uri_class=mock_storage_service.MockBucketStorageUri and
  makes it explicit in test code that the StorageUri is test-configured.

  Args, Returns, and Raises match boto.storage_uri(), minus the
  bucket_storage_uri_class arg.

  Args:
    uri_str: Uri string to create StorageUri for.
    default_scheme: Default scheme for the StorageUri
    debug: debug level to pass to the underlying connection (0..3)
    validate: If True, validate the resource that the StorageUri refers to.

  Returns:
    StorageUri based on the arguments.
  """
  # Always route through the mock bucket storage class in unit tests.
  mock_uri_class = util.GSMockBucketStorageUri
  return boto.storage_uri(uri_str, default_scheme, debug, validate,
                          mock_uri_class)
def CreateBucket(self,
                 bucket_name=None,
                 test_objects=0,
                 storage_class=None,
                 provider='gs'):
  """Creates a test bucket backed by the mock storage service.

  The bucket and all of its contents will be deleted after the test.

  Args:
    bucket_name: Create the bucket with this name. If not provided, a
                 temporary test bucket name is constructed.
    test_objects: The number of objects that should be placed in the bucket,
                  or a list of object names to place in the bucket.
                  Defaults to 0.
    storage_class: storage class to use. If not provided we us standard.
    provider: string provider to use, default gs.

  Returns:
    StorageUri for the created bucket.
  """
  if not bucket_name:
    bucket_name = self.MakeTempName('bucket')
  bucket_uri = boto.storage_uri(
      '%s://%s' % (provider, bucket_name.lower()),
      suppress_consec_slashes=False,
      bucket_storage_uri_class=util.GSMockBucketStorageUri)
  bucket_uri.create_bucket(storage_class=storage_class)
  # Registered so the test harness can clean the bucket up afterwards.
  self.bucket_uris.append(bucket_uri)
  try:
    # test_objects may be a list of names or an int count; ints are not
    # iterable, so TypeError selects the "generate names" branch.
    object_names = list(test_objects)
  except TypeError:
    object_names = [self.MakeTempName('obj') for _ in range(test_objects)]
  for i, name in enumerate(object_names):
    self.CreateObject(bucket_uri=bucket_uri,
                      object_name=name,
                      contents='test {}'.format(i).encode(UTF8))
  return bucket_uri
def CreateObject(self, bucket_uri=None, object_name=None, contents=None):
  """Creates a test object in the mock storage service.

  Args:
    bucket_uri: The URI of the bucket to place the object in. If not
                specified, a new temporary bucket is created.
    object_name: The name to use for the object. If not specified, a temporary
                 test object name is constructed.
    contents: The contents to write to the object. If not specified, the key
              is not written to, which means that it isn't actually created
              yet on the server.

  Returns:
    A StorageUri for the created object.
  """
  if not bucket_uri:
    bucket_uri = self.CreateBucket(provider=self.default_provider)
  if not object_name:
    object_name = self.MakeTempName('obj')
  key_uri = bucket_uri.clone_replace_name(object_name)
  # With contents=None the key is left unwritten (exists client-side only).
  if contents is not None:
    key_uri.set_contents_from_string(contents)
  return key_uri