From dc5c6f21ccf2c037f7c4cfb2133bdfe87069d57f Mon Sep 17 00:00:00 2001 From: kyleknap Date: Mon, 11 Aug 2014 11:16:39 -0700 Subject: [PATCH 1/3] Added a ``--source-region`` parameter. This parameter ensures the ability to do trans-region syncs, moves, and copies. Tests were expanded as well to better test handling endpoints. --- awscli/customizations/s3/filegenerator.py | 13 ++- awscli/customizations/s3/fileinfo.py | 6 +- awscli/customizations/s3/subcommands.py | 52 +++++++-- awscli/testutils.py | 3 + .../customizations/s3/test_filegenerator.py | 21 +++- .../customizations/s3/test_plugin.py | 103 ++++++++++++++++-- .../customizations/s3/test_s3handler.py | 5 +- tests/unit/customizations/s3/__init__.py | 25 ++++- tests/unit/customizations/s3/fake_session.py | 2 + .../customizations/s3/test_filegenerator.py | 33 ++++-- .../customizations/s3/test_subcommands.py | 41 ++++++- tests/unit/test_completer.py | 3 +- 12 files changed, 255 insertions(+), 52 deletions(-) diff --git a/awscli/customizations/s3/filegenerator.py b/awscli/customizations/s3/filegenerator.py index 16284830cc76..824d537996b0 100644 --- a/awscli/customizations/s3/filegenerator.py +++ b/awscli/customizations/s3/filegenerator.py @@ -54,9 +54,13 @@ class FileGenerator(object): under the same common prefix. The generator yields corresponding ``FileInfo`` objects to send to a ``Comparator`` or ``S3Handler``. 
""" - def __init__(self, service, endpoint, operation_name, follow_symlinks=True): + def __init__(self, service, endpoint, operation_name, + follow_symlinks=True, source_endpoint=None): self._service = service self._endpoint = endpoint + self._source_endpoint = endpoint + if source_endpoint: + self._source_endpoint = source_endpoint self.operation_name = operation_name self.follow_symlinks = follow_symlinks @@ -91,7 +95,8 @@ def call(self, files): last_update=last_update, src_type=src_type, service=self._service, endpoint=self._endpoint, dest_type=dest_type, - operation_name=self.operation_name) + operation_name=self.operation_name, + source_endpoint=self._source_endpoint) def list_files(self, path, dir_op): """ @@ -190,7 +195,7 @@ def list_objects(self, s3_path, dir_op): yield self._list_single_object(s3_path) else: operation = self._service.get_operation('ListObjects') - lister = BucketLister(operation, self._endpoint) + lister = BucketLister(operation, self._source_endpoint) for key in lister.list_objects(bucket=bucket, prefix=prefix): source_path, size, last_update = key if size == 0 and source_path.endswith('/'): @@ -216,7 +221,7 @@ def _list_single_object(self, s3_path): operation = self._service.get_operation('HeadObject') try: response = operation.call( - self._endpoint, bucket=bucket, key=key)[1] + self._source_endpoint, bucket=bucket, key=key)[1] except ClientError as e: # We want to try to give a more helpful error message. 
# This is what the customer is going to see so we want to diff --git a/awscli/customizations/s3/fileinfo.py b/awscli/customizations/s3/fileinfo.py index 36de233c71f2..fe482e64d13b 100644 --- a/awscli/customizations/s3/fileinfo.py +++ b/awscli/customizations/s3/fileinfo.py @@ -140,7 +140,7 @@ class FileInfo(TaskInfo): def __init__(self, src, dest=None, compare_key=None, size=None, last_update=None, src_type=None, dest_type=None, operation_name=None, service=None, endpoint=None, - parameters=None): + parameters=None, source_endpoint=None): super(FileInfo, self).__init__(src, src_type=src_type, operation_name=operation_name, service=service, @@ -156,6 +156,7 @@ def __init__(self, src, dest=None, compare_key=None, size=None, else: self.parameters = {'acl': None, 'sse': None} + self.source_endpoint = source_endpoint def _permission_to_param(self, permission): if permission == 'read': @@ -256,7 +257,8 @@ def delete(self): """ if (self.src_type == 's3'): bucket, key = find_bucket_key(self.src) - params = {'endpoint': self.endpoint, 'bucket': bucket, 'key': key} + params = {'endpoint': self.source_endpoint, 'bucket': bucket, + 'key': key} response_data, http = operate(self.service, 'DeleteObject', params) else: diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py index c85b01103acd..98f9b0865bf9 100644 --- a/awscli/customizations/s3/subcommands.py +++ b/awscli/customizations/s3/subcommands.py @@ -163,6 +163,16 @@ CONTENT_LANGUAGE = {'name': 'content-language', 'nargs': 1, 'help_text': ("The language the content is in.")} +SOURCE_REGION = {'name': 'source-region', 'nargs': 1, + 'help_text': ( + "When transferring objects from an s3 bucket to an s3 " + "bucket, this specifies the region of the source bucket." + " Note the region specified by ``--region`` or through " + "configuration of the CLI refers to the region of the " + "destination bucket. 
If ``--source-region`` is not " + "specified the region of the source will be the same " + "as the region of the destination bucket.")} + EXPIRES = {'name': 'expires', 'nargs': 1, 'help_text': ("The date and time at " "which the object is no longer cacheable.")} @@ -198,20 +208,22 @@ FOLLOW_SYMLINKS, NO_FOLLOW_SYMLINKS, NO_GUESS_MIME_TYPE, SSE, STORAGE_CLASS, GRANTS, WEBSITE_REDIRECT, CONTENT_TYPE, CACHE_CONTROL, CONTENT_DISPOSITION, CONTENT_ENCODING, - CONTENT_LANGUAGE, EXPIRES] + CONTENT_LANGUAGE, EXPIRES, SOURCE_REGION] SYNC_ARGS = [DELETE, EXACT_TIMESTAMPS, SIZE_ONLY] + TRANSFER_ARGS +def get_endpoint(service, region, endpoint_url, verify): + return service.get_endpoint(region_name=region, endpoint_url=endpoint_url, + verify=verify) + + class S3Command(BasicCommand): def _run_main(self, parsed_args, parsed_globals): self.service = self._session.get_service('s3') - self.endpoint = self._get_endpoint(self.service, parsed_globals) - - def _get_endpoint(self, service, parsed_globals): - return service.get_endpoint(region_name=parsed_globals.region, - endpoint_url=parsed_globals.endpoint_url, - verify=parsed_globals.verify_ssl) + self.endpoint = get_endpoint(self.service, parsed_globals.region, + parsed_globals.endpoint_url, + parsed_globals.verify_ssl) class ListCommand(S3Command): @@ -363,6 +375,7 @@ def _run_main(self, parsed_args, parsed_globals): cmd_params.check_force(parsed_globals) cmd = CommandArchitecture(self._session, self.NAME, cmd_params.parameters) + cmd.set_endpoints() cmd.create_instructions() return cmd.run() @@ -463,10 +476,24 @@ def __init__(self, session, cmd, parameters): self.parameters = parameters self.instructions = [] self._service = self.session.get_service('s3') - self._endpoint = self._service.get_endpoint( - region_name=self.parameters['region'], + self._endpoint = None + self._source_endpoint = None + + def set_endpoints(self): + self._endpoint = get_endpoint( + self._service, + region=self.parameters['region'], 
endpoint_url=self.parameters['endpoint_url'], - verify=self.parameters['verify_ssl']) + verify=self.parameters['verify_ssl'] + ) + if self.parameters['source_region']: + if self.parameters['paths_type'] == 's3s3': + self._source_endpoint = get_endpoint( + self._service, + region=self.parameters['source_region'][0], + endpoint_url=None, + verify=self.parameters['verify_ssl'] + ) def create_instructions(self): """ @@ -526,7 +553,8 @@ def run(self): operation_name = cmd_translation[paths_type][self.cmd] file_generator = FileGenerator(self._service, self._endpoint, operation_name, - self.parameters['follow_symlinks']) + self.parameters['follow_symlinks'], + self._source_endpoint) rev_generator = FileGenerator(self._service, self._endpoint, '', self.parameters['follow_symlinks']) taskinfo = [TaskInfo(src=files['src']['path'], @@ -610,6 +638,8 @@ def __init__(self, session, cmd, parameters, usage): self.parameters['dir_op'] = False if 'follow_symlinks' not in parameters: self.parameters['follow_symlinks'] = True + if 'source_region' not in parameters: + self.parameters['source_region'] = None if self.cmd in ['sync', 'mb', 'rb']: self.parameters['dir_op'] = True diff --git a/awscli/testutils.py b/awscli/testutils.py index 0484e63d3120..b6f4bd2abcc0 100644 --- a/awscli/testutils.py +++ b/awscli/testutils.py @@ -333,6 +333,9 @@ def create_file(self, filename, contents, mtime=None): os.makedirs(os.path.dirname(full_path)) with open(full_path, 'w') as f: f.write(contents) + current_time = os.path.getmtime(full_path) + # Subtract a few years off the last modification date. 
+ os.utime(full_path, (current_time, current_time - 100000000)) if mtime is not None: os.utime(full_path, (mtime, mtime)) return full_path diff --git a/tests/integration/customizations/s3/test_filegenerator.py b/tests/integration/customizations/s3/test_filegenerator.py index 08850ef50b22..23b758ea35ea 100644 --- a/tests/integration/customizations/s3/test_filegenerator.py +++ b/tests/integration/customizations/s3/test_filegenerator.py @@ -57,7 +57,9 @@ def test_s3_file(self): size=expected_file_size, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='') + dest_type='local', operation_name='', + endpoint=self.endpoint, + source_endpoint=self.endpoint) expected_list = [file_info] self.assertEqual(len(result_list), 1) @@ -81,14 +83,18 @@ def test_s3_directory(self): size=21, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='') + dest_type='local', operation_name='', + endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info2 = FileInfo(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=15, last_update=result_list[1].last_update, src_type='s3', - dest_type='local', operation_name='') + dest_type='local', operation_name='', + endpoint=self.endpoint, + source_endpoint=self.endpoint) expected_result = [file_info, file_info2] self.assertEqual(len(result_list), 2) @@ -117,7 +123,8 @@ def test_s3_delete_directory(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='delete', - service=self.service, endpoint=self.endpoint) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info2 = FileInfo( src=self.file2, dest='another_directory' + os.sep + 'text2.txt', @@ -127,7 +134,8 @@ def test_s3_delete_directory(self): src_type='s3', dest_type='local', operation_name='delete', service=self.service, - endpoint=self.endpoint) + endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info3 = FileInfo( 
src=self.file1, dest='text1.txt', @@ -137,7 +145,8 @@ def test_s3_delete_directory(self): src_type='s3', dest_type='local', operation_name='delete', service=self.service, - endpoint=self.endpoint) + endpoint=self.endpoint, + source_endpoint=self.endpoint) expected_list = [file_info1, file_info2, file_info3] self.assertEqual(len(result_list), 3) diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index 4301ea477320..b96b3f968a4c 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -21,6 +21,7 @@ import contextlib import time import signal +import string import botocore.session import six @@ -51,7 +52,9 @@ def setUp(self): self.files = FileCreator() self.session = botocore.session.get_session() self.service = self.session.get_service('s3') - self.endpoint = self.service.get_endpoint('us-east-1') + self.regions = {} + self.region = 'us-east-1' + self.endpoint = self.service.get_endpoint(self.region) self.extra_setup() def extra_setup(self): @@ -77,14 +80,18 @@ def assert_key_contents_equal(self, bucket, key, expected_contents): self.fail("Contents for %s/%s do not match (but they " "have the same length)" % (bucket, key)) - def create_bucket(self): - bucket_name = _create_bucket(self.session) + def create_bucket(self, name=None, region=None): + if not region: + region = self.region + bucket_name = _create_bucket(self.session, name, region) + self.regions[bucket_name] = region self.addCleanup(self.delete_bucket, bucket_name) return bucket_name def put_object(self, bucket_name, key_name, contents=''): operation = self.service.get_operation('PutObject') - http = operation.call(self.endpoint, bucket=bucket_name, + endpoint = self.service.get_endpoint(self.regions[bucket_name]) + http = operation.call(endpoint, bucket=bucket_name, key=key_name, body=contents)[0] self.assertEqual(http.status_code, 200) self.addCleanup(self.delete_key, 
bucket_name, key_name) @@ -92,12 +99,15 @@ def put_object(self, bucket_name, key_name, contents=''): def delete_bucket(self, bucket_name): self.remove_all_objects(bucket_name) operation = self.service.get_operation('DeleteBucket') - response = operation.call(self.endpoint, bucket=bucket_name)[0] + endpoint = self.service.get_endpoint(self.regions[bucket_name]) + response = operation.call(endpoint, bucket=bucket_name)[0] + del self.regions[bucket_name] self.assertEqual(response.status_code, 204, response.content) def remove_all_objects(self, bucket_name): operation = self.service.get_operation('ListObjects') - pages = operation.paginate(self.endpoint, bucket=bucket_name) + endpoint = self.service.get_endpoint(self.regions[bucket_name]) + pages = operation.paginate(endpoint, bucket=bucket_name) parsed = pages.build_full_result() key_names = [obj['Key'] for obj in parsed['Contents']] for key_name in key_names: @@ -105,21 +115,24 @@ def remove_all_objects(self, bucket_name): def delete_key(self, bucket_name, key_name): operation = self.service.get_operation('DeleteObject') - response = operation.call(self.endpoint, bucket=bucket_name, + endpoint = self.service.get_endpoint(self.regions[bucket_name]) + response = operation.call(endpoint, bucket=bucket_name, key=key_name)[0] self.assertEqual(response.status_code, 204) def get_key_contents(self, bucket_name, key_name): operation = self.service.get_operation('GetObject') + endpoint = self.service.get_endpoint(self.regions[bucket_name]) http, parsed = operation.call( - self.endpoint, bucket=bucket_name, key=key_name) + endpoint, bucket=bucket_name, key=key_name) self.assertEqual(http.status_code, 200) return parsed['Body'].read().decode('utf-8') def key_exists(self, bucket_name, key_name): operation = self.service.get_operation('HeadObject') + endpoint = self.service.get_endpoint(self.regions[bucket_name]) http, parsed = operation.call( - self.endpoint, bucket=bucket_name, key=key_name) + endpoint, bucket=bucket_name, 
key=key_name) return http.status_code == 200 def list_buckets(self): @@ -130,8 +143,9 @@ def list_buckets(self): def content_type_for_key(self, bucket_name, key_name): operation = self.service.get_operation('HeadObject') + endpoint = self.service.get_endpoint(self.regions[bucket_name]) http, parsed = operation.call( - self.endpoint, bucket=bucket_name, key=key_name) + endpoint, bucket=bucket_name, key=key_name) self.assertEqual(http.status_code, 200) return parsed['ContentType'] @@ -487,6 +501,7 @@ def test_sync_no_resync(self): p = aws('s3 sync %s s3://%s' % (self.files.rootdir, bucket_name)) self.assert_no_errors(p) + time.sleep(2) self.assertTrue(self.key_exists(bucket_name, 'xyz123456789')) self.assertTrue(self.key_exists(bucket_name, 'xyz1/test')) self.assertTrue(self.key_exists(bucket_name, 'xyz/test')) @@ -579,6 +594,74 @@ def test_sync_with_delete_option_with_same_prefix(self): self.assertEqual('', p.stdout) +class TestSourceRegion(BaseS3CLICommand): + def extra_setup(self): + name_comp = [] + for i in range(2): + name_comp.append(''.join(random.sample(string.ascii_lowercase + + string.digits,10))) + self.src_name = '.'.join(name_comp + ['com']) + name_comp = [] + for i in range(2): + name_comp.append(''.join(random.sample(string.ascii_lowercase + + string.digits,10))) + self.dest_name = '.'.join(name_comp + ['com']) + self.src_region = 'us-west-1' + self.dest_region = 'us-west-2' + self.src_bucket = self.create_bucket(self.src_name, self.src_region) + self.dest_bucket = self.create_bucket(self.dest_name, self.dest_region) + + def testFailWithoutRegion(self): + self.files.create_file('foo.txt', 'foo') + p = aws('s3 sync %s s3://%s/ --region %s' % + (self.files.rootdir, self.src_bucket, self.src_region)) + self.assert_no_errors(p) + p2 = aws('s3 sync s3://%s/ s3://%s/ --region %s' % + (self.src_bucket, self.dest_bucket, self.src_region)) + self.assertEqual(p2.rc, 1, p2.stdout) + self.assertIn('PermanentRedirect', p2.stdout) + + def testCpRegion(self): + 
self.files.create_file('foo.txt', 'foo') + p = aws('s3 sync %s s3://%s/ --region %s' % + (self.files.rootdir, self.src_bucket, self.src_region)) + self.assert_no_errors(p) + p2 = aws('s3 cp s3://%s/ s3://%s/ --region %s --source-region %s ' + '--recursive' % + (self.src_bucket, self.dest_bucket, self.dest_region, + self.src_region)) + self.assertEqual(p2.rc, 0, p2.stdout) + self.assertTrue( + self.key_exists(bucket_name=self.dest_bucket, key_name='foo.txt')) + + def testSyncRegion(self): + self.files.create_file('foo.txt', 'foo') + p = aws('s3 sync %s s3://%s/ --region %s' % + (self.files.rootdir, self.src_bucket, self.src_region)) + self.assert_no_errors(p) + p2 = aws('s3 sync s3://%s/ s3://%s/ --region %s --source-region %s ' % + (self.src_bucket, self.dest_bucket, self.dest_region, + self.src_region)) + self.assertEqual(p2.rc, 0, p2.stdout) + self.assertTrue( + self.key_exists(bucket_name=self.dest_bucket, key_name='foo.txt')) + + def testMvRegion(self): + self.files.create_file('foo.txt', 'foo') + p = aws('s3 sync %s s3://%s/ --region %s' % + (self.files.rootdir, self.src_bucket, self.src_region)) + self.assert_no_errors(p) + p2 = aws('s3 mv s3://%s/ s3://%s/ --region %s --source-region %s ' + '--recursive' % + (self.src_bucket, self.dest_bucket, self.dest_region, + self.src_region)) + self.assertEqual(p2.rc, 0, p2.stdout) + self.assertTrue( + self.key_exists(bucket_name=self.dest_bucket, key_name='foo.txt')) + self.assertFalse( + self.key_exists(bucket_name=self.src_bucket, key_name='foo.txt')) + + @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], 'Symlink tests only supported on mac/linux') class TestSymlinks(BaseS3CLICommand): diff --git a/tests/integration/customizations/s3/test_s3handler.py b/tests/integration/customizations/s3/test_s3handler.py index 0d732725b2d8..4d46a2381173 100644 --- a/tests/integration/customizations/s3/test_s3handler.py +++ b/tests/integration/customizations/s3/test_s3handler.py @@ -84,6 +84,7 @@ def 
test_s3_delete(self): size=0, service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint )) self.assertEqual(len(list_contents(self.bucket, self.session)), 3) self.s3_handler.call(tasks) @@ -136,6 +137,7 @@ def test_delete_url_encode(self): src=key, src_type='s3', dest_type='local', operation_name='delete', size=0, service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint )] self.assertEqual(len(list_contents(self.bucket, self.session)), 1) self.s3_handler.call(tasks) @@ -274,7 +276,8 @@ def test_move(self): src=self.s3_files[i], src_type='s3', dest=self.s3_files2[i], dest_type='s3', operation_name='move', size=0, - service=self.service, endpoint=self.endpoint + service=self.service, endpoint=self.endpoint, + source_endpoint = self.endpoint )) # Perform the move. self.s3_handler.call(tasks) diff --git a/tests/unit/customizations/s3/__init__.py b/tests/unit/customizations/s3/__init__.py index 4171826efc98..4c4f995407d2 100644 --- a/tests/unit/customizations/s3/__init__.py +++ b/tests/unit/customizations/s3/__init__.py @@ -103,17 +103,24 @@ def make_s3_files(session, key1='text1.txt', key2='text2.txt'): return bucket -def create_bucket(session): +def create_bucket(session, name=None, region=None): """ Creates a bucket :returns: the name of the bucket created """ service = session.get_service('s3') - region = 'us-east-1' + if not region: + region = 'us-east-1' endpoint = service.get_endpoint(region) - rand1 = ''.join(random.sample(string.ascii_lowercase + string.digits, 10)) - bucket_name = 'awscli-s3test-' + str(rand1) + if name: + bucket_name = name + else: + rand1 = ''.join(random.sample(string.ascii_lowercase + string.digits, + 10)) + bucket_name = 'awscli-s3test-' + str(rand1) params = {'endpoint': endpoint, 'bucket': bucket_name} + if region != 'us-east-1': + params['create_bucket_configuration'] = {'LocationConstraint': region} operation = service.get_operation('CreateBucket') http_response, response_data = 
operation.call(**params) return bucket_name @@ -154,6 +161,16 @@ def compare_files(self, result_file, ref_file): self.assertEqual(result_file.src_type, ref_file.src_type) self.assertEqual(result_file.dest_type, ref_file.dest_type) self.assertEqual(result_file.operation_name, ref_file.operation_name) + compare_endpoints(self, result_file.endpoint, ref_file.endpoint) + compare_endpoints(self, result_file.source_endpoint, + ref_file.source_endpoint) + + +def compare_endpoints(self, endpoint, ref_endpoint): + self.assertEqual(endpoint.region_name, ref_endpoint.region_name) + if getattr(endpoint, 'endpoint_url', None): + self.assertEqual(endpoint.endpoint_url, ref_endpoint.endpoint_url) + self.assertEqual(endpoint.verify, ref_endpoint.verify) def list_contents(bucket, session): diff --git a/tests/unit/customizations/s3/fake_session.py b/tests/unit/customizations/s3/fake_session.py index 14c15f376450..3ded0b48b476 100644 --- a/tests/unit/customizations/s3/fake_session.py +++ b/tests/unit/customizations/s3/fake_session.py @@ -89,6 +89,8 @@ def __init__(self, session): def get_endpoint(self, region_name, endpoint_url=None, verify=None): endpoint = Mock() endpoint.region_name = region_name + endpoint.endpoint_url = endpoint_url + endpoint.verify = verify return endpoint def get_operation(self, name): diff --git a/tests/unit/customizations/s3/test_filegenerator.py b/tests/unit/customizations/s3/test_filegenerator.py index 8139dfcea026..56ebc2d3754e 100644 --- a/tests/unit/customizations/s3/test_filegenerator.py +++ b/tests/unit/customizations/s3/test_filegenerator.py @@ -65,7 +65,8 @@ def test_local_file(self): compare_key='text1.txt', size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) ref_list = [file_info] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): @@ -91,7 +92,8 @@ def 
test_local_directory(self): compare_key='text1.txt', size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) path = self.local_dir + 'another_directory' + os.sep \ + 'text2.txt' size, last_update = get_file_stat(path) @@ -101,7 +103,8 @@ def test_local_directory(self): size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) ref_list = [file_info2, file_info] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): @@ -376,6 +379,7 @@ def setUp(self): self.file2 = self.bucket + '/' + 'another_directory/text2.txt' self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') + self.source_endpoint = self.service.get_endpoint('us-west-1') def tearDown(self): s3_cleanup(self.bucket, self.session) @@ -389,8 +393,9 @@ def test_s3_file(self): 'dest': {'path': 'text1.txt', 'type': 'local'}, 'dir_op': False, 'use_src_name': False} params = {'region': 'us-east-1'} - files = FileGenerator(self.service, self.endpoint, - '').call(input_s3_file) + file_gen = FileGenerator(self.service, self.endpoint, + '', source_endpoint=self.source_endpoint) + files = file_gen.call(input_s3_file) result_list = [] for filename in files: result_list.append(filename) @@ -400,7 +405,8 @@ def test_s3_file(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.source_endpoint) ref_list = [file_info] self.assertEqual(len(result_list), len(ref_list)) @@ -430,7 +436,8 @@ def test_s3_directory(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='', - service=None, 
endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info2 = FileInfo(src=self.file1, dest='text1.txt', compare_key='text1.txt', @@ -438,7 +445,8 @@ def test_s3_directory(self): last_update=result_list[1].last_update, src_type='s3', dest_type='local', operation_name='', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) ref_list = [file_info, file_info2] self.assertEqual(len(result_list), len(ref_list)) @@ -467,7 +475,8 @@ def test_s3_delete_directory(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='delete', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info2 = FileInfo(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', @@ -475,7 +484,8 @@ def test_s3_delete_directory(self): last_update=result_list[1].last_update, src_type='s3', dest_type='local', operation_name='delete', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) file_info3 = FileInfo(src=self.file1, dest='text1.txt', compare_key='text1.txt', @@ -483,7 +493,8 @@ def test_s3_delete_directory(self): last_update=result_list[2].last_update, src_type='s3', dest_type='local', operation_name='delete', - service=None, endpoint=None) + service=self.service, endpoint=self.endpoint, + source_endpoint=self.endpoint) ref_list = [file_info1, file_info2, file_info3] self.assertEqual(len(result_list), len(ref_list)) diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 9a88c00d97e4..05a230a1a7de 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -21,7 +21,7 @@ import botocore.session from awscli.customizations.s3.s3 import S3 from 
awscli.customizations.s3.subcommands import CommandParameters, \ - CommandArchitecture, CpCommand, SyncCommand, ListCommand + CommandArchitecture, CpCommand, SyncCommand, ListCommand, get_endpoint from awscli.testutils import unittest, BaseAWSHelpOutputTest from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \ make_s3_files, s3_cleanup, S3HandlerBaseTest @@ -33,6 +33,18 @@ def __init__(self, **kwargs): self.__dict__.update(kwargs) +class TestGetEndpoint(unittest.TestCase): + def test_endpoint(self): + session = FakeSession() + endpoint = get_endpoint(session.service, + region='us-west-1', + endpoint_url='URL', + verify=True) + self.assertEqual(endpoint.region_name, 'us-west-1') + self.assertEqual(endpoint.endpoint_url, 'URL') + self.assertTrue(endpoint.verify) + + class TestLSCommand(unittest.TestCase): def setUp(self): self.session = mock.Mock() @@ -106,9 +118,34 @@ def tearDown(self): self.output.close() sys.stdout = self.saved_stdout - super(CommandArchitectureTest, self).setUp() + super(CommandArchitectureTest, self).tearDown() clean_loc_files(self.loc_files) s3_cleanup(self.bucket, self.session) + + def test_set_endpoint_no_source(self): + cmd_arc = CommandArchitecture(self.session, 'sync', + {'region': 'us-west-1', + 'endpoint_url': None, + 'verify_ssl': None, + 'source_region': None}) + cmd_arc.set_endpoints() + endpoint = cmd_arc._endpoint + source_endpoint = cmd_arc._source_endpoint + self.assertEqual(endpoint.region_name, 'us-west-1') + self.assertEqual(source_endpoint, None) + + def test_set_endpoint_with_source(self): + cmd_arc = CommandArchitecture(self.session, 'sync', + {'region': 'us-west-1', + 'endpoint_url': None, + 'verify_ssl': None, + 'paths_type': 's3s3', + 'source_region': ['us-west-2']}) + cmd_arc.set_endpoints() + endpoint = cmd_arc._endpoint + source_endpoint = cmd_arc._source_endpoint + self.assertEqual(endpoint.region_name, 'us-west-1') + self.assertEqual(source_endpoint.region_name, 'us-west-2') def 
test_create_instructions(self): """ diff --git a/tests/unit/test_completer.py b/tests/unit/test_completer.py index eafa7d08b21a..94366a1062dd 100644 --- a/tests/unit/test_completer.py +++ b/tests/unit/test_completer.py @@ -71,7 +71,7 @@ '--sse', '--exclude', '--include', '--follow-symlinks', '--no-follow-symlinks', '--cache-control', '--content-type', - '--content-disposition', + '--content-disposition', '--source-region', '--content-encoding', '--content-language', '--expires', '--grants'] + GLOBALOPTS)), ('aws s3 cp --quiet -', -1, set(['--no-guess-mime-type', '--dryrun', @@ -82,6 +82,7 @@ '--expires', '--website-redirect', '--acl', '--storage-class', '--sse', '--exclude', '--include', + '--source-region', '--grants'] + GLOBALOPTS)), ('aws emr ', -1, set(['add-instance-groups', 'add-steps', 'add-tags', 'create-cluster', 'create-default-roles', From c931e5486af833cb9e63ceb4df93463cf9de052a Mon Sep 17 00:00:00 2001 From: kyleknap Date: Tue, 12 Aug 2014 09:33:52 -0700 Subject: [PATCH 2/3] Add ``InfoSetter`` class. This refactoring removes the necessity of passing arguments through the ``FileGenerator`` class in order for the ``FileInfo`` class to obtain the arguments it requires to perform an operation. 
--- awscli/customizations/s3/filegenerator.py | 30 ++-- awscli/customizations/s3/infosetter.py | 49 ++++++ awscli/customizations/s3/subcommands.py | 16 +- .../customizations/s3/test_filegenerator.py | 47 ++---- tests/unit/customizations/s3/__init__.py | 12 +- .../unit/customizations/s3/test_comparator.py | 152 +++++++----------- .../customizations/s3/test_filegenerator.py | 89 ++++------ tests/unit/customizations/s3/test_filters.py | 39 +++-- .../unit/customizations/s3/test_infosetter.py | 38 +++++ .../customizations/s3/test_subcommands.py | 13 +- 10 files changed, 257 insertions(+), 228 deletions(-) create mode 100644 awscli/customizations/s3/infosetter.py create mode 100644 tests/unit/customizations/s3/test_infosetter.py diff --git a/awscli/customizations/s3/filegenerator.py b/awscli/customizations/s3/filegenerator.py index 824d537996b0..2e53787e33e1 100644 --- a/awscli/customizations/s3/filegenerator.py +++ b/awscli/customizations/s3/filegenerator.py @@ -17,7 +17,6 @@ from dateutil.parser import parse from dateutil.tz import tzlocal -from awscli.customizations.s3.fileinfo import FileInfo from awscli.customizations.s3.utils import find_bucket_key, get_file_stat from awscli.customizations.s3.utils import BucketLister from awscli.errorhandler import ClientError @@ -46,6 +45,20 @@ def __init__(self, directory, filename): super(FileDecodingError, self).__init__(self.error_message) +class FileBase(object): + def __init__(self, src, dest=None, compare_key=None, size=None, + last_update=None, src_type=None, dest_type=None, + operation_name=None): + self.src = src + self.dest = dest + self.compare_key = compare_key + self.size = size + self.last_update = last_update + self.src_type = src_type + self.dest_type = dest_type + self.operation_name = operation_name + + class FileGenerator(object): """ This is a class the creates a generator to yield files based on information @@ -55,12 +68,9 @@ class FileGenerator(object): ``FileInfo`` objects to send to a ``Comparator`` or 
``S3Handler``. """ def __init__(self, service, endpoint, operation_name, - follow_symlinks=True, source_endpoint=None): + follow_symlinks=True): self._service = service self._endpoint = endpoint - self._source_endpoint = endpoint - if source_endpoint: - self._source_endpoint = source_endpoint self.operation_name = operation_name self.follow_symlinks = follow_symlinks @@ -90,13 +100,11 @@ def call(self, files): sep_table[dest_type]) else: dest_path = dest['path'] - yield FileInfo(src=src_path, dest=dest_path, + yield FileBase(src=src_path, dest=dest_path, compare_key=compare_key, size=size, last_update=last_update, src_type=src_type, - service=self._service, endpoint=self._endpoint, dest_type=dest_type, - operation_name=self.operation_name, - source_endpoint=self._source_endpoint) + operation_name=self.operation_name) def list_files(self, path, dir_op): """ @@ -195,7 +203,7 @@ def list_objects(self, s3_path, dir_op): yield self._list_single_object(s3_path) else: operation = self._service.get_operation('ListObjects') - lister = BucketLister(operation, self._source_endpoint) + lister = BucketLister(operation, self._endpoint) for key in lister.list_objects(bucket=bucket, prefix=prefix): source_path, size, last_update = key if size == 0 and source_path.endswith('/'): @@ -221,7 +229,7 @@ def _list_single_object(self, s3_path): operation = self._service.get_operation('HeadObject') try: response = operation.call( - self._source_endpoint, bucket=bucket, key=key)[1] + self._endpoint, bucket=bucket, key=key)[1] except ClientError as e: # We want to try to give a more helpful error message. # This is what the customer is going to see so we want to diff --git a/awscli/customizations/s3/infosetter.py b/awscli/customizations/s3/infosetter.py new file mode 100644 index 000000000000..bb4c69909d88 --- /dev/null +++ b/awscli/customizations/s3/infosetter.py @@ -0,0 +1,49 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from awscli.customizations.s3.fileinfo import FileInfo + + +class InfoSetter(object): + """ + This class takes a ``FileBase`` object's attributes and generates + a ``FileInfo`` object so that the operation can be performed. + """ + def __init__(self, service, endpoint, source_endpoint=None, + parameters=None): + self._service = service + self._endpoint = endpoint + self._source_endpoint = endpoint + if source_endpoint: + self._source_endpoint = source_endpoint + self._parameters = parameters + + def call(self, files): + for file_base in files: + file_info = self.inject_info(file_base) + yield file_info + + def inject_info(self, file_base): + file_info_attr = {} + file_info_attr['src'] = file_base.src + file_info_attr['dest'] = file_base.dest + file_info_attr['compare_key'] = file_base.compare_key + file_info_attr['size'] = file_base.size + file_info_attr['last_update'] = file_base.last_update + file_info_attr['src_type'] = file_base.src_type + file_info_attr['dest_type'] = file_base.dest_type + file_info_attr['operation_name'] = file_base.operation_name + file_info_attr['service'] = self._service + file_info_attr['endpoint'] = self._endpoint + file_info_attr['source_endpoint'] = self._source_endpoint + file_info_attr['parameters'] = self._parameters + return FileInfo(**file_info_attr) diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py index 98f9b0865bf9..ec68c312adc4 100644 --- a/awscli/customizations/s3/subcommands.py +++
b/awscli/customizations/s3/subcommands.py @@ -19,6 +19,7 @@ from awscli.customizations.commands import BasicCommand from awscli.customizations.s3.comparator import Comparator +from awscli.customizations.s3.infosetter import InfoSetter from awscli.customizations.s3.fileformat import FileFormat from awscli.customizations.s3.filegenerator import FileGenerator from awscli.customizations.s3.fileinfo import TaskInfo @@ -486,6 +487,7 @@ def set_endpoints(self): endpoint_url=self.parameters['endpoint_url'], verify=self.parameters['verify_ssl'] ) + self._source_endpoint = self._endpoint if self.parameters['source_region']: if self.parameters['paths_type'] == 's3s3': self._source_endpoint = get_endpoint( @@ -509,6 +511,8 @@ def create_instructions(self): self.instructions.append('filters') if self.cmd == 'sync': self.instructions.append('comparator') + if self.cmd not in ['mb', 'rb']: + self.instructions.append('info_setter') self.instructions.append('s3_handler') def run(self): @@ -551,10 +555,10 @@ def run(self): 'rb': 'remove_bucket' } operation_name = cmd_translation[paths_type][self.cmd] - file_generator = FileGenerator(self._service, self._endpoint, + file_generator = FileGenerator(self._service, + self._source_endpoint, operation_name, - self.parameters['follow_symlinks'], - self._source_endpoint) + self.parameters['follow_symlinks']) rev_generator = FileGenerator(self._service, self._endpoint, '', self.parameters['follow_symlinks']) taskinfo = [TaskInfo(src=files['src']['path'], @@ -562,6 +566,8 @@ def run(self): operation_name=operation_name, service=self._service, endpoint=self._endpoint)] + info_setter = InfoSetter(self._service, self._endpoint, + self._source_endpoint, self.parameters) s3handler = S3Handler(self.session, self.parameters) command_dict = {} @@ -572,21 +578,25 @@ def run(self): 'filters': [create_filter(self.parameters), create_filter(self.parameters)], 'comparator': [Comparator(self.parameters)], + 'info_setter': [info_setter], 's3_handler': 
[s3handler]} elif self.cmd == 'cp': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], + 'info_setter': [info_setter], 's3_handler': [s3handler]} elif self.cmd == 'rm': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], + 'info_setter': [info_setter], 's3_handler': [s3handler]} elif self.cmd == 'mv': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], + 'info_setter': [info_setter], 's3_handler': [s3handler]} elif self.cmd == 'mb': command_dict = {'setup': [taskinfo], diff --git a/tests/integration/customizations/s3/test_filegenerator.py b/tests/integration/customizations/s3/test_filegenerator.py index 23b758ea35ea..59353fc3d39b 100644 --- a/tests/integration/customizations/s3/test_filegenerator.py +++ b/tests/integration/customizations/s3/test_filegenerator.py @@ -22,8 +22,7 @@ import botocore.session from awscli import EnvironmentVariables -from awscli.customizations.s3.filegenerator import FileGenerator -from awscli.customizations.s3.fileinfo import FileInfo +from awscli.customizations.s3.filegenerator import FileGenerator, FileBase from tests.unit.customizations.s3 import make_s3_files, s3_cleanup, \ compare_files @@ -52,16 +51,14 @@ def test_s3_file(self): result_list = list( FileGenerator(self.service, self.endpoint, '').call( input_s3_file)) - file_info = FileInfo(src=self.file1, dest='text1.txt', + file_base = FileBase(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=expected_file_size, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='', - endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='local', operation_name='') - expected_list = [file_info] + expected_list = [file_base] self.assertEqual(len(result_list), 1) compare_files(self, result_list[0], expected_list[0]) @@ -77,26 +74,22 @@ def 
test_s3_directory(self): result_list = list( FileGenerator(self.service, self.endpoint, '').call( input_s3_file)) - file_info = FileInfo(src=self.file2, + file_base = FileBase(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=21, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='', - endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info2 = FileInfo(src=self.file1, + dest_type='local', operation_name='') + file_base2 = FileBase(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=15, last_update=result_list[1].last_update, src_type='s3', - dest_type='local', operation_name='', - endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='local', operation_name='') - expected_result = [file_info, file_info2] + expected_result = [file_base, file_base2] self.assertEqual(len(result_list), 2) compare_files(self, result_list[0], expected_result[0]) compare_files(self, result_list[1], expected_result[1]) @@ -115,40 +108,32 @@ def test_s3_delete_directory(self): 'delete').call( input_s3_file)) - file_info1 = FileInfo( + file_base1 = FileBase( src=self.bucket + '/another_directory/', dest='another_directory' + os.sep, compare_key='another_directory/', size=0, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info2 = FileInfo( + dest_type='local', operation_name='delete') + file_base2 = FileBase( src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=21, last_update=result_list[1].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, - endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info3 = FileInfo( + dest_type='local', operation_name='delete') + file_base3 = FileBase( src=self.file1, 
dest='text1.txt', compare_key='text1.txt', size=15, last_update=result_list[2].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, - endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='local', operation_name='delete') - expected_list = [file_info1, file_info2, file_info3] + expected_list = [file_base1, file_base2, file_base3] self.assertEqual(len(result_list), 3) compare_files(self, result_list[0], expected_list[0]) compare_files(self, result_list[1], expected_list[1]) diff --git a/tests/unit/customizations/s3/__init__.py b/tests/unit/customizations/s3/__init__.py index 4c4f995407d2..3743c757f1de 100644 --- a/tests/unit/customizations/s3/__init__.py +++ b/tests/unit/customizations/s3/__init__.py @@ -150,7 +150,7 @@ def s3_cleanup(bucket, session, key1='text1.txt', key2='text2.txt'): def compare_files(self, result_file, ref_file): """ - Ensures that the FileInfo's properties are what they + Ensures that the FileBase's properties are what they are suppose to be. 
""" self.assertEqual(result_file.src, ref_file.src) @@ -161,16 +161,6 @@ def compare_files(self, result_file, ref_file): self.assertEqual(result_file.src_type, ref_file.src_type) self.assertEqual(result_file.dest_type, ref_file.dest_type) self.assertEqual(result_file.operation_name, ref_file.operation_name) - compare_endpoints(self, result_file.endpoint, ref_file.endpoint) - compare_endpoints(self, result_file.source_endpoint, - ref_file.source_endpoint) - - -def compare_endpoints(self, endpoint, ref_endpoint): - self.assertEqual(endpoint.region_name, ref_endpoint.region_name) - if getattr(endpoint, 'endpoint_url', None): - self.assertEqual(endpoint.endpoint_url, ref_endpoint.endpoint_url) - self.assertEqual(endpoint.verify, ref_endpoint.verify) def list_contents(bucket, session): diff --git a/tests/unit/customizations/s3/test_comparator.py b/tests/unit/customizations/s3/test_comparator.py index d0d5652d1bc9..45d5cf5e6960 100644 --- a/tests/unit/customizations/s3/test_comparator.py +++ b/tests/unit/customizations/s3/test_comparator.py @@ -14,7 +14,7 @@ import unittest from awscli.customizations.s3.comparator import Comparator -from awscli.customizations.s3.fileinfo import FileInfo +from awscli.customizations.s3.filegenerator import FileBase class ComparatorTest(unittest.TestCase): @@ -30,16 +30,14 @@ def test_compare_key_equal(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='upload') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') src_files.append(src_file) 
dest_files.append(dest_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -56,16 +54,14 @@ def test_compare_size(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=11, last_update=time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='upload') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') src_files.append(src_file) dest_files.append(dest_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -84,16 +80,14 @@ def test_compare_lastmod_upload(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='upload') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') src_files.append(src_file) dest_files.append(dest_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -112,16 +106,14 @@ def test_compare_lastmod_copy(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='s3', 
- dest_type='s3', operation_name='copy', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='copy') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') src_files.append(src_file) dest_files.append(dest_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -140,16 +132,14 @@ def test_compare_lastmod_download(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='local', operation_name='download') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') src_files.append(src_file) dest_files.append(dest_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -159,16 +149,14 @@ def test_compare_lastmod_download(self): self.assertEqual(result_list, ref_list) # If the source is newer than the destination do not download. 
- src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='local', operation_name='download') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') src_files = [] dest_files = [] src_files.append(src_file) @@ -189,16 +177,14 @@ def test_compare_key_less(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='bomparator_test.py', size=10, last_update=time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='upload') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') src_files.append(src_file) dest_files.append(dest_file) dest_file.operation = 'delete' @@ -219,16 +205,14 @@ def test_compare_key_greater(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='domparator_test.py', size=10, last_update=time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) - dest_file = FileInfo(src='', dest='', + dest_type='s3', operation_name='upload') + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', 
operation_name='') src_files.append(src_file) dest_files.append(dest_file) src_file.operation = 'upload' @@ -250,11 +234,10 @@ def test_empty_src(self): ref_list = [] result_list = [] time = datetime.datetime.now() - dest_file = FileInfo(src='', dest='', + dest_file = FileBase(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') dest_files.append(dest_file) dest_file.operation = 'delete' ref_list.append(dest_file) @@ -273,11 +256,10 @@ def test_empty_dest(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='domparator_test.py', size=10, last_update=time, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) + dest_type='s3', operation_name='upload') src_files.append(src_file) ref_list.append(src_file) files = self.comparator.call(iter(src_files), iter(dest_files)) @@ -312,17 +294,15 @@ def test_compare_size_only_dest_older_than_src(self): time_src = datetime.datetime.now() time_dst = time_src + datetime.timedelta(days=1) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) + dest_type='s3', operation_name='upload') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 0) @@ -335,17 +315,15 @@ def test_compare_size_only_src_older_than_dest(self): time_dst = datetime.datetime.now() time_src = time_dst + 
datetime.timedelta(days=1) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='local', - dest_type='s3', operation_name='upload', - service=None, endpoint=None) + dest_type='s3', operation_name='upload') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='s3', - dest_type='local', operation_name='', - service=None, endpoint=None) + dest_type='local', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 0) @@ -364,17 +342,15 @@ def test_compare_exact_timestamps_dest_older(self): time_src = datetime.datetime.now() time_dst = time_src - datetime.timedelta(days=1) - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) + dest_type='local', operation_name='download') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 1) @@ -388,17 +364,15 @@ def test_compare_exact_timestamps_src_older(self): time_src = datetime.datetime.now() - datetime.timedelta(days=1) time_dst = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) + dest_type='local', operation_name='download') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', 
compare_key='test.py', size=10, last_update=time_dst, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 1) @@ -411,17 +385,15 @@ def test_compare_exact_timestamps_same_age_same_size(self): """ time_both = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) + dest_type='local', operation_name='download') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 0) @@ -434,17 +406,15 @@ def test_compare_exact_timestamps_same_age_diff_size(self): """ time_both = datetime.datetime.now() - src_file = FileInfo(src='', dest='', + src_file = FileBase(src='', dest='', compare_key='test.py', size=20, last_update=time_both, src_type='s3', - dest_type='local', operation_name='download', - service=None, endpoint=None) + dest_type='local', operation_name='download') - dst_file = FileInfo(src='', dest='', + dst_file = FileBase(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) + dest_type='s3', operation_name='') files = self.comparator.call(iter([src_file]), iter([dst_file])) self.assertEqual(sum(1 for _ in files), 1) diff --git a/tests/unit/customizations/s3/test_filegenerator.py b/tests/unit/customizations/s3/test_filegenerator.py index 56ebc2d3754e..8ede65de6536 100644 --- 
a/tests/unit/customizations/s3/test_filegenerator.py +++ b/tests/unit/customizations/s3/test_filegenerator.py @@ -20,8 +20,7 @@ import mock from awscli.customizations.s3.filegenerator import FileGenerator, \ - FileDecodingError -from awscli.customizations.s3.fileinfo import FileInfo + FileDecodingError, FileBase from awscli.customizations.s3.utils import get_file_stat import botocore.session from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \ @@ -61,13 +60,11 @@ def test_local_file(self): for filename in files: result_list.append(filename) size, last_update = get_file_stat(self.local_file) - file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt', + file_base = FileBase(src=self.local_file, dest='bucket/text1.txt', compare_key='text1.txt', size=size, last_update=last_update, src_type='local', - dest_type='s3', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - ref_list = [file_info] + dest_type='s3', operation_name='') + ref_list = [file_base] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -88,24 +85,20 @@ def test_local_directory(self): for filename in files: result_list.append(filename) size, last_update = get_file_stat(self.local_file) - file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt', + file_base = FileBase(src=self.local_file, dest='bucket/text1.txt', compare_key='text1.txt', size=size, last_update=last_update, src_type='local', - dest_type='s3', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='s3', operation_name='') path = self.local_dir + 'another_directory' + os.sep \ + 'text2.txt' size, last_update = get_file_stat(path) - file_info2 = FileInfo(src=path, + file_base2 = FileBase(src=path, dest='bucket/another_directory/text2.txt', compare_key='another_directory/text2.txt', size=size, 
last_update=last_update, src_type='local', - dest_type='s3', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - ref_list = [file_info2, file_info] + dest_type='s3', operation_name='') + ref_list = [file_base2, file_base] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -224,12 +217,12 @@ def test_no_follow_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_infos = FileGenerator(self.service, self.endpoint, + file_bases = FileGenerator(self.service, self.endpoint, '', False).call(input_local_dir) self.filenames.sort() result_list = [] - for file_info in file_infos: - result_list.append(getattr(file_info, 'src')) + for file_base in file_bases: + result_list.append(getattr(file_base, 'src')) self.assertEqual(len(result_list), len(self.filenames)) # Just check to make sure the right local files are generated. 
for i in range(len(result_list)): @@ -246,13 +239,13 @@ def test_follow_bad_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_infos = FileGenerator(self.service, self.endpoint, + file_bases = FileGenerator(self.service, self.endpoint, '', True).call(input_local_dir) result_list = [] rc = 0 try: - for file_info in file_infos: - result_list.append(getattr(file_info, 'src')) + for file_base in file_bases: + result_list.append(getattr(file_base, 'src')) rc = 1 except OSError as e: pass @@ -271,13 +264,13 @@ def test_follow_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_infos = FileGenerator(self.service, self.endpoint, + file_bases = FileGenerator(self.service, self.endpoint, '', True).call(input_local_dir) all_filenames = self.filenames + self.symlink_files all_filenames.sort() result_list = [] - for file_info in file_infos: - result_list.append(getattr(file_info, 'src')) + for file_base in file_bases: + result_list.append(getattr(file_base, 'src')) self.assertEqual(len(result_list), len(all_filenames)) # Just check to make sure the right local files are generated. 
for i in range(len(result_list)): @@ -379,7 +372,6 @@ def setUp(self): self.file2 = self.bucket + '/' + 'another_directory/text2.txt' self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - self.source_endpoint = self.service.get_endpoint('us-west-1') def tearDown(self): s3_cleanup(self.bucket, self.session) @@ -393,22 +385,19 @@ def test_s3_file(self): 'dest': {'path': 'text1.txt', 'type': 'local'}, 'dir_op': False, 'use_src_name': False} params = {'region': 'us-east-1'} - file_gen = FileGenerator(self.service, self.endpoint, - '', source_endpoint=self.source_endpoint) + file_gen = FileGenerator(self.service, self.endpoint, '') files = file_gen.call(input_s3_file) result_list = [] for filename in files: result_list.append(filename) - file_info = FileInfo(src=self.file1, dest='text1.txt', + file_base = FileBase(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=result_list[0].size, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.source_endpoint) + dest_type='local', operation_name='') - ref_list = [file_info] + ref_list = [file_base] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -428,27 +417,23 @@ def test_s3_directory(self): result_list = [] for filename in files: result_list.append(filename) - file_info = FileInfo(src=self.file2, + file_base = FileBase(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=result_list[0].size, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info2 = FileInfo(src=self.file1, + dest_type='local', operation_name='') + file_base2 = FileBase(src=self.file1, dest='text1.txt', 
compare_key='text1.txt', size=result_list[1].size, last_update=result_list[1].last_update, src_type='s3', - dest_type='local', operation_name='', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='local', operation_name='') - ref_list = [file_info, file_info2] + ref_list = [file_base, file_base2] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -468,35 +453,29 @@ def test_s3_delete_directory(self): for filename in files: result_list.append(filename) - file_info1 = FileInfo(src=self.bucket + '/another_directory/', + file_base1 = FileBase(src=self.bucket + '/another_directory/', dest='another_directory' + os.sep, compare_key='another_directory/', size=result_list[0].size, last_update=result_list[0].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info2 = FileInfo(src=self.file2, + dest_type='local', operation_name='delete') + file_base2 = FileBase(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=result_list[1].size, last_update=result_list[1].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) - file_info3 = FileInfo(src=self.file1, + dest_type='local', operation_name='delete') + file_base3 = FileBase(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=result_list[2].size, last_update=result_list[2].last_update, src_type='s3', - dest_type='local', operation_name='delete', - service=self.service, endpoint=self.endpoint, - source_endpoint=self.endpoint) + dest_type='local', operation_name='delete') - ref_list = [file_info1, file_info2, file_info3] + ref_list = [file_base1, file_base2, file_base3] self.assertEqual(len(result_list), len(ref_list)) for i in 
range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) diff --git a/tests/unit/customizations/s3/test_filters.py b/tests/unit/customizations/s3/test_filters.py index a7f19bb05c2d..b3f49e5029b5 100644 --- a/tests/unit/customizations/s3/test_filters.py +++ b/tests/unit/customizations/s3/test_filters.py @@ -14,7 +14,7 @@ from awscli.testutils import unittest import platform -from awscli.customizations.s3.fileinfo import FileInfo +from awscli.customizations.s3.filegenerator import FileBase from awscli.customizations.s3.filters import Filter @@ -29,27 +29,26 @@ def platform_path(filepath): class FiltersTest(unittest.TestCase): def setUp(self): self.local_files = [ - self.file_info('test.txt'), - self.file_info('test.jpg'), - self.file_info(os.path.join('directory', 'test.jpg')), + self.file_base('test.txt'), + self.file_base('test.jpg'), + self.file_base(os.path.join('directory', 'test.jpg')), ] self.s3_files = [ - self.file_info('bucket/test.txt', src_type='s3'), - self.file_info('bucket/test.jpg', src_type='s3'), - self.file_info('bucket/key/test.jpg', src_type='s3'), + self.file_base('bucket/test.txt', src_type='s3'), + self.file_base('bucket/test.jpg', src_type='s3'), + self.file_base('bucket/key/test.jpg', src_type='s3'), ] - def file_info(self, filename, src_type='local'): + def file_base(self, filename, src_type='local'): if src_type == 'local': filename = os.path.abspath(filename) dest_type = 's3' else: dest_type = 'local' - return FileInfo(src=filename, dest='', + return FileBase(src=filename, dest='', compare_key='', size=10, last_update=0, src_type=src_type, - dest_type=dest_type, operation_name='', - service=None, endpoint=None) + dest_type=dest_type, operation_name='') def create_filter(self, filters=None, root=None, dst_root=None): if root is None: @@ -117,15 +116,15 @@ def test_prefix_filtering_consistent(self): # The same filter should work for both local and remote files. 
# So if I have a directory with 2 files: local_files = [ - self.file_info('test1.txt'), - self.file_info('nottest1.txt'), + self.file_base('test1.txt'), + self.file_base('nottest1.txt'), ] - # And the same 2 files remote (note that the way FileInfo objects + # And the same 2 files remote (note that the way FileBase objects # are constructed, we'll have the bucket name but no leading '/' # character): remote_files = [ - self.file_info('bucket/test1.txt', src_type='s3'), - self.file_info('bucket/nottest1.txt', src_type='s3'), + self.file_base('bucket/test1.txt', src_type='s3'), + self.file_base('bucket/nottest1.txt', src_type='s3'), ] # If I apply the filter to the local to the local files. exclude_filter = self.create_filter([['exclude', 't*']]) @@ -144,9 +143,9 @@ def test_prefix_filtering_consistent(self): def test_bucket_exclude_with_prefix(self): s3_files = [ - self.file_info('bucket/dir1/key1.txt', src_type='s3'), - self.file_info('bucket/dir1/key2.txt', src_type='s3'), - self.file_info('bucket/dir1/notkey3.txt', src_type='s3'), + self.file_base('bucket/dir1/key1.txt', src_type='s3'), + self.file_base('bucket/dir1/key2.txt', src_type='s3'), + self.file_base('bucket/dir1/notkey3.txt', src_type='s3'), ] filtered_files = list( self.create_filter([['exclude', 'dir1/*']], @@ -161,7 +160,7 @@ def test_bucket_exclude_with_prefix(self): def test_root_dir(self): p = platform_path - local_files = [self.file_info(p('/foo/bar/baz.txt'), src_type='local')] + local_files = [self.file_base(p('/foo/bar/baz.txt'), src_type='local')] local_filter = self.create_filter([['exclude', 'baz.txt']], root=p('/foo/bar/')) filtered = list(local_filter.call(local_files)) diff --git a/tests/unit/customizations/s3/test_infosetter.py b/tests/unit/customizations/s3/test_infosetter.py new file mode 100644 index 000000000000..2cf197a07fe2 --- /dev/null +++ b/tests/unit/customizations/s3/test_infosetter.py @@ -0,0 +1,38 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import mock + +from awscli.testutils import unittest +from awscli.customizations.s3.filegenerator import FileBase +from awscli.customizations.s3.fileinfo import FileInfo +from awscli.customizations.s3.infosetter import InfoSetter + + +class TestInfoSetter(unittest.TestCase): + def test_info_setter(self): + info_setter = InfoSetter(service='service', endpoint='endpoint', + source_endpoint='source_endpoint', + parameters='parameters') + files = [FileBase(src='src', dest='dest', compare_key='compare_key', + size='size', last_update='last_update', + src_type='src_type', dest_type='dest_type', + operation_name='operation_name')] + file_infos = info_setter.call(files) + for file_info in file_infos: + attributes = file_info.__dict__.keys() + for key in attributes: + self.assertEqual(getattr(file_info, key), str(key)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 05a230a1a7de..9ce310586daf 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -132,7 +132,7 @@ def test_set_endpoint_no_source(self): endpoint = cmd_arc._endpoint source_endpoint = cmd_arc._source_endpoint self.assertEqual(endpoint.region_name, 'us-west-1') - self.assertEqual(source_endpoint, None) + self.assertEqual(source_endpoint.region_name, 'us-west-1') def test_set_endpoint_with_source(self): cmd_arc = 
CommandArchitecture(self.session, 'sync', @@ -154,10 +154,11 @@ def test_create_instructions(self): """ cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb'] - instructions = {'cp': ['file_generator', 's3_handler'], - 'mv': ['file_generator', 's3_handler'], - 'rm': ['file_generator', 's3_handler'], - 'sync': ['file_generator', 'comparator', 's3_handler'], + instructions = {'cp': ['file_generator', 'info_setter', 's3_handler'], + 'mv': ['file_generator', 'info_setter', 's3_handler'], + 'rm': ['file_generator', 'info_setter', 's3_handler'], + 'sync': ['file_generator', 'comparator', + 'info_setter', 's3_handler'], 'mb': ['s3_handler'], 'rb': ['s3_handler']} @@ -175,7 +176,7 @@ def test_create_instructions(self): cmd_arc = CommandArchitecture(self.session, 'cp', params) cmd_arc.create_instructions() self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters', - 's3_handler']) + 'info_setter', 's3_handler']) def test_run_cp_put(self): # This ensures that the architecture sets up correctly for a ``cp`` put From 8e832a66a016db9811ef1b2f463c25db09d756b2 Mon Sep 17 00:00:00 2001 From: kyleknap Date: Tue, 12 Aug 2014 16:10:12 -0700 Subject: [PATCH 3/3] Renamed the recently added classes. Also moved the ``test_plugin.py`` tests out of ``us-east-1`` and into ``us-west-2``. 
--- awscli/customizations/s3/filegenerator.py | 4 +- .../s3/{infosetter.py => fileinfobuilder.py} | 6 +- awscli/customizations/s3/subcommands.py | 14 ++--- .../customizations/s3/test_filegenerator.py | 20 +++--- .../customizations/s3/test_plugin.py | 18 +++++- tests/unit/customizations/s3/__init__.py | 4 +- .../unit/customizations/s3/test_comparator.py | 62 +++++++++---------- .../customizations/s3/test_filegenerator.py | 48 +++++++------- ..._infosetter.py => test_fileinfobuilder.py} | 14 ++--- tests/unit/customizations/s3/test_filters.py | 36 +++++------ .../customizations/s3/test_subcommands.py | 14 +++-- 11 files changed, 128 insertions(+), 112 deletions(-) rename awscli/customizations/s3/{infosetter.py => fileinfobuilder.py} (94%) rename tests/unit/customizations/s3/{test_infosetter.py => test_fileinfobuilder.py} (73%) diff --git a/awscli/customizations/s3/filegenerator.py b/awscli/customizations/s3/filegenerator.py index 2e53787e33e1..7984a8a31e5a 100644 --- a/awscli/customizations/s3/filegenerator.py +++ b/awscli/customizations/s3/filegenerator.py @@ -45,7 +45,7 @@ def __init__(self, directory, filename): super(FileDecodingError, self).__init__(self.error_message) -class FileBase(object): +class FileStat(object): def __init__(self, src, dest=None, compare_key=None, size=None, last_update=None, src_type=None, dest_type=None, operation_name=None): @@ -100,7 +100,7 @@ def call(self, files): sep_table[dest_type]) else: dest_path = dest['path'] - yield FileBase(src=src_path, dest=dest_path, + yield FileStat(src=src_path, dest=dest_path, compare_key=compare_key, size=size, last_update=last_update, src_type=src_type, dest_type=dest_type, diff --git a/awscli/customizations/s3/infosetter.py b/awscli/customizations/s3/fileinfobuilder.py similarity index 94% rename from awscli/customizations/s3/infosetter.py rename to awscli/customizations/s3/fileinfobuilder.py index bb4c69909d88..8bc2042615ef 100644 --- a/awscli/customizations/s3/infosetter.py +++ 
b/awscli/customizations/s3/fileinfobuilder.py @@ -13,7 +13,7 @@ from awscli.customizations.s3.fileinfo import FileInfo -class InfoSetter(object): +class FileInfoBuilder(object): """ This class takes a ``FileBase`` object's attributes and generates a ``FileInfo`` object so that the operation can be performed. @@ -29,10 +29,10 @@ def __init__(self, service, endpoint, source_endpoint=None, def call(self, files): for file_base in files: - file_info = self.inject_info(file_base) + file_info = self._inject_info(file_base) yield file_info - def inject_info(self, file_base): + def _inject_info(self, file_base): file_info_attr = {} file_info_attr['src'] = file_base.src file_info_attr['dest'] = file_base.dest diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py index ec68c312adc4..a4f9586aa9dc 100644 --- a/awscli/customizations/s3/subcommands.py +++ b/awscli/customizations/s3/subcommands.py @@ -19,7 +19,7 @@ from awscli.customizations.commands import BasicCommand from awscli.customizations.s3.comparator import Comparator -from awscli.customizations.s3.infosetter import InfoSetter +from awscli.customizations.s3.fileinfobuilder import FileInfoBuilder from awscli.customizations.s3.fileformat import FileFormat from awscli.customizations.s3.filegenerator import FileGenerator from awscli.customizations.s3.fileinfo import TaskInfo @@ -512,7 +512,7 @@ def create_instructions(self): if self.cmd == 'sync': self.instructions.append('comparator') if self.cmd not in ['mb', 'rb']: - self.instructions.append('info_setter') + self.instructions.append('file_info_builder') self.instructions.append('s3_handler') def run(self): @@ -566,7 +566,7 @@ def run(self): operation_name=operation_name, service=self._service, endpoint=self._endpoint)] - info_setter = InfoSetter(self._service, self._endpoint, + file_info_builder = FileInfoBuilder(self._service, self._endpoint, self._source_endpoint, self.parameters) s3handler = S3Handler(self.session, 
self.parameters) @@ -578,25 +578,25 @@ def run(self): 'filters': [create_filter(self.parameters), create_filter(self.parameters)], 'comparator': [Comparator(self.parameters)], - 'info_setter': [info_setter], + 'file_info_builder': [file_info_builder], 's3_handler': [s3handler]} elif self.cmd == 'cp': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], - 'info_setter': [info_setter], + 'file_info_builder': [file_info_builder], 's3_handler': [s3handler]} elif self.cmd == 'rm': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], - 'info_setter': [info_setter], + 'file_info_builder': [file_info_builder], 's3_handler': [s3handler]} elif self.cmd == 'mv': command_dict = {'setup': [files], 'file_generator': [file_generator], 'filters': [create_filter(self.parameters)], - 'info_setter': [info_setter], + 'file_info_builder': [file_info_builder], 's3_handler': [s3handler]} elif self.cmd == 'mb': command_dict = {'setup': [taskinfo], diff --git a/tests/integration/customizations/s3/test_filegenerator.py b/tests/integration/customizations/s3/test_filegenerator.py index 59353fc3d39b..0c3912402533 100644 --- a/tests/integration/customizations/s3/test_filegenerator.py +++ b/tests/integration/customizations/s3/test_filegenerator.py @@ -22,7 +22,7 @@ import botocore.session from awscli import EnvironmentVariables -from awscli.customizations.s3.filegenerator import FileGenerator, FileBase +from awscli.customizations.s3.filegenerator import FileGenerator, FileStat from tests.unit.customizations.s3 import make_s3_files, s3_cleanup, \ compare_files @@ -51,14 +51,14 @@ def test_s3_file(self): result_list = list( FileGenerator(self.service, self.endpoint, '').call( input_s3_file)) - file_base = FileBase(src=self.file1, dest='text1.txt', + file_stat = FileStat(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=expected_file_size, 
last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='') - expected_list = [file_base] + expected_list = [file_stat] self.assertEqual(len(result_list), 1) compare_files(self, result_list[0], expected_list[0]) @@ -74,14 +74,14 @@ def test_s3_directory(self): result_list = list( FileGenerator(self.service, self.endpoint, '').call( input_s3_file)) - file_base = FileBase(src=self.file2, + file_stat = FileStat(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=21, last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='') - file_base2 = FileBase(src=self.file1, + file_stat2 = FileStat(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=15, @@ -89,7 +89,7 @@ def test_s3_directory(self): src_type='s3', dest_type='local', operation_name='') - expected_result = [file_base, file_base2] + expected_result = [file_stat, file_stat2] self.assertEqual(len(result_list), 2) compare_files(self, result_list[0], expected_result[0]) compare_files(self, result_list[1], expected_result[1]) @@ -108,7 +108,7 @@ def test_s3_delete_directory(self): 'delete').call( input_s3_file)) - file_base1 = FileBase( + file_stat1 = FileStat( src=self.bucket + '/another_directory/', dest='another_directory' + os.sep, compare_key='another_directory/', @@ -116,7 +116,7 @@ def test_s3_delete_directory(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='delete') - file_base2 = FileBase( + file_stat2 = FileStat( src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', @@ -124,7 +124,7 @@ def test_s3_delete_directory(self): last_update=result_list[1].last_update, src_type='s3', dest_type='local', operation_name='delete') - file_base3 = FileBase( + file_stat3 = FileStat( src=self.file1, dest='text1.txt', compare_key='text1.txt', @@ -133,7 +133,7 @@ def 
test_s3_delete_directory(self): src_type='s3', dest_type='local', operation_name='delete') - expected_list = [file_base1, file_base2, file_base3] + expected_list = [file_stat1, file_stat2, file_stat3] self.assertEqual(len(result_list), 3) compare_files(self, result_list[0], expected_list[0]) compare_files(self, result_list[1], expected_list[1]) diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index b96b3f968a4c..56801f0a9970 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -26,7 +26,8 @@ import botocore.session import six -from awscli.testutils import unittest, aws, FileCreator +from awscli.testutils import unittest, FileCreator +from awscli.testutils import aws as _aws from tests.unit.customizations.s3 import create_bucket as _create_bucket from awscli.customizations.s3 import constants @@ -41,6 +42,14 @@ def cd(directory): os.chdir(original) +def aws(command, collect_memory=False, env_vars=None, wait_for_finish=True): + if not env_vars: + env_vars = os.environ.copy() + env_vars['AWS_DEFAULT_REGION'] = "us-west-2" + return _aws(command, collect_memory=collect_memory, env_vars=env_vars, + wait_for_finish=wait_for_finish) + + class BaseS3CLICommand(unittest.TestCase): """Base class for aws s3 command. @@ -53,7 +62,7 @@ def setUp(self): self.session = botocore.session.get_session() self.service = self.session.get_service('s3') self.regions = {} - self.region = 'us-east-1' + self.region = 'us-west-2' self.endpoint = self.service.get_endpoint(self.region) self.extra_setup() @@ -597,6 +606,9 @@ def test_sync_with_delete_option_with_same_prefix(self): class TestSourceRegion(BaseS3CLICommand): def extra_setup(self): name_comp = [] + # This creates a non DNS compatible bucket name by making two random + # sequences of characters and joining them with a period and + # adding a .com at the end. 
for i in range(2): name_comp.append(''.join(random.sample(string.ascii_lowercase + string.digits,10))) @@ -607,7 +619,7 @@ def extra_setup(self): string.digits,10))) self.dest_name = '.'.join(name_comp + ['com']) self.src_region = 'us-west-1' - self.dest_region = 'us-west-2' + self.dest_region = 'us-east-1' self.src_bucket = self.create_bucket(self.src_name, self.src_region) self.dest_bucket = self.create_bucket(self.dest_name, self.dest_region) diff --git a/tests/unit/customizations/s3/__init__.py b/tests/unit/customizations/s3/__init__.py index 3743c757f1de..5ada082edd3e 100644 --- a/tests/unit/customizations/s3/__init__.py +++ b/tests/unit/customizations/s3/__init__.py @@ -110,7 +110,7 @@ def create_bucket(session, name=None, region=None): """ service = session.get_service('s3') if not region: - region = 'us-east-1' + region = 'us-west-2' endpoint = service.get_endpoint(region) if name: bucket_name = name @@ -150,7 +150,7 @@ def s3_cleanup(bucket, session, key1='text1.txt', key2='text2.txt'): def compare_files(self, result_file, ref_file): """ - Ensures that the FileBase's properties are what they + Ensures that the FileStat's properties are what they are suppose to be. 
""" self.assertEqual(result_file.src, ref_file.src) diff --git a/tests/unit/customizations/s3/test_comparator.py b/tests/unit/customizations/s3/test_comparator.py index 45d5cf5e6960..b8909b248e82 100644 --- a/tests/unit/customizations/s3/test_comparator.py +++ b/tests/unit/customizations/s3/test_comparator.py @@ -14,7 +14,7 @@ import unittest from awscli.customizations.s3.comparator import Comparator -from awscli.customizations.s3.filegenerator import FileBase +from awscli.customizations.s3.filegenerator import FileStat class ComparatorTest(unittest.TestCase): @@ -30,11 +30,11 @@ def test_compare_key_equal(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='local', dest_type='s3', operation_name='upload') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -54,11 +54,11 @@ def test_compare_size(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=11, last_update=time, src_type='local', dest_type='s3', operation_name='upload') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -80,11 +80,11 @@ def test_compare_lastmod_upload(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='local', dest_type='s3', operation_name='upload') - dest_file = FileBase(src='', dest='', + dest_file = 
FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -106,11 +106,11 @@ def test_compare_lastmod_copy(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='s3', dest_type='s3', operation_name='copy') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='s3', operation_name='') @@ -132,11 +132,11 @@ def test_compare_lastmod_download(self): result_list = [] time = datetime.datetime.now() future_time = time + datetime.timedelta(0, 3) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='download') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='local', dest_type='s3', operation_name='') @@ -149,11 +149,11 @@ def test_compare_lastmod_download(self): self.assertEqual(result_list, ref_list) # If the source is newer than the destination do not download. 
- src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=future_time, src_type='s3', dest_type='local', operation_name='download') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='local', dest_type='s3', operation_name='') @@ -177,11 +177,11 @@ def test_compare_key_less(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='bomparator_test.py', size=10, last_update=time, src_type='local', dest_type='s3', operation_name='upload') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -205,11 +205,11 @@ def test_compare_key_greater(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='domparator_test.py', size=10, last_update=time, src_type='local', dest_type='s3', operation_name='upload') - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -234,7 +234,7 @@ def test_empty_src(self): ref_list = [] result_list = [] time = datetime.datetime.now() - dest_file = FileBase(src='', dest='', + dest_file = FileStat(src='', dest='', compare_key='comparator_test.py', size=10, last_update=time, src_type='s3', dest_type='local', operation_name='') @@ -256,7 +256,7 @@ def test_empty_dest(self): ref_list = [] result_list = [] time = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='domparator_test.py', size=10, last_update=time, src_type='local', 
dest_type='s3', operation_name='upload') @@ -294,12 +294,12 @@ def test_compare_size_only_dest_older_than_src(self): time_src = datetime.datetime.now() time_dst = time_src + datetime.timedelta(days=1) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='local', dest_type='s3', operation_name='upload') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='s3', dest_type='local', operation_name='') @@ -315,12 +315,12 @@ def test_compare_size_only_src_older_than_dest(self): time_dst = datetime.datetime.now() time_src = time_dst + datetime.timedelta(days=1) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='local', dest_type='s3', operation_name='upload') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='s3', dest_type='local', operation_name='') @@ -342,12 +342,12 @@ def test_compare_exact_timestamps_dest_older(self): time_src = datetime.datetime.now() time_dst = time_src - datetime.timedelta(days=1) - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='s3', dest_type='local', operation_name='download') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='local', dest_type='s3', operation_name='') @@ -364,12 +364,12 @@ def test_compare_exact_timestamps_src_older(self): time_src = datetime.datetime.now() - datetime.timedelta(days=1) time_dst = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_src, src_type='s3', dest_type='local', 
operation_name='download') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_dst, src_type='local', dest_type='s3', operation_name='') @@ -385,12 +385,12 @@ def test_compare_exact_timestamps_same_age_same_size(self): """ time_both = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='s3', dest_type='local', operation_name='download') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='local', dest_type='s3', operation_name='') @@ -406,12 +406,12 @@ def test_compare_exact_timestamps_same_age_diff_size(self): """ time_both = datetime.datetime.now() - src_file = FileBase(src='', dest='', + src_file = FileStat(src='', dest='', compare_key='test.py', size=20, last_update=time_both, src_type='s3', dest_type='local', operation_name='download') - dst_file = FileBase(src='', dest='', + dst_file = FileStat(src='', dest='', compare_key='test.py', size=10, last_update=time_both, src_type='local', dest_type='s3', operation_name='') diff --git a/tests/unit/customizations/s3/test_filegenerator.py b/tests/unit/customizations/s3/test_filegenerator.py index 8ede65de6536..96b2a2739c2b 100644 --- a/tests/unit/customizations/s3/test_filegenerator.py +++ b/tests/unit/customizations/s3/test_filegenerator.py @@ -20,7 +20,7 @@ import mock from awscli.customizations.s3.filegenerator import FileGenerator, \ - FileDecodingError, FileBase + FileDecodingError, FileStat from awscli.customizations.s3.utils import get_file_stat import botocore.session from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \ @@ -60,11 +60,11 @@ def test_local_file(self): for filename in files: result_list.append(filename) size, last_update = get_file_stat(self.local_file) - file_base = FileBase(src=self.local_file, 
dest='bucket/text1.txt', + file_stat = FileStat(src=self.local_file, dest='bucket/text1.txt', compare_key='text1.txt', size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='') - ref_list = [file_base] + ref_list = [file_stat] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -85,20 +85,20 @@ def test_local_directory(self): for filename in files: result_list.append(filename) size, last_update = get_file_stat(self.local_file) - file_base = FileBase(src=self.local_file, dest='bucket/text1.txt', + file_stat = FileStat(src=self.local_file, dest='bucket/text1.txt', compare_key='text1.txt', size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='') path = self.local_dir + 'another_directory' + os.sep \ + 'text2.txt' size, last_update = get_file_stat(path) - file_base2 = FileBase(src=path, + file_stat2 = FileStat(src=path, dest='bucket/another_directory/text2.txt', compare_key='another_directory/text2.txt', size=size, last_update=last_update, src_type='local', dest_type='s3', operation_name='') - ref_list = [file_base2, file_base] + ref_list = [file_stat2, file_stat] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -217,12 +217,12 @@ def test_no_follow_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_bases = FileGenerator(self.service, self.endpoint, + file_stats = FileGenerator(self.service, self.endpoint, '', False).call(input_local_dir) self.filenames.sort() result_list = [] - for file_base in file_bases: - result_list.append(getattr(file_base, 'src')) + for file_stat in file_stats: + result_list.append(getattr(file_stat, 'src')) self.assertEqual(len(result_list), len(self.filenames)) # Just check to make sure the right local files are generated. 
for i in range(len(result_list)): @@ -239,13 +239,13 @@ def test_follow_bad_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_bases = FileGenerator(self.service, self.endpoint, + file_stats = FileGenerator(self.service, self.endpoint, '', True).call(input_local_dir) result_list = [] rc = 0 try: - for file_base in file_bases: - result_list.append(getattr(file_base, 'src')) + for file_stat in file_stats: + result_list.append(getattr(file_stat, 'src')) rc = 1 except OSError as e: pass @@ -264,13 +264,13 @@ def test_follow_symlink(self): 'dest': {'path': self.bucket, 'type': 's3'}, 'dir_op': True, 'use_src_name': True} - file_bases = FileGenerator(self.service, self.endpoint, + file_stats = FileGenerator(self.service, self.endpoint, '', True).call(input_local_dir) all_filenames = self.filenames + self.symlink_files all_filenames.sort() result_list = [] - for file_base in file_bases: - result_list.append(getattr(file_base, 'src')) + for file_stat in file_stats: + result_list.append(getattr(file_stat, 'src')) self.assertEqual(len(result_list), len(all_filenames)) # Just check to make sure the right local files are generated. 
for i in range(len(result_list)): @@ -390,14 +390,14 @@ def test_s3_file(self): result_list = [] for filename in files: result_list.append(filename) - file_base = FileBase(src=self.file1, dest='text1.txt', + file_stat = FileStat(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=result_list[0].size, last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='') - ref_list = [file_base] + ref_list = [file_stat] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -417,7 +417,7 @@ def test_s3_directory(self): result_list = [] for filename in files: result_list.append(filename) - file_base = FileBase(src=self.file2, + file_stat = FileStat(src=self.file2, dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', @@ -425,7 +425,7 @@ def test_s3_directory(self): last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='') - file_base2 = FileBase(src=self.file1, + file_stat2 = FileStat(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=result_list[1].size, @@ -433,7 +433,7 @@ def test_s3_directory(self): src_type='s3', dest_type='local', operation_name='') - ref_list = [file_base, file_base2] + ref_list = [file_stat, file_stat2] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) @@ -453,21 +453,21 @@ def test_s3_delete_directory(self): for filename in files: result_list.append(filename) - file_base1 = FileBase(src=self.bucket + '/another_directory/', + file_stat1 = FileStat(src=self.bucket + '/another_directory/', dest='another_directory' + os.sep, compare_key='another_directory/', size=result_list[0].size, last_update=result_list[0].last_update, src_type='s3', dest_type='local', operation_name='delete') - file_base2 = FileBase(src=self.file2, + file_stat2 = FileStat(src=self.file2, 
dest='another_directory' + os.sep + 'text2.txt', compare_key='another_directory/text2.txt', size=result_list[1].size, last_update=result_list[1].last_update, src_type='s3', dest_type='local', operation_name='delete') - file_base3 = FileBase(src=self.file1, + file_stat3 = FileStat(src=self.file1, dest='text1.txt', compare_key='text1.txt', size=result_list[2].size, @@ -475,7 +475,7 @@ def test_s3_delete_directory(self): src_type='s3', dest_type='local', operation_name='delete') - ref_list = [file_base1, file_base2, file_base3] + ref_list = [file_stat1, file_stat2, file_stat3] self.assertEqual(len(result_list), len(ref_list)) for i in range(len(result_list)): compare_files(self, result_list[i], ref_list[i]) diff --git a/tests/unit/customizations/s3/test_infosetter.py b/tests/unit/customizations/s3/test_fileinfobuilder.py similarity index 73% rename from tests/unit/customizations/s3/test_infosetter.py rename to tests/unit/customizations/s3/test_fileinfobuilder.py index 2cf197a07fe2..439c006ad136 100644 --- a/tests/unit/customizations/s3/test_infosetter.py +++ b/tests/unit/customizations/s3/test_fileinfobuilder.py @@ -13,17 +13,17 @@ import mock from awscli.testutils import unittest -from awscli.customizations.s3.filegenerator import FileBase +from awscli.customizations.s3.filegenerator import FileStat from awscli.customizations.s3.fileinfo import FileInfo -from awscli.customizations.s3.infosetter import InfoSetter +from awscli.customizations.s3.fileinfobuilder import FileInfoBuilder -class TestInfoSetter(unittest.TestCase): +class TestFileInfoBuilder(unittest.TestCase): def test_info_setter(self): - info_setter = InfoSetter(service='service', endpoint='endpoint', - source_endpoint='source_endpoint', - parameters='parameters') - files = [FileBase(src='src', dest='dest', compare_key='compare_key', + info_setter = FileInfoBuilder(service='service', endpoint='endpoint', + source_endpoint='source_endpoint', + parameters='parameters') + files = [FileStat(src='src', 
dest='dest', compare_key='compare_key', size='size', last_update='last_update', src_type='src_type', dest_type='dest_type', operation_name='operation_name')] diff --git a/tests/unit/customizations/s3/test_filters.py b/tests/unit/customizations/s3/test_filters.py index b3f49e5029b5..eaa5f89e485b 100644 --- a/tests/unit/customizations/s3/test_filters.py +++ b/tests/unit/customizations/s3/test_filters.py @@ -14,7 +14,7 @@ from awscli.testutils import unittest import platform -from awscli.customizations.s3.filegenerator import FileBase +from awscli.customizations.s3.filegenerator import FileStat from awscli.customizations.s3.filters import Filter @@ -29,23 +29,23 @@ def platform_path(filepath): class FiltersTest(unittest.TestCase): def setUp(self): self.local_files = [ - self.file_base('test.txt'), - self.file_base('test.jpg'), - self.file_base(os.path.join('directory', 'test.jpg')), + self.file_stat('test.txt'), + self.file_stat('test.jpg'), + self.file_stat(os.path.join('directory', 'test.jpg')), ] self.s3_files = [ - self.file_base('bucket/test.txt', src_type='s3'), - self.file_base('bucket/test.jpg', src_type='s3'), - self.file_base('bucket/key/test.jpg', src_type='s3'), + self.file_stat('bucket/test.txt', src_type='s3'), + self.file_stat('bucket/test.jpg', src_type='s3'), + self.file_stat('bucket/key/test.jpg', src_type='s3'), ] - def file_base(self, filename, src_type='local'): + def file_stat(self, filename, src_type='local'): if src_type == 'local': filename = os.path.abspath(filename) dest_type = 's3' else: dest_type = 'local' - return FileBase(src=filename, dest='', + return FileStat(src=filename, dest='', compare_key='', size=10, last_update=0, src_type=src_type, dest_type=dest_type, operation_name='') @@ -116,15 +116,15 @@ def test_prefix_filtering_consistent(self): # The same filter should work for both local and remote files. 
# So if I have a directory with 2 files: local_files = [ - self.file_base('test1.txt'), - self.file_base('nottest1.txt'), + self.file_stat('test1.txt'), + self.file_stat('nottest1.txt'), ] - # And the same 2 files remote (note that the way FileBase objects + # And the same 2 files remote (note that the way FileStat objects # are constructed, we'll have the bucket name but no leading '/' # character): remote_files = [ - self.file_base('bucket/test1.txt', src_type='s3'), - self.file_base('bucket/nottest1.txt', src_type='s3'), + self.file_stat('bucket/test1.txt', src_type='s3'), + self.file_stat('bucket/nottest1.txt', src_type='s3'), ] # If I apply the filter to the local to the local files. exclude_filter = self.create_filter([['exclude', 't*']]) @@ -143,9 +143,9 @@ def test_prefix_filtering_consistent(self): def test_bucket_exclude_with_prefix(self): s3_files = [ - self.file_base('bucket/dir1/key1.txt', src_type='s3'), - self.file_base('bucket/dir1/key2.txt', src_type='s3'), - self.file_base('bucket/dir1/notkey3.txt', src_type='s3'), + self.file_stat('bucket/dir1/key1.txt', src_type='s3'), + self.file_stat('bucket/dir1/key2.txt', src_type='s3'), + self.file_stat('bucket/dir1/notkey3.txt', src_type='s3'), ] filtered_files = list( self.create_filter([['exclude', 'dir1/*']], @@ -160,7 +160,7 @@ def test_bucket_exclude_with_prefix(self): def test_root_dir(self): p = platform_path - local_files = [self.file_base(p('/foo/bar/baz.txt'), src_type='local')] + local_files = [self.file_stat(p('/foo/bar/baz.txt'), src_type='local')] local_filter = self.create_filter([['exclude', 'baz.txt']], root=p('/foo/bar/')) filtered = list(local_filter.call(local_files)) diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 9ce310586daf..5a6c2c978397 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -154,11 +154,14 @@ def test_create_instructions(self): """ 
cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb'] - instructions = {'cp': ['file_generator', 'info_setter', 's3_handler'], - 'mv': ['file_generator', 'info_setter', 's3_handler'], - 'rm': ['file_generator', 'info_setter', 's3_handler'], + instructions = {'cp': ['file_generator', 'file_info_builder', + 's3_handler'], + 'mv': ['file_generator', 'file_info_builder', + 's3_handler'], + 'rm': ['file_generator', 'file_info_builder', + 's3_handler'], 'sync': ['file_generator', 'comparator', - 'info_setter', 's3_handler'], + 'file_info_builder', 's3_handler'], 'mb': ['s3_handler'], 'rb': ['s3_handler']} @@ -176,7 +179,8 @@ def test_create_instructions(self): cmd_arc = CommandArchitecture(self.session, 'cp', params) cmd_arc.create_instructions() self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters', - 'info_setter', 's3_handler']) + 'file_info_builder', + 's3_handler']) def test_run_cp_put(self): # This ensures that the architecture sets up correctly for a ``cp`` put