diff --git a/CHANGELOG.rst b/CHANGELOG.rst index dad42f72b3f9..bb4331b930c5 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,34 @@ CHANGELOG ========= + +1.3.22 +====== + +* feature:``aws cwlogs``: Add support for Amazon CloudWatch Logs +* feature:``aws cognito-sync``: Add support for + Amazon Cognito Service +* feature:``aws cognito-identity``: Add support for + Amazon Cognito Identity Service +* feature:``aws route53``: Update ``aws route53`` command to the + latest version +* feature:``aws ec2``: Update ``aws ec2`` command to the + latest version +* bugfix:``aws s3/s3api``: Fix issue where ``--endpoint-url`` + wasn't being used for ``aws s3/s3api`` commands + (`issue 549 <https://github.com/aws/aws-cli/issues/549>`__) +* bugfix:``aws s3 mv``: Fix bug where using the ``aws s3 mv`` + command to move a large file onto itself results in the + file being deleted + (`issue 831 <https://github.com/aws/aws-cli/issues/831>`__) +* bugfix:``aws s3``: Fix issue where parts in a multipart + upload are still being uploaded when a part has failed + (`issue 834 <https://github.com/aws/aws-cli/issues/834>`__) +* bugfix:Windows: Fix issue where ``python.exe`` is on a path + that contains spaces + (`issue 825 <https://github.com/aws/aws-cli/issues/825>`__) + + 1.3.21 ====== diff --git a/awscli/__init__.py b/awscli/__init__.py index 6aea8984ed37..ceb715fff443 100644 --- a/awscli/__init__.py +++ b/awscli/__init__.py @@ -17,7 +17,7 @@ """ import os -__version__ = '1.3.21' +__version__ = '1.3.22' # # Get our data path to be added to botocore's search path diff --git a/awscli/customizations/s3/s3.py b/awscli/customizations/s3/s3.py index f501c732836e..1be544f053ba 100644 --- a/awscli/customizations/s3/s3.py +++ b/awscli/customizations/s3/s3.py @@ -662,6 +662,23 @@ def add_paths(self, paths): self.parameters['dest'] = paths[1] elif len(paths) == 1: self.parameters['dest'] = paths[0] + self._validate_path_args() + + def _validate_path_args(self): + # If we're using a mv command, you can't copy the object onto itself. 
+ params = self.parameters + if self.cmd == 'mv' and self._same_path(params['src'], params['dest']): + raise ValueError("Cannot mv a file onto itself: '%s' - '%s'" % ( + params['src'], params['dest'])) + + def _same_path(self, src, dest): + if not self.parameters['paths_type'] == 's3s3': + return False + elif src == dest: + return True + elif dest.endswith('/'): + src_base = os.path.basename(src) + return src == os.path.join(dest, src_base) def _normalize_s3_trailing_slash(self, paths): for i, path in enumerate(paths): diff --git a/awscli/customizations/s3/tasks.py b/awscli/customizations/s3/tasks.py index d187fb105530..430004ca3ae8 100644 --- a/awscli/customizations/s3/tasks.py +++ b/awscli/customizations/s3/tasks.py @@ -560,10 +560,10 @@ def wait_for_parts_to_finish(self): def wait_for_upload_id(self): with self._upload_id_condition: - while self._upload_id is None: - if self._state == self._CANCELLED: - raise UploadCancelledError("Upload has been cancelled.") - self._upload_id_condition.wait(timeout=1) + while self._upload_id is None and self._state != self._CANCELLED: + self._upload_id_condition.wait(timeout=1) + if self._state == self._CANCELLED: + raise UploadCancelledError("Upload has been cancelled.") return self._upload_id def wait_for_completion(self): diff --git a/awscli/customizations/s3endpoint.py b/awscli/customizations/s3endpoint.py new file mode 100644 index 000000000000..e51a85b7e939 --- /dev/null +++ b/awscli/customizations/s3endpoint.py @@ -0,0 +1,45 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +"""Disable endpoint url customizations for s3. + +There's a customization in botocore such that for S3 operations +we try to fix the S3 endpoint url based on whether a bucket is +dns compatible. We also try to map the endpoint url to the +standard S3 region (s3.amazonaws.com). This normally happens +even if a user provides an --endpoint-url (if the bucket is +DNS compatible). + +This customization ensures that if a user specifies +an --endpoint-url, then we turn off the botocore customization +that messes with endpoint url. + +""" +from functools import partial + +from botocore.handlers import fix_s3_host + + +def register_s3_endpoint(cli): + handler = partial(on_top_level_args_parsed, event_handler=cli) + cli.register('top-level-args-parsed', handler) + + +def on_top_level_args_parsed(parsed_args, event_handler, **kwargs): + # The fix_s3_host has logic to set the endpoint to the + # standard region endpoint for s3 (s3.amazonaws.com) under + # certain conditions. We're making sure that if + # the user provides an --endpoint-url, that entire handler + # is disabled. 
+ if parsed_args.command in ['s3', 's3api'] and \ + parsed_args.endpoint_url is not None: + event_handler.unregister('before-auth.s3', fix_s3_host) diff --git a/awscli/handlers.py b/awscli/handlers.py index 2badab18244f..6d99892b9d9e 100644 --- a/awscli/handlers.py +++ b/awscli/handlers.py @@ -46,6 +46,7 @@ from awscli.customizations.cloudsearch import initialize as cloudsearch_init from awscli.customizations.emr.emr import emr_initialize from awscli.customizations.cloudsearchdomain import register_cloudsearchdomain +from awscli.customizations.s3endpoint import register_s3_endpoint def awscli_initialize(event_handlers): @@ -94,3 +95,4 @@ def awscli_initialize(event_handlers): cloudsearch_init(event_handlers) emr_initialize(event_handlers) register_cloudsearchdomain(event_handlers) + register_s3_endpoint(event_handlers) diff --git a/bin/aws.cmd b/bin/aws.cmd index e9ff63974307..9baf5302b4dd 100644 --- a/bin/aws.cmd +++ b/bin/aws.cmd @@ -1,7 +1,7 @@ @echo OFF REM=""" setlocal -set PythonExe= +set PythonExe="" set PythonExeFlags= for %%i in (cmd bat exe) do ( @@ -16,13 +16,13 @@ for /f "tokens=2 delims==" %%i in ('assoc .py') do ( ) ) ) -"%PythonExe%" -x %PythonExeFlags% "%~f0" %* +%PythonExe% -x %PythonExeFlags% "%~f0" %* goto :EOF :SetPythonExe -if not [%1]==[""] ( - if ["%PythonExe%"]==[""] ( - set PythonExe=%~1 +if not ["%~1"]==[""] ( + if [%PythonExe%]==[""] ( + set PythonExe="%~1" ) ) goto :EOF diff --git a/doc/source/conf.py b/doc/source/conf.py index 426a233c4ed6..c1c3d0749e78 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -52,7 +52,7 @@ # The short X.Y version. version = '1.3.' # The full version, including alpha/beta/rc tags. -release = '1.3.21' +release = '1.3.22' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/setup.py b/setup.py index f3ba063dc590..d6f057645227 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ import awscli -requires = ['botocore>=0.55.0,<0.56.0', +requires = ['botocore>=0.56.0,<0.57.0', 'bcdoc>=0.12.0,<0.13.0', 'six>=1.1.0', 'colorama==0.2.5', diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index 61ffc2c7d9f9..fd6dd0a8e84a 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -247,6 +247,27 @@ def test_mv_to_nonexistent_bucket(self): p = aws('s3 mv %s s3://bad-noexist-13143242/foo.txt' % (full_path,)) self.assertEqual(p.rc, 1) + def test_cant_move_file_onto_itself_small_file(self): + # We don't even need a remote file in this case. We can + # immediately validate that we can't move a file onto itself. + bucket_name = self.create_bucket() + self.put_object(bucket_name, key_name='key.txt', contents='foo') + p = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket_name, bucket_name)) + self.assertEqual(p.rc, 255) + self.assertIn('Cannot mv a file onto itself', p.stderr) + + def test_cant_move_large_file_onto_itself(self): + # At the API level, you can multipart copy an object onto itself, + # but a mv command doesn't make sense because a mv is just a + # cp + an rm of the src file. We should be consistent and + # not allow large files to be mv'd onto themselves. 
+ file_contents = six.BytesIO(b'a' * (1024 * 1024 * 10)) + bucket_name = self.create_bucket() + self.put_object(bucket_name, key_name='key.txt', contents=file_contents) + p = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket_name, bucket_name)) + self.assertEqual(p.rc, 255) + self.assertIn('Cannot mv a file onto itself', p.stderr) + class TestRm(BaseS3CLICommand): @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], diff --git a/tests/unit/customizations/s3/test_mv_command.py b/tests/unit/customizations/s3/test_mv_command.py new file mode 100644 index 000000000000..2c296efddf1e --- /dev/null +++ b/tests/unit/customizations/s3/test_mv_command.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from awscli.testutils import BaseAWSCommandParamsTest, FileCreator +import re + +import mock +import six + + +class TestMvCommand(BaseAWSCommandParamsTest): + + prefix = 's3 mv ' + + def setUp(self): + super(TestMvCommand, self).setUp() + self.files = FileCreator() + + def tearDown(self): + super(TestMvCommand, self).tearDown() + self.files.remove_all() + + def test_cant_mv_object_onto_itself(self): + cmdline = '%s s3://bucket/key s3://bucket/key' % self.prefix + stderr = self.run_cmd(cmdline, expected_rc=255)[1] + self.assertIn('Cannot mv a file onto itself', stderr) + + def test_cant_mv_object_with_implied_name(self): + # The "key" key name is implied in the dst argument. 
+ cmdline = '%s s3://bucket/key s3://bucket/' % self.prefix + stderr = self.run_cmd(cmdline, expected_rc=255)[1] + self.assertIn('Cannot mv a file onto itself', stderr) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/customizations/s3/test_tasks.py b/tests/unit/customizations/s3/test_tasks.py index 051d21035076..4451c85cb569 100644 --- a/tests/unit/customizations/s3/test_tasks.py +++ b/tests/unit/customizations/s3/test_tasks.py @@ -223,6 +223,26 @@ def test_can_cancel_tasks(self): with self.assertRaises(UploadCancelledError): self.context.wait_for_parts_to_finish() + def test_cancel_after_upload_id(self): + # We want have a thread waiting for the upload id. + upload_part_thread = threading.Thread(target=self.upload_part, + args=(1,)) + self.start_thread(upload_part_thread) + + # We announce the upload id. + self.create_upload('my_upload_id') + # The upload_part thread can now proceed, + # now, let's cancel this upload. + self.context.cancel_upload() + + # The upload_part_thread should be finished. + self.join_threads() + + # In a cancelled multipart upload task any subsequent + # call to wait_for_upload_id must raise an UploadCancelledError + with self.assertRaises(UploadCancelledError): + self.context.wait_for_upload_id() + def test_cancel_threads_waiting_for_completion(self): # So we have a thread waiting for the entire upload to complete. arbitrary_waiting_thread = threading.Thread(target=self.wait_for_upload_complete) diff --git a/tests/unit/customizations/test_s3endpoint.py b/tests/unit/customizations/test_s3endpoint.py new file mode 100644 index 000000000000..1f83f2e5b129 --- /dev/null +++ b/tests/unit/customizations/test_s3endpoint.py @@ -0,0 +1,43 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from awscli.testutils import unittest +from awscli.customizations.s3endpoint import on_top_level_args_parsed + +from botocore.handlers import fix_s3_host + +import mock + + +class TestS3EndpointURL(unittest.TestCase): + def test_endpoint_url_unregisters_fix_s3_host(self): + args = mock.Mock() + args.endpoint_url = 'http://custom/' + args.command = 's3' + event_handler = mock.Mock() + on_top_level_args_parsed(args, event_handler) + event_handler.unregister.assert_called_with('before-auth.s3', fix_s3_host) + + def test_unregister_not_called_for_no_endpoint(self): + args = mock.Mock() + args.endpoint_url = None + event_handler = mock.Mock() + on_top_level_args_parsed(args, event_handler) + self.assertFalse(event_handler.unregister.called) + + def test_endpoint_url_set_but_not_for_s3(self): + args = mock.Mock() + args.endpoint_url = 'http://custom/' + args.command = 'NOTS3' + event_handler = mock.Mock() + on_top_level_args_parsed(args, event_handler) + self.assertFalse(event_handler.unregister.called) diff --git a/tests/unit/test_completer.py b/tests/unit/test_completer.py index 6cfce44be92d..82a2d3392120 100644 --- a/tests/unit/test_completer.py +++ b/tests/unit/test_completer.py @@ -29,11 +29,12 @@ COMPLETIONS = [ ('aws ', -1, set(['autoscaling', 'cloudformation', 'cloudsearch', 'cloudsearchdomain', 'cloudtrail', 'cloudwatch', - 'configure', 'datapipeline', 'directconnect', 'dynamodb', - 'ec2', 'elasticache', 'elasticbeanstalk', - 'elastictranscoder', 'elb', 'iam', 'importexport', - 'kinesis', 'opsworks', 'rds', 'redshift', 'route53', - 's3', 's3api', 'ses', 'sns', 'sqs', 'storagegateway', + 'cognito-identity', 
'cognito-sync', 'configure', + 'datapipeline', 'directconnect', 'dynamodb', 'ec2', + 'elasticache', 'elasticbeanstalk', 'elastictranscoder', + 'elb', 'iam', 'importexport', 'kinesis', 'logs', + 'opsworks', 'rds', 'redshift', 'route53', 's3', 's3api', + 'ses', 'sns', 'sqs', 'storagegateway', 'sts', 'support', 'swf'])), ('aws cloud', -1, set(['cloudformation', 'cloudsearch', 'cloudsearchdomain', 'cloudtrail', 'cloudwatch'])),