diff --git a/awscli/customizations/s3/executor.py b/awscli/customizations/s3/executor.py index 872f181ef055..7eee384dab7d 100644 --- a/awscli/customizations/s3/executor.py +++ b/awscli/customizations/s3/executor.py @@ -274,13 +274,15 @@ def run(self): def _process_print_task(self, print_task): print_str = print_task.message + print_to_stderr = False if print_task.error: self.num_errors_seen += 1 + print_to_stderr = True warning = False if print_task.warning: - if print_task.warning: - warning = True - self.num_warnings_seen += 1 + warning = True + self.num_warnings_seen += 1 + print_to_stderr = True final_str = '' if warning: final_str += print_str.ljust(self._progress_length, ' ') @@ -309,6 +311,12 @@ def _process_print_task(self, print_task): self._num_parts += 1 self._file_count += 1 + # If the message is an error or warning, print it to standard error. + if print_to_stderr == True and not self._quiet: + uni_print(final_str, sys.stderr) + sys.stderr.flush() + final_str = '' + is_done = self._total_files == self._file_count if not is_done: prog_str = "Completed %s " % self._num_parts diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py index 56fa247f2876..b7d2c7c97d94 100644 --- a/awscli/customizations/s3/utils.py +++ b/awscli/customizations/s3/utils.py @@ -223,24 +223,26 @@ def __init__(self): self.count = 0 -def uni_print(statement): +def uni_print(statement, out_file=None): """ - This function is used to properly write unicode to stdout. It - ensures that the proper encoding is used if the statement is - not in a version type of string. The initial check is to - allow if ``sys.stdout`` does not use an encoding + This function is used to properly write unicode to a file, usually + stdout or stderr. It ensures that the proper encoding is used if the + statement is not a string type. The initial check is to allow if + ``out_file`` does not use an encoding. 
""" - encoding = getattr(sys.stdout, 'encoding', None) + if out_file is None: + out_file = sys.stdout + encoding = getattr(out_file, 'encoding', None) if encoding is not None and not PY3: - sys.stdout.write(statement.encode(sys.stdout.encoding)) + out_file.write(statement.encode(out_file.encoding)) else: try: - sys.stdout.write(statement) + out_file.write(statement) except UnicodeEncodeError: # Some file like objects like cStringIO will # try to decode as ascii. Interestingly enough # this works with a normal StringIO. - sys.stdout.write(statement.encode('utf-8')) + out_file.write(statement.encode('utf-8')) def guess_content_type(filename): diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index f2da15b9ae57..f716002f88b1 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -458,7 +458,7 @@ def test_download_non_existent_key(self): expected_err_msg = ( 'A client error (NoSuchKey) occurred when calling the ' 'HeadObject operation: Key "foo.txt" does not exist') - self.assertIn(expected_err_msg, p.stdout) + self.assertIn(expected_err_msg, p.stderr) class TestSync(BaseS3CLICommand): @@ -645,7 +645,7 @@ def testFailWithoutRegion(self): p2 = aws('s3 sync s3://%s/ s3://%s/ --region %s' % (self.src_bucket, self.dest_bucket, self.src_region)) self.assertEqual(p2.rc, 1, p2.stdout) - self.assertIn('PermanentRedirect', p2.stdout) + self.assertIn('PermanentRedirect', p2.stderr) def testCpRegion(self): self.files.create_file('foo.txt', 'foo') @@ -695,9 +695,9 @@ def extra_setup(self): def test_no_exist(self): filename = os.path.join(self.files.rootdir, "no-exists-file") p = aws('s3 cp %s s3://%s/' % (filename, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File does not exist.' 
% - filename, p.stdout) + filename, p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], 'Read permissions tests only supported on mac/linux') @@ -711,9 +711,9 @@ def test_no_read_access(self): permissions = permissions ^ stat.S_IREAD os.chmod(filename, permissions) p = aws('s3 cp %s s3://%s/' % (filename, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File/Directory is ' - 'not readable.' % filename, p.stdout) + 'not readable.' % filename, p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], 'Special files only supported on mac/linux') @@ -723,10 +723,10 @@ def test_is_special_file(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(file_path) p = aws('s3 cp %s s3://%s/' % (file_path, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn(("warning: Skipping file %s. File is character " "special device, block special device, FIFO, or " - "socket." % file_path), p.stdout) + "socket." % file_path), p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], @@ -806,10 +806,10 @@ def test_follow_symlinks_default(self): def test_bad_symlink(self): p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File does not exist.' % os.path.join(self.files.rootdir, 'b-badsymlink'), - p.stdout) + p.stderr) class TestUnicode(BaseS3CLICommand): @@ -945,7 +945,7 @@ def test_mb_rb(self): def test_fail_mb_rb(self): # Choose a bucket name that already exists. 
p = aws('s3 mb s3://mybucket') - self.assertIn("BucketAlreadyExists", p.stdout) + self.assertIn("BucketAlreadyExists", p.stderr) self.assertEqual(p.rc, 1) diff --git a/tests/integration/customizations/s3/test_s3handler.py b/tests/integration/customizations/s3/test_s3handler.py index 4d46a2381173..955f428ded7a 100644 --- a/tests/integration/customizations/s3/test_s3handler.py +++ b/tests/integration/customizations/s3/test_s3handler.py @@ -163,12 +163,12 @@ def setUp(self): self.s3_files = [self.bucket + '/text1.txt', self.bucket + '/another_directory/text2.txt'] self.output = StringIO() - self.saved_stdout = sys.stdout - sys.stdout = self.output + self.saved_stderr = sys.stderr + sys.stderr = self.output def tearDown(self): self.output.close() - sys.stdout = self.saved_stdout + sys.stderr = self.saved_stderr clean_loc_files(self.loc_files) s3_cleanup(self.bucket, self.session) @@ -215,7 +215,7 @@ def setUp(self): self.session = botocore.session.get_session(EnvironmentVariables) self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1', 'acl': ['private']} + params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} self.s3_handler = S3Handler(self.session, params) self.bucket = make_s3_files(self.session, key1=u'\u2713') self.bucket2 = create_bucket(self.session) diff --git a/tests/unit/customizations/s3/test_executor.py b/tests/unit/customizations/s3/test_executor.py index 9afaacd3ba22..cf172aaa4ce6 100644 --- a/tests/unit/customizations/s3/test_executor.py +++ b/tests/unit/customizations/s3/test_executor.py @@ -90,12 +90,20 @@ def __call__(self): executor.wait_until_shutdown() self.assertEqual(open(f.name, 'rb').read(), b'foobar') + class TestPrintThread(unittest.TestCase): + def test_print_error(self): + result_queue = queue.Queue() + print_task = PrintTask(message="Fail File.", error=True) + thread = PrintThread(result_queue, False) + with mock.patch('sys.stderr', 
new=six.StringIO()) as mock_stderr: + thread._process_print_task(print_task) + self.assertIn("Fail File.", mock_stderr.getvalue()) + def test_print_warning(self): result_queue = queue.Queue() print_task = PrintTask(message="Bad File.", warning=True) thread = PrintThread(result_queue, False) - with mock.patch('sys.stdout', new=six.StringIO()) as mock_stdout: + with mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr: thread._process_print_task(print_task) - self.assertIn("Bad File.", mock_stdout.getvalue()) - + self.assertIn("Bad File.", mock_stderr.getvalue()) diff --git a/tests/unit/customizations/s3/test_s3handler.py b/tests/unit/customizations/s3/test_s3handler.py index 20bc3a62a858..5a5b416f8ad0 100644 --- a/tests/unit/customizations/s3/test_s3handler.py +++ b/tests/unit/customizations/s3/test_s3handler.py @@ -140,7 +140,7 @@ def setUp(self): self.session = FakeSession() self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1', 'acl': ['private']} + params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} self.s3_handler = S3Handler(self.session, params) self.s3_handler_multi = S3Handler(self.session, multi_threshold=10, chunksize=2, @@ -275,7 +275,7 @@ def setUp(self): self.session = FakeSession(True, True) self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1'} + params = {'region': 'us-east-1', 'quiet': True} self.s3_handler_multi = S3Handler(self.session, params, multi_threshold=10, chunksize=2) self.bucket = create_bucket(self.session) diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 363fc496889f..755ad4fdb675 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -114,12 +114,17 @@ def setUp(self): self.bucket = make_s3_files(self.session) 
self.loc_files = make_loc_files() self.output = StringIO() + self.err_output = StringIO() self.saved_stdout = sys.stdout + self.saved_stderr = sys.stderr sys.stdout = self.output + sys.stderr = self.err_output def tearDown(self): self.output.close() + self.err_output.close() sys.stdout = self.saved_stdout + sys.stderr = self.saved_stderr super(CommandArchitectureTest, self).tearDown() clean_loc_files(self.loc_files) @@ -222,7 +227,7 @@ def test_error_on_same_line_as_status(self): output_str = ( "upload failed: %s to %s Error: Bucket does not exist\n" % ( rel_local_file, s3_file)) - self.assertIn(output_str, self.output.getvalue()) + self.assertIn(output_str, self.err_output.getvalue()) def test_run_cp_get(self): # This ensures that the architecture sets up correctly for a ``cp`` get @@ -362,7 +367,7 @@ def test_run_rb_nonzero_rc(self): cmd_arc.create_instructions() rc = cmd_arc.run() output_str = "remove_bucket failed: %s" % s3_prefix - self.assertIn(output_str, self.output.getvalue()) + self.assertIn(output_str, self.err_output.getvalue()) self.assertEqual(rc, 1)