From 144e6bbb18c4cc7a0881d325976bf07ddf382ef2 Mon Sep 17 00:00:00 2001 From: kyleknap Date: Mon, 22 Sep 2014 22:23:06 -0700 Subject: [PATCH 1/2] Errors and warnings are printed to stderr. --- CHANGELOG.rst | 3 ++ awscli/customizations/s3/executor.py | 14 +++++++-- awscli/customizations/s3/utils.py | 20 +++++++------ .../customizations/s3/test_plugin.py | 30 +++++++++---------- .../customizations/s3/test_s3handler.py | 8 ++--- tests/unit/customizations/s3/test_executor.py | 14 +++++++-- .../unit/customizations/s3/test_s3handler.py | 4 +-- .../customizations/s3/test_subcommands.py | 9 ++++-- 8 files changed, 64 insertions(+), 38 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c87b5e3cc068..ea08277d24c5 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -9,6 +9,9 @@ Next Release (TBD) ``aws configure set`` command now write out all credential variables to the shared credentials file ``~/.aws/credentials`` (`issue 847 `__) +* bugfix:``aws s3``: Write warnings and errors to standard error as + opposed to standard output. + (`issue 919 `__) 1.4.4 diff --git a/awscli/customizations/s3/executor.py b/awscli/customizations/s3/executor.py index 872f181ef055..7eee384dab7d 100644 --- a/awscli/customizations/s3/executor.py +++ b/awscli/customizations/s3/executor.py @@ -274,13 +274,15 @@ def run(self): def _process_print_task(self, print_task): print_str = print_task.message + print_to_stderr = False if print_task.error: self.num_errors_seen += 1 + print_to_stderr = True warning = False if print_task.warning: - if print_task.warning: - warning = True - self.num_warnings_seen += 1 + warning = True + self.num_warnings_seen += 1 + print_to_stderr = True final_str = '' if warning: final_str += print_str.ljust(self._progress_length, ' ') @@ -309,6 +311,12 @@ def _process_print_task(self, print_task): self._num_parts += 1 self._file_count += 1 + # If the message is an error or warning, print it to standard error. + if print_to_stderr == True and not self._quiet: + uni_print(final_str, sys.stderr) + sys.stderr.flush() + final_str = '' + is_done = self._total_files == self._file_count if not is_done: prog_str = "Completed %s " % self._num_parts diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py index 56fa247f2876..b7d2c7c97d94 100644 --- a/awscli/customizations/s3/utils.py +++ b/awscli/customizations/s3/utils.py @@ -223,24 +223,26 @@ def __init__(self): self.count = 0 -def uni_print(statement): +def uni_print(statement, out_file=None): """ - This function is used to properly write unicode to stdout. It - ensures that the proper encoding is used if the statement is - not in a version type of string. The initial check is to - allow if ``sys.stdout`` does not use an encoding + This function is used to properly write unicode to a file, usually + stdout or stdderr. It ensures that the proper encoding is used if the + statement is not a string type. The initial check is to allow if + ``out_file`` does not use an encoding. """ - encoding = getattr(sys.stdout, 'encoding', None) + if out_file is None: + out_file = sys.stdout + encoding = getattr(out_file, 'encoding', None) if encoding is not None and not PY3: - sys.stdout.write(statement.encode(sys.stdout.encoding)) + out_file.write(statement.encode(out_file.encoding)) else: try: - sys.stdout.write(statement) + out_file.write(statement) except UnicodeEncodeError: # Some file like objects like cStringIO will # try to decode as ascii. Interestingly enough # this works with a normal StringIO. 
- sys.stdout.write(statement.encode('utf-8')) + out_file.write(statement.encode('utf-8')) def guess_content_type(filename): diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index f2da15b9ae57..186533c555b2 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -164,10 +164,10 @@ def assert_no_errors(self, p): self.assertEqual( p.rc, 0, "Non zero rc (%s) received: %s" % (p.rc, p.stdout + p.stderr)) - self.assertNotIn("Error:", p.stdout) - self.assertNotIn("failed:", p.stdout) - self.assertNotIn("client error", p.stdout) - self.assertNotIn("server error", p.stdout) + self.assertNotIn("Error:", p.stderr) + self.assertNotIn("failed:", p.stderr) + self.assertNotIn("client error", p.stderr) + self.assertNotIn("server error", p.stderr) class TestMoveCommand(BaseS3CLICommand): @@ -458,7 +458,7 @@ def test_download_non_existent_key(self): expected_err_msg = ( 'A client error (NoSuchKey) occurred when calling the ' 'HeadObject operation: Key "foo.txt" does not exist') - self.assertIn(expected_err_msg, p.stdout) + self.assertIn(expected_err_msg, p.stderr) class TestSync(BaseS3CLICommand): @@ -645,7 +645,7 @@ def testFailWithoutRegion(self): p2 = aws('s3 sync s3://%s/ s3://%s/ --region %s' % (self.src_bucket, self.dest_bucket, self.src_region)) self.assertEqual(p2.rc, 1, p2.stdout) - self.assertIn('PermanentRedirect', p2.stdout) + self.assertIn('PermanentRedirect', p2.stderr) def testCpRegion(self): self.files.create_file('foo.txt', 'foo') @@ -695,9 +695,9 @@ def extra_setup(self): def test_no_exist(self): filename = os.path.join(self.files.rootdir, "no-exists-file") p = aws('s3 cp %s s3://%s/' % (filename, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File does not exist.' % - filename, p.stdout) + filename, p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], 'Read permissions tests only supported on mac/linux') @@ -711,9 +711,9 @@ def test_no_read_access(self): permissions = permissions ^ stat.S_IREAD os.chmod(filename, permissions) p = aws('s3 cp %s s3://%s/' % (filename, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File/Directory is ' - 'not readable.' % filename, p.stdout) + 'not readable.' % filename, p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], 'Special files only supported on mac/linux') @@ -723,10 +723,10 @@ def test_is_special_file(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(file_path) p = aws('s3 cp %s s3://%s/' % (file_path, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn(("warning: Skipping file %s. File is character " "special device, block special device, FIFO, or " - "socket." % file_path), p.stdout) + "socket." % file_path), p.stderr) @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'], @@ -806,10 +806,10 @@ def test_follow_symlinks_default(self): def test_bad_symlink(self): p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, self.bucket_name)) - self.assertEqual(p.rc, 2, p.stdout) + self.assertEqual(p.rc, 2, p.stderr) self.assertIn('warning: Skipping file %s. File does not exist.' 
% os.path.join(self.files.rootdir, 'b-badsymlink'), - p.stdout) + p.stderr) class TestUnicode(BaseS3CLICommand): @@ -945,7 +945,7 @@ def test_mb_rb(self): def test_fail_mb_rb(self): # Choose a bucket name that already exists. p = aws('s3 mb s3://mybucket') - self.assertIn("BucketAlreadyExists", p.stdout) + self.assertIn("BucketAlreadyExists", p.stderr) self.assertEqual(p.rc, 1) diff --git a/tests/integration/customizations/s3/test_s3handler.py b/tests/integration/customizations/s3/test_s3handler.py index 4d46a2381173..955f428ded7a 100644 --- a/tests/integration/customizations/s3/test_s3handler.py +++ b/tests/integration/customizations/s3/test_s3handler.py @@ -163,12 +163,12 @@ def setUp(self): self.s3_files = [self.bucket + '/text1.txt', self.bucket + '/another_directory/text2.txt'] self.output = StringIO() - self.saved_stdout = sys.stdout - sys.stdout = self.output + self.saved_stderr = sys.stderr + sys.stderr = self.output def tearDown(self): self.output.close() - sys.stdout = self.saved_stdout + sys.stderr = self.saved_stderr clean_loc_files(self.loc_files) s3_cleanup(self.bucket, self.session) @@ -215,7 +215,7 @@ def setUp(self): self.session = botocore.session.get_session(EnvironmentVariables) self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1', 'acl': ['private']} + params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} self.s3_handler = S3Handler(self.session, params) self.bucket = make_s3_files(self.session, key1=u'\u2713') self.bucket2 = create_bucket(self.session) diff --git a/tests/unit/customizations/s3/test_executor.py b/tests/unit/customizations/s3/test_executor.py index 9afaacd3ba22..cf172aaa4ce6 100644 --- a/tests/unit/customizations/s3/test_executor.py +++ b/tests/unit/customizations/s3/test_executor.py @@ -90,12 +90,20 @@ def __call__(self): executor.wait_until_shutdown() self.assertEqual(open(f.name, 'rb').read(), b'foobar') + class TestPrintThread(unittest.TestCase): + def test_print_error(self): + result_queue = queue.Queue() + print_task = PrintTask(message="Fail File.", error=True) + thread = PrintThread(result_queue, False) + with mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr: + thread._process_print_task(print_task) + self.assertIn("Fail File.", mock_stderr.getvalue()) + def test_print_warning(self): result_queue = queue.Queue() print_task = PrintTask(message="Bad File.", warning=True) thread = PrintThread(result_queue, False) - with mock.patch('sys.stdout', new=six.StringIO()) as mock_stdout: + with mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr: thread._process_print_task(print_task) - self.assertIn("Bad File.", mock_stdout.getvalue()) - + self.assertIn("Bad File.", mock_stderr.getvalue()) diff --git a/tests/unit/customizations/s3/test_s3handler.py b/tests/unit/customizations/s3/test_s3handler.py index 20bc3a62a858..5a5b416f8ad0 100644 --- a/tests/unit/customizations/s3/test_s3handler.py +++ b/tests/unit/customizations/s3/test_s3handler.py @@ -140,7 +140,7 @@ def setUp(self): self.session = FakeSession() self.service = self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1', 'acl': ['private']} + params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} self.s3_handler = S3Handler(self.session, params) self.s3_handler_multi = S3Handler(self.session, multi_threshold=10, chunksize=2, @@ -275,7 +275,7 @@ def setUp(self): self.session = FakeSession(True, True) self.service 
= self.session.get_service('s3') self.endpoint = self.service.get_endpoint('us-east-1') - params = {'region': 'us-east-1'} + params = {'region': 'us-east-1', 'quiet': True} self.s3_handler_multi = S3Handler(self.session, params, multi_threshold=10, chunksize=2) self.bucket = create_bucket(self.session) diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 363fc496889f..755ad4fdb675 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -114,12 +114,17 @@ def setUp(self): self.bucket = make_s3_files(self.session) self.loc_files = make_loc_files() self.output = StringIO() + self.err_output = StringIO() self.saved_stdout = sys.stdout + self.saved_stderr = sys.stderr sys.stdout = self.output + sys.stderr = self.err_output def tearDown(self): self.output.close() + self.err_output.close() sys.stdout = self.saved_stdout + sys.stderr = self.saved_stderr super(CommandArchitectureTest, self).tearDown() clean_loc_files(self.loc_files) @@ -222,7 +227,7 @@ def test_error_on_same_line_as_status(self): output_str = ( "upload failed: %s to %s Error: Bucket does not exist\n" % ( rel_local_file, s3_file)) - self.assertIn(output_str, self.output.getvalue()) + self.assertIn(output_str, self.err_output.getvalue()) def test_run_cp_get(self): # This ensures that the architecture sets up correctly for a ``cp`` get @@ -362,7 +367,7 @@ def test_run_rb_nonzero_rc(self): cmd_arc.create_instructions() rc = cmd_arc.run() output_str = "remove_bucket failed: %s" % s3_prefix - self.assertIn(output_str, self.output.getvalue()) + self.assertIn(output_str, self.err_output.getvalue()) self.assertEqual(rc, 1) From 184898b2703cbaa77e87d5d62f7227095e139484 Mon Sep 17 00:00:00 2001 From: kyleknap Date: Wed, 24 Sep 2014 13:26:57 -0700 Subject: [PATCH 2/2] Cleaned up ``print_thread`` code. --- awscli/customizations/s3/executor.py | 39 ++++++++++--------- awscli/customizations/s3/subcommands.py | 3 -- awscli/customizations/s3/utils.py | 5 ++- tests/unit/customizations/s3/test_executor.py | 16 +++++--- 4 files changed, 33 insertions(+), 30 deletions(-) diff --git a/awscli/customizations/s3/executor.py b/awscli/customizations/s3/executor.py index 7eee384dab7d..fc850dffc328 100644 --- a/awscli/customizations/s3/executor.py +++ b/awscli/customizations/s3/executor.py @@ -278,13 +278,11 @@ def _process_print_task(self, print_task): if print_task.error: self.num_errors_seen += 1 print_to_stderr = True - warning = False + + final_str = '' if print_task.warning: - warning = True self.num_warnings_seen += 1 print_to_stderr = True - final_str = '' - if warning: final_str += print_str.ljust(self._progress_length, ' ') final_str += '\n' elif print_task.total_parts: @@ -312,26 +310,29 @@ def _process_print_task(self, print_task): self._file_count += 1 # If the message is an error or warning, print it to standard error. 
- if print_to_stderr == True and not self._quiet: + if print_to_stderr and not self._quiet: uni_print(final_str, sys.stderr) - sys.stderr.flush() final_str = '' is_done = self._total_files == self._file_count if not is_done: - prog_str = "Completed %s " % self._num_parts - num_files = self._total_files - if self._total_files != '...': - prog_str += "of %s " % self._total_parts - num_files = self._total_files - self._file_count - prog_str += "part(s) with %s file(s) remaining" % \ - num_files - length_prog = len(prog_str) - prog_str += '\r' - prog_str = prog_str.ljust(self._progress_length, ' ') - self._progress_length = length_prog - final_str += prog_str + final_str += self._make_progress_bar() if not self._quiet: uni_print(final_str) self._needs_newline = not final_str.endswith('\n') - sys.stdout.flush() + + def _make_progress_bar(self): + """Creates the progress bar string to print out.""" + + prog_str = "Completed %s " % self._num_parts + num_files = self._total_files + if self._total_files != '...': + prog_str += "of %s " % self._total_parts + num_files = self._total_files - self._file_count + prog_str += "part(s) with %s file(s) remaining" % \ + num_files + length_prog = len(prog_str) + prog_str += '\r' + prog_str = prog_str.ljust(self._progress_length, ' ') + self._progress_length = length_prog + return prog_str diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py index 6ec6856294d9..bbe6dda1d8d1 100644 --- a/awscli/customizations/s3/subcommands.py +++ b/awscli/customizations/s3/subcommands.py @@ -272,7 +272,6 @@ def _display_page(self, response_data, use_basename=True): pre_string = "PRE".rjust(30, " ") print_str = pre_string + ' ' + prefix + '/\n' uni_print(print_str) - sys.stdout.flush() for content in contents: last_mod_str = self._make_last_mod_str(content['LastModified']) size_str = self._make_size_str(content['Size']) @@ -284,7 +283,6 @@ def _display_page(self, response_data, use_basename=True): print_str = last_mod_str + ' ' + size_str + ' ' + \ filename + '\n' uni_print(print_str) - sys.stdout.flush() def _list_all_buckets(self): operation = self.service.get_operation('ListBuckets') @@ -294,7 +292,6 @@ def _list_all_buckets(self): last_mod_str = self._make_last_mod_str(bucket['CreationDate']) print_str = last_mod_str + ' ' + bucket['Name'] + '\n' uni_print(print_str) - sys.stdout.flush() def _list_all_objects_recursive(self, bucket, key, page_size=None): operation = self.service.get_operation('ListObjects') diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py index b7d2c7c97d94..97b17933d9a8 100644 --- a/awscli/customizations/s3/utils.py +++ b/awscli/customizations/s3/utils.py @@ -227,11 +227,11 @@ def uni_print(statement, out_file=None): """ This function is used to properly write unicode to a file, usually stdout or stdderr. It ensures that the proper encoding is used if the - statement is not a string type. The initial check is to allow if - ``out_file`` does not use an encoding. + statement is not a string type. """ if out_file is None: out_file = sys.stdout + # Check for an encoding on the file. encoding = getattr(out_file, 'encoding', None) if encoding is not None and not PY3: out_file.write(statement.encode(out_file.encoding)) @@ -243,6 +243,7 @@ def uni_print(statement, out_file=None): # try to decode as ascii. Interestingly enough # this works with a normal StringIO. 
out_file.write(statement.encode('utf-8')) + out_file.flush() def guess_content_type(filename): diff --git a/tests/unit/customizations/s3/test_executor.py b/tests/unit/customizations/s3/test_executor.py index cf172aaa4ce6..7c803fe693e7 100644 --- a/tests/unit/customizations/s3/test_executor.py +++ b/tests/unit/customizations/s3/test_executor.py @@ -92,18 +92,22 @@ def __call__(self): class TestPrintThread(unittest.TestCase): + def setUp(self): + self.result_queue = queue.Queue() + self.thread = PrintThread(result_queue=self.result_queue, quiet=False) + def test_print_error(self): - result_queue = queue.Queue() print_task = PrintTask(message="Fail File.", error=True) - thread = PrintThread(result_queue, False) with mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr: - thread._process_print_task(print_task) + self.result_queue.put(print_task) + self.result_queue.put(ShutdownThreadRequest()) + self.thread.run() self.assertIn("Fail File.", mock_stderr.getvalue()) def test_print_warning(self): - result_queue = queue.Queue() print_task = PrintTask(message="Bad File.", warning=True) - thread = PrintThread(result_queue, False) with mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr: - thread._process_print_task(print_task) + self.result_queue.put(print_task) + self.result_queue.put(ShutdownThreadRequest()) + self.thread.run() self.assertIn("Bad File.", mock_stderr.getvalue())
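
For reference, a lightly condensed sketch of how ``uni_print`` in awscli/customizations/s3/utils.py behaves once both patches are applied (assembled from the hunks above; the ``import sys`` line and the ``PY3`` stand-in are added only to make the sketch self-contained, since the real module already defines both):

    import sys

    PY3 = sys.version_info[0] >= 3  # stand-in for the module's existing PY3 flag

    def uni_print(statement, out_file=None):
        """Write a unicode statement to out_file, defaulting to stdout."""
        if out_file is None:
            out_file = sys.stdout
        # Check for an encoding on the file.
        encoding = getattr(out_file, 'encoding', None)
        if encoding is not None and not PY3:
            out_file.write(statement.encode(out_file.encoding))
        else:
            try:
                out_file.write(statement)
            except UnicodeEncodeError:
                # Some file-like objects such as cStringIO try to decode as
                # ascii; encoding to utf-8 works with a normal StringIO.
                out_file.write(statement.encode('utf-8'))
        out_file.flush()

Moving the ``flush()`` into ``uni_print`` is what lets the second patch delete the scattered ``sys.stdout.flush()`` and ``sys.stderr.flush()`` calls in executor.py and subcommands.py.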
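
The user-visible change comes down to a single routing rule: warnings and errors go to standard error, while progress and per-file status lines keep going to standard output. A minimal, self-contained illustration of that rule (these are not the CLI's actual classes; the sample messages are modeled on the tests above):

    import sys

    def emit(message, error=False, warning=False, quiet=False):
        # Warnings and errors are routed to stderr, everything else to
        # stdout, mirroring the new PrintThread behaviour.
        out_file = sys.stderr if (error or warning) else sys.stdout
        if not quiet:
            out_file.write(message + '\n')
            out_file.flush()

    emit('Completed 1 of 4 part(s) with 3 file(s) remaining')                 # stdout
    emit('warning: Skipping file foo.txt. File does not exist.', warning=True)  # stderr
    emit('upload failed: foo.txt Error: Bucket does not exist', error=True)      # stderr

In practice the two streams can now be separated with ordinary shell redirection, for example ``aws s3 sync . s3://mybucket/ 2>errors.log`` (bucket name hypothetical), which is what the updated integration tests assert against ``p.stderr``.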