From fb51db1427d5a4162a9bd1c1896e9525e472b488 Mon Sep 17 00:00:00 2001
From: James Saryerwinnie
Date: Mon, 2 Feb 2015 13:25:04 -0800
Subject: [PATCH] Cleanup code formatting

---
 awscli/customizations/s3/subcommands.py | 112 ++++++++++++++++--------
 1 file changed, 76 insertions(+), 36 deletions(-)

diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py
index 4e276d633ecc..456abc2e6d19 100644
--- a/awscli/customizations/s3/subcommands.py
+++ b/awscli/customizations/s3/subcommands.py
@@ -32,32 +32,39 @@
     SizeAndLastModifiedSync, NeverSync
 
 
-
 RECURSIVE = {'name': 'recursive', 'action': 'store_true', 'dest': 'dir_op',
              'help_text': (
                  "Command is performed on all files or objects "
                  "under the specified directory or prefix.")}
 
+
 HUMAN_READABLE = {'name': 'human-readable', 'action': 'store_true',
                   'help_text': "Displays file sizes in human readable format."}
 
-SUMMARIZE = {'name': 'summarize', 'action': 'store_true', 'help_text': (
-    "Displays summary information (number of objects, total size).")}
+
+SUMMARIZE = {'name': 'summarize', 'action': 'store_true',
+             'help_text': (
+                 "Displays summary information "
+                 "(number of objects, total size).")}
+
 
 DRYRUN = {'name': 'dryrun', 'action': 'store_true',
           'help_text': (
              "Displays the operations that would be performed using the "
              "specified command without actually running them.")}
 
+
 QUIET = {'name': 'quiet', 'action': 'store_true',
          'help_text': (
             "Does not display the operations performed from the specified "
             "command.")}
 
+
 FORCE = {'name': 'force', 'action': 'store_true',
          'help_text': (
             "Deletes all objects in the bucket including the bucket itself.")}
 
+
 FOLLOW_SYMLINKS = {'name': 'follow-symlinks', 'action': 'store_true',
                    'default': True, 'group_name': 'follow_symlinks',
                    'help_text': (
@@ -69,10 +76,12 @@
                        "nor ``--no-follow-symlinks`` is specifed, the default "
                        "is to follow symlinks.")}
 
+
 NO_FOLLOW_SYMLINKS = {'name': 'no-follow-symlinks', 'action': 'store_false',
                       'dest': 'follow_symlinks', 'default': True,
                       'group_name': 'follow_symlinks'}
 
+
 NO_GUESS_MIME_TYPE = {'name': 'no-guess-mime-type', 'action': 'store_false',
                       'dest': 'guess_mime_type', 'default': True,
                       'help_text': (
@@ -80,23 +89,27 @@
                           "uploaded files. By default the mime type of a "
                           "file is guessed when it is uploaded.")}
 
+
 CONTENT_TYPE = {'name': 'content-type', 'nargs': 1,
                 'help_text': (
                     "Specify an explicit content type for this operation. "
                     "This value overrides any guessed mime types.")}
 
+
 EXCLUDE = {'name': 'exclude', 'action': AppendFilter, 'nargs': 1,
            'dest': 'filters',
            'help_text': (
               "Exclude all files or objects from the command that matches "
               "the specified pattern.")}
 
+
 INCLUDE = {'name': 'include', 'action': AppendFilter, 'nargs': 1,
            'dest': 'filters',
            'help_text': (
               "Don't exclude files or objects "
               "in the command that match the specified pattern")}
 
+
 ACL = {'name': 'acl', 'nargs': 1,
        'choices': ['private', 'public-read', 'public-read-write',
                    'authenticated-read', 'bucket-owner-read',
@@ -108,32 +121,38 @@
            "``bucket-owner-read``, ``bucket-owner-full-control`` and "
            "``log-delivery-write``.")}
 
-GRANTS = {'name': 'grants', 'nargs': '+',
-          'help_text': (
-              "Grant specific permissions to individual users or groups. You "
-              "can supply a list of grants of the form:: --grants "
-              "Permission=Grantee_Type=Grantee_ID [Permission=Grantee_Type="
-              "Grantee_ID ...] Each value contains the following elements:"
-              "The "
-              "Grantee_ID value can be one of:"
-              ""
-              "For more information on Amazon S3 access control, see "
-              'Access Control')}
+
+GRANTS = {
+    'name': 'grants', 'nargs': '+',
+    'help_text': (
+        'Grant specific permissions to individual users or groups. You '
+        'can supply a list of grants of the form:: --grants '
+        'Permission=Grantee_Type=Grantee_ID [Permission=Grantee_Type='
+        'Grantee_ID ...] Each value contains the following elements:'
+        'The '
+        'Grantee_ID value can be one of:'
+        ''
+        'For more information on Amazon S3 access control, see '
+        'Access Control')}
+
 
 SSE = {'name': 'sse', 'action': 'store_true',
        'help_text': (
           "Enable Server Side Encryption of the object in S3")}
 
+
 STORAGE_CLASS = {'name': 'storage-class', 'nargs': 1,
                  'choices': ['STANDARD', 'REDUCED_REDUNDANCY'],
                  'help_text': (
@@ -141,6 +160,7 @@
                      "Valid choices are: STANDARD | REDUCED_REDUNDANCY. "
                      "Defaults to 'STANDARD'")}
 
+
 WEBSITE_REDIRECT = {'name': 'website-redirect', 'nargs': 1,
                     'help_text': (
                         "If the bucket is configured as a website, "
@@ -149,16 +169,19 @@
                         "stores the value of this header in the object "
                         "metadata.")}
 
+
 CACHE_CONTROL = {'name': 'cache-control', 'nargs': 1,
                  'help_text': (
                      "Specifies caching behavior along the "
                      "request/reply chain.")}
 
+
 CONTENT_DISPOSITION = {'name': 'content-disposition', 'nargs': 1,
                        'help_text': (
                            "Specifies presentational information "
                            "for the object.")}
 
+
 CONTENT_ENCODING = {'name': 'content-encoding', 'nargs': 1,
                     'help_text': (
                         "Specifies what content encodings have been "
@@ -166,9 +189,11 @@
                         "mechanisms must be applied to obtain the media-type "
                         "referenced by the Content-Type header field.")}
 
+
 CONTENT_LANGUAGE = {'name': 'content-language', 'nargs': 1,
                     'help_text': ("The language the content is in.")}
 
+
 SOURCE_REGION = {'name': 'source-region', 'nargs': 1,
                  'help_text': (
                      "When transferring objects from an s3 bucket to an s3 "
@@ -179,8 +204,13 @@
                      "specified the region of the source will be the same "
                      "as the region of the destination bucket.")}
 
-EXPIRES = {'name': 'expires', 'nargs': 1, 'help_text': ("The date and time at "
-           "which the object is no longer cacheable.")}
+
+EXPIRES = {
+    'name': 'expires', 'nargs': 1,
+    'help_text': (
+        "The date and time at which the object is no longer cacheable.")
+}
+
 
 INDEX_DOCUMENT = {'name': 'index-document',
                   'help_text': (
@@ -192,16 +222,19 @@
                       'images/index.html) The suffix must not be empty and '
                       'must not include a slash character.')}
 
+
 ERROR_DOCUMENT = {'name': 'error-document',
                   'help_text': (
                       'The object key name to use when '
                       'a 4XX class error occurs.')}
 
+
 ONLY_SHOW_ERRORS = {'name': 'only-show-errors', 'action': 'store_true',
                     'help_text': (
                         'Only errors and warnings are displayed. All other '
                         'output is suppressed.')}
 
+
 EXPECTED_SIZE = {'name': 'expected-size',
                  'help_text': (
                      'This argument specifies the expected size of a stream '
@@ -213,10 +246,10 @@
 
 
 PAGE_SIZE = {'name': 'page-size', 'cli_type_name': 'integer',
-            'help_text': (
-                'The number of results to return in each response to a list '
-                'operation. The default value is 1000 (the maximum allowed). '
-                'Using a lower value may help if an operation times out.')}
+             'help_text': (
+                 'The number of results to return in each response to a list '
+                 'operation. The default value is 1000 (the maximum allowed). '
+                 'Using a lower value may help if an operation times out.')}
 
 
 TRANSFER_ARGS = [DRYRUN, QUIET, RECURSIVE, INCLUDE, EXCLUDE, ACL,
@@ -355,7 +388,7 @@ def _make_last_mod_str(self, last_mod):
                         str(last_mod.day).zfill(2),
                         str(last_mod.hour).zfill(2),
                         str(last_mod.minute).zfill(2),
-                       str(last_mod.second).zfill(2))
+                        str(last_mod.second).zfill(2))
         last_mod_str = "%s-%s-%s %s:%s:%s" % last_mod_tup
         return last_mod_str.ljust(19, ' ')
 
@@ -363,7 +396,10 @@ def _make_size_str(self, size):
         """
         This function creates the size string when objects are being listed.
         """
-        size_str = human_readable_size(size) if self._human_readable else str(size)
+        if self._human_readable:
+            size_str = human_readable_size(size)
+        else:
+            size_str = str(size)
         return size_str.rjust(10, ' ')
 
     def _print_summary(self):
@@ -372,7 +408,10 @@ def _print_summary(self):
         """
         print_str = str(self._total_objects)
         uni_print("\nTotal Objects: ".rjust(15, ' ') + print_str + "\n")
-        print_str = human_readable_size(self._size_accumulator) if self._human_readable else str(self._size_accumulator)
+        if self._human_readable:
+            print_str = human_readable_size(self._size_accumulator)
+        else:
+            print_str = str(self._size_accumulator)
         uni_print("Total Size: ".rjust(15, ' ') + print_str + "\n")
 
 
@@ -668,8 +707,9 @@ def run(self):
                                      service=self._service,
                                      endpoint=self._endpoint,
                                      is_stream=True)]
-        file_info_builder = FileInfoBuilder(self._service, self._endpoint,
-                                            self._source_endpoint, self.parameters)
+        file_info_builder = FileInfoBuilder(
+            self._service, self._endpoint,
+            self._source_endpoint, self.parameters)
         s3handler = S3Handler(self.session, self.parameters,
                               result_queue=result_queue)
         s3_stream_handler = S3StreamHandler(self.session, self.parameters,
@@ -689,7 +729,7 @@ def run(self):
                             's3_handler': [s3handler]}
         elif self.cmd == 'cp' and self.parameters['is_stream']:
             command_dict = {'setup': [stream_file_info],
-                           's3_handler': [s3_stream_handler]}
+                            's3_handler': [s3_stream_handler]}
         elif self.cmd == 'cp':
             command_dict = {'setup': [files],
                             'file_generator': [file_generator],