request compression

* request compression
* refactor
* add handler test
* Another round of refactoring. Move compression to a separate file and query body serialization to a utility
* cleanup
* pr feedback
* removed enumerate and set size=-1 instead of returning -1
* change to functional implementation
* simplify assert compression method
* pr feedback
* update wording regarding compression for requests with streaming input
* divert byte encoding
* move min and max min compression size to function
* fixed test
* fixed max allowed min compression size, added test for it and moved compression config tests to test_args
* cleanup tests and test bad min and bad max separately
* cleanup
* pr feedback. clean up tests and move urlencoding dicts into compression
* cleaned up tests
* added TypeError to min compression validation and a bunch of formatting cleanup
* extract dict type normalization into separate function
* refactor unit tests
* adjust compression assertion method and move dict compression into parametrized test
* formatting cleanup
* actually convert request_compression_min_size_bytes to int and some more formatting fixes
* add test case for coercible boolean and small fix to no compression test
* fixed test
* actually fixed test
* assert_compression method
* add test cases for non-seekable streams
* pr feedback
* put private note in wrong file. Also removed `classes` since there aren't any
* remove duplicate test case
* Refactor unit tests
* Fix incorrect operation model
* use compression assertion function in stream test and only use single quotes
* small fix to functional test and changelog

Co-authored-by: Nate Prewitt <nate.prewitt@gmail.com>
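For context, a minimal usage sketch of the two client config options the new compression logic reads (request_min_compression_size_bytes and disable_request_compression, both referenced in compression.py below). The service, threshold value, and boto3 usage here are illustrative assumptions, not part of this commit:

import boto3
from botocore.config import Config

# Compress eligible request bodies once they reach 1,000 bytes (illustrative
# threshold). Whether a given operation is eligible depends on the service
# model's requestcompression trait and its supported encodings (gzip).
config = Config(request_min_compression_size_bytes=1000)
client = boto3.client('cloudwatch', config=config)

# Opt out of request compression entirely.
no_compression = Config(disable_request_compression=True)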
Commit bf00aa7 · 1 parent 10f938d · 11 changed files with 771 additions and 2 deletions.
@@ -0,0 +1,5 @@
{
  "type": "enhancement",
  "category": "compression",
  "description": "Adds support for the ``requestcompression`` operation trait."
}
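For reference, the ``requestcompression`` trait named in this changelog entry surfaces on the operation model as a small mapping consumed by the new module below. A sketch of what operation_model.request_compression would hold for a gzip-enabled operation (the exact shape beyond the 'encodings' key is an assumption):

# The 'encodings' list is iterated by maybe_compress_request; 'gzip' is the
# only encoding with an encoder registered in COMPRESSION_MAPPING.
request_compression = {'encodings': ['gzip']}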
@@ -0,0 +1,126 @@
# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
NOTE: All functions in this module are considered private and are
subject to abrupt breaking changes. Please do not use them directly.
"""

import io
import logging
from gzip import GzipFile
from gzip import compress as gzip_compress

from botocore.compat import urlencode
from botocore.utils import determine_content_length

logger = logging.getLogger(__name__)


def maybe_compress_request(config, request_dict, operation_model):
    """Attempt to compress the request body using the modeled encodings."""
    if _should_compress_request(config, request_dict, operation_model):
        for encoding in operation_model.request_compression['encodings']:
            encoder = COMPRESSION_MAPPING.get(encoding)
            if encoder is not None:
                logger.debug('Compressing request with %s encoding.', encoding)
                request_dict['body'] = encoder(request_dict['body'])
                _set_compression_header(request_dict['headers'], encoding)
                return
            else:
                logger.debug('Unsupported compression encoding: %s', encoding)


def _should_compress_request(config, request_dict, operation_model):
    if (
        config.disable_request_compression is not True
        and config.signature_version != 'v2'
        and operation_model.request_compression is not None
    ):
        if not _is_compressible_type(request_dict):
            body_type = type(request_dict['body'])
            log_msg = 'Body type %s does not support compression.'
            logger.debug(log_msg, body_type)
            return False

        if operation_model.has_streaming_input:
            streaming_input = operation_model.get_streaming_input()
            streaming_metadata = streaming_input.metadata
            return 'requiresLength' not in streaming_metadata

        body_size = _get_body_size(request_dict['body'])
        min_size = config.request_min_compression_size_bytes
        return min_size <= body_size

    return False


def _is_compressible_type(request_dict):
    body = request_dict['body']
    # Coerce dict to a format compatible with compression.
    if isinstance(body, dict):
        body = urlencode(body, doseq=True, encoding='utf-8').encode('utf-8')
        request_dict['body'] = body
    is_supported_type = isinstance(body, (str, bytes, bytearray))
    return is_supported_type or hasattr(body, 'read')


def _get_body_size(body):
    size = determine_content_length(body)
    if size is None:
        logger.debug(
            'Unable to get length of the request body: %s. '
            'Skipping compression.',
            body,
        )
        size = 0
    return size


def _gzip_compress_body(body):
    if isinstance(body, str):
        return gzip_compress(body.encode('utf-8'))
    elif isinstance(body, (bytes, bytearray)):
        return gzip_compress(body)
    elif hasattr(body, 'read'):
        if hasattr(body, 'seek') and hasattr(body, 'tell'):
            current_position = body.tell()
            compressed_obj = _gzip_compress_fileobj(body)
            body.seek(current_position)
            return compressed_obj
        return _gzip_compress_fileobj(body)


def _gzip_compress_fileobj(body):
    compressed_obj = io.BytesIO()
    with GzipFile(fileobj=compressed_obj, mode='wb') as gz:
        while True:
            chunk = body.read(8192)
            if not chunk:
                break
            if isinstance(chunk, str):
                chunk = chunk.encode('utf-8')
            gz.write(chunk)
    compressed_obj.seek(0)
    return compressed_obj


def _set_compression_header(headers, encoding):
    ce_header = headers.get('Content-Encoding')
    if ce_header is None:
        headers['Content-Encoding'] = encoding
    else:
        headers['Content-Encoding'] = f'{ce_header},{encoding}'


COMPRESSION_MAPPING = {'gzip': _gzip_compress_body}
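As a self-contained illustration of what the gzip path above produces, here is a stdlib-only sketch mirroring what _gzip_compress_body does for a str body and the Content-Encoding merging done by _set_compression_header; the body text and pre-existing header value are made up for the example:

import gzip

body = 'Action=PutMetricData&Version=2010-08-01&MetricData.member.1.MetricName=Latency' * 20
compressed = gzip.compress(body.encode('utf-8'))
# Round-trips back to the original payload.
assert gzip.decompress(compressed).decode('utf-8') == body

# Merge with an existing Content-Encoding header, as _set_compression_header does.
headers = {'Content-Encoding': 'identity'}
existing = headers.get('Content-Encoding')
headers['Content-Encoding'] = 'gzip' if existing is None else f'{existing},gzip'
print(len(body.encode('utf-8')), '->', len(compressed), headers['Content-Encoding'])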