diff --git a/awscli/customizations/s3/constants.py b/awscli/customizations/s3/constants.py
index d0877eed26b2..53bdc5cb4174 100644
--- a/awscli/customizations/s3/constants.py
+++ b/awscli/customizations/s3/constants.py
@@ -14,7 +14,4 @@
 CHUNKSIZE = 7 * (1024 ** 2)
 NUM_THREADS = 10
 QUEUE_TIMEOUT_WAIT = 0.2
-MAX_PARTS = 950
-MAX_SINGLE_UPLOAD_SIZE = 5 * (1024 ** 3)
-MAX_UPLOAD_SIZE = 5 * (1024 ** 4)
 MAX_QUEUE_SIZE = 1000
diff --git a/awscli/customizations/s3/s3handler.py b/awscli/customizations/s3/s3handler.py
index ec253aa4415c..5588faf1d673 100644
--- a/awscli/customizations/s3/s3handler.py
+++ b/awscli/customizations/s3/s3handler.py
@@ -17,7 +17,7 @@
 import sys
 
 from awscli.customizations.s3.constants import MULTI_THRESHOLD, CHUNKSIZE, \
-    NUM_THREADS, MAX_UPLOAD_SIZE, MAX_QUEUE_SIZE
+    NUM_THREADS, MAX_QUEUE_SIZE
 from awscli.customizations.s3.utils import find_chunksize, \
     operate, find_bucket_key, relative_path, PrintTask, create_warning
 from awscli.customizations.s3.executor import Executor
@@ -27,6 +27,9 @@
 
 LOGGER = logging.getLogger(__name__)
 
+# Maximum object size allowed in S3.
+# See: http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
+MAX_UPLOAD_SIZE = 5 * (1024 ** 4)
 
 CommandResult = namedtuple('CommandResult',
                            ['num_tasks_failed', 'num_tasks_warned'])
diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py
index a47dc2df2b1f..c9d929b2a4d1 100644
--- a/awscli/customizations/s3/utils.py
+++ b/awscli/customizations/s3/utils.py
@@ -24,14 +24,17 @@
 from dateutil.tz import tzlocal
 
 from botocore.compat import unquote_str
-from awscli.customizations.s3.constants import MAX_PARTS
-from awscli.customizations.s3.constants import MAX_SINGLE_UPLOAD_SIZE
 from awscli.compat import six
 from awscli.compat import PY3
 from awscli.compat import queue
 
 
-humanize_suffixes = ('KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB')
+HUMANIZE_SUFFIXES = ('KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB')
+MAX_PARTS = 10000
+# The maximum file size you can upload via S3 per request.
+# See: http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html
+# and: http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
+MAX_SINGLE_UPLOAD_SIZE = 5 * (1024 ** 3)
 
 
 def human_readable_size(value):
@@ -61,7 +64,7 @@
     elif bytes_int < base:
         return '%d Bytes' % bytes_int
 
-    for i, suffix in enumerate(humanize_suffixes):
+    for i, suffix in enumerate(HUMANIZE_SUFFIXES):
         unit = base ** (i+2)
         if round((bytes_int / unit) * base) < base:
             return '%.1f %s' % ((base * bytes_int / unit), suffix)
diff --git a/tests/unit/customizations/s3/test_s3handler.py b/tests/unit/customizations/s3/test_s3handler.py
index 2105d3495770..d97a060306e8 100644
--- a/tests/unit/customizations/s3/test_s3handler.py
+++ b/tests/unit/customizations/s3/test_s3handler.py
@@ -23,6 +23,7 @@
 from awscli.customizations.s3.fileinfo import FileInfo
 from awscli.customizations.s3.tasks import CreateMultipartUploadTask, \
     UploadPartTask, CreateLocalFileTask
+from awscli.customizations.s3.utils import MAX_PARTS
 from tests.unit.customizations.s3.fake_session import FakeSession
 from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \
     make_s3_files, s3_cleanup, create_bucket, list_contents, list_buckets, \
@@ -697,7 +698,7 @@ def test_upload_stream_with_expected_size(self):
         # UploadPartTasks.
         changed_chunk_size = submitted_tasks[1][0][0]._chunk_size
         # New chunksize should have a total parts under 1000.
-        self.assertTrue(100000/changed_chunk_size < 1000)
+        self.assertTrue(100000 / float(changed_chunk_size) <= MAX_PARTS)
 
     def test_upload_stream_enqueue_upload_task(self):
         s3handler = S3StreamHandler(self.session, self.params)
diff --git a/tests/unit/customizations/s3/test_utils.py b/tests/unit/customizations/s3/test_utils.py
index 2a2355f252da..fde73e07a276 100644
--- a/tests/unit/customizations/s3/test_utils.py
+++ b/tests/unit/customizations/s3/test_utils.py
@@ -22,7 +22,7 @@
 from awscli.customizations.s3.utils import AppendFilter
 from awscli.customizations.s3.utils import create_warning
 from awscli.customizations.s3.utils import human_readable_size
-from awscli.customizations.s3.constants import MAX_SINGLE_UPLOAD_SIZE
+from awscli.customizations.s3.utils import MAX_SINGLE_UPLOAD_SIZE
 
 
 def test_human_readable_size():
@@ -102,8 +102,10 @@ def test_large_chunk(self):
         size because the original ``chunksize`` is too small.
         """
         chunksize = 7 * (1024 ** 2)
-        size = 8 * (1024 ** 3)
-        self.assertEqual(find_chunksize(size, chunksize), chunksize * 2)
+        size = 5 * (1024 ** 4)
+        # If we try to upload a 5TB file, we'll need to use 896MB part
+        # sizes.
+        self.assertEqual(find_chunksize(size, chunksize), 896 * (1024 ** 2))
 
     def test_super_chunk(self):
         """
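For anyone sanity-checking the new `test_large_chunk` expectation: `find_chunksize` is expected to grow the part size from its starting value until the upload fits within `MAX_PARTS`, while never letting a single part exceed `MAX_SINGLE_UPLOAD_SIZE`. The sketch below is illustrative only, assuming a simple doubling loop rather than the exact awscli implementation, and `sketch_find_chunksize` is a hypothetical name; it reproduces the arithmetic behind the 896 MiB figure in the test.

```python
# Illustrative sketch only -- not the actual awscli implementation.
# It mirrors the behavior the tests rely on: grow the chunk size until the
# part count fits under MAX_PARTS, and never exceed the per-part S3 limit.
MAX_PARTS = 10000
MAX_SINGLE_UPLOAD_SIZE = 5 * (1024 ** 3)  # 5 GiB per part


def sketch_find_chunksize(size, current_chunksize):
    """Return a chunk size that keeps a multipart upload under MAX_PARTS."""
    chunksize = current_chunksize
    # Double the chunk size until the file fits in MAX_PARTS parts.
    while size / float(chunksize) > MAX_PARTS:
        chunksize *= 2
    # A single part can never exceed the S3 per-request limit.
    return min(chunksize, MAX_SINGLE_UPLOAD_SIZE)


# Matches the expectation in test_large_chunk: a 5 TiB object with a 7 MiB
# starting chunk size needs 896 MiB parts (7 MiB doubled seven times),
# which works out to roughly 5,852 parts -- comfortably under MAX_PARTS.
assert sketch_find_chunksize(5 * (1024 ** 4), 7 * (1024 ** 2)) == 896 * (1024 ** 2)
```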