diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 19fa2289..a6becc1e 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -23,11 +23,11 @@ from storages.base import BaseStorage from storages.compress import CompressedFileMixin from storages.compress import CompressStorageMixin +from storages.utils import ReadBytesWrapper from storages.utils import check_location from storages.utils import clean_name from storages.utils import get_available_overwrite_name from storages.utils import is_seekable -from storages.utils import ReadBytesWrapper from storages.utils import safe_join from storages.utils import setting from storages.utils import to_bytes diff --git a/storages/utils.py b/storages/utils.py index 6e2147e3..cf5fac9f 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -5,6 +5,7 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import SuspiciousFileOperation +from django.core.files.utils import FileProxyMixin from django.utils.encoding import force_bytes @@ -127,7 +128,7 @@ def is_seekable(file_object): return not hasattr(file_object, 'seekable') or file_object.seekable() -class ReadBytesWrapper: +class ReadBytesWrapper(FileProxyMixin): """ A wrapper for a file-like object, that makes read() always returns bytes. """ @@ -138,20 +139,16 @@ def __init__(self, file, encoding=None): If not provided will default to file.encoding, of if that's not available, to utf-8. 
""" - self._file = file - self.encoding = ( + self.file = file + self._encoding = ( encoding or getattr(file, "encoding", None) or "utf-8" ) def read(self, *args, **kwargs): - content = self._file.read(*args, **kwargs) + content = self.file.read(*args, **kwargs) if not isinstance(content, bytes): - return content.encode(self.encoding) - else: - return content - - def seek(self, *args, **kwargs): - return self._file.seek(*args, **kwargs) + content = content.encode(self._encoding) + return content diff --git a/tests/settings.py b/tests/settings.py index 4f0de64a..244f94de 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -12,6 +12,4 @@ USE_TZ = True # the following test settings are required for moto to work. -AWS_STORAGE_BUCKET_NAME = "test_bucket" -AWS_ACCESS_KEY_ID = "testing_key_id" -AWS_SECRET_ACCESS_KEY = "testing_access_key" +AWS_STORAGE_BUCKET_NAME = "test-bucket" diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 10f47100..f1d49fa2 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -3,7 +3,6 @@ import io import pickle import threading -from datetime import datetime from textwrap import dedent from unittest import mock from unittest import skipIf @@ -11,9 +10,11 @@ import boto3 import boto3.s3.transfer +from botocore.exceptions import ClientError from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile +from django.core.files.base import File from django.test import TestCase from django.test import override_settings from django.utils.timezone import is_aware @@ -34,11 +35,11 @@ def setUp(self): self.storage._connections.connection = mock.MagicMock() def test_s3_session(self): - settings.AWS_S3_SESSION_PROFILE = "test_profile" - with mock.patch('boto3.Session') as mock_session: - storage = s3boto3.S3Boto3Storage() - _ = storage.connection - mock_session.assert_called_once_with(profile_name="test_profile") + with 
override_settings(AWS_S3_SESSION_PROFILE="test_profile"): + with mock.patch('boto3.Session') as mock_session: + storage = s3boto3.S3Boto3Storage() + _ = storage.connection + mock_session.assert_called_once_with(profile_name="test_profile") def test_pickle_with_bucket(self): """ @@ -96,7 +97,7 @@ def test_storage_save(self): obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + mock.ANY, ExtraArgs={ 'ContentType': 'text/plain', }, @@ -114,7 +115,7 @@ def test_storage_save_non_seekable(self): obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + mock.ANY, ExtraArgs={ 'ContentType': 'text/plain', }, @@ -174,7 +175,7 @@ def test_content_type(self): obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + mock.ANY, ExtraArgs={ 'ContentType': 'image/jpeg', }, @@ -189,7 +190,7 @@ def test_storage_save_gzipped(self): content = ContentFile("I am gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( + obj.upload_fileobj.assert_called_once_with( mock.ANY, ExtraArgs={ 'ContentType': 'application/octet-stream', @@ -210,7 +211,7 @@ def get_object_parameters(name): obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + mock.ANY, ExtraArgs={ "ContentType": "application/gzip", }, @@ -225,8 +226,8 @@ def test_storage_save_gzipped_non_seekable(self): content = NonSeekableContentFile("I am gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - content, + obj.upload_fileobj.assert_called_once_with( + mock.ANY, ExtraArgs={ 'ContentType': 'application/octet-stream', 'ContentEncoding': 'gzip', @@ -617,7 +618,7 @@ def test_storage_listdir_empty(self): self.storage._connections.connection.meta.client.get_paginator.return_value = paginator dirs, files = 
self.storage.listdir('dir/') - paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='dir/') + paginator.paginate.assert_called_with(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter='/', Prefix='dir/') self.assertEqual(dirs, []) self.assertEqual(files, []) @@ -868,31 +869,23 @@ def test_closed(self): f.close() self.assertTrue(f.closed) + @mock_s3 class S3Boto3StorageTestsWithMoto(TestCase): """ - These tests use the moto library to mock S3, rather than unittest.mock. - This is better because more of boto3's internal code will be run in tests. - - For example this issue - https://github.com/jschneier/django-storages/issues/708 - wouldn't be caught using unittest.mock, since the error occurs in boto3's internals. - Using mock_s3 as a class decorator automatically decorates methods, but NOT classmethods or staticmethods. """ - @classmethod - @mock_s3 - def setUpClass(cls): - super().setUpClass() - # create a bucket specified in settings. - cls.bucket = boto3.resource("s3").Bucket(settings.AWS_STORAGE_BUCKET_NAME) + + def setUp(self): + super().setUp() + + self.storage = s3boto3.S3Boto3Storage() + self.bucket = self.storage.connection.Bucket(settings.AWS_STORAGE_BUCKET_NAME) - cls.bucket.create() + self.bucket.create() - # create a S3Boto3Storage backend instance. 
- cls.s3boto3_storage = s3boto3.S3Boto3Storage() def test_save_bytes_file(self): - self.s3boto3_storage.save("bytes_file.txt", File(io.BytesIO(b"foo1"))) + self.storage.save("bytes_file.txt", File(io.BytesIO(b"foo1"))) self.assertEqual( b"foo1", @@ -900,7 +893,7 @@ def test_save_bytes_file(self): ) def test_save_string_file(self): - self.s3boto3_storage.save("string_file.txt", File(io.StringIO("foo2"))) + self.storage.save("string_file.txt", File(io.StringIO("foo2"))) self.assertEqual( b"foo2", @@ -908,7 +901,7 @@ def test_save_string_file(self): ) def test_save_bytes_content_file(self): - self.s3boto3_storage.save("bytes_content.txt", ContentFile(b"foo3")) + self.storage.save("bytes_content.txt", ContentFile(b"foo3")) self.assertEqual( b"foo3", @@ -916,7 +909,7 @@ def test_save_bytes_content_file(self): ) def test_save_string_content_file(self): - self.s3boto3_storage.save("string_content.txt", ContentFile("foo4")) + self.storage.save("string_content.txt", ContentFile("foo4")) self.assertEqual( b"foo4", @@ -930,7 +923,7 @@ def test_content_type_guess(self): name = 'test_image.jpg' content = ContentFile(b'data') content.content_type = None - self.s3boto3_storage.save(name, content) + self.storage.save(name, content) s3_object_fetched = self.bucket.Object(name).get() self.assertEqual(b"data", s3_object_fetched['Body'].read()) @@ -942,7 +935,7 @@ def test_content_type_attribute(self): """ content = ContentFile(b'data') content.content_type = "test/foo" - self.s3boto3_storage.save("test_file", content) + self.storage.save("test_file", content) s3_object_fetched = self.bucket.Object("test_file").get() self.assertEqual(b"data", s3_object_fetched['Body'].read()) @@ -954,7 +947,7 @@ def test_content_type_not_detectable(self): """ content = ContentFile(b'data') content.content_type = None - self.s3boto3_storage.save("test_file", content) + self.storage.save("test_file", content) s3_object_fetched = self.bucket.Object("test_file").get() self.assertEqual(b"data", 
s3_object_fetched['Body'].read())