diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9549f3bd4..ba4dc914b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,8 +4,31 @@ django-storages CHANGELOG UNRELEASED ********** +**Breaking** + - Removed support for end-of-life Python 2.7 and 3.4. +- The minimum supported version of boto3 is now 1.4.4. + +- The ``S3Boto3Storage`` backend no longer accepts the argument ``acl``. Use + the ``ACL`` key in ``AWS_S3_OBJECT_PARAMETERS`` instead. + +- The ``S3Boto3Storage`` backend no longer accepts the argument ``bucket``. Use + ``bucket_name`` or the setting ``AWS_STORAGE_BUCKET_NAME`` instead. + +- The ``S3Boto3Storage`` backend no longer automatically creates the bucket. + Doing so had encouraged using overly broad credentials. As a result, the + ``AWS_BUCKET_ACL`` setting has been removed. + +- The ``S3Boto3Storage`` backend no longer supports the settings + ``AWS_DEFAULT_ACL``, ``AWS_REDUCED_REDUNDANCY`` and ``AWS_S3_ENCRYPTION``. + They have been removed in favor of the ``AWS_S3_OBJECT_PARAMETERS`` setting. + Using ``AWS_S3_OBJECT_PARAMETERS`` allows for full control over these S3 + parameters. + +- The ``S3Boto3Storage`` backend no longer supports the undocumented + ``AWS_PRELOAD_METADATA`` setting. + 1.9.1 (2020-02-03) ****************** diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index d1c1ebc49..8740ea55b 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -10,6 +10,9 @@ There is only one supported backend for interacting with Amazon's S3, The legacy ``S3BotoStorage`` backend was removed in version 1.9. To continue getting new features you must upgrade to the ``S3Boto3Storage`` backend by following the :ref:`migration instructions `. +The minimum required version of ``boto3`` is 1.4.4 although we always recommend +the most recent. 
+ Settings -------- @@ -34,37 +37,6 @@ To allow ``django-admin.py`` collectstatic to automatically put your static file ``AWS_STORAGE_BUCKET_NAME`` Your Amazon Web Services storage bucket name, as a string. -``AWS_DEFAULT_ACL`` (optional, ``None`` or canned ACL, default ``public-read``) - Must be either ``None`` or from the `list of canned ACLs`_. If set to ``None`` - then all files will inherit the bucket's ACL. - -.. warning:: - - The default value of ``public-read`` is insecure and will be changing to ``None`` in - a future release of django-storages. Please set this explicitly to ``public-read`` - if that is the desired behavior. - -``AWS_BUCKET_ACL`` (optional, default ``public-read``) - Only used if ``AWS_AUTO_CREATE_BUCKET=True``. The ACL of the created bucket. - - Must be either ``None`` or from the `list of canned ACLs`_. If set to ``None`` - then the bucket will use the AWS account's default. - -.. warning:: - - The default value of ``public-read`` is insecure and will be changing to ``None`` in - a future release of django-storages. Please set this explicitly to ``public-read`` - if that is the desired behavior. - -``AWS_AUTO_CREATE_BUCKET`` (optional) - If set to ``True`` the bucket specified in ``AWS_STORAGE_BUCKET_NAME`` is automatically created. - -.. deprecated:: 1.9 - - The ability to automatically create a bucket will be removed in version 1.10. The permissions needed - to do so are incongruent with the requirements of the rest of this library. Either create it yourself - or use one of the popular configuration management tools. - ``AWS_S3_OBJECT_PARAMETERS`` (optional, default ``{}``) Use this to set parameters on all objects. To set these on a per-object basis, subclass the backend and override ``S3Boto3Storage.get_object_parameters``. 
@@ -84,14 +56,6 @@ To allow ``django-admin.py`` collectstatic to automatically put your static file ``AWS_QUERYSTRING_EXPIRE`` (optional; default is 3600 seconds) The number of seconds that a generated URL is valid for. -``AWS_S3_ENCRYPTION`` (optional; default is ``False``) - Enable server-side file encryption while at rest. - -.. deprecated:: 1.9 - - Support for this top level setting is deprecated. The functionality is still available by setting - ServerSideEncryption=AES256 in AWS_S3_OBJECT_PARAMETERS. - ``AWS_S3_FILE_OVERWRITE`` (optional: default is ``True``) By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. @@ -170,9 +134,8 @@ The following adjustments to settings are required: - If you persist urls and rely on the output to use the signature version of ``s3`` set ``AWS_S3_SIGNATURE_VERSION`` to ``s3`` - Update ``DEFAULT_FILE_STORAGE`` and/or ``STATICFILES_STORAGE`` to ``storages.backends.s3boto3.S3Boto3Storage`` -Additionally, you must install ``boto3``. In order to use -all currently supported features, ``1.4.4`` is the minimum required version although we -always recommend the most recent. +Additionally, you must install ``boto3``. The minimum required version is 1.4.4 +although we always recommend the most recent. Please open an issue on the GitHub repo if any further issues are encountered or steps were omitted. 
diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 1c9cbae2e..151d61dd0 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -3,13 +3,11 @@ import os import posixpath import threading -import warnings from datetime import datetime, timedelta from gzip import GzipFile from tempfile import SpooledTemporaryFile from urllib.parse import parse_qsl, urlsplit -from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation from django.core.files.base import File from django.utils.deconstruct import deconstructible @@ -24,7 +22,6 @@ try: import boto3.session - from boto3 import __version__ as boto3_version from botocore.client import Config from botocore.exceptions import ClientError from botocore.signers import CloudFrontSigner @@ -76,9 +73,6 @@ def _cloud_front_signer_from_pem(key_id, pem): 'Supported backends are packages: cryptography and rsa.') -boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) - - @deconstructible class S3Boto3StorageFile(File): @@ -255,56 +249,9 @@ class S3Boto3Storage(BaseStorage): security_token_names = ['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN'] security_token = None - def __init__(self, acl=None, bucket=None, **settings): + def __init__(self, **settings): super().__init__(**settings) - # For backward-compatibility of old differing parameter names - if acl is not None: - warnings.warn( - "The acl argument of S3Boto3Storage is deprecated. Use " - "argument default_acl or setting AWS_DEFAULT_ACL instead. The " - "acl argument will be removed in version 1.10.", - DeprecationWarning, - ) - self.default_acl = acl - if bucket is not None: - warnings.warn( - "The bucket argument of S3Boto3Storage is deprecated. Use " - "argument bucket_name or setting AWS_STORAGE_BUCKET_NAME " - "instead. 
The bucket argument will be removed in version 1.10.", - DeprecationWarning, - ) - self.bucket_name = bucket - if self.auto_create_bucket: - warnings.warn( - "Automatic bucket creation will be removed in version 1.10. It encourages " - "using overly broad credentials with this library. Either create it before " - "manually or use one of a myriad of automatic configuration management tools. " - "Unset AWS_AUTO_CREATE_BUCKET (it defaults to False) to silence this warning.", - DeprecationWarning, - ) - if self.reduced_redundancy: - warnings.warn( - "Support for AWS_REDUCED_REDUNDANCY will be removed in version 1.10. " - "Update now by adding StorageClass=REDUCED_REDUNDANCY to " - "AWS_S3_OBJECT_PARAMETERS. There are also several other possible values " - "for StorageClass available. Check the AWS & boto3 docs for more info.", - DeprecationWarning, - ) - if self.encryption: - warnings.warn( - "Support for AWS_S3_ENCRYPTION will be removed in version 1.10. " - "Update now by adding ServerSideEncryption=AES256 to " - "AWS_S3_OBJECT_PARAMETERS. Doing so also easily allows using 'aws:kms' " - "for encryption. Check the AWS & boto3 docs for more info.", - DeprecationWarning, - ) - if self.preload_metadata: - warnings.warn( - "Support for AWS_PRELOAD_METADATA will be removed in version 1.10. 
", - DeprecationWarning, - ) - check_location(self) # Backward-compatibility: given the anteriority of the SECURE_URL setting @@ -313,7 +260,6 @@ def __init__(self, acl=None, bucket=None, **settings): if self.secure_urls: self.url_protocol = 'https:' - self._entries = {} self._bucket = None self._connections = threading.local() @@ -321,29 +267,10 @@ def __init__(self, acl=None, bucket=None, **settings): self.security_token = self._get_security_token() if not self.config: - kwargs = dict( + self.config = Config( s3={'addressing_style': self.addressing_style}, signature_version=self.signature_version, - ) - - if boto3_version_info >= (1, 4, 4): - kwargs['proxies'] = self.proxies - else: - warnings.warn( - "In version 1.10 of django-storages the minimum required version of " - "boto3 will be 1.4.4. You have %s " % boto3_version_info - ) - self.config = Config(**kwargs) - - # warn about upcoming change in default AWS_DEFAULT_ACL setting - if not hasattr(django_settings, 'AWS_DEFAULT_ACL') and self.default_acl == 'public-read': - warnings.warn( - "The default behavior of S3Boto3Storage is insecure and will change " - "in django-storages 1.10. By default files and new buckets are saved " - "with an ACL of 'public-read' (globally publicly readable). Version 1.10 will " - "default to using the bucket's ACL. To opt into the new behavior set " - "AWS_DEFAULT_ACL = None, otherwise to silence this warning explicitly " - "set AWS_DEFAULT_ACL." 
+ proxies=self.proxies, ) def get_cloudfront_signer(self, key_id, key): @@ -369,22 +296,17 @@ def get_default_settings(self): "file_overwrite": setting('AWS_S3_FILE_OVERWRITE', True), "object_parameters": setting('AWS_S3_OBJECT_PARAMETERS', {}), "bucket_name": setting('AWS_STORAGE_BUCKET_NAME'), - "auto_create_bucket": setting('AWS_AUTO_CREATE_BUCKET', False), - "default_acl": setting('AWS_DEFAULT_ACL', 'public-read'), "bucket_acl": setting('AWS_BUCKET_ACL', 'public-read'), "querystring_auth": setting('AWS_QUERYSTRING_AUTH', True), "querystring_expire": setting('AWS_QUERYSTRING_EXPIRE', 3600), "signature_version": setting('AWS_S3_SIGNATURE_VERSION'), - "reduced_redundancy": setting('AWS_REDUCED_REDUNDANCY', False), "location": setting('AWS_LOCATION', ''), - "encryption": setting('AWS_S3_ENCRYPTION', False), "custom_domain": setting('AWS_S3_CUSTOM_DOMAIN'), "cloudfront_signer": cloudfront_signer, "addressing_style": setting('AWS_S3_ADDRESSING_STYLE'), "secure_urls": setting('AWS_S3_SECURE_URLS', True), "file_name_charset": setting('AWS_S3_FILE_NAME_CHARSET', 'utf-8'), "gzip": setting('AWS_IS_GZIPPED', False), - "preload_metadata": setting('AWS_PRELOAD_METADATA', False), "gzip_content_types": setting('GZIP_CONTENT_TYPES', ( 'text/css', 'text/javascript', @@ -437,21 +359,9 @@ def bucket(self): create it. """ if self._bucket is None: - self._bucket = self._get_or_create_bucket(self.bucket_name) + self._bucket = self.connection.Bucket(self.bucket_name) return self._bucket - @property - def entries(self): - """ - Get the locally cached files for the bucket. - """ - if self.preload_metadata and not self._entries: - self._entries = { - entry.key: entry - for entry in self.bucket.objects.filter(Prefix=self.location) - } - return self._entries - def _get_access_keys(self): """ Gets the access keys to use when accessing S3. 
If none is @@ -470,57 +380,6 @@ def _get_security_token(self): security_token = self.security_token or lookup_env(S3Boto3Storage.security_token_names) return security_token - def _get_or_create_bucket(self, name): - """ - Retrieves a bucket if it exists, otherwise creates it. - """ - bucket = self.connection.Bucket(name) - if self.auto_create_bucket: - try: - # Directly call head_bucket instead of bucket.load() because head_bucket() - # fails on wrong region, while bucket.load() does not. - bucket.meta.client.head_bucket(Bucket=name) - except ClientError as err: - if err.response['ResponseMetadata']['HTTPStatusCode'] == 301: - raise ImproperlyConfigured("Bucket %s exists, but in a different " - "region than we are connecting to. Set " - "the region to connect to by setting " - "AWS_S3_REGION_NAME to the correct region." % name) - - elif err.response['ResponseMetadata']['HTTPStatusCode'] == 404: - # Notes: When using the us-east-1 Standard endpoint, you can create - # buckets in other regions. The same is not true when hitting region specific - # endpoints. However, when you create the bucket not in the same region, the - # connection will fail all future requests to the Bucket after the creation - # (301 Moved Permanently). - # - # For simplicity, we enforce in S3Boto3Storage that any auto-created - # bucket must match the region that the connection is for. - # - # Also note that Amazon specifically disallows "us-east-1" when passing bucket - # region names; LocationConstraint *must* be blank to create in US Standard. - if not hasattr(django_settings, 'AWS_BUCKET_ACL'): - warnings.warn( - "The default behavior of S3Boto3Storage is insecure and will change " - "in django-storages 1.10. By default new buckets are saved with an ACL of " - "'public-read' (globally publicly readable). Version 1.10 will default to " - "Amazon's default of the bucket owner. 
To opt into this behavior this warning " - "set AWS_BUCKET_ACL = None, otherwise to silence this warning explicitly set " - "AWS_BUCKET_ACL." - ) - if self.bucket_acl: - bucket_params = {'ACL': self.bucket_acl} - else: - bucket_params = {} - region_name = self.connection.meta.client.meta.region_name - if region_name != 'us-east-1': - bucket_params['CreateBucketConfiguration'] = { - 'LocationConstraint': region_name} - bucket.create(**bucket_params) - else: - raise - return bucket - def _clean_name(self, name): """ Cleans the name so that Windows style paths work @@ -588,9 +447,6 @@ def _save(self, name, content): params['ContentEncoding'] = 'gzip' obj = self.bucket.Object(name) - if self.preload_metadata: - self._entries[name] = obj - content.seek(0, os.SEEK_SET) obj.upload_fileobj(content, ExtraArgs=params) return cleaned_name @@ -599,13 +455,8 @@ def delete(self, name): name = self._normalize_name(self._clean_name(name)) self.bucket.Object(name).delete() - if name in self._entries: - del self._entries[name] - def exists(self, name): name = self._normalize_name(self._clean_name(name)) - if self.entries: - return name in self.entries try: self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name) return True @@ -632,23 +483,11 @@ def listdir(self, name): def size(self, name): name = self._normalize_name(self._clean_name(name)) - if self.entries: - entry = self.entries.get(name) - if entry: - return entry.size if hasattr(entry, 'size') else entry.content_length - return 0 return self.bucket.Object(name).content_length def _get_write_parameters(self, name, content=None): params = {} - if self.encryption: - params['ServerSideEncryption'] = 'AES256' - if self.reduced_redundancy: - params['StorageClass'] = 'REDUCED_REDUNDANCY' - if self.default_acl: - params['ACL'] = self.default_acl - _type, encoding = mimetypes.guess_type(name) content_type = getattr(content, 'content_type', None) content_type = content_type or _type or self.default_content_type @@ 
-677,11 +516,7 @@ def get_modified_time(self, name): USE_TZ is True, otherwise returns a naive datetime in the local timezone. """ name = self._normalize_name(self._clean_name(name)) - entry = self.entries.get(name) - # only call self.bucket.Object() if the key is not found - # in the preloaded metadata. - if entry is None: - entry = self.bucket.Object(name) + entry = self.bucket.Object(name) if setting('USE_TZ'): # boto3 returns TZ aware timestamps return entry.last_modified diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index ce4f6f1b7..8236c3b64 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -1,7 +1,6 @@ import gzip import pickle import threading -import warnings from datetime import datetime from textwrap import dedent from unittest import mock, skipIf @@ -112,7 +111,6 @@ def test_storage_save(self): content, ExtraArgs={ 'ContentType': 'text/plain', - 'ACL': self.storage.default_acl, } ) @@ -122,7 +120,7 @@ def test_storage_save_with_acl(self): """ name = 'test_storage_save.txt' content = ContentFile('new content') - self.storage.default_acl = 'private' + self.storage.object_parameters = {'ACL': 'private'} self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) @@ -150,7 +148,6 @@ def test_content_type(self): content, ExtraArgs={ 'ContentType': 'image/jpeg', - 'ACL': self.storage.default_acl, } ) @@ -167,7 +164,6 @@ def test_storage_save_gzipped(self): ExtraArgs={ 'ContentType': 'application/octet-stream', 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, } ) @@ -185,7 +181,6 @@ def test_storage_save_gzip(self): ExtraArgs={ 'ContentType': 'text/css', 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, } ) args, kwargs = obj.upload_fileobj.call_args @@ -213,7 +208,6 @@ def test_storage_save_gzip_twice(self): ExtraArgs={ 'ContentType': 'text/css', 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, } ) args, kwargs = obj.upload_fileobj.call_args @@ -238,9 +232,11 @@ 
def test_storage_open_write(self): content = 'new content' # Set the encryption flag used for multipart uploads - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' + self.storage.object_parameters = { + 'ServerSideEncryption': 'AES256', + 'StorageClass': 'REDUCED_REDUNDANCY', + 'ACL': 'public-read', + } file = self.storage.open(name, 'w') self.storage.bucket.Object.assert_called_with(name) @@ -275,9 +271,10 @@ def test_storage_open_no_write(self): name = 'test_open_no_write.txt' # Set the encryption flag used for puts - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' + self.storage.object_parameters = { + 'ServerSideEncryption': 'AES256', + 'StorageClass': 'REDUCED_REDUNDANCY', + } file = self.storage.open(name, 'w') self.storage.bucket.Object.assert_called_with(name) @@ -294,7 +291,6 @@ def test_storage_open_no_write(self): obj.load.assert_called_once_with() obj.put.assert_called_once_with( - ACL='public-read', Body=b"", ContentType='text/plain', ServerSideEncryption='AES256', @@ -308,9 +304,10 @@ def test_storage_open_no_overwrite_existing(self): name = 'test_open_no_overwrite_existing.txt' # Set the encryption flag used for puts - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' + self.storage.object_parameters = { + 'ServerSideEncryption': 'AES256', + 'StorageClass': 'REDUCED_REDUNDANCY', + } file = self.storage.open(name, 'w') self.storage.bucket.Object.assert_called_with(name) @@ -332,9 +329,10 @@ def test_storage_write_beyond_buffer_size(self): name = 'test_open_for_writïng_beyond_buffer_size.txt' # Set the encryption flag used for multipart uploads - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' + self.storage.object_parameters = { + 'ServerSideEncryption': 'AES256', + 'StorageClass': 
'REDUCED_REDUNDANCY', + } file = self.storage.open(name, 'w') self.storage.bucket.Object.assert_called_with(name) @@ -345,7 +343,6 @@ def test_storage_write_beyond_buffer_size(self): # Initiate the multipart upload file.write('') obj.initiate_multipart_upload.assert_called_with( - ACL='public-read', ContentType='text/plain', ServerSideEncryption='AES256', StorageClass='REDUCED_REDUNDANCY' @@ -387,41 +384,6 @@ def test_storage_write_beyond_buffer_size(self): ]} ) - def test_auto_creating_bucket(self): - self.storage.auto_create_bucket = True - Bucket = mock.MagicMock() - self.storage._connections.connection.Bucket.return_value = Bucket - self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' - - Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, - 'ResponseMetadata': {'HTTPStatusCode': 404}}, - 'head_bucket') - self.storage._get_or_create_bucket('testbucketname') - Bucket.create.assert_called_once_with( - ACL='public-read', - CreateBucketConfiguration={ - 'LocationConstraint': 'sa-east-1', - } - ) - - def test_auto_creating_bucket_with_acl(self): - self.storage.auto_create_bucket = True - self.storage.bucket_acl = 'public-read' - Bucket = mock.MagicMock() - self.storage._connections.connection.Bucket.return_value = Bucket - self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' - - Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, - 'ResponseMetadata': {'HTTPStatusCode': 404}}, - 'head_bucket') - self.storage._get_or_create_bucket('testbucketname') - Bucket.create.assert_called_once_with( - ACL='public-read', - CreateBucketConfiguration={ - 'LocationConstraint': 'sa-east-1', - } - ) - def test_storage_exists(self): self.assertTrue(self.storage.exists("file.txt")) self.storage.connection.meta.client.head_object.assert_called_with( @@ -440,11 +402,6 @@ def test_storage_exists_false(self): Key='file.txt', ) - def test_storage_exists_doesnt_create_bucket(self): - with 
mock.patch.object(self.storage, '_get_or_create_bucket') as method: - self.storage.exists('file.txt') - self.assertFalse(method.called) - def test_storage_delete(self): self.storage.delete("path/to/file.txt") self.storage.bucket.Object.assert_called_with('path/to/file.txt') @@ -659,65 +616,6 @@ def test_location_leading_slash(self): with self.assertRaises(ImproperlyConfigured, msg=msg): s3boto3.S3Boto3Storage(location='/') - def test_deprecated_acl(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(acl='private') - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "The acl argument of S3Boto3Storage is deprecated. Use argument " - "default_acl or setting AWS_DEFAULT_ACL instead. The acl argument " - "will be removed in version 1.10." - ) - assert str(w[-1].message) == message - - def test_deprecated_bucket(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(bucket='django') - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "The bucket argument of S3Boto3Storage is deprecated. Use argument " - "bucket_name or setting AWS_STORAGE_BUCKET_NAME instead. The bucket " - "argument will be removed in version 1.10." - ) - assert str(w[-1].message) == message - - def test_deprecated_default_acl(self): - with warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage() - assert len(w) == 1 - message = ( - "The default behavior of S3Boto3Storage is insecure and will change " - "in django-storages 1.10. By default files and new buckets are saved " - "with an ACL of 'public-read' (globally publicly readable). Version 1.10 will " - "default to using the bucket's ACL. To opt into the new behavior set " - "AWS_DEFAULT_ACL = None, otherwise to silence this warning explicitly " - "set AWS_DEFAULT_ACL." 
- ) - assert str(w[-1].message) == message - - def test_deprecated_autocreate_bucket(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(auto_create_bucket=True) - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "Automatic bucket creation will be removed in version 1.10. It encourages " - "using overly broad credentials with this library. Either create it before " - "manually or use one of a myriad of automatic configuration management tools. " - "Unset AWS_AUTO_CREATE_BUCKET (it defaults to False) to silence this warning." - ) - assert str(w[-1].message) == message - - def test_deprecated_default_acl_override_class_variable(self): - class MyStorage(s3boto3.S3Boto3Storage): - default_acl = "private" - - with warnings.catch_warnings(record=True) as w: - MyStorage() - assert len(w) == 0 - def test_override_settings(self): with override_settings(AWS_LOCATION='foo1'): storage = s3boto3.S3Boto3Storage()