Merge remote-tracking branch 'upstream/master'
Kevin Gutiérrez committed Nov 4, 2021
2 parents 78aedb2 + a3c3f99 commit cda1579
Showing 9 changed files with 72 additions and 19 deletions.
35 changes: 34 additions & 1 deletion CHANGELOG.rst
@@ -1,6 +1,38 @@
django-storages CHANGELOG
=========================

1.12.3 (2021-10-29)
*******************

General
-------

- Add support for Python 3.10 (`#1078`_)

S3
--

- Re-raise non-404 errors in ``.exists()`` (`#1084`_, `#1085`_)

Azure
-----

- Fix using ``AZURE_CUSTOM_DOMAIN`` with an account key credential (`#1082`_, `#1083`_)

SFTP
----

- Catch ``FileNotFoundError`` instead of ``OSError`` in ``.exists()`` to prevent swallowing ``socket.timeout`` exceptions (`#1064`_, `#1087`_)


.. _#1078: https://github.com/jschneier/django-storages/pull/1078
.. _#1084: https://github.com/jschneier/django-storages/issues/1084
.. _#1085: https://github.com/jschneier/django-storages/pull/1085
.. _#1082: https://github.com/jschneier/django-storages/issues/1082
.. _#1083: https://github.com/jschneier/django-storages/pull/1083
.. _#1064: https://github.com/jschneier/django-storages/issues/1064
.. _#1087: https://github.com/jschneier/django-storages/pull/1087

1.12.2 (2021-10-16)
*******************

@@ -31,10 +63,11 @@ S3
1.12 (2021-10-06)
*****************

General
-------
- Add support for Django 3.2 (`#1046`_, `#1042`_, `#1005`_)
- Replace Travis CI with GitHub actions (`#1051`_)


S3
--

6 changes: 4 additions & 2 deletions docs/backends/gcloud.rst
@@ -84,10 +84,12 @@ Your Google Cloud project ID. If unset, falls back to the default
inferred from the environment.

``GS_IS_GZIPPED`` (optional: default is ``False``)
Whether or not to enable gzipping of content types specified by ``GZIP_CONTENT_TYPES``

Whether or not to enable gzipping of content types specified by ``GZIP_CONTENT_TYPES``

``GZIP_CONTENT_TYPES`` (optional: default is ``text/css``, ``text/javascript``, ``application/javascript``, ``application/x-javascript``, ``image/svg+xml``)
When ``GS_IS_GZIPPED`` is set to ``True`` the content types which will be gzipped

When ``GS_IS_GZIPPED`` is set to ``True`` the content types which will be gzipped

.. _gs-creds:

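The gzip settings documented above translate into Django settings along these lines; this is a minimal sketch, and the ``GZIP_CONTENT_TYPES`` values shown are simply the stated defaults:

    # settings.py -- minimal sketch of the gzip options documented above
    GS_IS_GZIPPED = True  # default is False; enables gzipping for the types below
    GZIP_CONTENT_TYPES = (
        'text/css',
        'text/javascript',
        'application/javascript',
        'application/x-javascript',
        'image/svg+xml',
    )
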
2 changes: 1 addition & 1 deletion storages/__init__.py
@@ -1 +1 @@
__version__ = '1.12.2'
__version__ = '1.12.3'
5 changes: 4 additions & 1 deletion storages/backends/azure_storage.py
@@ -158,7 +158,10 @@ def _get_service_client(self):

credential = None
if self.account_key:
credential = self.account_key
credential = {
"account_name": self.account_name,
"account_key": self.account_key,
}
elif self.sas_token:
credential = self.sas_token
elif self.token_credential:
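
For context on the ``azure_storage.py`` change above: with ``AZURE_CUSTOM_DOMAIN`` set, the account name cannot be derived from the endpoint URL, so the backend now hands ``BlobServiceClient`` an explicit shared-key credential mapping (account name plus key) instead of the bare key string. A minimal settings sketch of the configuration this fixes; all values are placeholders:

    # settings.py -- hypothetical values, shown only to illustrate the fixed code path
    DEFAULT_FILE_STORAGE = 'storages.backends.azure_storage.AzureStorage'
    AZURE_ACCOUNT_NAME = 'myaccount'
    AZURE_ACCOUNT_KEY = '<base64 account key>'
    AZURE_CONTAINER = 'media'
    AZURE_CUSTOM_DOMAIN = 'cdn.example.com'  # previously broke account-key auth (#1082)
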
11 changes: 4 additions & 7 deletions storages/backends/s3boto3.py
@@ -465,18 +465,15 @@ def exists(self, name):
self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name)
return True
except ClientError as error:
if error.response.get('Error', {}).get('Code') == '404':
if error.response['ResponseMetadata']['HTTPStatusCode'] == 404:
return False

# Some other error was encountered. As `get_available_name` calls this,
# we have to assume the filename is unavailable. If we return true due to some
# other error, we'd overwrite a file.
return True
# Some other error was encountered. Re-raise it.
raise

def listdir(self, name):
path = self._normalize_name(self._clean_name(name))
# The path needs to end with a slash, but if the root is empty, leave
# it.
# The path needs to end with a slash, but if the root is empty, leave it.
if path and not path.endswith('/'):
path += '/'

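How the revised ``exists()`` surfaces to callers: a 404 from ``HeadObject`` still returns ``False``, but any other ``ClientError`` (for example a 403 from insufficient permissions) now propagates instead of being reported as "file exists". A minimal sketch for callers that want to restore the old assume-unavailable behaviour; the helper name and policy are hypothetical:

    from botocore.exceptions import ClientError

    def exists_or_assume_taken(storage, name):
        # With the change above, non-404 errors are re-raised; this wrapper
        # reinstates the previous behaviour of treating them as "name taken".
        try:
            return storage.exists(name)
        except ClientError:
            return True
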
2 changes: 1 addition & 1 deletion storages/backends/sftpstorage.py
@@ -150,7 +150,7 @@ def exists(self, name):
try:
self.sftp.stat(self._remote_path(name))
return True
except OSError:
except FileNotFoundError:
return False

def _isdir_attr(self, item):
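The reasoning behind the narrower ``except`` clause above: ``socket.timeout`` is a subclass of ``OSError``, so catching ``OSError`` silently turned connection timeouts into "file does not exist". ``FileNotFoundError``, which the SFTP client raises for a missing remote path, does not match a timeout, so timeouts now propagate to the caller. A quick check of the hierarchy:

    import socket

    # socket.timeout is an OSError subclass, so `except OSError` swallowed it;
    # it is not a FileNotFoundError, so the new handler lets it propagate.
    assert issubclass(socket.timeout, OSError)
    assert not issubclass(socket.timeout, FileNotFoundError)
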
2 changes: 1 addition & 1 deletion tests/test_azure.py
@@ -232,7 +232,7 @@ def test_container_client_params_account_key(self):
self.assertEqual(storage.client, client_mock)
bsc_mocked.assert_called_once_with(
'https://foo_domain',
credential='foo_key')
credential={'account_name': 'foo_name', 'account_key': 'foo_key'})

def test_container_client_params_sas_token(self):
storage = azure_storage.AzureStorage()
14 changes: 12 additions & 2 deletions tests/test_s3boto3.py
@@ -477,15 +477,25 @@ def test_storage_exists(self):

def test_storage_exists_false(self):
self.storage.connection.meta.client.head_object.side_effect = ClientError(
{'Error': {'Code': '404', 'Message': 'Not Found'}},
{'Error': {}, 'ResponseMetadata': {'HTTPStatusCode': 404}},
'HeadObject',
)
self.assertFalse(self.storage.exists("file.txt"))
self.assertFalse(self.storage.exists('file.txt'))
self.storage.connection.meta.client.head_object.assert_called_with(
Bucket=self.storage.bucket_name,
Key='file.txt',
)

def test_storage_exists_other_error_reraise(self):
self.storage.connection.meta.client.head_object.side_effect = ClientError(
{'Error': {}, 'ResponseMetadata': {'HTTPStatusCode': 403}},
'HeadObject',
)
with self.assertRaises(ClientError) as cm:
self.storage.exists('file.txt')

self.assertEqual(cm.exception.response['ResponseMetadata']['HTTPStatusCode'], 403)

def test_storage_delete(self):
self.storage.delete("path/to/file.txt")
self.storage.bucket.Object.assert_called_with('path/to/file.txt')
14 changes: 11 additions & 3 deletions tests/test_sftp.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import io
import os
import socket
import stat
from datetime import datetime
from unittest.mock import MagicMock, patch
@@ -56,7 +57,7 @@ def test_mkdir(self, mock_sftp):
self.assertEqual(mock_sftp.mkdir.call_args[0], ('foo',))

@patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
'stat.side_effect': (IOError(), True)
'stat.side_effect': (FileNotFoundError(), True)
})
def test_mkdir_parent(self, mock_sftp):
self.storage._mkdir('bar/foo')
@@ -69,7 +70,7 @@ def test_save(self, mock_sftp):
self.assertTrue(mock_sftp.open.return_value.write.called)

@patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
'stat.side_effect': (IOError(), True)
'stat.side_effect': (FileNotFoundError(), True)
})
def test_save_in_subdir(self, mock_sftp):
self.storage._save('bar/foo', File(io.BytesIO(b'foo'), 'foo'))
@@ -86,11 +87,18 @@ def test_exists(self, mock_sftp):
self.assertTrue(self.storage.exists('foo'))

@patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
'stat.side_effect': IOError()
'stat.side_effect': FileNotFoundError()
})
def test_not_exists(self, mock_sftp):
self.assertFalse(self.storage.exists('foo'))

@patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
'stat.side_effect': socket.timeout()
})
def test_not_exists_timeout(self, mock_sftp):
with self.assertRaises(socket.timeout):
self.storage.exists('foo')

@patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
'listdir_attr.return_value':
[MagicMock(filename='foo', st_mode=stat.S_IFDIR),
