Skip to content

Commit

Permalink
Merge pull request scrapy#954 from kalessin/int-download-timeout
Browse files Browse the repository at this point in the history
Force DOWNLOAD_TIMEOUT to be read as a number (for example, so it can be passed via an environment variable)
  • Loading branch information
pablohoffman committed Nov 26, 2014
2 parents dedea72 + 7910fa0 commit c31fb87
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 3 deletions.
2 changes: 1 addition & 1 deletion scrapy/contrib/downloadermiddleware/downloadtimeout.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def __init__(self, timeout=180):

@classmethod
def from_crawler(cls, crawler):
    """Build the middleware from the crawler's settings.

    Reads DOWNLOAD_TIMEOUT with ``Settings.getfloat`` so a value
    supplied as a string (e.g. through an environment variable) is
    coerced to a number instead of being passed through raw.
    """
    o = cls(crawler.settings.getfloat('DOWNLOAD_TIMEOUT'))
    # Re-apply the timeout per spider once the spider is opened.
    crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
    return o

Expand Down
10 changes: 8 additions & 2 deletions tests/test_downloadermiddleware_downloadtimeout.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@

class DownloadTimeoutMiddlewareTest(unittest.TestCase):

def get_request_spider_mw(self, settings=None):
    """Return a ``(request, spider, middleware)`` triple for the tests.

    ``settings`` is an optional dict forwarded to ``get_crawler`` so
    individual tests can override settings such as DOWNLOAD_TIMEOUT;
    ``None`` (the default) keeps the previous no-settings behavior.
    """
    crawler = get_crawler(Spider, settings)
    spider = crawler._create_spider('foo')
    request = Request('http://scrapytest.org/')
    return request, spider, DownloadTimeoutMiddleware.from_crawler(crawler)
Expand All @@ -20,6 +20,12 @@ def test_default_download_timeout(self):
assert mw.process_request(req, spider) is None
self.assertEquals(req.meta.get('download_timeout'), 180)

def test_string_download_timeout(self):
    """A string DOWNLOAD_TIMEOUT (e.g. from an env var) is coerced to float."""
    req, spider, mw = self.get_request_spider_mw({'DOWNLOAD_TIMEOUT': '20.1'})
    mw.spider_opened(spider)
    assert mw.process_request(req, spider) is None
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(req.meta.get('download_timeout'), 20.1)

def test_spider_has_download_timeout(self):
req, spider, mw = self.get_request_spider_mw()
spider.download_timeout = 2
Expand Down

0 comments on commit c31fb87

Please sign in to comment.