Test the parse command locally instead of against an external url
curita committed Jan 19, 2015
1 parent f3110aa commit d68615a
Showing 2 changed files with 21 additions and 9 deletions.
scrapy/utils/testsite.py (2 additions & 0 deletions)
@@ -7,10 +7,12 @@
 class SiteTest(object):
 
     def setUp(self):
+        super(SiteTest, self).setUp()
         self.site = reactor.listenTCP(0, test_site(), interface="127.0.0.1")
         self.baseurl = "http://localhost:%d/" % self.site.getHost().port
 
     def tearDown(self):
+        super(SiteTest, self).tearDown()
         self.site.stopListening()
 
     def url(self, path):
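
For context: SiteTest starts a local Twisted web site on a random port in setUp() and shuts it down in tearDown(), so a test can request self.url('/html') instead of an external site. A minimal sketch of using the helper on its own, assuming url() simply joins the given path onto baseurl (the LocalSiteExample class below is illustrative and not part of this commit):

from twisted.trial import unittest

from scrapy.utils.testsite import SiteTest


class LocalSiteExample(SiteTest, unittest.TestCase):
    """Hypothetical test case showing the local test-server helper."""

    def test_url_is_local(self):
        # setUp() bound the server to 127.0.0.1 on a random free port,
        # so every URL built with self.url() points at localhost.
        self.assertTrue(self.url('/html').startswith('http://localhost:'))
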
tests/test_commands.py (19 additions & 9 deletions)
@@ -8,9 +8,13 @@
 from tempfile import mkdtemp
 
 from twisted.trial import unittest
+from twisted.internet import defer
 
 from scrapy.utils.python import retry_on_eintr
 from scrapy.utils.test import get_testenv
+from scrapy.utils.testsite import SiteTest
+from scrapy.utils.testproc import ProcessTest
 
+
 class ProjectTest(unittest.TestCase):
     project_name = 'testproject'
@@ -177,7 +181,9 @@ def test_runspider_unable_to_load(self):
         self.assert_("Unable to load" in log)
 
 
-class ParseCommandTest(CommandTest):
+class ParseCommandTest(ProcessTest, SiteTest, CommandTest):
+
+    command = 'parse'
 
     def setUp(self):
         super(ParseCommandTest, self).setUp()
@@ -217,17 +223,21 @@ def process_item(self, item, spider):
 ITEM_PIPELINES = {'%s.pipelines.MyPipeline': 1}
 """ % self.project_name)
 
+    @defer.inlineCallbacks
     def test_spider_arguments(self):
-        p = self.proc('parse', '--spider', self.spider_name, '-a', 'test_arg=1',
-                      '-c', 'parse', 'http://scrapinghub.com')
-        log = p.stderr.read()
-        self.assert_("[parse_spider] DEBUG: It Works!" in log, log)
+        _, _, stderr = yield self.execute(['--spider', self.spider_name,
+                                           '-a', 'test_arg=1',
+                                           '-c', 'parse',
+                                           self.url('/html')])
+        self.assert_("[parse_spider] DEBUG: It Works!" in stderr, stderr)
 
+    @defer.inlineCallbacks
     def test_pipelines(self):
-        p = self.proc('parse', '--spider', self.spider_name, '--pipelines',
-                      '-c', 'parse', 'http://scrapinghub.com')
-        log = p.stderr.read()
-        self.assert_("[scrapy] INFO: It Works!" in log, log)
+        _, _, stderr = yield self.execute(['--spider', self.spider_name,
+                                           '--pipelines',
+                                           '-c', 'parse',
+                                           self.url('/html')])
+        self.assert_("[scrapy] INFO: It Works!" in stderr, stderr)
 
 
 class BenchCommandTest(CommandTest):
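
The move from p.stderr.read() to yield self.execute([...]) also makes these tests asynchronous: ProcessTest runs the scrapy subcommand named by the class-level command attribute in a subprocess, and execute() returns a Deferred which, as the tuple unpacking above suggests, fires with the exit status, stdout, and stderr; hence the @defer.inlineCallbacks decorators. A minimal sketch of the same pattern with a different command, assuming command and execute() behave as they are used in this diff (VersionCommandExample is hypothetical):

from twisted.internet import defer
from twisted.trial import unittest

from scrapy.utils.testproc import ProcessTest


class VersionCommandExample(ProcessTest, unittest.TestCase):
    """Hypothetical test case; runs the 'version' subcommand in a subprocess."""

    command = 'version'

    @defer.inlineCallbacks
    def test_exits_cleanly(self):
        # execute() returns a Deferred, so the test method must be a
        # generator decorated with inlineCallbacks; trial waits for it.
        status, out, err = yield self.execute([])
        self.assertEqual(status, 0)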
