change istresearch.com to something more stable
Hopefully this fixes the crawler integration tests, which have been failing. Tested locally and it works.
Madison Bahmer committed Jan 19, 2018
1 parent b12959d · commit 5064295
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions crawler/tests/online.py
@@ -35,7 +35,7 @@ class CustomSpider(LinkSpider):
class TestLinkSpider(TestCase):

example_feed = "{\"allowed_domains\":null,\"allow_regex\":null,\""\
"crawlid\":\"abc12345\",\"url\":\"istresearch.com\",\"expires\":0,\""\
"crawlid\":\"abc12345\",\"url\":\"http://dmoztools.net/\",\"expires\":0,\""\
"ts\":1461549923.7956631184,\"priority\":1,\"deny_regex\":null,\""\
"cookie\":null,\"attrs\":null,\"appid\":\"test\",\"spiderid\":\""\
"test-link\",\"useragent\":null,\"deny_extensions\":null,\"maxdepth\":0}"
@@ -75,7 +75,7 @@ def test_crawler_process(self):
d = runner.crawl(CustomSpider)
d.addBoth(lambda _: reactor.stop())
# add crawl to redis
key = "test-spider:istresearch.com:queue"
key = "test-spider:dmoztools.net:queue"
self.redis_conn.zadd(key, self.example_feed, -99)

# run the spider, give 20 seconds to see the url, crawl it,
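
For reference, the changed lines exercise the test's Redis setup: the example_feed JSON is pushed onto the spider's domain-scoped queue before the crawl runs, which is why the queue key changes together with the url field. Below is a minimal sketch of that setup, assuming a local Redis instance and redis-py 3+ (whose zadd takes a {member: score} mapping rather than the positional form used in the test itself); it is illustrative only, not part of the commit.

# Minimal sketch: seed the crawl request the same way the test does,
# assuming a local Redis server and redis-py 3+ (zadd takes a mapping).
import json
import redis

redis_conn = redis.StrictRedis(host="localhost", port=6379, db=0)

# Same feed the test serializes, with the new, more stable URL.
feed = {
    "allowed_domains": None, "allow_regex": None, "deny_regex": None,
    "crawlid": "abc12345", "url": "http://dmoztools.net/", "expires": 0,
    "ts": 1461549923.7956631184, "priority": 1, "cookie": None,
    "attrs": None, "appid": "test", "spiderid": "test-link",
    "useragent": None, "deny_extensions": None, "maxdepth": 0,
}

# The queue key embeds the URL's domain, so it changes along with the URL.
key = "test-spider:dmoztools.net:queue"
redis_conn.zadd(key, {json.dumps(feed): -99})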
