From c37796e9fa54ccc4f9a41635644c724854ede06d Mon Sep 17 00:00:00 2001
From: Étienne Loks
Date: Mon, 12 Aug 2019 15:41:06 +0200
Subject: Recreate a Crawl process on each iteration

---
 commcrawler/scrapy.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/commcrawler/scrapy.py b/commcrawler/scrapy.py
index 1c26834..213f28d 100644
--- a/commcrawler/scrapy.py
+++ b/commcrawler/scrapy.py
@@ -318,7 +318,6 @@ def update_db_result(result_dct, values):
 
 def launch_crawl(crawl_item, excluded_domains=None):
     scrap_settings = settings.SCRAPPY_SETTINGS.copy()
-    process = CrawlerProcess(settings=scrap_settings)
     crawl_item.started = timezone.now()
     crawl_item.pre_crawl_ended = None
     crawl_item.crawl_ended = None
@@ -333,6 +332,7 @@ def launch_crawl(crawl_item, excluded_domains=None):
     page = 0
     page_number = total // 50
     while page >= page_number:
+        process = CrawlerProcess(settings=scrap_settings)
         idx = 0
         current_idx = page * 50
         while current_idx < total and idx < 50:
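
Background on why a CrawlerProcess might be recreated per batch rather than reused: Scrapy's CrawlerProcess wraps the Twisted reactor, and a reactor cannot be restarted once stopped within the same interpreter (a second call to process.start() raises ReactorNotRestartable). The sketch below is a minimal illustration of a batch-per-process pattern under that constraint, with each fresh CrawlerProcess run in its own child OS process; MySpider, crawl_batch, launch_all, and the targets list are hypothetical names for this sketch, not commcrawler's actual code.

    import multiprocessing

    import scrapy
    from scrapy.crawler import CrawlerProcess


    class MySpider(scrapy.Spider):
        # Hypothetical spider used only for this sketch.
        name = "my_spider"

        def __init__(self, start_url=None, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.start_urls = [start_url] if start_url else []

        def parse(self, response):
            # Placeholder parse callback.
            yield {"url": response.url}


    def crawl_batch(urls, settings):
        # Runs in a child process: a brand-new CrawlerProcess (and thus
        # a brand-new Twisted reactor) serves this batch only.
        process = CrawlerProcess(settings=settings)
        for url in urls:
            process.crawl(MySpider, start_url=url)
        process.start()  # blocks until every crawler in the batch finishes


    def launch_all(targets, settings, batch_size=50):
        # One child process per batch of 50, mirroring the paging loop
        # in launch_crawl above.
        for start in range(0, len(targets), batch_size):
            batch = targets[start:start + batch_size]
            worker = multiprocessing.Process(
                target=crawl_batch, args=(batch, settings))
            worker.start()
            worker.join()  # finish this batch before starting the next

Joining each worker before launching the next keeps at most one reactor alive at a time; the alternative, when batching is not needed, is to schedule every crawl on a single CrawlerProcess and call start() exactly once.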