
I'm using scrapy-splash to scrape a car dealership website that uses JavaScript to load the results, but I keep getting a 504 Gateway Time-out error.

I'm running Docker on Windows 10, and I don't think the problem is the Docker configuration because I can scrape another site with the same code.

import scrapy
from scrapy_splash import SplashRequest
from scrapy.loader import ItemLoader
from ..items import AutoItem


class Main_Spider(scrapy.Spider):
    name = 'dealers'
    allowed_domains = ['audidowntowntoronto.com']

    script = '''
    function main(splash)
        local scroll_delay = 3
        local is_down = splash:jsfunc("function() { return((window.innerHeight + window.scrollY) >= document.body.offsetHeight);}")
        local scroll_to = splash:jsfunc("window.scrollTo")
        local get_body_height = splash:jsfunc("function() {return document.body.scrollHeight;}")
        assert(splash:go(splash.args.url))
        while not is_down() do
            scroll_to(0, get_body_height())
            splash:wait(scroll_delay)
        end
        return splash:html()
    end
    '''

    def start_requests(self):
        yield SplashRequest(
            url="http://audidowntowntoronto.com/all/",
            callback=self.parse,
            endpoint="execute",
            args={'lua_source': self.script},
        )

    def parse(self, response):
        url = response.xpath('//*[@itemprop="url"]/@href').extract()
        print(url)

1 Answer


You don't need a Lua script here. Use the render.json endpoint with the additional arguments shown below: 'html': 1 includes the rendered HTML in the response, 'wait': 5 gives the JavaScript five seconds to load the results, and 'render_all': 1 renders the full page rather than just the viewport.

yield SplashRequest(
    url="http://audidowntowntoronto.com/all/",
    callback=self.parse,
    args={
        'html': 1,
        'wait': 5,
        'render_all': 1,
    },
    endpoint='render.json',
)
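Note that render.json returns JSON rather than an HTML page; scrapy-splash exposes the decoded payload as response.data, so the markup requested via 'html': 1 can be fed to a Selector. A minimal sketch of the callback under that assumption, reusing the XPath from the question:

from scrapy.selector import Selector

def parse(self, response):
    # Sketch: assumes the default SplashJsonResponse from scrapy-splash.
    # response.data is the decoded JSON body of the render.json reply;
    # the 'html' key holds the fully rendered page markup.
    html = response.data['html']
    urls = Selector(text=html).xpath('//*[@itemprop="url"]/@href').extract()
    print(urls)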
bonifacio_kid