I am trying to handle a 404 error in Scrapy. The following is my code, but I do not understand how to get the response.status code 404 in Python.
name="HalfScrap"
allowed_domains=["www.sample.co.uk"]
start_urls=["https://www.sample.co.uk/Products",]
def parse(self,response):
cursor=self.con.cursor()
cursor.execute("Select top 1 Url from Category where Site=?",('Sample'))
rows=cursor.fetchall()
for row in rows:
url="https://www.sample.co.uk/P/Components/system.com/1234"+"?x=12&p_style=list&p_productsPerPage=2000"
yield Request(url,callback=self.HalfProduct)
def HalfProduct(self,response):
if response.status=='404':
print "statusCode=",response.status
try:
sel=Selector(response)
rows=sel.xpath('//table[@class="listTable"]/tr[starts-with(@class,"listTableTr")]')
len(rows)
Items=[]
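
From what I can tell from the Scrapy documentation, non-2xx responses are filtered out by HttpErrorMiddleware before they ever reach the spider callback, so I suspect HalfProduct is never even called for a 404. Below is a minimal, self-contained sketch of what I think I need, based on that assumption (the class name, the hard-coded URL and the placeholder domain are only for illustration, and the database lookup is left out). Is handle_httpstatus_list the right way to do this?

import scrapy

class StatusCheckSpider(scrapy.Spider):
    # Hypothetical spider, just to illustrate handle_httpstatus_list
    name = "StatusCheck"
    allowed_domains = ["www.sample.co.uk"]
    start_urls = ["https://www.sample.co.uk/Products"]

    # Let 404 responses reach the callback instead of being dropped
    # by HttpErrorMiddleware (the default for non-2xx statuses)
    handle_httpstatus_list = [404]

    def parse(self, response):
        url = ("https://www.sample.co.uk/P/Components/system.com/1234"
               "?x=12&p_style=list&p_productsPerPage=2000")
        yield scrapy.Request(url, callback=self.check_status)

    def check_status(self, response):
        # response.status is an integer, so compare with 404, not '404'
        if response.status == 404:
            print("statusCode=%s" % response.status)
            return
        # ...normal parsing of 2xx responses would go here...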