I am getting the list of image URLs, but the images themselves are not being downloaded.
import scrapy
from ..items import GoogleItem


class spider(scrapy.Spider):
    name = 'google'
    start_urls = [
        "https://www.google.com/search?q=selena%20gomez&spell=1&ie=UTF-8&gbv=1&tbm=isch&sxsrf=ALeKk01ILeLRLtOpXXVyxa1PQYz38RnbRg%3A1586860191506&ei=n5CVXu2GHPGY4-EPreeA-AM&sa=N&btnG=Search"
    ]

    def parse(self, response):
        item = GoogleItem()
        img_url = []
        # Collect every image src from the results table into the item
        for image in response.xpath('/html/body/table[4]/tr/td/a/img/@src').extract():
            img_url.append(image)
        item["image_urls"] = img_url
        return item
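For reference, the extraction itself can be sanity-checked interactively with scrapy shell before running the full crawl; this is just a verification sketch, not part of the project:

scrapy shell "<the start URL from the spider above>"
>>> # should print the same list of image src URLs the crawl logs
>>> response.xpath('/html/body/table[4]/tr/td/a/img/@src').extract()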
In items.py I have this:
import scrapy


class GoogleItem(scrapy.Item):
    images = scrapy.Field()      # filled in by the images pipeline after download
    image_urls = scrapy.Field()  # URLs the pipeline should download
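As far as I understand, these two field names match what Scrapy's built-in ImagesPipeline expects by default: it reads image_urls and, once the files are downloaded, fills images with the results. A minimal sketch of that contract (the URL is hypothetical, just to show the shape):

item = GoogleItem()
item['image_urls'] = ['https://example.com/a.jpg']  # hypothetical URL
# After ImagesPipeline runs, item['images'] holds dicts with keys
# like 'url', 'path' (relative to IMAGES_STORE) and 'checksum'.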
In settings.py, this:
ITEM_PIPELINES = {
    'test_scrapy.pipelines.TestScrapyPipeline': 1,
}
IMAGE_STORE = "E:/New folder/image"
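For comparison, the image-download setup in the Scrapy documentation enables the built-in pipeline and spells the storage setting IMAGES_STORE (with an S), unlike my custom pipeline class and IMAGE_STORE above:

# standard image-download configuration from the Scrapy docs
ITEM_PIPELINES = {
    'scrapy.pipelines.images.ImagesPipeline': 1,
}
IMAGES_STORE = 'E:/New folder/image'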
This is what I get in my terminal (along with the list of image URLs):
{'downloader/request_bytes': 404, 'downloader/request_count': 1,
What did I do wrong?