I am trying to web scrape a site and I keep getting the error message below. I have tried this script on other websites and it works fine, and I have searched for a solution but can't find one that works.

Traceback (most recent call last):
File "math-webscrape.py", line 8, in <module>
uClient = urlopen(math_url)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 223, in urlopen
return opener.open(url, data, timeout)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 532, in open
response = meth(req, response)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 570, in error
return self._call_chain(*args)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 504, in _call_chain
result = func(*args)
File "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
urllib.error.HTTPError: HTTP Error 403: Forbidden
Here is my script:
from urllib.request import urlopen
from bs4 import BeautifulSoup as soup
import json
math_url = 'https://aimath.org/textbooks/approved-textbooks/'
#opening up connection and grabbing page
uClient = urlopen(math_url)
page_html = uClient.read()
uClient.close()
#html parsing
page_soup = soup(page_html, "html.parser")
#grabs info for each textbook
containers = page_soup.find_all("p", {"class": "approved-book"})
data = []
for container in containers:
    item = {}
    item['type'] = "Textbook"
    item['title'] = container.a.text
    item['author'] = container.a.find_next_sibling(text=True).strip()
    item['link'] = container.a["href"]
    item['source'] = "College Open Textbooks"
    data.append(item)  # add the item to the list
with open("./json/cot.json", "w") as writeJSON:
json.dump(data, writeJSON, ensure_ascii=False)
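For context, the suggestion I keep finding in my searching is that some servers return 403 because they reject urllib's default "Python-urllib/3.x" User-Agent, and that you should build a Request with a browser-like User-Agent header instead of passing the bare URL. A sketch of that approach (the User-Agent string here is just an example value, not something specific to this site):

from urllib.request import Request, urlopen

math_url = 'https://aimath.org/textbooks/approved-textbooks/'

# send a browser-like User-Agent in case the server is filtering on
# urllib's default agent string; 'Mozilla/5.0' is just a placeholder
req = Request(math_url, headers={'User-Agent': 'Mozilla/5.0'})
uClient = urlopen(req)
page_html = uClient.read()
uClient.close()

Whether a header like this is enough presumably depends on what the server is actually filtering on, which is what I'm trying to figure out.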