I have code that scrapes the oddsportal website.
Sometimes while scraping I get ValueError: No tables found,
but when I manually refresh the page in the browser, it loads fine.
How do I do that refresh/retry via code?
My code is as follows:
import pandas as pd
from selenium import webdriver
from bs4 import BeautifulSoup as bs

browser = webdriver.Chrome()


class GameData:
    def __init__(self):
        self.date = []
        self.time = []
        self.game = []
        self.score = []
        self.home_odds = []
        self.draw_odds = []
        self.away_odds = []
        self.country = []
        self.league = []


def parse_data(url):
    browser.get(url)
    df = pd.read_html(browser.page_source, header=0)[0]
    html = browser.page_source
    soup = bs(html, "lxml")
    cont = soup.find('div', {'id': 'wrap'})
    content = cont.find('div', {'id': 'col-content'})
    # class and id filters belong in a single attrs dict; the third
    # positional argument of find() is recursive, not another filter
    content = content.find('table', {'class': 'table-main', 'id': 'tournamentTable'})
    main = content.find('th', {'class': 'first2 tl'})
    if main is None:
        return None
    count = main.findAll('a')
    country = count[1].text
    league = count[2].text
    game_data = GameData()
    game_date = None
    for row in df.itertuples():
        if not isinstance(row[1], str):
            continue
        elif ':' not in row[1]:
            game_date = row[1].split('-')[0]
            continue
        game_data.date.append(game_date)
        game_data.time.append(row[1])
        game_data.game.append(row[2])
        game_data.score.append(row[3])
        game_data.home_odds.append(row[4])
        game_data.draw_odds.append(row[5])
        game_data.away_odds.append(row[6])
        game_data.country.append(country)
        game_data.league.append(league)
    return game_data
urls = {
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/2/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/3/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/4/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/5/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/6/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/7/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/8/",
    "https://www.oddsportal.com/soccer/africa/africa-cup-of-nations/results/#/page/9/",
}
if __name__ == '__main__':
    results = None
    for url in urls:
        try:
            game_data = parse_data(url)
            if game_data is None:
                continue
            result = pd.DataFrame(game_data.__dict__)
            if results is None:
                results = result
            else:
                results = results.append(result, ignore_index=True)
        except ValueError:
            game_data = parse_data(url)
            if game_data is None:
                continue
            result = pd.DataFrame(game_data.__dict__)
            if results is None:
                results = result
            else:
                results = results.append(result, ignore_index=True)
        except AttributeError:
            game_data = parse_data(url)
            if game_data is None:
                continue
            result = pd.DataFrame(game_data.__dict__)
            if results is None:
                results = result
            else:
                results = results.append(result, ignore_index=True)
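One thing I noticed is that pd.read_html runs immediately after browser.get, so maybe the table simply has not rendered yet when I parse. I considered waiting for it explicitly before parsing, something like this sketch (parse_data_waiting is a made-up name; tournamentTable is the table id parse_data already looks for above):

# Sketch only: wait for the odds table to appear before parsing.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def parse_data_waiting(url):
    browser.get(url)
    # Block for up to 10 seconds until the tournament table is in the DOM
    WebDriverWait(browser, 10).until(
        EC.presence_of_element_located((By.ID, "tournamentTable"))
    )
    # ... then parse browser.page_source exactly as in parse_data above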
For reference, this is the traceback I sometimes get from the original code:
Traceback (most recent call last):
  File "C:/Users/harsh/AppData/Roaming/JetBrains/PyCharmCE2021.1/scratches/scratch_29.py", line 10098, in <module>
    game_data = parse_data(url)
  File "C:/Users/harsh/AppData/Roaming/JetBrains/PyCharmCE2021.1/scratches/scratch_29.py", line 37, in parse_data
    df = pd.read_html(browser.page_source, header=0)[0]
  File "C:\Python\lib\site-packages\pandas\util\_decorators.py", line 299, in wrapper
    return func(*args, **kwargs)
  File "C:\Python\lib\site-packages\pandas\io\html.py", line 1100, in read_html
    displayed_only=displayed_only,
  File "C:\Python\lib\site-packages\pandas\io\html.py", line 913, in _parse
    raise retained
  File "C:\Python\lib\site-packages\pandas\io\html.py", line 893, in _parse
    tables = p.parse_tables()
  File "C:\Python\lib\site-packages\pandas\io\html.py", line 213, in parse_tables
    tables = self._parse_tables(self._build_doc(), self.match, self.attrs)
  File "C:\Python\lib\site-packages\pandas\io\html.py", line 543, in _parse_tables
    raise ValueError("No tables found")
ValueError: No tables found
My best guess is that I have not handled ValueError: No tables found
correctly in my code. How do I handle this?
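For what it's worth, this is the kind of retry I had in mind (a rough sketch; parse_data_with_retry, max_retries and delay are names I made up):

import time

def parse_data_with_retry(url, max_retries=3, delay=3):
    # Calling parse_data again re-issues browser.get(url), which is
    # effectively the same as the manual refresh that fixes the page.
    for attempt in range(max_retries):
        try:
            return parse_data(url)
        except ValueError:  # "No tables found"
            time.sleep(delay)
    return None

Is a retry loop like this the right approach, or should I instead wait for something specific on the page before calling pd.read_html?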