I had a .py file that used to work for web scraping gas prices from GasBuddy using Selenium and PhantomJS. It used to work, but it stopped all of a sudden. So I uninstalled and reinstalled Selenium and PhantomJS. I uninstalled and reinstalled Python 3 using Homebrew. I even erased my MacBook and reinstalled Sierra on it, and reinstalled everything else. What doesn't make sense to me is that I'm getting import errors for modules I'm not even importing.
import os
from selenium import webdriver
import warnings

# Silence Selenium's deprecation warnings (PhantomJS support is deprecated).
warnings.filterwarnings("ignore")

# NOTE(review): the ImportError in the pasted traceback is not caused by this
# script's logic. The traceback shows stdlib `email` importing `calendar` and
# resolving it to /Users/igisan/Desktop/scripts/calendar.py — a local file
# shadowing the standard-library `calendar` module. Rename that file (and
# remove any stale .pyc/__pycache__ next to it) to fix the import chain.

# Start a headless PhantomJS browser; discard its service log.
driver = webdriver.PhantomJS(service_log_path=os.path.devnull)
try:
    driver.get('https://www.gasbuddy.com/station/155967')
    # XPath to the <h1> holding the station's current price on the page.
    price = driver.find_element_by_xpath('//*[@id="container"]/div/div[3]/div/div/div/div[1]/div[2]/div[1]/div/div/div/div/h1')
    print(price.text)
finally:
    # Always shut the driver down, even on failure, so no orphaned
    # phantomjs process is left running after the script exits.
    driver.quit()
and the errors that I get are:
Traceback (most recent call last):
File "/Users/igisan/Desktop/scripts/gas.py", line 2, in <module>
from selenium import webdriver
File "/usr/local/lib/python3.7/site-packages/selenium/webdriver/__init__.py", line 18, in <module>
from .firefox.webdriver import WebDriver as Firefox # noqa
File "/usr/local/lib/python3.7/site-packages/selenium/webdriver/firefox/webdriver.py", line 29, in <module>
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
File "/usr/local/lib/python3.7/site-packages/selenium/webdriver/remote/webdriver.py", line 27, in <module>
from .remote_connection import RemoteConnection
File "/usr/local/lib/python3.7/site-packages/selenium/webdriver/remote/remote_connection.py", line 24, in <module>
import urllib3
File "/usr/local/lib/python3.7/site-packages/urllib3/__init__.py", line 8, in <module>
from .connectionpool import (
File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 11, in <module>
from .exceptions import (
File "/usr/local/lib/python3.7/site-packages/urllib3/exceptions.py", line 2, in <module>
from .packages.six.moves.http_client import (
File "/usr/local/lib/python3.7/site-packages/urllib3/packages/six.py", line 203, in load_module
mod = mod._resolve()
File "/usr/local/lib/python3.7/site-packages/urllib3/packages/six.py", line 115, in _resolve
return _import_module(self.mod)
File "/usr/local/lib/python3.7/site-packages/urllib3/packages/six.py", line 82, in _import_module
__import__(name)
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/http/client.py", line 71, in <module>
import email.parser
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/email/parser.py", line 12, in <module>
from email.feedparser import FeedParser, BytesFeedParser
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/email/feedparser.py", line 27, in <module>
from email._policybase import compat32
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/email/_policybase.py", line 9, in <module>
from email.utils import _has_surrogates
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/email/utils.py", line 33, in <module>
from email._parseaddr import quote
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/email/_parseaddr.py", line 16, in <module>
import time, calendar
File "/Users/igisan/Desktop/scripts/calendar.py", line 2, in <module>
from pyicloud import PyiCloudService
File "/usr/local/lib/python3.7/site-packages/pyicloud/__init__.py", line 2, in <module>
from pyicloud.base import PyiCloudService
File "/usr/local/lib/python3.7/site-packages/pyicloud/base.py", line 7, in <module>
import requests
File "/usr/local/lib/python3.7/site-packages/requests/__init__.py", line 46, in <module>
from .exceptions import RequestsDependencyWarning
File "/usr/local/lib/python3.7/site-packages/requests/exceptions.py", line 9, in <module>
from urllib3.exceptions import HTTPError as BaseHTTPError
ImportError: cannot import name 'HTTPError' from 'urllib3.exceptions' (/usr/local/lib/python3.7/site-packages/urllib3/exceptions.py)
I even updated everything. Uninstalling and reinstalling urllib3 didn't make a difference. I've also tried using other web-scraping methods (lxml, etc.) but to no avail. Any answers?