You could use this PageFetcher subclass in place of the default PageFetcher. It trusts every SSL certificate and skips hostname verification, which solved all the issues for me. Be aware that this disables TLS validation entirely, so it's only appropriate for crawling content you don't need to trust.
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;

import javax.net.ssl.SSLContext;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.ssl.SSLContextBuilder;

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;

public class PageFetcher2 extends PageFetcher {

    public static final String DEFAULT_USER_AGENT =
            "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:45.0) Gecko/20100101 Firefox/45.0";

    public static final RequestConfig DEFAULT_REQUEST_CONFIG = RequestConfig.custom()
            .setConnectTimeout(30 * 1000)
            .setSocketTimeout(60 * 1000)
            .build();

    public PageFetcher2(CrawlConfig config)
            throws KeyManagementException, NoSuchAlgorithmException, KeyStoreException {
        super(config);

        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
        connectionManager.setMaxTotal(30);
        connectionManager.setDefaultMaxPerRoute(30);

        // Trust every certificate, regardless of the chain or auth type.
        SSLContext sslContext = new SSLContextBuilder()
                .loadTrustMaterial(null, (chain, authType) -> true)
                .build();

        // Replace the httpClient built by the parent constructor with one
        // that skips both certificate and hostname verification.
        httpClient = HttpClients.custom()
                .setSSLContext(sslContext)
                .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE)
                .setConnectionManager(connectionManager)
                .setUserAgent(DEFAULT_USER_AGENT)
                .setDefaultRequestConfig(DEFAULT_REQUEST_CONFIG)
                .build();
    }
}
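For completeness, here's roughly how you'd wire it into a crawl using crawler4j's standard setup. The storage folder, seed URL, and MyCrawler class are placeholders for your own values:

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;

public class CrawlerMain {
    public static void main(String[] args) throws Exception {
        CrawlConfig config = new CrawlConfig();
        config.setCrawlStorageFolder("/tmp/crawl"); // placeholder path

        // Use the relaxed-SSL fetcher instead of the stock PageFetcher.
        PageFetcher pageFetcher = new PageFetcher2(config);
        RobotstxtServer robotstxtServer = new RobotstxtServer(new RobotstxtConfig(), pageFetcher);

        CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);
        controller.addSeed("https://www.example.com/"); // placeholder seed
        controller.start(MyCrawler.class, 1); // MyCrawler = your WebCrawler subclass
    }
}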