I was experimenting with go-colly using the code below, and it seems to crawl the same URL multiple times. How do I restrict it to visiting each URL only once?
I suspected that Parallelism: 2 was causing the duplicates; however, some of the URLs in the crawl log repeated more than ten times each, so concurrency alone doesn't seem to explain it.
This is reproducible across different websites.
go-colly is lean and great.
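To test the parallelism suspicion in isolation, the first variation I'd try is forcing a single worker. This is just a sketch of the changed limit rule (everything else as in the full program below), not something I've confirmed changes the behavior:

	c.Limit(&colly.LimitRule{
		DomainGlob:  "*",
		Parallelism: 1, // single worker: any remaining duplicates would not be a concurrency artifact
	})

Here is the full program: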
package main

import (
	"log"

	"github.com/gocolly/colly/v2" // or github.com/gocolly/colly for v1
)

func main() {
	c := colly.NewCollector(
		colly.AllowedDomains("www.coursera.org"),
		colly.Async(true),
	)

	c.Limit(&colly.LimitRule{
		DomainGlob:  "*",
		Parallelism: 2,
	})

	// Follow every link found on the page.
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		link := e.Attr("href")
		e.Request.Visit(link)
	})

	pageCount := 0

	// Stash the request URL in the context so the response handler can log it.
	c.OnRequest(func(r *colly.Request) {
		r.Ctx.Put("url", r.URL.String())
	})

	// Set error handler
	c.OnError(func(r *colly.Response, err error) {
		log.Println("Request URL:", r.Request.URL, "failed with response:", r, "\nError:", err)
	})

	// Print the response (note: with Async the handlers can run concurrently,
	// so this counter is unsynchronized).
	c.OnResponse(func(r *colly.Response) {
		pageCount++
		urlVisited := r.Ctx.Get("url")
		log.Printf("%d DONE Visiting : %s", pageCount, urlVisited)
	})

	baseUrl := "https://www.coursera.org"
	c.Visit(baseUrl)
	c.Wait()
}
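For debugging, would a stripped-down variant like the one below tell me whether pages are genuinely fetched twice? It's only a sketch (the v2 import path and the coursera.org seed are just carried over from above): it logs the URL straight from r.Request.URL instead of the value stored in the context, and surfaces the error returned by Request.Visit, since colly is supposed to report repeat visits via colly.ErrAlreadyVisited.

	package main

	import (
		"errors"
		"log"

		"github.com/gocolly/colly/v2"
	)

	func main() {
		c := colly.NewCollector(
			colly.AllowedDomains("www.coursera.org"),
			colly.Async(true),
		)
		c.Limit(&colly.LimitRule{DomainGlob: "*", Parallelism: 2})

		c.OnHTML("a[href]", func(e *colly.HTMLElement) {
			// Surface the dedup signal: colly returns ErrAlreadyVisited for repeat URLs.
			if err := e.Request.Visit(e.Attr("href")); err != nil && !errors.Is(err, colly.ErrAlreadyVisited) {
				log.Println("visit error:", err)
			}
		})

		c.OnResponse(func(r *colly.Response) {
			// Log the URL actually fetched, not a value stashed in a context.
			log.Println("DONE visiting:", r.Request.URL)
		})

		c.Visit("https://www.coursera.org")
		c.Wait()
	}

My thinking: if the fetched URLs still repeat there, the duplicates are real; if they don't, the repetition is somewhere in my logging rather than in the crawl itself.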