0

My program checks a web site every 20 seconds for changes in the text "Rebody" and for IP addresses, inside a loop. When it finds a change in the text, it becomes impossible to leave the loop, because an endless goroutine is started that keeps requesting information about every IP address it found, so I can no longer run the check. I need a way to stop the goroutine at the moment of verification, perform the check, and then start it again. How can I do that? Code:

// main polls the URL given as the first command-line argument every
// 20 seconds, reports whether the word "Rebody" appears in the response
// body, and launches background HTTPRequests workers for every IPv4
// address found in the body (at most once per address).
func main() {

    url := os.Args[1]

    // Compile the patterns once instead of on every loop iteration.
    rebodyRe := regexp.MustCompile("Rebody")
    // Matches dotted-quad IPv4 addresses (each octet 0-255).
    ipRe := regexp.MustCompile(`((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)`)

    // Track which addresses already have workers so each endless
    // HTTPRequests goroutine is started only once per address, instead
    // of two fresh goroutines per address on every polling cycle.
    started := make(map[string]bool)

    for {
        time.Sleep(time.Second * 20) /* check every 20 seconds */

        resp, err := http.Get(url)
        if err != nil {
            // A Get error is a transport failure (DNS, refused, timeout),
            // not an HTTP status; report the actual error.
            fmt.Println("request failed:", err)
            continue
        }

        simpleMatchBody, ioErr := ioutil.ReadAll(resp.Body)
        // Close immediately: a defer inside an endless loop never runs
        // and would leak one connection per iteration.
        resp.Body.Close()
        if ioErr != nil {
            fmt.Println("[Info] [IO.Fatal.Method.Read] IO-Error reading body... retry")
            continue
        }

        matchResult := rebodyRe.FindAll(simpleMatchBody, -1)
        for _, tq := range matchResult {
            fmt.Println(string(tq))
        }

        if len(matchResult) == 0 {
            fmt.Println("'Rebody' not found!")
        } else {
            // Note: printing resp.Body here would only show a struct
            // pointer, not the content, so it is omitted.
            fmt.Println("'Rebody' - found!")
        }

        // Now search the same body for IP addresses.
        matchResultA := ipRe.FindAll(simpleMatchBody, -1)
        for _, ip := range matchResultA {

            defURL := fmt.Sprintf("http://%s", ip)
            if !started[defURL] {
                started[defURL] = true
                for i := 0; i < 2; i++ {
                    go HTTPRequests(defURL, 80)
                }
            }

            fmt.Println(defURL)
        }

        // The original branches were inverted ("found" on the empty case).
        if len(matchResultA) == 0 {
            fmt.Println("Url not found!")
        } else {
            fmt.Println("Url is found!")
        }
    }
}

// HTTPRequests endlessly polls url: it issues a GET, waits 5 seconds,
// re-fetches, and reports whether the response body contains the word
// "None". The port parameter is currently unused by the request itself
// (the URL already carries the host) and is kept for interface
// compatibility with existing callers.
func HTTPRequests(url string, port int) {

    _ = port // retained for callers; not used by http.Get

    // Compile once, not on every iteration.
    noneRe := regexp.MustCompile("None")

    for {

        first, sErr := http.Get(url)
        if sErr != nil {
            fmt.Println("error 404")
        } else {
            fmt.Println("request has been send: ", first.Header)
            // Close the body so the transport can reuse the connection;
            // the original never closed this response and leaked it.
            first.Body.Close()
        }

        time.Sleep(time.Second * 5)

        recheck, ssErr := http.Get(url)
        if ssErr != nil {
            fmt.Println("ss err")
            continue
        }

        simpleMatchBody, ioErr := ioutil.ReadAll(recheck.Body)
        // Close immediately: a defer inside an endless loop never fires
        // and would leak one connection per iteration.
        recheck.Body.Close()
        if ioErr != nil {
            fmt.Println("error")
            continue
        }

        matchResult := noneRe.FindAll(simpleMatchBody, -1)
        for _, tq := range matchResult {
            fmt.Println(string(tq))
        }

        if len(matchResult) == 0 {
            fmt.Println("ip not found")
        } else {
            // Printing recheck.Body here would show a struct pointer,
            // not the content, so it is omitted.
            fmt.Println("ip found.")
        }
    }
}
Jack Smith
  • 23
  • 3

1 Answer

1

A pattern I can recommend to you is to use time.Ticker as explained in this answer.

Zhormos
  • 31
  • 5