You are starting all the requests at once. If there are thousands of URLs in the file, then you are starting thousands of goroutines at once. That may work, but it may also fail with errors about running out of sockets or file handles. I'd recommend starting a limited number of fetches at a time, as in the code below. This should also help with the timing.
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"strings"
	"sync"
	"time"
)

// get_resp_time fetches a single URL and prints how long the request took.
func get_resp_time(url string) {
	time_start := time.Now()
	resp, err := http.Get(url)
	if err != nil {
		log.Printf("Error fetching %s: %v", url, err)
		return // don't dereference a nil response
	}
	defer resp.Body.Close()
	fmt.Println(time.Since(time_start), url)
}

func main() {
	content, err := ioutil.ReadFile("url_list.txt")
	if err != nil {
		log.Fatalf("Error reading url_list.txt: %v", err)
	}
	urls := strings.Split(string(content), "\n")

	const workers = 25

	// Start a fixed pool of workers that pull URLs from the channel.
	wg := new(sync.WaitGroup)
	in := make(chan string, 2*workers)
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for url := range in {
				get_resp_time(url)
			}
		}()
	}

	// Feed the URLs to the workers, skipping blank lines.
	for _, url := range urls {
		if url != "" {
			in <- url
		}
	}
	close(in) // no more work: lets the workers' range loops finish
	wg.Wait() // wait for all in-flight fetches to complete
}
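One side note: http.Get uses http.DefaultClient, which has no timeout, so a single slow or hung server can tie up a worker indefinitely and skew your measurements. Here is a minimal sketch of a drop-in replacement for get_resp_time, assuming a 10 second timeout is acceptable for your URLs; the timeout value and the body drain are my additions, not something your original code requires (it also needs "io" added to the imports).

// Sketch only: an explicit timeout, and the body drained so the timing
// covers the whole response and keep-alive connections can be reused.
var client = &http.Client{Timeout: 10 * time.Second}

func get_resp_time(url string) {
	time_start := time.Now()
	resp, err := client.Get(url)
	if err != nil {
		log.Printf("Error fetching %s: %v", url, err)
		return
	}
	defer resp.Body.Close()
	// Read and discard the body before taking the measurement.
	io.Copy(ioutil.Discard, resp.Body)
	fmt.Println(time.Since(time_start), url)
}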