Skip to content

Instantly share code, notes, and snippets.

@jreisinger
Last active August 10, 2022 16:04
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save jreisinger/af5b43d414f6d52a263ce3331d46e627 to your computer and use it in GitHub Desktop.
// Fetch prints the download time and size for each URL. Usage:
//
// go build fetchz.go
// ./fetchz urls.txt
package main
import (
"bufio"
"fmt"
"io"
"log"
"net/http"
"os"
"sync"
"time"
)
// nWorkers is the number of concurrent download goroutines.
const nWorkers = 20
func init() {
log.SetPrefix(os.Args[0] + ": ")
log.SetFlags(0)
}
// main reads a file of URLs (one per line, named by the single
// command-line argument), downloads them concurrently with a bounded
// pool of nWorkers goroutines, and prints one timing/size line per
// successful fetch. Fetch errors are logged and skipped.
func main() {
	if len(os.Args[1:]) != 1 {
		log.Fatal("supply file containing URLs, one per line")
	}

	file, err := os.Open(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	var wg sync.WaitGroup

	// Producer: stream URLs from the file into the channel, then close
	// it so the workers' range loops terminate.
	urls := make(chan string)
	wg.Add(1)
	go func() {
		defer wg.Done()
		s := bufio.NewScanner(file)
		for s.Scan() {
			urls <- s.Text()
		}
		if err := s.Err(); err != nil {
			// log.Fatal exits the whole process, so the blocked main
			// goroutine never deadlocks on an unclosed channel.
			log.Fatal(err)
		}
		close(urls)
	}()

	// Workers: drain the URL channel concurrently.
	results := make(chan result)
	for w := 0; w < nWorkers; w++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for u := range urls {
				results <- fetch(u)
			}
		}()
	}

	// Closer: once producer and all workers are done, no more results
	// can arrive, so closing lets the print loop below finish.
	go func() {
		wg.Wait()
		close(results)
	}()

	for r := range results {
		if r.err != nil {
			log.Print(r.err)
			continue
		}
		fmt.Printf("%.3fs %7d %s\n",
			r.donwloadTime.Seconds(), r.donwloadSize, r.URL)
	}
}
// result describes the outcome of fetching one URL. On failure only
// err is set; on success err is nil and the other fields are filled.
// NOTE(review): "donwload" is a misspelling of "download" — kept as-is
// because main references these field names; rename both together.
type result struct {
	URL          string
	donwloadTime time.Duration
	donwloadSize int64
	err          error
}

// client is shared by every worker so HTTP connections can be reused;
// the timeout bounds each request end to end.
var client = &http.Client{Timeout: 5 * time.Second}

// fetch downloads url, discarding the body, and reports how long the
// transfer took and how many bytes were read.
func fetch(url string) result {
	began := time.Now()

	resp, err := client.Get(url)
	if err != nil {
		return result{err: err}
	}
	defer resp.Body.Close()

	// Drain the body to measure its size and let the transport reuse
	// the connection.
	size, err := io.Copy(io.Discard, resp.Body)
	if err != nil {
		return result{err: err}
	}

	return result{
		URL:          url,
		donwloadTime: time.Since(began),
		donwloadSize: size,
	}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment