@MonkeyIsNull
Created December 24, 2013 02:56
urlBuff simulates downloads with timers. It has four workers that continuously pull from urlChan until everything is done.
package main

import (
	"fmt"
	"math/rand"
	"strconv"
	"time"
)

// randInt returns a random int in [min, max).
func randInt(min int, max int) int {
	return min + rand.Intn(max-min)
}

// downloadUrl is one worker: it pulls the next url off urlChan,
// simulates the download with a timer, reports on doneChan,
// then loops back for the next url.
func downloadUrl(urlChan chan int, doneChan chan string) {
	for {
		secs := randInt(1, 10)
		u := <-urlChan
		uStr := strconv.Itoa(u)
		fmt.Println("Url: " + uStr + " launched for: " + strconv.Itoa(secs))

		// Block until the simulated download finishes.
		<-time.After(time.Second * time.Duration(secs))
		fmt.Println("[" + uStr + "] timer done")

		doneChan <- "[" + uStr + "] done"
	}
}

func main() {
	rand.Seed(time.Now().UnixNano())

	urlChan := make(chan int, 100)
	doneChan := make(chan string, 4) // because we have 4 workers

	// All the task urls to download go here.
	fmt.Println("Initialize the tasks...")
	for i := 0; i < 10; i++ {
		urlChan <- i
	}

	// This is the worker pool; each goroutine pulls
	// the next task url off urlChan.
	fmt.Println("Initialize the workers...")
	for worker := 0; worker < 4; worker++ {
		fmt.Println("[worker]", worker)
		go downloadUrl(urlChan, doneChan)
	}

	// Done counter so we know when to break out.
	i := 0

	// Read everything off doneChan.
	fmt.Println("Wait for it all..")
	for j := range doneChan {
		fmt.Println("[DONE]", j)
		i++
		if i == 10 { // don't break until all 10 are gathered
			break
		}
	}
	fmt.Println("ok, done")
}
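An alternative way to wait for the workers, instead of counting done messages off doneChan, is to close urlChan once the tasks are queued and let a sync.WaitGroup signal when every worker has drained it. The sketch below is not part of the original gist; it drops the timer and the doneChan reporting to keep the variant minimal, and numTasks, numWorkers, and id are placeholder names.

package main

import (
	"fmt"
	"sync"
)

func main() {
	const numTasks = 10  // placeholder: number of simulated urls
	const numWorkers = 4 // placeholder: size of the worker pool

	urlChan := make(chan int, numTasks)
	var wg sync.WaitGroup

	// Queue every task up front, then close the channel so the
	// range loops in the workers end once the channel is drained.
	for i := 0; i < numTasks; i++ {
		urlChan <- i
	}
	close(urlChan)

	for w := 0; w < numWorkers; w++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			for u := range urlChan {
				fmt.Println("[worker]", id, "handled url", u)
			}
		}(w)
	}

	wg.Wait() // returns only after all workers have finished
	fmt.Println("ok, done")
}

Closing the channel is what lets range terminate. In the original, urlChan is never closed, so the idle workers simply block on the empty channel and the program exits from main after counting ten done messages.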