Concurrency in Golang
package main

import (
	"fmt"

	"golang.org/x/tour/tree"
)

// Solving https://tour.golang.org/concurrency/8

// Walk walks the tree t, sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	if t.Left != nil {
		Walk(t.Left, ch)
	}
	ch <- t.Value
	if t.Right != nil {
		Walk(t.Right, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	ch2 := make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	var t1Values, t2Values []int
	// The exercise guarantees each tree has exactly 10 nodes, so we receive 20 values in total.
	for i := 0; i < 20; i++ {
		select {
		case x := <-ch1:
			t1Values = append(t1Values, x)
		case y := <-ch2:
			t2Values = append(t2Values, y)
		}
	}
	return equal(t1Values, t2Values)
}

// equal reports whether two int slices contain the same values in the same order.
func equal(a, b []int) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(1))) // true
	fmt.Println(Same(tree.New(2), tree.New(1))) // false
}
package main

import (
	"fmt"
	"sync"
)

// Solving https://tour.golang.org/concurrency/10

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// fetchedUrls tracks which URLs have already been scheduled for fetching,
// guarded by a mutex because Crawl runs concurrently.
type fetchedUrls struct {
	mutex       *sync.Mutex
	fetchedUrls map[string]bool
}

var fetched fetchedUrls

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// Pages are fetched in parallel, and no URL is fetched twice.
func Crawl(url string, depth int, fetcher Fetcher, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	// Record this URL before fetching so another goroutine can't fetch it again.
	fetched.mutex.Lock()
	fetched.fetchedUrls[url] = true
	fetched.mutex.Unlock()
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		fetched.mutex.Lock()
		if !fetched.fetchedUrls[u] {
			// Mark u while holding the lock so no other goroutine schedules it as well.
			fetched.fetchedUrls[u] = true
			wg.Add(1)
			go Crawl(u, depth-1, fetcher, wg)
		}
		fetched.mutex.Unlock()
	}
}
func main() {
	fetched = fetchedUrls{mutex: &sync.Mutex{}, fetchedUrls: make(map[string]bool)}
	wg := &sync.WaitGroup{}
	// The initial Crawl call also defers wg.Done, so it needs a matching Add.
	wg.Add(1)
	Crawl("https://golang.org/", 4, fetcher, wg)
	wg.Wait()
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}
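
A possible tidy-up of the bookkeeping above, not part of the original gist (the visit name is hypothetical): put the check-and-mark step behind a method on fetchedUrls so callers never touch the mutex directly.

// visit reports whether url is being seen for the first time, marking it as
// fetched while holding the lock. Hypothetical helper, not in the original gist.
func (f *fetchedUrls) visit(url string) bool {
	f.mutex.Lock()
	defer f.mutex.Unlock()
	if f.fetchedUrls[url] {
		return false
	}
	f.fetchedUrls[url] = true
	return true
}

Crawl's loop could then shrink to: if fetched.visit(u) { wg.Add(1); go Crawl(u, depth-1, fetcher, wg) }.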