@b10s
Created December 30, 2019 16:45
Solution to the last exercise (Web Crawler) in the Go tour: https://tour.golang.org/concurrency/10
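The approach: a mutex-protected CrawlerCache ensures each URL is fetched at most once, and each page is crawled in its own goroutine. A running-goroutine counter together with the fin channel lets Crawl block until the last crawler has finished.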
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// CacheError reports a cache miss for a given URL.
type CacheError string

func (e CacheError) Error() string {
	return fmt.Sprintf("no record in cache for %v", string(e))
}

// CrawlerCacheRecord holds the fetched body and outgoing links for one URL.
type CrawlerCacheRecord struct {
	body string
	urls []string
}

// CrawlerCache is a mutex-protected map of fetched pages, keyed by URL.
type CrawlerCache struct {
	record map[string]*CrawlerCacheRecord
	mux    sync.Mutex
}

// add stores (or overwrites) the record for key while holding the lock.
func (cache *CrawlerCache) add(key, body string, urls []string) {
	cache.mux.Lock()
	cache.record[key] = &CrawlerCacheRecord{body: body, urls: urls}
	cache.mux.Unlock()
}

// get returns the cached record for key, or a CacheError if it is missing.
func (cache *CrawlerCache) get(key string) (string, []string, error) {
	cache.mux.Lock()
	defer cache.mux.Unlock()
	record, ok := cache.record[key]
	if !ok {
		return "", nil, CacheError(key)
	}
	return record.body, record.urls, nil
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	cache := CrawlerCache{record: make(map[string]*CrawlerCacheRecord)}
	fin := make(chan string)
	defer func() { <-fin }() // block until the last crawler goroutine signals completion

	// cv counts crawler goroutines that are still running; it is updated
	// atomically because every goroutine touches it.
	var cv int64

	var crawler func(u string, d int)
	crawler = func(u string, d int) {
		defer func() {
			if atomic.AddInt64(&cv, -1) == 0 {
				fin <- "done!"
			}
		}()
		if d <= 0 {
			return
		}
		var body string
		var urls []string
		var err error
		// Serve from the cache if possible; otherwise fetch and cache the
		// result (failed URLs are cached too, so they are not retried).
		if body, urls, err = cache.get(u); err != nil {
			body, urls, err = fetcher.Fetch(u)
			if err != nil {
				fmt.Println(err)
				cache.add(u, body, urls)
				return
			}
			cache.add(u, body, urls)
		}
		fmt.Printf("found: %s %q\n", u, body)
		for _, uu := range urls {
			atomic.AddInt64(&cv, 1)
			go crawler(uu, d-1)
		}
	}

	atomic.AddInt64(&cv, 1)
	go crawler(url, depth)
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

// Fetch looks the URL up in the canned map and fails for anything else.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
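
As a quick sanity check (my addition, not part of the gist), the cache type can be exercised with go test; the file name crawler_test.go and the test below are assumptions, and it only covers the add/get round trip:

package main

import "testing"

// TestCacheAddGet stores one record, reads it back, and checks that a
// missing key yields an error (a CacheError).
func TestCacheAddGet(t *testing.T) {
	cache := CrawlerCache{record: make(map[string]*CrawlerCacheRecord)}
	cache.add("http://golang.org/", "The Go Programming Language", []string{"http://golang.org/pkg/"})

	body, urls, err := cache.get("http://golang.org/")
	if err != nil || body != "The Go Programming Language" || len(urls) != 1 {
		t.Fatalf("unexpected record: %q %v %v", body, urls, err)
	}

	if _, _, err := cache.get("http://example.com/"); err == nil {
		t.Fatal("expected an error for a missing key")
	}
}

go test runs the check above; go run . runs the crawler itself, whose found:/not found: lines can appear in any order, since each page is crawled in its own goroutine.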