@hankbao
Created March 25, 2021 06:22
Solution to Exercise: Web Crawler (https://tour.golang.org/concurrency/10)
package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// FetcherState is the status a crawl task reports back to Crawl.
type FetcherState int

const (
	RUNNING FetcherState = 1
	DONE    FetcherState = 2
	ERROR   FetcherState = 3
)

// SafeCache is a set of visited URLs guarded by a mutex so it can be
// shared safely across crawler goroutines.
type SafeCache struct {
	lock  sync.Mutex
	cache map[string]bool
}

// NewSafeCache returns an empty, ready-to-use SafeCache.
func NewSafeCache() SafeCache {
	return SafeCache{cache: make(map[string]bool)}
}

// trySet marks url as crawled and reports whether this call was the first to do so.
func (sc *SafeCache) trySet(url string) bool {
	sc.lock.Lock()
	defer sc.lock.Unlock()

	if sc.cache[url] {
		return false
	}
	sc.cache[url] = true
	return true
}

// doCrawl fetches url, spawns a goroutine per discovered link, and reports
// its own state (RUNNING, then DONE or ERROR) to Crawl over ch.
func doCrawl(url string, depth int, fetcher Fetcher, ch chan FetcherState, sc *SafeCache) {
	if !sc.trySet(url) {
		fmt.Printf("%v already crawled\n", url)
		return
	}
	if depth <= 0 {
		fmt.Println("reached depth limit")
		return
	}

	ch <- RUNNING

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Printf("fetch failed: %v\n", err)
		ch <- ERROR
		return
	}

	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		go doCrawl(u, depth-1, fetcher, ch, sc)
	}

	// Give the spawned children a moment to report RUNNING before this task
	// reports DONE, so the task counter in Crawl does not reach zero early.
	time.Sleep(50 * time.Millisecond)
	ch <- DONE
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	ch := make(chan FetcherState)
	sc := NewSafeCache()
	go doCrawl(url, depth, fetcher, ch, &sc)

	// Count tasks as they report in: each RUNNING adds one in-flight task,
	// each DONE or ERROR retires one. When the count drops back to zero,
	// every spawned crawler has finished.
	runningTasks := 0
OuterLoop:
	for s := range ch {
		switch s {
		case RUNNING:
			runningTasks++
		case ERROR, DONE:
			runningTasks--
			if runningTasks == 0 {
				break OuterLoop
			}
		}
	}

	fmt.Println("Crawling finished")
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}
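
For comparison, here is a minimal sketch (not part of the original gist) of the same crawl coordinated with sync.WaitGroup instead of counting FetcherState messages on a channel. CrawlWithWaitGroup is a hypothetical name; the sketch reuses the Fetcher, SafeCache, and fetcher definitions above.

// CrawlWithWaitGroup is a hypothetical alternative to Crawl above: the same
// SafeCache de-duplication, but completion is tracked with a sync.WaitGroup
// rather than RUNNING/DONE messages on a channel.
func CrawlWithWaitGroup(url string, depth int, fetcher Fetcher) {
	sc := NewSafeCache()
	var wg sync.WaitGroup

	var crawl func(url string, depth int)
	crawl = func(url string, depth int) {
		defer wg.Done()
		if depth <= 0 || !sc.trySet(url) {
			return
		}
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Printf("fetch failed: %v\n", err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range urls {
			// Add before spawning so the counter never drops to zero early.
			wg.Add(1)
			go crawl(u, depth-1)
		}
	}

	wg.Add(1)
	go crawl(url, depth)
	wg.Wait()
	fmt.Println("Crawling finished")
}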