A Tour of Go - Exercise: Web Crawler
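A solution to the Tour of Go web-crawler exercise: crawl starts one goroutine per URL, every fetch attempt reports back on a shared CrawlResult channel, a mutex-guarded map ensures each URL is fetched at most once, and an in-flight counter tells the reporting loop when to close its output channel.
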
package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// CrawlHistory tracks which URLs have been visited. It must be shared
// by pointer so all goroutines see the same mutex and map.
type CrawlHistory struct {
	lock    sync.Mutex
	history map[string]bool
}

// markCrawled records url as visited and reports whether it had
// already been recorded. The check and the write happen under one
// lock, so two goroutines can never both decide to fetch the same URL.
func (ch *CrawlHistory) markCrawled(url string) bool {
	ch.lock.Lock()
	defer ch.lock.Unlock()
	_, isCrawled := ch.history[url]
	ch.history[url] = true
	return isCrawled
}

// CrawlResult carries the outcome of one fetch attempt back to the
// reporting loop.
type CrawlResult struct {
	url   string
	depth int
	body  string
	links []string
	err   error
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	ch := make(chan string)
	go crawl(url, depth, fetcher, ch)
	for str := range ch {
		fmt.Println(str)
	}
}

// crawl launches one goroutine per URL and counts fetches in flight;
// every _crawl sends exactly one result, so when the counter reaches
// zero the whole tree has been reported and ch can be closed.
func crawl(url string, depth int, fetcher Fetcher, ch chan string) {
	result := make(chan CrawlResult)
	crawled := &CrawlHistory{history: make(map[string]bool)}
	crawlingCnt := 1
	go _crawl(url, depth, fetcher, crawled, result)
	for crawlingCnt > 0 {
		res := <-result
		if res.err != nil {
			ch <- fmt.Sprintf("%v: %s", res.err, res.url)
		} else {
			ch <- fmt.Sprintf("found: %s %q", res.url, res.body)
			for _, link := range res.links {
				crawlingCnt++
				go _crawl(link, res.depth-1, fetcher, crawled, result)
			}
		}
		crawlingCnt--
	}
	close(ch)
}

// _crawl fetches a single page and sends exactly one CrawlResult,
// keeping the caller's in-flight counter balanced. A URL is marked
// before fetching, so failed fetches are not retried.
func _crawl(url string, depth int, fetcher Fetcher, crawled *CrawlHistory, result chan CrawlResult) {
	if depth == 0 {
		result <- CrawlResult{url: url, err: fmt.Errorf("depth limited")}
	} else if crawled.markCrawled(url) {
		result <- CrawlResult{url: url, err: fmt.Errorf("already crawled")}
	} else {
		body, urls, err := fetcher.Fetch(url)
		result <- CrawlResult{url, depth, body, urls, err}
	}
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
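The crawler prints one line per fetch attempt. The set of lines is fixed by the fake data, but their order varies with goroutine scheduling; one possible run:

found: http://golang.org/ "The Go Programming Language"
found: http://golang.org/pkg/ "Packages"
not found: http://golang.org/cmd/: http://golang.org/cmd/
already crawled: http://golang.org/
already crawled: http://golang.org/cmd/
found: http://golang.org/pkg/fmt/ "Package fmt"
found: http://golang.org/pkg/os/ "Package os"
already crawled: http://golang.org/
already crawled: http://golang.org/pkg/
already crawled: http://golang.org/
already crawled: http://golang.org/pkg/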
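For comparison, a minimal sketch of the same fan-out using sync.WaitGroup instead of the manual in-flight counter. CrawlWG is a hypothetical name, not part of the exercise; it reuses the Fetcher and CrawlHistory types above and can be appended to the same file.

// CrawlWG is a hypothetical alternative to Crawl: a sync.WaitGroup
// tracks the crawling goroutines, and Wait blocks until the whole
// tree has been visited.
func CrawlWG(url string, depth int, fetcher Fetcher) {
	crawled := &CrawlHistory{history: make(map[string]bool)}
	var wg sync.WaitGroup
	var visit func(url string, depth int)
	visit = func(url string, depth int) {
		defer wg.Done()
		if depth == 0 || crawled.markCrawled(url) {
			return
		}
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range urls {
			wg.Add(1) // Add before go, so Wait cannot return early.
			go visit(u, depth-1)
		}
	}
	wg.Add(1)
	visit(url, depth)
	wg.Wait()
}

Unlike the channel version, which gives one loop sole ownership of the output, this sketch prints directly from each goroutine; the channel design is the safer choice when results need ordering or further processing.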