@yaegashi
Created March 19, 2016 09:34
My solution for the web crawler exercise in the Go tour: http://tour.golang.org/concurrency/10. Four Crawl workers fetch pages concurrently, while the loop in main deduplicates URLs with a history map and tracks outstanding work with a counter.
package main

import (
	"fmt"
)
type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
// Crawl is a fetch worker: it receives pending pages on fetchq,
// fetches them, and reports the result back on crawlq. On a fetch
// error it reports an empty (non-nil) URL list, so the failed URL
// still gets recorded in the history and is never fetched again.
func Crawl(crawlq chan *CrawlState, fetchq chan *CrawlState, fetcher Fetcher) {
	for {
		in := <-fetchq
		body, urls, err := fetcher.Fetch(in.url)
		if err == nil {
			fmt.Printf("found: %s %q\n", in.url, body)
			in.urls = urls
		} else {
			fmt.Println(err)
			in.urls = []string{}
		}
		crawlq <- in
	}
}
// CrawlState tracks one URL through the crawl: the remaining depth
// and, once fetched, the URLs found on its page (nil until then).
type CrawlState struct {
	depth int
	url   string
	urls  []string
}
func main() {
	history := make(map[string][]string)  // url -> URLs found on that page
	crawlq := make(chan *CrawlState, 100) // fetch results and rediscovered URLs
	fetchq := make(chan *CrawlState, 4)   // pages waiting to be fetched

	// Start four fetch workers.
	for i := 0; i < 4; i++ {
		go Crawl(crawlq, fetchq, fetcher)
	}

	// count tracks how many CrawlStates are in flight;
	// the loop exits once no work remains.
	count := 1
	crawlq <- &CrawlState{4, "http://golang.org/", nil}
	for count > 0 {
		state := <-crawlq
		count--
		if state.urls != nil {
			// A fetch result (empty on error): cache it.
			history[state.url] = state.urls
		}
		if state.depth > 0 {
			urls, ok := history[state.url]
			if ok {
				// Already fetched: follow the cached links instead.
				for _, url := range urls {
					crawlq <- &CrawlState{state.depth - 1, url, nil}
					count++
				}
			} else {
				// Not seen yet: hand it to a fetch worker.
				fetchq <- state
				count++
			}
		}
	}
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
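
Note on output: a representative run prints the lines below. The exact interleaving varies between runs, since four workers fetch concurrently, and in an unlucky ordering http://golang.org/cmd/ can reach a worker twice before its first failure is cached in history.

    found: http://golang.org/ "The Go Programming Language"
    found: http://golang.org/pkg/ "Packages"
    not found: http://golang.org/cmd/
    found: http://golang.org/pkg/fmt/ "Package fmt"
    found: http://golang.org/pkg/os/ "Package os"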
@dilyanpalauzov

I assume the interface of the Crawl function (its signature, as provided in the exercise template) was meant to be retained. That is not done here.
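
For reference, here is a minimal sketch of how the coordination loop could move out of main and into a Crawl that keeps the template's signature, func Crawl(url string, depth int, fetcher Fetcher). It assumes the worker function above is renamed crawlWorker (a hypothetical name); like the original program, it leaves the worker goroutines running when Crawl returns.

// Crawl keeps the exercise template's signature and runs the
// coordination loop that main performs above. crawlWorker is the
// worker function above, renamed.
func Crawl(url string, depth int, fetcher Fetcher) {
	history := make(map[string][]string)
	crawlq := make(chan *CrawlState, 100)
	fetchq := make(chan *CrawlState, 4)
	for i := 0; i < 4; i++ {
		go crawlWorker(crawlq, fetchq, fetcher)
	}
	count := 1
	crawlq <- &CrawlState{depth, url, nil}
	for count > 0 {
		state := <-crawlq
		count--
		if state.urls != nil {
			history[state.url] = state.urls
		}
		if state.depth > 0 {
			if urls, ok := history[state.url]; ok {
				for _, u := range urls {
					crawlq <- &CrawlState{state.depth - 1, u, nil}
					count++
				}
			} else {
				fetchq <- state
				count++
			}
		}
	}
}

main then shrinks to a single call: Crawl("http://golang.org/", 4, fetcher).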
