@estk
Created December 14, 2016 21:12
A Tour of Go: Web Crawler Solution
package main

import (
	"fmt"
	"sync"
)
// Crawler records which URLs have been crawled; the mutex guards the
// map so that concurrent goroutines can share it safely.
type Crawler struct {
	crawled map[string]bool
	mux     sync.Mutex
}
type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
// New returns a Crawler with an empty crawled set.
func New() *Crawler {
	return &Crawler{
		crawled: make(map[string]bool),
	}
}
// visit marks url as crawled and reports whether it had already
// been visited.
func (c *Crawler) visit(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	if c.crawled[url] {
		return true
	}
	c.crawled[url] = true
	return false
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func (c *Crawler) Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	if c.visit(url) || depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	// Crawl each linked page in its own goroutine and wait for
	// the whole subtree to finish before returning.
	for _, u := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			c.Crawl(u, depth-1, fetcher)
		}(u)
	}
	wg.Wait()
}
func main() {
	crawler := New()
	crawler.Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
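
For reference, running this against the canned fetcher prints a found line for each fake page and a not-found error for the missing cmd page. The order below is just one possible interleaving, since links are crawled in concurrent goroutines:

found: http://golang.org/ "The Go Programming Language"
not found: http://golang.org/cmd/
found: http://golang.org/pkg/ "Packages"
found: http://golang.org/pkg/fmt/ "Package fmt"
found: http://golang.org/pkg/os/ "Package os"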
@tomgk commented Jan 31, 2021
Calling c.visit before checking depth means a page can be marked as visited even though depth <= 0 prevents it from being fetched; if that same page is also linked at a lesser depth, but the goroutine responsible for that path hasn't reached it yet, the page never gets crawled.
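
One way to address this (a sketch of an assumed fix, not part of the gist) is to check the depth budget before marking, and to record the remaining depth at which each URL was crawled so that a URL reached again with more budget gets crawled again:

// depths replaces crawled: it maps each URL to the largest remaining
// depth it has been crawled with so far.
type Crawler struct {
	depths map[string]int
	mux    sync.Mutex
}

// visit reports whether url was already crawled with at least depth
// remaining; otherwise it records the new, larger budget.
func (c *Crawler) visit(url string, depth int) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	if prev, ok := c.depths[url]; ok && prev >= depth {
		return true
	}
	c.depths[url] = depth
	return false
}

Crawl would then check the budget first, i.e. if depth <= 0 || c.visit(url, depth) { return }. The trade-off is that a page linked at several depths may be fetched more than once.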
