@hello-josh
Last active December 6, 2019 01:54
A Tour of Go - Exercise: Web Crawler
package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Cache records which URLs have already been crawled.
type Cache struct {
	visited map[string]bool
	mux     sync.Mutex
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// Results are sent on ch, which is closed once this call and all of its
// child crawls have finished. cache is shared by pointer so every
// goroutine locks the same mutex and updates the same map.
func Crawl(url string, depth int, fetcher Fetcher,
	ch chan response, cache *Cache) {
	defer close(ch)
	if depth <= 0 {
		return
	}
	cache.mux.Lock()
	if cache.visited[url] {
		cache.mux.Unlock()
		return
	}
	cache.visited[url] = true
	cache.mux.Unlock()
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	ch <- response{url, body}
	// Crawl each link on its own channel, then drain the child channels
	// in order, forwarding their results to ch.
	result := make([]chan response, len(urls))
	for i, u := range urls {
		result[i] = make(chan response)
		go Crawl(u, depth-1, fetcher, result[i], cache)
	}
	for i := range result {
		for resp := range result[i] {
			ch <- resp
		}
	}
}

func main() {
	cache := &Cache{visited: make(map[string]bool)}
	ch := make(chan response)
	go Crawl("http://golang.org/", 4, fetcher, ch, cache)
	for resp := range ch {
		fmt.Printf("found: %s %q\n", resp.url, resp.body)
	}
}

type response struct {
	url  string
	body string
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
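
For comparison, here is a minimal sketch (not part of the gist) of the same crawl driven by sync.WaitGroup instead of one channel per child goroutine. It reuses the Fetcher interface, Cache type, and fetcher variable defined above and prints results directly rather than forwarding them over channels; CrawlWG and the surrounding names are illustrative.

// CrawlWG is an alternative to Crawl: each child crawl is tracked by the
// shared WaitGroup instead of its own result channel.
func CrawlWG(url string, depth int, fetcher Fetcher, cache *Cache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	// Check and mark the URL under a single lock.
	cache.mux.Lock()
	seen := cache.visited[url]
	cache.visited[url] = true
	cache.mux.Unlock()
	if seen {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go CrawlWG(u, depth-1, fetcher, cache, wg)
	}
}

// Called from main it would look like:
//	var wg sync.WaitGroup
//	cache := &Cache{visited: make(map[string]bool)}
//	wg.Add(1)
//	go CrawlWG("http://golang.org/", 4, fetcher, cache, &wg)
//	wg.Wait()
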
@ayushr2

ayushr2 commented May 19, 2019

I ended up writing something similar, but I suppose it has clearer logic. The code above looks correct to me, though.

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, c chan bool) {
	defer close(c)
	
	if depth <= 0 {
		return
	}
	
	mtx.Lock()
	found := cache[url]
	mtx.Unlock()
	
	if found {
		return
	}
	
	mtx.Lock()
	cache[url] = true
	mtx.Unlock()
	
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	
	fmt.Printf("found: %s %q\n", url, body)
	
	var chans []chan bool

	for i, u := range urls {
		chans = append(chans, make(chan bool))
		go Crawl(u, depth-1, fetcher, chans[i])
	}
	
	for _, cc := range chans {
		<-cc
	}
	
	return
}

func main() {
	c := make(chan bool)
	go Crawl("https://golang.org/", 4, fetcher, c)
	<-c
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

var mtx sync.Mutex
var cache = make(map[string]bool)

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}
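
One small gap in the cache handling above: the lookup and the write happen under two separate Lock/Unlock pairs, so two goroutines that reach the same URL at the same time can both see found == false and both fetch it. Combining the check and the write under one lock closes that window; a minimal sketch against the same mtx and cache globals (markVisited is an illustrative name, not from the comment above):

// markVisited records url in the shared cache and reports whether it had
// already been visited, doing the check and the write under one lock.
func markVisited(url string) bool {
	mtx.Lock()
	defer mtx.Unlock()
	if cache[url] {
		return true
	}
	cache[url] = true
	return false
}

Crawl would then replace the two locked sections with a single if markVisited(url) { return }.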

@SasukeBo

SasukeBo commented Jul 1, 2019

With syntax highlighting (the same code as the comment above).

