tour.golang exercise solutions
/* Exercise: Loops and Functions #43 */
package main

import (
    "fmt"
    "math"
)

func Sqrt(x float64) float64 {
    z := 2.0
    s := 0.0
    for {
        z = z - (z*z-x)/(2*z)
        if math.Abs(s-z) < 1e-15 {
            break
        }
        s = z
    }
    return s
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(math.Sqrt(2))
}
/******************************************************************************************************/
/* Exercise: Maps #44 */
package main

import (
    "strings"

    "tour/wc"
)

func WordCount(s string) map[string]int {
    ss := strings.Fields(s)
    num := len(ss)
    ret := make(map[string]int)
    for i := 0; i < num; i++ {
        ret[ss[i]]++
    }
    return ret
}

func main() {
    wc.Test(WordCount)
}
/******************************************************************************************************/
/* Exercise: Slices #45 */
package main

import "tour/pic"

func Pic(dx, dy int) [][]uint8 {
    ret := make([][]uint8, dy)
    for i := 0; i < dy; i++ {
        ret[i] = make([]uint8, dx)
        for j := 0; j < dx; j++ {
            ret[i][j] = uint8(i^j + (i+j)/2)
        }
    }
    return ret
}

func main() {
    pic.Show(Pic)
}
/******************************************************************************************************/
/* Exercise: Fibonacci closure #46 */
package main

import "fmt"

// fibonacci is a function that returns
// a function that returns an int.
func fibonacci() func() int {
    x := 0
    y := 1
    return func() int {
        x, y = y, x+y
        return x
    }
}

func main() {
    f := fibonacci()
    for i := 0; i < 10; i++ {
        fmt.Println(f())
    }
}
/******************************************************************************************************/
/* Advanced Exercise: Complex cube roots #47 */
package main

import (
    "fmt"
    "math/cmplx"
)

func Cbrt(x complex128) complex128 {
    z := complex128(2)
    s := complex128(0)
    for {
        z = z - (cmplx.Pow(z, 3)-x)/(3*(z*z))
        if cmplx.Abs(s-z) < 1e-17 {
            break
        }
        s = z
    }
    return z
}

func main() {
    fmt.Println(Cbrt(2))
}
/******************************************************************************************************/
/* Exercise: Errors #57 */
package main

import (
    "fmt"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
    return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

func Sqrt(f float64) (float64, error) {
    if f < 0 {
        return 0, ErrNegativeSqrt(f)
    }
    return 0, nil
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(Sqrt(-2))
}
/******************************************************************************************************/
/* Exercise: Images #58 */
package main

import (
    "image"
    "image/color"

    "tour/pic"
)

type Image struct {
    Width, Height int
    colr          uint8
}

func (r *Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, r.Width, r.Height)
}

func (r *Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (r *Image) At(x, y int) color.Color {
    return color.RGBA{r.colr + uint8(x), r.colr + uint8(y), 255, 255}
}

func main() {
    m := Image{100, 100, 128}
    pic.ShowImage(&m)
}
/******************************************************************************************************/
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */
package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = rot.r.Read(p)
    for i := 0; i < len(p); i++ {
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
            p[i] -= 13
        }
    }
    return
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}
/******************************************************************************************************/
/* Exercise: Equivalent Binary Trees #67 */
package main

import (
    "fmt"

    "tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    _walk(t, ch)
    close(ch)
}

func _walk(t *tree.Tree, ch chan int) {
    if t != nil {
        _walk(t.Left, ch)
        ch <- t.Value
        _walk(t.Right, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for i := range ch1 {
        if i != <-ch2 {
            return false
        }
    }
    return true
}

func main() {
    //tree.New(2)
    ch := make(chan int)
    go Walk(tree.New(1), ch)
    for v := range ch {
        fmt.Print(v)
    }
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}
/******************************************************************************************************/
/* Exercise: Web Crawler #69 */
package main

import (
    "fmt"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

var store map[string]bool

func Krawl(url string, fetcher Fetcher, Urls chan []string) {
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
    } else {
        fmt.Printf("found: %s %q\n", url, body)
    }
    Urls <- urls
}

func Crawl(url string, depth int, fetcher Fetcher) {
    Urls := make(chan []string)
    go Krawl(url, fetcher, Urls)
    band := 1
    store[url] = true // init for level 0 done
    for i := 0; i < depth; i++ {
        for band > 0 {
            band--
            next := <-Urls
            for _, url := range next {
                if _, done := store[url]; !done {
                    store[url] = true
                    band++
                    go Krawl(url, fetcher, Urls)
                }
            }
        }
    }
    return
}

func main() {
    store = make(map[string]bool)
    Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := (*f)[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

Solution #67 does not work if the tree sizes differ. You should check each channel's status (the comma-ok value) inside the for loop, and again after it.
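
A minimal sketch of that fix, assuming (as in the gist) that Walk closes its channel when done; the comma-ok receive catches a size mismatch in either direction:

func Same(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for {
        v1, ok1 := <-ch1
        v2, ok2 := <-ch2
        if ok1 != ok2 { // one tree ran out of values before the other
            return false
        }
        if !ok1 { // both channels closed: every value matched
            return true
        }
        if v1 != v2 {
            return false
        }
    }
}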

On Exercise: Rot13 Reader #59, you scan and "rot13" the entire contents of p. That's incorrect: you should only scan the n bytes the Read actually returned. In the test harness, p is a slice 32k bytes long, so it's pretty inefficient!
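
A minimal sketch of that correction, limiting the loop to the n bytes the inner Read reported:

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = rot.r.Read(p)
    for i := 0; i < n; i++ { // only touch bytes that were actually read
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
            p[i] -= 13
        }
    }
    return
}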

ProTip commented Mar 5, 2013

The first two numbers of the Fibonacci sequence are 0 and 1. Just let x start at 1 and y at 0.
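
A quick sketch of that tweak; the first call then swaps 0 into x, so the sequence begins 0, 1, 1, 2, 3, ...:

func fibonacci() func() int {
    x, y := 1, 0
    return func() int {
        x, y = y, x+y // first call: x becomes 0, y becomes 1
        return x
    }
}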

My solution for #69 is a little different from yours.

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

type FetcherResult struct {
    body string
    urls []string
    err  error
}

func _Crawl(url string, depth int, fetcher Fetcher, ch chan *FetcherResult) {
    fmt.Println("ready to _Crawl", depth)

    if depth <= 0 {
        close(ch)
        return
    }

    var fr FetcherResult
    fr.body, fr.urls, fr.err = fetcher.Fetch(url)

    ch <- &fr
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    ch := make(chan *FetcherResult)

    mapFetcherResult := make(map[string]int)
    mapFetcherResult[url] = 1

    go _Crawl(url, depth, fetcher, ch)

    for pFR := range ch {
        for _, urlValue := range pFR.urls {
            if _, inMap := mapFetcherResult[urlValue]; !inMap {
                mapFetcherResult[urlValue] = 1

                fmt.Println("Add url", urlValue, "body", pFR.body)
                depth--
                go _Crawl(urlValue, depth, fetcher, ch)
            }
        }
    }
}

Your solution for exercise #67 assumes the two trees have the same structure in the way you implement Same():

[snippet]
    for i := range ch1 {
        if i != <- ch2 {
            return false
        }
    }
[/snippet]

The code here gets a value from ch1 and compares it to the next value from ch2. From the exercise:

There can be many different binary trees with the same sequence of values stored at the leaves.

The below code solves this by using a map to buffer the leaf values and either adding them as keys, if they don't exist, or deleting the entry if it already exists. It does assume leaf values are unique, but that's the case in the exercise and wouldn't be hard to fix.

package main

import (
    "code.google.com/p/go-tour/tree"
    "fmt"
)

func Walk(t *tree.Tree, ch chan int) {
    if t.Left != nil {
        Walk(t.Left, ch)
    }
    ch <- t.Value
    if t.Right != nil {
        Walk(t.Right, ch)
    }
}

func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)

    m := LeafList{make(map[int]bool)}

    go Walk(t1, ch1)
    go Walk(t2, ch2)

    for i := 0; i < 10; i++ {
        m.DeleteOrAdd(<- ch1)
        m.DeleteOrAdd(<- ch2)
    }

    return len(m.List) == 0

}

type LeafList struct {
    List map[int]bool
}

func (m *LeafList) DeleteOrAdd(v int) {
    _, ok := m.List[v]
    if !ok {
        m.List[v] = true
    } else {
        delete(m.List, v)
    }
}

func main() {
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}

@evertlammerts the trees can have a different structure, but if you walk them left to right (in-order) the resulting sequence should be the same. Notice that the quoted sentence states exactly that.
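
To illustrate (a small sketch; tree.New(k) is documented to return a randomly structured tree holding the values k, 2k, ..., 10k, and the import path may differ with your tour version): an in-order walk emits those values in ascending order no matter the shape, so two walks of tree.New(1) should both print 1 through 10:

package main

import (
    "fmt"

    "golang.org/x/tour/tree"
)

// Walk sends the tree's values to ch in sorted order, then closes ch.
func Walk(t *tree.Tree, ch chan int) {
    walk(t, ch)
    close(ch)
}

// walk visits Left, the node itself, then Right: an in-order traversal.
func walk(t *tree.Tree, ch chan int) {
    if t != nil {
        walk(t.Left, ch)
        ch <- t.Value
        walk(t.Right, ch)
    }
}

func main() {
    ch := make(chan int)
    go Walk(tree.New(1), ch) // a differently shaped tree on every run
    for v := range ch {
        fmt.Print(v, " ") // always prints 1 2 3 4 5 6 7 8 9 10
    }
}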

ifq commented Jul 11, 2013

Call Walk(tree.New(1)) twice and you will find the results are different:

10,5,3,1,2,4,7,6,9,8,
7,4,2,1,3,5,6,9,8,10,

jaekwon commented Jul 22, 2013

package main

import "fmt"
import "code.google.com/p/go-tour/tree"

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    Walk_(t, ch)
    close(ch)
}

func Walk_(t *tree.Tree, ch chan int) {
    if t != nil {
        Walk_(t.Left, ch)
        ch <- t.Value
        Walk_(t.Right, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    t1c, t2c := make(chan int), make(chan int)
    go Walk(t1, t1c)
    go Walk(t2, t2c)
    for {
        t1e, cont1 := <-t1c
        t2e, cont2 := <-t2c
        if (t1e != t2e) || (cont1 != cont2) {
            return false
        }
        if !cont1 { // both channels are closed and every value matched
            return true
        }
    }
}

func main() {
    fmt.Print(Same(tree.New(1), tree.New(1)))
}

For solutions of #67, just name your function and its helper Walk and walk. In Go there is no need to mark a package-private function with a prefix or suffix.

rtoal commented Sep 19, 2013

Just an opinion here, but in your solution #45 I'd replace

for i := 0; i < dy; i++

with

for i := range ret

and do something similar with the inner loop
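
Something like this sketch (same behavior as the original, just using range over the slices):

func Pic(dx, dy int) [][]uint8 {
    ret := make([][]uint8, dy)
    for i := range ret {
        ret[i] = make([]uint8, dx)
        for j := range ret[i] {
            ret[i][j] = uint8(i^j + (i+j)/2)
        }
    }
    return ret
}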

mckoss commented Oct 25, 2013

A solution similar to your tree walker, but without the bug where one tree being a prefix of the other returns a false positive:

https://gist.github.com/mckoss/e440d5fc88a151f52a30

package main

import (
    "code.google.com/p/go-tour/tree"
    "fmt"
)

func main() {
    ch := make(chan int)
    go Walk(tree.New(1), ch)
    for v := range ch {
        fmt.Println(v)
    }
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}

func Walk(t *tree.Tree, ch chan<- int) {
    var walker func(*tree.Tree)
    walker = func(t *tree.Tree) {
        for t != nil {
            walker(t.Left)
            ch <- t.Value
            t = t.Right
        }
    }
    walker(t)
    close(ch)
}

func Same(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    var v1, v2 int
    var ok bool

    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for v1 = range ch1 {
        v2, ok = <-ch2
        if !ok || v1 != v2 {
            return false
        }
    }
    v2, ok = <-ch2
    return !ok
}

Thanks for taking the time to post these.

For #44 (now #43 on the site) the map access can be simplified and perhaps better understood:

func WordCount(s string) map[string]int {
    ss := strings.Fields(s)
    ret := make(map[string]int)
    for _, sub := range ss {
        ret[sub]++
    }
    return ret
}

Shaked commented Jan 18, 2014

@PaulRoad I think with your solution the last url (=http://golang.org/pkg/os/) won't be scanned, as depth will be 0.

It seems like the condition should be checked at the end, so that the last call also counts:

func crawl(url string, depth int, fetcher Fetcher, ch chan *FetcherResult) {
    fmt.Println("now crawlling ", url, depth)
    var fr FetcherResult 
    fr.body, fr.urls, fr.err = fetcher.Fetch(url)
    fr.purl = url 
    ch <- &fr 

    if depth <= 0 {
        close(ch) 
        return
    }
 }

(I have also added the parent url "purl")

Thanks

Since there was no iterative solution for #70 Equivalent Binary Trees, here is one:

package main

import (
    "code.google.com/p/go-tour/tree"
    "fmt"
    "container/list"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    queue := list.New()
    var visited []*tree.Tree

    queue.PushBack(t)
    for {
        if queue.Len() == 0 {
            close(ch)
            break
        }
        e := queue.Back()
        queue.Remove(e)
        t := e.Value.(*tree.Tree)

        if isVisited(visited, t) {
            ch <- t.Value
            continue
        } else {
            visited = append(visited, t)
        }

        if t.Right != nil {
            queue.PushBack(t.Right)
        }

        queue.PushBack(t)

        if t.Left != nil {
            queue.PushBack(t.Left)
        }
    }
}

func isVisited(v []*tree.Tree, t *tree.Tree) bool {
    for _, v := range v {
        if v == t {
            return true
        }
    }
    return false
}

func Same(t1 *tree.Tree, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)

    for s1 := range ch1 {
        if s1 != <- ch2 {
            return false
        }
    }
    return true
}

// Same determines whether the trees
// t1 and t2 contain the same values.
//func Same(t1, t2 *tree.Tree) bool

func main() {
    ch := make(chan int)
    go Walk(tree.New(3), ch)

    for v := range ch {
        fmt.Println(v)
    }


    s1 := Same(tree.New(1), tree.New(1))
    s2 := Same(tree.New(1), tree.New(2))
    fmt.Println(s1, s2)
}

Here's my solution to the WebCrawler one. Make sure you test yours with different values of "depth", as I found some issues with mine when I first built it (some values of depth caused deadlocks).

package main

import (
    "fmt"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

type Answer struct {
    body string
    urls []string
    url string
    remainingDepth int
    isError bool
}

func CrawlInner(url string, depth int, fetcher Fetcher, result chan Answer) {
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        result <- Answer{ fmt.Sprint(err), nil, url, depth-1, true }
    } else {
        result <- Answer{ body, urls, url, depth-1, false }
    }
}

func Crawl(url string, depth int, fetcher Fetcher) {
    answer := make(chan Answer)

    go CrawlInner(url, depth, fetcher, answer)
    ProcessResults(url, answer)
}

func ProcessResults(rootUrl string, answer chan Answer) {
    pending := make(map[string]bool)
    done := make(map[string]bool)

    pending[rootUrl] = true

    for result := range answer {
        if result.isError {
            fmt.Println(result.body)
        } else {
            fmt.Printf("found: %s %q\n", result.url, result.body)
        }
        if !result.isError && result.remainingDepth > 0 {
            for _,v := range result.urls {
                if _, ok := pending[v] ; ok { continue }
                if _, ok := done[v] ; ok { continue }
                pending[v] = true
                go CrawlInner(v, result.remainingDepth, fetcher, answer)
            }
        }
        delete(pending, result.url)
        done[result.url] = true
        if len(pending) == 0 {
            close(answer)
        }
    }
}

func main() {
    Crawl("http://golang.org/", 4, fetcher)
}


// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls     []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := (*f)[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

Ayvan commented Jun 19, 2014

My solution #69 WebCrawler

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

var store map[string]bool

var wg sync.WaitGroup

func _crawl(url string, depth int, fetcher Fetcher, Results chan string, wg *sync.WaitGroup) {
    defer wg.Done()

    if depth <= 0 {
        return
    }

    if exists := store[url]; exists {
        return
    }

    body, urls, err := fetcher.Fetch(url)

    if err != nil {
        Results <- fmt.Sprintf("not found: %s", url)
        return
    }
    store[url] = true

    Results <- fmt.Sprintf("found: %s %q", url, body)

    for _, u := range urls {
        wg.Add(1)
        go _crawl(u, depth-1, fetcher, Results, wg)
    }
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    Results := make(chan string)
    wg.Add(1)
    go _crawl(url, depth, fetcher, Results, &wg)

    go func() {
        wg.Wait()
        close(Results)
    }()

    for r := range Results {
        fmt.Println(r)
    }
}

func main() {
    store = make(map[string]bool)
    Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

Shchvova commented Jul 9, 2014

Here is my solution to crawler problem.. I find it really nice http://play.golang.org/p/vMccfh7DC2

package main

import (
    "fmt"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}


func CrawlRecursive(url string, depth int, fetcher Fetcher, quit chan int, done chan map[string] bool) {
    if depth<=0 {
        quit <- 0
        return
    }

    doneUrls := <- done
    hasIt, didIt := doneUrls[url]
    if didIt && hasIt {
        quit <- 0
        done <- doneUrls
        return
    } else {
        doneUrls[url] = true
    }
    done <- doneUrls

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        quit <- 0
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    childrenQuit := make(chan int)
    for _, u := range urls {
        go CrawlRecursive(u, depth-1, fetcher, childrenQuit, done)
    }
    for _ = range urls {
        <- childrenQuit
    }
    quit <- 0
}

func Crawl(url string, depth int, fetcher Fetcher) {
    done := make(chan map[string] bool, 1)
    done <- map[string]bool{url: false}
    quit := make(chan int)
    go CrawlRecursive(url, depth, fetcher, quit, done)
    <- quit
}

func main() {
    Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

rread commented Aug 14, 2014

My solution to Web Crawler (#73 in the version of the Tour I used).

Note: because the crawls run in parallel and results may arrive out of order, I associated the depth with the crawl results, which seems to simplify things a bit.

type crawlResult struct {
    depth int
    urls []string
}

func crawlOne(url string, depth int, ch chan crawlResult) {
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
    } else {
        fmt.Printf("found: %s %q\n", url, body)
    }
    ch <- crawlResult{depth: depth-1, urls:urls}   
    return
}

// Crawl parallelizes calls to fetcher via crawlOne(), crawling
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    var resultCache = map[string]bool{}

    ch := make(chan crawlResult)

    go crawlOne(url, depth, ch)
    resultCache[url] = true    

    for running := 1; running > 0; {    
        cr := <- ch
        running--
        if cr.depth > 0  {
            for _,u := range cr.urls {
                if _, ok := resultCache[u]; !ok {
                    go crawlOne(u, cr.depth, ch)
                    running++
                    resultCache[u] = true
                }
            }    
        }
    }
    return
}

/* Exercise: Loops and Functions #25 */

package main

import (
    "fmt"
    "math"
)

func Sqrt(x float64) float64 {
    z := 1.0
    for i:=0; i<10; i++ {
        z = z-((z*z-x)/(2*z))
    }
    return z
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(math.Sqrt(2))
}

My solution for #67 Equivalent Binary Trees

package main

import "code.google.com/p/go-tour/tree"
import "fmt"

func Walk(t *tree.Tree, ch chan int) {
    if t != nil {
        Walk(t.Left, ch)
        ch <- t.Value
        Walk(t.Right, ch)
    }
/*
a little bit of my original solution:
        if t.Left != nil {
            Walk(t.Left, ch)
        }
        ch <- t.Value
        if t.Right != nil {
            Walk(t.Right, ch)
        }
because an `if` is cheaper than a function call
*/
}

func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for i := 0; i < 10; i++ {
        if <-ch1 != <-ch2 {
            return false
        }
    }
    return true
}

func main() {
    fmt.Println("Let's go!")
    ch := make(chan int)
    go Walk(tree.New(1), ch)
    for i := 0; i < 10; i++ {
        fmt.Printf("value: %d\n", <-ch)
    }
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}

txgo commented Nov 7, 2014

Exercise: Rot13 Reader #63

should return n, io.EOF
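
If I read that right, the point is that Read should hand the inner reader's error (including io.EOF) back to the caller along with n. A minimal sketch that does so while rotating only the n bytes actually read:

func (rot *rot13Reader) Read(p []byte) (int, error) {
    n, err := rot.r.Read(p) // err is io.EOF once the inner reader is drained
    for i := 0; i < n; i++ {
        switch {
        case p[i] >= 'A' && p[i] <= 'Z':
            p[i] = (p[i]-'A'+13)%26 + 'A'
        case p[i] >= 'a' && p[i] <= 'z':
            p[i] = (p[i]-'a'+13)%26 + 'a'
        }
    }
    return n, err
}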

/* Exercise: Loops and Functions #25 */
/* with bonus (compare last iteration) */
package main

import (
    "fmt"
    "math"
)

func Sqrt(x float64) float64 {
    z := 1.0
    var last float64
    for math.Abs(last - z) > 0.00000001 {
        last = z
        z = z-((z*z-x)/(2*z))
        fmt.Println(last,"-",z)
    }
    return z
}

func main() {
    var num float64 = 2
    a,b := Sqrt(num), math.Sqrt(num)
    fmt.Println("end",a,"-",b)
}

mknet commented Dec 30, 2014

My solution for the Stringers exercise:
http://tour.golang.org/methods/7

package main

import "fmt"

type IPAddr [4]byte

func (p IPAddr) String() string {
    return fmt.Sprintf("%d.%d.%d.%d",p[0], p[1], p[2], p[3])
}

func main() {
    addrs := map[string]IPAddr{
        "loopback":  {127, 0, 0, 1},
        "googleDNS": {8, 8, 8, 8},
    }
    for n, a := range addrs {
        fmt.Printf("%v: %v\n", n, a)
    }
}

mknet commented Dec 30, 2014

My solution for http://tour.golang.org/methods/11

package main

import "code.google.com/p/go-tour/reader"

type MyReader struct{}

func (r MyReader) Read(b []byte) (n int, err error) {
     b[0] = 'A'
     return 1, nil
}

func main() {
    reader.Validate(MyReader{})
}

And yet another web crawler solution. =)

Here the "boss" keeps track of which urls have been visited, and requests can be made to him via a global channel (crawlers can send him a url to ask if they should download it). A go routine is spawned for each crawler, and since this is recursive, and the "boss" doesn't have a dedicated channel to reply to each crawler, the boss needs a way to communicate back to the crawler to give it the green light, or tell it not to bother. In order to do this, the crawler creates a callback channel for this purpose, and sends it down the global channel to the boss with the url. Therefore requests reach the boss via this global channel, which takes a "comm" struct, which is the url they are available to download, and a bool channel to get the "download/don't download" notification back. This is implemented as a bool (true => download, false => forget it).

package main

import (
    "fmt"
)

type comm struct {
    url   string
    reply chan bool
}

var (
    fetched map[string]bool = make(map[string]bool)
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, ch chan comm) {
    if depth <= 0 {
        return
    }
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        reply := make(chan bool)
        ch <- comm{url: u, reply: reply}
        decision := <-reply
        if decision {
            fmt.Printf("Downloading from url %v\n", u)
            Crawl(u, depth-1, fetcher, ch)
        } else {
            fmt.Printf("Not downloading from url %v\n", u)
        }
    }
    return
}

func main() {
    ch := make(chan comm)
    go boss(ch)
    fetched["http://golang.org/"] = true
    Crawl("http://golang.org/", 4, fetcher, ch)
}

func boss(ch chan comm) {
    for {
        comm := <-ch
        if !fetched[comm.url] {
            fetched[comm.url] = true
            comm.reply <- true
        } else {
            comm.reply <- false
        }
    }
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

wuranbo commented Feb 13, 2015

// http://tour.golang.org/methods/12
// my solution, another shorter way.

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (r *rot13Reader) Read(b []byte) (int, error) {
    l, e := r.r.Read(b)
    for i, c := range b {
        if c >= 'A' && c <= 'Z' {
            b[i] = (c-'A'+13)%26 + 'A'
        } else if c >= 'a' && c <= 'z' {
            b[i] = (c-'a'+13)%26 + 'a'
        }
    }
    return l, e
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

http://tour.golang.org/methods/12
My solution, shooting for clarity since this is a learning endeavor after all.

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (r rot13Reader) rot13(b byte, basis byte) byte {
    return (b - basis + 13) % 26 + basis
}

func (r rot13Reader) cryptByte(b byte) byte {

    if b >= 'A' && b <= 'Z' {
        return r.rot13(b, 'A')
    }

    if b >= 'a' && b <= 'z' {
        return r.rot13(b, 'a')
    }

    return b
}

func (rot rot13Reader) Read(b []byte) (c int, err error) {
    c, err = rot.r.Read(b)

    for i := 0; i < c; i++ {
        b[i] = rot.cryptByte(b[i])
    }

    return
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

One more WordCount version:

package main

import (
    "fmt"
    "golang.org/x/tour/wc"
    "strings"
)

func WordCount(s string) map[string]int {
    var ret map[string]int
    ret = make(map[string]int)
    for _, v := range strings.Fields(s) {
        ret[v]++
    }
    fmt.Println(ret)
    return ret
}

func main() {
    wc.Test(WordCount)
}

riramar commented Jul 5, 2015

The solution for "Exercise: Errors" it's missing the calculation.

package main

import (
    "fmt"
    "math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
    return fmt.Sprintf("cannot Sqrt negativ number: %g", float64(e))
}

func Sqrt(f float64) (float64, error) {
    if f < 0 {
        return 0, ErrNegativeSqrt(f)
    }
    z := float64(1)
    i := 0
    for i < 100 {
        i += 1
        z = z - ((math.Pow(z, 2) - f) / (2 * z))
    }
    return z, nil
}

func main() {
    fmt.Println(Sqrt(25))
    fmt.Println(Sqrt(-2))
}

The solution to #57 doesn't calculate the Sqrt if there is no error ...

I think there is a bug at [the tree comparison](https://gist.github.com/zyxar/2317744#file-exercise-tour-go-L256).

If the second tree is longer than the first and all the shared values are the same (e.g. [1, 2] vs. [1, 2, 3]), the func will return true even though the trees are not the same.

func Same(t1, t2 *tree.Tree) bool {
    c1 := make(chan(int))
    c2 := make(chan(int))
    go Walk(t1, c1)
    go Walk(t2, c2)
    for i := range c1 {
        fmt.Println(i)
        if i != <- c2 {
            return false
        }
    }
    for range c2 {
        return false
    }
    return true
}

I also made a helper to show that:

func NewLen(k int) *Tree {
    var t *Tree
    for i :=1; i <= k; i++ {
        t = insert(t, i)
    }
    return t
}

and the main:

func main() {
    fmt.Println(Same(tree.NewLen(10), tree.NewLen(12)))
    fmt.Println(Same(tree.NewLen(14), tree.NewLen(12)))
    fmt.Println(Same(tree.NewLen(14), tree.NewLen(14)))
}

Cheers,
Alexis.

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n,err = rot.r.Read(p)
    for i := 0; i < len(p); i++ {
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){
            p[i] -= 13
        }
    }
    return
}

More precisely, in for i := 0; i < len(p); i++: what if the buffer was not filled completely by the read call? You'll modify parts of the buffer that were not meant to be used. The program works because processing extra bytes does not matter for this algorithm (it is only inefficient); it could certainly break if you were doing hashing, encryption or something similar.
Another issue I see is that it will not return immediately if the inner reader returned an EOF or an error, but will again process the whole slice.

len(p) needs to be changed to n.

One question: on error, would n always be 0, or is this undefined? If undefined, an additional check for the error would be needed before the processing loop.

It's a nice example. Just starting with go and wanted to see how others approach the issue. I like very much the useful example for using named return values. Thank you.

Here is my MyReader solution

    package main

    import "golang.org/x/tour/reader"

    type MyReader struct{}

    // TODO: Add a Read([]byte) (int, error) method to MyReader.

    func (m MyReader) Read(b []byte) (int,error) {
      cnt := 0
      for i := 0; i < len(b); i++ {
        b[i] = 'A'
        cnt = i+1
      }
      return cnt,nil
    }

    func main() {
      reader.Validate(MyReader{})
    }

An alternate exercise-reader.go solution

package main

import "golang.org/x/tour/reader"

type MyReader struct{}

func (r MyReader) Read(s []byte) (int, error) {
    for i := range s {
        s[i] = 'A'
    }

    return len(s), nil
}

func main() {
    reader.Validate(MyReader{})
}

dontuta commented Nov 19, 2015

I didn't find any solution for the HTTP Handlers exercise, so here's mine
(sorry for not formatting my code properly, I'm still new to GitHub).

package main

import (
    "fmt"
    "log"
    "net/http"
)

type String string

type Struct struct {
    Greeting string
    Punct    string
    Who      string
}

func (s String) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    fmt.Fprint(w, s)
}

func (s Struct) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    fmt.Fprint(w, s)
}

func main() {
    // your http.Handle calls here
    http.Handle("/string", String("I'm a frayed knot."))
    http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
    log.Fatal(http.ListenAndServe("localhost:4000", nil))
}

minpajr commented Jan 14, 2016

Exercise: Readers

Since the exercise asks you to implement a Reader which returns the number of bytes read and an error, we need to return the number of bytes read and to at least cover some error cases. We know from previous exercises that io.Reader returns an io.EOF error at the end of the stream, and we can assume that if the input b is nil or empty, we have reached the end of the file...

package main

import (
    "golang.org/x/tour/reader"
    "fmt"
)

type ErrEOF []byte

func (e ErrEOF) Error() string {
    return fmt.Sprintf("End of file")
}

type MyReader struct{}

// TODO: Add a Read([]byte) (int, error) method to MyReader.
func (r MyReader) Read(b []byte) (int, error) {
    if b == nil || len(b) == 0 {
        err := ErrEOF(b)
        return 0, err
    }

    for i := 0; i < len(b); i++ {
        b[i] = 'A'
    }

    return len(b), nil
}

func main() {
    reader.Validate(MyReader{})
}

Recently completed the gotour and made a repo with my solutions here: https://github.com/makushline/gotour
Any feedback is highly appreciated.

louwers commented Mar 18, 2016

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (int, error) {
    n, err := rot.r.Read(p)
    for i := 0; i < n; i++ {
        // skip anything that is not an ASCII letter
        if p[i] < 'A' || p[i] > 'z' || (p[i] > 'Z' && p[i] < 'a') {
            continue
        }

        if p[i] >= 'a' && p[i] < 'a'+13 || p[i] >= 'A' && p[i] < 'A'+13 {
            p[i] += 13
        } else {
            p[i] -= 13
        }
    }
    return n, err
}

func main() {
    s := strings.NewReader("NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

A solution to the binary tree exercise using only one channel and a map:

package main

import (
    "golang.org/x/tour/tree"
    "fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    if t != nil {
        ch <- t.Value
        Walk(t.Left, ch)
        Walk(t.Right, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    values := make(map[int]bool)
    ch := make(chan int)
    go func() {
        Walk(t1, ch)
        Walk(t2, ch)
        close(ch)
    }()
    for val := range ch {
        if _, ok := values[val]; ok {
            delete(values, val)
        } else {
            values[val] = true
        }
    }
    return len(values) == 0
}

func main() {
    fmt.Println(Same(tree.New(1), tree.New(2)))
    fmt.Println(Same(tree.New(1), tree.New(1)))
}

alukos commented Apr 24, 2016

Exercise: rot13Reader

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (r *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = r.r.Read(p)
    for i, b := range p {
        switch {
        case 'A' <= b && b <= 'M', 'a' <= b && b <= 'm':
            p[i] += 13
        case 'N' <= b && b <= 'Z', 'n' <= b && b <= 'z':
            p[i] -= 13
        }
    }
    return
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

zacps commented Apr 26, 2016

An error is thrown on #58 - Images because Bounds(), ColorModel() and At() need a non-pointer receiver in order to implement image.Image.

func (img Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, img.Height, img.Width)
}
func (img Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (img Image) At(x, y int) color.Color {
    return color.RGBA{img.color+uint8(x), img.color+uint8(y), 255, 255}
}

Hi, thanks for the solutions. They helped more with understanding Go than as solutions themselves.

On a side note, may I know what color theme you are using? I like a white background in editors; please let me know. Thanks.

The rot13 solution assumes the underlying reader fills in the requested number of bytes. This is not necessarily the case, so:

for i, b := range p

should be

for i:=0 ; i < n ; i++

Tantalum73 commented May 26, 2016 edited

I want to share my solution for the image exercise (number 25) with you and everyone who stumbles over this post.

package main

import (
    "golang.org/x/tour/pic"
    "image/color"
    "image"
)


type Image struct{
    Rect image.Rectangle
}

func (image Image) ColorModel() color.Model {
        return color.RGBAModel
}
func (i Image) Bounds() image.Rectangle {
    return i.Rect
}
func (image Image) At(x, y int) color.Color {
    return color.RGBA{uint8(x*y), uint8(y+x), 255, 255}
}

func main() {
    m := Image{image.Rect(0,0,300,300)}
    pic.ShowImage(m)
}

hugdru commented Sep 29, 2016 edited

/* Exercise: Web Crawler #69 - https://tour.golang.org/concurrency/10 */

package main

import (
    "fmt"
    "sync"
)

type Set struct {
    v   map[string]bool
    mux sync.Mutex
}

func (set *Set) Register(key string) {
    set.mux.Lock()
    defer set.mux.Unlock()
    set.v[key] = true
}

func (set *Set) Exists(key string) bool {
    set.mux.Lock()
    defer set.mux.Unlock()
    return set.v[key]
}

type Fetcher interface {
    Fetch(url string) (body string, urls []string, err error)
}

func Crawl(url string, depth int, fetcher Fetcher, ch chan string) {
    set := Set{v: make(map[string]bool)}
    var wg sync.WaitGroup
    wg.Add(1)
    go crawl(url, depth, fetcher, set, ch, &wg)
    wg.Wait()
    close(ch)
}

// crawl takes *Set so the mutex is shared rather than copied.
func crawl(url string, depth int, fetcher Fetcher, set *Set, ch chan string, wg *sync.WaitGroup) {
    defer wg.Done()

    if depth <= 0 || set.Exists(url) {
        return
    }

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    set.Register(url)
    ch <- url

    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        if set.Exists(u) {
            continue
        }
        wg.Add(1)
        go crawl(u, depth-1, fetcher, set, ch, wg)
    }
    return
}

func main() {
    ch := make(chan string)
    go Crawl("http://golang.org/", 4, fetcher, ch)
    for value := range ch {
        fmt.Println(value)
    }
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

mymtw commented Oct 6, 2016

My solution for Stringers. It doesn't make much sense for a fixed-size [4]byte, but it would if IPAddr were a slice of bytes (dynamic size):

package main

import (
    "fmt"
    "strings"
    "strconv"
)


type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.
func (ipaddr IPAddr) String() string {
    var strs []string
    for _, v := range ipaddr {
        strs = append(
            strs, strconv.Itoa(int(v)))
    }
    return strings.Join(strs, ".")
}

func main() {
    hosts := map[string]IPAddr{
        "loopback":  {127, 0, 0, 1},
        "googleDNS": {8, 8, 8, 8},
    }
    for name, ip := range hosts {
        fmt.Printf("%v: %v\n", name, ip)
    }
}

ppontryagin commented Oct 21, 2016 edited

/* Exercise: Loops and Functions #43 */

package go_play

import "math"

const (
    Delta = 0.01
)

// By dividing by x, regardless of the scale of x, the difference
// is held to a constant relative (fractional) tolerance.
// https://sites.google.com/a/stonybrook.edu/functional-programming-scala/lecture-1-5
func isGoodEnough(guess, x float64) bool {
    return math.Abs((guess * guess) - x) / x < Delta // divide by x 
}

func iter(zN, x float64) float64 {
    zN1 := zN - (zN * zN - x) / (2 * zN)
    return zN1
}

func Sqrt(x float64) float64 {
    guess := x
    for !isGoodEnough(guess, x) { // iterate until the relative error is within Delta
        guess = iter(guess, x)
    }
    return guess
}

leonsim commented Nov 30, 2016

@zyxar, for #67, even though the test cases the question provided pass, that solution will not pass when ch2 has more elements. It can even panic when ch2 has fewer elements.

mohigupt commented Dec 15, 2016 edited

My solution #69 WebCrawler using a Mutex.
My specific changes are only in the provided Crawl function (and, of course, the declaration of the Cache type).

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}


type Cache struct {
    uniqueUrl  map[string]int
    mutx       sync.Mutex    
}

var myCache = Cache{uniqueUrl:make(map[string]int)}
var wg sync.WaitGroup     // required to let all launched goroutines finish before main() exits

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {

    defer wg.Done()
    
    if depth <= 0 {
        return
    }
        
    // check and insert under the same lock to avoid a race
    myCache.mutx.Lock()
    _, visited := myCache.uniqueUrl[url]
    if visited {
        myCache.mutx.Unlock()
        return
    }
    myCache.uniqueUrl[url] = 1
    myCache.mutx.Unlock()
    
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        wg.Add(1)
        go Crawl(u, depth-1, fetcher)
    }
    return
}

func main() {
    wg.Add(1)
    Crawl("http://golang.org/", 4, fetcher)
    wg.Wait()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("***not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

tim-smart commented Feb 10, 2017 edited

My crawler:

package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type Crawler struct {
	baseUrl string
	depth int
	fetcher *Fetcher
	
	cache map[string] bool
	active int
	
	mux sync.Mutex
}

func NewCrawler(baseUrl string, depth int, fetcher *Fetcher) *Crawler {
	return &Crawler{
		baseUrl: baseUrl,
		depth: depth,
		cache: make(map[string] bool),
		fetcher: fetcher,
	}
}

func (c *Crawler) RegisterWorker() {
	c.mux.Lock()
	c.active++
	c.mux.Unlock()
}

func (c *Crawler) DeregisterWorkerAndMaybeCloseChannel(ch chan<- string) {
	c.mux.Lock()

	c.active--
	if c.active <= 0 {
		close(ch)
	}
	
	c.mux.Unlock()
}

func (c *Crawler) CheckUrl(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()

	if _, ok := c.cache[url]; ok {
		return false
	}
	
	c.cache[url] = true
	return true
}

func (c *Crawler) Work(url string, currentDepth int, urlsChannel chan<- string) {
	if currentDepth <= 0 || !c.CheckUrl(url) {
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	
	_, urls, err := (*c.fetcher).Fetch(url)
	if err != nil {
		fmt.Println(err)
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	urlsChannel <- url
	
	for _, subUrl := range urls {
		c.RegisterWorker()
		go c.Work(subUrl, currentDepth - 1, urlsChannel)
	}
	
	c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
}

func (c *Crawler) Crawl(urlsChannel chan<- string) {
	c.RegisterWorker()
	go c.Work(c.baseUrl, c.depth, urlsChannel)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	urls := make(chan string)

	crawler := NewCrawler(url, depth, &fetcher)
	crawler.Crawl(urls)
	
	for u := range urls {
		fmt.Println("Got url:", u)
	}
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	time.Sleep(200 * time.Millisecond)
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

Gzure commented Mar 30, 2017

My solution #69 WebCrawler using Mutex

package main

import (
	"fmt"
	"sync"
	"time"
)

type urlCache struct{
	m map[string]bool
	mux sync.Mutex
}

func (uc *urlCache) Add(url string){
	uc.mux.Lock()
	uc.m[url] = true
	uc.mux.Unlock()
}

func (uc *urlCache) Value(url string) bool{
	uc.mux.Lock()
	defer uc.mux.Unlock()
	return uc.m[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher,uc *urlCache) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		return
	}
	if uc.Value(url) {
		//fmt.Println(url," is fetched")
		return 
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	uc.Add(url)
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		//fmt.Println(u)
		go Crawl(u, depth-1, fetcher,uc)
	}
	return
}

func main() {
	uc := urlCache{m:make(map[string]bool)}
	go Crawl("http://golang.org/", 4, fetcher,&uc)
	time.Sleep(time.Second)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

I apologize that I didn't check all the above comments.
There are too many.

There are two problems for the Web Crawler:

  • mutual exclusion of the map
  • termination of the goroutines (Crawl): it is necessary to check whether all the goroutines have terminated.

The solution for the mutual exclusion is straightforward: I used sync.Mutex.
For the termination, I used the Dijkstra & Scholten idea. It is simple and it is detailed in "Termination Detection for Diffusing Computations", Information Processing Letters, Vol. 11, No. 1, Aug. 1980.

package main

import (
    "fmt"
    "sync"
)

// SafeMap is a safe map of URLs to use concurrently.
type SafeMap struct {
    v   map[string]int
    mux sync.Mutex
}

func (c *SafeMap) Ins(key string) {
    c.mux.Lock()
    c.v[key]++
    c.mux.Unlock()
}

func (c *SafeMap) HasValue(key string) bool {
    c.mux.Lock()
    defer c.mux.Unlock()
    _, ok := c.v[key]
    return ok
}

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// Each Crawl signals its parent on ch once its whole subtree is done
// (the Dijkstra & Scholten termination scheme).
func Crawl(url string, depth int, fetcher Fetcher, ch chan int) {
    defer func() { ch <- 1 }()
    if depth <= 0 {
        return
    }
    ok := c.HasValue(url)
    if ok {
        //fmt.Println("URL already in the map", url)
        return
    }
    c.Ins(url)

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    nUrls := len(urls)
    chs := make(chan int)
    for _, u := range urls {
        go Crawl(u, depth-1, fetcher, chs)
    }
    for i := 0; i < nUrls; i++ {
        <-chs
    }
}

var c = SafeMap{v: make(map[string]int)}

func main() {
    ch := make(chan int)
    go Crawl("http://golang.org/", 4, fetcher, ch)
    fmt.Println("Processed ", <-ch)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}
