@zyxar
Last active April 15, 2024 07:08

tour.golang exercise solutions
/* Exercise: Loops and Functions #43 */
package main

import (
	"fmt"
	"math"
)

func Sqrt(x float64) float64 {
	z := float64(2.)
	s := float64(0)
	for {
		z = z - (z*z-x)/(2*z)
		if math.Abs(s-z) < 1e-15 {
			break
		}
		s = z
	}
	return s
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(math.Sqrt(2))
}
/******************************************************************************************************/
/* Exercise: Maps #44 */
package main

import (
	"strings"

	"tour/wc"
)

func WordCount(s string) map[string]int {
	ss := strings.Fields(s)
	num := len(ss)
	ret := make(map[string]int)
	for i := 0; i < num; i++ {
		ret[ss[i]]++
	}
	return ret
}

func main() {
	wc.Test(WordCount)
}
/******************************************************************************************************/
/* Exercise: Slices #45 */
package main

import "tour/pic"

func Pic(dx, dy int) [][]uint8 {
	ret := make([][]uint8, dy)
	for i := 0; i < dy; i++ {
		ret[i] = make([]uint8, dx)
		for j := 0; j < dx; j++ {
			ret[i][j] = uint8(i^j + (i+j)/2)
		}
	}
	return ret
}

func main() {
	pic.Show(Pic)
}
/******************************************************************************************************/
/* Exercise: Fibonacci closure #46 */
package main

import "fmt"

// fibonacci is a function that returns
// a function that returns an int.
func fibonacci() func() int {
	var x, y = 0, 1
	return func() (z int) {
		z, x, y = x, y, x+y
		return
	}
}

func main() {
	f := fibonacci()
	for i := 0; i < 10; i++ {
		fmt.Println(f())
	}
}
/******************************************************************************************************/
/* Advanced Exercise: Complex cube roots #47 */
package main

import (
	"fmt"
	"math/cmplx"
)

func Cbrt(x complex128) complex128 {
	z := complex128(2)
	s := complex128(0)
	for {
		z = z - (cmplx.Pow(z, 3)-x)/(3*(z*z))
		if cmplx.Abs(s-z) < 1e-17 {
			break
		}
		s = z
	}
	return z
}

func main() {
	fmt.Println(Cbrt(2))
}
/******************************************************************************************************/
/* Exercise: Errors #57 */
package main

import (
	"fmt"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
	return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

func Sqrt(f float64) (float64, error) {
	if f < 0 {
		return 0, ErrNegativeSqrt(f)
	}
	return 0, nil
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(Sqrt(-2))
}
/******************************************************************************************************/
/* Exercise: Images #58 */
package main

import (
	"image"
	"image/color"

	"tour/pic"
)

type Image struct {
	Width, Height int
	colr          uint8
}

func (r *Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, r.Width, r.Height)
}

func (r *Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (r *Image) At(x, y int) color.Color {
	return color.RGBA{r.colr + uint8(x), r.colr + uint8(y), 255, 255}
}

func main() {
	m := Image{100, 100, 128}
	pic.ShowImage(&m)
}
/******************************************************************************************************/
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */
package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	for i := 0; i < len(p); i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}
/******************************************************************************************************/
/* Exercise: Equivalent Binary Trees #67 */
package main

import (
	"fmt"

	"tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	_walk(t, ch)
	close(ch)
}

func _walk(t *tree.Tree, ch chan int) {
	if t != nil {
		_walk(t.Left, ch)
		ch <- t.Value
		_walk(t.Right, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	ch2 := make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for i := range ch1 {
		if i != <-ch2 {
			return false
		}
	}
	return true
}

func main() {
	//tree.New(2)
	ch := make(chan int)
	go Walk(tree.New(1), ch)
	for v := range ch {
		fmt.Print(v)
	}
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}
/******************************************************************************************************/
/* Exercise: Web Crawler #69 */
package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

var store map[string]bool

func Krawl(url string, fetcher Fetcher, Urls chan []string) {
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("found: %s %q\n", url, body)
	}
	Urls <- urls
}

func Crawl(url string, depth int, fetcher Fetcher) {
	Urls := make(chan []string)
	go Krawl(url, fetcher, Urls)
	band := 1
	store[url] = true // init for level 0 done
	for i := 0; i < depth; i++ {
		for band > 0 {
			band--
			next := <-Urls
			for _, url := range next {
				if _, done := store[url]; !done {
					store[url] = true
					band++
					go Krawl(url, fetcher, Urls)
				}
			}
		}
	}
	return
}

func main() {
	store = make(map[string]bool)
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := (*f)[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
@BjoernPetersen

The solution to #57 doesn't calculate the Sqrt if there is no error ...
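A minimal fix sketch (not the gist author's code): keep the error check, and otherwise run the same Newton iteration as in exercise #43 instead of returning 0:

package main

import "fmt"

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
	return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

// Sqrt reports an error for negative input; otherwise it
// iterates Newton's method, as in exercise #43.
func Sqrt(f float64) (float64, error) {
	if f < 0 {
		return 0, ErrNegativeSqrt(f)
	}
	z := 1.0
	for i := 0; i < 20; i++ {
		z -= (z*z - f) / (2 * z)
	}
	return z, nil
}

func main() {
	fmt.Println(Sqrt(2))  // 1.4142135623730951 <nil>
	fmt.Println(Sqrt(-2)) // 0 cannot Sqrt negative number: -2
}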

@Alexis-benoist

I think there is a bug in [the tree comparison](https://gist.github.com/zyxar/2317744#file-exercise-tour-go-L256).

If the second tree is longer than the first and all the shared values are the same (e.g. [1, 2] and [1, 2, 3]), the func will return true, but the trees are not the same.

func Same(t1, t2 *tree.Tree) bool {
    c1 := make(chan(int))
    c2 := make(chan(int))
    go Walk(t1, c1)
    go Walk(t2, c2)
    for i := range c1 {
        fmt.Println(i)
        if i != <- c2 {
            return false
        }
    }
    for range c2 {
        return false
    }
    return true
}

I also made a helper to show that:

func NewLen(k int) *Tree {
    var t *Tree
    for i :=1; i <= k; i++ {
        t = insert(t, i)
    }
    return t
}

and the main:

func main() {
    fmt.Println(Same(tree.NewLen(10), tree.NewLen(12)))
    fmt.Println(Same(tree.NewLen(14), tree.NewLen(12)))
    fmt.Println(Same(tree.NewLen(14), tree.NewLen(14)))
}

Cheers,
Alexis.

@akostadinov

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n,err = rot.r.Read(p)
    for i := 0; i < len(p); i++ {
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){
            p[i] -= 13
        }
    }
    return
}

More precisely, in for i := 0; i < len(p); i++: what if the buffer was not filled completely by the read call? You'll modify parts of the buffer that were not meant to be used. The program works because processing the extra bytes doesn't matter for this algorithm (it's merely inefficient), but it could certainly break something doing hashing, encryption or the like.
Another issue I see is that it will not return immediately if the underlying reader returned an EOF or an error, but will again process the whole slice.

len(p) needs to be changed to n.

One question: on error, would n always be 0 or is this undefined? If undefined additional check for error would be needed before the processing loop.

It's a nice example. Just starting with go and wanted to see how others approach the issue. I like very much the useful example for using named return values. Thank you.
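For reference, a sketch of the Read with both fixes applied, as a drop-in replacement for the gist's version above: the loop is bounded by n, and err flows back to the caller untouched. On the side question: the io.Reader contract explicitly allows a reader to return n > 0 together with a non-nil error, so the n bytes should be processed before err is acted on, which bounding the loop by n already handles.

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	// Rotate only the n bytes actually read; the rest of p was
	// never filled by the underlying reader.
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return // err, including io.EOF, is passed through unchanged
}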

@jasonknight

Here is my MyReader solution

    package main

    import "golang.org/x/tour/reader"

    type MyReader struct{}

    // TODO: Add a Read([]byte) (int, error) method to MyReader.

    func (m MyReader) Read(b []byte) (int,error) {
      cnt := 0
      for i := 0; i < len(b); i++ {
        b[i] = 'A'
        cnt = i+1
      }
      return cnt,nil
    }

    func main() {
      reader.Validate(MyReader{})
    }

@nishanths

An alternate exercise-reader.go solution

package main

import "golang.org/x/tour/reader"

type MyReader struct{}

func (r MyReader) Read(s []byte) (int, error) {
    for i := range s {
        s[i] = 'A'
    }

    return len(s), nil
}

func main() {
    reader.Validate(MyReader{})
}

@dacavcosta

I didn't find any solution for the HTTP Handlers exercise, so here's my solution
(sorry for not formatting my code properly, I'm still new to GitHub)

package main

import (
	"fmt"
	"log"
	"net/http"
)

type String string

type Struct struct {
	Greeting string
	Punct    string
	Who      string
}

func (s String) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, s)
}

func (s Struct) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, s)
}

func main() {
	// your http.Handle calls here
	http.Handle("/string", String("I'm a frayed knot."))
	http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
	log.Fatal(http.ListenAndServe("localhost:4000", nil))
}

@abalone0204

HTTP Handlers exercise's solution

package main

import (
    "log"
    "net/http"
    "fmt"
)

type String string

type Struct struct {
    Greeting string
    Punct string
    Who string
}

func (s String) ServeHTTP(w http.ResponseWriter,r *http.Request) {
    fmt.Fprint(w, s)
}

func (s Struct) ServeHTTP(w http.ResponseWriter,r *http.Request) {
    fmt.Fprint(w, s)
}

func main() {
    // your http.Handle calls here
    http.Handle("/string", String("I'm a frayed knot."))
    http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
    log.Fatal(http.ListenAndServe("localhost:4000", nil))
}

@minpajr

minpajr commented Jan 14, 2016

Exercise: Readers

Since the exercise asks you to implement a Reader which returns the number of bytes read and an error, we need to return the number of bytes read and to at least cover some error cases. We know from previous exercises that io.Reader signals the end of input with an io.EOF error, and we can assume that if the input b is nil or empty, we have reached the end of the file...

package main

import (
    "golang.org/x/tour/reader"
    "fmt"
)

type ErrEOF []byte

func (e ErrEOF) Error() string {
    return fmt.Sprintf("End of file")
}

type MyReader struct{}

// TODO: Add a Read([]byte) (int, error) method to MyReader.
func (r MyReader) Read(b []byte) (int, error) {
    if b == nil || len(b) == 0 {
        err := ErrEOF(b)
        return 0, err
    }

    for i := 0; i < len(b); i++ {
        b[i] = 'A'
    }

    return len(b), nil
}

func main() {
    reader.Validate(MyReader{})
}

@makushline

Recently completed the gotour and made a repo with my solutions here: https://github.com/makushline/gotour
Any feedback is highly appreciated.

@louwers

louwers commented Mar 18, 2016

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (int, error) {
    n, err := rot.r.Read(p)
    for i := 0; i < n; i++ {
        if p[i] < 'A' || p[i] > 'z' { // skip bytes outside 'A'..'z'
            continue
        }

        if p[i] >= 'a' && p[i] < 'a'+13 || p[i] >= 'A' && p[i] < 'A'+13 {
            p[i] += 13
        } else {
            p[i] -= 13
        }
    }
    return n, err
}

func main() {
    s := strings.NewReader("NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

@HenryBarnett

A solution to the binary tree using only one channel and a map:

package main

import (
    "golang.org/x/tour/tree"
    "fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    if t != nil {
        ch <- t.Value
        Walk(t.Left, ch)
        Walk(t.Right, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    values := make(map[int]bool)
    ch := make(chan int)
    go func() {
        Walk(t1, ch)
        Walk(t2, ch)
        close(ch)
    }()
    for val := range ch {
        if _, ok := values[val]; ok {
            delete(values, val)
        } else {
            values[val] = true
        }
    }
    return len(values) == 0
}

func main() {
    fmt.Println(Same(tree.New(1), tree.New(2)))
    fmt.Println(Same(tree.New(1), tree.New(1)))
}

@alukos

alukos commented Apr 24, 2016

Exercise: rot13Reader

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (r *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = r.r.Read(p)
    for i, b := range p {
        switch {
        case 'A' <= b && b <= 'M', 'a' <= b && b <= 'm':
            p[i] += 13
        case 'N' <= b && b <= 'Z', 'n' <= b && b <= 'z':
            p[i] -= 13
        }
    }
    return
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

@zacps

zacps commented Apr 26, 2016

Error is thrown on #58 - Images because Bounds(), ColorModel() and At() need a non-pointer receiver in order to implement image.Image.

func (img Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, img.Width, img.Height)
}

func (img Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (img Image) At(x, y int) color.Color {
    return color.RGBA{img.colr + uint8(x), img.colr + uint8(y), 255, 255}
}

@thegamblerrises

Hi, thanks for the solutions. They helped more with understanding Go than as solutions themselves.

On a side note, may I know what color theme you are using? I like a white background in editors; please let me know. Thanks.

@mpcjanssen

The rot13 solution assumes the underlying reader fills the requested number of bytes. This is not necessarily the case, so:

for i, b := range p

should be

for i:=0 ; i < n ; i++

@Tantalum73

Tantalum73 commented May 26, 2016

I want to share my solution for the image exercise (number 25) with you and everyone who stumbles over this post.

package main

import (
    "golang.org/x/tour/pic"
    "image/color"
    "image"
)


type Image struct{
    Rect image.Rectangle
}

func (i Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (i Image) Bounds() image.Rectangle {
	return i.Rect
}

func (i Image) At(x, y int) color.Color {
	return color.RGBA{uint8(x * y), uint8(y + x), 255, 255}
}

func main() {
    m := Image{image.Rect(0,0,300,300)}
    pic.ShowImage(m)
}

@hugdru

hugdru commented Sep 29, 2016

/* Exercise: Web Crawler #69 - https://tour.golang.org/concurrency/10 */

package main

import (
    "fmt"
    "sync"
)

type Set struct {
    v   map[string]bool
    mux sync.Mutex
}

func (set *Set) Register(key string) {
    set.mux.Lock()
    defer set.mux.Unlock()
    set.v[key] = true
}

func (set *Set) Exists(key string) bool {
    set.mux.Lock()
    defer set.mux.Unlock()
    return set.v[key]
}

type Fetcher interface {
    Fetch(url string) (body string, urls []string, err error)
}

func Crawl(url string, depth int, fetcher Fetcher, ch chan string) {
    set := &Set{v: make(map[string]bool)} // a pointer, so all goroutines share one Mutex
    var wg sync.WaitGroup
    wg.Add(1)
    go crawl(url, depth, fetcher, set, ch, &wg)
    wg.Wait()
    close(ch)
}

func crawl(url string, depth int, fetcher Fetcher, set *Set, ch chan string, wg *sync.WaitGroup) {
    defer wg.Done()

    if depth <= 0 || set.Exists(url) {
        return
    }

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    set.Register(url)
    ch <- url

    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        if set.Exists(u) {
            continue
        }
        wg.Add(1)
        go crawl(u, depth-1, fetcher, set, ch, wg)
    }
    return
}

func main() {
    ch := make(chan string)
    go Crawl("http://golang.org/", 4, fetcher, ch)
    for value := range ch {
        fmt.Println(value)
    }
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@mymtw

mymtw commented Oct 6, 2016

My solution for Stringers. It doesn't buy much for the fixed-size array, but it would also work if IPAddr were a slice of bytes (dynamic size):

package main

import (
    "fmt"
    "strings"
    "strconv"
)


type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.
func (ipaddr IPAddr) String() string {
    var strs []string
    for _, v := range ipaddr {
        strs = append(
            strs, strconv.Itoa(int(v)))
    }
    return strings.Join(strs, ".")
}

func main() {
    hosts := map[string]IPAddr{
        "loopback":  {127, 0, 0, 1},
        "googleDNS": {8, 8, 8, 8},
    }
    for name, ip := range hosts {
        fmt.Printf("%v: %v\n", name, ip)
    }
}

@ppontryagin

ppontryagin commented Oct 21, 2016

/* Exercise: Loops and Functions #43 */

package go_play

import "math"

const (
    Delta = 0.01
)

// By dividing by x regardless of the scale of x 
// the difference should maintain a constant percentage via its fractional value.
// https://sites.google.com/a/stonybrook.edu/functional-programming-scala/lecture-1-5
func isGoodEnough(guess, x float64) bool {
    return math.Abs((guess * guess) - x) / x < Delta // divide by x 
}

func iter(zN, x float64) float64 {
    zN1 := zN - (zN * zN - x) / (2 * zN)
    return zN1
}

func Sqrt(x float64) float64 {
    x0 := x
    guess := x
    // keep iterating until the guess is good enough
    for ; !isGoodEnough(guess, x); guess = iter(x0, x) {
        x0 = guess
    }
    return guess
}

@1e0ng

1e0ng commented Nov 30, 2016

@zyxar , for #67, even though the test cases the question provided pass, that solution will not pass when ch2 has more elements. It even panics when ch2 has fewer elements.
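A length-safe sketch (assuming, as in the gist, that Walk closes its channel when done): receiving with the two-value form makes unequal tree sizes detectable, since a closed channel reports ok == false.

package main

import (
	"fmt"

	"golang.org/x/tour/tree"
)

// Walk sends all values of t to ch in order, then closes ch.
func Walk(t *tree.Tree, ch chan int) {
	var walk func(t *tree.Tree)
	walk = func(t *tree.Tree) {
		if t != nil {
			walk(t.Left)
			ch <- t.Value
			walk(t.Right)
		}
	}
	walk(t)
	close(ch)
}

// Same compares with the two-value receive, so trees with a
// different number of values are reported as different.
func Same(t1, t2 *tree.Tree) bool {
	ch1, ch2 := make(chan int), make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for {
		v1, ok1 := <-ch1
		v2, ok2 := <-ch2
		if ok1 != ok2 { // one walk finished before the other
			return false
		}
		if !ok1 { // both finished with all values equal
			return true
		}
		if v1 != v2 {
			return false
		}
	}
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(1))) // true
	fmt.Println(Same(tree.New(1), tree.New(2))) // false
}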

@mohigupt

mohigupt commented Dec 15, 2016

My solution to #69 WebCrawler using Mutex.
My specific changes are only in the provided Crawl function (and, of course, the declaration of the Cache type).

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}


type Cache struct {
    uniqueUrl  map[string]int
    mutx       sync.Mutex    
}

var myCache = Cache{uniqueUrl:make(map[string]int)}
var wg sync.WaitGroup     // required to let all launched goroutines finish before main() exits

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {

    defer wg.Done()
    
    if depth <= 0 {
        return
    }
        
    myCache.mutx.Lock()
    _, visited := myCache.uniqueUrl[url]
    if visited {
        myCache.mutx.Unlock()
        return
    }
    myCache.uniqueUrl[url] = 1 // mark as visited while still holding the lock
    myCache.mutx.Unlock()
    
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        wg.Add(1)
        go Crawl(u, depth-1, fetcher)
    }
    return
}

func main() {
    wg.Add(1)
    Crawl("http://golang.org/", 4, fetcher)
    wg.Wait()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("***not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@tim-smart

tim-smart commented Feb 10, 2017

My crawler:

package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type Crawler struct {
	baseUrl string
	depth int
	fetcher *Fetcher
	
	cache map[string] bool
	active int
	
	mux sync.Mutex
}

func NewCrawler(baseUrl string, depth int, fetcher *Fetcher) *Crawler {
	return &Crawler{
		baseUrl: baseUrl,
		depth: depth,
		cache: make(map[string] bool),
		fetcher: fetcher,
	}
}

func (c *Crawler) RegisterWorker() {
	c.mux.Lock()
	c.active++
	c.mux.Unlock()
}

func (c *Crawler) DeregisterWorkerAndMaybeCloseChannel(ch chan<- string) {
	c.mux.Lock()

	c.active--
	if c.active <= 0 {
		close(ch)
	}
	
	c.mux.Unlock()
}

func (c *Crawler) CheckUrl(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()

	if _, ok := c.cache[url]; ok {
		return false
	}
	
	c.cache[url] = true
	return true
}

func (c *Crawler) Work(url string, currentDepth int, urlsChannel chan<- string) {
	if currentDepth <= 0 || !c.CheckUrl(url) {
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	
	_, urls, err := (*c.fetcher).Fetch(url)
	if err != nil {
		fmt.Println(err)
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	urlsChannel <- url
	
	for _, subUrl := range urls {
		c.RegisterWorker()
		go c.Work(subUrl, currentDepth - 1, urlsChannel)
	}
	
	c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
}

func (c *Crawler) Crawl(urlsChannel chan<- string) {
	c.RegisterWorker()
	go c.Work(c.baseUrl, c.depth, urlsChannel)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	urls := make(chan string)

	crawler := NewCrawler(url, depth, &fetcher)
	crawler.Crawl(urls)
	
	for u := range urls {
		fmt.Println("Got url:", u)
	}
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	time.Sleep(200 * time.Millisecond)
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@Gzure

Gzure commented Mar 30, 2017

My solution #69 WebCrawler using Mutex

package main

import (
	"fmt"
	"sync"
	"time"
)

type urlCache struct{
	m map[string]bool
	mux sync.Mutex
}

func (uc *urlCache) Add(url string){
	uc.mux.Lock()
	uc.m[url] = true
	uc.mux.Unlock()
}

func (uc *urlCache) Value(url string) bool{
	uc.mux.Lock()
	defer uc.mux.Unlock()
	return uc.m[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher,uc *urlCache) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		return
	}
	if uc.Value(url) {
		//fmt.Println(url," is fetched")
		return 
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	uc.Add(url)
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		//fmt.Println(u)
		go Crawl(u, depth-1, fetcher,uc)
	}
	return
}

func main() {
	uc := urlCache{m:make(map[string]bool)}
	go Crawl("http://golang.org/", 4, fetcher,&uc)
	time.Sleep(time.Second)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@hirata-ita

I apologize that I didn't check all the above comments.
There are too many.

There are two problems for the Web Crawler:

  • mutual exclusion of the map
  • termination of the goroutines (Crawl): it is necessary to check whether all the goroutines have terminated.

The solution for the mutual exclusion is straightforward: I used sync.Mutex.
For the termination, I used the Dijkstra & Scholten idea. It is simple, and it is detailed in "Termination Detection for Diffusing Computations", Information Processing Letters, Vol. 11, No. 1, Aug. 1980.

package main

import (
	"fmt"
	"sync"
)

// SafeMap is safe map of URL to use concurrently.
type SafeMap struct {
	v   map[string]int
	mux sync.Mutex
}

func (c *SafeMap) Ins(key string) {
	c.mux.Lock()
	c.v[key]++
	c.mux.Unlock()
}

func (c *SafeMap) HasValue(key string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	_, ok := c.v[key]
	return ok
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

var c = SafeMap{v: make(map[string]int)}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, ch chan int) {
	defer func() { ch <- 1 }() // signal the parent when this crawl is done
	if depth <= 0 {
		return
	}
	if c.HasValue(url) {
		//fmt.Println("URL already in the map", url)
		return
	}
	c.Ins(url)

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	nUrls := len(urls)
	chs := make(chan int)
	for _, u := range urls {
		go Crawl(u, depth-1, fetcher, chs)
	}
	for i := 0; i < nUrls; i++ {
		<-chs // wait for all children (Dijkstra-Scholten style)
	}
	return
}

func main() {
	ch := make(chan int)
	go Crawl("http://golang.org/", 4, fetcher, ch)
	fmt.Println("Processed ", <-ch)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@cixuuz

cixuuz commented Jan 1, 2018

My solution using sync.Mutex

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

func crawl(url string, fetcher Fetcher, Urls chan []string) {
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("found: %s %q\n", url, body)
	}
	
	Urls <- urls
}

// SafeUrls is safe to use concurrently.
type SafeUrls struct {
	v   map[string]bool
	mux sync.Mutex
}

var Urls chan []string = make(chan []string)
var visited = SafeUrls{v: make(map[string]bool)}

// Set url visited for the given key.
func (c *SafeUrls) Add(key string) {
	c.mux.Lock()
	// Lock so only one goroutine at a time can access the map c.v.
	c.v[key] = true
	c.mux.Unlock()
}

// Value returns the current value of the urls for the given key.
func (c *SafeUrls) Value(key string) bool {
	c.mux.Lock()
	// Lock so only one goroutine at a time can access the map c.v.
	defer c.mux.Unlock()
	return c.v[key]
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	if depth <= 0 || visited.Value(url) {
		return
	}
	visited.Add(url) // guarded by the mutex inside SafeUrls

	go crawl(url, fetcher, Urls)

	urls := <-Urls
	for _, u := range urls {
		Crawl(u, depth-1, fetcher)
	}
	return
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@BnSmth

BnSmth commented Jan 11, 2018

Here is my solution to the crawler. It uses a mutex within the URL cache and a wait group in order to terminate once all goroutines have finished. I'm not sure whether this is idiomatic or not (especially the UrlCache and Crawler structs/methods); if anyone has any feedback, I would appreciate it.

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

type UrlCache struct {
	urls map[string]bool
	mux  sync.Mutex
}

func (cache *UrlCache) Add(url string) {
	cache.mux.Lock()
	cache.urls[url] = true
	cache.mux.Unlock()
}

func (cache *UrlCache) Contains(url string) bool {
	cache.mux.Lock()
	defer cache.mux.Unlock()
	return cache.urls[url]
}

type Crawler struct {
	urlCache  UrlCache
	fetcher   Fetcher
	waitGroup sync.WaitGroup
}

func (crawler *Crawler) Crawl(url string, depth int) {
	defer crawler.waitGroup.Done()

	if depth <= 0 {
		return
	}

	if crawler.urlCache.Contains(url) {
		return
	}

	_, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Errorf("crawl: error when fetching url %s %v\n", url, err)
		return
	}

	fmt.Printf("visited %s - %d urls found\n", url, len(urls))
	crawler.urlCache.Add(url)

	for _, u := range urls {
		crawler.waitGroup.Add(1)
		go crawler.Crawl(u, depth-1)
	}
}

func main() {
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(1)

	crawler := Crawler{
		urlCache:  UrlCache{urls: make(map[string]bool)},
		fetcher:   fetcher,
		waitGroup: waitGroup,
	}

	go crawler.Crawl("http://golang.org/", 4)
	crawler.waitGroup.Wait()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@findshrey

At line 61 the code:
ret[i] = make([]uint8, dx)
Can you tell me the reasoning behind using dx as the length of the slice? It doesn't say the length is dx, but the slice is made of dx elements. Kinda confused :/
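The reasoning: pic.Show hands Pic a width dx and a height dy and expects dy rows of dx pixels each, so the outer slice gets dy inner slices and each inner slice is allocated with length dx (one value per column). A stripped-down sketch of just the shape (the pixel formula here is arbitrary):

package main

import "fmt"

// Pic returns dy rows, each dx pixels wide, so ret[y][x] is the
// pixel at column x of row y.
func Pic(dx, dy int) [][]uint8 {
	ret := make([][]uint8, dy) // one inner slice per row
	for y := range ret {
		ret[y] = make([]uint8, dx) // one byte per column in this row
		for x := range ret[y] {
			ret[y][x] = uint8((x + y) / 2)
		}
	}
	return ret
}

func main() {
	p := Pic(4, 2)
	fmt.Println(len(p), len(p[0])) // prints: 2 4  (2 rows of 4 pixels)
}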

@shipa988

If we know that the trees have 10 nodes, then this variant has a chance of working :)
package main

import (
	"fmt"
	"sort"

	"golang.org/x/tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
//var cnt1 chan int

func Walk(t *tree.Tree, ch chan int) {
	if t != nil {
		ch <- t.Value
		count := <-ch
		if count == 10 {
			close(ch)
			return
		}
		Walk(t.Right, ch)
		Walk(t.Left, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	go Walk(t1, ch1)
	ch2 := make(chan int)
	go Walk(t2, ch2)
	var n1 [10]int
	var n2 [10]int
	i1, i2 := 0, 0
	for i := range ch1 {
		n1[i1] = i
		i1++
		ch1 <- i1
	}
	for i := range ch2 {
		n2[i2] = i
		i2++
		ch2 <- i2
	}
	sort.Ints(n1[:])
	sort.Ints(n2[:])
	return n1 == n2
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}

@shipa988

For the Crawler, I only used knowledge from previous lessons, such as a SafeCounter (without sync.WaitGroup):
package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type SafeHashset struct {
	mux sync.Mutex
	h   map[string]bool
}

func (hashset *SafeHashset) Add(url string) bool {
	hashset.mux.Lock()
	defer hashset.mux.Unlock()
	if _, ok := hashset.h[url]; !ok {
		hashset.h[url] = true
		return true
	} else {
		return false
	}
}

type SafeCounter struct {
	mux sync.Mutex
	c   int
}

func (counter *SafeCounter) Inc(inc int) {
	counter.mux.Lock()
	defer counter.mux.Unlock()
	counter.c = counter.c + inc
}

func (counter *SafeCounter) Value() int {
	counter.mux.Lock()
	defer counter.mux.Unlock()
	return counter.c
}

//var ch chan struct{} = make(chan struct {})
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
var hashset = SafeHashset{h: make(map[string]bool)}
var count = SafeCounter{}

func Crawl(url string, depth int, fetcher Fetcher) {
	defer count.Inc(-1)
	if depth <= 0 {
		return
	}

	if hashset.Add(url) {
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range urls {
			count.Inc(1)
			go Crawl(u, depth-1, fetcher)
			//defer <-ch
		}
	}
	return
}

func main() {
	count.Inc(1)
	Crawl("http://golang.org/", 4, fetcher)
	for {
		if count.Value() == 0 {
			break
		}
		time.Sleep(time.Second)
	}
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@eugeneYWang

my binary tree #67 solution

package main

import (
	"golang.org/x/tour/tree"
	"fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int, layer int) {
	if t == nil { 
		return
	}
	
	Walk(t.Left, ch, layer + 1)
	ch <- t.Value
	Walk(t.Right, ch, layer + 1)
	
	if layer == 0 {
		close(ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	
	ch1 := make(chan int, 10)
	ch2 := make(chan int, 10)
	
	go Walk(t1, ch1, 0)
	go Walk(t2, ch2, 0)
	
	for {
		n1, ok1 := <- ch1
		n2, ok2 := <- ch2
		
		if n1 != n2 || ok1 != ok2 {
			return false
		} else if ok1 == ok2 && ok1 == false {
			return true
		}
	}
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(2)))
	fmt.Println(Same(tree.New(1), tree.New(1)))
}

@Aternus

Aternus commented Aug 14, 2022

IP Address Solution:

package tour

import "fmt"

type IPAddr [4]byte

func (ipAddr IPAddr) String() string {
	return fmt.Sprintf("%d.%d.%d.%d", ipAddr[0], ipAddr[1], ipAddr[2], ipAddr[3])
}

func IPAddress() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for name, ip := range hosts {
		fmt.Printf("%v: %v\n", name, ip)
	}
}

@Aternus

Aternus commented Aug 14, 2022

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

@Aternus

Aternus commented Aug 16, 2022

rot13Reader Solution:
❓ this solution saves an extra loop by returning io.EOF as early as possible.

package tour

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (r rot13Reader) Read(b []byte) (int, error) {
	n, err := r.r.Read(b)
	// short circuit: EOF
	if err == io.EOF {
		return 0, io.EOF
	}
	for k, v := range b {
		b[k] = rot13(v)
	}
	return n, err
}

func Rot13ReaderMain() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

func rot13(b byte) byte {
	if b >= 'A' && b <= 'Z' {
		b = 'A' + (b-'A'+13)%26
	} else if b >= 'a' && b <= 'z' {
		b = 'a' + (b-'a'+13)%26
	}
	return b
}

@Aternus

Aternus commented Aug 22, 2022

Images exercise solution:

package tour

import (
	"image"
	"image/color"
)

type Image struct {
	Width  int
	Height int
}

func (i Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (i Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, i.Width, i.Height)
}

func (i Image) At(x, y int) color.Color {
	r := uint8(x + y)
	g := uint8(x ^ y)
	b := uint8(x * y)
	//fmt.Println("R:", r, "G:", g, "B:", b)
	return color.RGBA{r, g, b, 255}
}

@AlbertoCoder

AlbertoCoder commented Sep 12, 2022

This is my solution to the Exercise "Stringers":

package main

import "fmt"

type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.

// What I did was I returned the call to Sprintf (in the fmt package) and formatted each of the four byte values in the array
// of bytes, separated with dots:

func (ip IPAddr) String() string {
	// The 'Sprintf' function formats according to a format specifier
	// and returns the resulting string. In other words: Sprintf builds
	// the dotted string first, and then this String() method returns it:
	return fmt.Sprintf("%v.%v.%v.%v", ip[0], ip[1], ip[2], ip[3])
}

func main() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for nombre, ip := range hosts {
		fmt.Printf("%v: %v\n", nombre, ip)
	}
}

I hope it helps!

@AlbertoCoder

Excuse me:

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n,err = rot.r.Read(p)
	for i := 0; i < len(p); i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}
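Read is never called by name: io.Copy takes the destination and the io.Reader and calls the reader's Read method in a loop until it returns an error (io.EOF for a normal end). A simplified sketch of what io.Copy(os.Stdout, &r) does behind the scenes (the real io.Copy uses a 32 KB buffer and handles more edge cases):

package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}

func main() {
	r := &rot13Reader{strings.NewReader("Lbh penpxrq gur pbqr!")}
	buf := make([]byte, 8)
	for {
		n, err := r.Read(buf) // <- this is where Read gets invoked
		if n > 0 {
			os.Stdout.Write(buf[:n])
		}
		if err != nil { // io.EOF signals the end of the stream
			break
		}
	}
}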

@americanstone

MIT PhD crawl solution

package main

import (
	"fmt"
	"sync"
)

type Cache struct {
	m sync.Mutex
	v map[string]uint
}

func (s *Cache) Exist(key string) bool {
	s.m.Lock()
	defer s.m.Unlock()
	if _, ok := s.v[key]; ok {
		s.v[key]++
		return true
	} else {
		s.v[key] = 1
		return false
	}
}

func (s *Cache) String() string {
	s.m.Lock()
	defer s.m.Unlock()
	var sb string
	for k, v := range s.v {
		sb += fmt.Sprintf("key: %s => value: %q", k, v)
	}
	return fmt.Sprintln(sb)
}

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

//
// Serial crawler
//
func SerialCrawl(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	fmt.Println("depth ", depth)
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	for _, u := range urls {
		//fmt.Printf("recursive crawl url %s\n", u)
		SerialCrawl(u, depth-1, fetcher, cache)
	}
}

//
// Concurrent crawler with shared state and Mutex
//

func ConcurrentMutex(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	if depth <= 0 {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	var done sync.WaitGroup
	for _, u := range urls {
		done.Add(1)
		//fmt.Printf("recursive crawl url %s\n", u)
		u2 := u
		go func() {
			defer done.Done()
			ConcurrentMutex(u2, depth-1, fetcher, cache)
		}()
		//go func(u string) {
		//	defer done.Done()
		//	ConcurrentMutex(u, depth -1,fetcher, cache)
		//}(u)
	}
	done.Wait()
}

//
// Concurrent crawler with channels
//

func worker(url string, depth uint, ch chan []string, fetcher Fetcher, cache *Cache) {
	//fmt.Printf("working on url %s depth %d\n", url, depth)
	body, urls, err := fetcher.Fetch(url)

	if err != nil {
		ch <- []string{}
	} else {
		fmt.Printf("Found: %s %q\n", url, body)
		ch <- urls
	}
}

func master(url string, ch chan []string, depth uint, fetcher Fetcher, cache *Cache) {
	n := 1
	copyDepth := depth
	for urls := range ch {
		// fmt.Printf("dep :%s\n", fmt.Sprintf("%q", copyDepth))
		if copyDepth == 0 {
			break
		}
		for _, url := range urls {
			if !cache.Exist(url) {
				n += 1
				go worker(url, depth, ch, fetcher, cache)
			}
		}
		depth--
		n -= 1
		if n == 0 {
			break
		}
	}
}

func ConcurrentChan(url string, depth uint, fetcher Fetcher, cache *Cache) {
	ch := make(chan []string)
	go func() {
		ch <- []string{url}
	}()
	master(url, ch, depth, fetcher, cache)
}

func main() {
	cache := Cache{v: make(map[string]uint)}
	SerialCrawl("https://golang.org/", 4, fetcher, &cache)

	ConcurrentMutex("https://golang.org/", 4, fetcher, &cache)

	ConcurrentChan("https://golang.org/", 4, fetcher, &cache)

	fmt.Printf("\nCached urls %q\n", cache.v)
}

type fakeFetcher map[string]*fakeResult // to avoid copy value

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@ViktorVoloshko

ViktorVoloshko commented Oct 13, 2023

My relatively simple solution for the crawler, without global variables or channels, using a mutex and a WaitGroup:

package main

import (
	"fmt"
	"sync"
)

type UrlCache struct {
	urls map[string]bool
	mu   sync.Mutex
}

func (uc *UrlCache) Add(url string) {
	uc.mu.Lock()
	uc.urls[url] = true
	uc.mu.Unlock()
}

func (uc *UrlCache) Get(url string) bool {
	uc.mu.Lock()
	defer uc.mu.Unlock()
	return uc.urls[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	cache := UrlCache{urls: make(map[string]bool)}
	wg.Add(1)
	go crawl(url, depth, fetcher, &cache, &wg)
	wg.Wait()
}

func crawl(url string, depth int, fetcher Fetcher, cache *UrlCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Get(url) {
		return
	}
	cache.Add(url)
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawl(u, depth-1, fetcher, cache, wg)
	}
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@apo1967

apo1967 commented Nov 30, 2023

@Aternus

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

Is there a reason why you just don't set the 'A' directly in b? Why creating an internal slice and then copying to b? Just curious...
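For comparison, a sketch that writes 'A' straight into b (essentially nishanths' version further up): b is the caller's buffer, so filling it directly is enough, and the intermediate slice plus copy add an allocation without changing the result.

package main

import "golang.org/x/tour/reader"

type MyReader struct{}

// Read fills the caller's buffer directly; no intermediate slice needed.
func (r MyReader) Read(b []byte) (int, error) {
	for i := range b {
		b[i] = 'A'
	}
	return len(b), nil
}

func main() {
	reader.Validate(MyReader{})
}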
