Instantly share code, notes, and snippets.

# zyxar/exercise.tour.go

Last active April 28, 2024 17:06
Show Gist options
• Save zyxar/2317744 to your computer and use it in GitHub Desktop.
tour.golang exercise solutions
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode characters

### louwers commented Mar 18, 2016

```package main

import (
"io"
"os"
"strings"
)

}

for i := 0; i < n; i++ {
if p[i] < 'A' || p[i] > 'z' { // ignore non-ASCII letters
continue
}

if p[i] > 'a' && p[i] < 'a'+13 || p[i] > 'A' && p[i] < 'A'+13 {
p[i] += 13
} else {
p[i] -= 13
}
}
return n, err
}

func main() {
io.Copy(os.Stdout, &r)
}```

### ghost commented Mar 26, 2016

A solution to the binary tree only using one channel and a map:

``````package main

import (
"golang.org/x/tour/tree"
"fmt"
)

// Walk walks the tree t, sending every value in t to ch
// in root-left-right (pre-order) order.
func Walk(t *tree.Tree, ch chan int) {
	if t == nil {
		return
	}
	ch <- t.Value
	Walk(t.Left, ch)
	Walk(t.Right, ch)
}

// Same determines whether the trees
// t1 and t2 contain the same values.
//
// Implementation: both trees are streamed into a single channel; each
// received value toggles its membership in a set (insert on first
// sight, delete on second). The trees are reported equal iff the set
// ends empty, i.e. every value arrived an even number of times.
//
// NOTE(review): this cancellation trick assumes values are distinct
// within one tree (true for tree.New, which permutes distinct values).
// Two occurrences inside ONE tree cancel exactly like one occurrence
// in EACH tree, so trees containing duplicates could wrongly compare
// equal — confirm callers only pass duplicate-free trees.
func Same(t1, t2 *tree.Tree) bool {
values := make(map[int]bool)
ch := make(chan int)
go func() {
Walk(t1, ch)
Walk(t2, ch)
close(ch)
}()
for val := range ch {
if _, ok := values[val]; ok {
delete(values, val)
} else {
values[val] = true
}
}
return len(values) == 0
}

func main() {
fmt.Println(Same(tree.New(1), tree.New(2)))
fmt.Println(Same(tree.New(1), tree.New(1)))
}
``````

### alukos commented Apr 24, 2016

```package main

import (
"io"
"os"
"strings"
)

}

for i, b := range p {
switch {
case 'A' <= b && b <= 'M', 'a' <= b && b <= 'm':
p[i] += 13
case 'N' <= b && b <= 'Z', 'n' <= b && b <= 'z':
p[i] -= 13
}
}
return
}

func main() {
s := strings.NewReader("Lbh penpxrq gur pbqr!")
io.Copy(os.Stdout, &r)
}```

### zacps commented Apr 26, 2016

Error is thrown on #58 - Images because `Bounds()`, `ColorModel()` and `At()` need a non-pointer receiver in order to implement image.Image.

// Bounds reports the image's bounding rectangle.
// NOTE(review): image.Rect(0, 0, w, h) conventionally takes width then
// height; passing img.Height first looks swapped — confirm the intended
// orientation (harmless when the image is square).
func (img Image) Bounds() image.Rectangle {
return image.Rect(0, 0, img.Height, img.Width)
}
// ColorModel reports that pixels are produced in the RGBA color model.
func (img Image) ColorModel() color.Model {
return color.RGBAModel
}

// At returns the color at (x, y): the image's base color channel offset
// by the coordinates, with blue and alpha fixed at 255.
func (img Image) At(x, y int) color.Color {
return color.RGBA{img.color+uint8(x), img.color+uint8(y), 255, 255}
}
``````

### thegamblerrises commented May 10, 2016

Hi, thanks for the solutions. It helped more in understanding of Go, rather than solution itself.

On a side note, may I know what "Color theme" you are using. I like white background in editors, please let me know. Thanks.

### mpcjanssen commented May 12, 2016

The rot13 solution assumes the underlying reader returns the requested number of bytes. This is not necessarily the case, so:

`for i, b := range p`

should be

`for i:=0 ; i < n ; i++`

I want to share my solution for the image exercise (number 25) with you and everyone who stumbles over this post.

```package main

import (
"golang.org/x/tour/pic"
"image/color"
"image"
)

type Image struct{
Rect image.Rectangle
}

func (image Image) ColorModel() color.Model {
return color.RGBAModel
}
func (i Image) Bounds() image.Rectangle {
return i.Rect
}
func (image Image) At(x, y int) color.Color {
return color.RGBA{uint8(x*y), uint8(y+x), 255, 255}
}

func main() {
// Build a 300x300 procedurally colored image and hand it to the
// tour's pic helper, which renders it in the browser.
m := Image{image.Rect(0,0,300,300)}
pic.ShowImage(m)
}
```

/* Exercise: Web Crawler #69 - https://tour.golang.org/concurrency/10 */

```package main

import (
"fmt"
"sync"
)

// Set is a string set that is safe for concurrent use.
type Set struct {
	v   map[string]bool
	mux sync.Mutex
}

// Register adds key to the set.
func (s *Set) Register(key string) {
	s.mux.Lock()
	s.v[key] = true
	s.mux.Unlock()
}

// Exists reports whether key has been registered.
func (s *Set) Exists(key string) bool {
	s.mux.Lock()
	defer s.mux.Unlock()
	return s.v[key]
}

// Fetcher abstracts page retrieval so Crawl can be exercised with fakes.
type Fetcher interface {
// Fetch returns the body of the page at url and the URLs found on
// that page, or an error if the fetch fails.
Fetch(url string) (body string, urls []string, err error)
}

// Crawl crawls the graph of pages reachable from url, to a maximum of
// depth, streaming each newly visited URL into ch. It closes ch once
// every worker goroutine has finished.
func Crawl(url string, depth int, fetcher Fetcher, ch chan string) {
	set := Set{v: make(map[string]bool)}
	var wg sync.WaitGroup
	// BUG FIX: the WaitGroup counter must be incremented before the
	// goroutine starts; the original never called Add, so Done underflowed
	// the counter and Wait could return before any crawling happened.
	wg.Add(1)
	// BUG FIX: pass the Set by pointer. The original passed it by value,
	// copying the embedded sync.Mutex (flagged by go vet copylocks), so
	// workers were not actually serialized on the shared map.
	go crawl(url, depth, fetcher, &set, ch, &wg)
	wg.Wait()
	close(ch)
}

// crawl fetches one page, records it in set, reports it on ch, and
// spawns one tracked worker per outgoing link.
func crawl(url string, depth int, fetcher Fetcher, set *Set, ch chan string, wg *sync.WaitGroup) {
	defer wg.Done()

	if depth <= 0 || set.Exists(url) {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	// NOTE(review): Exists/Register is a check-then-act pair, so two
	// workers racing on the same URL may both fetch it once; harmless
	// for this exercise.
	set.Register(url)
	ch <- url

	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		if set.Exists(u) {
			continue
		}
		wg.Add(1) // account for the child before it starts
		go crawl(u, depth-1, fetcher, set, ch, wg)
	}
}

func main() {
ch := make(chan string)
go Crawl("http://golang.org/", 4, fetcher, ch)
for value := range ch {
fmt.Println(value)
}
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}```

### mymtw commented Oct 6, 2016

My solution for Stringers — it only makes sense if `IPAddr` were a byte slice of dynamic size rather than a fixed-size array:

``````package main

import (
"fmt"
"strings"
"strconv"
)

var strs []string
for _, v := range ipaddr {
strs = append(
strs, strconv.Itoa(int(v)))
}
return strings.Join(strs, ".")
}

func main() {
"loopback":  {127, 0, 0, 1},
}
for name, ip := range hosts {
fmt.Printf("%v: %v\n", name, ip)
}
}
``````

/* Exercise: Loops and Functions #43 */

``````package go_play

import "math"

const (
Delta = 0.01
)

// By dividing by x regardless of the scale of x
// the difference should maintain a constant percentage via its fractional value.
func isGoodEnough(guess, x float64) bool {
return math.Abs((guess * guess) - x) / x < Delta // divide by x
}

func iter(zN, x float64) float64 {
zN1 := zN - (zN * zN - x) / (2 * zN)
return zN1
}

func Sqrt(x float64) float64 {
x0 := x
guess := x
for ; isGoodEnough(guess, x); guess = iter(x0, x) {
x0 = guess
}
return guess
}
``````

### 1e0ng commented Nov 30, 2016

@zyxar , for #67, even though the test cases the question provided passed, that solution will not pass when `ch2` has more elements. It even gets panic when `ch2` has fewer elements.

My solution #69 WebCrawler using Mutex
My specific changes are only in provided Crawl function (and of course, the declaration for Cache type)

```package main

import (
"fmt"
"sync"
)

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

type Cache struct {
uniqueUrl  map[string]int
mutx       sync.Mutex
}

var myCache = Cache{uniqueUrl:make(map[string]int)}
var wg sync.WaitGroup     // required to let all launched goroutines finish before main() exits

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
//
// main calls Crawl synchronously and then waits on the package-level
// WaitGroup, which tracks only the goroutines spawned here.
func Crawl(url string, depth int, fetcher Fetcher) {
	if depth <= 0 {
		return
	}

	// BUG FIX: check-and-mark under one critical section. The original
	// read myCache.uniqueUrl without holding the mutex (a data race) and
	// only locked for the write, so two goroutines could both pass the
	// visited check for the same URL.
	myCache.mutx.Lock()
	_, visited := myCache.uniqueUrl[url]
	if !visited {
		myCache.uniqueUrl[url] = 1
	}
	myCache.mutx.Unlock()
	if visited {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		// BUG FIX: Add before the goroutine starts and Done inside it.
		// The original deferred wg.Done() at the top of Crawl with no
		// matching wg.Add, so the counter went negative (panic) and
		// main's wg.Wait() could not track outstanding work.
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			Crawl(u, depth-1, fetcher)
		}(u)
	}
}

func main() {
Crawl("http://golang.org/", 4, fetcher)
wg.Wait()
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}
```

My crawler:

```package main

import (
"fmt"
"sync"
"time"
)

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

type Crawler struct {
baseUrl string
depth int
fetcher *Fetcher

cache map[string] bool
active int

mux sync.Mutex
}

func NewCrawler(baseUrl string, depth int, fetcher *Fetcher) *Crawler {
return &Crawler{
baseUrl: baseUrl,
depth: depth,
cache: make(map[string] bool),
fetcher: fetcher,
}
}

func (c *Crawler) RegisterWorker() {
c.mux.Lock()
c.active++
c.mux.Unlock()
}

func (c *Crawler) DeregisterWorkerAndMaybeCloseChannel(ch chan<- string) {
c.mux.Lock()

c.active--
if c.active <= 0 {
close(ch)
}

c.mux.Unlock()
}

// CheckUrl records url as visited and reports whether it was new.
// It returns true exactly once per url: the first caller wins.
func (c *Crawler) CheckUrl(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()

	if c.cache[url] {
		return false
	}
	c.cache[url] = true
	return true
}

func (c *Crawler) Work(url string, currentDepth int, urlsChannel chan<- string) {
if currentDepth <= 0 || !c.CheckUrl(url) {
c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
return
}

_, urls, err := (*c.fetcher).Fetch(url)
if err != nil {
fmt.Println(err)
c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
return
}
urlsChannel <- url

for _, subUrl := range urls {
c.RegisterWorker()
go c.Work(subUrl, currentDepth - 1, urlsChannel)
}

c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
}

func (c *Crawler) Crawl(urlsChannel chan<- string) {
c.RegisterWorker()
go c.Work(c.baseUrl, c.depth, urlsChannel)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
urls := make(chan string)

crawler := NewCrawler(url, depth, &fetcher)
crawler.Crawl(urls)

for u := range urls {
fmt.Println("Got url:", u)
}
}

func main() {
Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results after a short
// artificial delay (to make the crawl's concurrency observable).
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	time.Sleep(200 * time.Millisecond)
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}```

### Gzure commented Mar 30, 2017

My solution #69 WebCrawler using Mutex

``````package main

import (
"fmt"
"sync"
"time"
)

type urlCache struct{
m map[string]bool
mux sync.Mutex
}

uc.mux.Lock()
uc.m[url] = true
uc.mux.Unlock()
}

func (uc *urlCache) Value(url string) bool{
uc.mux.Lock()
defer uc.mux.Unlock()
return uc.m[url]
}

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
//
// NOTE(review): nothing in this visible function ever stores url into
// uc — the urlCache setter whose header is truncated above is never
// called here — so uc.Value(url) can stay false and pages may be
// fetched repeatedly (bounded only by depth). Confirm whether a
// set/mark call was lost. Also note main joins the spawned goroutines
// only with a one-second sleep, so deep crawls may be cut short.
func Crawl(url string, depth int, fetcher Fetcher,uc *urlCache) {
// TODO: Fetch URLs in parallel.
// TODO: Don't fetch the same URL twice.
// This implementation doesn't do either:
if depth <= 0 {
return
}
if uc.Value(url) {
//fmt.Println(url," is fetched")
return
}
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
fmt.Printf("found: %s %q\n", url, body)
for _, u := range urls {
//fmt.Println(u)
go Crawl(u, depth-1, fetcher,uc)
}
return
}

func main() {
uc := urlCache{m:make(map[string]bool)}
go Crawl("http://golang.org/", 4, fetcher,&uc)
time.Sleep(time.Second)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}
``````

### hirata-ita commented Aug 18, 2017

I apologize that I didn't check all the above comments.
There are too many.

There are two problems for the Web Crawler:

• mutual exclusion of the map
• termination of the go routines (Crawl): It is necessary to check if the all the go routines terminated.

The solution for the mutual exclusion is straightforward. I used sync.Mutex
For the termination, I used the Dijkstra & Scholten idea. It is simple and it is detailed in "Termination Detection for Diffusing Computations", Information Processing Letters, Vol. 11, No. 1, Aug. 1980.

package main

import (
"fmt"
"sync"
)

// SafeMap is a URL-visit-count map that is safe for concurrent use.
type SafeMap struct {
	v   map[string]int
	mux sync.Mutex
}

// Ins increments the count stored for key.
func (m *SafeMap) Ins(key string) {
	m.mux.Lock()
	defer m.mux.Unlock()
	m.v[key]++
}

// HasValue reports whether key has ever been inserted.
func (m *SafeMap) HasValue(key string) bool {
	m.mux.Lock()
	defer m.mux.Unlock()
	if _, ok := m.v[key]; ok {
		return true
	}
	return false
}

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.

func Crawl(url string, depth int, fetcher Fetcher, ch chan int) {
// TODO: Fetch URLs in parallel.
// TODO: Don't fetch the same URL twice.
// This implementation doesn't do either:
defer func () {ch <-1} ()
if depth <= 0 {
return
}
ok := c.HasValue(url)
if ok {
return
} else {
c.Ins(url)
}

``````body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
fmt.Printf("found: %s %q\n", url, body)
nUrls := len(urls)
chs := make(chan int)
for _, u := range urls {
go Crawl(u, depth-1, fetcher, chs)
}
for i:=0; i<nUrls; i++ {<-chs}
return
``````

}

var c=SafeMap{v:make(map[string]int)}

func main() {
ch := make(chan int)
go Crawl("http://golang.org/", 4, fetcher, ch)
fmt.Println("Processed ",<-ch)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}

### cixuuz commented Jan 1, 2018

My solution using sync.Mutex

``````package main

import (
"fmt"
"sync"
)

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

func crawl(url string, fetcher Fetcher, Urls chan []string) {
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
} else {
fmt.Printf("found: %s %q\n", url, body)
}

Urls <- urls
}

// SafeUrls tracks which URLs have been visited; safe for concurrent use.
type SafeUrls struct {
	v   map[string]bool
	mux sync.Mutex
}

// Urls carries the link lists discovered by crawl workers.
var Urls = make(chan []string)

// Add marks key as visited.
func (c *SafeUrls) Add(key string) {
	c.mux.Lock()
	defer c.mux.Unlock()
	c.v[key] = true
}

// Value reports whether key has been visited.
func (c *SafeUrls) Value(key string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	return c.v[key]
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
//
// Each page is fetched in a goroutine, but Crawl immediately blocks
// receiving its link list from Urls, so the traversal is effectively
// serial.
//
// NOTE(review): SafeUrls is declared above but never consulted here, so
// already-visited URLs are re-fetched on every path that reaches them
// (bounded only by depth) — confirm whether deduplication was intended.
func Crawl(url string, depth int, fetcher Fetcher) {
if depth <= 0 {
return
}

go crawl(url, fetcher, Urls)

urls := <- Urls
for _, u := range urls {
Crawl(u, depth-1, fetcher)
}
return
}

func main() {
Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}
``````

### BnSmth commented Jan 11, 2018

Here is my solution to the crawler. It uses a mutex within the url cache and a wait group in order to terminate once all go routines have finished. I'm not sure whether this is idiomatic or not (especially with the `UrlCache`, and `Crawler` structs/methods), if anyone has any feedback I would appreciate it.

```package main

import (
"fmt"
"sync"
)

type Fetcher interface {
Fetch(url string) (body string, urls []string, err error)
}

// UrlCache is a set of visited URLs, safe for concurrent use.
type UrlCache struct {
	urls map[string]bool
	mux  sync.Mutex
}

// Add marks url as visited.
func (c *UrlCache) Add(url string) {
	c.mux.Lock()
	defer c.mux.Unlock()
	c.urls[url] = true
}

// Contains reports whether url has been visited.
func (c *UrlCache) Contains(url string) bool {
	c.mux.Lock()
	visited := c.urls[url]
	c.mux.Unlock()
	return visited
}

type Crawler struct {
urlCache  UrlCache
fetcher   Fetcher
waitGroup sync.WaitGroup
}

// Crawl fetches url, records it in the cache, and recursively crawls
// the pages it links to, up to depth levels. Each recursive call runs
// in its own goroutine tracked by the crawler's WaitGroup.
func (crawler *Crawler) Crawl(url string, depth int) {
	defer crawler.waitGroup.Done()

	if depth <= 0 {
		return
	}

	if crawler.urlCache.Contains(url) {
		return
	}
	// BUG FIX: the original never added URLs to the cache, so Contains
	// was always false and pages were fetched repeatedly.
	crawler.urlCache.Add(url)

	// BUG FIX: use the crawler's injected fetcher; the original called
	// the package-level fetcher and ignored the struct field.
	_, urls, err := crawler.fetcher.Fetch(url)
	if err != nil {
		// BUG FIX: fmt.Errorf only constructs an error value, which the
		// original discarded; actually print the diagnostic.
		fmt.Printf("crawl: error when fetching url %s %v\n", url, err)
		return
	}

	fmt.Printf("visited %s - %d urls found\n", url, len(urls))

	for _, u := range urls {
		// BUG FIX: register the child with the WaitGroup before it
		// starts, so Wait cannot return while work is outstanding and
		// the child's deferred Done has a matching Add.
		crawler.waitGroup.Add(1)
		go crawler.Crawl(u, depth-1)
	}
}

func main() {
	// Construct the crawler in place; its zero-value WaitGroup is ready
	// to use (the original copied a local sync.WaitGroup into the
	// struct, which go vet's copylocks check discourages).
	crawler := Crawler{
		urlCache: UrlCache{urls: make(map[string]bool)},
		fetcher:  fetcher,
	}

	// BUG FIX: increment the WaitGroup counter before starting the
	// goroutine; the original called Wait on a zero counter, racing the
	// goroutine's deferred Done and exiting before the crawl finished.
	crawler.waitGroup.Add(1)
	go crawler.Crawl("http://golang.org/", 4)
	crawler.waitGroup.Wait()
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}```

### findshrey commented Aug 29, 2019

At line 61 the code :
`ret[i] = make([]uint8, dx)`
Can you tell me the reasoning behind using `dx` as the length of the slice? The exercise doesn't say the length is `dx`, yet the slice has `dx` elements. Kinda confused :/

### shipa988 commented Feb 10, 2020

If we know that the trees have 10 nodes, then this variant has a chance of working:
package main

import (
"golang.org/x/tour/tree"
"fmt"
"sort"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
//var cnt1 chan int

func Walk(t *tree.Tree, ch chan int) {
if t != nil {
ch <- t.Value
count:=<-ch
if count == 10 {
close(ch)
return}
Walk(t.Right, ch)
Walk(t.Left, ch)
}
}

// Same determines whether the trees
// t1 and t2 contain the same values.

func Same(t1, t2 *tree.Tree) bool {
ch1 := make(chan int)
go Walk(t1, ch1)
ch2 := make(chan int)
go Walk(t2, ch2)
var n1 [10]int
var n2 [10]int
i1,i2 := 0, 0
for i:=range ch1 {
n1[i1] = i
i1++
ch1<-i1
}
for i:=range ch2 {
n2[i2] = i
i2++
ch2<-i2
}
sort.Ints(n1[:])
sort.Ints(n2[:])
if n1 == n2 {
return true
} else {return false}

}

func main() {
fmt.Println(Same(tree.New(1), tree.New(1)))
fmt.Println(Same(tree.New(1), tree.New(2)))

}

### shipa988 commented Feb 10, 2020

for Crawler, I only used knowledge from previous lessons, such as a SafeCouter (without sync.WaitGroup)
package main

import (
"fmt"
"sync"
"time"
)

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

type SafeHashset struct {
mux sync.Mutex
h map[string]bool
}
func (counter *SafeCounter) Inc(inc int) {
counter.mux.Lock()
defer counter.mux.Unlock()
counter.c = counter.c + inc
}

type SafeCounter struct {
mux sync.Mutex
c int
}
func (counter *SafeCounter) Value() int {
counter.mux.Lock()
defer counter.mux.Unlock()
return counter.c
}

func (hashset *SafeHashset) Add(url string) bool {
hashset.mux.Lock()
defer hashset.mux.Unlock()
if _, ok := hashset.h[url]; !ok {
hashset.h[url] = true
return true
} else {
return false
}
}

//var ch chan struct{} = make(chan struct {})
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
var hashset = SafeHashset{h: make(map[string]bool)}
var count =SafeCounter{}

func Crawl(url string, depth int, fetcher Fetcher) {
defer count.Inc(-1)
// TODO: Fetch URLs in parallel.
// TODO: Don't fetch the same URL twice.
// This implementation doesn't do either:
if depth <= 0 {
return
}

``````if hashset.Add(url) {
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
fmt.Printf("found: %s %q\n", url, body)
for _, u := range urls {
count.Inc(1)
go Crawl(u, depth-1, fetcher)
//defer <-ch
}
}
return
``````

}

func main() {
count.Inc(1)
Crawl("http://golang.org/", 4, fetcher)
for {
if count.Value() == 0 {
break
}
time.Sleep(time.Second)
}
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}

### eugeneYWang commented Jul 24, 2020

my binary tree #67 solution

``````package main

import (
"golang.org/x/tour/tree"
"fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int, layer int) {
if t == nil {
return
}

Walk(t.Left, ch, layer + 1)
ch <- t.Value
Walk(t.Right, ch, layer + 1)

if layer == 0 {
close(ch)
}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {

ch1 := make(chan int, 10)
ch2 := make(chan int, 10)

go Walk(t1, ch1, 0)
go Walk(t2, ch2, 0)

for {
n1, ok1 := <- ch1
n2, ok2 := <- ch2

if n1 != n2 || ok1 != ok2 {
return false
} else if ok1 == ok2 && ok1 == false {
return true
}
}
}

func main() {
fmt.Println(Same(tree.New(1), tree.New(2)));
fmt.Println(Same(tree.New(1), tree.New(1)));
}
``````

### Aternus commented Aug 14, 2022

```package tour

import "fmt"

}

"loopback":  {127, 0, 0, 1},
}
for name, ip := range hosts {
fmt.Printf("%v: %v\n", name, ip)
}
}```

```package tour

import (
"fmt"
)

bufferLength := len(b)
if bufferLength <= 0 {
return 0, fmt.Errorf("buffer length must be greater than 0")
}
buffer := make([]byte, bufferLength)
for k := range buffer {
buffer[k] = uint8('A')
}
n := copy(b, buffer)
return n, nil
}```

### Aternus commented Aug 16, 2022

❓ this solution saves an extra loop by returning `io.EOF` as early as possible.

```package tour

import (
"io"
"os"
"strings"
)

}

// short circuit: EOF
if err == io.EOF {
return 0, io.EOF
}
for k, v := range b {
b[k] = rot13(v)
}
return n, err
}

s := strings.NewReader("Lbh penpxrq gur pbqr!")
io.Copy(os.Stdout, &r)
}

// rot13 returns b rotated 13 places within its alphabet; any byte
// that is not an ASCII letter is returned unchanged.
func rot13(b byte) byte {
	switch {
	case 'A' <= b && b <= 'Z':
		return 'A' + (b-'A'+13)%26
	case 'a' <= b && b <= 'z':
		return 'a' + (b-'a'+13)%26
	default:
		return b
	}
}

### Aternus commented Aug 22, 2022

Images exercise solution:

```package tour

import (
"image"
"image/color"
)

type Image struct {
Width  int
Height int
}

func (i Image) ColorModel() color.Model {
return color.RGBAModel
}

func (i Image) Bounds() image.Rectangle {
return image.Rect(0, 0, i.Width, i.Height)
}

func (i Image) At(x, y int) color.Color {
r := uint8(x + y)
g := uint8(x ^ y)
b := uint8(x * y)
//fmt.Println("R:", r, "G:", g, "B:", b)
return color.RGBA{r, g, b, 255}
}```

This is my solution to the Exercise "Stringers":

```package main

import "fmt"

// What I did was I returned the call to Sprintf (in the fmt package) and formatted each of the four byte values in the array
// of bytes, separated with dots:

// The 'Sprintf' function formats according to a format specifier and returns the resulting string.
// In other words: you first return the resulting string of the function 'Sprintf' and then
// this method (String()) returns the previously returned string by 'Sprintf':

return fmt.Sprintf("%v.%v.%v.%v",ip[0],ip[1],ip[2],ip[3])

}

func main() {
"loopback":  {127, 0, 0, 1},
}
for nombre, ip := range hosts {
fmt.Printf("%v: %v\n", nombre, ip)
}
}```

I hope it helps!

### AlbertoCoder commented Sep 15, 2022

Excuse me:

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

```package main

import (
"io"
"os"
"strings"
)

}

for i := 0; i < len(p); i++ {
if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') {
p[i] += 13
} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){
p[i] -= 13
}
}
return
}

func main() {
"Lbh penpxrq gur pbqr!")
io.Copy(os.Stdout, &r)
}```

### americanstone commented Oct 4, 2022

MIT Phd crawl solution

```package main

import (
"fmt"
"sync"
)

// Cache counts visits per URL; safe for concurrent use.
type Cache struct {
	m sync.Mutex
	v map[string]uint
}

// Exist reports whether key was seen before, and ALSO records this
// visit (insert on first sight, increment afterwards). Despite the
// name it mutates the cache — it is a test-and-set, not a pure query.
func (s *Cache) Exist(key string) bool {
	s.m.Lock()
	defer s.m.Unlock()
	if _, ok := s.v[key]; ok {
		s.v[key]++
		return true
	}
	s.v[key] = 1
	return false
}

// String renders the cache contents for debugging. Map iteration order
// is random, so line order varies between runs.
func (s *Cache) String() string {
	s.m.Lock()
	defer s.m.Unlock()
	var sb string
	for k, v := range s.v {
		// BUG FIX: %q applied to an integer formats it as a quoted rune
		// (e.g. '\x02'), not a number; %d prints the visit count.
		sb += fmt.Sprintf("key: %s => value: %d", k, v)
	}
	return fmt.Sprintln(sb)
}

type Fetcher interface {
Fetch(url string) (body string, urls []string, err error)
}

//
// Serial crawler
//
func SerialCrawl(url string, depth int, fetcher Fetcher, cache *Cache) {
if cache.Exist(url) {
return
}
//fmt.Printf("cached values %q\n", cache)

fmt.Println("depth ", depth)
if depth <= 0 {
return
}
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
fmt.Printf("Found: %s %q\n", url, body)
//fmt.Printf("fetch children %q\n", urls)
for _, u := range urls {
//fmt.Printf("recursive crawl url %s\n", u)
SerialCrawl(u, depth-1, fetcher, cache)
}
}

//
// Concurrent crawler with shared state and Mutex
//

// ConcurrentMutex crawls from url to a maximum of depth, fetching each
// page's children concurrently and deduplicating via the shared cache.
func ConcurrentMutex(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}

	if depth <= 0 {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)

	var done sync.WaitGroup
	for _, u := range urls {
		u2 := u // per-iteration copy for the closure (pre-Go 1.22 semantics)
		// BUG FIX: the original never called done.Add, so the first
		// child's deferred done.Done underflowed the counter
		// (panic: "sync: negative WaitGroup counter") and done.Wait
		// could not track the children.
		done.Add(1)
		go func() {
			defer done.Done()
			ConcurrentMutex(u2, depth-1, fetcher, cache)
		}()
	}
	done.Wait()
}

//
// Concurrent crawler with channels
//

func worker(url string, depth uint, ch chan []string, fetcher Fetcher, cache *Cache) {
//fmt.Printf("working on url %s depth %d\n", url, depth)
body, urls, err := fetcher.Fetch(url)

if err != nil {
ch <- []string{}
} else {
fmt.Printf("Found: %s %q\n", url, body)
ch <- urls
}
}

func master(url string, ch chan []string, depth uint, fetcher Fetcher, cache *Cache) {
n := 1
copyDepth := depth
for urls := range ch {
// fmt.Printf("dep :%s\n", fmt.Sprintf("%q", copyDepth))
if copyDepth == 0 {
break
}
for _, url := range urls {
if !cache.Exist(url) {
n += 1
go worker(url, depth, ch, fetcher, cache)
}
}
depth--
n -= 1
if n == 0 {
break
}
}
}

func ConcurrentChan(url string, depth uint, fetcher Fetcher, cache *Cache) {
ch := make(chan []string)
go func() {
ch <- []string{url}
}()
master(url, ch, depth, fetcher, cache)
}

func main() {
cache := Cache{v: make(map[string]uint)}
SerialCrawl("https://golang.org/", 4, fetcher, &cache)

ConcurrentMutex("https://golang.org/", 4, fetcher, &cache)

ConcurrentChan("https://golang.org/", 4, fetcher, &cache)

fmt.Printf("\nCached urls %q\n", cache.v)
}

// fakeFetcher returns canned results; as a map (a reference type) it
// avoids copying the result set on each call.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

var fetcher = fakeFetcher{
"https://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"https://golang.org/pkg/",
"https://golang.org/cmd/",
},
},
"https://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"https://golang.org/",
"https://golang.org/cmd/",
"https://golang.org/pkg/fmt/",
"https://golang.org/pkg/os/",
},
},
"https://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"https://golang.org/",
"https://golang.org/pkg/",
},
},
"https://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"https://golang.org/",
"https://golang.org/pkg/",
},
},
}```

My relatively simple solution for crawl w/o global variables and channels and with mutex and WaitGroup:

```package main

import (
"fmt"
"sync"
)

type UrlCache struct {
urls map[string]bool
mu   sync.Mutex
}

func (uc *UrlCache) Add(url string) {
uc.mu.Lock()
uc.urls[url] = true
uc.mu.Unlock()
}

func (uc *UrlCache) Get(url string) bool {
uc.mu.Lock()
defer uc.mu.Unlock()
return uc.urls[url]
}

type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth. It returns once all
// worker goroutines have finished.
func Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	cache := UrlCache{urls: make(map[string]bool)}
	// BUG FIX: Add before starting the goroutine. The original never
	// called wg.Add, so the worker's deferred wg.Done underflowed the
	// counter and wg.Wait could return before crawling finished.
	wg.Add(1)
	go crawl(url, depth, fetcher, &cache, &wg)
	wg.Wait()
}

// crawl fetches one page and spawns a tracked goroutine per child link.
func crawl(url string, depth int, fetcher Fetcher, cache *UrlCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Get(url) {
		return
	}
	// BUG FIX: the original never called cache.Add, so Get was always
	// false and pages were fetched repeatedly.
	// NOTE(review): Get/Add is a check-then-act pair; two workers racing
	// on the same URL may both fetch it once — harmless here.
	cache.Add(url)
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1) // account for the child before it starts
		go crawl(u, depth-1, fetcher, cache, wg)
	}
}

func main() {
Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

// fakeResult holds the canned body and outgoing links for one URL.
type fakeResult struct {
	body string
	urls []string
}

// Fetch returns the canned result for url, or an error when url is
// not in the map.
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	// BUG FIX: the original ended here without returning, which fails
	// to compile ("missing return"). Unknown URLs are an error.
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"https://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"https://golang.org/pkg/",
"https://golang.org/cmd/",
},
},
"https://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"https://golang.org/",
"https://golang.org/cmd/",
"https://golang.org/pkg/fmt/",
"https://golang.org/pkg/os/",
},
},
"https://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"https://golang.org/",
"https://golang.org/pkg/",
},
},
"https://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"https://golang.org/",
"https://golang.org/pkg/",
},
},
}```

@Aternus

```package tour

import (
"fmt"
)

bufferLength := len(b)
if bufferLength <= 0 {
return 0, fmt.Errorf("buffer length must be greater than 0")
}
buffer := make([]byte, bufferLength)
for k := range buffer {
buffer[k] = uint8('A')
}
n := copy(b, buffer)
return n, nil
}```

Is there a reason why you just don't set the 'A' directly in b? Why creating an internal slice and then copying to b? Just curious...