/* Exercise: Loops and Functions #43 */ | |
package main | |
import ( | |
"fmt" | |
"math" | |
) | |
func Sqrt(x float64) float64 { | |
z := float64(2.) | |
s := float64(0) | |
for { | |
z = z - (z*z - x)/(2*z) | |
if math.Abs(s-z) < 1e-15 { | |
break | |
} | |
s = z | |
} | |
return s | |
} | |
func main() { | |
fmt.Println(Sqrt(2)) | |
fmt.Println(math.Sqrt(2)) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Maps #44 */ | |
package main | |
import ( | |
"tour/wc" | |
"strings" | |
) | |
func WordCount(s string) map[string]int { | |
ss := strings.Fields(s) | |
num := len(ss) | |
ret := make(map[string]int) | |
for i := 0; i < num; i++ { | |
(ret[ss[i]])++ | |
} | |
return ret | |
} | |
func main() { | |
wc.Test(WordCount) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Slices #45 */ | |
package main | |
import "tour/pic" | |
func Pic(dx, dy int) [][]uint8 { | |
ret := make([][]uint8, dy) | |
for i := 0; i < dy; i++ { | |
ret[i] = make([]uint8, dx) | |
for j := 0; j < dx; j++ { | |
ret[i][j] = uint8(i^j+(i+j)/2) | |
} | |
} | |
return ret | |
} | |
func main() { | |
pic.Show(Pic) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Fibonacci closure #46 */ | |
package main | |
import "fmt" | |
// fibonacci is a function that returns | |
// a function that returns an int. | |
func fibonacci() func() int { | |
x := 0 | |
y := 1 | |
return func() int { | |
x,y = y,x+y | |
return x | |
} | |
} | |
func main() { | |
f := fibonacci() | |
for i := 0; i < 10; i++ { | |
fmt.Println(f()) | |
} | |
} | |
/******************************************************************************************************/ | |
/* Advanced Exercise: Complex cube roots #47 */ | |
package main | |
import ( | |
"fmt" | |
"math/cmplx" | |
) | |
func Cbrt(x complex128) complex128 { | |
z := complex128(2) | |
s := complex128(0) | |
for { | |
z = z - (cmplx.Pow(z,3) - x)/(3 * (z * z)) | |
if cmplx.Abs(s-z) < 1e-17 { | |
break | |
} | |
s = z | |
} | |
return z | |
} | |
func main() { | |
fmt.Println(Cbrt(2)) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Errors #57 */ | |
package main | |
import ( | |
"fmt" | |
) | |
type ErrNegativeSqrt float64 | |
func (e ErrNegativeSqrt) Error() string { | |
return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
} | |
func Sqrt(f float64) (float64, error) { | |
if f < 0 { | |
return 0, ErrNegativeSqrt(f) | |
} | |
return 0, nil | |
} | |
func main() { | |
fmt.Println(Sqrt(2)) | |
fmt.Println(Sqrt(-2)) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Images #58 */ | |
package main | |
import ( | |
"image" | |
"tour/pic" | |
"image/color" | |
) | |
type Image struct{ | |
Width, Height int | |
colr uint8 | |
} | |
func (r *Image) Bounds() image.Rectangle { | |
return image.Rect(0, 0, r.Width, r.Height) | |
} | |
func (r *Image) ColorModel() color.Model { | |
return color.RGBAModel | |
} | |
func (r *Image) At(x, y int) color.Color { | |
return color.RGBA{r.colr+uint8(x), r.colr+uint8(y), 255, 255} | |
} | |
func main() { | |
m := Image{100, 100, 128} | |
pic.ShowImage(&m) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */ | |
package main | |
import ( | |
"io" | |
"os" | |
"strings" | |
) | |
type rot13Reader struct { | |
r io.Reader | |
} | |
func (rot *rot13Reader) Read(p []byte) (n int, err error) { | |
n,err = rot.r.Read(p) | |
for i := 0; i < len(p); i++ { | |
if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') { | |
p[i] += 13 | |
} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){ | |
p[i] -= 13 | |
} | |
} | |
return | |
} | |
func main() { | |
s := strings.NewReader( | |
"Lbh penpxrq gur pbqr!") | |
r := rot13Reader{s} | |
io.Copy(os.Stdout, &r) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Equivalent Binary Trees #67 */ | |
package main | |
import ( | |
"tour/tree" | |
"fmt" | |
) | |
// Walk walks the tree t sending all values | |
// from the tree to the channel ch. | |
func Walk(t *tree.Tree, ch chan int) { | |
_walk(t, ch) | |
close(ch) | |
} | |
func _walk(t *tree.Tree, ch chan int) { | |
if t != nil { | |
_walk(t.Left, ch) | |
ch <- t.Value | |
_walk(t.Right, ch) | |
} | |
} | |
// Same determines whether the trees | |
// t1 and t2 contain the same values. | |
func Same(t1, t2 *tree.Tree) bool { | |
ch1 := make(chan int) | |
ch2 := make(chan int) | |
go Walk(t1, ch1) | |
go Walk(t2, ch2) | |
for i := range ch1 { | |
if i != <- ch2 { | |
return false | |
} | |
} | |
return true | |
} | |
func main() { | |
//tree.New(2) | |
ch := make(chan int) | |
go Walk(tree.New(1), ch) | |
for v := range ch { | |
fmt.Print(v) | |
} | |
fmt.Println(Same(tree.New(1), tree.New(1))) | |
fmt.Println(Same(tree.New(1), tree.New(2))) | |
} | |
/******************************************************************************************************/ | |
/* Exercise: Web Crawler #69 */ | |
package main | |
import ( | |
"fmt" | |
) | |
type Fetcher interface { | |
// Fetch returns the body of URL and | |
// a slice of URLs found on that page. | |
Fetch(url string) (body string, urls []string, err error) | |
} | |
var store map[string]bool | |
func Krawl(url string, fetcher Fetcher, Urls chan []string) { | |
body, urls, err := fetcher.Fetch(url) | |
if err != nil { | |
fmt.Println(err) | |
} else { | |
fmt.Printf("found: %s %q\n", url, body) | |
} | |
Urls <- urls | |
} | |
func Crawl(url string, depth int, fetcher Fetcher) { | |
Urls := make(chan []string) | |
go Krawl(url, fetcher, Urls) | |
band := 1 | |
store[url] = true // init for level 0 done | |
for i := 0; i < depth; i++ { | |
for band > 0 { | |
band-- | |
next := <- Urls | |
for _, url := range next { | |
if _, done := store[url] ; !done { | |
store[url] = true | |
band++ | |
go Krawl(url, fetcher, Urls) | |
} | |
} | |
} | |
} | |
return | |
} | |
func main() { | |
store = make(map[string]bool) | |
Crawl("http://golang.org/", 4, fetcher) | |
} | |
// fakeFetcher is Fetcher that returns canned results. | |
type fakeFetcher map[string]*fakeResult | |
type fakeResult struct { | |
body string | |
urls []string | |
} | |
func (f *fakeFetcher) Fetch(url string) (string, []string, error) { | |
if res, ok := (*f)[url]; ok { | |
return res.body, res.urls, nil | |
} | |
return "", nil, fmt.Errorf("not found: %s", url) | |
} | |
// fetcher is a populated fakeFetcher. | |
var fetcher = &fakeFetcher{ | |
"http://golang.org/": &fakeResult{ | |
"The Go Programming Language", | |
[]string{ | |
"http://golang.org/pkg/", | |
"http://golang.org/cmd/", | |
}, | |
}, | |
"http://golang.org/pkg/": &fakeResult{ | |
"Packages", | |
[]string{ | |
"http://golang.org/", | |
"http://golang.org/cmd/", | |
"http://golang.org/pkg/fmt/", | |
"http://golang.org/pkg/os/", | |
}, | |
}, | |
"http://golang.org/pkg/fmt/": &fakeResult{ | |
"Package fmt", | |
[]string{ | |
"http://golang.org/", | |
"http://golang.org/pkg/", | |
}, | |
}, | |
"http://golang.org/pkg/os/": &fakeResult{ | |
"Package os", | |
[]string{ | |
"http://golang.org/", | |
"http://golang.org/pkg/", | |
}, | |
}, | |
} |
On Exercise: Rot13 Reader #59, you scan and "rot13" the entire contents of p. But that's incorrect--you should only scan n bytes. In the test harness, p is a slice 32k bytes long, so it's pretty inefficient! |
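A sketch of that fix applied to the Read method above — the same transform, but the loop stops at n rather than len(p):
func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	// only transform the n bytes that were actually read
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}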
The first two numbers of the Fibonacci sequence are 0 and 1. Just let x start at 1 and y at 0.
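A minimal sketch of that tweak to the closure above (only the starting values change, so the first call yields 0):
func fibonacci() func() int {
	x, y := 1, 0
	return func() int {
		x, y = y, x+y // yields 0, 1, 1, 2, 3, 5, ...
		return x
	}
}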
My solution to #69 is a little different from yours: it keeps the same Fetcher interface but adds a FetcherResult struct and a _Crawl(url string, depth int, fetcher Fetcher, ch chan *FetcherResult) helper that sends each page's result back over a channel, with Crawl using the fetcher to recursively crawl on top of it.
your solution for exercise #67 assumes the two trees have the same structure in the way you implement Same():
The code here gets a value from ch1 and compares it to the next value from ch2. From the exercise:
The below code solves this by using a map to buffer the leaf values and either adding them as keys, if they don't exist, or deleting the entry if it already exists. It does assume leaf values are unique, but that's the case in the exercise and wouldn't be hard to fix.
|
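A reconstruction of that idea (not necessarily the commenter's exact code; it reuses Walk from the gist above, which closes its channel, and assumes unique leaf values as stated):
func Same(t1, t2 *tree.Tree) bool {
	ch1, ch2 := make(chan int), make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	seen := make(map[int]bool)
	toggle := func(v int) {
		if seen[v] {
			delete(seen, v) // already produced by the other tree
		} else {
			seen[v] = true // buffer until the other tree produces it
		}
	}
	for v := range ch1 {
		toggle(v)
	}
	for v := range ch2 {
		toggle(v)
	}
	return len(seen) == 0 // empty map means every value was matched
}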
@evertlammerts the trees can have a different structure, but if you walk them left to right the resulting sequence should be the same. Notice that the quoted sentence states exactly that.
If you Walk(tree.New(1)) twice, you will find the results are different: 10,5,3,1,2,4,7,6,9,8,
package main
import "fmt"
import "code.google.com/p/go-tour/tree"
// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
Walk_(t, ch)
close(ch)
}
func Walk_(t *tree.Tree, ch chan int) {
if t != nil {
Walk_(t.Left, ch)
ch <- t.Value
Walk_(t.Right, ch)
}
}
// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
t1c, t2c := make(chan int), make(chan int)
go Walk(t1, t1c)
go Walk(t2, t2c)
for {
t1e, cont1 := <-t1c
t2e, cont2 := <-t2c
if (t1e != t2e) || (cont1 != cont2) {
return false
} else if !cont1 { // both channels are closed (cont1 == cont2 here), so all values matched
return true
}
}
return true
}
func main() {
fmt.Print(Same(tree.New(1), tree.New(1)))
} |
For solutions of #67, just name your function and helper |
Just an opinion here, but in your solution #45 I'd replace
with
and something similar with the inner loop |
A solution similar to your tree walker - but without the bug that one tree being a prefix of the other would return a false positive: https://gist.github.com/mckoss/e440d5fc88a151f52a30
|
Thanks for taking the time to post these. For #44 (now #43 on the site ) the map access can be simplified and perhaps better understood: func WordCount(s string) map[string]int {
ss := strings.Fields(s)
ret := make(map[string]int)
for _, sub := range ss {
ret[sub]++
}
return ret
} |
@PaulRoad I think with your solution the last url (=http://golang.org/pkg/os/) won't be scanned, as depth will be 0. It seems like the condition should be at the end so that the last call also counts:
(I have also added the parent url "purl") Thanks |
Since there was no iterative solution for #70 Equivalent Binary Trees:
|
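An iterative in-order walk typically uses an explicit stack; here is a sketch of one such Walk (my own version, not necessarily the commenter's):
func Walk(t *tree.Tree, ch chan int) {
	var stack []*tree.Tree
	for t != nil || len(stack) > 0 {
		// go as far left as possible, remembering the path
		for t != nil {
			stack = append(stack, t)
			t = t.Left
		}
		// visit the deepest pending node, then walk its right subtree
		t = stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		ch <- t.Value
		t = t.Right
	}
	close(ch)
}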
Here's my solution to the WebCrawler one. Make sure you test yours with different values of "depth" as I found some issues with mine when I first built it (with different values of depth deadlocking). package main
import (
"fmt"
)
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
type Answer struct {
body string
urls []string
url string
remainingDepth int
isError bool
}
func CrawlInner(url string, depth int, fetcher Fetcher, result chan Answer) {
body, urls, err := fetcher.Fetch(url)
if err != nil {
result <- Answer{ fmt.Sprint(err), nil, url, depth-1, true }
} else {
result <- Answer{ body, urls, url, depth-1, false }
}
}
func Crawl(url string, depth int, fetcher Fetcher) {
answer := make(chan Answer)
go CrawlInner(url, depth, fetcher, answer)
ProcessResults(url, answer)
}
func ProcessResults(rootUrl string, answer chan Answer) {
pending := make(map[string]bool)
done := make(map[string]bool)
pending[rootUrl] = true
for result := range answer {
if result.isError {
fmt.Println(result.body)
} else {
fmt.Printf("found: %s %q\n", result.url, result.body)
}
if !result.isError && result.remainingDepth > 0 {
for _,v := range result.urls {
if _, ok := pending[v] ; ok { continue }
if _, ok := done[v] ; ok { continue }
pending[v] = true
go CrawlInner(v, result.remainingDepth, fetcher, answer)
}
}
delete(pending, result.url)
done[result.url] = true
if len(pending) == 0 {
close(answer)
}
}
}
func main() {
Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := (*f)[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
My solution #69 WebCrawler package main
import (
"fmt"
"sync"
)
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
var store map[string]bool
var wg sync.WaitGroup
func _crawl(url string, depth int, fetcher Fetcher, Results chan string, wg *sync.WaitGroup) {
defer wg.Done()
if depth <= 0 {
return
}
if exists := store[url]; exists {
return
}
body, urls, err := fetcher.Fetch(url)
if err != nil {
Results <- fmt.Sprintf("not found: %s", url)
return
}
store[url] = true
Results <- fmt.Sprintf("found: %s %q", url, body)
for _, u := range urls {
wg.Add(1)
go _crawl(u, depth-1, fetcher, Results, wg)
}
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
Results := make(chan string)
wg.Add(1)
go _crawl(url, depth, fetcher, Results, &wg)
go func() {
wg.Wait()
close(Results)
}()
for r := range Results {
fmt.Println(r)
}
}
func main() {
store = make(map[string]bool)
Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
Here is my solution to crawler problem.. I find it really nice http://play.golang.org/p/vMccfh7DC2 package main
import (
"fmt"
)
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
func CrawlRecursive(url string, depth int, fetcher Fetcher, quit chan int, done chan map[string] bool) {
if depth<=0 {
quit <- 0
return
}
doneUrls := <- done
hasIt, didIt := doneUrls[url]
if didIt && hasIt {
quit <- 0
done <- doneUrls
return
} else {
doneUrls[url] = true
}
done <- doneUrls
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
quit <- 0
return
}
fmt.Printf("found: %s %q\n", url, body)
childrenQuit := make(chan int)
for _, u := range urls {
go CrawlRecursive(u, depth-1, fetcher, childrenQuit, done)
}
for _ = range urls {
<- childrenQuit
}
quit <- 0
}
func Crawl(url string, depth int, fetcher Fetcher) {
done := make(chan map[string] bool, 1)
done <- map[string]bool{url: false}
quit := make(chan int)
go CrawlRecursive(url, depth, fetcher, quit, done)
<- quit
}
func main() {
Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
My solution to Web Crawler (#73 in the version of the Tour I used). Note, because the crawls are done in parallel and results may arrive out of order, I associated the depth with the crawl results which seems to simplify things a bit.
|
/* Exercise: Loops and Functions #25 */
|
My solution for package main
import "code.google.com/p/go-tour/tree"
import "fmt"
func Walk(t *tree.Tree, ch chan int) {
if t != nil {
Walk(t.Left, ch)
ch <- t.Value
Walk(t.Right, ch)
}
/*
a little bit of my original solution:
if t.Left != nil {
Walk(t.Left, ch)
}
ch <- t.Value
if t.Right != nil {
Walk(t.Right, ch)
}
because an `if` is cheaper than a function call
*/
}
func Same(t1, t2 *tree.Tree) bool {
ch1 := make(chan int)
ch2 := make(chan int)
go Walk(t1, ch1)
go Walk(t2, ch2)
for i := 0; i < 10; i++ {
if <-ch1 != <-ch2 {
return false
}
}
return true
}
func main() {
fmt.Println("Let's go!")
ch := make(chan int)
go Walk(tree.New(1), ch)
for i := 0; i < 10; i++ {
fmt.Printf("value: %d\n", <-ch)
}
fmt.Println(Same(tree.New(1), tree.New(1)))
fmt.Println(Same(tree.New(1), tree.New(2)))
} |
Exercise: Rot13 Reader #63 should return n,io.EOF |
|
My Fibonacci solution for the exercise |
My solution for the Stringers exercise: package main
import "fmt"
type IPAddr [4]byte
func (p IPAddr) String() string {
return fmt.Sprintf("%d.%d.%d.%d",p[0], p[1], p[2], p[3])
}
func main() {
addrs := map[string]IPAddr{
"loopback": {127, 0, 0, 1},
"googleDNS": {8, 8, 8, 8},
}
for n, a := range addrs {
fmt.Printf("%v: %v\n", n, a)
}
} |
My solution for http://tour.golang.org/methods/11 package main
import "code.google.com/p/go-tour/reader"
type MyReader struct{}
func (r MyReader) Read(b []byte) (n int, err error) {
b[0] = 'A'
return 1, nil
}
func main() {
reader.Validate(MyReader{})
} |
And yet another web crawler solution. =) Here the "boss" keeps track of which urls have been visited, and requests can be made to him via a global channel (crawlers can send him a url to ask if they should download it). A goroutine is spawned for each crawler, and since this is recursive and the "boss" doesn't have a dedicated channel to reply to each crawler, the boss needs a way to communicate back to the crawler to give it the green light or tell it not to bother. To do this, the crawler creates a callback channel for the purpose and sends it down the global channel to the boss along with the url. Requests therefore reach the boss via this global channel, which carries a "comm" struct: the url the crawler wants to download, plus a bool channel for the "download/don't download" notification back (true => download, false => forget it).
|
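A sketch of the shape being described (the names comm, boss, and shouldFetch are mine, not the commenter's):
package main

import "fmt"

// comm is what a crawler sends to the boss: the url it wants to fetch plus a
// private channel on which the boss answers true ("download") or false ("skip").
type comm struct {
	url   string
	reply chan bool
}

// boss owns the visited set; it is the only goroutine that touches it.
func boss(requests chan comm) {
	visited := make(map[string]bool)
	for req := range requests {
		seen := visited[req.url]
		visited[req.url] = true
		req.reply <- !seen
	}
}

// shouldFetch is what a crawler calls before downloading a url.
func shouldFetch(url string, requests chan comm) bool {
	reply := make(chan bool)
	requests <- comm{url, reply}
	return <-reply
}

func main() {
	requests := make(chan comm)
	go boss(requests)
	fmt.Println(shouldFetch("http://golang.org/", requests)) // true: first time
	fmt.Println(shouldFetch("http://golang.org/", requests)) // false: already seen
}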
// http://tour.golang.org/methods/12
package main

import (
"io"
"os"
"strings"
)
type rot13Reader struct {
r io.Reader
}
func (r *rot13Reader) Read(b []byte) (int,error) {
l,e := r.r.Read(b)
for i,c := range(b) {
if c <= 'Z' && c >='A' {
b[i] = (c - 'A' + 13)%26 + 'A'
}else if c >= 'a' && c <= 'z' {
b[i] = (c - 'a' + 13)%26 + 'a'
}
}
return l, e
}
func main() {
s := strings.NewReader("Lbh penpxrq gur pbqr!")
r := rot13Reader{s}
io.Copy(os.Stdout, &r)
} |
http://tour.golang.org/methods/12
package main

import (
"io"
"os"
"strings"
)
type rot13Reader struct {
r io.Reader
}
func (r rot13Reader) rot13(b byte, basis byte) byte {
return (b - basis + 13) % 26 + basis
}
func (r rot13Reader) cryptByte(b byte) byte {
if b >= 'A' && b <= 'Z' {
return r.rot13(b, 'A')
}
if b >= 'a' && b <= 'z' {
return r.rot13(b, 'a')
}
return b
}
func (rot rot13Reader) Read(b []byte) (c int, err error) {
c, err = rot.r.Read(b)
for i := 0; i < c; i++ {
b[i] = rot.cryptByte(b[i])
}
return
}
func main() {
s := strings.NewReader("Lbh penpxrq gur pbqr!")
r := rot13Reader{s}
io.Copy(os.Stdout, &r)
} |
One more WordCount version:
|
The solution for "Exercise: Errors" is missing the calculation.
|
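A sketch of the missing piece — the same ErrNegativeSqrt guard from the gist plus the Newton iteration from the Loops and Functions exercise (it assumes "math" is imported; the tolerance is arbitrary):
func Sqrt(f float64) (float64, error) {
	if f < 0 {
		return 0, ErrNegativeSqrt(f)
	}
	z, prev := 1.0, 0.0
	for math.Abs(z-prev) > 1e-10 {
		prev = z
		z -= (z*z - f) / (2 * z) // Newton's method step
	}
	return z, nil
}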
The solution to #57 doesn't calculate the Sqrt if there is no error ... |
I think there is a bug in [the tree comparison](https://gist.github.com/zyxar/2317744#file-exercise-tour-go-L256). If the second tree is longer than the first and all the values are the same (e.g. [1, 2], [1, 2, 3]), the func will return true but the trees are not the same.
func Same(t1, t2 *tree.Tree) bool {
c1 := make(chan(int))
c2 := make(chan(int))
go Walk(t1, c1)
go Walk(t2, c2)
for i := range c1 {
fmt.Println(i)
if i != <- c2 {
return false
}
}
for range c2 {
return false
}
return true
} I also made an helper to show that: func NewLen(k int) *Tree {
var t *Tree
for i :=1; i <= k; i++ {
t = insert(t, i)
}
return t
} and the main: func main() {
fmt.Println(Same(tree.NewLen(10), tree.NewLen(12)))
fmt.Println(Same(tree.NewLen(14), tree.NewLen(12)))
fmt.Println(Same(tree.NewLen(14), tree.NewLen(14)))
} Cheers, |
More precisely
One question: on error, would ...? It's a nice example. Just starting with Go and wanted to see how others approach the issue. I very much like the useful example of using named return values. Thank you.
Here is my MyReader solution package main
import "golang.org/x/tour/reader"
type MyReader struct{}
// TODO: Add a Read([]byte) (int, error) method to MyReader.
func (m MyReader) Read(b []byte) (int,error) {
cnt := 0
for i := 0; i < len(b); i++ {
b[i] = 'A'
cnt = i+1
}
return cnt,nil
}
func main() {
reader.Validate(MyReader{})
} |
An alternate package main
import "golang.org/x/tour/reader"
type MyReader struct{}
func (r MyReader) Read(s []byte) (int, error) {
for i := range s {
s[i] = 'A'
}
return len(s), nil
}
func main() {
reader.Validate(MyReader{})
} |
I didn't find any solution for the HTTP Handlers exercise, so here's my solution: a String type and a Struct type, each with a ServeHTTP method, registered in main.
HTTP Handlers exercise's solution package main
import (
"log"
"net/http"
"fmt"
)
type String string
type Struct struct {
Greeting string
Punct string
Who string
}
func (s String) ServeHTTP(w http.ResponseWriter,r *http.Request) {
fmt.Fprint(w, s)
}
func (s Struct) ServeHTTP(w http.ResponseWriter,r *http.Request) {
fmt.Fprint(w, s)
}
func main() {
// your http.Handle calls here
http.Handle("/string", String("I'm a frayed knot."))
http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
log.Fatal(http.ListenAndServe("localhost:4000", nil))
} |
Exercise: Readers. Since the exercise asks you to implement a Reader which returns the number of bytes read and an error, we need to return the number of bytes read and to at least cover some error cases. We know from previous exercises that an io.Reader returns io.EOF at the end of input, and we can assume that if the input b is nil or empty, we have reached the end of the file...
package main
import (
"golang.org/x/tour/reader"
"fmt"
)
type ErrEOF []byte
func (e ErrEOF) Error() string {
return fmt.Sprintf("End of file")
}
type MyReader struct{}
// TODO: Add a Read([]byte) (int, error) method to MyReader.
func (r MyReader) Read(b []byte) (int, error) {
if b == nil || len(b) == 0 {
err := ErrEOF(b)
return 0, err
}
for i := 0; i < len(b); i++ {
b[i] = 'A'
}
return len(b), nil
}
func main() {
reader.Validate(MyReader{})
} |
Recently completed the gotour and made a repo with my solutions here: https://github.com/makushline/gotour |
package main
import (
"io"
"os"
"strings"
)
type rot13Reader struct {
r io.Reader
}
func (rot *rot13Reader) Read(p []byte) (int, error) {
n, err := rot.r.Read(p)
for i := 0; i < n; i++ {
if p[i] < 'A' || p[i] > 'z' { // ignore non-ASCII letters
continue
}
if p[i] >= 'a' && p[i] < 'a'+13 || p[i] >= 'A' && p[i] < 'A'+13 { // >= so 'a' and 'A' themselves are shifted forward
p[i] += 13
} else {
p[i] -= 13
}
}
return n, err
}
func main() {
s := strings.NewReader("NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm")
r := rot13Reader{s}
io.Copy(os.Stdout, &r)
} |
A solution to the binary tree only using one channel and a map:
|
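The code isn't shown, but one way to match that description — a single channel plus a map of value counts — could look like this (a sketch; addValues is my own helper name, and Walk is the channel-closing version from the gist above):
// addValues records every value in t directly into counts; no channel needed here.
func addValues(t *tree.Tree, counts map[int]int) {
	if t != nil {
		addValues(t.Left, counts)
		counts[t.Value]++
		addValues(t.Right, counts)
	}
}

func Same(t1, t2 *tree.Tree) bool {
	counts := make(map[int]int)
	addValues(t1, counts)

	ch := make(chan int) // the single channel: only t2 is walked through it
	go Walk(t2, ch)
	for v := range ch {
		counts[v]--
	}
	for _, c := range counts {
		if c != 0 {
			return false
		}
	}
	return true
}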
Exercise: rot13Reader package main
import (
"io"
"os"
"strings"
)
type rot13Reader struct {
r io.Reader
}
func (r *rot13Reader) Read(p []byte) (n int, err error) {
n, err = r.r.Read(p)
for i, b := range p {
switch {
case 'A' <= b && b <= 'M', 'a' <= b && b <= 'm':
p[i] += 13
case 'N' <= b && b <= 'Z', 'n' <= b && b <= 'z':
p[i] -= 13
}
}
return
}
func main() {
s := strings.NewReader("Lbh penpxrq gur pbqr!")
r := rot13Reader{s}
io.Copy(os.Stdout, &r)
} |
Error is thrown on #58 - Images because
|
Hi, thanks for the solutions. It helped more in understanding of Go, rather than solution itself. On a side note, may I know what "Color theme" you are using. I like white background in editors, please let me know. Thanks. |
The rot13 solution assumes the lower reader gives the requested number of bytes. This is not necessarily the case, so:
should be
|
I want to share my solution for the image exercise (number 25) with you and everyone who stumbles over this post. package main
import (
"golang.org/x/tour/pic"
"image/color"
"image"
)
type Image struct{
Rect image.Rectangle
}
func (image Image) ColorModel() color.Model {
return color.RGBAModel
}
func (i Image) Bounds() image.Rectangle {
return i.Rect
}
func (image Image) At(x, y int) color.Color {
return color.RGBA{uint8(x*y), uint8(y+x), 255, 255}
}
func main() {
m := Image{image.Rect(0,0,300,300)}
pic.ShowImage(m)
}
|
/* Exercise: Web Crawler #69 - https://tour.golang.org/concurrency/10 */ package main
import (
"fmt"
"sync"
)
type Set struct {
v map[string]bool
mux sync.Mutex
}
func (set *Set) Register(key string) {
set.mux.Lock()
defer set.mux.Unlock()
set.v[key] = true
}
func (set *Set) Exists(key string) bool {
set.mux.Lock()
defer set.mux.Unlock()
return set.v[key]
}
type Fetcher interface {
Fetch(url string) (body string, urls []string, err error)
}
func Crawl(url string, depth int, fetcher Fetcher, ch chan string) {
set := Set{v: make(map[string]bool)}
var wg sync.WaitGroup
wg.Add(1)
go crawl(url, depth, fetcher, set, ch, &wg)
wg.Wait()
close(ch)
}
func crawl(url string, depth int, fetcher Fetcher, set Set, ch chan string, wg *sync.WaitGroup) {
defer wg.Done()
if depth <= 0 || set.Exists(url) {
return
}
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
set.Register(url)
ch <- url
fmt.Printf("found: %s %q\n", url, body)
for _, u := range urls {
if set.Exists(u) {
continue
}
wg.Add(1)
go crawl(u, depth-1, fetcher, set, ch, wg)
}
return
}
func main() {
ch := make(chan string)
go Crawl("http://golang.org/", 4, fetcher, ch)
for value := range ch {
fmt.Println(value)
}
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
My Solution for stringers does not have any sense, but if condition will be slice of bytes for
|
/* Exercise: Loops and Functions #43 */
|
@zyxar, for #67, even though the test cases the question provided pass, that solution will not pass when
My solution #69 WebCrawler using Mutex package main
import (
"fmt"
"sync"
)
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
type Cache struct {
uniqueUrl map[string]int
mutx sync.Mutex
}
var myCache = Cache{uniqueUrl:make(map[string]int)}
var wg sync.WaitGroup // required to let all launched goroutines finish before main() exits
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
defer wg.Done()
if depth <= 0 {
return
}
_, visited := myCache.uniqueUrl[url]
if visited {
return
}
myCache.mutx.Lock()
myCache.uniqueUrl[url] = 1
myCache.mutx.Unlock()
body, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Println(err)
return
}
fmt.Printf("found: %s %q\n", url, body)
for _, u := range urls {
wg.Add(1)
go Crawl(u, depth-1, fetcher)
}
return
}
func main() {
wg.Add(1)
Crawl("http://golang.org/", 4, fetcher)
wg.Wait()
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("***not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
}
|
My crawler: package main
import (
"fmt"
"sync"
"time"
)
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
type Crawler struct {
baseUrl string
depth int
fetcher *Fetcher
cache map[string] bool
active int
mux sync.Mutex
}
func NewCrawler(baseUrl string, depth int, fetcher *Fetcher) *Crawler {
return &Crawler{
baseUrl: baseUrl,
depth: depth,
cache: make(map[string] bool),
fetcher: fetcher,
}
}
func (c *Crawler) RegisterWorker() {
c.mux.Lock()
c.active++
c.mux.Unlock()
}
func (c *Crawler) DeregisterWorkerAndMaybeCloseChannel(ch chan<- string) {
c.mux.Lock()
c.active--
if c.active <= 0 {
close(ch)
}
c.mux.Unlock()
}
func (c *Crawler) CheckUrl(url string) bool {
c.mux.Lock()
defer c.mux.Unlock()
if _, ok := c.cache[url]; ok {
return false
}
c.cache[url] = true
return true
}
func (c *Crawler) Work(url string, currentDepth int, urlsChannel chan<- string) {
if currentDepth <= 0 || !c.CheckUrl(url) {
c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
return
}
_, urls, err := (*c.fetcher).Fetch(url)
if err != nil {
fmt.Println(err)
c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
return
}
urlsChannel <- url
for _, subUrl := range urls {
c.RegisterWorker()
go c.Work(subUrl, currentDepth - 1, urlsChannel)
}
c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
}
func (c *Crawler) Crawl(urlsChannel chan<- string) {
c.RegisterWorker()
go c.Work(c.baseUrl, c.depth, urlsChannel)
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
urls := make(chan string)
crawler := NewCrawler(url, depth, &fetcher)
crawler.Crawl(urls)
for u := range urls {
fmt.Println("Got url:", u)
}
}
func main() {
Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
time.Sleep(200 * time.Millisecond)
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
My solution #69 WebCrawler using Mutex
|
I apologize that I didn't check all the above comments. There are two problems for the Web Crawler. The solution for the mutual exclusion is straightforward: I used sync.Mutex inside a SafeMap of URLs (with Ins and HasValue methods), shared as var c = SafeMap{v: make(map[string]int)}, and a Crawl(url string, depth int, fetcher Fetcher, ch chan int) built on top of it, with the usual fakeFetcher fixture.
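From the fragments, the SafeMap presumably looks something like this (a sketch; it assumes "sync" is imported and the method bodies are my guess):
// SafeMap is a map of URLs that is safe to use concurrently.
type SafeMap struct {
	v   map[string]int
	mux sync.Mutex
}

// Ins records a URL as visited.
func (c *SafeMap) Ins(key string) {
	c.mux.Lock()
	defer c.mux.Unlock()
	c.v[key]++
}

// HasValue reports whether a URL has already been visited.
func (c *SafeMap) HasValue(key string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	_, ok := c.v[key]
	return ok
}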
My solution using sync.Mutex
|
Here is my solution to the crawler. It uses a mutex within the url cache and a wait group in order to terminate once all goroutines have finished. I'm not sure whether this is idiomatic or not (especially with the ...).
package main
import (
"fmt"
"sync"
)
type Fetcher interface {
Fetch(url string) (body string, urls []string, err error)
}
type UrlCache struct {
urls map[string]bool
mux sync.Mutex
}
func (cache *UrlCache) Add(url string) {
cache.mux.Lock()
cache.urls[url] = true
cache.mux.Unlock()
}
func (cache *UrlCache) Contains(url string) bool {
cache.mux.Lock()
defer cache.mux.Unlock()
return cache.urls[url]
}
type Crawler struct {
urlCache UrlCache
fetcher Fetcher
waitGroup sync.WaitGroup
}
func (crawler *Crawler) Crawl(url string, depth int) {
defer crawler.waitGroup.Done()
if depth <= 0 {
return
}
if crawler.urlCache.Contains(url) {
return
}
_, urls, err := fetcher.Fetch(url)
if err != nil {
fmt.Errorf("crawl: error when fetching url %s %v\n", url, err)
return
}
fmt.Printf("visited %s - %d urls found\n", url, len(urls))
crawler.urlCache.Add(url)
for _, u := range urls {
crawler.waitGroup.Add(1)
go crawler.Crawl(u, depth-1)
}
}
func main() {
waitGroup := sync.WaitGroup{}
waitGroup.Add(1)
crawler := Crawler{
urlCache: UrlCache{urls: make(map[string]bool)},
fetcher: fetcher,
waitGroup: waitGroup,
}
go crawler.Crawl("http://golang.org/", 4)
crawler.waitGroup.Wait()
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
type fakeResult struct {
body string
urls []string
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
"http://golang.org/": &fakeResult{
"The Go Programming Language",
[]string{
"http://golang.org/pkg/",
"http://golang.org/cmd/",
},
},
"http://golang.org/pkg/": &fakeResult{
"Packages",
[]string{
"http://golang.org/",
"http://golang.org/cmd/",
"http://golang.org/pkg/fmt/",
"http://golang.org/pkg/os/",
},
},
"http://golang.org/pkg/fmt/": &fakeResult{
"Package fmt",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
"http://golang.org/pkg/os/": &fakeResult{
"Package os",
[]string{
"http://golang.org/",
"http://golang.org/pkg/",
},
},
} |
At line 61 the code : |
if we know that the trees have 10 nodes, then this variant has a chance of life)): a Walk(t *tree.Tree, ch chan int) that sends all values from the tree to the channel, and a Same(t1, t2 *tree.Tree) bool built on top of it.
For Crawler, I only used knowledge from previous lessons, such as a SafeCounter (without sync.WaitGroup). The solution defines a SafeHashset with an Add(url string) bool method plus a SafeCounter, and builds Crawl(url string, depth int, fetcher Fetcher) on top of them, with the usual fakeFetcher fixture.
my binary tree #67 solution
|
Solution #67 does not work if the tree sizes differ. You should check each channel's status inside, and after the for loop.
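A sketch of that fix for the Same in the gist at the top, using the comma-ok form of the channel receive so that a closed channel is detected whether the trees end together or not:
func Same(t1, t2 *tree.Tree) bool {
	ch1, ch2 := make(chan int), make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for {
		v1, ok1 := <-ch1
		v2, ok2 := <-ch2
		if ok1 != ok2 {
			return false // one tree ran out of values before the other
		}
		if !ok1 {
			return true // both channels closed: every value matched
		}
		if v1 != v2 {
			return false
		}
	}
}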