Solutions for Go Tour
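
These are solutions to the method, interface, and concurrency exercises from A Tour of Go. Each "package main" block below is a self-contained program; run each one separately.

Exercise: Equivalent Binary Trees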
package main

import (
	"fmt"
	"golang.org/x/tour/tree"
)

// walk performs a recursive in-order traversal of t,
// sending each value on ch in sorted order.
func walk(t *tree.Tree, ch chan int) {
	if t.Left != nil {
		walk(t.Left, ch)
	}
	ch <- t.Value
	if t.Right != nil {
		walk(t.Right, ch)
	}
}

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	walk(t, ch)
	close(ch)
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	ch2 := make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for v := range ch1 {
		if v != <-ch2 {
			return false
		}
	}
	return true
}

func main() {
	ch := make(chan int)
	go Walk(tree.New(1), ch)
	for v := range ch {
		fmt.Println(v)
	}
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}
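
Note: Same above assumes both trees hold the same number of values (tree.New always builds a tree of 10 values), so ranging over ch1 alone is safe. Here is a sketch of a variant that also handles trees of different sizes, using the two-value receive to detect closed channels; SameSafe is a name introduced for illustration, not part of the original solution:

func SameSafe(t1, t2 *tree.Tree) bool {
	ch1, ch2 := make(chan int), make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for {
		v1, ok1 := <-ch1
		v2, ok2 := <-ch2
		if ok1 != ok2 {
			return false // one tree ran out of values first
		}
		if !ok1 {
			return true // both channels closed with all values equal
		}
		if v1 != v2 {
			return false
		}
	}
}

Exercise: Errors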
package main

import (
	"fmt"
	"math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
	// Convert e to float64 before printing: passing e itself to
	// fmt.Sprint would call Error again and recurse forever.
	return fmt.Sprint("cannot Sqrt negative number: ", float64(e))
}

func Sqrt(x float64) (float64, error) {
	if x < 0 {
		return 0, ErrNegativeSqrt(x)
	}
	return math.Sqrt(x), nil
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(Sqrt(-2))
}
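
The float64(e) conversion inside Error is the key detail of this exercise. Sqrt(2) prints the root and a nil error; Sqrt(-2) prints 0 and the error message.

Exercise: HTTP Handlers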
package main

import (
	"fmt"
	"log"
	"net/http"
)

type String string

func (s String) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, s)
}

type Struct struct {
	Greeting string
	Punct    string
	Who      string
}

func (s *Struct) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "%s%s %s", s.Greeting, s.Punct, s.Who)
}

func main() {
	http.Handle("/string", String("I'm a frayed knot."))
	http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
	err := http.ListenAndServe("localhost:4000", nil)
	if err != nil {
		log.Fatal(err)
	}
}
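
With the server running, http://localhost:4000/string and http://localhost:4000/struct serve the two handlers; any other path falls through the default mux and returns 404.

Exercise: Images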
package main

import (
	"golang.org/x/tour/pic"
	"image"
	"image/color"
)

type Image struct {
	w, h int
}

func (i Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, i.w, i.h)
}

func (i Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (i Image) At(x, y int) color.Color {
	return color.RGBA{uint8(x % 255), uint8(y % 255), 255, 255}
}

func main() {
	m := Image{100, 100}
	pic.ShowImage(m)
}
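
pic.ShowImage encodes the image as a base64 PNG, which the tour's page renders inline; with red and green varying by position and blue fixed at 255, the result is a blue-tinted gradient.

Exercise: Readers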
package main

import "golang.org/x/tour/reader"

type MyReader struct{}

// Read emits an infinite stream of the ASCII character 'A',
// one byte per call.
func (m MyReader) Read(b []byte) (int, error) {
	b[0] = 'A'
	return 1, nil
}

func main() {
	reader.Validate(MyReader{})
}
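
Emitting a single 'A' per call is enough for reader.Validate. A sketch of a variant that instead fills the whole buffer on each call, equally valid for this exercise:

func (m MyReader) Read(b []byte) (int, error) {
	for i := range b {
		b[i] = 'A'
	}
	return len(b), nil
}

Exercise: rot13Reader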
package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

// r13Decode rotates a letter 13 places through its half of the
// alphabet and leaves every other byte unchanged.
func r13Decode(b byte) byte {
	switch {
	case b >= 'a' && b <= 'z':
		b = 'a' + (b-'a'+13)%26
	case b >= 'A' && b <= 'Z':
		b = 'A' + (b-'A'+13)%26
	}
	return b
}

func (r13 rot13Reader) Read(b []byte) (int, error) {
	n, err := r13.r.Read(b)
	// Decode only the n bytes actually read; the tail of b holds
	// no valid data. Errors, including io.EOF, pass through along
	// with any bytes read.
	for i := 0; i < n; i++ {
		b[i] = r13Decode(b[i])
	}
	return n, err
}

func main() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}
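
Running this prints the decoded message: You cracked the code!

Exercise: Web Crawler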
package main

import (
	"fmt"
	"log"
	"sync"
)

// Crawlers is the number of concurrent crawler goroutines.
const Crawlers = 2

var (
	lock    sync.Mutex // guards crawled
	crawled = make(map[string]bool)

	wg      sync.WaitGroup // counts running crawler goroutines
	pending sync.WaitGroup // counts queued results not yet processed
	results = make(chan *result)
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type result struct {
	url   string
	depth int
}

// send queues r without blocking the caller, so a crawler can
// enqueue child URLs while every other crawler is busy.
func send(r *result) {
	pending.Add(1)
	go func() { results <- r }()
}

// crawl consumes results until the channel is closed.
func crawl(fetcher Fetcher) {
	log.Println("crawler started")
	defer log.Println("crawler ended")
	defer wg.Done()
	for r := range results {
		log.Println(r)
		// Check and mark the URL under the lock so two crawlers
		// cannot fetch the same page.
		lock.Lock()
		seen := crawled[r.url]
		crawled[r.url] = true
		lock.Unlock()
		if r.depth <= 0 || seen {
			pending.Done()
			continue
		}
		body, urls, err := fetcher.Fetch(r.url)
		if err != nil {
			fmt.Println(err)
			pending.Done()
			continue
		}
		fmt.Printf("crawled: %s %q\n", r.url, body)
		for _, u := range urls {
			send(&result{u, r.depth - 1})
		}
		pending.Done()
	}
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	log.Println("Crawl started")
	send(&result{url, depth})
	for i := 0; i < Crawlers; i++ {
		wg.Add(1)
		go crawl(fetcher)
	}
	// Close the channel once every queued result has been
	// processed, so the crawler goroutines can exit.
	go func() {
		pending.Wait()
		close(results)
	}()
	wg.Wait()
}

func main() {
	log.Println("main started")
	Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
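
Design notes on the crawler: a fixed pool of Crawlers goroutines consumes URLs from the unbuffered results channel, and the crawled map, guarded by lock, keeps any URL from being fetched twice. The pending WaitGroup counts results that have been queued but not yet processed; a separate goroutine closes the channel once that count drains to zero, which lets the crawler goroutines, and in turn wg.Wait inside Crawl, terminate cleanly instead of blocking forever on an empty channel.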