tour of go 2nd day
package main

import (
    "fmt"
    "math"
)

type ErrNegativeSqrt float64

func (f ErrNegativeSqrt) Error() string {
    // The float64 conversion matters here; see the note after this file.
    return fmt.Sprintf("cannot Sqrt negative number: %f", float64(f))
}

func Sqrt(f float64) (float64, error) {
    if f >= 0 {
        return sqrt(f), nil
    }
    return 0, ErrNegativeSqrt(f)
}

// sqrt approximates the square root of x with Newton's method,
// iterating until successive guesses differ by at most 1e-15.
func sqrt(x float64) float64 {
    newton := func(z float64) float64 {
        return z - (math.Pow(z, 2)-x)/(2*z)
    }
    z1 := float64(1)
    z2 := newton(z1)
    for math.Abs(z1-z2) > 1e-15 {
        z1 = z2
        z2 = newton(z2)
    }
    return z2
}

func main() {
    fmt.Println(Sqrt(2))
    if v, e := Sqrt(-2); e != nil {
        fmt.Print(e)
    } else {
        fmt.Printf("return: %f", v)
    }
}
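
One note on the float64(f) conversion in Error() above: it is required, not cosmetic. If Error() formatted the ErrNegativeSqrt value itself, fmt would see a value with an Error() method and call Error() again to format it, recursing without end. A sketch of the broken variant, for illustration only:

// BROKEN (illustration): fmt.Sprintf calls f.Error() to format f,
// which calls fmt.Sprintf again, and so on until the program dies.
// func (f ErrNegativeSqrt) Error() string {
//     return fmt.Sprintf("cannot Sqrt negative number: %v", f)
// }
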
package main

import (
    "fmt"
    h "net/http"
)

type String string

func (s String) ServeHTTP(w h.ResponseWriter, r *h.Request) {
    fmt.Fprintf(w, "I am %s", s)
}

type Struct struct {
    Greeting string
    Punct    string
    Who      string
}

func (s *Struct) ServeHTTP(w h.ResponseWriter, r *h.Request) {
    fmt.Fprintf(w, "I am %s%s%s", s.Greeting, s.Punct, s.Who)
}

func main() {
    h.Handle("/string", String("I'm a frayed knot."))
    h.Handle("/struct", &Struct{"Hello", "~~", "Gophers!"})
    h.ListenAndServe("localhost:4000", nil)
}
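
For comparison, a route can also be registered without defining a named type, by adapting a plain function with the standard http.HandlerFunc adapter. A minimal sketch (the /func path is my own example, not part of the exercise):

// Hypothetical extra route using the HandlerFunc adapter:
h.Handle("/func", h.HandlerFunc(func(w h.ResponseWriter, r *h.Request) {
    fmt.Fprint(w, "I am a plain function")
}))
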
package main

import (
    "code.google.com/p/go-tour/pic"
    "image"
    "image/color"
    "math/rand"
)

type Image struct {
    Width  int
    Height int
}

func (img *Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (img *Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, img.Width, img.Height)
}

func (img *Image) At(x, y int) color.Color {
    return color.RGBA{
        128,
        uint8(x/4 + y/4),
        uint8(rand.Intn(256)),
        255,
    }
}

func main() {
    m := new(Image)
    m.Width, m.Height = 512, 512
    pic.ShowImage(m)
}
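
pic.ShowImage only renders inside the tour's environment. To run this file locally, one option is to encode the image with the standard image/png package instead; a minimal sketch, assuming the extra imports "image/png" and "os" and an output name of my own choosing:

// savePNG is a hypothetical helper for running this exercise locally,
// in place of pic.ShowImage.
func savePNG(m image.Image, path string) error {
    f, err := os.Create(path)
    if err != nil {
        return err
    }
    defer f.Close()
    return png.Encode(f, m) // e.g. savePNG(m, "out.png") from main
}
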
package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    reader io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = rot.reader.Read(p)
    // Only rewrite the n bytes actually read; the rest of p is stale.
    for idx, b := range p[:n] {
        if b >= 'A' && b <= 'Z' {
            p[idx] = 'A' + (b-'A'+13)%26
        } else if b >= 'a' && b <= 'z' {
            p[idx] = 'a' + (b-'a'+13)%26
        }
    }
    return
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stderr, &r) // prints: You cracked the code!
}
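
Since ROT13 is its own inverse, stacking two rot13Readers should reproduce the original ciphertext; a quick self-check sketch:

// Round-trip sketch: rot13 applied twice is the identity.
inner := rot13Reader{strings.NewReader("Lbh penpxrq gur pbqr!")}
outer := rot13Reader{&inner}
io.Copy(os.Stdout, &outer) // prints: Lbh penpxrq gur pbqr!
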
package main

import (
    "fmt"

    "code.google.com/p/go-tour/tree"
)

/*
For reference, the tree package defines:

type Tree struct {
    Left  *Tree
    Value int
    Right *Tree
}
*/

// Walk walks the tree t, sending all values
// from the tree to the channel ch, and
// closes ch when walking has ended.
func Walk(t *tree.Tree, ch chan int) {
    walk(t, ch)
    close(ch)
}

// walk does the actual in-order traversal.
func walk(t *tree.Tree, ch chan int) {
    if t.Left != nil {
        walk(t.Left, ch)
    }
    ch <- t.Value
    if t.Right != nil {
        walk(t.Right, ch)
    }
}

// Same determines whether the trees t1 and t2 contain the
// same values. (It assumes both trees hold the same number
// of values, which is true for tree.New: always 10.)
func Same(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for v1 := range ch1 {
        v2 := <-ch2
        if v1 != v2 {
            return false
        }
    }
    return true
}

func main() {
    ch := make(chan int)
    go Walk(tree.New(1), ch)
    for v := range ch {
        fmt.Printf("Value is: %d\n", v)
    }
    res1 := Same(tree.New(1), tree.New(1))
    fmt.Printf("res1 = %v\n", res1)
    res2 := Same(tree.New(1), tree.New(2))
    fmt.Printf("res2 = %v\n", res2)
}
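
As noted above, Same relies on both trees carrying the same number of values. A sketch of a variant (SameChecked is my own name, not part of the exercise) that also catches trees of different sizes by using comma-ok receives:

// SameChecked is like Same but also detects unequal tree sizes.
func SameChecked(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for {
        v1, ok1 := <-ch1
        v2, ok2 := <-ch2
        if ok1 != ok2 {
            return false // one tree ran out of values first
        }
        if !ok1 {
            return true // both exhausted with all values equal
        }
        if v1 != v2 {
            return false
        }
    }
}
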
package main

// Note: this version deadlocks. Crawl runs synchronously in main, so its
// final send on the unbuffered startQueue channel has no receiver yet and
// the runtime aborts with "all goroutines are asleep".

import (
    "fmt"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth,
// and signals completion by sending url on finished.
// OK: fetches URLs in parallel.
// OK: doesn't fetch the same URL twice.
func Crawl(url string, depth int, fetcher Fetcher,
    visited map[string]bool, finished chan string) {
    if depth <= 0 {
        finished <- url
        return
    }
    if _, exist := visited[url]; !exist {
        body, urls, err := fetcher.Fetch(url)
        visited[url] = true
        if err != nil {
            fmt.Println(err)
            finished <- url
            return
        }
        fmt.Printf("found: %s %q\n", url, body)
        newQueue := make(chan string)
        for _, u := range urls {
            go Crawl(u, depth-1, fetcher, visited, newQueue)
        }
        for i := 0; i < len(urls); i++ {
            fmt.Printf("d = %d, url = %s\n", depth-1, <-newQueue)
        }
    }
    finished <- url
}

func main() {
    startQueue := make(chan string)
    // Deadlock: Crawl is not started as a goroutine, so the send at the
    // end of Crawl blocks before main ever reaches <-startQueue.
    Crawl("http://golang.org/", 4, fetcher, make(map[string]bool), startQueue)
    <-startQueue
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    wg := new(sync.WaitGroup)
    wg.Add(1)
    go crawl(url, depth, fetcher, make(map[string]bool), wg)
    wg.Wait()
}

// crawl fetches url, then crawls each linked URL in its own
// goroutine and waits for all of them before signalling wg.
// OK: Fetch URLs in parallel.
// OK: Don't fetch the same URL twice.
// Caveat: visited is shared across goroutines without a lock,
// so `go run -race` flags this solution (see the sketch below).
func crawl(url string, depth int, fetcher Fetcher,
    visited map[string]bool, wg *sync.WaitGroup) {
    defer wg.Done()
    // fmt.Printf("start: %s\n", url)
    if depth >= 1 {
        if _, exist := visited[url]; !exist {
            body, urls, err := fetcher.Fetch(url)
            visited[url] = true
            if err != nil {
                fmt.Println(err)
            } else {
                fmt.Printf("found: %s %q\n", url, body)
                newWg := new(sync.WaitGroup)
                for _, u := range urls {
                    newWg.Add(1)
                    go crawl(u, depth-1, fetcher, visited, newWg)
                }
                newWg.Wait()
            }
        }
    }
}

func main() {
    Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}
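
About the caveat flagged in crawl above: visited is read and written from many goroutines at once, which is a data race (concurrent map writes can even panic the runtime). A minimal sketch of a mutex-guarded visited set that could replace the bare map, with names of my own choosing:

// visitSet wraps the visited map with a mutex so that
// concurrent crawl goroutines can share it safely.
type visitSet struct {
    mu   sync.Mutex
    seen map[string]bool
}

// visit marks url as seen and reports whether it had been seen before.
func (v *visitSet) visit(url string) bool {
    v.mu.Lock()
    defer v.mu.Unlock()
    if v.seen[url] {
        return true
    }
    v.seen[url] = true
    return false
}
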
udzura commented Sep 16, 2014

Ah, I should compare my answers later with http://yuuki.hatenablog.com/entry/2014/02/16/183206
