@PetrGlad
Created July 24, 2015 10:34
My exercises for "A Tour of Go"
package main

import "fmt"

// fibonacci_seq returns a closure that yields successive Fibonacci numbers: 0, 1, 1, 2, 3, 5, ...
func fibonacci_seq() func() int {
	f1, f2 := 0, 1
	return func() int {
		r := f1
		f1, f2 = f2, f1+f2
		return r
	}
}
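// A minimal main to exercise the closure (not part of the original gist):
// it simply prints the first ten Fibonacci numbers.
func main() {
	f := fibonacci_seq()
	for i := 0; i < 10; i++ {
		fmt.Println(f())
	}
}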
package main

import (
	"fmt"
	"math"
)

// Sqrt computes the square root of x with Newton's method, repeating
// z -= (z*z - x) / (2*z) until successive estimates differ by less than 1e-8.
func Sqrt(x float64) float64 {
	z := x / 2
	prev := x
	for math.Abs(z-prev) > 1e-8 {
		prev = z
		z = z - (z*z-x)/(2*z)
	}
	return z
}
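// For illustration (not part of the original gist): with x = 2 the iteration
// above converges roughly as
//   1 -> 1.5 -> 1.416667 -> 1.414216 -> 1.4142136
// i.e. Newton's method roughly doubles the number of correct digits per step.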
func test(x float64) {
	r := Sqrt(x)
	fmt.Println(r)
	fmt.Println(r - math.Sqrt(x))
}

func main() {
	test(2)
}
package main

import "fmt"

type Tree struct {
	Left  *Tree
	Value int
	Right *Tree
}

func NewLeaf(value int) *Tree {
	return &Tree{Value: value}
}

func walk(t *Tree, out chan<- int) {
	if t != nil {
		walk(t.Left, out)
		out <- t.Value
		walk(t.Right, out)
	}
}

func Walk(t *Tree, out chan<- int) {
	walk(t, out)
	close(out)
}

func AreChansEqual(ca, cb <-chan int) bool {
	for {
		xa, oka := <-ca
		xb, okb := <-cb
		if oka != okb || xa != xb {
			return false
		}
		if !oka || !okb {
			break
		}
	}
	return true
}

func AreTreesEquivalent(ta, tb *Tree) bool {
	as := make(chan int)
	bs := make(chan int)
	go Walk(ta, as)
	go Walk(tb, bs)
	return AreChansEqual(as, bs)
}

func main() {
	tv := &Tree{Value: 12, Right: &Tree{NewLeaf(15), 34, nil}}
	tq := &Tree{NewLeaf(12), 15, NewLeaf(34)}
	tr := NewLeaf(112)
	fmt.Println(AreTreesEquivalent(tv, tq)) // Expected: true
	fmt.Println(AreTreesEquivalent(tv, tr)) // Expected: false
}
package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// crawl fetches url (unless it was seen before) and spawns one goroutine per
// discovered link. A `true` is sent on completions before each spawned task
// and a `false` when a task finishes, so Crawl's counting goroutine can tell
// when all work is done. The visited callback marks url as seen and reports
// whether this is the first time it was seen.
func crawl(url string, depth int, fetcher Fetcher, visited func(url string) bool, completions chan<- bool) {
	// Report completion on every path, including the early return below.
	defer func() { completions <- false }()
	if depth <= 0 {
		return
	}
	if visited(url) {
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
		} else {
			fmt.Printf("found: %s %q\n", url, body)
			for _, u := range urls {
				completions <- true
				go crawl(u, depth-1, fetcher, visited, completions)
			}
		}
	}
}
// makeVisitedCounter returns a function that marks a URL as visited and
// reports whether this was the first visit. The map is guarded by a mutex
// because the returned function is called from many goroutines.
func makeVisitedCounter() func(url string) bool {
	visitedUrls := make(map[string]bool)
	visitedMutex := &sync.Mutex{}
	return func(url string) bool {
		visitedMutex.Lock()
		defer visitedMutex.Unlock()
		if visitedUrls[url] {
			return false
		}
		visitedUrls[url] = true
		return true
	}
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	// A counting "agent" goroutine tracks the number of active tasks:
	// true on completions starts a task, false finishes one.
	completions := make(chan bool)
	finish := make(chan bool)
	go func() {
		taskCount := 0
		for {
			state, ok := <-completions
			if !ok {
				break
			}
			if state {
				taskCount++
			} else {
				taskCount--
				if taskCount == 0 {
					finish <- true
				}
			}
		}
	}()
	completions <- true
	crawl(url, depth, fetcher, makeVisitedCounter(), completions)
	<-finish
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
type UrlStr string

type FetchItem struct {
	url   UrlStr
	depth int
}

type FetchResult struct {
	url  UrlStr
	err  error
	body string
}

func (fr *FetchResult) String() string {
	if fr.err == nil {
		return fmt.Sprintf("url: %v, body: \"%v\"", fr.url, fr.body)
	} else {
		return fmt.Sprintf("url: %v, err: %v", fr.url, fr.err)
	}
}
// taskIsComplete decrements the number of outstanding tasks and, when the
// last one finishes, closes both queues so the other goroutines can exit.
// Using the value returned by AddInt32 avoids a racy separate read of the counter.
func taskIsComplete(awaitCount *int32, resultQueue chan<- FetchResult, taskQueue chan<- FetchItem) {
	if atomic.AddInt32(awaitCount, -1) == 0 {
		close(taskQueue)
		close(resultQueue)
	}
}

func crawlUrl(item FetchItem, fetcher Fetcher,
	contentOut chan<- FetchResult, linksOut chan<- FetchItem,
	awaitCount *int32) {
	body, urls, err := fetcher.Fetch(string(item.url))
	contentOut <- FetchResult{item.url, err, body}
	if item.depth > 0 {
		for _, u := range urls {
			atomic.AddInt32(awaitCount, 1)
			linksOut <- FetchItem{UrlStr(u), item.depth - 1}
		}
	}
	taskIsComplete(awaitCount, contentOut, linksOut)
}
/* Crawl uses fetcher to recursively crawl
   pages starting with url, to a maximum of depth.
*/
func Crawl(url string, depth int, fetcher Fetcher) {
	// Use a buffered task queue to chain tasks and an atomic counter to
	// detect completion.
	fetchQueue := make(chan FetchItem, 20)
	resultQueue := make(chan FetchResult)
	// Result printer.
	go func() {
		for {
			fetchResult, ok := <-resultQueue
			if !ok {
				break
			}
			fmt.Println(&fetchResult)
		}
	}()
	// Crawl swarm.
	fetchQueue <- FetchItem{UrlStr(url), depth}
	var awaitCount int32 = 1
	visited := make(map[UrlStr]bool)
	for {
		fetchItem, ok := <-fetchQueue
		if !ok {
			break
		}
		if visited[fetchItem.url] {
			taskIsComplete(&awaitCount, resultQueue, fetchQueue)
		} else {
			/* It would be simpler if we could swap a task with its linked tasks
			   atomically in the input queue; an empty queue would then signal that
			   there are no more tasks. XXX awaitCount is used to account for the
			   in-task pause. (A simpler sync.WaitGroup-based sketch of the same
			   completion tracking is shown after Crawl below.) */
			visited[fetchItem.url] = true
			go crawlUrl(fetchItem, fetcher, resultQueue, fetchQueue, &awaitCount)
		}
	}
}
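// An alternative sketch (not part of the original gist): the same completion
// tracking can be expressed with sync.WaitGroup instead of a hand-rolled
// atomic counter, at the cost of printing results directly rather than via
// resultQueue. crawlWG and its use of sync.Map are illustrative assumptions,
// not code from the exercise.
func crawlWG(url string, depth int, fetcher Fetcher, visited *sync.Map, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	// LoadOrStore marks the URL and reports whether it was already seen.
	if _, seen := visited.LoadOrStore(url, true); seen {
		return
	}
	body, urls, err := fetcher.Fetch(string(UrlStr(url)))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawlWG(u, depth-1, fetcher, visited, wg)
	}
}

// crawlWG would be driven along these lines:
//   var wg sync.WaitGroup
//   wg.Add(1)
//   go crawlWG("http://golang.org/", 4, fetcher, &sync.Map{}, &wg)
//   wg.Wait()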
func main() {
	Crawl("http://golang.org/", 4, fetcher)
}
// ------------------------------------------------------------------------------------------
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
package main

import (
	"fmt"
	"log"
	"net/http"
)

type String string

func (s String) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, "I say: ", s)
}

type Struct struct {
	Greeting string
	Punct    string
	Who      string
}

func (s *Struct) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, "Only three parts: ", s.Greeting, s.Punct, s.Who)
}

func main() {
	// your http.Handle calls here
	http.Handle("/string", String("I'm a frayed knot."))
	http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
	log.Fatal(http.ListenAndServe("localhost:4000", nil))
}
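// To try it out (not part of the original gist): run the program and request
// the two handlers, for example with curl:
//   curl http://localhost:4000/string   # -> I say: I'm a frayed knot.
//   curl http://localhost:4000/struct   # -> Only three parts: Hello:Gophers!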