@ryochack
Created January 21, 2012 17:38
"A Tour of Go" http://tour.golang.org/#70 channelのみの並列制御版
/*
 * http://tour.golang.org/#70
 * or
 * http://go-tour-jp.appspot.com/#69
 */
package main

import "fmt"
type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}
/* Result that each crawl goroutine sends back to the main loop */
type crawlResult struct {
    depth int
    urls  []string
    ok    bool
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    /* number of goroutines currently alive */
    routines := 0
    /* channel that carries the URLs to crawl next */
    ch_query := make(chan *crawlResult)
    /* most recently received result; seeded with the start URL */
    query := &crawlResult{depth, []string{url}, true}
    /* map of URLs that have already been crawled */
    crawledUrl := make(map[string]int)
    for {
        /* start one goroutine per URL that has not been crawled yet */
        for _, url := range query.urls {
            /* skip URLs that have already been crawled */
            if crawledUrl[url] == 0 {
                /* mark the URL as crawled */
                crawledUrl[url]++
                /*
                 * Spawn a goroutine.
                 * It sends the URLs found on the page to the channel and then exits.
                 */
                go func(u string, d int) {
                    /* d is the depth left for links found on this page;
                     * d < 0 means u itself is beyond the depth limit, so skip the fetch */
                    if d < 0 {
                        /* routines still has to be decremented, so send on the channel before returning */
                        ch_query <- &crawlResult{d, nil, false}
                        return
                    }
                    body, urls, err := fetcher.Fetch(u)
                    if err != nil {
                        fmt.Println(err)
                        ch_query <- &crawlResult{d, urls, false}
                        return
                    }
                    fmt.Printf("found: %s %q \n", u, body)
                    ch_query <- &crawlResult{d, urls, true}
                }(url, query.depth-1)
                routines++
                //fmt.Printf(" +routines=%d \n", routines)
            }
        }
        /* exit once every goroutine has finished */
        if routines < 1 {
            break
        }
        /* receive results and decrement the goroutine count;
         * keep receiving until a successful fetch arrives or no goroutines remain */
        for {
            query = <-ch_query
            routines--
            //fmt.Printf(" -routines=%d [%t]\n", routines, query.ok)
            if query.ok || routines < 1 {
                break
            }
        }
    }
}
func main() {
    /* depth to crawl to */
    depth := 4
    Crawl("http://golang.org/", depth, fetcher)
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := (*f)[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}
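With the fake fetcher above, running the program (for example with go run, under whatever file name you save it as) should print one "found" line per reachable page plus one error for the cmd/ URL that the fake fetcher does not know. Goroutine scheduling makes the line order vary between runs; the output should look roughly like this:

found: http://golang.org/ "The Go Programming Language"
not found: http://golang.org/cmd/
found: http://golang.org/pkg/ "Packages"
found: http://golang.org/pkg/fmt/ "Package fmt"
found: http://golang.org/pkg/os/ "Package os"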