package main

import (
	"fmt"
	"sync" // used by the CrawlConcurrent sketch below
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		Crawl(u, depth-1, fetcher)
	}
}

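// One possible way to address both TODOs above (a sketch, not a reference
// solution): guard a shared visited set with a sync.Mutex so no URL is
// fetched twice, and launch each child crawl in its own goroutine, waiting
// on a sync.WaitGroup. The crawlState type and CrawlConcurrent name are
// illustrative additions, not part of the original exercise.
type crawlState struct {
	mu      sync.Mutex
	visited map[string]bool
}

// CrawlConcurrent behaves like Crawl but fetches URLs in parallel and
// skips any URL it has already seen. Invoke it as, for example:
//
//	CrawlConcurrent("https://golang.org/", 4, fetcher,
//		&crawlState{visited: make(map[string]bool)})
func CrawlConcurrent(url string, depth int, fetcher Fetcher, s *crawlState) {
	if depth <= 0 {
		return
	}
	// Mark the URL as visited exactly once; bail out if another
	// goroutine got here first.
	s.mu.Lock()
	seen := s.visited[url]
	s.visited[url] = true
	s.mu.Unlock()
	if seen {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	// Crawl each discovered URL concurrently and wait for all of
	// them to finish before returning.
	var wg sync.WaitGroup
	for _, u := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			CrawlConcurrent(u, depth-1, fetcher, s)
		}(u)
	}
	wg.Wait()
}
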
Crawl("https://golang.org/", 4, fetcher)
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{"https://golang.org/pkg/", "https://golang.org/cmd/"},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{"https://golang.org/", "https://golang.org/cmd/", "https://golang.org/pkg/fmt/", "https://golang.org/pkg/os/"},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{"https://golang.org/", "https://golang.org/pkg/"},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{"https://golang.org/", "https://golang.org/pkg/"},
	},
}