crawl_page.go
package main

import (
	"fmt"
	"net/url"
)

// crawlPage recursively crawls pages reachable from rawCurrentURL that share
// the base URL's host, counting visits to each normalized URL in pages.
func crawlPage(rawBaseURL, rawCurrentURL string, pages map[string]int) {
	currentURL, err := url.Parse(rawCurrentURL)
	if err != nil {
		fmt.Printf("Error - couldn't parse current URL: %v\n", err)
		return
	}
	baseURL, err := url.Parse(rawBaseURL)
	if err != nil {
		fmt.Printf("Error - couldn't parse base URL: %v\n", err)
		return
	}

	// skip other websites
	if currentURL.Hostname() != baseURL.Hostname() {
		return
	}

	normalizedURL, err := normalizeURL(rawCurrentURL)
	if err != nil {
		fmt.Printf("Error - normalizeURL: %v\n", err)
		return
	}

	// increment the count and stop if this page was already visited
	if _, visited := pages[normalizedURL]; visited {
		pages[normalizedURL]++
		return
	}

	// mark as visited
	pages[normalizedURL] = 1

	fmt.Printf("Crawling %s\n", rawCurrentURL)
	htmlBody, err := getHTML(rawCurrentURL)
	if err != nil {
		fmt.Printf("Error - getHTML: %v\n", err)
		return
	}

	nextURLs, err := getURLsFromHTML(htmlBody, rawBaseURL)
	if err != nil {
		fmt.Printf("Error - getURLsFromHTML: %v\n", err)
		return
	}

	// recurse into every URL found on this page
	for _, nextURL := range nextURLs {
		crawlPage(rawBaseURL, nextURL, pages)
	}
}
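
Below is a minimal sketch of how crawlPage might be invoked, assuming normalizeURL, getHTML, and getURLsFromHTML are defined elsewhere in this package; the starting URL and the report loop are illustrative only, not taken from the repo.

// hypothetical entry point: crawl a site and print the visit counts
func main() {
	rawBaseURL := "https://example.com" // assumed starting point, not from the repo
	pages := map[string]int{}
	crawlPage(rawBaseURL, rawBaseURL, pages)

	for normalizedURL, count := range pages {
		fmt.Printf("%d - %s\n", count, normalizedURL)
	}
}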