// Package scraper crawls URLs received on a jobs channel and extracts the
// anchor links of each page as JSON.
package scraper

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"

	"golang.org/x/net/html"
)
// Link is a single hyperlink extracted from an HTML document.
type Link struct {
	URL  string `json:"url"`
	Text string `json:"text"`
}

// Links collects every hyperlink found on a page.
type Links struct {
	Links []Link
}
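// For illustration (a hypothetical input, not from this file), running
// findLinks over a body containing
//
//	<a href="/about">About</a>
//
// yields the JSON:
//
//	[{"url":"/about","text":"About"}]
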
// Crawl is a worker: it fetches each URL received on jobs, extracts the
// page's links as JSON, and sends the result on results. Once the jobs
// channel is closed and drained, it signals completion on finished.
func Crawl(id int, jobs <-chan string, results chan<- []byte, finished chan<- bool) {
	for url := range jobs {
		fmt.Println("Worker", id, "got to search", url)
		resp, err := http.Get(url)
		if err != nil {
			fmt.Printf("error: failed to crawl %s\n", url)
			results <- nil
			continue // resp is nil here; move on to the next job
		}
		b, err := io.ReadAll(resp.Body)
		resp.Body.Close() // close per request, not deferred, so bodies don't accumulate across loop iterations
		if err != nil {
			fmt.Printf("error: failed to read content from url %s\n", url)
			results <- nil
			continue
		}
		results <- findLinks(b)
	}
	finished <- true // jobs channel closed: signal that this worker is done
}
// findLinks tokenizes the HTML body, pairs each <a href> with its anchor
// text, and returns the collected links encoded as JSON.
func findLinks(body []byte) []byte {
	HTMLLinks := Links{}
	link := Link{}
	var buffer bytes.Buffer
	z := html.NewTokenizer(bytes.NewReader(body))
	depth := 0 // > 0 while inside an <a> element
	for {
		tt := z.Next()
		switch tt {
		case html.ErrorToken:
			// End of the document: JSON-encode what we collected.
			links, err := json.Marshal(HTMLLinks.Links)
			if err != nil {
				return []byte{}
			}
			return links
		case html.TextToken:
			if depth > 0 {
				buffer.Write(z.Text())
			}
		case html.StartTagToken, html.EndTagToken:
			t := z.Token()
			if t.Data == "a" {
				if tt == html.StartTagToken {
					depth++
					// Capture the href attribute for this anchor.
					for _, a := range t.Attr {
						if a.Key == "href" {
							link.URL = a.Val
							break
						}
					}
				} else { // closing </a>: pair the buffered text with the href
					link.Text = buffer.String()
					buffer.Reset()
					HTMLLinks.Links = append(HTMLLinks.Links, link)
					depth--
				}
			}
		}
	}
}
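
// A minimal usage sketch, not part of this package: it assumes a
// hypothetical package main that imports fmt, fans a fixed set of
// placeholder URLs out to a pool of Crawl workers, and collects the JSON
// results before waiting for every worker to signal finished.
//
//	func main() {
//		urls := []string{"https://example.com", "https://example.org"}
//		jobs := make(chan string, len(urls))
//		results := make(chan []byte, len(urls))
//		finished := make(chan bool)
//		for w := 1; w <= 3; w++ {
//			go Crawl(w, jobs, results, finished)
//		}
//		for _, u := range urls {
//			jobs <- u
//		}
//		close(jobs) // lets each worker's range loop end and signal finished
//		for range urls {
//			if b := <-results; b != nil {
//				fmt.Println(string(b))
//			}
//		}
//		for w := 1; w <= 3; w++ {
//			<-finished
//		}
//	}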