forked from gocolly/colly
/
hackernews_comments.go
76 lines (65 loc) · 1.65 KB
/
hackernews_comments.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
package main
import (
"encoding/json"
"flag"
"fmt"
"log"
"os"
"strconv"
"strings"
"github.com/AlexanderChen1989/colly"
)
// comment is one Hacker News comment extracted from the comment-tree
// table, together with its nested replies. The `selector` struct tags
// are consumed by colly's HTMLElement.Unmarshal to populate the fields.
type comment struct {
	// Author is the commenter's username, taken from the "a.hnuser" link.
	Author string `selector:"a.hnuser"`
	// URL is the comment permalink, read from the href attribute of the
	// ".age a" link.
	URL string `selector:".age a[href]" attr:"href"`
	// Comment is the raw text content of the ".comment" cell.
	Comment string `selector:".comment"`
	// Replies holds the child comments one nesting level deeper.
	Replies []*comment
	// depth is the nesting level derived from the indentation spacer
	// width; unexported, so it never appears in the JSON output.
	depth int
}
// main scrapes the full comment tree of a single Hacker News post
// (selected with the -id flag) and prints it to stdout as indented JSON.
func main() {
	var itemID string
	flag.StringVar(&itemID, "id", "", "hackernews post id")
	flag.Parse()
	if itemID == "" {
		log.Println("Hackernews post id required")
		os.Exit(1)
	}

	comments := make([]*comment, 0)

	// Instantiate default collector
	c := colly.NewCollector()

	// Extract one comment per row of the comment tree.
	c.OnHTML(".comment-tree tr.athing", func(e *colly.HTMLElement) {
		width, err := strconv.Atoi(e.ChildAttr("td.ind img", "width"))
		if err != nil {
			return
		}
		// hackernews uses 40px spacers to indent comment replies,
		// so we have to divide the width with it to get the depth
		// of the comment
		depth := width / 40

		// Renamed from "c" to avoid shadowing the collector above.
		cm := &comment{
			Replies: make([]*comment, 0),
			depth:   depth,
		}
		// BUG FIX: the Unmarshal error was silently discarded; skip rows
		// that fail to extract instead of emitting empty comments.
		if err := e.Unmarshal(cm); err != nil {
			log.Println("failed to extract comment:", err)
			return
		}
		// Strip the trailing "reply" link text that colly captures as part
		// of the ".comment" cell. BUG FIX: guard the slice so a comment
		// shorter than 5 bytes cannot cause an out-of-range panic.
		if len(cm.Comment) >= 5 {
			cm.Comment = cm.Comment[:len(cm.Comment)-5]
		}
		cm.Comment = strings.TrimSpace(cm.Comment)

		if depth == 0 {
			comments = append(comments, cm)
			return
		}
		// BUG FIX: if a reply row somehow appears before any top-level
		// comment, treat it as top-level rather than panicking on an
		// index-out-of-range of the empty comments slice.
		if len(comments) == 0 {
			comments = append(comments, cm)
			return
		}
		// Walk the rightmost branch to find this comment's direct parent.
		parent := comments[len(comments)-1]
		for i := 0; i < depth-1; i++ {
			// BUG FIX: stop early if the tree is shallower than the
			// indentation implies (malformed markup) instead of panicking.
			if len(parent.Replies) == 0 {
				break
			}
			parent = parent.Replies[len(parent.Replies)-1]
		}
		parent.Replies = append(parent.Replies, cm)
	})

	// BUG FIX: report network/HTTP failures instead of silently printing
	// an empty result when the visit fails.
	if err := c.Visit("https://news.ycombinator.com/item?id=" + itemID); err != nil {
		log.Fatalln("visit failed:", err)
	}

	// Convert results to JSON data if the scraping job has finished
	jsonData, err := json.MarshalIndent(comments, "", " ")
	if err != nil {
		panic(err)
	}
	// Dump json to the standard output (can be redirected to a file)
	fmt.Println(string(jsonData))
}