/
reddit.rb
64 lines (51 loc) · 1.57 KB
/
reddit.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
# encoding: UTF-8
class Reddit < Source
  # Parse a Reddit search API response into the standard event payload.
  #
  # result  - Hash parsed from the API JSON (may carry an :error key)
  # work    - the work the metrics belong to (passed through to helpers)
  # options - unused here; kept for interface compatibility with other sources
  #
  # Returns the error hash untouched when the fetch failed, otherwise a hash
  # of formatted events plus aggregate counts and metrics.
  def parse_data(result, work, options={})
    return result if result[:error]

    # 'data' -> 'children' holds the list of matching posts; default to [].
    children = result.deep_fetch('data', 'children') { [] }

    # Aggregate upvote scores and comment counts across all returned posts.
    likes = get_sum(children, 'data', 'score')
    comments = get_sum(children, 'data', 'num_comments')
    total = comments + likes

    formatted = get_events(children)

    { events: formatted,
      events_by_day: get_events_by_day(formatted, work),
      events_by_month: get_events_by_month(formatted),
      events_url: get_events_url(work),
      event_count: total,
      event_metrics: get_event_metrics(comments: comments, likes: likes, total: total) }
  end

  # Map each raw API child onto an event hash with CSL metadata attached.
  def get_events(result)
    result.map do |child|
      post = child['data']
      timestamp = get_iso8601_from_epoch(post['created_utc'])
      link = post['url']

      # the rest is CSL (citation style language)
      csl = { 'author' => get_authors([post.fetch('author', "")]),
              'title' => post.fetch('title', ""),
              'container-title' => 'Reddit',
              'issued' => get_date_parts(timestamp),
              'url' => link,
              'type' => 'personal_communication' }

      { event: post,
        event_time: timestamp,
        event_url: link,
        event_csl: csl }
    end
  end

  # Configuration keys this source accepts.
  def config_fields
    [:url, :events_url]
  end

  # API endpoint template; %{doi} is interpolated by the caller.
  def url
    config.url || "http://www.reddit.com/search.json?q=\"%{doi}\"&limit=100"
  end

  # Human-facing search page template for linking out to Reddit.
  def events_url
    config.events_url || "http://www.reddit.com/search?q=\"%{doi}\""
  end

  # Number of works handled per background job.
  def job_batch_size
    config.job_batch_size || 100
  end

  # Default rate-limiting value — presumably requests per hour; the exact
  # semantics live in Source (NOTE(review): confirm against the base class).
  def rate_limiting
    config.rate_limiting || 1800
  end
end