forked from n0madic/twitter-scraper
/
search.go
157 lines (135 loc) · 4.48 KB
/
search.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
package twitterscraper
import (
"context"
"errors"
"log"
"strconv"
)
// SearchTweets streams tweets matching the given search query on a channel,
// stopping after maxTweetsNbr results.
func (s *Scraper) SearchTweets(ctx context.Context, query string, maxTweetsNbr int) <-chan *TweetResult {
	results := getTweetTimeline(ctx, query, maxTweetsNbr, s.FetchSearchTweets)
	return results
}
// SearchTweets streams tweets matching the given search query using the
// package-level default scraper.
//
// Deprecated: use Scraper.SearchTweets instead.
func SearchTweets(ctx context.Context, query string, maxTweetsNbr int) <-chan *TweetResult {
	results := defaultScraper.SearchTweets(ctx, query, maxTweetsNbr)
	return results
}
// SearchProfiles streams user profiles matching the given search query on a
// channel, stopping after maxProfilesNbr results.
func (s *Scraper) SearchProfiles(ctx context.Context, query string, maxProfilesNbr int) <-chan *ProfileResult {
	results := getUserTimeline(ctx, query, maxProfilesNbr, s.FetchSearchProfiles)
	return results
}
// SearchProfiles streams user profiles matching the given search query using
// the package-level default scraper.
//
// Deprecated: use Scraper.SearchProfiles instead.
func SearchProfiles(ctx context.Context, query string, maxProfilesNbr int) <-chan *ProfileResult {
	results := defaultScraper.SearchProfiles(ctx, query, maxProfilesNbr)
	return results
}
// getSearchTimeline fetches one page of search results for query from the
// Twitter frontend adaptive-search API. cursor, when non-empty, resumes a
// previous page. Requires a logged-in scraper.
func (s *Scraper) getSearchTimeline(query string, maxNbr int, cursor string) (*timeline, error) {
	if !s.isLogged {
		return nil, errors.New("scraper is not logged in for search")
	}
	// The endpoint caps the page size at 50 items.
	if maxNbr > 50 {
		maxNbr = 50
	}
	req, err := s.newRequest("GET", "https://twitter.com/i/api/2/search/adaptive.json")
	if err != nil {
		return nil, err
	}
	params := req.URL.Query()
	params.Add("q", query)
	params.Add("count", strconv.Itoa(maxNbr))
	params.Add("query_source", "typed_query")
	params.Add("requestContext", "launch")
	params.Add("spelling_corrections", "1")
	params.Add("include_ext_edit_control", "true")
	if cursor != "" {
		params.Add("cursor", cursor)
	}
	// "pc" is "0" only for the Latest tab, "1" otherwise — presumably a
	// promoted-content toggle; TODO confirm against the frontend API.
	pc := "1"
	if s.searchMode == SearchLatest {
		pc = "0"
	}
	params.Add("pc", pc)
	switch s.searchMode {
	case SearchLatest:
		params.Add("tweet_search_mode", "live")
	case SearchPhotos:
		params.Add("result_filter", "image")
	case SearchVideos:
		params.Add("result_filter", "video")
	case SearchUsers:
		params.Add("result_filter", "user")
	}
	req.URL.RawQuery = params.Encode()
	// NOTE(review): this logs every request URL unconditionally — looks like a
	// debug leftover. Dropping it would also orphan the "log" import, so it is
	// kept here; consider gating it behind a debug flag in a follow-up.
	log.Println(req.URL.String())
	var tl timeline
	if _, err = s.RequestAPI(req, &tl); err != nil {
		return nil, err
	}
	return &tl, nil
}
// getSearchTimelineWithResponseHeaders fetches one page of search results for
// query from the Twitter frontend adaptive-search API and also returns the
// response headers from the underlying API call. cursor, when non-empty,
// resumes a previous page. Requires a logged-in scraper.
//
// Unlike getSearchTimeline, "pc" is always "1" here and nothing is logged.
func (s *Scraper) getSearchTimelineWithResponseHeaders(query string, maxNbr int, cursor string) (*timeline, *ResponseAPIHeaders, error) {
	if !s.isLogged {
		return nil, nil, errors.New("scraper is not logged in for search")
	}
	// The endpoint caps the page size at 50 items.
	if maxNbr > 50 {
		maxNbr = 50
	}
	req, err := s.newRequest("GET", "https://twitter.com/i/api/2/search/adaptive.json")
	if err != nil {
		return nil, nil, err
	}
	params := req.URL.Query()
	params.Add("q", query)
	params.Add("count", strconv.Itoa(maxNbr))
	params.Add("query_source", "typed_query")
	params.Add("pc", "1")
	params.Add("requestContext", "launch")
	params.Add("spelling_corrections", "1")
	params.Add("include_ext_edit_control", "true")
	if cursor != "" {
		params.Add("cursor", cursor)
	}
	switch s.searchMode {
	case SearchLatest:
		params.Add("tweet_search_mode", "live")
	case SearchPhotos:
		params.Add("result_filter", "image")
	case SearchVideos:
		params.Add("result_filter", "video")
	case SearchUsers:
		params.Add("result_filter", "user")
	}
	req.URL.RawQuery = params.Encode()
	var tl timeline
	headers, err := s.RequestAPI(req, &tl)
	if err != nil {
		return nil, nil, err
	}
	return &tl, headers, nil
}
// FetchSearchTweets fetches one page of tweets for the given search query via
// the Twitter frontend API, returning the tweets and the cursor for the next
// page.
func (s *Scraper) FetchSearchTweets(query string, maxTweetsNbr int, cursor string) ([]*Tweet, string, error) {
	tl, err := s.getSearchTimeline(query, maxTweetsNbr, cursor)
	if err != nil {
		return nil, "", err
	}
	tweets, next := tl.parseTweets()
	return tweets, next, nil
}
// FetchSearchTweetsWithResponseHeaders fetches one page of tweets for the
// given search query and additionally returns the API response headers.
func (s *Scraper) FetchSearchTweetsWithResponseHeaders(query string, maxTweetsNbr int, cursor string) ([]*Tweet, string, *ResponseAPIHeaders, error) {
	tl, headers, err := s.getSearchTimelineWithResponseHeaders(query, maxTweetsNbr, cursor)
	if err != nil {
		return nil, "", nil, err
	}
	tweets, next := tl.parseTweets()
	return tweets, next, headers, nil
}
// FetchSearchProfiles fetches one page of user profiles for the given search
// query via the Twitter frontend API, returning the profiles and the cursor
// for the next page.
func (s *Scraper) FetchSearchProfiles(query string, maxProfilesNbr int, cursor string) ([]*Profile, string, error) {
	tl, err := s.getSearchTimeline(query, maxProfilesNbr, cursor)
	if err != nil {
		return nil, "", err
	}
	profiles, next := tl.parseUsers()
	return profiles, next, nil
}