# SimplyEmailAggressive.ini - aggressive-mode configuration for SimplyEmail
# Read by the SimplyEmail tool (Python ConfigParser; ':' delimiter, '#' comments)
# Global output, HTTP, and version-check settings
[GlobalSettings]
# HTTP User-Agent sent by scraping modules.
# FIX: original value began with a stray unmatched "(" and lacked the space
# before "en-US", which produced a malformed User-Agent string.
UserAgent: Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6
# Plain-text and HTML output files for harvested results
SaveFile: Email_List.txt
HtmlFile: Email_List.html
Version: v1.5.0
# Check the repo for a newer release on startup (Yes/No)
VersionRepoCheck: Yes
VersionRepoCheckLocation: https://raw.githubusercontent.com/killswitch-GUI/SimplyEmail/master/License/LICENSE-Version
# Process-pool settings for parallel module execution
[ProcessConfig]
# Number of worker processes to spawn
TotalProcs: 8
# API keys will be labeled
# by the service they belong to
[APIKeys]
# Empty value = Canario module runs without an API key
Canario:
# Settings for the HTML scraping module
# Save can add in a path - default is the SimplyEmail folder with domain name
[HtmlScrape]
# Recursion depth of the crawl
Depth: 10
# Delay between requests (0 = no delay); presumably seconds - TODO confirm
Wait: 0
# Download bandwidth cap (wget-style suffix, e.g. 10000k)
LimitRate: 10000k
# Per-request timeout; presumably seconds - TODO confirm against the module
Timeout: 2
# Empty value = use the default output location
Save:
# Strip HTML from scraped pages after parsing (Yes/No)
RemoveHTML: Yes
# You can use a few different key servers, so a config option is useful here
[SearchPGP]
# Key server address and port (host:port)
KeyServer: pgp.rediris.es:11371
Hostname: pgp.rediris.es
# Settings for Google Search
[GoogleSearch]
# Results requested per page - TODO confirm against the module
StartQuantity: 100
# Maximum number of results to retrieve
QueryLimit: 800
# Result offset to start from
QueryStart: 0
# Flickr settings
[FlickrSearch]
Hostname: flickr.com
# GitHub code scraping settings
# PageDepth: WARNING - every page can contain up to 30 users and multiple links
# to scrape; large values slow down results very quickly
[GitHubSearch]
PageDepth: 10
QueryStart: 1
# StartPage search engine settings
[StartPageSearch]
# Results requested per page - TODO confirm against the module
StartQuantity: 100
# Maximum number of results to retrieve
QueryLimit: 2000
# Result offset to start from
QueryStart: 0
# Yahoo search engine settings
[YahooSearch]
StartQuantity: 100
QueryLimit: 900
QueryStart: 0
# Canary PasteBin search (non-API scraping)
[CanaryPasteBin]
PageDepth: 4
QueryStart: 1
# Upper bound on pastes fetched per run
MaxPastesToSearch: 50
# Search GitHub Gist code
# PageDepth: WARNING - every page can contain up to 30 users and multiple links
# to scrape; large values slow down results very quickly
[GitHubGistSearch]
PageDepth: 10
QueryStart: 1
# Ask search engine settings
[AskSearch]
# Maximum number of result pages to request
QueryPageLimit: 50
QueryStart: 0
# Search GitHub users
# This can return a TON of users, but is dependent on the parser
[GitHubUserSearch]
PageDepth: 30
QueryStart: 1
# Settings for Reddit post search
[RedditPostSearch]
QueryLimit: 500
QueryStart: 0
# Settings for Google PDF search
[GooglePDFSearch]
StartQuantity: 0
QueryLimit: 500
QueryStart: 0
# Settings for Google doc search
[GoogleDocSearch]
StartQuantity: 0
QueryLimit: 500
QueryStart: 0
# Settings for Google xlsx search
[GoogleXlsxSearch]
StartQuantity: 0
QueryLimit: 500
QueryStart: 0
# Settings for Google pptx search
[GooglePptxSearch]
StartQuantity: 0
QueryLimit: 20
QueryStart: 0
# Settings for Google docx search
[GoogleDocxSearch]
StartQuantity: 0
QueryLimit: 500
QueryStart: 0
# Settings for PasteBin search via Google
[GooglePasteBinSearch]
StartQuantity: 100
QueryLimit: 800
QueryStart: 0
# Settings for Exalead search
[ExaleadSearch]
StartQuantity: 30
QueryLimit: 400
QueryStart: 0
# Settings for Exalead PPTX search
[ExaleadPPTXSearch]
StartQuantity: 30
QueryLimit: 400
QueryStart: 0
# Settings for Exalead PDF search
[ExaleadPDFSearch]
StartQuantity: 30
QueryLimit: 250
QueryStart: 0
# Settings for Exalead DOC search
[ExaleadDOCSearch]
StartQuantity: 30
QueryLimit: 250
QueryStart: 0
# Settings for Exalead DOCX search
[ExaleadDOCXSearch]
StartQuantity: 30
QueryLimit: 250
QueryStart: 0
# Settings for Exalead XLSX search
[ExaleadXLSXSearch]
StartQuantity: 30
QueryLimit: 250
QueryStart: 0