sleep / jitter setup
killswitch-GUI committed Jul 1, 2016
1 parent 67fe027 commit f8282bb
Showing 13 changed files with 51 additions and 10 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -56,3 +56,6 @@ docs/_build/

# PyBuilder
target/

# db
.db
8 changes: 7 additions & 1 deletion Common/SimplyEmail.ini
@@ -9,6 +9,12 @@ VersionRepoCheckLocation: https://raw.githubusercontent.com/killswitch-GUI/Simpl
[ProcessConfig]
TotalProcs: 8

# Sets the default sleep time for all search
# engines, to help reduce Captcha issues.
[SleepConfig]
QuerySleep = 45
QueryJitter = 25

# API keys will be labeled
# by the service
[APIKeys]
@@ -19,7 +25,7 @@ Canario:
[HtmlScrape]
Depth: 1
Wait: 0
LimitRate: 10000k
LimitRate: 100000k
Timeout: 2
Maxfilesize:
Save:
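For context, a minimal sketch of how a module can read the new [SleepConfig] values with configparser, matching the mapping-style access used elsewhere in this commit (the ini path below is an assumption; modules may resolve it differently):

import configparser

config = configparser.ConfigParser()
config.read('Common/SimplyEmail.ini')  # assumed path for this sketch

# configparser accepts both "key = value" and "key: value", so the new
# QuerySleep / QueryJitter entries parse like the older colon-style settings.
Sleep = int(config['SleepConfig']['QuerySleep'])    # 45 seconds between queries
Jitter = int(config['SleepConfig']['QueryJitter'])  # 25, later normalized by helpers.modsleep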
6 changes: 3 additions & 3 deletions Helpers/Download.py
@@ -61,7 +61,7 @@ def download_file(self, url, filetype, maxfile=100, verify=True):
download = os.path.isfile(local_filename)
return local_filename, download

def download_file2(self, url, filetype, timeout=5):
def download_file2(self, url, filetype, timeout=10):
# using the filename is dangerous, could have UTF8 chars etc.
local_filename = randint(10000, 999999999)
# set name
@@ -117,14 +117,14 @@ def delete_file(self, local_filename):
def GoogleCaptchaDetection(self, RawHtml):
soup = BeautifulSoup(RawHtml, "lxml")
if "Our systems have detected unusual traffic" in soup.text:
p = " [!] Google Captcha was detected! (For best results stop/resolve/restart)"
p = " [!] Google Captcha was detected! (For best results resolve/restart -- Increase sleep/jitter in SimplyEmail.ini)"
self.logger.warning("Google Captcha was detected!")
print helpers.color(p, warning=True)
return True
else:
return False

def requesturl(self, url, useragent, timeout=5, retrytime=3, statuscode=False, raw=False, verify=True):
def requesturl(self, url, useragent, timeout=10, retrytime=5, statuscode=False, raw=False, verify=True):
"""
A very simple request function
This is set up to handle the following params:
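The bumped defaults (timeout 5 -> 10 seconds, retrytime 3 -> 5) give slow search engines more room before a request is abandoned. A rough, hypothetical sketch of the kind of timeout-plus-retry behavior these parameters configure (fetch_with_retry is illustrative only, and the real requesturl may interpret retrytime differently):

import time
import requests

def fetch_with_retry(url, useragent, timeout=10, retrytime=5, attempts=3):
    # hypothetical helper: retry a GET a few times, pausing between attempts
    headers = {'User-Agent': useragent}
    for _ in range(attempts):
        try:
            r = requests.get(url, headers=headers, timeout=timeout, verify=True)
            return r.text
        except requests.exceptions.RequestException:
            time.sleep(retrytime)
    return None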
13 changes: 9 additions & 4 deletions Helpers/helpers.py
@@ -8,6 +8,7 @@
import json
import configparser
import collections
import random
from fake_useragent import UserAgent

def dictToJson(inputDict):
@@ -112,11 +113,15 @@ def getua():
ua = UserAgent()
return ua.random

def modsleep(delay, jitter=0):
# Quick snippet from the EmPyre agent (@HarmJ0y)
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = 1.0 / jitter  # float division so integer ini values (e.g. 25) don't truncate to 0 on Python 2

def modsleep(st):
# sleep module for spec time
time.sleep(int(st))

minSleep = int((1.0-jitter)*delay)
maxSleep = int((1.0+jitter)*delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(int(sleepTime))

def filetype(path):
m = magic.from_file(str(path))
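To see what the new modsleep does with the commit's defaults (QuerySleep = 45, QueryJitter = 25), here is a small worked sketch of the jitter window, assuming the float-division normalization shown above:

import random

delay = 45           # QuerySleep default from this commit
jitter = 25          # QueryJitter default from this commit
if jitter > 1:
    jitter = 1.0 / jitter                 # 25 -> 0.04
min_sleep = int((1.0 - jitter) * delay)   # int(43.2) -> 43
max_sleep = int((1.0 + jitter) * delay)   # int(46.8) -> 46
print(random.randint(min_sleep, max_sleep))  # pause somewhere between 43 and 46 seconds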
4 changes: 4 additions & 0 deletions Modules/AskSearch.py
@@ -28,6 +28,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.PageLimit = int(config['AskSearch']['QueryPageLimit'])
self.Counter = int(config['AskSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.Domain = Domain
self.verbose = verbose
self.Html = ""
@@ -66,6 +68,8 @@ def process(self):
print helpers.color(error, warning=True)
self.Html += rawhtml
self.Counter += 1
helpers.modsleep(self.Sleep, jitter=self.Jitter)


def get_emails(self):
parse = Parser.Parser(self.Html)
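AskSearch.py shows the pattern every module in this commit follows: read QuerySleep and QueryJitter once in __init__, then call helpers.modsleep after each results page. A stripped-down sketch of that loop (attribute and helper names are simplified stand-ins, not the module's exact code):

# inside a module's process()/search() loop, simplified
while self.Counter <= self.PageLimit:
    url = self.base_url + str(self.Counter)   # hypothetical page-URL builder
    rawhtml = self.fetch(url)                 # stand-in for the module's request helper
    self.Html += rawhtml
    self.Counter += 1
    # sleep with jitter between pages to look less automated and avoid Captchas
    helpers.modsleep(self.Sleep, jitter=self.Jitter)

The remaining Google*, Yahoo, and CSV/document modules below repeat the same two config reads and the same modsleep call.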
3 changes: 3 additions & 0 deletions Modules/GoogleCsvSearch.py
@@ -29,6 +29,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GoogleCsvSearch']['QueryLimit'])
self.Counter = int(config['GoogleCsvSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -80,6 +82,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
try:
for url in self.urlList:
3 changes: 3 additions & 0 deletions Modules/GoogleDocSearch.py
@@ -31,6 +31,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GoogleDocSearch']['QueryLimit'])
self.Counter = int(config['GoogleDocSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -84,6 +86,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
try:
for url in self.urlList:
3 changes: 3 additions & 0 deletions Modules/GoogleDocxSearch.py
@@ -32,6 +32,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GoogleDocxSearch']['QueryLimit'])
self.Counter = int(config['GoogleDocxSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -79,6 +81,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
try:
for url in self.urlList:
5 changes: 4 additions & 1 deletion Modules/GooglePDFSearch.py
@@ -31,6 +31,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GooglePDFSearch']['QueryLimit'])
self.Counter = int(config['GooglePDFSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -52,7 +54,7 @@ def search(self):
p = ' [*] Google PDF Search on page: ' + str(self.Counter)
print helpers.color(p, firewall=True)
try:
urly = "https://www.google.com/search?q=site:" + \
urly = "https://www.google.com/search?q=" + \
self.Domain + "+filetype:pdf&start=" + str(self.Counter)
except Exception as e:
error = " [!] Major issue with Google Search:" + str(e)
@@ -81,6 +83,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
try:
for url in self.urlList:
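Beyond sleep/jitter, GooglePDFSearch.py also drops the site: operator from its query. With a placeholder domain of example.com, the constructed URLs before and after this change look roughly like:

# before: only PDFs hosted on the domain itself
"https://www.google.com/search?q=site:example.com+filetype:pdf&start=0"
# after: PDFs anywhere that match the domain text, still filtered to filetype:pdf
"https://www.google.com/search?q=example.com+filetype:pdf&start=0"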
3 changes: 3 additions & 0 deletions Modules/GooglePPTXSearch.py
@@ -30,6 +30,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GooglePptxSearch']['QueryLimit'])
self.Counter = int(config['GooglePptxSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -86,6 +88,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
try:
for url in self.urlList:
4 changes: 3 additions & 1 deletion Modules/GoogleSearch.py
@@ -27,6 +27,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['GoogleSearch']['QueryLimit'])
self.Counter = int(config['GoogleSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.Html = ""
except:
@@ -64,7 +66,7 @@ def search(self):
print e
self.Html += results
self.Counter += 100

helpers.modsleep(self.Sleep, jitter=self.Jitter)
def get_emails(self):
Parse = Parser.Parser(self.Html)
Parse.genericClean()
3 changes: 3 additions & 0 deletions Modules/GoogleXLSXSearch.py
@@ -33,6 +33,8 @@ def __init__(self, Domain, verbose=False):
self.UserAgent = {
'User-Agent': helpers.getua()}
self.Counter = int(config['GoogleXlsxSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.urlList = []
self.Text = ""
@@ -88,6 +90,7 @@ def search(self):
except:
pass
self.Counter += 10
helpers.modsleep(self.Sleep, jitter=self.Jitter)
# now download the required files
self.logger.debug(
"GoogleXlsxSearch completed HTML result query, starting downloads")
3 changes: 3 additions & 0 deletions Modules/YahooSearch.py
@@ -34,6 +34,8 @@ def __init__(self, Domain, verbose=False):
'User-Agent': helpers.getua()}
self.Limit = int(config['YahooSearch']['QueryLimit'])
self.Counter = int(config['YahooSearch']['QueryStart'])
self.Sleep = int(config['SleepConfig']['QuerySleep'])
self.Jitter = int(config['SleepConfig']['QueryJitter'])
self.verbose = verbose
self.Html = ""
except Exception as e:
@@ -72,6 +74,7 @@ def search(self):
results = r.content
self.Html += results
self.Counter += 100
helpers.modsleep(self.Sleep, jitter=self.Jitter)

def get_emails(self):
Parse = Parser.Parser(self.Html)
