
Commit

v2.2.2 build
s0md3v committed Nov 16, 2023
1 parent 10be64d commit faf9d30
Showing 6 changed files with 48 additions and 30 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,10 @@
#### 2.2.2
- Probing improvements
- Fix "target is misbehaving" errors
- Variable chunk size depending on HTTP method
- Improved heuristics
- Allow up to 20 "server fault" errors

#### 2.2.0
- Ability to detect parameters that respond to a certain value e.g. "?debug=yes"
- Added "required parameter" detection
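The "variable chunk size depending on HTTP method" entry maps to the arjun/__main__.py changes below: the default -c value drops to 250, and non-GET methods are bumped back up to 500. A minimal sketch of that selection logic, presumably motivated by URL-length limits on GET query strings (the standalone function and its name are illustrative, not Arjun's actual API):

# Sketch of the new chunk-size default: GET keeps a smaller chunk,
# likely to keep query strings short; body-based methods use 500.
def pick_chunk_size(method: str) -> int:
    if method.upper() != 'GET':
        return 500
    return 250

print(pick_chunk_size('GET'))    # 250
print(pick_chunk_size('POST'))   # 500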
2 changes: 1 addition & 1 deletion arjun/__init__.py
@@ -1 +1 @@
__version__ = '2.2.1'
__version__ = '2.2.2'
28 changes: 20 additions & 8 deletions arjun/__main__.py
@@ -30,7 +30,7 @@
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=250)
parser.add_argument('-q', help='Quiet mode. No output.', dest='quiet', action='store_true')
parser.add_argument('--headers', help='Add headers. Separate multiple headers with a new line.', dest='headers', nargs='?', const=True)
parser.add_argument('--passive', help='Collect parameter names from passive sources like wayback, commoncrawl and otx.', dest='passive', nargs='?', const='-')
@@ -58,6 +58,9 @@

mem.var['method'] = mem.var['method'].upper()

if mem.var['method'] != 'GET':
mem.var['chunks'] = 500

if mem.var['stable'] or mem.var['delay']:
mem.var['threads'] = 1
if mem.var['wordlist'] in ('large', 'medium', 'small'):
@@ -117,18 +120,27 @@ def initialize(request, wordlist, single_url=False):
if not request['url']:
return 'skipped'
else:
fuzz = random_str(6)
response_1 = requester(request, {fuzz: fuzz[::-1]})
fuzz = "z" + random_str(6)
response_1 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
if single_url:
print('%s Analysing HTTP response for anomalies' % run)
fuzz = random_str(6)
response_2 = requester(request, {fuzz: fuzz[::-1]})
response_2 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
if type(response_1) == str or type(response_2) == str:
return 'skipped'

# params from response must be extracted before factors but displayed later
found, words_exist = heuristic(response_1, wordlist)

factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
zzuf = "z" + random_str(6)
response_3 = requester(request, {zzuf[:-1]: zzuf[::-1][:-1]})
while factors:
reason = compare(response_3, factors, {zzuf[:-1]: zzuf[::-1][:-1]})[2]
if not reason:
break
factors[reason] = []
if single_url:
print('%s Analysing HTTP response for potential parameter names' % run)
found, words_exist = heuristic(response_1, wordlist)
if found:
num = len(found)
if words_exist:
@@ -147,8 +159,8 @@ def initialize(request, wordlist, single_url=False):
while True:
param_groups = narrower(request, factors, param_groups)
if len(param_groups) > prev_chunk_count:
response_3 = requester(request, {fuzz: fuzz[::-1]})
if compare(response_3, factors, {fuzz: fuzz[::-1]}) != '':
response_3 = requester(request, {zzuf[:-1]: zzuf[::-1][:-1]})
if compare(response_3, factors, {zzuf[:-1]: zzuf[::-1][:-1]})[0] != '':
print('%s Target is misbehaving. Try the --stable switch.' % bad)
return []
if mem.var['kill']:
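The new block above sends an extra junk-parameter probe (zzuf) and keeps calling compare() until no comparison factor is tripped; compare()'s new third return value names the factor that fired, and that factor is then emptied before the real scan starts. A condensed sketch of that calibration idea, with a stand-in compare function, factor dict, and response (all illustrative, not Arjun's real objects):

# Condensed sketch: reset comparison factors that fire on a harmless probe,
# so only stable factors survive for the actual parameter scan.
def prune_unstable_factors(factors, compare_fn, probe_response, probe_params):
    while factors:
        reason = compare_fn(probe_response, factors, probe_params)[2]
        if not reason:          # nothing tripped: remaining factors look stable
            break
        factors[reason] = []    # blank out the factor that proved unstable
    return factors

The probe name and value are both trimmed to six characters (fuzz[:-1] and fuzz[::-1][:-1]), which appears to line up with the six-character length that the value-reflection check in anomaly.py expects.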
24 changes: 12 additions & 12 deletions arjun/core/anomaly.py
@@ -58,39 +58,39 @@ def compare(response, factors, params):
returns string, list (anomaly, list of parameters that caused it)
"""
if response == '':
return ('', [])
return ('', [], '')
these_headers = list(response.headers.keys())
these_headers.sort()
if factors['same_code'] and response.status_code != factors['same_code']:
return ('http code', params)
return ('http code', params, 'same_code')
if factors['same_headers'] and these_headers != factors['same_headers']:
return ('http headers', params)
return ('http headers', params, 'same_headers')
if mem.var['disable_redirects']:
if factors['same_redirect'] and urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params)
return ('redirection', params, 'same_redirect')
elif factors['same_redirect'] and 'Location' in response.headers:
if urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params)
return ('redirection', params, 'same_redirect')
if factors['same_body'] and response.text != factors['same_body']:
return ('body length', params)
return ('body length', params, 'same_body')
if factors['lines_num'] and response.text.count('\n') != factors['lines_num']:
return ('number of lines', params)
return ('number of lines', params, 'lines_num')
if factors['same_plaintext'] and remove_tags(response.text) != factors['same_plaintext']:
return ('text length', params)
return ('text length', params, 'same_plaintext')
if factors['lines_diff']:
for line in factors['lines_diff']:
if line not in response.text:
return ('lines', params)
return ('lines', params, 'lines_diff')
if type(factors['param_missing']) == list:
for param in params.keys():
if len(param) < 5:
continue
if param not in factors['param_missing'] and re.search(r'[\'"\s]%s[\'"\s]' % param, response.text):
return ('param name reflection', params)
return ('param name reflection', params, 'param_missing')
if factors['value_missing']:
for value in params.values():
if type(value) != str or len(value) != 6:
continue
if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
return ('param value reflection', params)
return ('', [])
return ('param value reflection', params, 'value_missing')
return ('', [], '')
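Both reflection checks above only count a hit when the candidate name or value sits between quotes or whitespace, so a substring buried inside a longer identifier is ignored. A small illustration of that boundary test, using made-up response text and a helper name that is not part of Arjun:

import re

def looks_reflected(token, body):
    # same boundary idea as compare(): the token must be delimited by
    # quotes or whitespace on both sides
    return bool(re.search(r'[\'"\s]%s[\'"\s]' % token, body))

print(looks_reflected('debug', '{"debug": true}'))        # True  - quoted key
print(looks_reflected('debug', 'debugging in progress'))  # False - substring only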
9 changes: 6 additions & 3 deletions arjun/core/error_handler.py
@@ -35,16 +35,19 @@ def error_handler(response, factors):
return 'kill'
else:
if factors['same_code'] != response.status_code:
mem.var['kill'] = True
print('%s Server received a bad request. Try decreasing the chunk size with -c option' % bad)
return 'kill'
mem.var['bad_req_count'] = mem.var.get('bad_req_count', 0) + 1
if mem.var['bad_req_count'] > 20:
mem.var['kill'] = True
print('%s Server received a bad request. Try decreasing the chunk size with -c option' % bad)
return 'kill'
else:
return 'ok'
else:
if 'Timeout' in response:
if mem.var['timeout'] > 20:
mem.var['kill'] = True
print('%s Connection timed out, unable to increase timeout further' % bad)
print('%s Target might have a rate limit in place, try --stable switch' % bad)
return 'kill'
else:
print('%s Connection timed out, increased timeout by 5 seconds' % bad)
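This is the "allow up to 20 'server fault' errors" item from the changelog: instead of killing the scan on the first unexpected status code, error_handler now counts such responses in mem.var and only aborts once the count passes 20. A standalone sketch of the same counting pattern (the plain dict here stands in for Arjun's mem.var):

# Standalone sketch of the new tolerance: absorb occasional server faults
# and only abort once more than `limit` of them have been seen.
def handle_server_fault(state, limit=20):
    state['bad_req_count'] = state.get('bad_req_count', 0) + 1
    if state['bad_req_count'] > limit:
        state['kill'] = True
        return 'kill'   # caller stops and suggests a smaller -c chunk size
    return 'ok'         # keep scanning; the fault may be transient

state = {}
results = [handle_server_fault(state) for _ in range(21)]
print(results.count('ok'), results[-1])   # 20 tolerated faults, then 'kill'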
8 changes: 2 additions & 6 deletions arjun/plugins/heuristic.py
@@ -7,8 +7,7 @@
# TODO: for map keys, javascript tolerates { param: "value" }
re_words = re.compile(r'[A-Za-z][A-Za-z0-9_]*')
re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
re_input_names = re.compile(r'''(?i)<input.+?name=["']?([^"'\s>]+)''')
re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
re_inputs = re.compile(r'''(?i)<(?:input|textarea)[^>]+?(?:id|name)=["']?([^"'\s>]+)''')
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')

@@ -28,12 +27,9 @@ def heuristic(raw_response, wordlist):
words_exist = True
potential_params = re_words.findall(response)
# Parse Inputs
input_names = re_input_names.findall(response)
input_names = re_inputs.findall(response)
potential_params += input_names

input_ids = re_input_ids.findall(response)
potential_params += input_ids

# Parse Scripts
for script in extract_js(response):
empty_vars = re_empty_vars.findall(script)
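The heuristic change folds the two separate <input name=...> and <input id=...> patterns into one re_inputs expression that also matches <textarea> and accepts either attribute. A quick check of what the merged regex extracts from a made-up HTML fragment:

import re

# identical pattern to the new re_inputs in arjun/plugins/heuristic.py
re_inputs = re.compile(r'''(?i)<(?:input|textarea)[^>]+?(?:id|name)=["']?([^"'\s>]+)''')

sample = '''
<input type="text" name="username">
<INPUT id=csrf_token type=hidden>
<textarea name="comment"></textarea>
'''

print(re_inputs.findall(sample))
# on this made-up sample: ['username', 'csrf_token', 'comment']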
