Merge pull request #171 from s0md3v/2.1.6
2.1.6
s0md3v committed Sep 10, 2022
2 parents 01c755d + 76e3c0e commit 4219512
Showing 8 changed files with 51 additions and 16 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,9 @@
#### 2.1.6
- Fixed multiple breaking bugs
- Export results as they come in multi-target mode
- Various improvements to output in multi-target mode
- Changed default chunk size 300->500 and threads 2->5

#### 2.1.5
- Fixed header comparison (will fix infinite bruteforce on some targets)
- Fixed catastrophic backtracking in some regexes (arjun used to get stuck)
2 changes: 1 addition & 1 deletion arjun/__init__.py
@@ -1 +1 @@
__version__ = '2.1.5'
__version__ = '2.1.6'
43 changes: 31 additions & 12 deletions arjun/__main__.py
@@ -10,7 +10,7 @@
from arjun.core.bruter import bruter
from arjun.core.exporter import exporter
from arjun.core.requester import requester
from arjun.core.anomaly import define
from arjun.core.anomaly import define, compare
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path

from arjun.plugins.heuristic import heuristic
@@ -24,12 +24,12 @@
parser.add_argument('-oT', help='Path for text output file.', dest='text_file')
parser.add_argument('-oB', help='Port for output to Burp Suite Proxy. Default port is 8080.', dest='burp_port', nargs='?', const=8080)
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
parser.add_argument('-t', help='Number of concurrent threads. (default: 2)', dest='threads', type=int, default=2)
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=300)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
parser.add_argument('-q', help='Quiet mode. No output.', dest='quiet', action='store_true')
parser.add_argument('--headers', help='Add headers. Separate multiple headers with a new line.', dest='headers', nargs='?', const=True)
parser.add_argument('--passive', help='Collect parameter names from passive sources like wayback, commoncrawl and otx.', dest='passive', nargs='?', const='-')
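
Two defaults changed here: threads 2->5 and chunk size 300->500, i.e. larger parameter batches probed with more concurrency. A minimal sketch of how such chunking and threading fit together (illustrative names, not Arjun's actual scheduler):

```python
from concurrent.futures import ThreadPoolExecutor

CHUNK_SIZE = 500  # new -c default (was 300)
THREADS = 5       # new -t default (was 2)

def scan_in_chunks(wordlist, probe):
    # Split the wordlist into batches of CHUNK_SIZE names and probe up to
    # THREADS batches concurrently. `probe` is a stand-in for the request logic.
    chunks = [wordlist[i:i + CHUNK_SIZE] for i in range(0, len(wordlist), CHUNK_SIZE)]
    with ThreadPoolExecutor(max_workers=THREADS) as pool:
        return list(pool.map(probe, chunks))
```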
@@ -102,7 +102,7 @@ def narrower(request, factors, param_groups):
return anomalous_params


def initialize(request, wordlist):
def initialize(request, wordlist, single_url=False):
"""
handles the parameter-finding process for a single request object
returns 'skipped' (on error), list on success
@@ -118,27 +118,37 @@ def initialize(request, wordlist):
else:
fuzz = random_str(6)
response_1 = requester(request, {fuzz: fuzz[::-1]})
print('%s Analysing HTTP response for anomalies' % run)
if single_url:
print('%s Analysing HTTP response for anomalies' % run)
fuzz = random_str(6)
response_2 = requester(request, {fuzz: fuzz[::-1]})
if type(response_1) == str or type(response_2) == str:
return 'skipped'
factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
print('%s Analysing HTTP response for potential parameter names' % run)
if single_url:
print('%s Analysing HTTP response for potential parameter names' % run)
found = heuristic(response_1.text, wordlist)
if found:
num = len(found)
s = 's' if num > 1 else ''
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
print('%s Logicforcing the URL endpoint' % run)
if single_url:
print('%s Logicforcing the URL endpoint' % run)
populated = populate(wordlist)
param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks']))
prev_chunk_count = len(param_groups)
last_params = []
while True:
param_groups = narrower(request, factors, param_groups)
if len(param_groups) > prev_chunk_count:
response_3 = requester(request, {fuzz: fuzz[::-1]})
if compare(response_3, factors, [fuzz])[0] != '':
print('%s Target is misbehaving. Try the --stable switch.' % bad)
return []
if mem.var['kill']:
return 'skipped'
param_groups = confirm(param_groups, last_params)
prev_chunk_count = len(param_groups)
if not param_groups:
break
confirmed_params = []
@@ -147,7 +157,7 @@ def initialize(request, wordlist):
if reason:
name = list(param.keys())[0]
confirmed_params.append(name)
print('%s name: %s, factor: %s' % (res, name, reason))
print('%s parameter detected: %s, based on: %s' % (res, name, reason))
return confirmed_params
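
The new prev_chunk_count bookkeeping above is a stability guard: narrowing should only ever shrink the number of parameter groups, so if a pass produces more groups than the last one, the scanner re-sends the known-inert random parameter and compares the response against the baseline factors; any anomaly means the target itself is fluctuating. A condensed, illustrative restatement of that loop (dependencies passed in to keep the sketch self-contained; the real loop also handles kill signals and confirmation retries):

```python
def narrow_with_stability_check(request, factors, groups, fuzz,
                                narrower, requester, compare):
    # Condensed sketch of the loop in initialize(); not the exact control flow.
    prev_count = len(groups)
    while groups:
        groups = narrower(request, factors, groups)
        if len(groups) > prev_count:
            # Group count grew: responses are drifting. Re-test a random,
            # known-inert parameter against the baseline factors.
            response = requester(request, {fuzz: fuzz[::-1]})
            if compare(response, factors, [fuzz])[0] != '':
                return []  # misbehaving target, bail out
        prev_count = len(groups)
    return groups
```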


@@ -169,12 +179,17 @@ def main():
final_result[url]['params'] = these_params
final_result[url]['method'] = request['method']
final_result[url]['headers'] = request['headers']
exporter(final_result)
else:
print('%s No parameters were discovered.' % info)
elif type(request) == list:
# in case of multiple targets
count = 0
for each in request:
count += 1
url = each['url']
mem.var['kill'] = False
print('%s Scanning: %s' % (run, url))
print('%s Scanning %d/%d: %s' % (run, count, len(request), url))
these_params = initialize(each, list(wordlist))
if these_params == 'skipped':
print('%s Skipped %s due to errors' % (bad, url))
@@ -183,12 +198,16 @@ def main():
final_result[url]['params'] = these_params
final_result[url]['method'] = each['method']
final_result[url]['headers'] = each['headers']
print('%s Parameters found: %s' % (good, ', '.join(final_result[url])))
exporter(final_result)
print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
if not mem.var['json_file']:
final_result = {}
continue
else:
print('%s No parameters were discovered.\n' % info)
except KeyboardInterrupt:
exit()

exporter(final_result)


if __name__ == '__main__':
main()
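
This restructuring is what the changelog means by "export results as they come": in multi-target mode, exporter() now runs after every finished target, and the accumulator is reset unless a JSON file is collecting everything. A small sketch of the pattern with placeholder scan/export callables:

```python
def scan_targets(targets, scan, export, accumulate=False):
    # Illustrative only: flush results per target instead of once at the end,
    # so partial output survives an interrupted multi-target run.
    results = {}
    for target in targets:
        found = scan(target)
        if not found:
            continue
        results[target] = found
        export(results)       # write what we have so far
        if not accumulate:    # mirrors the mem.var['json_file'] check above
            results = {}
```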
6 changes: 5 additions & 1 deletion arjun/core/anomaly.py
@@ -57,6 +57,8 @@ def compare(response, factors, params):
detects anomalies by comparing an HTTP response against a rule list
returns string, list (anomaly, list of parameters that caused it)
"""
if response == '':
return ('', [])
these_headers = list(response.headers.keys())
these_headers.sort()
if factors['same_code'] and response.status_code != factors['same_code']:
@@ -67,7 +69,7 @@
if factors['same_redirect'] and urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params)
elif factors['same_redirect'] and 'Location' in response.headers:
if urlparse(response.headers.get['Location']).path != factors['same_redirect']:
if urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params)
if factors['same_body'] and response.text != factors['same_body']:
return ('body length', params)
@@ -87,6 +89,8 @@
return ('param name reflection', params)
if factors['value_missing']:
for value in params.values():
if type(value) != str:
continue
if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
return ('param value reflection', params)
return ('', [])
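
compare() gains two defensive guards: an empty response (the requester's error value) now short-circuits to "no anomaly", and non-string values are skipped before the reflection regex runs. A self-contained sketch of the hardened value-reflection check (re.escape added here for safety; the original interpolates the raw value):

```python
import re

def value_reflected(response_text, params):
    # Sketch of the guarded reflection check; illustrative only.
    if response_text == '':  # the requester signals an error with a string
        return False
    for value in params.values():
        if not isinstance(value, str):  # e.g. nested dicts/lists from --include
            continue
        if value in response_text and re.search(r'[\'"\s]%s[\'"\s]' % re.escape(value), response_text):
            return True
    return False
```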
2 changes: 1 addition & 1 deletion arjun/core/exporter.py
@@ -33,7 +33,7 @@ def text_export(result):
"""
exports results to a text file, one url per line
"""
with open(mem.var['text_file'], 'w+', encoding='utf8') as text_file:
with open(mem.var['text_file'], 'a+', encoding='utf8') as text_file:
for url, data in result.items():
clean_url = url.lstrip('/')
if data['method'] == 'JSON':
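
Switching 'w+' to 'a+' makes each flush append instead of overwrite, which is what lets the multi-target loop call exporter() repeatedly without clobbering earlier results. In miniature:

```python
def flush_lines(path, lines):
    # 'a' appends, so successive per-target flushes accumulate in the file;
    # with 'w' each call would erase the previous targets' output.
    with open(path, 'a', encoding='utf8') as text_file:
        for line in lines:
            text_file.write(line + '\n')
```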
2 changes: 1 addition & 1 deletion arjun/core/requester.py
@@ -15,7 +15,7 @@ def requester(request, payload={}):
central function for making http requests
returns str on error, otherwise a response object from the requests library
"""
if 'include' in request and request['include']:
if len(request.get('include', '')) != 0:
payload.update(request['include'])
if mem.var['stable']:
mem.var['delay'] = random.choice(range(6, 12))
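
The rewritten condition treats a missing 'include' key and an empty one uniformly: len() is 0 for '', {} and [] alike, so one check covers every default. For example (stub request dicts, not Arjun's own types):

```python
def merge_include(payload, request):
    # Only merge when 'include' exists and is non-empty.
    if len(request.get('include', '')) != 0:
        payload.update(request['include'])
    return payload

merge_include({}, {})                           # -> {}
merge_include({}, {'include': {}})              # -> {}
merge_include({}, {'include': {'token': 'x'}})  # -> {'token': 'x'}
```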
2 changes: 2 additions & 0 deletions arjun/core/utils.py
@@ -122,6 +122,8 @@ def get_params(include):
if include.startswith('{'):
try:
params = json.loads(str(include).replace('\'', '"'))
if type(params) != dict:
return {}
return params
except json.decoder.JSONDecodeError:
return {}
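
The guard matters because json.loads() happily returns lists, strings, and numbers for valid JSON; only an object should be treated as a parameter dict. A stand-alone mirror of the fixed branch (hypothetical helper name):

```python
import json

def parse_params(include):
    # Mirrors the guarded branch of get_params(); illustrative only.
    try:
        params = json.loads(str(include).replace('\'', '"'))
        if type(params) != dict:
            return {}
        return params
    except json.decoder.JSONDecodeError:
        return {}

parse_params("{'debug': '1'}")  # -> {'debug': '1'}
parse_params("[1, 2, 3]")       # -> {} (valid JSON, but not an object)
parse_params("oops")            # -> {}
```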
4 changes: 4 additions & 0 deletions arjun/plugins/heuristic.py
@@ -3,6 +3,8 @@
from arjun.core.utils import extract_js

re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')


def is_not_junk(param):
return (re_not_junk.match(param) is not None)

@@ -11,6 +13,8 @@ def is_not_junk(param):
re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')


def heuristic(response, wordlist):
potential_params = []

