Skip to content

Commit

Permalink
some basic refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
AyemunHossain committed May 21, 2023
1 parent fc4425b commit 4d2be76
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 33 deletions.
Empty file removed About.md
Empty file.
33 changes: 11 additions & 22 deletions Wrapper/Attack.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,33 +7,33 @@
django.setup()
from common import main_common_pattern_to_traverse_a_website as MAIN_COMMON_PATTERN
from django.db import transaction

os.system("clear")

import requests
from core.models import LinkActionItem, LinkActionItemPost, LinkActionItemResponse, FormItem, FormDetailsItem, LinkItem
from bs4 import BeautifulSoup

LOGIN_LINK = ['login','Login','signin','sessions']
SANSATIVE_INFO = ["social security numbers","ssn", "driver license number", "financial identifiers", "citizen visa code","test scores", "Biometric identifiers", "Account balances", "Bank account number", "credit card number", "payment history", "income history", "expiration","CVV","CVV2","PIN","BIN"]


os.system("clear")

def _get_url():
os.chdir('idord_infograther')
file= open("link_to_crawl.txt","r")
try:
return file.readline()

except:
return None

BASE_LINK = _get_url()

def _get_api():
return [f"http://api.{BASE_LINK}",f"http://{BASE_LINK}",f"https://api.{BASE_LINK}",f"https://{BASE_LINK}"]

BASE_LINK = _get_url()
API_LINKS = _get_api()
BASE_LINK_GET = "http://"+BASE_LINK


LOGIN_LINK = ['login','Login','signin','sessions']
SANSATIVE_INFO = ["social security numbers","ssn", "driver license number", "financial identifiers", "citizen visa code","test scores", "Biometric identifiers", "Account balances", "Bank account number", "credit card number", "payment history", "income history", "expiration","CVV","CVV2","PIN","BIN"]


def has_numbers(inputString):
    """Return True if *inputString* contains at least one decimal digit."""
    for ch in inputString:
        if ch.isdigit():
            return True
    return False

Expand All @@ -44,10 +44,7 @@ def generateAttack():

for link in LinkItem.objects.all():
if(has_numbers(link.link)):

# full_link = BASE_LINK+str(link.link)
matching = re.split("\W+",str(link.link))
# print(','.join(out))
for i in range(len(matching)):

original= matching[i]
Expand All @@ -57,22 +54,17 @@ def generateAttack():
for j in range(10):
matching[i]=str(j)
obj = LinkActionItem.objects.create(link=BASE_LINK+'/'.join(matching),orginal_param=original,manupulated_param=matching[i])
#print(obj)
obj.save()

for API_LINK in API_LINKS:
obj2 = LinkActionItem.objects.create(link=API_LINK+'/'.join(matching),orginal_param=original,manupulated_param=matching[i])
#print(obj2)
obj2.save()

except Exception as e:
pass

try:
for API_LINK in API_LINKS:
for i in MAIN_COMMON_PATTERN:
for j in range(10):


try:
obj2 = LinkActionItem.objects.create(link=(API_LINK+'/'+str(i)+'/'+str(j)),manupulated_param=str(j))
obj2.save()
Expand All @@ -99,7 +91,7 @@ def generateAttack():
generateAttack()

def checkSansativeInfo(html_page):
from bs4 import BeautifulSoup

soup = BeautifulSoup(html_page, 'html.parser')
result = []
for th in soup.find_all('th'):
Expand Down Expand Up @@ -157,7 +149,6 @@ def get_attack():

if len(result)>0:
print(f"___________________________________________________\nGET : {actionItem.link}")
#LinkActionItemResponse.objects.create(action=actionItem,status="get_idor",effected_full_page=data.text)
print("_____________________________________________________")
return 1

Expand Down Expand Up @@ -284,8 +275,6 @@ def delete_attack():
break
else:
print("No PUT IDOR Vulnerability Found")


if(patch_attack()):
break
else:
Expand Down
10 changes: 1 addition & 9 deletions Wrapper/IDORD.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,6 @@ class colors:
UNDERLINE = '\033[4m'

# x = subprocess.check_output(['scrapy', 'crawl', 'prothomalo'], cwd='idord_infograther/')
# print(f">>>>>>>>>>>>>>>>>{x}>>>>>>>>>>>>>>")



#Below this for Production
from subprocess import Popen, PIPE
Expand Down Expand Up @@ -54,11 +51,7 @@ def configure_django():


def crawl():


try:


os.system(f"clear")
os.system(f"cd idord_infograther && scrapy crawl railsgoatNotLogin")
os.system(f"cd idord_infograther && scrapy crawl railsgoatLogin")
Expand Down Expand Up @@ -98,8 +91,7 @@ def sec_to_min(seconds):
attack()
end = time.time()

print(f"Runtime of the program is {sec_to_min((end - start))}")

print(f"Total time took: {sec_to_min((end - start))}")



Expand Down
3 changes: 1 addition & 2 deletions Wrapper/common.py
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
main_common_pattern_to_traverse_a_website = ["user","blog","account","users","blogs","accounts","post","posts",
"app","item","balance","query"]
# Common URL path segments used by Attack.py (via MAIN_COMMON_PATTERN) to
# enumerate candidate endpoints when probing a target site.
main_common_pattern_to_traverse_a_website = ["user","blog","account","users","blogs","accounts","post","posts","app","item","balance","query"]

0 comments on commit 4d2be76

Please sign in to comment.