Skip to content

Commit

Permalink
New documents for attack all auehehuf
Browse files Browse the repository at this point in the history
  • Loading branch information
jul10l14 committed Jan 2, 2019
1 parent 7deb50f commit f09b2a4
Show file tree
Hide file tree
Showing 17 changed files with 6,624 additions and 0 deletions.
674 changes: 674 additions & 0 deletions LICENSE

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions README.md
@@ -0,0 +1,2 @@
# D4N155
A security audit tool for building smart wordlists and performing Google hacking attacks
36 changes: 36 additions & 0 deletions main
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# D4N155 banner: switch the terminal foreground to green, print the
# ASCII-art logo verbatim, then reset the terminal color.
printf "\033[32m"
# The quoted "EOF" delimiter disables all expansion inside the
# here-document, so the art (full of $-free but shell-hostile glyphs)
# is emitted exactly as written.
cat << "EOF"
dmmmmdmmmddddddddddddddddddddmmdhmmmmdhhddddddds+//////+osydddddddddd
NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNmmmNNNNmmmdmmmmmdsss+++++++osymNNNNNmdhh
NNNNNNNNNNNNNNNNNNNNNNNNNNmNNNNNNMNNmmmNhddhdhsosddyso+++ossdNNNNNmhhh
NNNNNNNNNNNNNNNNNNNNNNNNNNNNMMMMNNNNNmNmmhdhhhhhysssys+++osymNNNNdhdmN
NNNNNNNNNNNNNNNNNNNNNNNNMMMMMMMNNNNNNNmmmdyooyhdsdso+++ossshNNNNNNNNNN
NNNNNNNNNNNNNNNNNNNNNMMMNNNNNNmmmmNNmmmhsso+/+osoo++/:+hdmmNMNNNNNNNNm
MMNNNNNNNNNNNNNNNNNNNNNmmNmNNNmddmNmdhyoo+::::::::::::+sdmmNMMNNNNNNNd
MMMMMNNNNNNNNNNNNNNNNddhddmNNdmddNmhhhso++:::::::::/+++ddNMMMNNNNNNNNN
NNNNNNNNNNNNNNNNNNNmmdhhhhhmmmdhdmNddho+++::::///::/:::smMMMMNNNNNNNNm
NNNNNNNNNNNNNNNNNMNNmmdmdddmNddmmNNNmho++///::::++ss+++odMMMMMNNNNNddh
NNNNNNNNNNNNMNNNNNMMNNNNNNNNmNNmmmmNdso++//++/::::/osssoNMMMNNNddddoyh
NNNNNNNNNNNNmdmNNNMMMMMNNNNNNNNNdhdmdso++++++o++oooosssdMMNNNNNh+hhhhh
NNNmdddmNNNdhhdmNNMMMMMNNmmNNNNNmNdhhys++/::/++//ohmddNMNNNNNNNmysdhhh
NNNmhhhhmmhhhhmmNNNMMMMNNmddNMMMMMNdddyo+/:::/oooyymNNMNNNmNNNNNNNNdhh
NNNmhhdhdNmhdmNNNNNNMNNNNmddmNMMMMMNhddhso+//:++oNNMMMNNNNmNNNNNmNNmdh
NNNdsdmNmNmddmmmmNNNNNNNNmhhhdNMMMNNddmmNmhssssydMMMMMNMNMNNNNNNNNNmhh
NNNmydNmdNmddmhhhddNNNNmmNmdhmNMMMNNNmmmmNMNNNNNMMMMMMMMMMMMNNNNNNNmdh
NNNhdNNmdNNmhhmmhhdNNMMNNNNNNNNMMMMMNNmmmmNNNMMMMNNNNMMMMMMMNNNNNmmmdh
NNNdsmNNdNNNmhmdmmNNNMMMMMNmNNMMMMMMNNmmmddhmNNNNNNNNNNNNNNNmmNNdhmNdh
NNNdsdNNNNNNNmmmmmNNNMMMNNNmmNNMMMMNNmdhysoshmNNNNNNNNNNNNmdmNNmdmmmdh
NNNdhmNNmNmdhhhymNNNNNNNNmmmmmmNNMMNmysoo+++oshdmmNMMMMMNNmmNNNmmNNmdh
NNNmhmNNdysooooosdmNNmmmmmmdddmNNNmdyo++++++++ossmmNmNNNNNNNNNNmddmmdh
NNNmddmyso++++++++osNdyhmNNNNNNmmyssy++++++++++++sydmmNNNNNNNmmdhyyddh
NNNNddyo+++/////++++omysydmNdddds+++++++:/++++++++hossyhmNNNNmhs++osyh
MNNNmys++/::::::::://oNdsssossy+:::/++++o-++++++++os+++oNMMMNmsoooossy
MNNNmys++/::::::/:::::sNhoo//::::::::+++s/+++++++++++oyNMMMMMmdyssssss
MMNNyss+++/:::::+/::::/yNddhs++/::::::/+++o++oo+syydNNMMMMMMMMNdhyssss
MMMNyss+++/::://++/::::sNNNNNNNmddddddyyyydddmmmNMMMMMMMMMMMMMMNmddhhh
NNNmhsoo////://+oo+///ymNNNNNNNNNNmmmmmNNNNNNNNNNNNNNNNNNNNNNNNNNNdddh
EOF
# Reset terminal attributes so later output is not green.
printf "\033[0m"


108 changes: 108 additions & 0 deletions objetive/.gitignore
@@ -0,0 +1,108 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# Vim
.*~
*~

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
62 changes: 62 additions & 0 deletions objetive/objetive.py
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
import mechanicalsoup # needed
import os
import sys

# Usage/help text, shown for -h/--help and whenever the target URL
# cannot be opened.
heelp = """
0BJ3T1V3: This software is a mini-crawler that aims to grab some text
parts from some website or ip that responds http*.
Objetive: python objetive.py [url] [option]
URL:
Is the host web, example: https://jul10l1r4.github.io
OPTION:
Can pass specific option.
-t, --title For get all titles, tags in "h1".
-txt, --text For get all text in paragraphs.
-a, --anchol For get all urls in tag "a"
"""
# Index cursor for the option-dispatch loop at the bottom of the file.
args = 0
# Connect to target: sys.argv[1] must be the URL to crawl.
browser = mechanicalsoup.StatefulBrowser()
try:
    browser.open(sys.argv[1])
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
    # Reached when no URL was given (IndexError) or the host is
    # unreachable: show usage and exit with a distinctive status.
    print(heelp)
    exit(3)
# All functions for get values
def text():
    """Print the text content of every <p> element on the current page."""
    for p in browser.get_current_page().select('p'):
        try:
            print(p.text)
        except Exception:
            # Narrowed from a bare `except:`; keep crawling even if one
            # element cannot be printed. (Message kept in Portuguese as
            # in the original: "There was an error on this url".)
            print("Houve um erro nesse url")

def title():
    """Print the text content of every <h1> element on the current page."""
    for h1 in browser.get_current_page().select('h1'):
        try:
            print(h1.text)
        except Exception:
            # Narrowed from a bare `except:`; skip elements that fail to
            # print ("There was an error getting this title").
            print("Houve um erro ao pegar esse titulo")
def links():
    """Print the anchor text of every <a> element on the current page."""
    for link in browser.get_current_page().select('a'):
        try:
            print(link.text)
        except Exception:
            # Narrowed from a bare `except:`; skip anchors that fail to
            # print ("There was an error getting this link").
            print("Houve um erro ao pegar esse link")

# Dispatch every command-line option to its handler. Iterating
# sys.argv[1:] replaces the original index loop that started at
# argv[0]; behavior is identical because the script path can never
# equal one of the option strings.
for arg in sys.argv[1:]:
    if arg in ('--text', '-txt'):
        text()
    elif arg in ('--title', '-t'):
        title()
    elif arg in ('--anchol', '-a'):
        links()
    elif arg in ('-h', '--help'):
        print(heelp)
        exit(0)
1 change: 1 addition & 0 deletions objetive/requeriments.txt
@@ -0,0 +1 @@
mechanicalsoup
Binary file added pagodo/.dorks.txt.un~
Binary file not shown.
Binary file added pagodo/.ghdb_scraper.py.un~
Binary file not shown.
Binary file added pagodo/.google_dorks.txt.un~
Binary file not shown.
Binary file added pagodo/.hackingdork.txt.un~
Binary file not shown.
Binary file added pagodo/.pagodo.py.un~
Binary file not shown.
Empty file added pagodo/blank.txt
Empty file.
54 changes: 54 additions & 0 deletions pagodo/ghdb_scraper.py
@@ -0,0 +1,54 @@
#!/usr/bin/env python

# Standard Python libraries.
import json

# Third party Python libraries.
import requests
from bs4 import BeautifulSoup # noqa

# Custom Python libraries.


def retrieve_google_dorks():
    """Retrieve all Google dorks from https://www.exploit-db.com/google-hacking-database

    Writes the extracted dork strings, one per line, to google_dorks.txt.
    Returns None; on a non-200 HTTP response it prints an error and
    returns early without writing the file.
    """

    url = "https://www.exploit-db.com/google-hacking-database"

    # The endpoint only answers with JSON when these AJAX-style headers
    # are present.
    header = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "deflate, gzip, br",
        "Accept-Language": "en-US",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0",
        "X-Requested-With": "XMLHttpRequest",
    }

    response = requests.get(url, headers=header, verify=True)

    if response.status_code != 200:
        print(f"[-] Error retrieving google dorks from: {url}")
        return

    # Extract json data.
    json_response = response.json()

    # Extract recordsTotal and data.
    total_records = json_response["recordsTotal"]
    json_dorks = json_response["data"]

    # Plain string literal (was an f-string with no placeholders).
    google_dork_file = "google_dorks.txt"
    with open(google_dork_file, "w") as fh:
        for dork in json_dorks:
            # Each dork title is an HTML anchor; pull out its text.
            soup = BeautifulSoup(dork["url_title"], "html.parser")
            anchor = soup.find("a")
            # Guard against entries with no anchor/text, which would
            # previously raise AttributeError/IndexError mid-write.
            if anchor is not None and anchor.contents:
                fh.write(f"{anchor.contents[0]}\n")

    print(f"[*] Total Google dorks retrieved: {total_records}")



# Script entry point: download the dork list, then report completion.
if __name__ == "__main__":
    retrieve_google_dorks()
    print("[+] Done!")

0 comments on commit f09b2a4

Please sign in to comment.