misc fixes and working on new module
L1ghtn1ng committed Feb 11, 2020
1 parent fe3d6a3 commit 4d48dcb
Showing 12 changed files with 55 additions and 16 deletions.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion LICENSES → README/LICENSES
@@ -1,6 +1,6 @@
Released under the GPL v 2.0.

-If you did not recieve a copy of the GPL, try http://www.gnu.org/.
+If you did not receive a copy of the GPL, try http://www.gnu.org/.

Copyright 2011 Christian Martorella

2 changes: 1 addition & 1 deletion tests/discovery/githubcode.py
@@ -2,7 +2,7 @@
from theHarvester.discovery.githubcode import RetryResult, ErrorResult, SuccessResult
from theHarvester.discovery.constants import MissingKey
from theHarvester.lib.core import Core
-from mock import MagicMock
+from unittest.mock import MagicMock
from requests import Response
import pytest

22 changes: 13 additions & 9 deletions theHarvester/__main__.py
@@ -24,21 +24,17 @@ async def start():
parser.add_argument('-d', '--domain', help='company name or domain to search', required=True)
parser.add_argument('-l', '--limit', help='limit the number of search results, default=500', default=500, type=int)
parser.add_argument('-S', '--start', help='start with result number X, default=0', default=0, type=int)
-parser.add_argument('-g', '--google-dork', help='use Google Dorks for Google search', default=False,
-                    action='store_true')
-parser.add_argument('-p', '--proxies', help='use proxies for requests, enter proxies in proxies.yaml',
-                    default=False, action='store_true')
-parser.add_argument('-s', '--shodan', help='use Shodan to query discovered hosts', default=False,
-                    action='store_true')
-parser.add_argument('-v', '--virtual-host', help='verify host name via DNS resolution and search for virtual hosts',
-                    action='store_const', const='basic', default=False)
+parser.add_argument('-g', '--google-dork', help='use Google Dorks for Google search', default=False, action='store_true')
+parser.add_argument('-p', '--proxies', help='use proxies for requests, enter proxies in proxies.yaml', default=False, action='store_true')
+parser.add_argument('-s', '--shodan', help='use Shodan to query discovered hosts', default=False, action='store_true')
+parser.add_argument('-v', '--virtual-host', help='verify host name via DNS resolution and search for virtual hosts', action='store_const', const='basic', default=False)
parser.add_argument('-e', '--dns-server', help='DNS server to use for lookup')
parser.add_argument('-t', '--dns-tld', help='perform a DNS TLD expansion discovery, default False', default=False)
parser.add_argument('-r', '--take-over', help='Check for takeovers', default=False, action='store_true')
parser.add_argument('-n', '--dns-lookup', help='enable DNS server lookup, default False', default=False, action='store_true')
parser.add_argument('-c', '--dns-brute', help='perform a DNS brute force on the domain', default=False, action='store_true')
parser.add_argument('-f', '--filename', help='save the results to an HTML and/or XML file', default='', type=str)
-parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, certspotter, crtsh, dnsdumpster,
+parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, bufferoverun, certspotter, crtsh, dnsdumpster,
dogpile, duckduckgo, exalead, github-code, google,
hunter, intelx,
linkedin, linkedin_links, netcraft, otx, securityTrails, spyse, threatcrowd,
@@ -186,6 +182,14 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
else:
print(e)

+elif engineitem == 'bufferoverun':
+    from theHarvester.discovery import bufferoverun
+    try:
+        bufferoverun_search = bufferoverun.SearchBufferover(word)
+        stor_lst.append(store(bufferoverun_search, engineitem, store_host=True, store_ip=True))
+    except Exception as e:
+        print(e)

elif engineitem == 'certspotter':
from theHarvester.discovery import certspottersearch
try:
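Every engine wired up this way follows the same awaitable contract: store() runs the module's process() coroutine, then collects hostnames and IPs from its getters when store_host/store_ip are set. The full store() signature is truncated in the hunk header above, so the sketch below is a simplified reconstruction of that flow, not the function as it appears in __main__.py; the all_hosts/all_ips accumulators are assumed names.

from typing import Any, List

all_hosts: List[str] = []  # assumed module-level accumulators
all_ips: List[str] = []

async def store(search_engine: Any, source: str, process_param: Any = None,
                store_host: bool = False, store_ip: bool = False) -> None:
    # Run the engine's async search, passing through an optional parameter.
    await (search_engine.process() if process_param is None
           else search_engine.process(process_param))
    if store_host:
        all_hosts.extend(await search_engine.get_hostnames())
    if store_ip:
        all_ips.extend(await search_engine.get_ips())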
1 change: 1 addition & 0 deletions theHarvester/discovery/__init__.py
@@ -1,5 +1,6 @@
__all__ = ['baidusearch',
'bingsearch',
+'bufferoverun',
'crtsh',
'certspottersearch',
'dnssearch',
34 changes: 34 additions & 0 deletions theHarvester/discovery/bufferoverun.py
@@ -0,0 +1,34 @@
+from theHarvester.lib.core import *
+import aiohttp
+import re
+
+
+class SearchBufferover:
+
+    def __init__(self, word):
+        self.word = word
+        self.totalhosts = set()
+        self.totalips = set()
+        self.proxy = False
+
+    async def do_search(self):
+        url = f'https://dns.bufferover.run/dns?q={self.word}'
+        headers = {'User-Agent': Core.get_user_agent()}
+        client = aiohttp.ClientSession(headers=headers, timeout=aiohttp.ClientTimeout(total=20))
+        responses = await AsyncFetcher.fetch(client, url, json=True, proxy=self.proxy)
+        await client.close()
+
+        dct = responses
+        # FDNS_A entries come back as 'ip,hostname' strings, so split on the comma.
+        self.totalhosts: set = {host.split(',')[1] for host in dct['FDNS_A']}
+        # Keep only dotted-quad addresses, which filters out entries whose
+        # address field is just 'NXDOMAIN'.
+        self.totalips: set = {ip.split(',')[0] for ip in dct['FDNS_A']
+                              if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip.split(',')[0])}
+
+    async def get_hostnames(self) -> set:
+        return self.totalhosts
+
+    async def get_ips(self) -> set:
+        return self.totalips
+
+    async def process(self, proxy=False):
+        self.proxy = proxy
+        await self.do_search()
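Taken on its own, the new module can be exercised like any other theHarvester discovery class. A minimal driver sketch (illustrative only, not part of the commit):

import asyncio
from theHarvester.discovery import bufferoverun

async def main():
    search = bufferoverun.SearchBufferover('example.com')
    await search.process()  # proxy defaults to False
    print(await search.get_hostnames())
    print(await search.get_ips())

asyncio.run(main())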
2 changes: 0 additions & 2 deletions theHarvester/discovery/constants.py
@@ -1,8 +1,6 @@
from theHarvester.lib.core import *
from typing import Union
import random
-import aiohttp
-import asyncio

googleUA = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 ' \
'Safari/537.36 '
2 changes: 1 addition & 1 deletion theHarvester/discovery/dnsdumpster.py
@@ -34,7 +34,7 @@ async def do_search(self):
self.results = await resp.text()
await session.close()
except Exception as e:
-print(f'An exception occured: {e}')
+print(f'An exception occurred: {e}')
self.totalresults += self.results

async def get_hostnames(self):
2 changes: 2 additions & 0 deletions theHarvester/discovery/githubcode.py
@@ -7,6 +7,7 @@
import urllib.parse as urlparse
import random

+
class RetryResult(NamedTuple):
time: float

@@ -96,6 +97,7 @@ async def do_search(self, page: Optional[int]) -> Tuple[str, dict, int, Any]:
else:
async with sess.get(url, ) as resp:
return await resp.text(), await resp.json(), resp.status, resp.links
+
@staticmethod
async def next_page_or_end(result: SuccessResult) -> Optional[int]:
if result.next_page is not None:
3 changes: 2 additions & 1 deletion theHarvester/lib/core.py
@@ -10,7 +10,7 @@
class Core:
@staticmethod
def version() -> str:
-return '3.1.1dev5'
+return '3.2.0dev0'

@staticmethod
def bing_key() -> str:
@@ -123,6 +123,7 @@ def get_supportedengines() -> Set[Union[str, Any]]:
supportedengines = {'baidu',
'bing',
'bingapi',
+'bufferoverun',
'certspotter',
'crtsh',
'dnsdumpster',
1 change: 0 additions & 1 deletion theHarvester/lib/reportgraph.py
@@ -4,7 +4,6 @@
import plotly.graph_objs as go


-
class GraphGenerator:

def __init__(self, domain):
