
Merge pull request #1 from gwen001/master
Update of Fork
bytew0lf committed Oct 21, 2020
2 parents 1d3393e + a9af5c9 commit 64b3336
Showing 12 changed files with 167 additions and 55 deletions.
12 changes: 12 additions & 0 deletions .github/FUNDING.yml
@@ -0,0 +1,12 @@
# These are supported funding model platforms

github: [gwen001]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
2 changes: 1 addition & 1 deletion README.md
@@ -5,7 +5,7 @@ I don't believe in licenses.
You can do whatever you want with this program.

However, there is a way to support :)
<a href="https://www.buymeacoffee.com/gwendallecoguic" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/default-yellow.png" alt="Buy Me A Coffee" style="height: 51px !important;width: 217px !important;" width="217" ></a>
<a href="https://github.com/sponsors/gwen001" title="Sponsor gwen001"><img src="https://raw.githubusercontent.com/gwen001/pentest-tools/master/github-sponsor.jpg" alt="Sponsor gwen001" title="Sponsor gwen001"></a>


### arpa.sh
2 changes: 1 addition & 1 deletion bbhost.sh
@@ -14,7 +14,7 @@ else
fi
#echo $output

parallel -j 10 "host " :::: $input | tee -a $output
parallel -j 20 "host " :::: $input | tee -a $output
exit;

for h in $(cat $input) ; do
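
The only functional change to bbhost.sh is raising GNU parallel's job count for the `host` lookups from 10 to 20. For readers who prefer Python, a rough equivalent of that pattern, resolving a list of hostnames with a bounded worker pool, could look like the sketch below; the pool size, input file name, and use of socket.gethostbyname() are illustrative assumptions rather than anything taken from the script.

```python
# Sketch only: bulk-resolve hostnames with a bounded worker pool, mirroring
# the effect of `parallel -j 20 "host " :::: $input`. The pool size, input
# file name and use of gethostbyname() are illustrative assumptions.
import socket
from concurrent.futures import ThreadPoolExecutor

def resolve(hostname):
    try:
        return "%s has address %s" % (hostname, socket.gethostbyname(hostname))
    except socket.gaierror as e:
        return "%s: lookup failed (%s)" % (hostname, e)

if __name__ == "__main__":
    with open("hosts.txt") as f:                      # assumed input file
        hosts = [l.strip() for l in f if l.strip()]
    with ThreadPoolExecutor(max_workers=20) as pool:  # 20 workers, like -j 20
        for line in pool.map(resolve, hosts):
            print(line)
```

Since each lookup is network-bound, a larger pool mostly trades faster completion for more concurrent DNS traffic.
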
62 changes: 31 additions & 31 deletions extract-endpoints.php
@@ -360,9 +360,9 @@ function usage( $err=null ) {
} else {
$buffer = file_get_contents( $s );
}

ob_start();

if( $_mode == MODE_KEYWORD )
{
$ss = escapeshellcmd( $s );
@@ -373,14 +373,14 @@ function usage( $err=null ) {
echo $cmd."\n";
exec( $cmd, $output );
$n_sensitive = printColoredGrep( $_keywords_sensitive_regexp, implode("\n",$output), 1 );

if( $_keywords_insensitive_regexp != $_keywords_sensitive_regexp ) {
$output = null;
$cmd = 'egrep -i -n "'.$_keywords_insensitive_regexp.'" "'.$ss.'"';
exec( $cmd, $output );
$n_insensitive = printColoredGrep( $_keywords_insensitive_regexp, implode("\n",$output), 0 );
}

$n_total = $n_sensitive + $n_insensitive;
if( $_verbose < 2 ) {
echo $n_total." keywords found!\n";
@@ -415,8 +415,8 @@ function usage( $err=null ) {
clean( $t_final );
$n_final = count($t_final);
$n_possible = count($t_possible);
if( $n_final ) {

if( $n_final ) {
$t_final = array_unique( $t_final );
$n_final = count( $t_final );
foreach( $t_final as $u ) {
@@ -437,14 +437,14 @@ function usage( $err=null ) {
if( $_verbose < 2 ) {
echo $n_final." urls found!\n";
}

if( $n_possible && $_verbose<2 ) {
Utils::_println( str_repeat('-',100), 'light_grey' );
$t_possible = array_unique( $t_possible );
Utils::_println( implode( "\n",$t_possible), 'light_grey' );
Utils::_println( $n_possible." possible...", 'light_grey' );
}

$n_total = $n_possible + $n_final;
}

@@ -475,9 +475,9 @@ function testUrl( $url, $follow_location )
curl_setopt( $c, CURLOPT_FOLLOWLOCATION, $follow_location );
curl_setopt( $c, CURLOPT_RETURNTRANSFER, true );
$r = curl_exec( $c );

$t_info = curl_getinfo( $c );

return $t_info['http_code'];
}

@@ -488,13 +488,13 @@ function printColoredGrep( $regexp, $str, $case_sensitive )
//$l = strlen( $str );
//$m = preg_match_all( '#'.$regexp.'#i', $str, $matches, PREG_OFFSET_CAPTURE );
//var_dump( $matches );

if( $case_sensitive ) {
$flag = '';
} else {
$flag = 'i';
}

$colored = preg_replace( '#'.$regexp.'#'.$flag, "\033[0;32m".'\\1'."\033[0m", $str, -1, $cnt );
if( $cnt ) {
echo $colored."\n";
@@ -515,7 +515,7 @@ function printColoredGrep( $regexp, $str, $case_sensitive )
//break;
}
}
$s3 = substr( $str, $p );
Utils::_print( $s3, 'white' );*/
return $cnt;
@@ -528,7 +528,7 @@ function run( $buffer )
//var_dump( $_regexp );

$t_all = [];

foreach( $_regexp as $r ) {
$m = preg_match_all( $r.'i', $buffer, $matches );
//var_dump( $matches );
@@ -537,7 +537,7 @@ function run( $buffer )
$t_all = array_merge( $t_all, $matches[1] );
}
}

$t_exclude_extension = [ ];
$t_exclude_domain = [ ];
$t_exclude_scheme = [ 'javascript', 'mailto', 'data', 'about', 'file' ];
@@ -552,67 +552,67 @@ function run( $buffer )
{
//var_dump($url);
//$url = urldecode( $url );

$test = preg_replace( '#[^0-9a-zA-Z]#', '', $url );
if( $test == '' ) {
unset( $t_all[$k] );
continue;
}

$parse = parse_url( $url );
//var_dump($parse);
if( !$parse ) {
unset( $t_all[$k] );
$t_possible[] = $url;
continue;
}

foreach( $t_exclude_string as $s ) {
if( strstr($url,$s) ) {
unset( $t_all[$k] );
$t_possible[] = $url;
continue;
}
}

foreach( $t_exclude_possible as $s ) {
if( strstr($url,$s) ) {
unset( $t_all[$k] );
$t_possible[] = $url;
continue;
}
}

if( isset($parse['scheme']) && in_array($parse['scheme'],$t_exclude_scheme) ) {
unset( $t_all[$k] );
$t_possible[] = $url;
continue;
}

if( isset($parse['path']) && is_array($_ignore) && count($_ignore) ) {
$p = strrpos( $parse['path'], '.' );
if( $p !== false ) {
$ext = substr( $parse['path'], $p+1 );
$ext = substr( $parse['path'], $p+1 );
if( in_array($ext,$_ignore) ) {
unset( $t_all[$k] );
continue;
}
}
}

if( $url[0] == '#' ) {
unset( $t_all[$k] );
$t_possible[] = $url;
continue;
}

if( isset($parse['path']) )
{
if( strstr($parse['path'],' ') !== false ) {
$tmp = explode( ' ', $parse['path'] );
$parse['path'] = $tmp[0];
}

$kk = preg_replace('|'.$_url_chars.'|i','',$parse['path']);
if( strlen($kk) != 0 ) {
unset( $t_all[$k] );
@@ -621,7 +621,7 @@ function run( $buffer )
}
}
}

//var_dump($t_all);
return [$t_all,$t_possible];
}
@@ -630,32 +630,32 @@ function run( $buffer )
function clean( &$t_urls )
{
global $_scheme, $_host, $_ignore;

$scheme = $host = '';

foreach( $t_urls as &$u )
{
//var_dump( $u );
$scheme = $host = '';
$parse = parse_url( $u );
//var_dump( $parse );

if( isset($parse['host']) ) {
$host = $parse['host'];
} elseif( $_host ) {
$host = $_host;
$u = ltrim( $u, '/' );
$u = $host . '/' . $u;
}

if( isset($parse['scheme']) && $parse['scheme'] != NULL ) {
$scheme = $parse['scheme'];
} elseif( $host ) {
$scheme = $_scheme;
$u = ltrim( $u, '/' );
$u = $scheme . '://' . $u;
}

if( strstr($u,' ') !== false ) {
$tmp = explode( ' ', $u );
$u = $tmp[0];
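
The extract-endpoints.php hunks above are almost entirely whitespace and ordering tweaks, but they pass through the core of run() and clean(): pull URL-like strings out of a buffer with regexes, set aside doubtful matches as "possible", drop schemes such as javascript: or mailto:, and fill in a default scheme and host. A minimal Python sketch of that flow follows; the regex, the default scheme/host, and the sample input are illustrative assumptions, not the script's exact rules.

```python
# Sketch only: extract and normalize endpoint-like strings from a text buffer.
# The regex, exclusions and defaults are illustrative, not extract-endpoints.php's own.
import re
from urllib.parse import urlparse

EXCLUDED_SCHEMES = {"javascript", "mailto", "data", "about", "file"}
URL_RE = re.compile(
    r"""['"(]([a-z][a-z0-9+.-]*:[^\s'"()<>]+|//[^\s'"()<>]+|/[^\s'"()<>]+)""",
    re.I)

def extract_endpoints(buffer, default_scheme="https", default_host="target.tld"):
    final, possible = set(), set()
    for url in URL_RE.findall(buffer):
        parsed = urlparse(url)
        if parsed.scheme and parsed.scheme.lower() in EXCLUDED_SCHEMES:
            possible.add(url)          # keep it visible, but only as a "possible" hit
            continue
        scheme = parsed.scheme or default_scheme
        host = parsed.netloc or default_host
        final.add("%s://%s%s" % (scheme, host, parsed.path or "/"))
    return sorted(final), sorted(possible)

if __name__ == "__main__":
    sample = ('<a href="/api/v1/users">u</a>'
              '<script src="//cdn.target.tld/app.js"></script>'
              '<a href="mailto:admin@target.tld">mail</a>')
    urls, maybes = extract_endpoints(sample)
    print("\n".join(urls))
    print("possible: %s" % ", ".join(maybes))
```
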
22 changes: 16 additions & 6 deletions favicon-hashtrick.py
@@ -15,6 +15,11 @@
from colored import fg, bg, attr


# disable "InsecureRequestWarning: Unverified HTTPS request is being made."
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


def banner():
print("""
__ _ _ _ _ _ _
@@ -29,8 +34,6 @@ def banner():
""")
pass

banner()


def faviconHash( data, web ):
if web:
@@ -53,6 +56,9 @@ def faviconHash( data, web ):
parser.parse_args()
args = parser.parse_args()

if not args.silent:
banner()

if args.values:
t_values = args.values.split(',')
else:
@@ -77,10 +83,14 @@ def faviconHash( data, web ):
web_src = False

if args.favurl:
favsource = args.favurl
r = requests.get( favsource )
data = r.content
web_src = True
favsource = args.favurl
try:
r = requests.get( favsource, timeout=3, verify=False )
except Exception as e:
sys.stdout.write( "%s[-] error occurred: %s%s\n" % (fg('red'),e,attr(0)) )
exit()
data = r.content
web_src = True

if not args.favfile64 and not args.favfile and not args.favurl:
parser.error( 'missing favicon' )
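
The favicon-hashtrick.py changes move the banner behind a silent-mode check, wrap the favicon download in a try/except with a 3-second timeout and verify=False, and disable urllib3's InsecureRequestWarning to match. For readers unfamiliar with the underlying trick, here is a minimal sketch of the usual mmh3-over-base64 favicon hash (the value Shodan exposes as http.favicon.hash), which is what the script's faviconHash() appears to compute from a URL; the target URL is a placeholder and the newline-preserving base64 encoding is the convention assumed by the common recipe.

```python
# Sketch only: fetch a favicon and compute the mmh3-over-base64 hash
# commonly used for Shodan's http.favicon.hash. URL is a placeholder.
# Requires the third-party packages: requests, mmh3.
import base64
import mmh3
import requests
import urllib3

# Match the patched script: skip TLS verification and mute the warning it triggers.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

def favicon_hash(url, timeout=3):
    r = requests.get(url, timeout=timeout, verify=False)
    r.raise_for_status()
    # encodebytes() keeps the newline-wrapped base64 layout the convention expects.
    return mmh3.hash(base64.encodebytes(r.content))

if __name__ == "__main__":
    print(favicon_hash("https://example.com/favicon.ico"))
```

The resulting signed 32-bit value can then be fed into a search such as http.favicon.hash:<value> to find other hosts serving the same icon.
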
Binary file added github-sponsor.jpg
9 changes: 5 additions & 4 deletions google-search.py
@@ -12,12 +12,12 @@

def banner():
print("""
_ _
__ _ ___ ___ __ _| | ___ ___ ___ __ _ _ __ ___| |__ _ __ _ _
_ _
__ _ ___ ___ __ _| | ___ ___ ___ __ _ _ __ ___| |__ _ __ _ _
/ _` |/ _ \ / _ \ / _` | |/ _ \ / __|/ _ \/ _` | '__/ __| '_ \ | '_ \| | | |
| (_| | (_) | (_) | (_| | | __/ \__ \ __/ (_| | | | (__| | | | _ | |_) | |_| |
\__, |\___/ \___/ \__, |_|\___| |___/\___|\__,_|_| \___|_| |_| (_) | .__/ \__, |
|___/ |___/ |_| |___/
|___/ |___/ |_| |___/
by @gwendallecoguic
@@ -86,6 +86,7 @@ def banner():
else:
urldecode = False

# print(fb_cookie)

def doMultiSearch( term, numbers_only, urldecode, page ):
zero_result = 0
@@ -106,7 +107,7 @@ def doMultiSearch( term, numbers_only, urldecode, page ):
print( s_results[i]['url'] )
else:
for i in range(page,end_page):
page_history[i] = 0
page_history[i] = 0

for term in t_terms:
page_history = {}
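
The google-search.py hunks are cosmetic (banner spacing, an indentation fix, and a commented-out debug print), but they sit around doMultiSearch(), which walks result pages and records a per-page count in page_history so empty pages can end a search early. A generic sketch of that bookkeeping pattern is below; the search_page callable, page limit, and stop threshold are stand-ins for whatever the script actually does through goop, not its real interface.

```python
# Sketch only: paginated search with per-page result accounting.
# `search_page(term, page)` is a stand-in for the real search backend.
def collect_results(term, search_page, max_pages=10, max_empty=2):
    page_history = {}        # page index -> number of results returned
    results = []
    empty_streak = 0
    for page in range(max_pages):
        urls = search_page(term, page)
        page_history[page] = len(urls)
        results.extend(urls)
        empty_streak = 0 if urls else empty_streak + 1
        if empty_streak >= max_empty:   # give up after consecutive empty pages
            break
    return results, page_history
```
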
1 change: 1 addition & 0 deletions goop/__init__.py
@@ -0,0 +1 @@
__version__ = '0.1.1'