First commit

commit af0b02e0c19b268ab1c6fd79c86bcbc38189df4f 0 parents
SensePost authored
127 BiLE-weigh.pl
... ... @@ -0,0 +1,127 @@
  1 +#!/usr/bin/perl
  2 +
  3 +## perl BiLE-weigh.pl domain.com outputfile.mine
  4 +## Takes output file *.mine from BiLE.pl
  5 +## domain.com is the website domain
  6 +
  7 +$|=1;
  8 +
  9 +@exceptionlist=("microsoft.com","216.239.5","yahoo.com",
  10 + "ultraseek.com","ananzi.co.za","macromedia.com",
  11 + "clickstream","w3.org","adobe.com","google.com");
  12 +
  13 +if ($#ARGV < 1){die "perl BiLE-weigh.pl domain.com output.file.from.bile.mine\n";}
  14 +
  15 +#load and init
  16 +`cat @ARGV[1] | sort | uniq > @ARGV[1].2`;
  17 +`mv @ARGV[1].2 @ARGV[1]`;
  18 +open (IN,"@ARGV[1]") || die "Cant open data file\n";
  19 +while (<IN>){
  20 + chomp;
  21 + ($src,$dst,$cellid)=split(/:/,$_);
  22 + if ($src ne $dst){
  23 + $flag=0;
  24 + foreach $except (@exceptionlist){
  25 + if (($src =~ /$except/) || ($dst =~ /$except/)) {$flag=1;}
  26 + }
  27 + if ($flag == 0){push @structure,$_;}
  28 + }
  29 +}
  30 +close(IN);
  31 +
  32 +
  33 +$sites{@ARGV[0]}=300;
  34 +
  35 +
  36 +####################compute first cell node values
  37 +print "compute nodes\n";
  38 +print "Nodes alone\n";
  39 +$ws=weight(@ARGV[0],"s");
  40 +$wd=weight(@ARGV[0],"d");
  41 +print "src $ws dst $wd\n";
  42 +foreach $piece (@structure){
  43 +
  44 +
  45 + ($src,$dst,$cellid)=split(/:/,$piece);
  46 +
  47 + ## link -from- X to node
  48 + if ($src eq @ARGV[0]){
  49 + $newsites{$dst}=$newsites{$dst}+($sites{$src}*(1/$ws));
  50 + }
  51 +
  52 + ## link -to- X from node
  53 + if ($dst eq @ARGV[0]){
  54 + $newsites{$src}=$newsites{$src}+($sites{$dst}*(0.6/$wd));
  55 + }
  56 +}
  57 +
  58 +&writenodes;
  59 +
  60 +
  61 +undef $sites;
  62 +undef %sites;
  63 +&loadnodes;
  64 +
  65 +#between nodes
  66 +foreach $blah (keys %sites){
  67 + print "\n[Testing with node $blah]\n";
  68 + $ws=weight($blah,"s");
  69 + $wd=weight($blah,"d");
  70 + print "src $ws dst $wd\n";
  71 + foreach $piece (@structure){
  72 +
  73 + ($src,$dst,$cellid)=split(/:/,$piece);
  74 +
  75 + ## link -from- node to other node (2/3)
  76 + if ($src eq $blah){
  77 + $newsites{$dst}=$newsites{$dst}+($sites{$src}*(1/$ws));
  78 + $add=($sites{$src}*(1/$ws));
  79 + $orig=$sites{$src};
  80 + }
  81 +
  82 + ## link -to- node from nodes (1/3)
  83 + if ($dst eq $blah){
  84 + $newsites{$src}=$newsites{$src}+($sites{$dst}*(0.6/$wd));
  85 +
  86 + $add=($sites{$dst}*(0.6/$wd));
  87 + $orig=$sites{$dst};
  88 + }
  89 + }
  90 +
  91 +}
  92 +
  93 +&writenodes;
  94 +
  95 +`cat temp | sort -r -t ":" +1 -n > @ARGV[1].sorted`;
  96 +
  97 +
  98 +sub loadnodes{
  99 + $sites="";
  100 + open (IN,"temp") || die "cant open temp file\n";
  101 + while (<IN>){
  102 + chomp;
  103 + ($node,$value)=split(/:/,$_);
  104 + $sites{$node}=$value;
  105 + }
  106 + close (IN);
  107 +}
  108 +
  109 +sub writenodes{
  110 + open (OUT,">temp") || die "Cant write\n";
  111 + foreach $blah (keys %newsites){
  112 + print OUT "$blah:$newsites{$blah}\n";
  113 + }
  114 + close OUT;
  115 +}
  116 +
  117 +sub weight{   # count links in @structure where $site is the source ("s") or the destination ("d")
  118 + ($site,$mode)=@_;
  119 + $from=0; $to=0;
  120 + foreach $piece (@structure){
  121 + ($src,$dst,$cellid)=split(/:/,$piece);
  122 + if ($dst eq $site){$from++};
  123 + if ($src eq $site){$to++;}
  124 + }
  125 + if ($mode eq "s"){return $to;}
  126 + if ($mode eq "d"){return $from;}
  127 +}
238 BiLE.pl
... ... @@ -0,0 +1,238 @@
  1 +#!/usr/bin/perl
  2 +
  3 +### perl BiLE.pl web.site.com output.name
  4 +### BiLE will output two files *.mine and *.walrus
  5 +### *.walrus can be ignored for now
  6 +
  7 +use Socket;
  8 +$|=1;
  9 +
  10 +if ($#ARGV<1){die "Usage BiLE.pl <site> <outfile>\n";}
  11 +$tocheck=@ARGV[0];
  12 +
  13 +@links=getlinks($tocheck,4,0);
  14 +push @links,&linkto($tocheck,0);
  15 +
  16 +undef @lotsoflinks;
  17 +
  18 +foreach $link (@links){
  19 + if ($link ne $tocheck){
  20 + push @lotsoflinks,&linkto($link,1);
  21 + push @lotsoflinks,&getlinks($link,3,1);
  22 + }
  23 +}
  24 +
  25 +
  26 +
  27 +###
  28 +##SubRoutines
  29 +###
  30 +
  31 +sub linkto{   # ask Google for link:<site> and record the external sites that link to it
  32 + my ($tocheck,$cellid)=@_;
  33 + if (length($tocheck)<3){return "";}
  34 + my @returns=("");
  35 +
  36 + undef @global;
  37 +
  38 + @global=dedupe(returngoogle("link:$tocheck","web"));
  39 + foreach $taa (@global){print "[$taa]\n";}
  40 +
  41 + open (OUT,">>@ARGV[1].mine") || die "cant open out file\n";
  42 + open (OUTT,">>@ARGV[1].walrus") || die "cant open walrus file\n";
  43 + print OUT "----> Links to: [$tocheck]\n";
  44 + foreach $site (@global){
  45 + ($site,$crap)=split(/[\`\!\@\#\$\%\^\&\*\(\)\=\\\|\+\[\]\'\>\<\/\?\,\"\' ]/,$site);
  46 + if (($site =~ /\./) && (length($site)>2) && ($site !~ /shdocvw/)) {
  47 + print OUT "$site:$tocheck\n";
  48 + print OUTT "$tocheck:$site\n";
  49 + push @returns,$site;
  50 + }
  51 + }
  52 + close (OUT);
  53 + close(OUTT);
  54 + return (@returns);
  55 +}
  56 +
  57 +#################################
  58 +sub getlinks{   # mirror $site with httrack, then harvest href and mailto domains from the mirrored pages
  59 +
  60 + my @return=("");
  61 + my @global=("");
  62 +
  63 + ($site,$depth,$cellid)=@_;
  64 + if (length($site)<3){return "";}
  65 + print "mirroring $tocheck\n";
  66 + $rc=system("rm -Rf work");
  67 + $mc="httrack $site --max-size=350000 --max-time=600 -I0 --quiet --do-not-log -O work.$site --depth=$depth -%v-K -*.gif -*.jpg -*.pdf -*.zip -*.dat -*.exe -*.doc -*.avi -*.pps -*.ppt 2>&1";
  68 + $rc=system ($mc);
  69 +
  70 +
  71 + #HTTP hrefs
  72 + @res=`grep -ri "://" work.$site/*`;
  73 +
  74 + foreach $line (@res){
  75 + ($file,$crap,$stuff)=split(/:/,$line);
  76 + ($crap,$getit,$crap)=split(/\//,$file);
  77 + ($crap,$want)=split(/\/\//,$stuff);
  78 +
  79 + ($want,$crap)=split(/\//,$want);
  80 + ($want,$crap)=split(/\"/,$want);
  81 + ($want,$crap)=split(/\>/,$want);
  82 + ($want,$crap)=split(/\</,$want);
  83 + ($want,$crap)=split(/[\`\!\@\#\$\%\^\&\*\(\)\=\\\|\+\[\]\'\>\<\/\?\,\"\']/,$want);
  84 + $want =~ s/\[\]\;//g;
  85 + if ((length($want)>0) && ($getit ne $want)) {
  86 + if (($want =~ /\./) && (length($want)>2) && ($want !~/shdocvw/) &&
  87 + ($want !~ /[\`!\@\#\$\%\^\&\*\(\)\=\\\|\+\[\]\'\>\<\/\?\,\"]/)) {
  88 + $store="$site:$want";
  89 + push @global,$store;
  90 + push @return,$want;
  91 + }
  92 + }
  93 + }
  94 +
  95 + ## To get mailtos:
  96 + @res=`grep -ri "\@" work.$site/*`;
  97 + foreach $line (@res){
  98 + ($crap,$want)=split(/\@/,$line);
  99 + ($want,$crap)=split(/[ ">\n?<']/,$want);
  100 + ($left,$right)=split(/\./,$want);
  101 + if ( ($want =~ /\./) && (length($want)>3) && (length($right)> 1) && ($want !~/shdocvw/)){
  102 + ($want,$crap)=split(/[\`\!\@\#\$\%\^\&\*\(\)\=\\\|\+\[\]\'\>\<\/\?\,\"\']/,$want);
  103 + $store="$site:$want";
  104 + push @global,$store;
  105 + push @return,$want;
  106 + }
  107 + }
  108 +
  109 +
  110 + @global=dedupe(@global);
  111 +
  112 + open (OUT,">>@ARGV[1].mine") || die "cant open out file\n";
  113 + open (OUTT,">>@ARGV[1].walrus") || die "cant open walrus file\n";
  114 + print OUT "====> Link from: [$site]\n";
  115 + foreach $site (@global){
  116 + print OUT "$site\n";
  117 + print OUTT "$site\n";
  118 + }
  119 + close (OUT);
  120 + close(OUTT);
  121 +
  122 +# $rc=system("rm -Rf work");
  123 + return (dedupe(@return));
  124 +}
  125 +
  126 +
  127 +
  128 +#############Putting it together.
  129 +sub returngoogle{
  130 + ($term,$type)=@_;
  131 + if ($type eq "web") {$gtype="search"; $host="www.google.com";}
  132 + if ($type eq "news") {$gtype="groups"; $host="groups.google.com";};
  133 + if ($term !~ /link\:/){
  134 + $term="%2b".$term;
  135 + $term=~s/\./\.\%2b/g;
  136 + $term=~s/ /+/g;
  137 + }
  138 + $port=80; $target = inet_aton($host);
  139 + $enough=numg($term,$gtype);
  140 + print "The number is $enough\n";
  141 + undef @rglobal;
  142 + for ($i=0; $i<=$enough; $i=$i+100){
  143 + print "tick $i\n";
  144 + @response=sendraw("GET /$gtype?q=$term&num=100&hl=en&safe=off&start=$i&sa=N&filter=0 HTTP/1.0\r\n\r\n");
  145 +
  146 + undef @collect;
  147 +
  148 + @collect=googleparseweb(@response);
  149 + foreach (@collect){
  150 + print "[$_]\n";
  151 + push @rglobal,$_;
  152 + }
  153 + }
  154 + return @rglobal;
  155 +}
  156 +
  157 +############find out how many requests we must do
  158 +sub numg{
  159 + ($theterm,$gtype)=@_;
  160 + @response=sendraw("GET /$gtype?q=$theterm&num=10&hl=en&safe=off&start=10&sa=N&filter=0 HTTP/1.0\r\n\r\n");
  161 + $flag=0;
  162 + foreach $line (@response){
  163 + if ($line =~ /of about/){
  164 + ($duh,$one)=split(/of about \<b\>/,$line);
  165 + ($two,$duh)=split(/\</,$one);
  166 + $flag=1;
  167 + last;
  168 + }
  169 + #single reply
  170 + if ($line =~ /of \<b\>/){
  171 + ($duh,$one)=split(/of \<b\>/,$line);
  172 + ($two,$duh)=split(/\</,$one);
  173 + $flag=1;
  174 + last;
  175 + }
  176 + }
  177 + if ($flag==0){return 0;}
  178 + for ($r=0; $r<=1000; $r=$r+100){
  179 + if (($two>$r) && ($two<100+$r)) {$top=$r+100;}
  180 + }
  181 + if (($two>1000) || ($two =~ /\,/)) {
  182 + $top=1000;
  183 + print "Over 1000 hits..\n";
  184 + }
  185 +
  186 + print "Received $two Hits - Google for $top returns\n";
  187 + return $top;
  188 +}
  189 +
  190 +###########Parse for web stuff
  191 +sub googleparseweb{
  192 +
  193 + my @googles;
  194 +
  195 + foreach $line (@_){
  196 + if ($line =~ /http/){
  197 + (@stuffs)=split(/\/\//,$line);
  198 + foreach $stuff (@stuffs){
  199 + ($want,$crap)=split(/\//,$stuff);
  200 + if (($want !~ /</) && ($want !~ /google/)){push @googles,$want;}
  201 + }
  202 + }
  203 + }
  204 + return dedupe(@googles);
  205 +}
  206 +
  207 +
  208 +###########Good old old sendraw
  209 +sub sendraw {
  210 + my ($pstr)=@_;
  211 + socket(S,PF_INET,SOCK_STREAM,getprotobyname('tcp')||0) || return "";
  212 + if(connect(S,pack "SnA4x8",2,$port,$target)){
  213 + my @in="";
  214 + select(S); $|=1; print $pstr;
  215 + while(<S>) {
  216 + push @in,$_; last if ($line=~ /^[\n\r]+$/ );
  217 + }
  218 + select(STDOUT);
  219 + return @in;
  220 + } else { return ""; }
  221 +}
  222 +
  223 +
  224 +#########################-- dedupe
  225 +sub dedupe
  226 +{
  227 + (@keywords) = @_;
  228 + my %hash = ();
  229 + foreach (@keywords) {
  230 + $_ =~ tr/[A-Z]/[a-z]/;
  231 + chomp;
  232 + if (length($_)>1){$hash{$_} = $_;}
  233 + }
  234 + return keys %hash;
  235 +} #dedupe
  236 +
  237 +
  238 +
249 README.markdown
... ... @@ -0,0 +1,249 @@
  1 +#1. Name
  2 +BiLE Suite
  3 +#2. Author
  4 +Roelof Temmingh
  5 +#3. License, version & release date
  6 +License : GPLv2
  7 +Version : v1.0
  8 +Release Date : Unknown
  9 +
  10 +#4. Description
  11 +##4.1 BiLE.pl
  12 +The Bi-directional Link Extractor. BiLE leans on Google and HTTrack to
  13 +automate the collection of links to and from the target site, and then
  14 +applies a simple statistical weighing algorithm to deduce which Web sites
  15 +have the strongest "relationships" with the target site.
  16 +
  17 +We run BiLE.pl against the target Web site by simply specifying the Website
  18 +address and a name for the output file.
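For example, assuming the target is www.sensepost.com and we want the link pairs written to sp\_bile\_out.txt (the output name here is purely illustrative):

> perl BiLE.pl www.sensepost.com sp\_bile\_out.txt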
  19 +##4.2 BiLE-weigh.pl
  20 +The next tool used in the collection is BiLE-weigh, which takes the output
  21 +of BiLE and calculates the significance of each site found. The weighing
  22 +algorithm is complex and the details will not be discussed; what should be
  23 +noted is the following (a simplified sketch appears after this list):
  24 +
  25 +The target site that was given as an input parameter does not need to end
  26 +up with the highest weight. If it does not, that is a good sign that the
  27 +provided target site is not the central site of the organization.
  28 +
  29 +A link to a site with many links to the site weighs less than a link to a
  30 +site with fewer links to the site.
  31 +A link from a site with many links weighs less than a link from a site
  32 +with fewer links.
  33 +A link from a site weighs more than a link to a site.
  34 +
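To make these rules concrete, here is a simplified sketch of a single propagation pass, modelled on the loop shown in BiLE-weigh.pl above. The seed value of 300 and the 1/outgoing-count and 0.6/incoming-count factors are taken from that script; the three link pairs and the variable names are illustrative only:

```perl
#!/usr/bin/perl
# Simplified illustration of one BiLE-weigh propagation pass (sketch only).
use strict;
use warnings;

# Link pairs in "source:destination" form, as produced by BiLE.pl.
my @structure = (
    "www.sensepost.com:www.hackrack.com",
    "www.sensepost.com:www.condyn.net",
    "www.securityfocus.com:www.sensepost.com",
);

my $target = "www.sensepost.com";
my %sites  = ($target => 300);   # the target site is seeded with a weight of 300
my %newsites;

# Count the target's outgoing and incoming links.
my ($out, $in) = (0, 0);
for my $pair (@structure) {
    my ($src, $dst) = split /:/, $pair;
    $out++ if $src eq $target;
    $in++  if $dst eq $target;
}

# Push weight along every link that touches the target:
# outgoing links share 1/$out of the weight, incoming links only 0.6/$in.
for my $pair (@structure) {
    my ($src, $dst) = split /:/, $pair;
    $newsites{$dst} += $sites{$src} * (1   / $out) if $src eq $target;
    $newsites{$src} += $sites{$dst} * (0.6 / $in)  if $dst eq $target;
}

# www.hackrack.com and www.condyn.net each get 300 * (1/2)  = 150;
# www.securityfocus.com gets              300 * (0.6/1)     = 180.
printf "%s:%.2f\n", $_, $newsites{$_} for sort keys %newsites;
```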
  35 +##4.3 tld-expand.pl
  36 +The tld-expand.pl script is used to find domains in any other TLDs.
  37 +##4.4 vet-IPrange.pl
  38 +The output of BiLE-weigh now lists a number of domains with a relevance
  39 +number, ranking the discovered sites by their relevance to our target Web
  40 +site. The sites with lower relevance numbers, situated much lower down the
  41 +list, are not as important as the top sites. The next step is to take the
  42 +list of sites and match their domain names to IPs.
  45 +
  46 +For this, we use vet-IPrange. The vet-IPrange tool performs DNS lookups for a
  47 +supplied list of DNS names. It will then write the IP address of each lookup
  48 +into a file, and then perform a lookup on a second set of names. If the IP
  49 +address matches any of the IP addresses obtained from the first step, the
  50 +tool will add the DNS name to the file.
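vet-IPrange.pl itself is not part of this commit, so the following is only a minimal sketch of the matching idea described above; the argument order, the file handling (an in-memory hash instead of the intermediate IP file) and the names are assumptions:

```perl
#!/usr/bin/perl
# Minimal sketch of the vet-IPrange matching idea -- illustrative only.
use strict;
use warnings;
use Socket;

my ($candidate_file, $true_domain_file, $output_file) = @ARGV;

# Pass 1: resolve the "true" domains and remember their IP addresses.
my %known_ip;
open(my $true_fh, '<', $true_domain_file) or die "cant open true domain file\n";
while (my $domain = <$true_fh>) {
    chomp $domain;
    my $packed = gethostbyname($domain) or next;   # skip names that do not resolve
    $known_ip{ inet_ntoa($packed) } = 1;
}
close($true_fh);

# Pass 2: keep any candidate domain that resolves to one of those IPs.
open(my $cand_fh, '<', $candidate_file) or die "cant open input file\n";
open(my $out_fh,  '>', $output_file)    or die "cant write output file\n";
while (my $domain = <$cand_fh>) {
    chomp $domain;
    my $packed = gethostbyname($domain) or next;
    print $out_fh "$domain\n" if $known_ip{ inet_ntoa($packed) };
}
close($cand_fh);
close($out_fh);
```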
  51 +##4.5 qtrace.pl
  52 +qtrace is used to plot the boundaries of networks. It uses a heavily
  53 +modified traceroute, built on a custom-compiled hping, to perform multiple
  54 +traceroutes to boundary sections of a class C network. qtrace uses a list of
  55 +single IP addresses to test the network size. Output is written to a
  56 +specified file.
  57 +##4.6 vet-mx.pl
  58 +The MX records of a company can also be used to group domains
  59 +together. For this process, we use the vet-mx tool. The tool performs MX
  60 +lookups for a list of domains, and stores each IP it gets in a file. vet-mx
  61 +performs a second run of lookups on a list of domains, and if any of the IPs
  62 +of the MX records matches any of the first phase IPs found, the domain is
  63 +added to the output file.
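The comparison step is the same set-matching idea as in vet-IPrange; the only new ingredient is the MX lookup. Below is a hedged sketch of that lookup using Net::DNS (which jarf-dnsbrute in this commit already uses). vet-mx.pl is not included here, so the helper is purely illustrative:

```perl
use strict;
use warnings;
use Net::DNS;
use Socket;

my $res = Net::DNS::Resolver->new();

# Return the IP addresses of a domain's mail exchangers.
sub mx_ips {
    my ($domain) = @_;
    my @ips;
    foreach my $rr (mx($res, $domain)) {              # mx() is exported by Net::DNS
        my $packed = gethostbyname($rr->exchange) or next;
        push @ips, inet_ntoa($packed);
    }
    return @ips;
}

# A first pass would collect mx_ips() of the "true" domains into a hash; a
# second pass keeps every domain whose mx_ips() hit that hash, exactly as
# described for vet-IPrange above.
```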
  64 +
  65 +##4.7 jarf-rev.pl
  66 +jarf-rev is used to perform a reverse DNS lookup on an IP range. All reverse
  67 +entries that match the filter file are displayed to screen (STDOUT). The
  68 +output displayed is the DNS name followed by IP address.
  69 +
  71 +##4.8 jarf-dnsbrute.pl
  71 +The jarf-dnsbrute script is a DNS brute forcer, for when DNS zone transfers
  72 +are not allowed. jarf-dnsbrute will perform forward DNS lookups using a
  73 +specified domain name with a list of names for hosts. The script is
  74 +parallelized with Parallel::ForkManager, running several lookups at a time.
  75 +
  76 +#5. Usage
  77 +##5.1 BiLE.pl
  78 +> perl BiLE.pl www.sensepost.com sp\_bile\_out.txt
  79 +Two output files are produced, *.mine and *.walrus; for now, *.mine is the
  80 +important file we will use later.
  81 +
  82 +This command will run for some time. BiLE will use HTTrack to download and
  83 +analyze the entire site, extracting links to other sites that will also be
  84 +downloaded, analyzed, and so forth. BiLE will also run a series of Google
  85 +searches using the link: directive to see what external sites have HTTP
  86 +links toward our target site.
  87 +
  88 +The output of this is a file containing all the link pairs in the format:
  89 +
  90 +Source\_site:Destination\_site
  91 +
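For illustration, a few link pairs in a .mine file might look like the following (the domains are borrowed from the sample output in section 5.2; the pairs themselves are hypothetical):

```
www.sensepost.com:www.hackrack.com
www.sensepost.com:www.condyn.net
www.securityfocus.com:www.sensepost.com
```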
  92 +BiLE produces output that only contains the source and destination sites for
  93 +each link, but tells us nothing about the relevance of each site. Once you
  94 +have a list of all the "relationships" (links to and from your chosen target
  95 +Web site), you want to sort them according to relevance. The tool we use
  96 +here, bile-weigh.pl, uses a complex formula to sort the relationships so you
  97 +can easily see which are most important.
  98 +
  99 +##5.2 BiLE-weigh.pl
  100 +> perl BiLE-weigh.pl www.sensepost.com sp\_bile\_out.txt.mine
  101 +
  102 +Input fields:
  103 +< website > is a Web site name; for example, www.sensepost.com
  104 +< input file > is the *.mine output file produced by BiLE
  105 +
  106 +Output:
  107 +Creates a file called < input file name >.sorted, sorted by weight with the
  108 +highest weights first.
  109 +
  110 +Output format:
  111 +Site name:weight
  112 +
  113 +The list you get should look something like:
  114 +
  115 +www.sensepost.com:378.69
  116 +www.redpay.com:91.15
  117 +www.hackrack.com:65.71
  118 +www.condyn.net:76.15
  119 +www.nmrc.org:38.08
  120 +www.nanoteq.co.za:38.08
  121 +www.2computerguys.com:38.08
  122 +www.securityfocus.com:35.10
  123 +www.marcusevans.com:30.00
  124 +www.convmgmt.com:24.00
  125 +www.sqlsecurity.com:23.08
  126 +www.scmagazine.com:23.08
  127 +www.osvdb.org:23.08
  128 +
  129 +The number you see next to each site is the "weight" that BiLE has assigned.
  130 +The weight in itself is an arbitrary value and of no real use to us. What is
  131 +interesting, however, is the relationship between the values of the sites.
  132 +The rate at which the sites discovered become less relevant is referred to
  133 +as the "rate of decay". A slow rate of decay means there are many sites with
  134 +a high relevance, an indication of widespread cross-linking. A steep descent
  135 +shows us that the site is fairly unknown and unconnected: a stand-alone site.
  136 +It is in the latter case that HTML Link Analysis becomes interesting to us,
  137 +as these links are likely to reflect actual business relationships.
  138 +
  139 +##5.3 tld-expand.pl
  140 +> perl tld-expand.pl [input file] [output file]
  141 +
  142 +Input fields:
  143 +Input file: the file containing a list of domains
  144 +
  145 +Output:
  146 +Output file: the output file containing the domains expanded by TLD
  147 +
  148 +Note:
  149 +tld-expand will run for a while, depending on how many domains are listed in
  150 +the input file. One can monitor the output with: tail -f < output file >
  151 +##5.4 vet-IPrange.pl
  152 +> perl vet-IPrange.pl [input file] [true domain file] [output file] < range >
  153 +
  154 +Input fields:
  155 +Input file: file containing the list of domains
  156 +True domain file: list of domains to be compared against
  157 +
  158 +Output:
  159 +Output file: a file containing the matched domains
  160 +
  161 +##5.5 qtrace.pl
  162 +> perl qtrace.pl [ip_address_file] [output_file]
  163 +
  164 +Input fields:
  165 +ip_address_file: full IP addresses, one per line
  166 +output_file: file to which the results are written
  167 +
  168 +Typical use:
  169 +> perl qtrace.pl ip\_list.txt outputfile.txt
  170 +
  171 +Output format:
  172 +Network range 10.10.1.1-10.10.28
  173 +
  174 +##5.6 vet-mx.pl
  175 +> perl vet-mx.pl [input file] [true domain file] [output file]
  176 +Input fields:
  177 +Input file: the file containing a list of domains
  178 +True domain file: list of domains to be compared against
  179 +
  180 +Output:
  181 +Output file: an output file containing the matched domains
  182 +
  183 +##5.7 jarf-rev
  184 +> perl jarf-rev [subnetblock]
  185 +Input fields:
  186 +Subnetblock: the IP range to sweep, given as start and end addresses
  187 +
  188 +Typical use:
  189 +> perl jarf-rev 192.168.37.1-192.168.37.118
  190 +Output format:
  191 +DNS name ; IP number
  192 +DNS name is blank if no reverse entry could be discovered.
  193 +
  194 +##5.8 jarf-dnsbrute.pl
  195 +
  196 +> perl jarf-dnsbrute [domain_name] [file_with_names]
  197 +
  198 +
  199 +Input fields:
  200 +Domain\_name: the domain name to brute force
  201 +File\_with\_names: the full path to the file containing common DNS names
  202 +
  203 +Typical use:
  204 +> perl jarf-dnsbrute syngress.com common
  205 +
  206 +Output format:
  207 +DNS name ; IP number
  208 +
  209 +#6. Requirements
  210 +##6.1 BiLE.pl
  211 +
  212 +In order for BiLE.pl to run correctly, httrack needs to be installed on the
  213 +operating system. Line 67 of BiLE.pl can be modified to point to the httrack
  214 +executable, changing:
  215 +
  216 +> $mc="httrack $site......
  217 +
  218 +to
  219 +
  220 +> $mc="/home/sensepost/tools/httrack $site......
  221 +
  222 +##6.2 BiLE-weigh.pl
  223 +
  224 +There are no real requirements, except that the script requires the *.mine
  225 +output file produced by the BiLE.pl script.
  226 +
  227 +##6.3 tld-expand.pl
  228 +N/A
  229 +
  230 +##6.4 vet-IPrange.pl
  231 +N/A
  232 +
  233 +##6.5 qtrace.pl
  234 +
  235 +NB: hping-1s is a recompiled hping with setuid support and a 1-second timeout.
  236 +Note: remember to allow ICMP type 11 (time exceeded) into your network!
  237 +
  238 +Line 59 of qtrace.pl can be modified to point to the hping-1s executable:
  239 +
  240 +> my @res=`hping-1s -2......
  241 +
  242 +to
  243 +
  244 +> my @res=`/home/sensepost/tools/modified/hping-1s -2.....
  245 +
  246 +##6.6 vet-mx.pl
  247 +N/A
  248 +
  249 +
162 common
... ... @@ -0,0 +1,162 @@
  1 +www
  2 +ftp
  3 +ns
  4 +mail
  5 +3com
  6 +aix
  7 +apache
  8 +back
  9 +bastion
  10 +bind
  11 +border
  12 +bsd
  13 +business
  14 +chains
  15 +cisco
  16 +content
  17 +corporate
  18 +cvp
  19 +debian
  20 +dns
  21 +domino
  22 +dominoserver
  23 +download
  24 +e-bus
  25 +e-business
  26 +e-mail
  27 +e-safe
  28 +email
  29 +esafe
  30 +external
  31 +extranet
  32 +firebox
  33 +firewall
  34 +freebsd
  35 +front
  36 +ftp
  37 +fw
  38 +fw-
  39 +fwe
  40 +fwi
  41 +gate
  42 +gatekeeper
  43 +gateway
  44 +gauntlet
  45 +group
  46 +help
  47 +hop
  48 +hp
  49 +hp-ux
  50 +hpjet
  51 +hpux
  52 +http
  53 +https
  54 +hub
  55 +ibm
  56 +ids
  57 +info
  58 +inside
  59 +internal
  60 +internet
  61 +intranet
  62 +ipchains
  63 +ipfw
  64 +irix
  65 +jet
  66 +list
  67 +lotus
  68 +lotusdomino
  69 +lotusnotes
  70 +lotusserver
  71 +mail
  72 +mailfeed
  73 +mailgate
  74 +mailgateway
  75 +mailgroup
  76 +mailhost
  77 +maillist
  78 +mailmarshall
  79 +mailpop
  80 +mailrelay
  81 +mandrake
  82 +mimesweeper
  83 +ms
  84 +msproxy
  85 +mx
  86 +nameserver
  87 +news
  88 +newsdesk
  89 +newsfeed
  90 +newsgroup
  91 +newsroom
  92 +newsserver
  93 +nntp
  94 +notes
  95 +noteserver
  96 +notesserver
  97 +ns
  98 +nt
  99 +openbsd
  100 +outside
  101 +pix
  102 +pop
  103 +pop3
  104 +pophost
  105 +popmail
  106 +popserver
  107 +print
  108 +printer
  109 +printspool
  110 +private
  111 +proxy
  112 +proxyserver
  113 +public
  114 +qpop
  115 +raptor
  116 +read
  117 +redcreek
  118 +redhat
  119 +route
  120 +router
  121 +router
  122 +scanner
  123 +screen
  124 +screening
  125 +secure
  126 +seek
  127 +slackware
  128 +smail
  129 +smap
  130 +smtp
  131 +smtpgateway
  132 +smtpgw
  133 +sniffer
  134 +snort
  135 +solaris
  136 +sonic
  137 +spool
  138 +squid
  139 +sun
  140 +sunos
  141 +suse
  142 +switch
  143 +transfer
  144 +trend
  145 +trendmicro
  146 +unseen
  147 +vlan
  148 +wall
  149 +web
  150 +webmail
  151 +webserver
  152 +webswitch
  153 +win2000
  154 +win2k
  155 +win31
  156 +win95
  157 +win98
  158 +winnt
  159 +write
  160 +ww
  161 +www
  162 +xfer
117 jarf-dnsbrute
... ... @@ -0,0 +1,117 @@
  1 +#!/usr/bin/perl
  2 + use Data::Dumper; # Vital debugging tool
  3 + use Net::DNS; use List::MoreUtils qw(uniq);   # uniq() is called by forward(), nslookup() and dedupe() below
  4 +my $res= Net::DNS::Resolver->new();
  5 +
  6 +##############
  7 +#Sub Routines
  8 +##############
  9 +sub dedupe(@){return uniq map { lc $_ } @_; }
  10 +
  11 +# ----------------------------
  12 +sub forward($;$) {my( $host,$mode, )= @_;
  13 + $mode ||= 0;
  14 +# ----------------------------
  15 + my @results = ();
  16 + my $query = $res->search($host);
  17 + return () unless $query;
  18 + foreach my $rr ($query->answer) {
  19 + next unless ( $rr->type eq "A" );
  20 + my $address = $rr->{"address"};
  21 + my $data = $address;
  22 + $data = "$host;$data" if ( $mode >= 1 );
  23 + $data = "$data;FL" if ( $mode >= 2 );
  24 + $data = "$data;$host" if ( $mode >= 3 );
  25 + push @results, $data; }
  26 + return uniq sort @results;
  27 + }
  28 +
  29 +# ----------------------------
  30 +sub nslookup($;$){my( $host,$mode, ) = @_;
  31 + $mode ||= 0;
  32 +# ----------------------------
  33 + my @results = ();
  34 + my $query = $res->query($host,"NS");
  35 + return () unless $query;
  36 + foreach my $rr ($query->answer) {
  37 + next unless ( $rr->type eq "NS" );
  38 + my $address = $rr->{"nsdname"};
  39 + my @addresses = forward($address);
  40 + foreach my $data (@addresses){
  41 + $data = "$host;$data" if ( $mode >= 1 );
  42 + $data = "$data;NS" if ( $mode >= 2 );
  43 + $data = "$data;$host" if ( $mode >= 3 );
  44 + push @results, $data; }
  45 + }
  46 + return uniq sort @results;
  47 + }
  48 +
  49 +# ----------------------------
  50 +###
  51 +#Main Program starts here
  52 +###
  53 +$|=1;
  54 +
  55 +use Parallel::ForkManager;
  56 +
  57 +my $MAX_PROCESSES = 6;
  58 +my $pm=new Parallel::ForkManager($MAX_PROCESSES);
  59 +
  60 +################# POPULATE @stuff ####################
  61 +if ($#ARGV < 1) {die "perl jarf-dnsbrute domain.com commonnames.txt\n\n";}
  62 +
  63 +my $domain=$ARGV[0];
  64 +my $common=@ARGV[1];
  65 +my @stuff = ();
  66 +
  67 +if ( -r $common ) {
  68 + open DATA, $common;
  69 + @stuff = <DATA>;
  70 + close DATA;
  71 + chomp @stuff;
  72 + }
  73 +else {
  74 + print STDERR "Couldn't read file '$common'\n";
  75 + exit 1;
  76 + }
  77 +
  78 +unless ( scalar @stuff ) {
  79 + print STDERR "Couldn't load any data\n";
  80 + exit 2;
  81 + }
  82 +
  83 +@stuff = map "$_.$domain", @stuff;
  84 +
  85 +
  86 +my $brutel=0;
  87 +my $mode=2;
  88 +
  89 +if ($mode==0){$mode=1;}
  90 +
  91 +my ($bullshit)=forward("name.$domain");
  92 +if (length($bullshit)>0){
  93 + if ($mode==2){
  94 + print "-any*non*valid-.$domain;$bullshit;CRAP\n";
  95 + } else {print "-any*non*valid-.$domain;$bullshit;\n";}
  96 +exit 3;
  97 +}
  98 +
  99 +############## OK ready to launch!
  100 +foreach $thing (@stuff){
  101 + my $pid = $pm->start("id: $thing") and next;
  102 + do_work($thing);
  103 + $pm->finish();
  104 +}
  105 +$pm->wait_all_children();
  106 +
  107 +##############
  108 +sub do_work{
  109 + my ($passed)=@_;
  110 + @nslookupout=forward($passed,$mode);
  111 + foreach $out (@nslookupout){
  112 + my ($testname,$testip)=split(/;/,$out);
  113 + if ($testip ne $bullshit){
  114 + print "$out\n";
  115 + }
  116 + }
  117 +}
137 jarf-rev
... ... @@ -0,0 +1,137 @@
  1 +#!/usr/bin/perl
  2 +
  3 +###
  4 +### USAGE: perl jarf-rev 10.10.1.1-10.10.1.234
  5 +###
  6 +
  7 +if (scalar @ARGV < 1) {die "perl jarf-rev 10.10.1.1-10.10.1.234 \r\n";}
  8 +
  9 +use Parallel::ForkManager;
  10 +
  11 +###
  12 +#Sub Routines
  13 +###
  14 +
  15 +####
  16 +sub do_work{
  17 + my ($search)=@_;
  18 + @nslookupout=`nslookup -timeout=3 -retry=2 $search $ns 2>&1`;
  19 + foreach $line (@nslookupout){
  20 + if ($line =~ /Name:/){
  21 + ($duh,$returner,@crap)=split(/Name: /,$line);
  22 + }
  23 + }
  24 + $returner=~s/ //g;
  25 + chomp $returner;
  26 + if ($mode==0){
  27 + $tosave=$returner.";".$search.";RL";
  28 + }
  29 + if ($mode==1){
  30 + $tosave=$returner.";".$search.";RL";
  31 + }
  32 + if ($mode==2){
  33 + $tosave=$returner.";".$search.";RL";
  34 + }
  35 + print "$tosave\n";
  36 +}
  37 +
  38 +####
  39 +sub dedupe(@){return uniq map { lc $_ } @_;}
  40 +
  41 +####
  42 +
  43 +sub ip2long
  44 +{
  45 + my @ips = split (/\./, $_[0]);
  46 + my $binNum = "";
  47 + foreach $tuple (@ips) {
  48 + $binNum = $binNum.dec2bin($tuple);
  49 + }
  50 + $BigNum = bin2dec($binNum);
  51 + return ($BigNum);
  52 +}
  53 +
  54 +####
  55 +sub dec2bin
  56 +{
  57 + my $str = unpack("B32", pack("N", shift));
  58 + $str =~ s/^0+(?=\d)//;
  59 + my $RetStr = "";
  60 + for ($i=0; $i< 8 - length($str); $i++) {
  61 + $RetStr=$RetStr."0";
  62 + }
  63 + $RetStr = $RetStr.$str;
  64 + return $RetStr;
  65 +}
  66 +
  67 +####
  68 +sub bin2dec
  69 +{
  70 + return unpack("N", pack("B32", substr("0" x 32 . shift, -32)));
  71 +}
  72 +
  73 +####
  74 +sub long2ip
  75 +{
  76 + my $binNum = dec2bin($_[0]);
  77 + my $ipNum = "";
  78 + my $i;
  79 + my $ln;
  80 + if ( ($ln = length($binNum)) < 32) {
  81 + my $Pad = "";
  82 + for ($p=0; $p<32-$ln; $p++) {
  83 + $Pad.="0";
  84 + }
  85 + $binNum=$Pad.$binNum;
  86 + }
  87 + for ($i=0; $i<32; $i+=8) {
  88 + my $tuple = substr($binNum, $i, 8);
  89 + my $des = bin2dec($tuple);
  90 + if ($ipNum eq "") {
  91 + $ipNum = $ipNum.$des;
  92 + } else {
  93 + $ipNum = $ipNum.".".$des;
  94 + }
  95 + }
  96 + return ($ipNum);
  97 +}
  98 +
  99 +###
  100 +#Main Program starts here
  101 +###
  102 +
  103 +$|=1;
  104 +
  105 +my $MAX_PROCESSES = 10;
  106 +my $pm=new Parallel::ForkManager($MAX_PROCESSES);
  107 +
  108 +my $subnet=@ARGV[0];
  109 +my $ns=@ARGV[1];
  110 +my $mode=2;
  111 +
  112 +##get the real length of the range
  113 +my ($StartIP, $StopIP) = split (/-/,$subnet);
  114 +my $Start=ip2long($StartIP);
  115 +my $Stop=ip2long($StopIP)+1;
  116 +my $Dif=$Stop-$Start;
  117 +
  118 +if ($StartIP eq $StopIP){$Dif=1;}
  119 +
  120 +## load it up
  121 +for (my $z=0; $z < $Dif; $z++) {
  122 + $Longval=$Start+$z;
  123 + $NewIP = long2ip($Longval);
  124 + push @stuff,$NewIP;
  125 +}
  126 +
  127 +## fire away!
  128 +foreach $thing (@stuff){
  129 + my $pid = $pm->start("id: $thing") and next;
  130 + do_work($thing);
  131 + $pm->finish();
  132 +}
  133 +$pm->wait_all_children();
  134 +
  135 +
  136 +
  137 +
217 qtrace.pl
... ... @@ -0,0 +1,217 @@
  1 +#!/usr/bin/perl
  2 +
  3 +## REQUIREMENTS:
  4 +## NB! NB! NB! hping-1s (must be recompiled hping with setuid support,1sec timeout - setuid)
  5 +## Note: remember to allow icmp type 11 into your network!
  6 +##
  7 +
  8 +###
  9 +#Sub Routines
  10 +###
  11 +
  12 +###############
  13 +sub ip2long
  14 +{
  15 + my @ips = split (/\./, $_[0]);
  16 + my $binNum = "";
  17 + foreach $tuple (@ips) {
  18 + $binNum = $binNum.dec2bin($tuple);
  19 + }
  20 + $BigNum = bin2dec($binNum);
  21 + return ($BigNum);
  22 +}
  23 +######################
  24 +sub dec2bin
  25 +{
  26 + my $str = unpack("B32", pack("N", shift));
  27 + $str =~ s/^0+(?=\d)//;
  28 + my $RetStr = "";
  29 + for ($i=0; $i< 8 - length($str); $i++) {
  30 + $RetStr=$RetStr."0";
  31 + }
  32 + $RetStr = $RetStr.$str;
  33 + return $RetStr;
  34 +}
  35 +########################
  36 +sub bin2dec
  37 +{
  38 + return unpack("N", pack("B32", substr("0" x 32 . shift, -32)));
  39 +}
  40 +########################
  41 +sub findnet
  42 +{
  43 + $classc = "";
  44 + ($iptouse) = @_;
  45 + if (!($iptouse =~ /127.0.0.1/))
  46 + {
  47 + @splitter=split(/\./,$iptouse);
  48 + $classc=@splitter[0].".".@splitter[1].".".@splitter[2];
  49 + }
  50 + return ($classc);
  51 +} # findnet
  52 +########################
  53 +sub rampup{   # hping toward $passed, lowering the TTL from the start value until an intermediate hop (not $passed) answers; returns (ttl, hop address)
  54 + ($passed,$ttl,$top)=@_;
  55 + my $flag=0;
  56 + my $i=$ttl;
  57 + if ($ttl==0){$i=$top;}
  58 + while (($flag==0) || ($i<1)){
  59 + my @res=`hping-1s -2 -t $i $passed -n -c 2 -p 53 2>&1`;
  60 + foreach my $line (@res){
  61 + if ($line =~ /TTL/){
  62 + ($crap,$want)=split(/=/,$line);
  63 + $want =~ s/ //g; chomp $want;
  64 + if ($want ne $passed) {
  65 + return ($i,$want);
  66 + }
  67 + }
  68 + }
  69 + $i--;
  70 + }
  71 +return 0;
  72 +}
  73 +# ------------------------
  74 +
  75 +###
  76 +#Main Program starts here
  77 +###
  78 +$|=1;
  79 +
  80 + if($#ARGV<1){die "qtrace.pl <inputfile_with_ips> <outputfile>\n"; }
  81 + $file = @ARGV[0];
  82 + $deel="32";
  83 + $acc="2";
  84 + $outfile = @ARGV[1];
  85 +
  86 +open (IN,"$file") || die "Cant open input file please check\n";
  87 +
  88 + ##command line mode
  89 + open (IN,"$file") || die "Cant open the IP file\n";
  90 + while (<IN>){
  91 + chomp;
  92 + if ($_ !~ /\./){print "$_ Not an IP number $_\n";}
  93 + else {push @IPS,$_;}
  94 + }
  95 +close (IN);
  96 +
  97 +##ok rest is pretty generic..
  98 +
  99 +#check for usage problems
  100 +if (($deel !=4 ) && ($deel != 8) && ($deel != 16) && ($deel != 32) && ($deel != 64)){
  101 + die "Duh - i said 4,8,16,32 or 64!!\n";
  102 +}
  103 +if (($acc > 4) || ($acc<0)) {
  104 + die "Duh - accuracy is 0-4! Go away! LEave! Shoo!!\n";
  105 +}
  106 +
  107 +#first ramp up...
  108 +foreach $ip (@IPS){
  109 + #defaults
  110 + $lowerbound=&findnet($ip).".0";
  111 + $upperbound=&findnet($ip).".255";
  112 +
  113 + #check the file if our IP falls within a range we already had
  114 + if (open (NETS,"$outfile")){
  115 + $exitflag=0;
  116 + while (<NETS>){
  117 + chomp;
  118 + #for wrapper..
  119 + $_ =~ s/[\>\<\#]//g;
  120 + ($startip,$endip)=split(/\-/,$_);
  121 + $startiplong = ip2long($startip);
  122 + $endiplong = ip2long($endip);
  123 + $ouriplong= ip2long($ip);
  124 +
  125 + if (($startiplong <= $ouriplong) && ($endiplong >= $ouriplong)){
  126 + $exitflag=1;
  127 + }
  128 +
  129 + }
  130 + }
  131 +
  132 + ##it doesn't..we have to test..
  133 + if ($exitflag==0){
  134 +
  135 + $thing=&findnet($ip);
  136 + ($rampup,$duh)=rampup("$thing.1",0,25);
  137 + print "Done ramping - [$rampup]\n";
  138 + #### go down from here.
  139 + my ($crap,$crap,$crap,$want)=split(/\./,$ip);
  140 +
  141 + $count=0;
  142 + for ($i = $deel*int($want/$deel); $i >= 0; $i=$i-$deel){
  143 +
  144 + $value=$i+1;
  145 + $totrace=&findnet($ip).".".$value;
  146 +
  147 + $pieceres="";
  148 + for (1..3){
  149 + ($duh,$lh)=rampup($totrace,$rampup+$acc,0);
  150 + chomp $lh;
  151 + $pieceres=$pieceres.$lh." ";
  152 + }
  153 +
  154 + @allres[$count]=$pieceres;
  155 +
  156 + if ($count > 0){
  157 + @one=sort(split(/ /,@allres[$count]));
  158 + @two=sort(split(/ /,@allres[$count-1]));
  159 +
  160 + $neqsum=0;
  161 + for (0..2){
  162 + if (@one[$_] ne @two[$_]) {$neqsum++;}
  163 + }
  164 + if ($neqsum >= 3){
  165 + $boundary=&findnet($ip).".".($i+$deel);
  166 + $lowerbound=$boundary;
  167 + last;
  168 + }
  169 + }
  170 + $count++;
  171 + }
  172 +
  173 + print "$ip - lower boundary is $lowerbound\n";
  174 +
  175 + ## find upper boundary
  176 +
  177 + $count=1;
  178 + for ($i = $deel*(1+(int($want/$deel))); $i < 256; $i=$i+$deel){
  179 +
  180 + $value=$i+1;
  181 + $totrace=&findnet($ip).".".$value;
  182 +
  183 + $pieceres="";
  184 + for (1..3){
  185 + ($duh,$lh)=rampup($totrace,$rampup+$acc,25);
  186 + chomp $lh;
  187 + $pieceres=$pieceres.$lh." ";
  188 + }
  189 +
  190 + @allres[$count]=$pieceres;
  191 +
  192 + #we can test anyhow..we have boundary from previous step
  193 + @one=sort(split(/ /,@allres[$count]));
  194 + @two=sort(split(/ /,@allres[$count-1]));
  195 +
  196 + $neqsum=0;
  197 + for (0..2){
  198 + if (@one[$_] ne @two[$_]) {$neqsum++;}
  199 + }
  200 + if ($neqsum >= 3){
  201 + $boundary=&findnet($ip).".".($i);
  202 + $upperbound=$boundary;
  203 + last;
  204 + }
  205 + $count++;
  206 + }
  207 +
  208 + print "$ip - upper bound is $upperbound\n\n";
  209 +
  210 + open (OUT,"+>>$outfile") || die "Cant create output file\n";
  211 + print OUT "$lowerbound\-$upperbound\n";
  212 + close (OUT);
  213 + }
  214 +}
  215 +close (OUT);
  216 +print("Sleeping 10\n");
  217 +print("close 2\n");
302 readme.txt
... ... @@ -0,0 +1,302 @@
  1 +!!!Please read carefully through this readme file, as there are various changes one will have
  2 +!!!to make to get the scripts to work correctly on your operating system.
  3 +
  4 +
  5 +#########
  6 +#BiLE.pl#
  7 +#########
  8 +
  9 +The Bi-directional Link Extractor. BiLE leans on Google and HTTrack to
  10 +automate the collection of links to and from the target site, and then
  11 +applies a simple statistical weighing algorithm to deduce which Web sites
  12 +have the strongest "relationships" with the target site.
  13 +
  14 +We run BiLE.pl against the target Web site by simply specifying the Website
  15 +address and a name for the output file.
  16 +