Skip to content

Commit

Permalink
Updated list of CVEs, pushing update script
Browse files Browse the repository at this point in the history
  • Loading branch information
p0dalirius committed Dec 5, 2022
1 parent be00d0d commit b7f690b
Show file tree
Hide file tree
Showing 4 changed files with 525 additions and 12 deletions.
28 changes: 16 additions & 12 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,19 @@
## Features

- [x] Multithreaded workers to search for Apache tomcat servers.
- [x] Multiple target source possible:
- [x] Retrieving list of computers from a Windows domain through an LDAP query to use them as a list of targets.
- [x] Reading targets line by line from a file.
- [x] Reading individual targets (IP/DNS/CIDR) from `-tt/--target` option.
- [x] Multiple target sources accepted:
+ [x] Retrieving list of computers from a Windows domain through an LDAP query to use them as a list of targets.
+ [x] Reading targets line by line from a file.
+ [x] Reading individual targets (IP/DNS/CIDR) from `-tt/--target` option.
- [x] Custom list of ports to test.
- [x] Tests for `/manager/html` access and default credentials.
- [x] Tests for `/manager/html` accessibility.
- [x] Tests for default credentials to access the Tomcat Manager.
- [x] List the CVEs of each version with the `--list-cves` option


## Installation

You can now install it from pypi (latest version is <img alt="PyPI" src="https://img.shields.io/pypi/v/apachetomcatscanner">) with this command:
You can now install it from PyPI (latest version is <img alt="PyPI" src="https://img.shields.io/pypi/v/apachetomcatscanner">) with this command:

```
sudo python3 -m pip install apachetomcatscanner
Expand All @@ -34,15 +35,18 @@ sudo python3 -m pip install apachetomcatscanner
## Usage

```
Apache Tomcat Scanner v2.3.4 - by @podalirius_
$ ./ApacheTomcatScanner.py -h
Apache Tomcat Scanner v2.3.5 - by @podalirius_
usage: ApacheTomcatScanner.py [-h] [-v] [--debug] [-C] [-T THREADS] [-s] [--only-http] [--only-https] [--no-check-certificate] [--export-xlsx EXPORT_XLSX] [--export-json EXPORT_JSON] [--export-sqlite EXPORT_SQLITE] [-PI PROXY_IP]
[-PP PROXY_PORT] [-rt REQUEST_TIMEOUT] [--tomcat-username TOMCAT_USERNAME] [--tomcat-usernames-file TOMCAT_USERNAMES_FILE] [--tomcat-password TOMCAT_PASSWORD]
[--tomcat-passwords-file TOMCAT_PASSWORDS_FILE] [-tf TARGETS_FILE] [-tt TARGET] [-tp TARGET_PORTS] [-ad AUTH_DOMAIN] [-ai AUTH_DC_IP] [-au AUTH_USER] [-ap AUTH_PASSWORD] [-ah AUTH_HASHES] [--ldaps]
usage: ApacheTomcatScanner.py [-h] [-v] [--debug] [-C] [-T THREADS] [-s] [--only-http] [--only-https] [--no-check-certificate] [--export-xlsx EXPORT_XLSX]
[--export-json EXPORT_JSON] [--export-sqlite EXPORT_SQLITE] [-PI PROXY_IP] [-PP PROXY_PORT] [-rt REQUEST_TIMEOUT]
[--tomcat-username TOMCAT_USERNAME] [--tomcat-usernames-file TOMCAT_USERNAMES_FILE] [--tomcat-password TOMCAT_PASSWORD]
[--tomcat-passwords-file TOMCAT_PASSWORDS_FILE] [-tf TARGETS_FILE] [-tt TARGET] [-tp TARGET_PORTS] [-ad AUTH_DOMAIN] [-ai AUTH_DC_IP]
[-au AUTH_USER] [-ap AUTH_PASSWORD] [-ah AUTH_HASHES] [--ldaps] [--subnets]
A python script to scan for Apache Tomcat server vulnerabilities.
optional arguments:
options:
-h, --help show this help message and exit
-v, --verbose Verbose mode. (default: False)
--debug Debug mode, for huge verbosity. (default: False)
Expand Down Expand Up @@ -97,7 +101,7 @@ Targets:
-ah AUTH_HASHES, --auth-hashes AUTH_HASHES
LM:NT hashes to pass the hash for this user.
--ldaps Use LDAPS (default: False)
--subnets Get all subnets from the domain and use them as targets (default: False)
```

## Example
Expand Down
145 changes: 145 additions & 0 deletions apachetomcatscanner/data/update_db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File name : update_db.py
# Author : Podalirius (@podalirius_)
# Date created : 5 Dec 2022


import json
import os
import requests
from bs4 import BeautifulSoup
import glob


def parse_vulns(vulnerabilities_in_this_ver, Version, Language, Update, Edition, CVES):
    """Scrape one cvedetails.com vulnerability-list page and merge its CVEs into CVES.

    Parameters
    ----------
    vulnerabilities_in_this_ver : str
        Site-relative URL of the "vulnerabilities in this version" page.
    Version, Language, Update, Edition : str
        Apache Tomcat version descriptors of the row being processed
        ('*' means "any" on cvedetails.com).
    CVES : dict
        Accumulator mapping CVE id -> parsed record; updated in place.
    """
    r = requests.get("https://www.cvedetails.com/%s" % vulnerabilities_in_this_ver)
    soup = BeautifulSoup(r.content, 'lxml')

    table = soup.find('table', attrs={"id": "vulnslisttable"})
    for tr in table.findAll("tr"):
        tds = tr.findAll('td')
        # Data rows of vulnslisttable carry exactly 15 cells:
        # 0:#  1:CVE ID  2:CWE ID  3:# of Exploits  4:Vulnerability Type(s)
        # 5:Publish Date  6:Update Date  7:Score  8:Gained Access Level
        # 9:Access  10:Complexity  11:Authentication  12:Conf.  13:Integ.  14:Avail.
        # -- TODO(review): confirm against the live table layout.
        if len(tds) != 15:
            continue

        cve_id = tds[1].text.strip()
        print(" [>] Parsing %s" % cve_id)
        cve_id_link = "https://www.cvedetails.com" + tds[1].find('a')['href']

        cwe_id = tds[2].text.strip()
        cwe_id_link = None
        if tds[2].find('a') is not None:
            cwe_id_link = "https://www.cvedetails.com" + tds[2].find('a')['href']

        entry = CVES.setdefault(cve_id, {})

        cve = entry.setdefault("cve", {})
        cve["id"] = cve_id
        cve["year"] = int(cve_id.split('-')[1])
        cve["vuln_type"] = tds[4].text.strip()
        cve["publish_date"] = tds[5].text.strip()
        cve["update_date"] = tds[6].text.strip()
        # Previously scraped but silently discarded; stored now (extra JSON
        # keys are backward compatible for consumers).
        cve["cwe_id"] = cwe_id
        cve["cwe_id_link"] = cwe_id_link
        cve["number_of_exploits"] = tds[3].text.strip()

        cvss = entry.setdefault("cvss", {})
        cvss["score"] = tds[7].text.strip()

        # Map the numeric CVSS score to a qualitative level.
        # BUGFIX: a score of exactly 10 previously matched no branch and was
        # reported as "None"; it now maps to "Critical". A non-numeric score
        # no longer crashes the run: it is treated as 0 / "None".
        try:
            score = float(cvss["score"])
        except ValueError:
            score = 0.0
        if score <= 0:
            criticity = "None"
        elif score < 4:
            criticity = "Low"
        elif score < 7:
            criticity = "Medium"
        elif score < 9:
            criticity = "High"
        else:
            criticity = "Critical"
        cvss["criticity"] = criticity

        cvss["gained_access_level"] = tds[8].text.strip()
        cvss["access"] = tds[9].text.strip()
        cvss["complexity"] = tds[10].text.strip()
        # BUGFIX: the original read tds[8] (Gained Access Level) for all
        # three impact columns; Conf./Integ./Avail. live in cells 12/13/14.
        cvss["confidentiality"] = tds[12].text.strip()
        cvss["integrity"] = tds[13].text.strip()
        # NOTE: misspelled key "availablility" kept for backward
        # compatibility with consumers of the generated JSON files.
        cvss["availablility"] = tds[14].text.strip()

        entry.setdefault("affected_versions", []).append({
            "tag": (Version + '-' + Update if Update != '*' else Version),
            "version": Version,
            "language": Language,
            "update": Update,
            "edition": Edition
        })

        references = entry.setdefault("references", [])
        references.append("https://nvd.nist.gov/vuln/detail/%s" % cve_id)

        # Second request: the per-CVE page carries the description and the
        # external reference links.
        r = requests.get(cve_id_link)
        soup = BeautifulSoup(r.content, 'lxml')

        cvedetailssummary = soup.find('div', attrs={"class": "cvedetailssummary"})
        entry["description"] = cvedetailssummary.text.strip().split('Publish Date : ')[0].strip()

        vulnrefstable = soup.find('table', attrs={"id": "vulnrefstable"})
        for ref_tr in vulnrefstable.findAll('tr'):
            for a in ref_tr.findAll('a'):
                references.append(a['href'])
        entry["references"] = list(sorted(set(references)))


if __name__ == '__main__':
    # In-memory CVE database: cve_id -> record dict (see parse_vulns).
    CVES = {}

    # Seed the database from any JSON files produced by a previous run so
    # existing records get updated instead of recreated from scratch.
    if os.path.exists("./vulnerabilities/"):
        for file in glob.glob("./vulnerabilities/*/*.json"):
            with open(file, "r") as f:
                data = json.load(f)
            CVES[data["cve"]["id"]] = data

    # The first request is only used to discover the list of result pages.
    r = requests.get("https://www.cvedetails.com/version-list/45/887/2/Apache-Tomcat.html?sha=1e26d2dc4f7319bbf6b0bf066415a3daf97151c8&order=1&trc=986")
    soup = BeautifulSoup(r.content, 'lxml')

    pagingb = soup.find('div', attrs={"id": "pagingb", "class": "paging"})
    pages = list(map(int, [a.text.strip() for a in pagingb.findAll('a')]))

    for page_number in pages:
        r = requests.get("https://www.cvedetails.com/version-list/45/887/%d/Apache-Tomcat.html?sha=1e26d2dc4f7319bbf6b0bf066415a3daf97151c8&order=1&trc=986" % page_number)
        soup = BeautifulSoup(r.content, 'lxml')

        table = soup.find('table', attrs={"class": "listtable"})
        print("[>] Parsing page %d/%d" % (page_number, pages[-1]))
        for tr in table.findAll("tr"):
            tds = tr.findAll('td')
            # Version rows carry exactly 6 cells:
            # 0:Version 1:Language 2:Update 3:Edition 4:# of vulns 5:links
            if len(tds) != 6:
                continue
            Version = tds[0].text.strip()
            Language = tds[1].text.strip()
            Update = tds[2].text.strip()
            Edition = tds[3].text.strip()
            links = tds[5].findAll('a')
            # links[0] is "version details"; links[1] is the vulnerability list.
            vulnerabilities_in_this_ver = links[1]["href"]
            # Skip the aggregate "any version / any update" row.
            if not (Version == "*" and Language == "*" and Update == "*" and Edition == "*"):
                parse_vulns(vulnerabilities_in_this_ver, Version, Language, Update, Edition, CVES)

    # Persist every record to ./vulnerabilities/<year>/<cve_id>.json.
    for cve_id, cve_data in CVES.items():
        save_path = "./vulnerabilities/%d/%s.json" % (cve_data["cve"]["year"], cve_id)
        os.makedirs(os.path.dirname(save_path), exist_ok=True)

        if not os.path.exists(save_path):
            with open(save_path, 'w') as f:
                f.write(json.dumps(cve_data, indent=4))
        else:
            # BUGFIX: the original printed "CVE-%s-%s" % (year, cve_id),
            # which rendered as e.g. "CVE-2022-CVE-2022-12345" because
            # cve_id is already the full identifier.
            print("[+] Skipping %s because it already exists." % cve_id)
Loading

0 comments on commit b7f690b

Please sign in to comment.