Merge pull request #134 from PidgeyL/master
Several bugfixes and minor changes
adulau committed Nov 7, 2016
2 parents 734f86f + 7e852c9 commit 571a410
Showing 17 changed files with 228 additions and 367 deletions.
12 changes: 0 additions & 12 deletions etc/configuration.ini.sample
@@ -12,18 +12,6 @@ DB: cvedb
Tmpdir: ./tmp/
[FulltextIndex]
Indexdir: ./indexdir/
[Sources]
CVE: https://static.nvd.nist.gov/feeds/xml/cve/
CPE: https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml
CWE: http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip
d2sec: http://www.d2sec.com/exploits/elliot.xml
vFeed: http://www.toolswatch.org/vfeed/vfeed.db.tgz
vFeedStatus: http://www.toolswatch.org/update.dat
Vendor: https://nvd.nist.gov/download/vendorstatements.xml
CAPEC: http://capec.mitre.org/data/xml/capec_v2.6.xml
MSBULLETIN: http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx
Ref: https://cve.mitre.org/data/refs/refmap/allrefmaps.zip
exploitdb: https://github.com/offensive-security/exploit-database/raw/master/files.csv
[Webserver]
Host: 127.0.0.1
Port: 5000
10 changes: 10 additions & 0 deletions etc/sources.ini.sample
@@ -0,0 +1,10 @@
[Sources]
CVE: https://static.nvd.nist.gov/feeds/xml/cve/
CPE: https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml.zip
CWE: http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip
d2sec: http://www.d2sec.com/exploits/elliot.xml
Vendor: https://nvd.nist.gov/download/vendorstatements.xml.gz
CAPEC: http://capec.mitre.org/data/xml/capec_v2.6.xml
MSBULLETIN: http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx
Ref: https://cve.mitre.org/data/refs/refmap/allrefmaps.zip
exploitdb: https://github.com/offensive-security/exploit-database/raw/master/files.csv
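
The feed URLs now live in their own etc/sources.ini instead of the main configuration.ini, and lib/Config.py (next diff) falls back to a built-in `sources` dict when the file or an entry is missing. A minimal sketch of how such a file can be read with configparser — the helper name and the trimmed fallback table below are illustrative, not part of this commit:

import configparser

# Illustrative fallback table; the authoritative copy is the `sources`
# dict added to lib/Config.py in this commit.
FALLBACK_SOURCES = {
    'cve': "https://static.nvd.nist.gov/feeds/xml/cve/",
    'cwe': "http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip",
}

def feed_url(source, ini_path="./etc/sources.ini"):
    parser = configparser.ConfigParser()
    parser.read(ini_path)              # a missing file is silently ignored
    if parser.has_option("Sources", source):
        return parser.get("Sources", source)
    return FALLBACK_SOURCES.get(source, "")

# feed_url('cve') -> "https://static.nvd.nist.gov/feeds/xml/cve/"
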
161 changes: 54 additions & 107 deletions lib/Config.py
@@ -16,13 +16,15 @@
import pymongo
import redis

import re
import datetime
import bz2
import configparser
import datetime
import gzip
import re
import urllib.parse
import urllib.request as req
import zipfile
from io import BytesIO
import gzip

class Configuration():
ConfigParser = configparser.ConfigParser()
@@ -35,33 +37,31 @@ class Configuration():
'mongoUsername': '', 'mongoPassword': '',
'flaskHost': "127.0.0.1", 'flaskPort': 5000,
'flaskDebug': True, 'pageLength': 50,
'loginRequired': False,
'loginRequired': False, 'listLogin': True,
'ssl': False, 'sslCertificate': "./ssl/cve-search.crt",
'sslKey': "./ssl/cve-search.crt",
'CVEStartYear': 2002,
'vFeedurl': "http://www.toolswatch.org/vfeed/vfeed.db.tgz",
'vFeedstatus': "http://www.toolswatch.org/update.dat",
'cvedict': "https://static.nvd.nist.gov/feeds/xml/cve/",
'cpedict': "https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml",
'cwedict': "http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip",
'd2sec': "http://www.d2sec.com/exploits/elliot.xml",
'vendor': "https://nvd.nist.gov/download/vendorstatements.xml.gz",
'capec': "http://capec.mitre.org/data/xml/capec_v2.6.xml",
'msbulletin': "http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx",
'ref': "https://cve.mitre.org/data/refs/refmap/allrefmaps.zip",
'exploitdb': "https://github.com/offensive-security/exploit-database/raw/master/files.csv",
'logging': True, 'logfile': "./log/cve-search.log",
'maxLogSize': '100MB', 'backlog': 5,
'Indexdir': './indexdir', 'updatelogfile': './log/update.log',
'Tmpdir': './tmp',
'includeCapec': True, 'includeD2Sec': True,
'includeVFeed': True, 'includeVendor': True,
'includeCWE': True,
'http_proxy': '',
'plugin_load': './etc/plugins.txt',
'plugin_config': './etc/plugins.ini',
'auth_load': './etc/auth.txt'
}
sources={'cve': "https://static.nvd.nist.gov/feeds/xml/cve/",
'cpe': "https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml",
'cwe': "http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip",
'd2sec': "http://www.d2sec.com/exploits/elliot.xml",
'vendor': "https://nvd.nist.gov/download/vendorstatements.xml.gz",
'capec': "http://capec.mitre.org/data/xml/capec_v2.6.xml",
'msbulletin': "http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx",
'ref': "https://cve.mitre.org/data/refs/refmap/allrefmaps.zip",
'exploitdb': "https://github.com/offensive-security/exploit-database/raw/master/files.csv",
'includecve': True, 'includecapec': True, 'includemsbulletin': True,
'includecpe': True, 'included2sec': True, 'includeref': True,
'includecwe': True, 'includevendor': True, 'includeexploitdb': True}

@classmethod
def readSetting(cls, section, item, default):
@@ -163,6 +163,12 @@ def getPageLength(cls):
def loginRequired(cls):
return cls.readSetting("Webserver", "LoginRequired", cls.default['loginRequired'])


@classmethod
def listLoginRequired(cls):
return cls.readSetting("Webserver", "ListLoginRequired", cls.default['listLogin'])


@classmethod
def getAuthLoadSettings(cls):
return cls.toPath(cls.readSetting("Webserver", "authSettings", cls.default['auth_load']))
@@ -191,50 +197,6 @@ def getCVEStartYear(cls):
score = cls.default['CVEStartYear']
return cls.readSetting("CVE", "StartYear", cls.default['CVEStartYear'])

# Sources
@classmethod
def getvFeedURL(cls):
return cls.readSetting("Sources", "vFeed", cls.default['vFeedurl'])

@classmethod
def getvFeedStatus(cls):
return cls.readSetting("Sources", "vFeedStatus", cls.default['vFeedstatus'])

@classmethod
def getRefURL(cls):
return cls.readSetting("Sources", "Ref", cls.default['ref'])

@classmethod
def getCVEDict(cls):
return cls.readSetting("Sources", "CVE", cls.default['cvedict'])

@classmethod
def getCPEDict(cls):
return cls.readSetting("Sources", "CPE", cls.default['cpedict'])

@classmethod
def getCWEDict(cls):
return cls.readSetting("Sources", "CWE", cls.default['cwedict'])

@classmethod
def getd2secDict(cls):
return cls.readSetting("Sources", "d2sec", cls.default['d2sec'])

@classmethod
def getVendorDict(cls):
return cls.readSetting("Sources", "Vendor", cls.default['vendor'])

@classmethod
def getCAPECDict(cls):
return cls.readSetting("Sources", "CAPEC", cls.default['capec'])

@classmethod
def getMSBULLETINDict(cls):
return cls.readSetting("Sources", "MSBULLETIN", cls.default['msbulletin'])

@classmethod
def getexploitdbDict(cls):
return cls.readSetting("Sources", "exploitdb", cls.default['exploitdb'])

# Logging
@classmethod
@@ -291,63 +253,48 @@ def getTmpdir(cls):
def getIndexdir(cls):
return cls.toPath(cls.readSetting("FulltextIndex", "Indexdir", cls.default['Indexdir']))

# Enabled Feeds
# Http Proxy
@classmethod
def includesCapec(cls):
return cls.readSetting("EnabledFeeds", "capec", cls.default['includeCapec'])
def getProxy(cls):
return cls.readSetting("Proxy", "http", cls.default['http_proxy'])

@classmethod
def includesVFeed(cls):
return cls.readSetting("EnabledFeeds", "vFeed", cls.default['includeVFeed'])
def getFile(cls, getfile):
if cls.getProxy():
proxy = req.ProxyHandler({'http': cls.getProxy(), 'https': cls.getProxy()})
auth = req.HTTPBasicAuthHandler()
opener = req.build_opener(proxy, auth, req.HTTPHandler)
req.install_opener(opener)
response = req.urlopen(getfile)
data = response
if 'gzip' in response.info().get('Content-Type'):
buf = BytesIO(response.read())
data = gzip.GzipFile(fileobj=buf)
elif 'bzip2' in response.info().get('Content-Type'):
data = BytesIO(bz2.decompress(response.read()))
elif 'zip' in response.info().get('Content-Type'):
fzip = zipfile.ZipFile(BytesIO(response.read()), 'r')
if len(fzip.namelist())>0:
data=BytesIO(fzip.read(fzip.namelist()[0]))
return (data, response)

@classmethod
def includesD2Sec(cls):
return cls.readSetting("EnabledFeeds", "d2sec", cls.default['includeD2Sec'])

# Feeds (NEW)
@classmethod
def includesVendor(cls):
return cls.readSetting("EnabledFeeds", "vendor", cls.default['includeVendor'])
def getFeedData(cls, source):
source = cls.getFeedURL(source)
return cls.getFile(source) if source else None

@classmethod
def includesCWE(cls):
return cls.readSetting("EnabledFeeds", "CWE", cls.default['includeCWE'])
def getFeedURL(cls, source):
cls.ConfigParser.clear()
cls.ConfigParser.read(os.path.join(runPath, "../etc/sources.ini"))
return cls.readSetting("Sources", source, cls.sources.get(source, ""))

@classmethod
def includesFeed(cls, feed):
if feed == 'capec' and not cls.includesCapec():
return False
elif feed == 'vfeed' and not cls.includesVFeed():
return False
elif feed == 'd2sec' and not cls.includesD2Sec():
return False
elif feed == 'vendor' and not cls.includesVendor():
return False
elif feed == 'cwe' and not cls.includesCWE():
return False
else:
return True

# Http Proxy
@classmethod
def getProxy(cls):
return cls.readSetting("Proxy", "http", cls.default['http_proxy'])
return cls.readSetting("EnabledFeeds", feed, cls.sources.get('include'+feed, False))

@classmethod
def getFile(cls, getfile, compressed=False):
if cls.getProxy():
proxy = req.ProxyHandler({'http': cls.getProxy(), 'https': cls.getProxy()})
auth = req.HTTPBasicAuthHandler()
opener = req.build_opener(proxy, auth, req.HTTPHandler)
req.install_opener(opener)
if not compressed:
return req.urlopen(getfile)
else:
response = req.urlopen(getfile + '.gz')
data = None
if 'gzip' in response.info().get('Content-Type'):
buf = BytesIO(response.read())
data = gzip.GzipFile(fileobj=buf)
return (data, response)

# Plugins
@classmethod
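
With this change the dozen per-source getters (getCVEDict, getCPEDict, getCAPECDict, ...) collapse into the generic getFeedURL()/getFeedData() pair, and getFile() now inspects the response Content-Type to transparently unwrap gzip, bzip2 and zip payloads, returning a (data, response) tuple. A hedged usage sketch of the new API as the sbin/ scripts call it — the import path follows the repository layout, the variable names are illustrative:

from lib.Config import Configuration

# Feeds are toggled per source; includesFeed() consults the EnabledFeeds
# section and falls back to the include* defaults in the `sources` dict.
if Configuration.includesFeed('cwe'):
    # getFeedData() resolves the URL via getFeedURL('cwe') and downloads it;
    # `data` arrives already unwrapped because getFile() keys the gzip/bzip2/zip
    # handling off the response Content-Type.
    (data, response) = Configuration.getFeedData('cwe')
    xml_bytes = data.read()

Because decompression now keys off the Content-Type header rather than a compressed=True argument, callers no longer need to know whether a feed ships as plain XML, gzip, bzip2 or zip.
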
10 changes: 5 additions & 5 deletions sbin/db_mgmt.py
@@ -38,7 +38,7 @@

# init parts of the file names to enable looped file download
file_prefix = "nvdcve-2.0-"
file_suffix = ".xml"
file_suffix = ".xml.gz"
file_mod = "modified"
file_rec = "recent"

@@ -195,9 +195,9 @@ def endElement(self, name):
# get the 'modified' file
getfile = file_prefix + file_mod + file_suffix
try:
(f, r) = Configuration.getFile(Configuration.getCVEDict() + getfile, compressed = True)
(f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("cve") + getfile))
i = db.getInfo("cve")
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
if i is not None:
@@ -226,9 +226,9 @@ def endElement(self, name):
# get the 'recent' file
getfile = file_prefix + file_rec + file_suffix
try:
(f, r) = Configuration.getFile(Configuration.getCVEDict() + getfile, compressed = True)
(f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("cve") + getfile))
parser = make_parser()
ch = CVEHandler()
parser.setContentHandler(ch)
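
Because the NVD files are now requested with an explicit .xml.gz suffix, getFile() sees a gzip Content-Type and hands the SAX parser an already-decompressed stream, which is why the old compressed=True flag disappears. A hedged sketch of the same pattern applied to the per-year feeds (the yearly loop sits outside this diff hunk and is an assumption here; names follow the file_prefix/file_suffix variables shown above):

import sys
import datetime
from lib.Config import Configuration

file_prefix = "nvdcve-2.0-"
file_suffix = ".xml.gz"

for year in range(int(Configuration.getCVEStartYear()), datetime.datetime.now().year + 1):
    getfile = file_prefix + str(year) + file_suffix
    try:
        (f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
    except Exception:
        sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"
                 % (Configuration.getFeedURL('cve') + getfile))
    # f is a decompressed file-like object ready for the SAX parser;
    # r carries the HTTP response headers (e.g. last-modified).
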
10 changes: 4 additions & 6 deletions sbin/db_mgmt_capec.py
@@ -4,6 +4,7 @@
#
# Imported in cvedb in the collection named capec.
#
# Copyright (c) 2016 Pieter-Jan Moreels - pieterjan.moreels@gmail.com

# Imports
import os
@@ -156,20 +157,17 @@ def endElement(self, name):
if name == 'capec:Attack_Pattern_Catalog':
self.Attack_Pattern_Catalog_tag = False

# dictionary
capecurl = Configuration.getCAPECDict()

# make parser
parser = make_parser()
ch = CapecHandler()
parser.setContentHandler(ch)
# check modification date
try:
f = Configuration.getFile(capecurl)
(f, r) = Configuration.getFeedData('capec')
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(capecurl))
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("capec")))
i = db.getLastModified('capec')
last_modified = parse_datetime(f.headers['last-modified'], ignoretz=True)
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
if i is not None:
if last_modified == i:
print("Not modified")
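
Since getFeedData() returns a (data, response) pair, the last-modified timestamp must now be read from the HTTP response r rather than from the data object f; the CAPEC script above and the CPE dictionary script below both adopt this. A two-line reminder of the calling convention (the dateutil import path is an assumption about how these scripts obtain parse_datetime):

from dateutil.parser import parse as parse_datetime   # assumed import path
from lib.Config import Configuration

(f, r) = Configuration.getFeedData('capec')           # f: feed data, r: HTTP response
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
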
11 changes: 4 additions & 7 deletions sbin/db_mgmt_cpe_dictionary.py
@@ -14,7 +14,7 @@
#
# Copyright (c) 2012 Wim Remes
# Copyright (c) 2012-2014 Alexandre Dulaunoy - a@foo.be
# Copyright (c) 2014-2015 Pieter-Jan Moreels - pieterjan.moreels@gmail.com
# Copyright (c) 2014-2016 Pieter-Jan Moreels - pieterjan.moreels@gmail.com

# Imports
import os
@@ -70,20 +70,17 @@ def endElement(self, name):
self.referencetag = False
self.href = None

# dict
cpedict = Configuration.getCPEDict()

# make parser
parser = make_parser()
ch = CPEHandler()
parser.setContentHandler(ch)
# check modification date
try:
f = Configuration.getFile(cpedict)
(f, r) = Configuration.getFeedData('cpe')
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(cpedict))
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("cpe")))
i = db.getLastModified('cpe')
last_modified = parse_datetime(f.headers['last-modified'], ignoretz=True)
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
if i is not None:
if last_modified == i:
print("Not modified")
