Merge remote-tracking branch 'pidgeyl/master'
adulau committed Nov 20, 2016
2 parents 571a410 + 7a000ec commit fb502bf
Showing 15 changed files with 329 additions and 189 deletions.
6 changes: 3 additions & 3 deletions bin/db_dump.py
@@ -21,16 +21,16 @@

argParser = argparse.ArgumentParser(description='Dump database in JSON format')
argParser.add_argument('-r', default=False, action='store_true', help='Include ranking value')
argParser.add_argument('-v', default=False, action='store_true', help='Include vfeed map')
argParser.add_argument('-v', default=False, action='store_true', help='Include vfeed map') # TODO change
argParser.add_argument('-c', default=False, action='store_true', help='Include CAPEC information')
argParser.add_argument('-l', default=False, type=int, help='Limit output to n elements (default: unlimited)')
args = argParser.parse_args()

rankinglookup = args.r
vfeedlookup = args.v
reflookup = args.v
capeclookup = args.c

l = cves.last(rankinglookup=rankinglookup, vfeedlookup=vfeedlookup, capeclookup=capeclookup)
l = cves.last(rankinglookup=rankinglookup, reflookup=reflookup, capeclookup=capeclookup)

for cveid in db.getCVEIDs(limit=args.l):
item = l.getcve(cveid=cveid)
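For readers tracking the rename, the hunk above keeps the `-v` switch but now routes it into `reflookup`. A minimal sketch of the equivalent library calls, assuming the script's usual aliases (`lib.CVEs` imported as `cves`, `lib.DatabaseLayer` as `db`); the real script also handles `-r`/`-c` and emits JSON:

```python
# Rough equivalent of `db_dump.py -v -l 10` after the rename (sketch only).
import lib.CVEs as cves
import lib.DatabaseLayer as db

l = cves.last(rankinglookup=False, reflookup=True, capeclookup=False)
for cveid in db.getCVEIDs(limit=10):
    print(l.getcve(cveid=cveid))  # each CVE now carries its refs data
```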
1 change: 0 additions & 1 deletion doc/html/Structure.html
@@ -25,7 +25,6 @@ <h2>Mongo Database Structure</h2>
<tr><td>cpeother</td> <td>CVE-Search</td> <td>CVE-Search has a script to fill this database, using the original CPE's and generating titles for them.</td></tr>
<tr><td>cwe</td> <td>NVD NIST</td> <td>Information about Common Weaknesses, as published by NIST</td></tr>
<tr><td>d2sec</td> <td>d2sec.com</td> <td>Information about CVE's, as released by d2sec</td></tr>
<tr><td>vfeed</td> <td>vFeed</td> <td>Information about CVE's, as released by vFeed</td></tr>
<tr><td>vendor</td> <td>NVD NIST</td> <td>Vendor statements, released by NIST</td></tr>
<tr><td>info</td> <td>CVE-Search</td> <td>Information about the Mongo Database updates.</td></tr>
</table>
1 change: 0 additions & 1 deletion doc/markdown/structure.md
@@ -15,7 +15,6 @@ By default, these are the official sources.
| cpeother | CVE-Search | CVE-Search has a script to fill this database, using the original CPE's and generating titles for them. |
| cwe | NVD NIST | Information about Common Weaknesses, as published by NIST |
| d2sec | d2sec.com | Information about CVE's, as released by d2sec |
| vfeed | vFeed | Information about CVE's, as released by vFeed |
| vendor | NVD NIST | Vendor statements, released by NIST |
| info | CVE-Search | Information about the Mongo Database updates. |

18 changes: 10 additions & 8 deletions etc/sources.ini.sample
@@ -1,10 +1,12 @@
[Sources]
CVE: https://static.nvd.nist.gov/feeds/xml/cve/
CPE: https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml.zip
CWE: http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip
d2sec: http://www.d2sec.com/exploits/elliot.xml
Vendor: https://nvd.nist.gov/download/vendorstatements.xml.gz
CAPEC: http://capec.mitre.org/data/xml/capec_v2.6.xml
CVE: https://static.nvd.nist.gov/feeds/xml/cve/
CPE: https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml.zip
CWE: http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip
d2sec: http://www.d2sec.com/exploits/elliot.xml
Vendor: https://nvd.nist.gov/download/vendorstatements.xml.gz
CAPEC: http://capec.mitre.org/data/xml/capec_v2.6.xml
MSBULLETIN: http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx
Ref: https://cve.mitre.org/data/refs/refmap/allrefmaps.zip
exploitdb: https://github.com/offensive-security/exploit-database/raw/master/files.csv
exploitdb: https://github.com/offensive-security/exploit-database/raw/master/files.csv
Ref: https://cve.mitre.org/data/refs/refmap/allrefmaps.zip
RPM: https://www.redhat.com/security/data/metrics/rpm-to-cve.xml
RHSA: https://www.redhat.com/security/data/oval/com.redhat.rhsa-all.xml.bz2
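The two new feeds (RPM and RHSA) are read the same way as the existing entries. A minimal, purely illustrative sketch of fetching one of them with a fallback default, similar in spirit to `Configuration.readSetting()`; the `./etc/sources.ini` path is an assumption, since this diff only shows the `.sample` file:

```python
# Illustration only: read one [Sources] entry, falling back to a default.
import configparser

config = configparser.ConfigParser()
config.read('./etc/sources.ini')  # assumed location of the copied sample

rhsa = config.get('Sources', 'rhsa',
                  fallback='https://www.redhat.com/security/data/oval/com.redhat.rhsa-all.xml.bz2')
print(rhsa)
```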
18 changes: 9 additions & 9 deletions lib/CVEs.py
@@ -25,15 +25,15 @@

class last():
def __init__(self, collection="cves", rankinglookup=False,
namelookup=False, vfeedlookup=False, capeclookup=False,
namelookup=False, capeclookup=False,
subscorelookup=False, reflookup=False):

self.collectionname = collection
self.rankinglookup = rankinglookup
self.namelookup = namelookup
self.vfeedlookup = vfeedlookup
self.capeclookup = capeclookup
self.subscorelookup = subscorelookup
self.reflookup = reflookup

self.collection = collection

@@ -66,10 +66,10 @@ def getcpe(self, cpeid=None):
if 'id' in e:
return e['title']

def getvfeed(self, cveid=None):
if not(self.vfeedlookup):
def getRefs(self, cveid=None):
if not(self.reflookup):
return cveid
e = db.getvFeed(cveid)
e = db.getRefs(cveid)
return e if e else cveid

def getcve(self, cveid=None):
@@ -92,8 +92,8 @@ def getcve(self, cveid=None):
e['vulnerable_configuration'] = vulconf
if self.rankinglookup and len(ranking) > 0:
e['ranking'] = ranking
if self.vfeedlookup:
f = self.getvfeed(cveid=cveid)
if self.reflookup:
f = self.getRefs(cveid=cveid)
if not isinstance(f, str):
g = dict(itertools.chain(e.items(), f.items()))
e = g
@@ -164,13 +164,13 @@ def __exit__(self, type, value, traceback):


def test_last():
l = last(rankinglookup=True, vfeedlookup=True, capeclookup=False)
l = last(rankinglookup=True, reflookup=True, capeclookup=False)
print (l.getcpe(cpeid="cpe:/o:google:android:2.0"))
print (l.getranking(cpeid="cpe:/o:google:android:2.0"))
print (l.get())
print (l.getcve("CVE-2015-0597"))
print (l.getcapec("85"))
l = last(rankinglookup=False, vfeedlookup=True, capeclookup=True)
l = last(rankinglookup=False, reflookup=True, capeclookup=True)
print (l.getcve("CVE-2015-0597"))
print (l.getcapec("200"))
l = last(reflookup=True)
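When `reflookup` is enabled, `getcve()` folds the matching `refs` document into the CVE record with `dict(itertools.chain(...))`, as the hunk above shows. A standalone illustration of that merge, using made-up sample documents:

```python
# Same merge pattern as getcve(); both dicts are hypothetical stand-ins
# for a CVE document and its refs document.
import itertools

e = {'id': 'CVE-2015-0597', 'summary': 'example summary'}
f = {'refmap': {'CONFIRM': ['http://example.com/advisory']}}
e = dict(itertools.chain(e.items(), f.items()))
print(e)  # CVE fields plus whatever keys the refs document carries
```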
49 changes: 27 additions & 22 deletions lib/Config.py
@@ -50,18 +50,21 @@ class Configuration():
'plugin_config': './etc/plugins.ini',
'auth_load': './etc/auth.txt'
}
sources={'cve': "https://static.nvd.nist.gov/feeds/xml/cve/",
'cpe': "https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml",
'cwe': "http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip",
'd2sec': "http://www.d2sec.com/exploits/elliot.xml",
'vendor': "https://nvd.nist.gov/download/vendorstatements.xml.gz",
'capec': "http://capec.mitre.org/data/xml/capec_v2.6.xml",
sources={'cve': "https://static.nvd.nist.gov/feeds/xml/cve/",
'cpe': "https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml",
'cwe': "http://cwe.mitre.org/data/xml/cwec_v2.8.xml.zip",
'd2sec': "http://www.d2sec.com/exploits/elliot.xml",
'vendor': "https://nvd.nist.gov/download/vendorstatements.xml.gz",
'capec': "http://capec.mitre.org/data/xml/capec_v2.6.xml",
'msbulletin': "http://download.microsoft.com/download/6/7/3/673E4349-1CA5-40B9-8879-095C72D5B49D/BulletinSearch.xlsx",
'ref': "https://cve.mitre.org/data/refs/refmap/allrefmaps.zip",
'exploitdb': "https://github.com/offensive-security/exploit-database/raw/master/files.csv",
'exploitdb': "https://github.com/offensive-security/exploit-database/raw/master/files.csv",
'ref': "https://cve.mitre.org/data/refs/refmap/allrefmaps.zip",
'rpm': "https://www.redhat.com/security/data/metrics/rpm-to-cve.xml",
'rhsa': "https://www.redhat.com/security/data/oval/com.redhat.rhsa-all.xml.bz2",
'includecve': True, 'includecapec': True, 'includemsbulletin': True,
'includecpe': True, 'included2sec': True, 'includeref': True,
'includecwe': True, 'includevendor': True, 'includeexploitdb': True}
'includecwe': True, 'includevendor': True, 'includeexploitdb': True,
'includerpm': True, 'includerhsa': True}

@classmethod
def readSetting(cls, section, item, default):
@@ -259,31 +262,33 @@ def getProxy(cls):
return cls.readSetting("Proxy", "http", cls.default['http_proxy'])

@classmethod
def getFile(cls, getfile):
def getFile(cls, getfile, unpack=True):
if cls.getProxy():
proxy = req.ProxyHandler({'http': cls.getProxy(), 'https': cls.getProxy()})
auth = req.HTTPBasicAuthHandler()
opener = req.build_opener(proxy, auth, req.HTTPHandler)
req.install_opener(opener)
response = req.urlopen(getfile)
data = response
if 'gzip' in response.info().get('Content-Type'):
buf = BytesIO(response.read())
data = gzip.GzipFile(fileobj=buf)
elif 'bzip2' in response.info().get('Content-Type'):
data = BytesIO(bz2.decompress(response.read()))
elif 'zip' in response.info().get('Content-Type'):
fzip = zipfile.ZipFile(BytesIO(response.read()), 'r')
if len(fzip.namelist())>0:
data=BytesIO(fzip.read(fzip.namelist()[0]))
# TODO: if data == text/plain; charset=utf-8, read and decode
if unpack:
if 'gzip' in response.info().get('Content-Type'):
buf = BytesIO(response.read())
data = gzip.GzipFile(fileobj=buf)
elif 'bzip2' in response.info().get('Content-Type'):
data = BytesIO(bz2.decompress(response.read()))
elif 'zip' in response.info().get('Content-Type'):
fzip = zipfile.ZipFile(BytesIO(response.read()), 'r')
if len(fzip.namelist())>0:
data=BytesIO(fzip.read(fzip.namelist()[0]))
return (data, response)


# Feeds (NEW)
# Feeds
@classmethod
def getFeedData(cls, source):
def getFeedData(cls, source, unpack=True):
source = cls.getFeedURL(source)
return cls.getFile(source) if source else None
return cls.getFile(source, unpack) if source else None

@classmethod
def getFeedURL(cls, source):
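The practical effect of the new `unpack` flag: `getFile()` and `getFeedData()` still unwrap gzip, bzip2 and zip payloads by default (based on the response's Content-Type), but callers can now opt out and keep the raw bytes. A hedged usage sketch; the choice of feed is arbitrary and a configured cve-search checkout is assumed:

```python
# Sketch of the new unpack switch in Configuration.getFeedData().
from lib.Config import Configuration

# Default: the bzip2 archive is decompressed when the server advertises it.
(data, response) = Configuration.getFeedData('rhsa')

# Opt out and handle the archive yourself.
(raw, response) = Configuration.getFeedData('rhsa', unpack=False)
```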
47 changes: 13 additions & 34 deletions lib/DatabaseLayer.py
@@ -30,7 +30,7 @@
colBLACKLIST= db['mgmt_blacklist']
colUSERS= db['mgmt_users']
colINFO= db['info']
colVFEED= db['vfeed']
colREFS= db['refs']
colRANKING= db['ranking']
colMSBULLETIN= db['ms']
colCAPEC= db['capec']
@@ -77,33 +77,11 @@ def bulkUpdate(collection, data):
bulk.find({'id': x['id']}).upsert().update({'$set': x})
bulk.execute()

def bulkvFeedUpdate(dbpath, vfeedmap):
# connect to sqlite db
con = sqlite3.connect(dbpath+'/vfeed.db')
con.text_factory = lambda x: x.decode("utf-8", "ignore")
c = con.cursor()
# loop over sqlite db and insert into vfeed collection
for vmap in progressbar(vfeedmap):
e = c.execute('SELECT * FROM %s' % vmap)
names = list(map(lambda x: x[0], e.description))
bulk = colVFEED.initialize_ordered_bulk_op()
for r in e:
try:
if vmap == 'map_redhat_bugzilla':
icveid = names.index('redhatid')
else:
icveid = names.index("cveid")
except Exception as ex:
print('Exeption in %s: %s' % (vmap, ex))
continue
mapArray={}
for i in range(0,len(r)):
if not (names[i] == "cveid"):
mapArray[str(names[i])]=str(r[i])
if not vmap=='map_redhat_bugzilla':
bulk.find({'id': r[icveid]}).upsert().update({"$set":{vmap:mapArray}})
else:
bulk.find({'map_cve_redhat.redhatid': r[icveid]}).update({"$set":{vmap:mapArray}})
def bulkRefUpdate(data):
if len(data)>0:
bulk=colREFS.initialize_ordered_bulk_op()
for x in data:
bulk.find({'id': x['id']}).upsert().update({'$set': x})
bulk.execute()

def cpeotherBulkInsert(cpeotherlist):
@@ -196,8 +174,8 @@ def getAlternativeCPE(id):
def getAlternativeCPEs():
return sanitize(colCPEOTHER.find())

def getvFeed(id):
return sanitize(colVFEED.find_one({'id': id}))
def getRefs(id):
return sanitize(colREFS.find_one({'id': id}))

def getCPEMatching(regex, fullSearch=False):
lst=list(colCPE.find({"id": {"$regex": regex}}))
@@ -213,13 +191,14 @@ def getFreeText(text):
def getSearchResults(search):
result={'data':[]}
regSearch = re.compile(re.escape(search), re.I)
# TODO: remove and replace with refs
vFeedLinks=['map_cve_ms.msid', 'map_cve_redhat.redhatid',
'map_redhat_bugzilla.redhatid', 'map_cve_ubuntu.ubuntuid',
'map_cve_suse.suseid', 'map_cve_fedora.fedoraid', 'map_cve_hp.hpid',
'map_cve_cisco.ciscoid']
links = {'n': 'Link', 'd': []}
for vLink in vFeedLinks:
links['d'].extend(sanitize(colVFEED.find({vLink: {'$in': [regSearch]}})))
links['d'].extend(sanitize(db['vfeed'].find({vLink: {'$in': [regSearch]}})))

try:
textsearch={'n': 'Text search', 'd': getFreeText(search)}
@@ -255,12 +234,12 @@ def getLastModified(collection):
def getSize(collection):
return db[collection].count()

def vFeedLinked(key, val):
cveList=[x['id'] for x in colVFEED.find({key: val})]
def vFeedLinked(key, val): # TODO: remove, and replace with refs
cveList=[x['id'] for x in colREFS.find({key: val})]
return sanitize(getCVEs(query={'id':{'$in':cveList}}))

def getDBStats():
cols=['cve', 'cpe', 'cpeOther', 'capec', 'd2sec', 'vendor', 'vfeed']
cols=['cve', 'cpe', 'cpeOther', 'capec', 'd2sec', 'vendor']
stats={x+'A': getSize(x.lower()) for x in cols}
stats['cveA']=getSize('cves')
stats.update({x+'U': getLastModified(x.lower()) for x in cols})
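The new `refs` collection is written and read through the two helpers above. A minimal sketch of the round trip; only the `id` upsert key comes from the hunk, the rest of the document shape is invented for illustration, and a running MongoDB configured for cve-search is assumed:

```python
# Hypothetical refs round trip via the new DatabaseLayer helpers.
import lib.DatabaseLayer as db

refs = [{'id': 'CVE-2015-0597',
         'refmap': {'CONFIRM': ['http://example.com/advisory']}}]  # invented shape
db.bulkRefUpdate(refs)             # upserts on 'id' into the refs collection
print(db.getRefs('CVE-2015-0597'))
```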
4 changes: 2 additions & 2 deletions sbin/db_mgmt.py
@@ -266,9 +266,9 @@ def endElement(self, name):
parser.setContentHandler(ch)
getfile = file_prefix + str(x) + file_suffix
try:
(f, r) = Configuration.getFile(Configuration.getCVEDict() + getfile, compressed = True)
(f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL('cve') + getfile))
parser.parse(f)
if args.v:
for item in ch.cves:
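The updater now builds each per-year CVE feed URL from `Configuration.getFeedURL('cve')` and relies on `getFile()`'s default unpacking instead of the old `compressed=True` argument. A sketch of the resulting URL construction; the file name pattern is an assumption, since `file_prefix`/`file_suffix` are defined outside this hunk:

```python
# Illustrative only: fetch one yearly NVD feed through the new code path.
from lib.Config import Configuration

getfile = 'nvdcve-2.0-2016.xml.gz'  # assumed naming scheme
(f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
```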
1 change: 0 additions & 1 deletion sbin/db_mgmt_create_index.py
@@ -31,7 +31,6 @@ def setIndex(col, field):
setIndex('cves', 'vulnerable_configuration')
setIndex('cves', 'Modified')
setIndex('cves', [("summary",TEXT)])
setIndex('vfeed', 'id')
setIndex('vendor', 'id')
setIndex('d2sec', 'id')
setIndex('mgmt_whitelist', 'id')