Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bug fix on pager + added HTTP proxy support. #84

Merged
merged 2 commits into from
May 20, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 18 additions & 2 deletions lib/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import datetime
import configparser
import urllib.parse

import urllib.request as req

class Configuration():
ConfigParser = configparser.ConfigParser()
Expand Down Expand Up @@ -50,7 +50,8 @@ class Configuration():
'Indexdir': './indexdir',
'includeCapec': True, 'includeD2Sec': True,
'includeVFeed': True, 'includeVendor': True,
'includeCWE': True
'includeCWE': True,
'http_proxy' : ''
}

@classmethod
Expand Down Expand Up @@ -282,3 +283,18 @@ def includesFeed(cls, feed):
return False
else:
return True

# HTTP proxy support
@classmethod
def getProxy(cls):
    """Return the configured HTTP proxy URL, or '' when no proxy is set."""
    fallback = cls.default['http_proxy']
    return cls.readSetting("Proxy", "http", fallback)

@classmethod
def getFile(cls, getfile):
    """Open the URL *getfile* and return the response object.

    When an HTTP proxy is configured (see getProxy), install an opener
    that routes both http and https traffic through it before opening
    the URL. Propagates urllib errors to the caller.
    """
    # BUG FIX: the original tested `if not cls.getProxy():`, which
    # installed the proxy handler only when NO proxy was configured
    # (with empty-string proxy URLs) and bypassed a configured proxy.
    if cls.getProxy():
        proxy = req.ProxyHandler({'http': cls.getProxy(), 'https': cls.getProxy()})
        auth = req.HTTPBasicAuthHandler()
        opener = req.build_opener(proxy, auth, req.HTTPHandler)
        # install_opener makes the proxy-aware opener the process-wide
        # default used by req.urlopen below.
        req.install_opener(opener)
    return req.urlopen(getfile)

22 changes: 12 additions & 10 deletions sbin/db_mgmt.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@

import argparse
import datetime
from urllib.request import urlopen
from xml.sax import make_parser
from xml.sax.handler import ContentHandler

Expand Down Expand Up @@ -185,12 +184,6 @@ def endElement(self, name):
self.inPUBElem = 0
self.cves[-1]['Modified'] = self.PUB

def getFile(getfile):
try:
return urlopen(Configuration.getCVEDict() + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))

if __name__ == '__main__':
# connect to the DB.
db = Configuration.getMongoConnection()
Expand All @@ -203,7 +196,10 @@ def getFile(getfile):
if args.u:
# get the 'modified' file
getfile = file_prefix + file_mod + file_suffix
f = getFile(getfile)
try:
f = Configuration.getFile(Configuration.getCVEDict() + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
i = info.find_one({'db': 'cve'})
if i is not None:
if f.headers['last-modified'] == i['last-modified']:
Expand Down Expand Up @@ -232,7 +228,10 @@ def getFile(getfile):
collection.insert(item)
# get the 'recent' file
getfile = file_prefix + file_rec + file_suffix
f = getFile(getfile)
try:
f = Configuration.getFile(Configuration.getCVEDict() + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
parser = make_parser()
ch = CVEHandler()
parser.setContentHandler(ch)
Expand Down Expand Up @@ -271,7 +270,10 @@ def getFile(getfile):
ch = CVEHandler()
parser.setContentHandler(ch)
getfile = file_prefix + str(x) + file_suffix
f = getFile(getfile)
try:
f = Configuration.getFile(Configuration.getCVEDict() + getfile)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getCVEDict() + getfile))
parser.parse(f)
if args.v:
for item in ch.cves:
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_capec.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@

from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from urllib.request import urlopen

from lib.ProgressBar import progressbar
from lib.Config import Configuration
Expand Down Expand Up @@ -168,7 +167,7 @@ def endElement(self, name):
parser.setContentHandler(ch)
# check modification date
try:
f = urlopen(capecurl)
f = Configuration.getFile(capecurl)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(capecurl))
i = info.find_one({'db': 'capec'})
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_cpe_dictionary.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@

from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from urllib.request import urlopen

from lib.ProgressBar import progressbar
from lib.Toolkit import toStringFormattedCPE
Expand Down Expand Up @@ -82,7 +81,7 @@ def endElement(self, name):
parser.setContentHandler(ch)
# check modification date
try:
f = urlopen(cpedict)
f = Configuration.getFile(cpedict)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(cpedict))
i = info.find_one({'db': 'cpe'})
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_cwe.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@

from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from urllib.request import urlopen
import argparse
import zipfile
import tempfile
Expand Down Expand Up @@ -84,7 +83,7 @@ def endElement(self, name):
parser.setContentHandler(ch)
# check modification date
try:
f = urlopen(cwedict)
f = Configuration.getFile(cwedict)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(cwedict))
lastmodified = f.headers['last-modified']
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_d2sec.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from urllib.request import urlopen
import argparse

from lib.ProgressBar import progressbar
Expand Down Expand Up @@ -102,7 +101,7 @@ def endElement(self, name):
parser.setContentHandler(ch)
# check modification date
try:
f = urlopen(d2securl)
f = Configuration.getFile(d2securl)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(d2securl))
i = info.find_one({'db': 'd2sec'})
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_vendorstatements.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@

from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from urllib.request import urlopen
import argparse

from lib.ProgressBar import progressbar
Expand Down Expand Up @@ -70,7 +69,7 @@ def endElement(self, name):
parser.setContentHandler(ch)
# check modification date
try:
f = urlopen(vendordict)
f = Configuration.getFile(vendordict)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vendordict))
i = info.find_one({'db': 'vendor'})
Expand Down
3 changes: 1 addition & 2 deletions sbin/db_mgmt_vfeed.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))

from urllib.request import urlopen
import tarfile
import shutil
import sqlite3
Expand All @@ -30,7 +29,7 @@
info = db.info
# check modification date
try:
u = urlopen(vFeedurl)
u = Configuration.getFile(vFeedurl)
except:
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl))
i = info.find_one({'db': 'vfeed'})
Expand Down
13 changes: 7 additions & 6 deletions web/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,7 @@ def markCPEs(cve):
return cve


def getFilterSettingsFromPost():
def getFilterSettingsFromPost(r):
blacklist = request.form.get('blacklistSelect')
whitelist = request.form.get('whitelistSelect')
unlisted = request.form.get('unlistedSelect')
Expand All @@ -320,8 +320,9 @@ def getFilterSettingsFromPost():
'timeTypeSelect': timeTypeSelect, 'cvssSelect': cvssSelect,
'cvss': cvss, 'rejectedSelect': rejectedSelect, "hideSeen": hideSeen}
# retrieving data
skip = r * 50
cve = filter_logic(blacklist, whitelist, unlisted, timeSelect, startDate, endDate,
timeTypeSelect, cvssSelect, cvss, rejectedSelect, hideSeen, pageLength, 0)
timeTypeSelect, cvssSelect, cvss, rejectedSelect, hideSeen, pageLength, skip)
return(settings,cve)

@login_manager.user_loader
Expand Down Expand Up @@ -349,15 +350,15 @@ def index():

@app.route('/', methods=['POST'])
def filterPost():
settings,cve = getFilterSettingsFromPost()
settings,cve = getFilterSettingsFromPost(0)
return render_template('index.html', settings=settings, cve=cve, r=0, pageLength=pageLength)


@app.route('/r/<int:r>', methods=['POST'])
def filterLast(r):
if not r:
r = 0
settings,cve = getFilterSettingsFromPost()
settings,cve = getFilterSettingsFromPost(r)
return render_template('index.html', settings=settings, cve=cve, r=r, pageLength=pageLength)

@app.route('/r/<int:r>/seen', methods=['POST'])
Expand All @@ -369,7 +370,7 @@ def seen(r):
if current_user.is_authenticated():
col = db.mgmt_seen
col.update({"user":current_user.get_id()},{"$addToSet":{"seen_cves":{"$each":seenlist}}})
settings,cve = getFilterSettingsFromPost()
settings,cve = getFilterSettingsFromPost(r)
return render_template('index.html', settings=settings, cve=cve, r=r, pageLength=pageLength)


Expand All @@ -382,7 +383,7 @@ def unseen(r):
if current_user.is_authenticated():
col = db.mgmt_seen
col.update({"user":current_user.get_id()},{"$pullAll":{"seen_cves":seenlist}})
settings,cve = getFilterSettingsFromPost()
settings,cve = getFilterSettingsFromPost(r)
return render_template('index.html', settings=settings, cve=cve, r=r, pageLength=pageLength)


Expand Down