Skip to content

Commit

Permalink
Fixing unit tests and, potentially, the root cause of the TypeError
Browse files Browse the repository at this point in the history
  • Loading branch information
andresriancho committed May 20, 2019
1 parent 5c6a287 commit 54d432d
Show file tree
Hide file tree
Showing 4 changed files with 77 additions and 59 deletions.
102 changes: 60 additions & 42 deletions w3af/core/data/db/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,9 @@ def inner_verify_has_db(self, *args, **kwds):


class HistoryItem(object):
"""Represents history item."""
"""
Represents history item
"""

_db = None
_DATA_TABLE = 'history_items'
Expand Down Expand Up @@ -90,6 +92,7 @@ class HistoryItem(object):
_latest_compression_job_end = 0

id = None
url = None
_request = None
_response = None
info = None
Expand All @@ -101,6 +104,7 @@ class HistoryItem(object):
msg = 'OK'
code = 200
time = 0.2
charset = None

history_lock = threading.RLock()
compression_lock = threading.RLock()
Expand Down Expand Up @@ -163,40 +167,45 @@ def set_request(self, req):
request = property(get_request, set_request)

@verify_has_db
def find(self, searchData, result_limit=-1, orderData=[], full=False):
def find(self, search_data, result_limit=-1, order_data=None):
"""
Make complex search.
search_data = {name: (value, operator), ...}
orderData = [(name, direction)]
order_data = [(name, direction)]
"""
order_data = order_data or []
result = []

sql = 'SELECT * FROM ' + self._DATA_TABLE
where = WhereHelper(searchData)
where = WhereHelper(search_data)
sql += where.sql()
orderby = ""

order_by = ''
#
# TODO we need to move SQL code to parent class
#
for item in orderData:
orderby += item[0] + " " + item[1] + ","
orderby = orderby[:-1]
for item in order_data:
order_by += item[0] + ' ' + item[1] + ','
order_by = order_by[:-1]

if orderby:
sql += " ORDER BY " + orderby
if order_by:
sql += ' ORDER BY ' + order_by

sql += ' LIMIT ' + str(result_limit)
try:
for row in self._db.select(sql, where.values()):
item = self.__class__()
item._load_from_row(row, full)
item._load_from_row(row)
result.append(item)
except DBException:
msg = 'You performed an invalid search. Please verify your syntax.'
raise DBException(msg)
return result

def _load_from_row(self, row, full=True):
"""Load data from row with all columns."""
def _load_from_row(self, row):
"""
Load data from row with all columns
"""
self.id = row[0]
self.url = row[1]
self.code = row[2]
Expand Down Expand Up @@ -269,7 +278,7 @@ def _load_from_trace_file_concurrent(self, _id):
#
for _ in xrange(int(1 / wait_time)):
try:
self._load_from_trace_file(_id)
return self._load_from_trace_file(_id)
except TraceReadException as e:
args = (_id, e)
msg = 'Failed to read trace file %s: "%s"'
Expand Down Expand Up @@ -364,8 +373,10 @@ def delete(self, _id=None):
pass

@verify_has_db
def load(self, _id=None, full=True, retry=True):
"""Load data from DB by ID."""
def load(self, _id=None, retry=True):
"""
Load data from DB by ID
"""
if _id is None:
_id = self.id

Expand All @@ -376,36 +387,40 @@ def load(self, _id=None, full=True, retry=True):
msg = ('An unexpected error occurred while searching for id "%s"'
' in table "%s". Original exception: "%s".')
raise DBException(msg % (_id, self._DATA_TABLE, dbe))
else:
if row is not None:
self._load_from_row(row, full)
else:
# The request/response with 'id' == id is not in the DB!
# Lets do some "error handling" and try again!

if retry:
# TODO:
# According to sqlite3 documentation this db.commit()
# might fix errors like
# https://sourceforge.net/apps/trac/w3af/ticket/164352 ,
# but it can degrade performance due to disk IO
#
self._db.commit()
self.load(_id=_id, full=full, retry=False)
else:
# This is the second time load() is called and we end up
# here, raise an exception and finish our pain.
msg = ('An internal error occurred while searching for '
'id "%s", even after commit/retry' % _id)
raise DBException(msg)

return True
if row is not None:
self._load_from_row(row)
return True

if not retry:
#
# This is the second time load() is called and we end up
# here, raise an exception and finish our pain.
#
msg = ('An internal error occurred while searching for id "%s",'
' even after commit/retry')
raise DBException(msg % _id)

#
# The request/response with _id is not in the DB!
# Lets do some error handling and try again!
#
# According to sqlite3 documentation this db.commit()
# might fix errors like [0] but it can degrade performance due
# to disk IO
#
# [0] https://sourceforge.net/apps/trac/w3af/ticket/164352 ,
#
self._db.commit()
return self.load(_id=_id, retry=False)

@verify_has_db
def read(self, _id, full=True):
"""Return item by ID."""
def read(self, _id):
"""
Return item by ID
"""
result_item = self.__class__()
result_item.load(_id, full)
result_item.load(_id)
return result_item

def save(self):
Expand Down Expand Up @@ -692,6 +707,9 @@ def clear(self):

return True

def __repr__(self):
return '<HistoryItem %s %s>' % (self.method, self.url)


def get_trace_id(trace_file):
return int(trace_file.rsplit('/')[-1].rsplit('.')[-2])
Expand Down
6 changes: 3 additions & 3 deletions w3af/core/data/db/tests/test_history.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def test_find(self):
self.assertEqual(len(h2.find([('mark', 1, '=')])), 1)
self.assertEqual(len(h2.find([('has_qs', 1, '=')])), 500)
self.assertEqual(len(h2.find([('has_qs', 1, '=')], result_limit=10)), 10)
results = h2.find([('has_qs', 1, '=')], result_limit=1, orderData=[('id', 'desc')])
results = h2.find([('has_qs', 1, '=')], result_limit=1, order_data=[('id', 'desc')])
self.assertEqual(results[0].id, 499)
search_data = [('id', find_id + 1, "<"),
('id', find_id - 1, ">")]
Expand Down Expand Up @@ -133,7 +133,7 @@ def test_save_load(self):
h2 = HistoryItem()
h2.load(i)

self.assertEqual(h1.request, h2.request)
self.assertEqual(h1.request.to_dict(), h2.request.to_dict())
self.assertEqual(h1.response.body, h2.response.body)

def test_load_not_exists(self):
Expand Down Expand Up @@ -281,6 +281,6 @@ def test_save_load_unicode_decode_error(self):
h2 = HistoryItem()
h2.load(1)

self.assertEqual(h1.request, h2.request)
self.assertEqual(h1.request.to_dict(), h2.request.to_dict())
self.assertEqual(h1.response.body, h2.response.body)
self.assertEqual(h1.request.url_object, h2.request.url_object)
2 changes: 1 addition & 1 deletion w3af/core/data/url/HTTPRequest.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def from_dict(cls, unserialized_dict):
cookies = udict['cookies']
session = udict['session']
cache = udict['cache']
timeout = socket._GLOBAL_DEFAULT_TIMEOUT if udict['timeout'] is None else udict['timeout']
timeout = socket.getdefaulttimeout() if udict['timeout'] is None else udict['timeout']
new_connection = udict['new_connection']
follow_redirects = udict['follow_redirects']
use_basic_auth = udict['use_basic_auth']
Expand Down
26 changes: 13 additions & 13 deletions w3af/core/ui/gui/httpLogTab.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,14 +324,14 @@ def find_request_response(self, widget=None, refresh=False):
"""Find entries (req/res)."""
searchText = self._searchText.get_text()
searchText = searchText.strip()
searchData = []
search_data = []
#
# Search part
#
if searchText:
likePieces = [('url', "%" + searchText + "%", 'like'),
('tag', "%" + searchText + "%", 'like')]
searchData.append((likePieces, 'OR'))
search_data.append((likePieces, 'OR'))
#
# Filter part
#
Expand All @@ -342,7 +342,7 @@ def find_request_response(self, widget=None, refresh=False):
if opt.get_value():
codef = opt.get_name()
filterCodes.append(('codef', int(codef[0]), '='))
searchData.append((filterCodes, 'OR'))
search_data.append((filterCodes, 'OR'))
# IDs
try:
minId = int(self.pref.get_value('trans_id', 'min'))
Expand All @@ -353,38 +353,38 @@ def find_request_response(self, widget=None, refresh=False):
except:
maxId = 0
if maxId > 0:
searchData.append(('id', maxId, "<"))
search_data.append(('id', maxId, "<"))
if minId > 0:
searchData.append(('id', minId, ">"))
search_data.append(('id', minId, ">"))
if refresh:
searchData.append(('id', self._lastId, ">"))
search_data.append(('id', self._lastId, ">"))
# Sizes
if self.pref.get_value('sizes', 'resp_size'):
searchData.append(('response_size', 0, ">"))
search_data.append(('response_size', 0, ">"))
# Tags
if self.pref.get_value('misc', 'tag'):
searchData.append(('tag', '', "!="))
search_data.append(('tag', '', "!="))
# has_query_string
if self.pref.get_value('misc', 'has_qs'):
searchData.append(('has_qs', 0, ">"))
search_data.append(('has_qs', 0, ">"))
# Content type
filterTypes = []
for filterType in self._filterTypes:
if self.pref.get_value('types', filterType[0]):
filterTypes.append(
('content_type', "%" + filterType[0] + "%", 'like'))
searchData.append((filterTypes, 'OR'))
search_data.append((filterTypes, 'OR'))
# Method
filterMethods = []
for method in self._filterMethods:
if self.pref.get_value('methods', method[0]):
filterTypes.append(('method', method[0], '='))
searchData.append((filterMethods, 'OR'))
search_data.append((filterMethods, 'OR'))

try:
# Please see the 5000 below
searchResultObjects = self._historyItem.find(searchData,
result_limit=5001, orderData=[("id", "")])
searchResultObjects = self._historyItem.find(search_data,
result_limit=5001, order_data=[("id", "")])
except BaseFrameworkException, w3:
self._empty_results()
return
Expand Down

0 comments on commit 54d432d

Please sign in to comment.