Skip to content

Commit

Permalink
Minor improvement for timeouts
Browse files Browse the repository at this point in the history
  • Loading branch information
andresriancho committed Nov 12, 2019
1 parent 4ff8a9c commit 549ee6b
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 0 deletions.
6 changes: 6 additions & 0 deletions w3af/core/data/url/HTTPRequest.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,12 @@ def get_timeout(self):
def set_timeout(self, timeout):
    """Store the socket timeout (in seconds) to use when sending this request.

    :param timeout: Timeout value; read back later via get_timeout().
    """
    setattr(self, 'timeout', timeout)

def set_new_connection(self, new_connection):
    """Record whether this request must be sent over a brand new connection.

    :param new_connection: Flag stored as-is; read back via get_new_connection().
    """
    setattr(self, 'new_connection', new_connection)

def get_new_connection(self):
    """Return the flag previously stored by set_new_connection().

    :return: The value of the `new_connection` attribute, unchanged.
    """
    return getattr(self, 'new_connection')

def to_dict(self):
serializable_dict = {}
sdict = serializable_dict
Expand Down
14 changes: 14 additions & 0 deletions w3af/core/data/url/extended_urllib.py
Original file line number Diff line number Diff line change
Expand Up @@ -1190,11 +1190,25 @@ def _retry(self, req, grep, url_error):
args = (req, req.debugging_id, url_error)
om.out.debug(msg % args)

#
# Before sending it again we update the timeout, which could have
# changed because of the error we just found
#
host = req.get_host()
req.set_timeout(self.get_timeout(host))

#
# And for retries we force a new connection to be used to increase
# the chances of successfully retrieving a response
#
# TCP/IP connections are closed every time they receive an error and
# shouldn't be used anymore to send any HTTP requests. That is
# responsibility of the keepalive.handler code. So it should never
# happen, even without the next line of code, that a connection that
# triggered a timeout is re-used. The next line is to be 100% sure
#
req.set_new_connection(True)

return self.send(req, grep=grep)

else:
Expand Down

0 comments on commit 549ee6b

Please sign in to comment.