
Commit

fixed-bugs.txt test passes
kedder committed Mar 15, 2013
1 parent 2032193 commit 6fadab3
Showing 1 changed file with 12 additions and 9 deletions.
21 changes: 12 additions & 9 deletions src/zope/testbrowser/browser2.py
@@ -26,6 +26,7 @@
 import six
 from zope.interface import implementer
 from wsgiproxy.proxies import TransparentProxy
+from bs4 import BeautifulSoup
 
 from zope.testbrowser import interfaces
 from zope.testbrowser._compat import httpclient, PYTHON2, urllib_request
@@ -239,13 +240,7 @@ def open(self, url, data=None):
             six.reraise(*translateAppError(*sys.exc_info()))
 
         self._setResponse(resp)
-
-        # if the headers don't have a status, I suppose there can't be an error
-        if 'Status' in self.headers:
-            code, msg = self.headers['Status'].split(' ', 1)
-            code = int(code)
-            if self.raiseHttpErrors and code >= 400:
-                raise httpclient.HTTPException(url, code, msg, self.headers)
+        self._checkStatus()
 
     def post(self, url, data, content_type=None):
         if content_type is not None:
@@ -268,10 +263,19 @@ def _clickSubmit(self, form, control=None, coord=None):
             resp = resp.maybe_follow()
 
             self._setResponse(resp)
+            self._checkStatus()
 
         except webtest.app.AppError:
             six.reraise(*translateAppError(*sys.exc_info()))
 
+    def _checkStatus(self):
+        # if the headers don't have a status, I suppose there can't be an error
+        if 'Status' in self.headers:
+            code, msg = self.headers['Status'].split(' ', 1)
+            code = int(code)
+            if self.raiseHttpErrors and code >= 400:
+                raise urllib_request.HTTPError(self.url, code, msg, [], None)
+
     def _submit(self, form, name=None, index=None, coord=None, **args):
         # A reimplementation of webtest.forms.Form.submit() to allow to insert
         # coords into the request
@@ -407,7 +411,6 @@ def _indexControls(self, form):
         # form.html after parsing. But we need them (at least to locate labels
         # for radio buttons). So we are forced to reparse part of html, to
         # extract elements.
-        from bs4 import BeautifulSoup
         html = BeautifulSoup(form.text)
         tags = ('input', 'select', 'textarea', 'button')
         return html.find_all(tags)
@@ -453,7 +456,7 @@ def _preparedRequest(self, url):
 
         kwargs = {'headers': sorted(self._req_headers.items()),
                   'extra_environ': extra_environ,
-                  'expect_errors': not self.raiseHttpErrors}
+                  'expect_errors': True}
 
         yield kwargs
 
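In short, the commit makes open() and _clickSubmit() share one status check, always passes expect_errors=True to webtest so it never raises on its own, and reports an error status as urllib's HTTPError instead of httpclient.HTTPException. The snippet below is a standalone sketch of that check for illustration only; the check_status name, the example URLs and the header values are made up and are not part of the repository.

try:
    from urllib.error import HTTPError      # Python 3
except ImportError:
    from urllib2 import HTTPError           # Python 2

def check_status(url, headers, raise_http_errors=True):
    # Same idea as the new _checkStatus(): the WSGI 'Status' header looks
    # like '404 Not Found'; if it is absent there is no error to report.
    if 'Status' in headers:
        code, msg = headers['Status'].split(' ', 1)
        code = int(code)
        if raise_http_errors and code >= 400:
            raise HTTPError(url, code, msg, [], None)

check_status('http://localhost/ok', {'Status': '200 OK'})        # no exception
try:
    check_status('http://localhost/missing', {'Status': '404 Not Found'})
except HTTPError as err:
    print(err.code, err.msg)                                      # 404 Not Found

With raiseHttpErrors switched off on the browser, the same response is simply stored rather than raised, which is why _preparedRequest() can hand expect_errors=True to webtest unconditionally.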
