#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Blocking and non-blocking HTTP client implementations using pycurl."""
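
# A minimal usage sketch (illustrative; assumes this module is importable as
# tornado.curl_httpclient): configure Tornado to use this client class, then
# fetch a URL on the IOLoop.
#
#     from tornado import ioloop
#     from tornado.curl_httpclient import CurlAsyncHTTPClient
#     from tornado.httpclient import AsyncHTTPClient
#
#     AsyncHTTPClient.configure(CurlAsyncHTTPClient)
#
#     def handle_response(response):
#         print response.code
#         ioloop.IOLoop.instance().stop()
#
#     AsyncHTTPClient().fetch("http://www.example.com/", handle_response)
#     ioloop.IOLoop.instance().start()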

from __future__ import absolute_import, division, with_statement

import cStringIO
import collections
import logging
import pycurl
import threading
import time

from tornado import httputil
from tornado import ioloop
from tornado.log import gen_log
from tornado import stack_context

from tornado.escape import utf8
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main


class CurlAsyncHTTPClient(AsyncHTTPClient):
    def initialize(self, io_loop=None, max_clients=10):
        self.io_loop = io_loop
        self._multi = pycurl.CurlMulti()
        self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
        self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
        self._curls = [_curl_create() for i in xrange(max_clients)]
        self._free_list = self._curls[:]
        self._requests = collections.deque()
        self._fds = {}
        self._timeout = None

        try:
            self._socket_action = self._multi.socket_action
        except AttributeError:
            # socket_action is found in pycurl since 7.18.2 (it's been
            # in libcurl longer than that but wasn't accessible to
            # python).
            gen_log.warning("socket_action method missing from pycurl; "
                            "falling back to socket_all. Upgrading "
                            "libcurl and pycurl will improve performance")
            self._socket_action = \
                lambda fd, action: self._multi.socket_all()

        # libcurl has bugs that sometimes cause it to not report all
        # relevant file descriptors and timeouts to TIMERFUNCTION/
        # SOCKETFUNCTION. Mitigate the effects of such bugs by
        # forcing a periodic scan of all active requests.
        self._force_timeout_callback = ioloop.PeriodicCallback(
            self._handle_force_timeout, 1000, io_loop=io_loop)
        self._force_timeout_callback.start()

    def close(self):
        self._force_timeout_callback.stop()
        for curl in self._curls:
            curl.close()
        self._multi.close()
        self._closed = True
        super(CurlAsyncHTTPClient, self).close()

    def fetch(self, request, callback, **kwargs):
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        self._requests.append((request, stack_context.wrap(callback)))
        self._process_queue()
        self._set_timeout(0)

    def _handle_socket(self, event, fd, multi, data):
        """Called by libcurl when it wants to change the file descriptors
        it cares about.
        """
        event_map = {
            pycurl.POLL_NONE: ioloop.IOLoop.NONE,
            pycurl.POLL_IN: ioloop.IOLoop.READ,
            pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE
        }
        if event == pycurl.POLL_REMOVE:
            self.io_loop.remove_handler(fd)
            del self._fds[fd]
        else:
            ioloop_event = event_map[event]
            if fd not in self._fds:
                self._fds[fd] = ioloop_event
                self.io_loop.add_handler(fd, self._handle_events,
                                         ioloop_event)
            else:
                self._fds[fd] = ioloop_event
                self.io_loop.update_handler(fd, ioloop_event)

    def _set_timeout(self, msecs):
        """Called by libcurl to schedule a timeout."""
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
        self._timeout = self.io_loop.add_timeout(
            self.io_loop.time() + msecs / 1000.0, self._handle_timeout)

    def _handle_events(self, fd, events):
        """Called by IOLoop when there is activity on one of our
        file descriptors.
        """
        action = 0
        if events & ioloop.IOLoop.READ:
            action |= pycurl.CSELECT_IN
        if events & ioloop.IOLoop.WRITE:
            action |= pycurl.CSELECT_OUT
        while True:
            try:
                ret, num_handles = self._socket_action(fd, action)
            except pycurl.error, e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()

    def _handle_timeout(self):
        """Called by IOLoop when the requested timeout has passed."""
        with stack_context.NullContext():
            self._timeout = None
            while True:
                try:
                    ret, num_handles = self._socket_action(
                        pycurl.SOCKET_TIMEOUT, 0)
                except pycurl.error, e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()

        # In theory, we shouldn't have to do this because curl will
        # call _set_timeout whenever the timeout changes. However,
        # sometimes after _handle_timeout we will need to reschedule
        # immediately even though nothing has changed from curl's
        # perspective. This is because when socket_action is
        # called with SOCKET_TIMEOUT, libcurl decides internally which
        # timeouts need to be processed by using a monotonic clock
        # (where available) while tornado uses python's time.time()
        # to decide when timeouts have occurred. When those clocks
        # disagree on elapsed time (as they will whenever there is an
        # NTP adjustment), tornado might call _handle_timeout before
        # libcurl is ready. After each timeout, resync the scheduled
        # timeout with libcurl's current state.
        new_timeout = self._multi.timeout()
        if new_timeout != -1:
            self._set_timeout(new_timeout)

    def _handle_force_timeout(self):
        """Called by IOLoop periodically to ask libcurl to process any
        events it may have forgotten about.
        """
        with stack_context.NullContext():
            while True:
                try:
                    ret, num_handles = self._multi.socket_all()
                except pycurl.error, e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()

    def _finish_pending_requests(self):
        """Process any requests that were completed by the last
        call to multi.socket_action.
        """
        while True:
            num_q, ok_list, err_list = self._multi.info_read()
            for curl in ok_list:
                self._finish(curl)
            for curl, errnum, errmsg in err_list:
                self._finish(curl, errnum, errmsg)
            if num_q == 0:
                break
        self._process_queue()

    def _process_queue(self):
        with stack_context.NullContext():
            while True:
                started = 0
                while self._free_list and self._requests:
                    started += 1
                    curl = self._free_list.pop()
                    (request, callback) = self._requests.popleft()
                    curl.info = {
                        "headers": httputil.HTTPHeaders(),
                        "buffer": cStringIO.StringIO(),
                        "request": request,
                        "callback": callback,
                        "curl_start_time": time.time(),
                    }
                    # Disable IPv6 to mitigate the effects of this bug
                    # on curl versions <= 7.21.0
                    # http://sourceforge.net/tracker/?func=detail&aid=3017819&group_id=976&atid=100976
                    if pycurl.version_info()[2] <= 0x71500:  # 7.21.0
                        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
                    _curl_setup_request(curl, request, curl.info["buffer"],
                                        curl.info["headers"])
                    self._multi.add_handle(curl)

                if not started:
                    break

    def _finish(self, curl, curl_error=None, curl_message=None):
        info = curl.info
        curl.info = None
        self._multi.remove_handle(curl)
        self._free_list.append(curl)
        buffer = info["buffer"]
        if curl_error:
            error = CurlError(curl_error, curl_message)
            code = error.code
            effective_url = None
            buffer.close()
            buffer = None
        else:
            error = None
            code = curl.getinfo(pycurl.HTTP_CODE)
            effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
            buffer.seek(0)
        # the various curl timings are documented at
        # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
        time_info = dict(
            queue=info["curl_start_time"] - info["request"].start_time,
            namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
            connect=curl.getinfo(pycurl.CONNECT_TIME),
            pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
            starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
            total=curl.getinfo(pycurl.TOTAL_TIME),
            redirect=curl.getinfo(pycurl.REDIRECT_TIME),
        )
        try:
            info["callback"](HTTPResponse(
                request=info["request"], code=code, headers=info["headers"],
                buffer=buffer, effective_url=effective_url, error=error,
                request_time=time.time() - info["curl_start_time"],
                time_info=time_info))
        except Exception:
            self.handle_callback_exception(info["callback"])

    def handle_callback_exception(self, callback):
        self.io_loop.handle_callback_exception(callback)


class CurlError(HTTPError):
    def __init__(self, errno, message):
        HTTPError.__init__(self, 599, message)
        self.errno = errno
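
# Illustrative sketch: a fetch callback can distinguish transport-level curl
# failures from HTTP status errors by checking the response's error attribute
# for CurlError, e.g.:
#
#     def handle_response(response):
#         if isinstance(response.error, CurlError):
#             gen_log.warning("curl error %d: %s",
#                             response.error.errno, response.error)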


def _curl_create():
    curl = pycurl.Curl()
    if gen_log.isEnabledFor(logging.DEBUG):
        curl.setopt(pycurl.VERBOSE, 1)
        curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
    return curl

def _curl_setup_request(curl, request, buffer, headers):
    curl.setopt(pycurl.URL, utf8(request.url))

    # libcurl's magic "Expect: 100-continue" behavior causes delays
    # with servers that don't support it (which include, among others,
    # Google's OpenID endpoint). Additionally, this behavior has
    # a bug in conjunction with the curl_multi_socket_action API
    # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
    # which increases the delays. It's more trouble than it's worth,
    # so just turn off the feature (yes, setting Expect: to an empty
    # value is the official way to disable this)
    if "Expect" not in request.headers:
        request.headers["Expect"] = ""

    # libcurl adds Pragma: no-cache by default; disable that too
    if "Pragma" not in request.headers:
        request.headers["Pragma"] = ""

    # Request headers may be either a regular dict or HTTPHeaders object
    if isinstance(request.headers, httputil.HTTPHeaders):
        curl.setopt(pycurl.HTTPHEADER,
                    [utf8("%s: %s" % i) for i in request.headers.get_all()])
    else:
        curl.setopt(pycurl.HTTPHEADER,
                    [utf8("%s: %s" % i) for i in request.headers.iteritems()])

    if request.header_callback:
        curl.setopt(pycurl.HEADERFUNCTION, request.header_callback)
    else:
        curl.setopt(pycurl.HEADERFUNCTION,
                    lambda line: _curl_header_callback(headers, line))
    if request.streaming_callback:
        curl.setopt(pycurl.WRITEFUNCTION, request.streaming_callback)
    else:
        curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
    curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
    curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
    curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
    curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
    if request.user_agent:
        curl.setopt(pycurl.USERAGENT, utf8(request.user_agent))
    else:
        curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
    if request.network_interface:
        curl.setopt(pycurl.INTERFACE, request.network_interface)
    if request.use_gzip:
        curl.setopt(pycurl.ENCODING, "gzip,deflate")
    else:
        curl.setopt(pycurl.ENCODING, "none")
    if request.proxy_host and request.proxy_port:
        curl.setopt(pycurl.PROXY, request.proxy_host)
        curl.setopt(pycurl.PROXYPORT, request.proxy_port)
        if request.proxy_username:
            credentials = '%s:%s' % (request.proxy_username,
                                     request.proxy_password)
            curl.setopt(pycurl.PROXYUSERPWD, credentials)
    else:
        curl.setopt(pycurl.PROXY, '')
    if request.validate_cert:
        curl.setopt(pycurl.SSL_VERIFYPEER, 1)
        curl.setopt(pycurl.SSL_VERIFYHOST, 2)
    else:
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    if request.ca_certs is not None:
        curl.setopt(pycurl.CAINFO, request.ca_certs)
    else:
        # There is no way to restore pycurl.CAINFO to its default value
        # (Using unsetopt makes it reject all certificates).
        # I don't see any way to read the default value from python so it
        # can be restored later. We'll have to just leave CAINFO untouched
        # if no ca_certs file was specified, and require that if any
        # request uses a custom ca_certs file, they all must.
        pass

    if request.allow_ipv6 is False:
        # Curl behaves reasonably when DNS resolution gives an ipv6 address
        # that we can't reach, so allow ipv6 unless the user asks to disable.
        # (but see version check in _process_queue above)
        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)

    # Set the request method through curl's irritating interface which makes
    # up names for almost every single method
    curl_options = {
        "GET": pycurl.HTTPGET,
        "POST": pycurl.POST,
        "PUT": pycurl.UPLOAD,
        "HEAD": pycurl.NOBODY,
    }
    custom_methods = set(["DELETE"])
    for o in curl_options.values():
        curl.setopt(o, False)
    if request.method in curl_options:
        curl.unsetopt(pycurl.CUSTOMREQUEST)
        curl.setopt(curl_options[request.method], True)
    elif request.allow_nonstandard_methods or request.method in custom_methods:
        curl.setopt(pycurl.CUSTOMREQUEST, request.method)
    else:
        raise KeyError('unknown method ' + request.method)

    # Handle curl's cryptic options for every individual HTTP method
    if request.method in ("POST", "PUT"):
        request_buffer = cStringIO.StringIO(utf8(request.body))
        curl.setopt(pycurl.READFUNCTION, request_buffer.read)
        if request.method == "POST":
            def ioctl(cmd):
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
        else:
            curl.setopt(pycurl.INFILESIZE, len(request.body))

    if request.auth_username is not None:
        userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
        curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        curl.setopt(pycurl.USERPWD, utf8(userpwd))
        gen_log.debug("%s %s (username: %r)", request.method, request.url,
                      request.auth_username)
    else:
        curl.unsetopt(pycurl.USERPWD)
        gen_log.debug("%s %s", request.method, request.url)

    if request.client_cert is not None:
        curl.setopt(pycurl.SSLCERT, request.client_cert)

    if request.client_key is not None:
        curl.setopt(pycurl.SSLKEY, request.client_key)

    if threading.activeCount() > 1:
        # libcurl/pycurl is not thread-safe by default. When multiple threads
        # are used, signals should be disabled. This has the side effect
        # of disabling DNS timeouts in some environments (when libcurl is
        # not linked against ares), so we don't do it when there is only one
        # thread. Applications that use many short-lived threads may need
        # to set NOSIGNAL manually in a prepare_curl_callback since
        # there may not be any other threads running at the time we call
        # threading.activeCount.
        curl.setopt(pycurl.NOSIGNAL, 1)
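    # Illustrative sketch (placeholder names): an application can force
    # NOSIGNAL itself by passing a prepare_curl_callback on the request, e.g.:
    #
    #     def prepare(c):
    #         c.setopt(pycurl.NOSIGNAL, 1)
    #     request = HTTPRequest(url, prepare_curl_callback=prepare)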
    if request.prepare_curl_callback is not None:
        request.prepare_curl_callback(curl)


def _curl_header_callback(headers, header_line):
    # header_line as returned by curl includes the end-of-line characters.
    header_line = header_line.strip()
    if header_line.startswith("HTTP/"):
        headers.clear()
        return
    if not header_line:
        return
    headers.parse_line(header_line)


def _curl_debug(debug_type, debug_msg):
    debug_types = ('I', '<', '>', '<', '>')
    if debug_type == 0:
        gen_log.debug('%s', debug_msg.strip())
    elif debug_type in (1, 2):
        for line in debug_msg.splitlines():
            gen_log.debug('%s %s', debug_types[debug_type], line)
    elif debug_type == 4:
        gen_log.debug('%s %r', debug_types[debug_type], debug_msg)

if __name__ == "__main__":
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
    main()