Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Merge pull request #54 from ploxiln/master

clean up logging
  • Loading branch information...
commit 4f80d5b4511654b2c25c7f6d44fb29e37ff05e8a (2 parents: bf6919a + 3fd16df)
Authored by Jehiah Czebotar (jehiah)
asyncmongo/connection.py (19 lines changed)
View
@@ -130,7 +130,6 @@ def _send_message(self, message):
self.usage_count +=1
# __request_id used by get_more()
(self.__request_id, data) = message
- # logging.info('request id %d writing %r' % (self.__request_id, data))
try:
self.__stream.write(data)
if self.__callback:
@@ -139,14 +138,13 @@ def _send_message(self, message):
self.__request_id = None
self.__pool.cache(self)
- except IOError, e:
+ except IOError:
self.__alive = False
raise
# return self.__request_id
def _parse_header(self, header):
# return self.__receive_data_on_socket(length - 16, sock)
- # logging.info('got data %r' % header)
length = int(struct.unpack("<i", header[:4])[0])
request_id = struct.unpack("<i", header[8:12])[0]
assert request_id == self.__request_id, \
@@ -154,16 +152,13 @@ def _parse_header(self, header):
request_id)
operation = 1 # who knows why
assert operation == struct.unpack("<i", header[12:])[0]
- # logging.info('%s' % length)
- # logging.info('waiting for another %d bytes' % length - 16)
try:
self.__stream.read(length - 16, callback=self._parse_response)
- except IOError, e:
+ except IOError:
self.__alive = False
raise
def _parse_response(self, response):
- # logging.info('got data %r' % response)
callback = self.__callback
request_id = self.__request_id
self.__request_id = None
@@ -177,22 +172,20 @@ def _parse_response(self, response):
try:
response = helpers._unpack_response(response, request_id) # TODO: pass tz_awar
except Exception, e:
- logging.error('error %s' % e)
+ logging.debug('error %s' % e)
callback(None, e)
return
if response and response['data'] and response['data'][0].get('err') and response['data'][0].get('code'):
- # logging.error(response['data'][0]['err'])
callback(response, IntegrityError(response['data'][0]['err'], code=response['data'][0]['code']))
return
- # logging.info('response: %s' % response)
callback(response)
def _start_authentication(self, response, error=None):
# this is the nonce response
if error:
- logging.error(error)
- logging.error(response)
+ logging.debug(error)
+ logging.debug(response)
raise AuthenticationError(error)
nonce = response['data'][0]['nonce']
key = helpers._auth_key(nonce, self.__dbuser, self.__dbpass)
@@ -214,7 +207,7 @@ def _finish_authentication(self, response, error=None):
assert response['number_returned'] == 1
response = response['data'][0]
if response['ok'] != 1:
- logging.error('Failed authentication %s' % response['errmsg'])
+ logging.debug('Failed authentication %s' % response['errmsg'])
self.__deferred_message = None
self.__deferred_callback = None
raise AuthenticationError(response['errmsg'])
asyncmongo/cursor.py (6 lines changed)
View
@@ -390,7 +390,7 @@ def find(self, spec=None, fields=None, skip=0, limit=0,
self.__fields),
callback=functools.partial(self._handle_response, orig_callback=callback))
except Exception, e:
- logging.error('Error sending query %s' % e)
+ logging.debug('Error sending query %s' % e)
connection.close()
raise
@@ -402,12 +402,12 @@ def _handle_response(self, result, error=None, orig_callback=None):
message.kill_cursors([result['cursor_id']]),
callback=None)
except Exception, e:
- logging.error('Error killing cursor %s: %s' % (result['cursor_id'], e))
+ logging.debug('Error killing cursor %s: %s' % (result['cursor_id'], e))
connection.close()
raise
if error:
- logging.error('%s %s' % (self.full_collection_name , error))
+ logging.debug('%s %s' % (self.full_collection_name , error))
orig_callback(None, error=error)
else:
if self.__limit == -1 and len(result['data']) == 1:
asyncmongo/pool.py (4 lines changed)
View
@@ -127,7 +127,7 @@ def cache(self, con):
"""Put a dedicated connection back into the idle cache."""
if self._maxusage and con.usage_count > self._maxusage:
self._connections -=1
- # logging.info('dropping connection %s uses past max usage %s' % (con.usage_count, self._maxusage))
+ logging.debug('dropping connection %s uses past max usage %s' % (con.usage_count, self._maxusage))
con._close()
return
self._condition.acquire()
@@ -140,7 +140,7 @@ def cache(self, con):
# the idle cache is not full, so put it there
self._idle_cache.append(con)
else: # if the idle cache is already full,
- # logging.info('dropping connection. connection pool (%s) is full. maxcached %s' % (len(self._idle_cache), self._maxcached))
+ logging.debug('dropping connection. connection pool (%s) is full. maxcached %s' % (len(self._idle_cache), self._maxcached))
con._close() # then close the connection
self._condition.notify()
finally:
test/test_shunt.py (3 lines changed)
View
@@ -61,7 +61,8 @@ def setUp(self):
os.makedirs(dirname)
self.temp_dirs.append(dirname)
- options = ['mongod', '--bind_ip', '127.0.0.1', '--oplogSize', '10', '--dbpath', dirname, '-v'] + list(options)
+ options = ['mongod', '--bind_ip', '127.0.0.1', '--oplogSize', '10',
+ '--dbpath', dirname, '--smallfiles', '-v'] + list(options)
logging.debug(options)
pipe = subprocess.Popen(options)
self.mongods.append(pipe)
Please sign in to comment.
Something went wrong with that request. Please try again.