Skip to content

Commit

Permalink
Log hanging fruit for logs cleanup (#2299)
Browse files Browse the repository at this point in the history
  • Loading branch information
dmanjunath committed Jan 13, 2022
1 parent ebe53f1 commit e09cfc3
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 12 deletions.
2 changes: 1 addition & 1 deletion creator-node/src/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ const initializeApp = (port, serviceRegistry) => {

// Increase from 2min default to accommodate long-lived requests.
server.setTimeout(config.get('setTimeout'), () => {
logger.warn(`Server socket timeout hit`)
logger.debug(`Server socket timeout hit`)
})
server.timeout = config.get('timeout')
server.keepAliveTimeout = config.get('keepAliveTimeout')
Expand Down
6 changes: 3 additions & 3 deletions discovery-provider/src/utils/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,18 +66,18 @@ def redis_get_json_cached_key_or_restore(redis, key):
logger = logging.getLogger(__name__)
cached_value = redis.get(key)
if not cached_value:
logger.info(f"Redis Cache - miss {key}, restoring")
logger.debug(f"Redis Cache - miss {key}, restoring")
cached_value = redis_restore(redis, key)

if cached_value:
logger.info(f"Redis Cache - hit {key}")
logger.debug(f"Redis Cache - hit {key}")
try:
deserialized = json.loads(cached_value)
return deserialized
except Exception as e:
logger.warning(f"Unable to deserialize json cached response: {e}")
return None
logger.info(f"Redis Cache - miss {key}")
logger.debug(f"Redis Cache - miss {key}")
return None


Expand Down
3 changes: 1 addition & 2 deletions discovery-provider/src/utils/ipfs_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,8 +282,7 @@ def connect_peer(self, peer):
r = self._api.swarm.connect(peer, timeout=3)
logger.info(r)
except Exception as e:
logger.error("IPFSCLIENT | IPFS Failed to update peer")
logger.error(e)
logger.error(f"IPFSCLIENT | IPFS Failed to update peer: {e}")

def update_cnode_urls(self, cnode_endpoints):
if len(cnode_endpoints):
Expand Down
12 changes: 6 additions & 6 deletions discovery-provider/src/utils/redis_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,14 @@ def extract_key(path, arg_items, cache_prefix_override=None):
def get_pickled_key(redis, key):
    """Fetch `key` from redis and return its unpickled value.

    Args:
        redis: redis client instance (anything exposing ``get(key)``).
        key: cache key to look up.

    Returns:
        The deserialized cached value on a cache hit, or None on a miss
        or when the cached bytes cannot be unpickled.
    """
    cached_value = redis.get(key)
    if cached_value:
        # Hit/miss logs are debug-level: high-volume, low-signal (#2299).
        logger.debug(f"Redis Cache - hit {key}")
        try:
            # NOTE(review): pickle.loads is only safe because this cache is
            # written by this same service — never feed it untrusted bytes.
            deserialized = pickle.loads(cached_value)
            return deserialized
        except Exception as e:
            # Treat a corrupt cache entry as a miss rather than crashing.
            logger.warning(f"Unable to deserialize cached response: {e}")
            return None
    logger.debug(f"Redis Cache - miss {key}")
    return None


Expand All @@ -63,14 +63,14 @@ def use_redis_cache(key, ttl_sec, work_func):
def get_json_cached_key(redis, key):
    """Fetch `key` from redis and return its JSON-decoded value.

    Args:
        redis: redis client instance (anything exposing ``get(key)``).
        key: cache key to look up.

    Returns:
        The JSON-deserialized cached value on a cache hit, or None on a
        miss or when the cached bytes are not valid JSON.
    """
    cached_value = redis.get(key)
    if cached_value:
        # Hit/miss logs are debug-level: high-volume, low-signal (#2299).
        logger.debug(f"Redis Cache - hit {key}")
        try:
            deserialized = json.loads(cached_value)
            return deserialized
        except Exception as e:
            # Treat a corrupt cache entry as a miss rather than crashing.
            logger.warning(f"Unable to deserialize json cached response: {e}")
            return None
    logger.debug(f"Redis Cache - miss {key}")
    return None


Expand Down Expand Up @@ -130,7 +130,7 @@ def inner_wrap(*args, **kwargs):
cached_resp = redis.get(key)

if cached_resp:
logger.info(f"Redis Cache - hit {key}")
logger.debug(f"Redis Cache - hit {key}")
try:
deserialized = pickle.loads(cached_resp)
if transform is not None:
Expand All @@ -139,7 +139,7 @@ def inner_wrap(*args, **kwargs):
except Exception as e:
logger.warning(f"Unable to deserialize cached response: {e}")

logger.info(f"Redis Cache - miss {key}")
logger.debug(f"Redis Cache - miss {key}")
response = func(*args, **kwargs)

if len(response) == 2:
Expand Down

0 comments on commit e09cfc3

Please sign in to comment.