Skip to content

Commit

Permalink
1. support custom extra #84 (default is enabled)
Browse files Browse the repository at this point in the history
2. add more logging handler default fields
3. allow to add custom fields
4. add blacklist for reserved fields
  • Loading branch information
wjo1212 committed Jan 8, 2019
1 parent 427f769 commit 52c5be5
Show file tree
Hide file tree
Showing 2 changed files with 89 additions and 15 deletions.
77 changes: 62 additions & 15 deletions aliyun/log/logger_hanlder.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from time import time
from enum import Enum
from .version import LOGGING_HANDLER_USER_AGENT

from collections import Callable
import six

if six.PY2:
Expand All @@ -28,6 +28,7 @@ class LogFields(Enum):
"""
record_name = 'name'
level = 'levelname'
file_name = 'filename'
func_name = 'funcName'
module = 'module'
file_path = 'pathname'
Expand All @@ -37,6 +38,25 @@ class LogFields(Enum):
thread_id = 'thread'
thread_name = 'threadName'

level_no = 'levelno'
asc_time = 'asctime'
created_timestamp = 'created'
micro_second = 'msecs'
relative_created = 'relativeCreated'


# Record attributes emitted by default when the caller passes fields=None.
DEFAULT_RECORD_LOG_FIELDS = {
    LogFields.record_name, LogFields.level,
    LogFields.func_name, LogFields.module,
    LogFields.file_path, LogFields.line_no,
    LogFields.process_id, LogFields.process_name,
    LogFields.thread_id, LogFields.thread_name,
}

# Reserved LogRecord attributes that must never be rendered as output fields.
BLACK_FIELD_LIST = {'exc_info', 'exc_text', 'stack_info', 'msg', 'args', 'message'}

# Every name recognized as a builtin field: the enum member names, their
# underlying LogRecord attribute names, plus the reserved black-list entries.
BUILTIN_LOG_FIELDS_NAMES = {member for member in dir(LogFields) if not member.startswith('__')}
# Snapshot with list() so we do not mutate the set while iterating it.
BUILTIN_LOG_FIELDS_NAMES.update(LogFields[member].value for member in list(BUILTIN_LOG_FIELDS_NAMES))
BUILTIN_LOG_FIELDS_NAMES.update(BLACK_FIELD_LIST)


class SimpleLogHandler(logging.Handler, object):
"""
Expand All @@ -54,7 +74,7 @@ class SimpleLogHandler(logging.Handler, object):
:param topic: topic, by default is empty
:param fields: list of LogFields or list of names of LogFields, default is LogFields.record_name, LogFields.level, LogFields.func_name, LogFields.module, LogFields.file_path, LogFields.line_no, LogFields.process_id, LogFields.process_name, LogFields.thread_id, LogFields.thread_name
:param fields: list of LogFields or list of names of LogFields, default is LogFields.record_name, LogFields.level, LogFields.func_name, LogFields.module, LogFields.file_path, LogFields.line_no, LogFields.process_id, LogFields.process_name, LogFields.thread_id, LogFields.thread_name; you could also just use the string name like 'thread_name'. It's also possible to customize extra fields in this list by disabling extra fields and putting a white list here.
:param buildin_fields_prefix: prefix of builtin fields, default is empty. suggest using "__" when extract json is True to prevent conflict.
Expand All @@ -78,6 +98,8 @@ class SimpleLogHandler(logging.Handler, object):
:param extract_kv_sep: separator for KV case, default is '=', e.g. k1=v1
:param extra: whether to include extra record fields; default is True (show all). Note: the extra fields will also be handled with buildin_fields_prefix/suffix
:param kwargs: other parameters passed to logging.Handler
"""

Expand All @@ -87,7 +109,7 @@ def __init__(self, end_point, access_key_id, access_key, project, log_store, top
extract_json_prefix=None, extract_json_suffix=None,
extract_kv=None, extract_kv_drop_message=None,
extract_kv_prefix=None, extract_kv_suffix=None,
extract_kv_sep=None,
extract_kv_sep=None, extra=None,
**kwargs):
logging.Handler.__init__(self, **kwargs)
self.end_point = end_point
Expand All @@ -97,11 +119,7 @@ def __init__(self, end_point, access_key_id, access_key, project, log_store, top
self.log_store = log_store
self.client = None
self.topic = topic
self.fields = (LogFields.record_name, LogFields.level,
LogFields.func_name, LogFields.module,
LogFields.file_path, LogFields.line_no,
LogFields.process_id, LogFields.process_name,
LogFields.thread_id, LogFields.thread_name) if fields is None else fields
self.fields = DEFAULT_RECORD_LOG_FIELDS if fields is None else set(fields)

self.extract_json = False if extract_json is None else extract_json
self.extract_json_prefix = "" if extract_json_prefix is None else extract_json_prefix
Expand All @@ -116,6 +134,7 @@ def __init__(self, end_point, access_key_id, access_key, project, log_store, top
self.extract_kv_drop_message = False if extract_kv_drop_message is None else extract_kv_drop_message
self.extract_kv_sep = "=" if extract_kv_sep is None else extract_kv_sep
self.extract_kv_ptn = self._get_extract_kv_ptn()
self.extra = True if extra is None else extra

def set_topic(self, topic):
    """Replace the topic attached to subsequently emitted log entries."""
    self.topic = topic
Expand All @@ -137,7 +156,7 @@ def _n(v):
if v is None:
return ""

if isinstance(v, (dict, list)):
if isinstance(v, (dict, list, tuple)):
try:
v = json.dumps(v)
except Exception:
Expand Down Expand Up @@ -183,6 +202,14 @@ def extract_kv_str(self, message):

return data

def _add_record_fields(self, record, k, contents):
v = getattr(record, k, None)
if v is None or isinstance(v, Callable):
return

v = self._n(v)
contents.append(("{0}{1}{2}".format(self.buildin_fields_prefix, k, self.buildin_fields_suffix), v))

def make_request(self, record):
contents = []
message_field_name = "{0}message{1}".format(self.buildin_fields_prefix, self.buildin_fields_suffix)
Expand All @@ -203,13 +230,27 @@ def make_request(self, record):

# add builtin fields
for x in self.fields:
if isinstance(x, (six.binary_type, six.text_type)):
x = LogFields[x]
k = x
if isinstance(x, LogFields):
k = x.name
x = x.value
elif isinstance(x, (six.binary_type, six.text_type)):
if x in BLACK_FIELD_LIST:
continue # by pass for those reserved fields. make no sense to render them

if x in BUILTIN_LOG_FIELDS_NAMES:
k = LogFields[x].name
x = LogFields[x].value
elif self.extra: # will handle it later
continue

v = getattr(record, x.value)
if not isinstance(v, (six.binary_type, six.text_type)):
v = str(v)
contents.append(("{0}{1}{2}".format(self.buildin_fields_prefix, x.name, self.buildin_fields_suffix), v))
self._add_record_fields(record, x, contents)

# handle extra
if self.extra:
for x in dir(record):
if not x.startswith('__') and not x in BUILTIN_LOG_FIELDS_NAMES:
self._add_record_fields(record, x, contents)

item = LogItem(contents=contents, timestamp=record.created)

Expand Down Expand Up @@ -271,6 +312,8 @@ class QueuedLogHandler(SimpleLogHandler):
:param extract_kv_sep: separator for KV case, default is '=', e.g. k1=v1
:param extra: whether to include extra record fields; default is True (show all)
:param kwargs: other parameters passed to logging.Handler
"""

Expand All @@ -282,6 +325,7 @@ def __init__(self, end_point, access_key_id, access_key, project, log_store, top
extract_kv=None, extract_kv_drop_message=None,
extract_kv_prefix=None, extract_kv_suffix=None,
extract_kv_sep=None,
extra=None,
**kwargs):
super(QueuedLogHandler, self).__init__(end_point, access_key_id, access_key, project, log_store,
topic=topic, fields=fields,
Expand All @@ -296,6 +340,7 @@ def __init__(self, end_point, access_key_id, access_key, project, log_store, top
extract_kv_prefix=extract_kv_prefix,
extract_kv_suffix=extract_kv_suffix,
extract_kv_sep=extract_kv_sep,
extra=extra,
**kwargs)
self.stop_flag = False
self.stop_time = None
Expand Down Expand Up @@ -424,6 +469,8 @@ class UwsgiQueuedLogHandler(QueuedLogHandler):
:param extract_kv_sep: separator for KV case, default is '=', e.g. k1=v1
:param extra: whether to include extra record fields; default is True (show all)
:param kwargs: other parameters passed to logging.Handler
"""
def __init__(self, *args, **kwargs):
Expand Down
27 changes: 27 additions & 0 deletions tests/integration_test/test_log_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,30 @@ def test_log_handler(end_point, access_key_id, access_key, project, logstore):
print("...finish... %s seconds" % (time() - s))


def test_log_handler_extra(end_point, access_key_id, access_key, project, logstore):
    """Send log records carrying assorted `extra` payloads (lists, dicts,
    floats, bytes and unicode) through a QueuedLogHandler, including an
    exception record with and without traceback info."""
    log = logging.getLogger(__name__)

    handler = QueuedLogHandler(end_point, access_key_id, access_key, project, logstore)
    handler.setLevel(logging.INFO)
    handler.set_fields([LogFields.level, LogFields.func_name, LogFields.file_path])
    handler.set_topic("sdk test")
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    cases = [
        ("x1", {"a": 100, "b": [1, 2, 3]}),
        ("x2", {"a": 100, "b": "xyz"}),
        ("x3", {"a": 100, "b": [1, "abc", {"x": 200}]}),
        ("x4", {"a": 100.1, "b": "xyz"}),
        ("x5", {"a": u"中国".encode('utf8'), "b": {u"中国".encode('utf8'): u"中国".encode('utf8')}}),
        ("x6", {"a": u"中国", "b": {u"中国": u"中国"}}),
    ]
    for msg, extra in cases:
        log.error(msg, extra=extra)

    try:
        1/0
    except ZeroDivisionError as ex:
        log.error(ex)
        log.error(ex, exc_info=True)


def test_log_handler_json(end_point, access_key_id, access_key, project, logstore):
logger = logging.getLogger('json')

Expand Down Expand Up @@ -135,6 +159,9 @@ def main():
assert len(x.get_flatten_logs_json()) == 10
break

# test extra
test_log_handler_extra(endpoint, accessKeyId, accessKey, project, logstore)

# test extract json
test_log_handler_json(endpoint, accessKeyId, accessKey, project, logstore)

Expand Down

0 comments on commit 52c5be5

Please sign in to comment.