References #93, fixes issue #94

Merged 1 commit on Sep 14, 2019
4 changes: 2 additions & 2 deletions .travis.yml
@@ -19,8 +19,8 @@ deploy:
- basescript/utils.py
- examples/adder.py
- examples/helloworld.py
name: basescript-0.3.1
tag_name: 0.3.1
name: basescript-0.3.2
tag_name: 0.3.2
true:
repo: deep-compute/basescript
- provider: pypi
92 changes: 24 additions & 68 deletions basescript/log.py
@@ -29,7 +29,6 @@ def __init__(self, *streams):
self.streams = streams

def write(self, data):
print(data)
for s in self.streams:
s.write(data)

@@ -246,26 +245,6 @@ def setLevel(self, level):
self._logger.setLevel(level)


def define_log_renderer(fmt, fpath, quiet):
"""
the final log processor that structlog requires to render.
"""
# it must accept a logger, method_name and event_dict (just like processors)
# but must return the rendered string, not a dictionary.
# TODO tty logic

if fmt:
return structlog.processors.JSONRenderer()

if fpath is not None:
return structlog.processors.JSONRenderer()

if sys.stderr.isatty() and not quiet:
return structlog.dev.ConsoleRenderer()

return structlog.processors.JSONRenderer()


def _structlog_default_keys_processor(logger_class, log_method, event):
""" Add unique id, type and hostname """
global HOSTNAME
@@ -379,68 +358,45 @@ def define_log_processors():
]


def _configure_logger(
fmt, quiet, level, fpath, pre_hooks, post_hooks, metric_grouping_interval
):
def _configure_logger(fmt, quiet, level, fpath, processors, metric_grouping_interval):
"""
configures a logger to write to stderr or a file, as required
"""

# NOTE not thread safe. Multiple BaseScripts cannot be instantiated concurrently.
level = getattr(logging, level.upper())

global _GLOBAL_LOG_CONFIGURED
if _GLOBAL_LOG_CONFIGURED:
return

# since the hooks need to run through structlog, need to wrap them like processors
def wrap_hook(fn):
@wraps(fn)
def processor(logger, method_name, event_dict):
fn(event_dict)
return event_dict

return processor
assert fmt in ["json", "pretty"]

processors = define_log_processors()
processors.extend([wrap_hook(h) for h in pre_hooks])
_processors = define_log_processors()
_processors += processors or []
if metric_grouping_interval:
processors.append(metrics_grouping_processor)
_processors.append(metrics_grouping_processor)

log_renderer = define_log_renderer(fmt, fpath, quiet)
stderr_required = not quiet
pretty_to_stderr = stderr_required and (
fmt == "pretty" or (fmt is None and sys.stderr.isatty())
)

should_inject_pretty_renderer = pretty_to_stderr and not isinstance(
log_renderer, structlog.dev.ConsoleRenderer
)
if should_inject_pretty_renderer:
stderr_required = False
processors.append(StderrConsoleRenderer())
streams = []

processors.append(log_renderer)
processors.extend([wrap_hook(h) for h in post_hooks])
if fpath:
streams.append(open(fpath, "a"))

streams = []
# we need to use a stream if we are writing to both file and stderr, and both are json
if stderr_required:
if fmt == "json" and not quiet:
streams.append(sys.stderr)

if fpath is not None:
# TODO handle creating a directory for this log file ?
# TODO set mode and encoding appropriately
streams.append(open(fpath, "a"))
if fmt == "pretty" and not quiet:
_processors.append(StderrConsoleRenderer())

assert len(streams) != 0, "cannot configure logger for 0 streams"
_processors.append(structlog.processors.JSONRenderer())

# a global level struct log config unless otherwise specified.
level = getattr(logging, level.upper())

stream = streams[0] if len(streams) == 1 else Stream(*streams)
atexit.register(stream.close)

# a global level struct log config unless otherwise specified.
structlog.configure(
processors=processors,
processors=_processors,
context_class=dict,
logger_factory=LevelLoggerFactory(stream, level=level),
wrapper_class=BoundLevelLogger,
@@ -460,10 +416,12 @@ def init_logger(
quiet=False,
level="INFO",
fpath=None,
pre_hooks=[],
post_hooks=[],
processors=None,
metric_grouping_interval=None,
):
"""
fmt=pretty/json controls only stderr; file always gets json.
"""

global LOG
if LOG is not None:
@@ -473,14 +431,12 @@
# no need for a log - return a dummy
return Dummy()

_configure_logger(
fmt, quiet, level, fpath, pre_hooks, post_hooks, metric_grouping_interval
)
if not fmt and not quiet:
fmt = "pretty" if sys.stderr.isatty() else "json"

log = structlog.get_logger()
level = getattr(logging, level.upper())
log.setLevel(level)
_configure_logger(fmt, quiet, level, fpath, processors, metric_grouping_interval)

log = structlog.get_logger()
log._force_flush_q = queue.Queue(maxsize=FORCE_FLUSH_Q_SIZE)

if metric_grouping_interval:
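The reworked init_logger takes a single processors list in place of the removed pre_hooks/post_hooks arguments, and, per the new docstring, fmt ("pretty"/"json") only controls the stderr rendering while a log file always receives JSON. A minimal usage sketch under those assumptions follows; the import path, file path, and the add_app_name processor are illustrative and not part of this PR, and the assumption that init_logger returns the configured logger is inferred from the surrounding code rather than shown in this hunk:

from basescript.log import init_logger  # assumed import path; init_logger is defined in basescript/log.py

# Hypothetical structlog-style processor: receives (logger, method_name, event_dict)
# and must return the (possibly modified) event_dict.
def add_app_name(logger, method_name, event_dict):
    event_dict["app"] = "helloworld"  # illustrative value
    return event_dict

log = init_logger(
    fmt="pretty",                  # rendered on stderr only
    fpath="/tmp/helloworld.log",   # hypothetical path; the file always receives JSON lines
    level="INFO",
    processors=[add_app_name],     # replaces the removed pre_hooks/post_hooks lists
)
log.info("started", port=8888)     # structlog-style call: event name plus key/value pairs
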
2 changes: 1 addition & 1 deletion setup.py
@@ -25,7 +25,7 @@ def get_long_description():

long_description = get_long_description()

version = "0.3.1"
version = "0.3.2"
setup(
name="basescript",
version=version,