This repository has been archived by the owner on Feb 3, 2021. It is now read-only.

Fix: set logger to stdout (#588)
* set logger to stdout

* typo

* add log.print level
jafreck committed Jun 5, 2018
1 parent f16aac0 commit 3f0c8f9
Showing 7 changed files with 34 additions and 20 deletions.
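The main change lives in aztk_cli/logger.py: log handlers now write to sys.stdout instead of the logging module's default of sys.stderr, and a new PRINT level backs a log.print() helper that replaces bare print() calls throughout the CLI. A minimal sketch of the stdout part, outside the aztk codebase (the logger name below is illustrative, not part of the commit):

    import logging
    import sys

    # logging.StreamHandler defaults to sys.stderr; passing stream=sys.stdout
    # sends log output to stdout so it can be piped or redirected like the
    # output of an ordinary print() call.
    logging.basicConfig(format="%(message)s", stream=sys.stdout)

    log = logging.getLogger("demo")  # illustrative name
    log.setLevel(logging.INFO)
    log.info("this record goes to stdout, not stderr")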
23 changes: 19 additions & 4 deletions aztk_cli/logger.py
@@ -1,10 +1,11 @@
 import logging
 import platform
+import sys

 root = logging.getLogger("aztk")

 DEFAULT_FORMAT = '%(message)s'
-VERBOSE_FORMAT = '%(asctime)s: %(levelname)s: %(message)s'
+VERBOSE_FORMAT = '[%(asctime)s] [%(filename)s:%(module)s:%(funcName)s:%(lineno)d] %(levelname)s - %(message)s'


 def add_coloring_to_emit_windows(fn):
@@ -61,6 +62,8 @@ def new(*args):
             color = FOREGROUND_YELLOW | FOREGROUND_INTENSITY
         elif levelno >= 20:
             color = FOREGROUND_GREEN
+        elif levelno >= 19:
+            color = FOREGROUND_WHITE
         elif levelno >= 10:
             color = FOREGROUND_MAGENTA
         else:
@@ -86,6 +89,8 @@ def new(*args):
             color = '\x1b[33m' # yellow
         elif levelno >= 20:
             color = '\x1b[32m' # green
+        elif levelno >= 19:
+            color = '\x1b[0m' # normal
         elif levelno >= 10:
             color = '\x1b[35m' # pink
         else:
@@ -104,10 +109,20 @@ def new(*args):
     logging.StreamHandler.emit = add_coloring_to_emit_ansi(logging.StreamHandler.emit)


+logging.PRINT = 19
+logging.addLevelName(logging.PRINT, "PRINT")
+
+def print_level(self, message, *args, **kwargs):
+    self._log(logging.PRINT, message, args, **kwargs)
+
+
 def setup_logging(verbose=False):
     if verbose:
         root.setLevel(logging.DEBUG)
-        logging.basicConfig(format=VERBOSE_FORMAT, datefmt="%Y-%m-%d %H:%M:%S")
+        logging.basicConfig(format=VERBOSE_FORMAT, datefmt="%Y-%m-%d %H:%M:%S", stream=sys.stdout)
     else:
-        root.setLevel(logging.INFO)
-        logging.basicConfig(format=DEFAULT_FORMAT)
+        root.setLevel(logging.PRINT)
+        logging.basicConfig(format=DEFAULT_FORMAT, stream=sys.stdout)
+
+# add custom levels
+logging.Logger.print = print_level
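The PRINT level registered above sits at 19, just below INFO (20), which is why the non-verbose branch of setup_logging() lowers the root logger to logging.PRINT: otherwise records logged with log.print() would be filtered out by the default INFO threshold. A self-contained sketch of the same pattern (the logger name and message are illustrative):

    import logging
    import sys

    PRINT = 19  # between DEBUG (10) and INFO (20), mirroring logging.PRINT above
    logging.addLevelName(PRINT, "PRINT")

    def print_level(self, message, *args, **kwargs):
        # Logger._log is the same internal method the built-in helpers
        # (debug, info, warning, ...) delegate to.
        self._log(PRINT, message, args, **kwargs)

    # Expose the helper as log.print(...) on every Logger instance.
    logging.Logger.print = print_level

    logging.basicConfig(format="%(message)s", stream=sys.stdout)
    log = logging.getLogger("demo")  # illustrative name
    log.setLevel(PRINT)  # must drop below INFO, or PRINT records are discarded
    log.print("replaces a bare print() call")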
4 changes: 2 additions & 2 deletions aztk_cli/spark/endpoints/cluster/cluster_app_logs.py
@@ -3,7 +3,7 @@
 import typing

 import aztk
-from aztk_cli import config, utils
+from aztk_cli import config, utils, log


 def setup_parser(parser: argparse.ArgumentParser):
@@ -36,4 +36,4 @@ def execute(args: typing.NamedTuple):
         with open(os.path.abspath(os.path.expanduser(args.output)), "w", encoding="UTF-8") as f:
             f.write(app_log.log)
     else:
-        print(app_log.log)
+        log.print(app_log.log)
12 changes: 6 additions & 6 deletions aztk_cli/spark/endpoints/cluster/cluster_copy.py
@@ -3,7 +3,7 @@
 import typing

 import aztk.spark
-from aztk_cli import config, utils
+from aztk_cli import config, log, utils


 def setup_parser(parser: argparse.ArgumentParser):
@@ -35,10 +35,10 @@ def execute(args: typing.NamedTuple):


 def print_copy_result(node_id, success, err):
-    print("-" * (len(node_id) + 6))
-    print("| ", node_id, " |")
-    print("-" * (len(node_id) + 6))
+    log.print("-" * (len(node_id) + 6))
+    log.print("| %s |", node_id)
+    log.print("-" * (len(node_id) + 6))
     if success:
-        print("Copy successful")
+        log.print("Copy successful")
     else:
-        print(err)
+        log.print(err)
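Note the second call in print_copy_result: log.print("| %s |", node_id) passes node_id as a lazy %-style argument rather than pre-formatting the string, matching how the rest of the codebase calls log.info. A tiny illustration of that deferred formatting (names are illustrative):

    import logging
    import sys

    logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.INFO)
    log = logging.getLogger("copy-demo")  # illustrative name

    node_id = "node-1"
    # The "%s" placeholder is interpolated by the logging framework only if
    # the record passes the level filter, so suppressed messages cost no
    # formatting work.
    log.info("| %s |", node_id)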
2 changes: 1 addition & 1 deletion aztk_cli/spark/endpoints/cluster/cluster_run.py
@@ -2,7 +2,7 @@
 import typing

 import aztk.spark
-from aztk_cli import config, utils
+from aztk_cli import config, log, utils


 def setup_parser(parser: argparse.ArgumentParser):
4 changes: 2 additions & 2 deletions aztk_cli/spark/endpoints/job/get_app_logs.py
@@ -3,7 +3,7 @@
 import typing

 import aztk.spark
-from aztk_cli import config, utils
+from aztk_cli import config, log, utils


 def setup_parser(parser: argparse.ArgumentParser):
@@ -28,4 +28,4 @@ def execute(args: typing.NamedTuple):
         with open(os.path.abspath(os.path.expanduser(args.output)), "w", encoding="UTF-8") as f:
             f.write(app_log.log)
     else:
-        print(app_log.log)
+        log.print(app_log.log)
4 changes: 2 additions & 2 deletions aztk_cli/spark/endpoints/job/stop.py
@@ -3,7 +3,7 @@
 import typing

 import aztk.spark
-from aztk_cli import config, utils
+from aztk_cli import config, log, utils


 def setup_parser(parser: argparse.ArgumentParser):
@@ -16,4 +16,4 @@ def setup_parser(parser: argparse.ArgumentParser):
 def execute(args: typing.NamedTuple):
     spark_client = aztk.spark.Client(config.load_aztk_secrets())
     spark_client.stop_job(args.job_id)
-    print("Stopped Job {0}".format(args.job_id))
+    log.print("Stopped Job {0}".format(args.job_id))
5 changes: 2 additions & 3 deletions aztk_cli/utils.py
@@ -4,7 +4,6 @@
 import sys
 import threading
 import time
-import yaml
 from subprocess import call
 from typing import List

@@ -131,7 +130,7 @@ def stream_logs(client, cluster_id, application_name):
             application_name=application_name,
             tail=True,
             current_bytes=current_bytes)
-        print(app_logs.log, end="")
+        log.print(app_logs.log)
         if app_logs.application_state == 'completed':
             return app_logs.exit_code
         current_bytes = app_logs.total_bytes
@@ -466,4 +465,4 @@ def log_execute_result(node_id, result):
         log.info("%s\n", result)
     else:
         for line in result:
-            print(line)
+            log.print(line)
