Skip to content

Commit

Permalink
Merge branch 'add_logger' of https://github.com/jverswijver/datajoint…
Browse files Browse the repository at this point in the history
…-python into loggerPR
  • Loading branch information
zitrosolrac committed Jun 10, 2022
2 parents ec60d51 + 0274a23 commit 41ddc63
Show file tree
Hide file tree
Showing 20 changed files with 144 additions and 46 deletions.
7 changes: 6 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
## Release notes

### 0.13.5 -- May 13, 2022
### 0.13.6 -- TBD
* Add - unified package-level logger
* Update - swap various datajoint messages, warnings, etc. to use the new logger.

### 0.13.5 -- May 19, 2022
* Update - Import ABC from collections.abc for Python 3.10 compatibility
* Bugfix - Fix multiprocessing value error (#1013) PR #1026

### 0.13.4 -- March, 28 2022
* Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995
Expand Down
2 changes: 1 addition & 1 deletion LNX-docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ services:
interval: 1s
fakeservices.datajoint.io:
<<: *net
image: datajoint/nginx:v0.1.1
image: datajoint/nginx:v0.2.1
environment:
- ADD_db_TYPE=DATABASE
- ADD_db_ENDPOINT=db:3306
Expand Down
1 change: 1 addition & 0 deletions datajoint/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
"key_hash",
]

from .logging import logger
from .version import __version__
from .settings import config
from .connection import conn, Connection
Expand Down
12 changes: 6 additions & 6 deletions datajoint/autopopulate.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

# noinspection PyExceptionInherit,PyCallingNonCallable

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


# --- helper functions for multiprocessing --
Expand Down Expand Up @@ -173,8 +173,7 @@ def populate(
:param limit: if not None, check at most this many keys
:param max_calls: if not None, populate at most this many keys
:param display_progress: if True, report progress_bar
:param processes: number of processes to use. When set to a large number, then
uses as many as CPU cores
:param processes: number of processes to use. Set to None to use all cores
:param make_kwargs: Keyword arguments which do not affect the result of computation
to be passed down to each ``make()`` call. Computation arguments should be
specified within the pipeline e.g. using a `dj.Lookup` table.
Expand Down Expand Up @@ -211,9 +210,10 @@ def handler(signum, frame):

keys = keys[:max_calls]
nkeys = len(keys)
if not nkeys:
return

if processes > 1:
processes = min(processes, nkeys, mp.cpu_count())
processes = min(*(_ for _ in (processes, nkeys, mp.cpu_count()) if _))

error_list = []
populate_kwargs = dict(
Expand Down Expand Up @@ -275,7 +275,7 @@ def _populate1(
if jobs is not None:
jobs.complete(self.target.table_name, self._job_key(key))
else:
logger.info("Populating: " + str(key))
logger.debug("Populating: " + str(key))
self.__class__._allow_insert = True
try:
make(dict(key), **(make_kwargs or {}))
Expand Down
12 changes: 6 additions & 6 deletions datajoint/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from .hash import uuid_from_buffer
from .plugin import connection_plugins

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])
query_log_max_length = 300


Expand Down Expand Up @@ -184,7 +184,7 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None)
self.conn_info["ssl_input"] = use_tls
self.conn_info["host_input"] = host_input
self.init_fun = init_fun
print("Connecting {user}@{host}:{port}".format(**self.conn_info))
logger.info("Connecting {user}@{host}:{port}".format(**self.conn_info))
self._conn = None
self._query_cache = None
connect_host_hook(self)
Expand Down Expand Up @@ -339,7 +339,7 @@ def query(
except errors.LostConnectionError:
if not reconnect:
raise
warnings.warn("MySQL server has gone away. Reconnecting to the server.")
logger.warning("MySQL server has gone away. Reconnecting to the server.")
connect_host_hook(self)
if self._in_transaction:
self.cancel_transaction()
Expand Down Expand Up @@ -380,15 +380,15 @@ def start_transaction(self):
raise errors.DataJointError("Nested connections are not supported.")
self.query("START TRANSACTION WITH CONSISTENT SNAPSHOT")
self._in_transaction = True
logger.info("Transaction started")
logger.debug("Transaction started")

def cancel_transaction(self):
"""
Cancels the current transaction and rolls back all changes made during the transaction.
"""
self.query("ROLLBACK")
self._in_transaction = False
logger.info("Transaction cancelled. Rolling back ...")
logger.debug("Transaction cancelled. Rolling back ...")

def commit_transaction(self):
"""
Expand All @@ -397,7 +397,7 @@ def commit_transaction(self):
"""
self.query("COMMIT")
self._in_transaction = False
logger.info("Transaction committed and closed.")
logger.debug("Transaction committed and closed.")

# -------- context manager for transactions
@property
Expand Down
4 changes: 2 additions & 2 deletions datajoint/declare.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def match_type(attribute_type):
)


logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


def build_foreign_key_parser_old():
Expand Down Expand Up @@ -207,7 +207,7 @@ def compile_foreign_key(
)

if obsolete:
warnings.warn(
logger.warning(
'Line "{line}" uses obsolete syntax that will no longer be supported in datajoint 0.14. '
"For details, see issue #780 https://github.com/datajoint/datajoint-python/issues/780".format(
line=line
Expand Down
13 changes: 7 additions & 6 deletions datajoint/diagram.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,15 @@
import re
import functools
import io
import warnings
import logging
import inspect
from .table import Table
from .dependencies import unite_master_parts
from .user_tables import Manual, Imported, Computed, Lookup, Part
from .errors import DataJointError
from .table import lookup_class_name

logger = logging.getLogger(__name__.split(".")[0])

try:
from matplotlib import pyplot as plt
Expand All @@ -21,10 +26,6 @@
except:
diagram_active = False

from .user_tables import Manual, Imported, Computed, Lookup, Part
from .errors import DataJointError
from .table import lookup_class_name


user_table_classes = (Manual, Lookup, Computed, Imported, Part)

Expand Down Expand Up @@ -63,7 +64,7 @@ class Diagram:
"""

def __init__(self, *args, **kwargs):
warnings.warn(
logger.warning(
"Please install matplotlib and pygraphviz libraries to enable the Diagram feature."
)

Expand Down
2 changes: 1 addition & 1 deletion datajoint/expression.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
)
from .declare import CONSTANT_LITERALS

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


class QueryExpression:
Expand Down
6 changes: 4 additions & 2 deletions datajoint/fetch.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from functools import partial
from pathlib import Path
import warnings
import logging
import pandas
import itertools
import re
Expand All @@ -12,6 +12,8 @@
from .settings import config
from .utils import safe_write

logger = logging.getLogger(__name__.split(".")[0])


class key:
"""
Expand Down Expand Up @@ -222,7 +224,7 @@ def __call__(
)

if limit is None and offset is not None:
warnings.warn(
logger.warning(
"Offset set, but no limit. Setting limit to a large number. "
"Consider setting a limit explicitly."
)
Expand Down
2 changes: 1 addition & 1 deletion datajoint/heading.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from .attribute_adapter import get_adapter, AttributeAdapter


logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])

default_attribute_properties = (
dict( # these default values are set in computed attributes
Expand Down
57 changes: 57 additions & 0 deletions datajoint/logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import logging
import os
import sys
import io

# Package-level logger: __name__ is "datajoint.logging", so splitting on
# "." yields the root package name ("datajoint").
logger = logging.getLogger(__name__.split(".")[0])

# Default level is read from the DJ_LOG_LEVEL environment variable
# (case-insensitive); falls back to WARNING when unset.
log_level = os.environ.get("DJ_LOG_LEVEL", "warning").upper()

log_format = logging.Formatter(
    "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s"
)

stream_handler = logging.StreamHandler()  # default handler
stream_handler.setFormatter(log_format)

logger.setLevel(level=log_level)
# Replace (not append to) any existing handlers so that re-importing the
# package does not produce duplicate log output.
logger.handlers = [stream_handler]


def excepthook(exc_type, exc_value, exc_traceback):
    """
    Log uncaught exceptions through the package logger.

    KeyboardInterrupt is delegated to the default hook so Ctrl-C still
    terminates with the usual behavior. With DEBUG logging enabled the
    full traceback is logged; otherwise only a one-line error summary
    is emitted to keep user-facing output compact.

    :param exc_type: exception class of the uncaught exception
    :param exc_value: the exception instance
    :param exc_traceback: the associated traceback object
    """
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    # Use isEnabledFor(logging.DEBUG) instead of comparing the effective
    # level to the magic number 10: it is self-documenting and also covers
    # custom levels below DEBUG, which the exact-equality check missed.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(
            "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
        )
    else:
        logger.error(f"Uncaught exception: {exc_value}")


# Install the hook so all uncaught exceptions flow through the logger.
sys.excepthook = excepthook


# https://github.com/tqdm/tqdm/issues/313#issuecomment-267959111
class TqdmToLogger(io.StringIO):
    """
    Output stream for TQDM which will output to logger module instead of
    the StdOut.
    """

    # target logger; set in __init__
    logger = None
    # logging level used by flush(); set in __init__
    level = None
    # most recent tqdm line, with progress-bar padding stripped
    buf = ""

    def __init__(self, logger, level=None):
        """
        :param logger: the `logging.Logger` to forward tqdm output to
        :param level: logging level for emitted records; defaults to INFO
        """
        super().__init__()
        self.logger = logger
        self.level = level or logging.INFO

    def write(self, buf):
        """
        Buffer one tqdm update; emission is deferred to :meth:`flush`.

        :param buf: raw text from tqdm (padded with spaces/CR/LF)
        :return: the number of characters accepted, per the
            ``io.TextIOBase.write`` contract (the original returned None)
        """
        # tqdm pads each update with whitespace and carriage returns;
        # keep only the meaningful text
        self.buf = buf.strip("\r\n\t ")
        return len(buf)

    def flush(self):
        """Emit the most recently written tqdm line as one log record."""
        self.logger.log(self.level, self.buf)
11 changes: 7 additions & 4 deletions datajoint/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
from pathlib import Path
from cryptography.exceptions import InvalidSignature
from otumat import hash_pkg, verify
import logging

logger = logging.getLogger(__name__.split(".")[0])


def _update_error_stack(plugin_name):
Expand All @@ -12,13 +15,13 @@ def _update_error_stack(plugin_name):
plugin_meta = pkg_resources.get_distribution(plugin_name)

data = hash_pkg(pkgpath=str(Path(plugin_meta.module_path, plugin_name)))
signature = plugin_meta.get_metadata("{}.sig".format(plugin_name))
pubkey_path = str(Path(base_meta.egg_info, "{}.pub".format(base_name)))
signature = plugin_meta.get_metadata(f"{plugin_name}.sig")
pubkey_path = str(Path(base_meta.egg_info, f"{base_name}.pub"))
verify(pubkey_path=pubkey_path, data=data, signature=signature)
print("DataJoint verified plugin `{}` detected.".format(plugin_name))
logger.info(f"DataJoint verified plugin `{plugin_name}` detected.")
return True
except (FileNotFoundError, InvalidSignature):
print("Unverified plugin `{}` detected.".format(plugin_name))
logger.warning(f"Unverified plugin `{plugin_name}` detected.")
return False


Expand Down
3 changes: 1 addition & 2 deletions datajoint/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,12 @@
from io import BytesIO
import minio # https://docs.minio.io/docs/python-client-api-reference
import urllib3
import warnings
import uuid
import logging
from pathlib import Path
from . import errors

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


class Folder:
Expand Down
2 changes: 1 addition & 1 deletion datajoint/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from .table import lookup_class_name, Log, FreeTable
import types

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


def ordered_dir(class_):
Expand Down
4 changes: 2 additions & 2 deletions datajoint/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
}
)

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])
log_levels = {
"INFO": logging.INFO,
"WARNING": logging.WARNING,
Expand Down Expand Up @@ -104,7 +104,7 @@ def save(self, filename, verbose=False):
with open(filename, "w") as fid:
json.dump(self._conf, fid, indent=4)
if verbose:
print("Saved settings in " + filename)
logger.info("Saved settings in " + filename)

def load(self, filename):
"""
Expand Down
7 changes: 3 additions & 4 deletions datajoint/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import logging
import uuid
import re
import warnings
from pathlib import Path
from .settings import config
from .declare import declare, alter
Expand All @@ -25,7 +24,7 @@
)
from .version import __version__ as version

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])

foreign_key_error_regexp = re.compile(
r"[\w\s:]*\((?P<child>`[^`]+`.`[^`]+`), "
Expand Down Expand Up @@ -532,7 +531,7 @@ def cascade(table):
cascade(child)
else:
deleted.add(table.full_table_name)
print(
logger.info(
"Deleting {count} rows from {table}".format(
count=delete_count, table=table.full_table_name
)
Expand Down Expand Up @@ -768,7 +767,7 @@ def _update(self, attrname, value=None):
>>> (v2p.Mice() & key)._update('mouse_dob', '2011-01-01')
>>> (v2p.Mice() & key)._update( 'lens') # set the value to NULL
"""
warnings.warn(
logger.warning(
"`_update` is a deprecated function to be removed in datajoint 0.14. "
"Use `.update1` instead."
)
Expand Down

0 comments on commit 41ddc63

Please sign in to comment.