Merge pull request #217 from dstansby/print-to-logger
Use logger instead of print statements
dstansby committed Jun 6, 2023
2 parents ad3ed84 + 13887de commit f8dbe18
Showing 8 changed files with 110 additions and 97 deletions.
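The pattern applied across all eight files is the standard library-logging idiom: unconditional print() calls (and warnings.warn() calls used for the same purpose) are replaced by a single module-level logger, so callers decide whether and where messages appear. A minimal before/after sketch of the substitution, using a message that occurs in the hunks below:

    # before: always written to stdout, impossible to silence or redirect
    print("Invalid global attribute value")

    # after: routed through the logging framework; visibility is the caller's choice
    from cdflib.logging import logger
    logger.warning("Invalid global attribute value")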
2 changes: 2 additions & 0 deletions cdflib/__init__.py
@@ -5,6 +5,8 @@
from .epochs import CDFepoch as cdfepoch # noqa: F401

__all__ = ["CDF"]
+
+
try:
from ._version import version as __version__
except Exception:
5 changes: 0 additions & 5 deletions cdflib/cdfread.py
@@ -2031,7 +2031,6 @@ def _file_or_url_or_s3_handler(
) -> Union["S3object", io.BufferedReader, io.BytesIO]:
bdata: Union["S3object", io.BufferedReader, io.BytesIO]
if filetype == "url":
- # print("debug, opening url")
req = urllib.request.Request(filename)
response = urllib.request.urlopen(req)
bdata = io.BytesIO(response.read())
@@ -2048,16 +2047,13 @@
s3c = boto3.resource("s3")
obj = s3c.Object(bucket_name=mybucket, key=mykey)
bdata = S3object(obj) # type: ignore
- # print("Debug, using S3 read-in-place, flag ",s3_flag)
else:
# for store in memory or as temp copy
s3c = boto3.client("s3")
obj = s3c.get_object(Bucket=mybucket, Key=mykey)
bdata = s3_fetchall(obj)
- # print("Debug, using S3 copy, flag ",s3_flag)
return bdata
else:
- # print("debug, opening ",fname)
bdata = open(filename, "rb")

return bdata
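This handler is what lets one reader transparently open local paths, HTTP(S) URLs, and S3 objects. A hedged usage sketch, assuming the public reader routes remote paths through this private handler as the branches above imply (all paths and bucket names are illustrative):

    import cdflib

    local = cdflib.CDF("/tmp/sample.cdf")                  # plain open(filename, "rb")
    remote = cdflib.CDF("https://example.com/sample.cdf")  # body read into io.BytesIO
    s3obj = cdflib.CDF("s3://my-bucket/sample.cdf")        # S3object read-in-place or a fetched copy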
@@ -2070,7 +2066,6 @@ def _unstream_file(self, f) -> None: # type: ignore
"""
raw_data = f.read(-1)
self.temp_file = Path(tempfile.NamedTemporaryFile(suffix=".cdf").name)
- # print("debug, using temp file: ",self.temp_file)
with self.temp_file.open("wb") as g:
g.write(raw_data)
self.original_stream = self.file
44 changes: 22 additions & 22 deletions cdflib/cdfwrite.py
@@ -9,7 +9,6 @@
import platform as pf
import struct
import sys
- import warnings
from functools import wraps
from numbers import Number
from pathlib import Path
@@ -18,6 +17,7 @@
import numpy as np

import cdflib.epochs as cdfepoch
+ from cdflib.logging import logger

__all__ = ["CDF"]

@@ -393,7 +393,7 @@ def write_globalattrs(self, globalAttrs):
data = value[0]
if dataType == self.CDF_CHAR or dataType == self.CDF_UCHAR:
if isinstance(data, list) or isinstance(data, tuple):
- warnings.warn("Invalid global attribute value")
+ logger.warning("Invalid global attribute value")
return
numElems = len(data)
elif dataType == self.CDF_EPOCH or dataType == self.CDF_EPOCH16 or dataType == self.CDF_TIME_TT2000:
@@ -430,7 +430,7 @@
data = value
numElems, dataType = self._datatype_define(value)
if numElems is None:
- warnings.warn("Unknown data")
+ logger.warning("Unknown data")
return

offset = self._write_aedr(f, True, attrNum, entryNum, data, dataType, numElems, None)
@@ -583,7 +583,7 @@ def write_variableattrs(self, variableAttrs):
data = value
numElems, dataType = self._datatype_define(value)
if numElems is None:
- warnings.warn("Unknown data")
+ logger.warning("Unknown data")
return
offset = self._write_aedr(f, False, attrNum, entryNum, data, dataType, numElems, zVar)
if entries == 0:
@@ -800,9 +800,11 @@ def write_var(self, var_spec, var_attrs=None, var_data=None):
notsupport = True

if notsupport or len(var_data) != 2:
- print("Sparse record #s and data are not of list/tuple form:")
- print(" [ [rec_#1, rec_#2, rec_#3, ],")
- print(" [data_#1, data_#2, data_#3, ....] ]")
+ logger.warning(
+     "Sparse record #s and data are not of list/tuple form:\n"
+     " [ [rec_#1, rec_#2, rec_#3, ],\n"
+     " [data_#1, data_#2, data_#3, ....] ]"
+ )
return

# Format data into: [[recstart1, recend1, data1],
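For reference, the two-list form that the new warning message describes pairs a list of record numbers with a list of the corresponding values; a minimal sketch (names and values are illustrative):

    import numpy as np

    records = [0, 2, 5]                                           # physical record numbers
    values = [np.float64(1.0), np.float64(2.0), np.float64(3.0)]  # one value per record
    var_data = [records, values]                                  # satisfies the len(var_data) == 2 check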
@@ -841,7 +843,7 @@ def _write_var_attrs(self, f: io.BufferedWriter, varNum: int, var_attrs: Dict[st

for attr, entry in var_attrs.items():
if attr in self.gattrs:
- warnings.warn(f"Attribute: {attr}" + " already defined as a global attribute... Skip")
+ logger.warning(f"Attribute: {attr}" + " already defined as a global attribute... Skip")
continue

if not (attr in self.attrs):
@@ -1326,7 +1328,7 @@ def _majority_token(major: str) -> int:
try:
return majors[major.upper()]
except Exception:
- warnings.warn(f"bad major.... {major}")
+ logger.warning(f"bad major.... {major}")
return 0

@staticmethod
@@ -1356,7 +1358,7 @@ def _encoding_token(encoding: str) -> int:
try:
return encodings[encoding.upper()]
except Exception:
- warnings.warn(f"bad encoding.... {encoding}")
+ logger.warning(f"bad encoding.... {encoding}")
return 0

@staticmethod
@@ -1412,10 +1414,10 @@ def _datatype_define(self, value: Union[str, int, float, complex, np.ndarray]) -
elif value.dtype.type == np.str_:
return numElems, self.CDF_CHAR
else:
- warnings.warn("Invalid data type for data.... Skip")
+ logger.warning("Invalid data type for data.... Skip")
return None, None
else:
- warnings.warn("Invalid data type for data.... Skip")
+ logger.warning("Invalid data type for data.... Skip")
return None, None

@staticmethod
@@ -2622,7 +2624,7 @@ def _make_sparse_blocks(self, variable, records, data: List[Tuple[int, int, np.n
try:
data = data["Data"]
except Exception:
- warnings.warn("Unknown dictionary.... Skip")
+ logger.warning("Unknown dictionary.... Skip")
return None
if isinstance(data, np.ndarray):
if len(records) == len(data):
@@ -2632,7 +2634,7 @@
# There are some virtual data
return self._make_sparse_blocks_with_virtual(variable, records, data)
else:
- warnings.warn("Invalid sparse data... " "Less data than the specified records... Skip")
+ logger.warning("Invalid sparse data... " "Less data than the specified records... Skip")
elif isinstance(data, bytes):
record_length = len(records)
for z in range(0, variable["Num_Dims"]):
@@ -2644,16 +2646,14 @@ def _make_sparse_blocks(self, variable, records, data: List[Tuple[int, int, np.n
# There are some virtual data
return self._make_sparse_blocks_with_virtual(variable, records, data)
else:
- warnings.warn("Invalid sparse data... " "Less data than the specified records... Skip")
+ logger.warning("Invalid sparse data... " "Less data than the specified records... Skip")
elif isinstance(data, list):
if isinstance(data[0], list):
if not (all(isinstance(el, str) for el in data[0])):
- print("Can not handle list data.... ", "Only support list of str... Skip")
- return
+ raise RuntimeError("Can not handle list data.... ", "Only support list of str...")
else:
if not (all(isinstance(el, str) for el in data)):
- print("Can not handle list data.... ", "Only support list of str... Skip")
- return
+ raise RuntimeError("Can not handle list data.... ", "Only support list of str...")
record_length = len(records)
# for z in range(0, variable['Num_Dims']):
# record_length = record_length * variable['Dim_Sizes'][z]
@@ -2664,9 +2664,9 @@ def _make_sparse_blocks(self, variable, records, data: List[Tuple[int, int, np.n
# There are some virtual data
return self._make_sparse_blocks_with_virtual(variable, records, data)
else:
- print("Invalid sparse data... ", "Less data than the specified records... Skip")
+ logger.warning("Invalid sparse data... " "Less data than the specified records... Skip")
else:
- print("Invalid sparse data... ", "Less data than the specified records... Skip")
+ logger.warning("Invalid sparse data... " "Less data than the specified records... Skip")
return
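Note the behavioural change in this hunk: list data that is not entirely str now raises RuntimeError instead of printing a message and silently returning. A hedged sketch of the caller-side consequence, assuming the call path from write_var (names are illustrative):

    import logging

    try:
        cdf.write_var(var_spec, var_attrs=attrs, var_data=var_data)
    except RuntimeError as exc:
        # unsupported list data for a sparse variable is now rejected loudly
        logging.getLogger(__name__).error("variable rejected: %s", exc)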

def _make_sparse_blocks_with_virtual(self, variable, records, data) -> List[Tuple[int, int, np.ndarray]]: # type: ignore[no-untyped-def]
@@ -2722,7 +2722,7 @@ def _make_sparse_blocks_with_virtual(self, variable, records, data) -> List[Tupl
sparse_data.append((sblock[0], sblock[1], np.array(datax)))
return sparse_data
else:
- print("Can not handle data... Skip")
+ logger.warning("Can not handle data... Skip")
return None

def _make_sparse_blocks_with_physical(self, variable, records, data) -> List[Tuple[int, int, np.ndarray]]: # type: ignore[no-untyped-def]
3 changes: 3 additions & 0 deletions cdflib/logging.py
@@ -0,0 +1,3 @@
+ import logging
+
+ logger = logging.getLogger(__name__)
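Because getLogger(__name__) is evaluated inside cdflib/logging.py, every module that imports this object shares one logger named "cdflib.logging", a child of "cdflib". A minimal configuration sketch for downstream code that wants to see, or silence, the messages introduced in this commit:

    import logging

    logging.basicConfig(format="%(name)s %(levelname)s: %(message)s")
    # setting the parent logger's level changes the shared logger's effective level
    logging.getLogger("cdflib").setLevel(logging.DEBUG)
    # or suppress the library's output entirely:
    # logging.getLogger("cdflib").setLevel(logging.CRITICAL)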
1 change: 0 additions & 1 deletion cdflib/s3.py
@@ -18,7 +18,6 @@ def read(self, isize):
if isize == -1:
isize = self.content_length
myrange = "bytes=%d-%d" % (self.pos, (self.pos + isize - 1))
- # print("debug: byte range ",myrange)
self.pos += isize # advance the pointer
stream = self.fhandle.get(Range=myrange)["Body"]
rawdata = stream.read()
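The deleted comment documented the ranged GET that this method performs: each read() translates the file-position cursor into an HTTP Range header, so only the requested bytes leave S3. A standalone sketch of the same idea with plain boto3 (bucket and key names are illustrative):

    import boto3

    obj = boto3.resource("s3").Object("my-bucket", "sample.cdf")
    # fetch only bytes 0-7, e.g. a file's magic number, not the whole object
    first8 = obj.get(Range="bytes=0-7")["Body"].read()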
