From 7d54b8bd36c491a7e2411d1bdd683d3d7c7c8452 Mon Sep 17 00:00:00 2001 From: Ian Vermes Date: Tue, 3 Dec 2019 10:48:20 +0000 Subject: [PATCH 1/8] Add Python code auto-formatter (black) as CI dependency --- ci/requirements-py36-qc.yml | 1 + ci/requirements-py36.yml | 1 + ci/requirements-py37.yml | 1 + 3 files changed, 3 insertions(+) diff --git a/ci/requirements-py36-qc.yml b/ci/requirements-py36-qc.yml index 62ad8c0..3931641 100644 --- a/ci/requirements-py36-qc.yml +++ b/ci/requirements-py36-qc.yml @@ -15,3 +15,4 @@ dependencies: - pytest-pep8 - python=3.6 - typing + - black diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index ab54408..0dc1da6 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -12,3 +12,4 @@ dependencies: - pytest-cov - python=3.6 - typing + - black diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 6ed70b5..ba68258 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -13,3 +13,4 @@ dependencies: - pytest-flakes - python=3.7 - typing + - black From 35a3de76720db0f162c153c3565093c34aee1bac Mon Sep 17 00:00:00 2001 From: Ian Vermes Date: Tue, 3 Dec 2019 10:54:49 +0000 Subject: [PATCH 2/8] Add CI check for autoformatted code --- .travis.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1ea9570..99046ff 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,8 +5,8 @@ sudo: false # use container based build matrix: fast_finish: true include: - - env: ENV=py36 - - env: ENV=py37 + - env: ENV=py36 + - env: ENV=py37 before_install: - wget http://repo.continuum.io/miniconda/Miniconda3-3.16.0-Linux-x86_64.sh -O miniconda.sh; @@ -24,15 +24,16 @@ install: - pip install --no-deps -e . script: + - black --check . 
- which python - python --version - python -c "import eccodes" - if [[ "$ENV" == "docs" ]]; then - sphinx-build -W -b html docs build/sphinx/html; + sphinx-build -W -b html docs build/sphinx/html; elif [[ "$ENV" == *"-qc" ]]; then - pytest -v --flakes --doctest-glob="*.rst" --cov=eccodes --cov=gribapi --cov-report term-missing --pep8 --mccabe $EXTRA_FLAGS; + pytest -v --flakes --doctest-glob="*.rst" --cov=eccodes --cov=gribapi --cov-report term-missing --pep8 --mccabe $EXTRA_FLAGS; else - pytest -v --cov=eccodes --cov=gribapi --cov-report term-missing $EXTRA_FLAGS; + pytest -v --cov=eccodes --cov=gribapi --cov-report term-missing $EXTRA_FLAGS; fi after_success: From 9b0555c9bdaca3879b1be5b98df2b8bac34f4b05 Mon Sep 17 00:00:00 2001 From: Ian Vermes Date: Tue, 3 Dec 2019 11:38:55 +0000 Subject: [PATCH 3/8] Autoformat all gribapi/*.py files --- gribapi/bindings.py | 14 +- gribapi/errors.py | 267 +++++++++++++++++++++++++++---------- gribapi/gribapi.py | 318 ++++++++++++++++++++++++++++---------------- 3 files changed, 409 insertions(+), 190 deletions(-) diff --git a/gribapi/bindings.py b/gribapi/bindings.py index 6832861..832155d 100644 --- a/gribapi/bindings.py +++ b/gribapi/bindings.py @@ -25,7 +25,7 @@ import cffi -__version__ = '0.9.5.dev0' +__version__ = "0.9.5.dev0" LOG = logging.getLogger(__name__) @@ -34,14 +34,14 @@ except ModuleNotFoundError: ffi = cffi.FFI() ffi.cdef( - pkgutil.get_data(__name__, 'grib_api.h').decode('utf-8') + - pkgutil.get_data(__name__, 'eccodes.h').decode('utf-8') + pkgutil.get_data(__name__, "grib_api.h").decode("utf-8") + + pkgutil.get_data(__name__, "eccodes.h").decode("utf-8") ) - LIBNAMES = ['eccodes', 'libeccodes.so', 'libeccodes'] + LIBNAMES = ["eccodes", "libeccodes.so", "libeccodes"] - if os.environ.get('ECCODES_DIR'): - LIBNAMES.insert(0, os.path.join(os.environ['ECCODES_DIR'], 'lib/libeccodes.so')) + if os.environ.get("ECCODES_DIR"): + LIBNAMES.insert(0, os.path.join(os.environ["ECCODES_DIR"], "lib/libeccodes.so")) for libname in LIBNAMES: try: @@ -54,4 +54,4 @@ LOG.info("ecCodes library not found using name '%s'.", libname) # default encoding for ecCodes strings -ENC = 'ascii' +ENC = "ascii" diff --git a/gribapi/errors.py b/gribapi/errors.py index 3e7041b..4ff133f 100644 --- a/gribapi/errors.py +++ b/gribapi/errors.py @@ -27,207 +27,340 @@ def __str__(self): class FunctionalityNotEnabledError(GribInternalError): """Functionality not enabled.""" + + class WrongBitmapSizeError(GribInternalError): """Size of bitmap is incorrect.""" + + class OutOfRangeError(GribInternalError): """Value out of coding range.""" + + class UnsupportedEditionError(GribInternalError): """Edition not supported..""" + + class AttributeNotFoundError(GribInternalError): """Attribute not found..""" + + class TooManyAttributesError(GribInternalError): """Too many attributes. 
Increase MAX_ACCESSOR_ATTRIBUTES.""" + + class AttributeClashError(GribInternalError): """Attribute is already present, cannot add.""" + + class NullPointerError(GribInternalError): """Null pointer.""" + + class MissingBufrEntryError(GribInternalError): """Missing BUFR table entry for descriptor.""" + + class WrongConversionError(GribInternalError): """Wrong type conversion.""" + + class StringTooSmallError(GribInternalError): """String is smaller than requested.""" + + class InvalidKeyValueError(GribInternalError): """Invalid key value.""" + + class ValueDifferentError(GribInternalError): """Value is different.""" + + class DifferentEditionError(GribInternalError): """Edition of two messages is different.""" + + class InvalidBitsPerValueError(GribInternalError): """Invalid number of bits per value.""" + + class CorruptedIndexError(GribInternalError): """Index is corrupted.""" + + class MessageMalformedError(GribInternalError): """Message malformed.""" + + class UnderflowError(GribInternalError): """Underflow.""" + + class SwitchNoMatchError(GribInternalError): """Switch unable to find a matching case.""" + + class ConstantFieldError(GribInternalError): """Constant field.""" + + class MessageTooLargeError(GribInternalError): """Message is too large for the current architecture.""" + + class InternalArrayTooSmallError(GribInternalError): """An internal array is too small.""" + + class PrematureEndOfFileError(GribInternalError): """End of resource reached when reading message.""" + + class NullIndexError(GribInternalError): """Null index.""" + + class EndOfIndexError(GribInternalError): """End of index reached.""" + + class WrongGridError(GribInternalError): """Grid description is wrong or inconsistent.""" + + class NoValuesError(GribInternalError): """Unable to code a field without values.""" + + class EndError(GribInternalError): """End of resource.""" + + class WrongTypeError(GribInternalError): """Wrong type while packing.""" + + class NoDefinitionsError(GribInternalError): """Definitions files not found.""" + + class HashArrayNoMatchError(GribInternalError): """Hash array no match.""" + + class ConceptNoMatchError(GribInternalError): """Concept no match.""" + + class OutOfAreaError(GribInternalError): """The point is out of the grid area.""" + + class MissingKeyError(GribInternalError): """Missing a key from the fieldset.""" + + class InvalidOrderByError(GribInternalError): """Invalid order by.""" + + class InvalidNearestError(GribInternalError): """Invalid nearest id.""" + + class InvalidKeysIteratorError(GribInternalError): """Invalid keys iterator id.""" + + class InvalidIteratorError(GribInternalError): """Invalid iterator id.""" + + class InvalidIndexError(GribInternalError): """Invalid index id.""" + + class InvalidGribError(GribInternalError): """Invalid grib id.""" + + class InvalidFileError(GribInternalError): """Invalid file id.""" + + class WrongStepUnitError(GribInternalError): """Wrong units for step (step must be integer).""" + + class WrongStepError(GribInternalError): """Unable to set step.""" + + class InvalidTypeError(GribInternalError): """Invalid key type.""" + + class WrongLengthError(GribInternalError): """Wrong message length.""" + + class ValueCannotBeMissingError(GribInternalError): """Value cannot be missing.""" + + class InvalidSectionNumberError(GribInternalError): """Invalid section number.""" + + class NullHandleError(GribInternalError): """Null handle.""" + + class InvalidArgumentError(GribInternalError): """Invalid argument.""" + + class 
ReadOnlyError(GribInternalError): """Value is read only.""" + + class MemoryAllocationError(GribInternalError): """Memory allocation error.""" + + class GeocalculusError(GribInternalError): """Problem with calculation of geographic attributes.""" + + class NoMoreInSetError(GribInternalError): """Code cannot unpack because of string too small.""" + + class EncodingError(GribInternalError): """Encoding invalid.""" + + class DecodingError(GribInternalError): """Decoding invalid.""" + + class MessageInvalidError(GribInternalError): """Message invalid.""" + + class IOProblemError(GribInternalError): """Input output problem.""" + + class KeyValueNotFoundError(GribInternalError): """Key/value not found.""" + + class WrongArraySizeError(GribInternalError): """Array size mismatch.""" + + class CodeNotFoundInTableError(GribInternalError): """Code not found in code table.""" + + class FileNotFoundError(GribInternalError): """File not found.""" + + class ArrayTooSmallError(GribInternalError): """Passed array is too small.""" + + class MessageEndNotFoundError(GribInternalError): """Missing 7777 at end of message.""" + + class FunctionNotImplementedError(GribInternalError): """Function not yet implemented.""" + + class BufferTooSmallError(GribInternalError): """Passed buffer is too small.""" + + class InternalError(GribInternalError): """Internal error.""" + + class EndOfFileError(GribInternalError): """End of resource reached.""" + ERROR_MAP = { - -67 : FunctionalityNotEnabledError, - -66 : WrongBitmapSizeError, - -65 : OutOfRangeError, - -64 : UnsupportedEditionError, - -63 : AttributeNotFoundError, - -62 : TooManyAttributesError, - -61 : AttributeClashError, - -60 : NullPointerError, - -59 : MissingBufrEntryError, - -58 : WrongConversionError, - -57 : StringTooSmallError, - -56 : InvalidKeyValueError, - -55 : ValueDifferentError, - -54 : DifferentEditionError, - -53 : InvalidBitsPerValueError, - -52 : CorruptedIndexError, - -51 : MessageMalformedError, - -50 : UnderflowError, - -49 : SwitchNoMatchError, - -48 : ConstantFieldError, - -47 : MessageTooLargeError, - -46 : InternalArrayTooSmallError, - -45 : PrematureEndOfFileError, - -44 : NullIndexError, - -43 : EndOfIndexError, - -42 : WrongGridError, - -41 : NoValuesError, - -40 : EndError, - -39 : WrongTypeError, - -38 : NoDefinitionsError, - -37 : HashArrayNoMatchError, - -36 : ConceptNoMatchError, - -35 : OutOfAreaError, - -34 : MissingKeyError, - -33 : InvalidOrderByError, - -32 : InvalidNearestError, - -31 : InvalidKeysIteratorError, - -30 : InvalidIteratorError, - -29 : InvalidIndexError, - -28 : InvalidGribError, - -27 : InvalidFileError, - -26 : WrongStepUnitError, - -25 : WrongStepError, - -24 : InvalidTypeError, - -23 : WrongLengthError, - -22 : ValueCannotBeMissingError, - -21 : InvalidSectionNumberError, - -20 : NullHandleError, - -19 : InvalidArgumentError, - -18 : ReadOnlyError, - -17 : MemoryAllocationError, - -16 : GeocalculusError, - -15 : NoMoreInSetError, - -14 : EncodingError, - -13 : DecodingError, - -12 : MessageInvalidError, - -11 : IOProblemError, - -10 : KeyValueNotFoundError, - -9 : WrongArraySizeError, - -8 : CodeNotFoundInTableError, - -7 : FileNotFoundError, - -6 : ArrayTooSmallError, - -5 : MessageEndNotFoundError, - -4 : FunctionNotImplementedError, - -3 : BufferTooSmallError, - -2 : InternalError, - -1 : EndOfFileError + -67: FunctionalityNotEnabledError, + -66: WrongBitmapSizeError, + -65: OutOfRangeError, + -64: UnsupportedEditionError, + -63: AttributeNotFoundError, + -62: TooManyAttributesError, + -61: 
AttributeClashError, + -60: NullPointerError, + -59: MissingBufrEntryError, + -58: WrongConversionError, + -57: StringTooSmallError, + -56: InvalidKeyValueError, + -55: ValueDifferentError, + -54: DifferentEditionError, + -53: InvalidBitsPerValueError, + -52: CorruptedIndexError, + -51: MessageMalformedError, + -50: UnderflowError, + -49: SwitchNoMatchError, + -48: ConstantFieldError, + -47: MessageTooLargeError, + -46: InternalArrayTooSmallError, + -45: PrematureEndOfFileError, + -44: NullIndexError, + -43: EndOfIndexError, + -42: WrongGridError, + -41: NoValuesError, + -40: EndError, + -39: WrongTypeError, + -38: NoDefinitionsError, + -37: HashArrayNoMatchError, + -36: ConceptNoMatchError, + -35: OutOfAreaError, + -34: MissingKeyError, + -33: InvalidOrderByError, + -32: InvalidNearestError, + -31: InvalidKeysIteratorError, + -30: InvalidIteratorError, + -29: InvalidIndexError, + -28: InvalidGribError, + -27: InvalidFileError, + -26: WrongStepUnitError, + -25: WrongStepError, + -24: InvalidTypeError, + -23: WrongLengthError, + -22: ValueCannotBeMissingError, + -21: InvalidSectionNumberError, + -20: NullHandleError, + -19: InvalidArgumentError, + -18: ReadOnlyError, + -17: MemoryAllocationError, + -16: GeocalculusError, + -15: NoMoreInSetError, + -14: EncodingError, + -13: DecodingError, + -12: MessageInvalidError, + -11: IOProblemError, + -10: KeyValueNotFoundError, + -9: WrongArraySizeError, + -8: CodeNotFoundInTableError, + -7: FileNotFoundError, + -6: ArrayTooSmallError, + -5: MessageEndNotFoundError, + -4: FunctionNotImplementedError, + -3: BufferTooSmallError, + -2: InternalError, + -1: EndOfFileError, } diff --git a/gribapi/gribapi.py b/gribapi/gribapi.py index 34d681a..2884f86 100644 --- a/gribapi/gribapi.py +++ b/gribapi/gribapi.py @@ -26,6 +26,7 @@ type(file) except NameError: import io + file = io.IOBase long = int @@ -49,13 +50,13 @@ """ TAF product kind """ # Constants for 'missing' -GRIB_MISSING_DOUBLE = -1e+100 +GRIB_MISSING_DOUBLE = -1e100 GRIB_MISSING_LONG = 2147483647 # ECC-1029: Disable function-arguments type-checking unless # environment variable is defined and equal to 1 -enable_type_checks = (os.environ.get('ECCODES_PYTHON_ENABLE_TYPE_CHECKS') == '1') +enable_type_checks = os.environ.get("ECCODES_PYTHON_ENABLE_TYPE_CHECKS") == "1" # Function-arguments type-checking decorator @@ -66,6 +67,7 @@ def require(**_params_): """ The actual decorator. Receives the target function in _func_ """ + def check_types(_func_, _params_=_params_): if not enable_type_checks: return _func_ @@ -80,10 +82,14 @@ def modified(*args, **kw): param = kw[name] if isinstance(allowed_types, type): allowed_types = (allowed_types,) - assert any([isinstance(param, type1) for type1 in allowed_types]), \ - "Parameter '%s' should be of type %s" % (name, " or ".join([t.__name__ for t in allowed_types])) + assert any([isinstance(param, type1) for type1 in allowed_types]), ( + "Parameter '%s' should be of type %s" + % (name, " or ".join([t.__name__ for t in allowed_types])) + ) return _func_(**kw) + return modified + return check_types @@ -92,6 +98,7 @@ class Bunch(dict): """ The collector of a bunch of named stuff :). 
""" + def __init__(self, **kw): dict.__init__(self, kw) self.__dict__.update(kw) @@ -113,18 +120,19 @@ def __delattr__(self, key): del self.__dict__[key] def __str__(self): - state = ["%s=%r" % (attribute, value) - for (attribute, value) - in self.__dict__.items()] - return '\n'.join(state) + state = [ + "%s=%r" % (attribute, value) for (attribute, value) in self.__dict__.items() + ] + return "\n".join(state) + + # @endcond def err_last(func): - @wraps(func) def wrapper(*args): - err = ffi.new('int *') + err = ffi.new("int *") args += (err,) retval = func(*args) return err[0], retval @@ -134,7 +142,7 @@ def wrapper(*args): def get_handle(msgid): assert isinstance(msgid, int) - h = ffi.cast('grib_handle*', msgid) + h = ffi.cast("grib_handle*", msgid) if h == ffi.NULL: raise errors.InvalidGribError return h @@ -143,52 +151,52 @@ def get_handle(msgid): def put_handle(handle): if handle == ffi.NULL: raise errors.InvalidGribError - return int(ffi.cast('unsigned long', handle)) + return int(ffi.cast("unsigned long", handle)) def get_multi_handle(msgid): assert isinstance(msgid, int) - return ffi.cast('grib_multi_handle*', msgid) + return ffi.cast("grib_multi_handle*", msgid) def put_multi_handle(handle): - return int(ffi.cast('unsigned long', handle)) + return int(ffi.cast("unsigned long", handle)) def get_index(indexid): assert isinstance(indexid, int) - return ffi.cast('grib_index*', indexid) + return ffi.cast("grib_index*", indexid) def put_index(indexh): - return int(ffi.cast('unsigned long', indexh)) + return int(ffi.cast("unsigned long", indexh)) def get_iterator(iterid): assert isinstance(iterid, int) - return ffi.cast('grib_iterator*', iterid) + return ffi.cast("grib_iterator*", iterid) def put_iterator(iterh): - return int(ffi.cast('unsigned long', iterh)) + return int(ffi.cast("unsigned long", iterh)) def get_grib_keys_iterator(iterid): assert isinstance(iterid, int) - return ffi.cast('grib_keys_iterator*', iterid) + return ffi.cast("grib_keys_iterator*", iterid) def put_grib_keys_iterator(iterh): - return int(ffi.cast('unsigned long', iterh)) + return int(ffi.cast("unsigned long", iterh)) def get_bufr_keys_iterator(iterid): assert isinstance(iterid, int) - return ffi.cast('bufr_keys_iterator*', iterid) + return ffi.cast("bufr_keys_iterator*", iterid) def put_bufr_keys_iterator(iterh): - return int(ffi.cast('unsigned long', iterh)) + return int(ffi.cast("unsigned long", iterh)) # @cond @@ -203,6 +211,8 @@ def GRIB_CHECK(errid): """ if errid: errors.raise_grib_error(errid) + + # @endcond @@ -220,7 +230,9 @@ def gts_new_from_file(fileobj, headers_only=False): @exception GribInternalError """ # err, h = err_last(lib.gts_new_from_file)(ffi.NULL, fileobj) - err, h = err_last(lib.codes_handle_new_from_file)(ffi.NULL, fileobj, CODES_PRODUCT_GTS) + err, h = err_last(lib.codes_handle_new_from_file)( + ffi.NULL, fileobj, CODES_PRODUCT_GTS + ) if err: if err == lib.GRIB_END_OF_FILE: return None @@ -247,7 +259,9 @@ def metar_new_from_file(fileobj, headers_only=False): @exception GribInternalError """ # err, h = err_last(lib.metar_new_from_file)(ffi.NULL, fileobj) - err, h = err_last(lib.codes_handle_new_from_file)(ffi.NULL, fileobj, CODES_PRODUCT_METAR) + err, h = err_last(lib.codes_handle_new_from_file)( + ffi.NULL, fileobj, CODES_PRODUCT_METAR + ) if err: if err == lib.GRIB_END_OF_FILE: return None @@ -304,7 +318,9 @@ def any_new_from_file(fileobj, headers_only=False): @return id of the message loaded in memory or None @exception GribInternalError """ - err, h = 
err_last(lib.codes_handle_new_from_file)(ffi.NULL, fileobj, CODES_PRODUCT_ANY) + err, h = err_last(lib.codes_handle_new_from_file)( + ffi.NULL, fileobj, CODES_PRODUCT_ANY + ) if err: if err == lib.GRIB_END_OF_FILE: return None @@ -332,7 +348,9 @@ def bufr_new_from_file(fileobj, headers_only=False): @return id of the BUFR loaded in memory or None @exception GribInternalError """ - err, h = err_last(lib.codes_handle_new_from_file)(ffi.NULL, fileobj, CODES_PRODUCT_BUFR) + err, h = err_last(lib.codes_handle_new_from_file)( + ffi.NULL, fileobj, CODES_PRODUCT_BUFR + ) if err: if err == lib.GRIB_END_OF_FILE: return None @@ -368,7 +386,9 @@ def grib_new_from_file(fileobj, headers_only=False): @exception GribInternalError """ # err, h = err_last(lib.grib_new_from_file)(ffi.NULL, fileobj, headers_only) - err, h = err_last(lib.codes_handle_new_from_file)(ffi.NULL, fileobj, CODES_PRODUCT_GRIB) + err, h = err_last(lib.codes_handle_new_from_file)( + ffi.NULL, fileobj, CODES_PRODUCT_GRIB + ) if err: if err == lib.GRIB_END_OF_FILE: return None @@ -401,7 +421,7 @@ def grib_count_in_file(fileobj): @return number of messages in the file @exception GribInternalError """ - num_p = ffi.new('int*') + num_p = ffi.new("int*") err = lib.grib_count_in_file(ffi.NULL, fileobj, num_p) GRIB_CHECK(err) return num_p[0] @@ -452,8 +472,8 @@ def grib_get_string(msgid, key): length = grib_get_string_length(msgid, key) h = get_handle(msgid) - values = ffi.new('char[]', length) - length_p = ffi.new('size_t *', length) + values = ffi.new("char[]", length) + length_p = ffi.new("size_t *", length) err = lib.grib_get_string(h, key.encode(ENC), values, length_p) GRIB_CHECK(err) return ffi.string(values, length_p[0]).decode(ENC) @@ -471,7 +491,7 @@ def grib_set_string(msgid, key, value): """ h = get_handle(msgid) bvalue = value.encode(ENC) - length_p = ffi.new('size_t *', len(bvalue)) + length_p = ffi.new("size_t *", len(bvalue)) GRIB_CHECK(lib.grib_set_string(h, key.encode(ENC), bvalue, length_p)) @@ -557,7 +577,7 @@ def grib_get_size(msgid, key): @exception GribInternalError """ h = get_handle(msgid) - size_p = ffi.new('size_t*') + size_p = ffi.new("size_t*") err = lib.grib_get_size(h, key.encode(ENC), size_p) GRIB_CHECK(err) return size_p[0] @@ -573,7 +593,7 @@ def grib_get_string_length(msgid, key): @exception GribInternalError """ h = get_handle(msgid) - size = ffi.new('size_t *') + size = ffi.new("size_t *") err = lib.grib_get_length(h, key.encode(ENC), size) GRIB_CHECK(err) return size[0] @@ -716,9 +736,9 @@ def grib_iterator_next(iterid): @exception GribInternalError """ iterh = get_iterator(iterid) - lat_p = ffi.new('double*') - lon_p = ffi.new('double*') - value_p = ffi.new('double*') + lat_p = ffi.new("double*") + lon_p = ffi.new("double*") + value_p = ffi.new("double*") err = lib.grib_iterator_next(iterh, lat_p, lon_p, value_p) if err == 0: return [] @@ -912,7 +932,7 @@ def grib_get_long(msgid, key): @exception GribInternalError """ h = get_handle(msgid) - value_p = ffi.new('long*') + value_p = ffi.new("long*") err = lib.grib_get_long(h, key.encode(ENC), value_p) GRIB_CHECK(err) return value_p[0] @@ -929,7 +949,7 @@ def grib_get_double(msgid, key): @exception GribInternalError """ h = get_handle(msgid) - value_p = ffi.new('double*') + value_p = ffi.new("double*") err = lib.grib_get_double(h, key.encode(ENC), value_p) GRIB_CHECK(err) return value_p[0] @@ -1106,9 +1126,9 @@ def grib_set_double_array(msgid, key, inarray): a = inarray if isinstance(inarray, np.ndarray): # ECC-1007: Could also call numpy.ascontiguousarray - if 
not inarray.flags['C_CONTIGUOUS']: - a = a.copy(order='C') - a = ffi.cast('double*', a.ctypes.data) + if not inarray.flags["C_CONTIGUOUS"]: + a = a.copy(order="C") + a = ffi.cast("double*", a.ctypes.data) GRIB_CHECK(lib.grib_set_double_array(h, key.encode(ENC), a, length)) @@ -1125,9 +1145,9 @@ def grib_get_double_array(msgid, key): """ h = get_handle(msgid) nval = grib_get_size(msgid, key) - length_p = ffi.new('size_t*', nval) - arr = np.empty((nval,), dtype='float64') - vals_p = ffi.cast('double *', arr.ctypes.data) + length_p = ffi.new("size_t*", nval) + arr = np.empty((nval,), dtype="float64") + vals_p = ffi.cast("double *", arr.ctypes.data) err = lib.grib_get_double_array(h, key.encode(ENC), vals_p, length_p) GRIB_CHECK(err) return arr @@ -1146,9 +1166,9 @@ def grib_get_string_array(msgid, key): length = grib_get_string_length(msgid, key) size = grib_get_size(msgid, key) h = get_handle(msgid) - values_keepalive = [ffi.new('char[]', length) for _ in range(size)] - values = ffi.new('char*[]', values_keepalive) - size_p = ffi.new('size_t *', size) + values_keepalive = [ffi.new("char[]", length) for _ in range(size)] + values = ffi.new("char*[]", values_keepalive) + size_p = ffi.new("size_t *", size) err = lib.grib_get_string_array(h, key.encode(ENC), values, size_p) GRIB_CHECK(err) return [ffi.string(values[i]).decode(ENC) for i in range(size_p[0])] @@ -1171,8 +1191,8 @@ def grib_set_string_array(msgid, key, inarray): h = get_handle(msgid) size = len(inarray) # See https://cffi.readthedocs.io/en/release-1.3/using.html - values_keepalive = [ffi.new('char[]', s.encode(ENC)) for s in inarray] - values_p = ffi.new('const char *[]', values_keepalive) + values_keepalive = [ffi.new("char[]", s.encode(ENC)) for s in inarray] + values_p = ffi.new("const char *[]", values_keepalive) GRIB_CHECK(lib.grib_set_string_array(h, key.encode(ENC), values_p, size)) @@ -1208,9 +1228,9 @@ def grib_get_long_array(msgid, key): """ h = get_handle(msgid) nval = grib_get_size(msgid, key) - length_p = ffi.new('size_t*', nval) - arr = np.empty((nval,), dtype='int64') - vals_p = ffi.cast('long *', arr.ctypes.data) + length_p = ffi.new("size_t*", nval) + arr = np.empty((nval,), dtype="int64") + vals_p = ffi.cast("long *", arr.ctypes.data) err = lib.grib_get_long_array(h, key.encode(ENC), vals_p, length_p) GRIB_CHECK(err) return arr @@ -1275,7 +1295,9 @@ def grib_index_new_from_file(filename, keys): @exception GribInternalError """ ckeys = ",".join(keys) - err, iid = err_last(lib.grib_index_new_from_file)(ffi.NULL, filename.encode(ENC), ckeys.encode(ENC)) + err, iid = err_last(lib.grib_index_new_from_file)( + ffi.NULL, filename.encode(ENC), ckeys.encode(ENC) + ) GRIB_CHECK(err) return put_index(iid) @@ -1324,7 +1346,7 @@ def grib_index_get_size(indexid, key): @exception GribInternalError """ ih = get_index(indexid) - size_p = ffi.new('size_t*') + size_p = ffi.new("size_t*") err = lib.grib_index_get_size(ih, key.encode(ENC), size_p) GRIB_CHECK(err) return size_p[0] @@ -1348,8 +1370,8 @@ def grib_index_get_long(indexid, key): nval = grib_index_get_size(indexid, key) ih = get_index(indexid) - values_p = ffi.new('long[]', nval) - size_p = ffi.new('size_t *', nval) + values_p = ffi.new("long[]", nval) + size_p = ffi.new("size_t *", nval) err = lib.grib_index_get_long(ih, key.encode(ENC), values_p, size_p) GRIB_CHECK(err) return tuple(int(values_p[i]) for i in range(size_p[0])) @@ -1373,9 +1395,9 @@ def grib_index_get_string(indexid, key): nval = grib_index_get_size(indexid, key) ih = get_index(indexid) max_val_size = 1024 - 
values_keepalive = [ffi.new('char[]', max_val_size) for _ in range(nval)] - values_p = ffi.new('const char *[]', values_keepalive) - size_p = ffi.new('size_t *', max_val_size) + values_keepalive = [ffi.new("char[]", max_val_size) for _ in range(nval)] + values_p = ffi.new("const char *[]", values_keepalive) + size_p = ffi.new("size_t *", max_val_size) err = lib.grib_index_get_string(ih, key.encode(ENC), values_p, size_p) GRIB_CHECK(err) return tuple(ffi.string(values_p[i]).decode(ENC) for i in range(size_p[0])) @@ -1399,8 +1421,8 @@ def grib_index_get_double(indexid, key): nval = grib_index_get_size(indexid, key) ih = get_index(indexid) - values_p = ffi.new('double[]', nval) - size_p = ffi.new('size_t *', nval) + values_p = ffi.new("double[]", nval) + size_p = ffi.new("size_t *", nval) err = lib.grib_index_get_doule(ih, key.encode(ENC), values_p, size_p) GRIB_CHECK(err) return tuple(int(values_p[i]) for i in range(size_p[0])) @@ -1501,7 +1523,7 @@ def grib_get_message_size(msgid): @exception GribInternalError """ h = get_handle(msgid) - size_p = ffi.new('size_t*') + size_p = ffi.new("size_t*") err = lib.grib_get_message_size(h, size_p) GRIB_CHECK(err) return size_p[0] @@ -1517,7 +1539,7 @@ def grib_get_message_offset(msgid): @exception GribInternalError """ h = get_handle(msgid) - offset_p = ffi.new('long int*') + offset_p = ffi.new("long int*") err = lib.grib_get_message_offset(h, offset_p) GRIB_CHECK(err) return offset_p[0] @@ -1536,7 +1558,7 @@ def grib_get_double_element(msgid, key, index): """ h = get_handle(msgid) - value_p = ffi.new('double *') + value_p = ffi.new("double *") err = lib.grib_get_double_element(h, key.encode(ENC), index) GRIB_CHECK(err) return value_p[0] @@ -1556,8 +1578,8 @@ def grib_get_double_elements(msgid, key, indexes): """ nidx = len(indexes) h = get_handle(msgid) - i_p = ffi.new('int[]', indexes) - value_p = ffi.new('double[]', nidx) + i_p = ffi.new("int[]", indexes) + value_p = ffi.new("double[]", nidx) err = lib.grib_get_double_elements(h, key.encode(ENC), i_p, nidx, value_p) GRIB_CHECK(err) return [float(v) for v in value_p] @@ -1616,30 +1638,34 @@ def grib_set_key_vals(gribid, key_vals): key_vals_str = "" if isinstance(key_vals, str): # Plain string. 
We need to do a DEEP copy so as not to change the original - key_vals_str = ''.join(key_vals) + key_vals_str = "".join(key_vals) elif isinstance(key_vals, (list, tuple)): # A list of key=val strings for kv in key_vals: if not isinstance(kv, str): raise TypeError("Invalid list/tuple element type '%s'" % kv) - if '=' not in str(kv): - raise errors.GribInternalError("Invalid list/tuple element format '%s'" % kv) + if "=" not in str(kv): + raise errors.GribInternalError( + "Invalid list/tuple element format '%s'" % kv + ) if len(key_vals_str) > 0: - key_vals_str += ',' + key_vals_str += "," key_vals_str += kv elif isinstance(key_vals, dict): # A dictionary mapping keys to values for key in key_vals.keys(): if len(key_vals_str) > 0: - key_vals_str += ',' - key_vals_str += key + '=' + str(key_vals[key]) + key_vals_str += "," + key_vals_str += key + "=" + str(key_vals[key]) else: raise TypeError("Invalid argument type") h = get_handle(gribid) - values = ffi.new('grib_values[]', 1024) - count_p = ffi.new('int*', 1000) - err = lib.parse_keyval_string(ffi.NULL, key_vals_str.encode(ENC), 1, lib.GRIB_TYPE_UNDEFINED, values, count_p) + values = ffi.new("grib_values[]", 1024) + count_p = ffi.new("int*", 1000) + err = lib.parse_keyval_string( + ffi.NULL, key_vals_str.encode(ENC), 1, lib.GRIB_TYPE_UNDEFINED, values, count_p + ) GRIB_CHECK(err) err = lib.grib_set_values(h, values, count_p[0]) GRIB_CHECK(err) @@ -1697,32 +1723,53 @@ def grib_find_nearest(gribid, inlat, inlon, is_lsm=False, npoints=1): @exception GribInternalError """ h = get_handle(gribid) - inlats_p = ffi.new('double*', inlat) - inlons_p = ffi.new('double*', inlon) + inlats_p = ffi.new("double*", inlat) + inlons_p = ffi.new("double*", inlon) if npoints == 1: - outlats_p = ffi.new('double[]', 1) - outlons_p = ffi.new('double[]', 1) - values_p = ffi.new('double[]', 1) - distances_p = ffi.new('double[]', 1) - indexes_p = ffi.new('int[]', 1) + outlats_p = ffi.new("double[]", 1) + outlons_p = ffi.new("double[]", 1) + values_p = ffi.new("double[]", 1) + distances_p = ffi.new("double[]", 1) + indexes_p = ffi.new("int[]", 1) num_input_points = 1 # grib_nearest_find_multiple always returns ONE nearest neighbour - err = lib.grib_nearest_find_multiple(h, is_lsm, inlats_p, inlons_p, num_input_points, - outlats_p, outlons_p, values_p, distances_p, indexes_p) + err = lib.grib_nearest_find_multiple( + h, + is_lsm, + inlats_p, + inlons_p, + num_input_points, + outlats_p, + outlons_p, + values_p, + distances_p, + indexes_p, + ) GRIB_CHECK(err) elif npoints == 4: - outlats_p = ffi.new('double[]', npoints) - outlons_p = ffi.new('double[]', npoints) - values_p = ffi.new('double[]', npoints) - distances_p = ffi.new('double[]', npoints) - indexes_p = ffi.new('int[]', npoints) - size = ffi.new('size_t *') + outlats_p = ffi.new("double[]", npoints) + outlons_p = ffi.new("double[]", npoints) + values_p = ffi.new("double[]", npoints) + distances_p = ffi.new("double[]", npoints) + indexes_p = ffi.new("int[]", npoints) + size = ffi.new("size_t *") err, nid = err_last(lib.grib_nearest_new)(h) GRIB_CHECK(err) flags = 0 - err = lib.grib_nearest_find(nid, h, inlat, inlon, flags, - outlats_p, outlons_p, values_p, distances_p, indexes_p, size) + err = lib.grib_nearest_find( + nid, + h, + inlat, + inlon, + flags, + outlats_p, + outlons_p, + values_p, + distances_p, + indexes_p, + size, + ) GRIB_CHECK(err) GRIB_CHECK(lib.grib_nearest_delete(nid)) else: @@ -1730,7 +1777,15 @@ def grib_find_nearest(gribid, inlat, inlon, is_lsm=False, npoints=1): result = [] for i in 
range(npoints): - result.append(Bunch(lat=outlats_p[i], lon=outlons_p[i], value=values_p[i], distance=distances_p[i], index=indexes_p[i])) + result.append( + Bunch( + lat=outlats_p[i], + lon=outlons_p[i], + value=values_p[i], + distance=distances_p[i], + index=indexes_p[i], + ) + ) return tuple(result) @@ -1750,24 +1805,44 @@ def grib_find_nearest_multiple(gribid, is_lsm, inlats, inlons): h = get_handle(gribid) npoints = len(inlats) if len(inlons) != npoints: - raise ValueError('grib_find_nearest_multiple: input arrays inlats and inlons must have the same length') + raise ValueError( + "grib_find_nearest_multiple: input arrays inlats and inlons must have the same length" + ) - inlats_p = ffi.new('double[]', inlats) - inlons_p = ffi.new('double[]', inlons) + inlats_p = ffi.new("double[]", inlats) + inlons_p = ffi.new("double[]", inlons) - outlats_p = ffi.new('double[]', npoints) - outlons_p = ffi.new('double[]', npoints) - values_p = ffi.new('double[]', npoints) - distances_p = ffi.new('double[]', npoints) - indexes_p = ffi.new('int[]', npoints) + outlats_p = ffi.new("double[]", npoints) + outlons_p = ffi.new("double[]", npoints) + values_p = ffi.new("double[]", npoints) + distances_p = ffi.new("double[]", npoints) + indexes_p = ffi.new("int[]", npoints) # Note: grib_nearest_find_multiple always returns ONE nearest neighbour - err = lib.grib_nearest_find_multiple(h, is_lsm, inlats_p, inlons_p, npoints, - outlats_p, outlons_p, values_p, distances_p, indexes_p) + err = lib.grib_nearest_find_multiple( + h, + is_lsm, + inlats_p, + inlons_p, + npoints, + outlats_p, + outlons_p, + values_p, + distances_p, + indexes_p, + ) GRIB_CHECK(err) result = [] for i in range(npoints): - result.append(Bunch(lat=outlats_p[i], lon=outlons_p[i], value=values_p[i], distance=distances_p[i], index=indexes_p[i])) + result.append( + Bunch( + lat=outlats_p[i], + lon=outlons_p[i], + value=values_p[i], + distance=distances_p[i], + index=indexes_p[i], + ) + ) return tuple(result) @@ -1785,7 +1860,7 @@ def grib_get_native_type(msgid, key): @exception GribInternalError """ h = get_handle(msgid) - itype_p = ffi.new('int*') + itype_p = ffi.new("int*") err = lib.grib_get_native_type(h, key.encode(ENC), itype_p) GRIB_CHECK(err) if itype_p[0] in KEYTYPES: @@ -1921,7 +1996,9 @@ def grib_set(msgid, key, value): # # The value passed in is iterable; i.e. a list or array etc # grib_set_array(msgid, key, value) else: - raise errors.GribInternalError("Invalid type of value when setting key '%s'." % key) + raise errors.GribInternalError( + "Invalid type of value when setting key '%s'." % key + ) @require(msgid=int, key=str) @@ -1956,7 +2033,9 @@ def grib_set_array(msgid, key, value): try: int(val0) except (ValueError, TypeError): - raise errors.GribInternalError("Invalid type of value when setting key '%s'." % key) + raise errors.GribInternalError( + "Invalid type of value when setting key '%s'." % key + ) grib_set_long_array(msgid, key, value) @@ -2011,7 +2090,9 @@ def grib_index_select(indexid, key, value): elif isinstance(value, str): grib_index_select_string(indexid, key, value) else: - raise errors.GribInternalError("Invalid type of value when setting key '%s'." % key) + raise errors.GribInternalError( + "Invalid type of value when setting key '%s'." % key + ) @require(indexid=int, filename=str) @@ -2077,6 +2158,7 @@ def grib_get_api_version(): Returns the version of the API as a string in the format "major.minor.revision". 
""" + def div(v, d): return (v / d, v % d) @@ -2101,8 +2183,8 @@ def codes_get_version_info(): Returns a dictionary containing the versions of the ecCodes API and the Python bindings """ vinfo = dict() - vinfo['eccodes'] = grib_get_api_version() - vinfo['bindings'] = bindings_version + vinfo["eccodes"] = grib_get_api_version() + vinfo["bindings"] = bindings_version return vinfo @@ -2120,12 +2202,14 @@ def grib_get_message(msgid): @exception GribInternalError """ h = get_handle(msgid) - message_p = ffi.new('const void**') - message_length_p = ffi.new('size_t*') + message_p = ffi.new("const void**") + message_length_p = ffi.new("size_t*") err = lib.grib_get_message(h, message_p, message_length_p) GRIB_CHECK(err) # NOTE: ffi.string would stop on the first nul-character. - fixed_length_buffer = ffi.buffer(ffi.cast('char*', message_p[0]), message_length_p[0]) + fixed_length_buffer = ffi.buffer( + ffi.cast("char*", message_p[0]), message_length_p[0] + ) # Convert to bytes return fixed_length_buffer[:] @@ -2201,7 +2285,7 @@ def _convert_struct_to_dict(s): ident_found = False for a in dir(s): value = getattr(s, a) - if not ident_found and a == 'ident': + if not ident_found and a == "ident": value = ffi.string(value).decode(ENC) ident_found = True result[a] = value @@ -2219,9 +2303,11 @@ def codes_bufr_extract_headers(filepath, is_strict=True): """ context = lib.grib_context_get_default() headers_p = ffi.new("struct codes_bufr_header**") - num_message_p = ffi.new('int*') + num_message_p = ffi.new("int*") - err = lib.codes_bufr_extract_headers_malloc(context, filepath.encode(ENC), headers_p, num_message_p, is_strict) + err = lib.codes_bufr_extract_headers_malloc( + context, filepath.encode(ENC), headers_p, num_message_p, is_strict + ) GRIB_CHECK(err) num_messages = num_message_p[0] From b4503454a4824128c0a32edcb21da8e3cee2c35d Mon Sep 17 00:00:00 2001 From: Ian Vermes Date: Tue, 3 Dec 2019 11:50:24 +0000 Subject: [PATCH 4/8] Change setup regex pattern to tolerate single and double quotes The regex matching to establish file version is sensitive to handling autoformatted files. Autoformatted code by default prefers double quotes. Python best practice suggests both quotemarks should be used interchangably. 
--- setup.py | 61 ++++++++++++++++++++++++++------------------------------ 1 file changed, 28 insertions(+), 33 deletions(-) diff --git a/setup.py b/setup.py index 809cade..3dadf8e 100644 --- a/setup.py +++ b/setup.py @@ -22,54 +22,49 @@ def read(path): - file_path = os.path.join(os.path.dirname(__file__), *path.split('/')) - return io.open(file_path, encoding='utf-8').read() + file_path = os.path.join(os.path.dirname(__file__), *path.split("/")) + return io.open(file_path, encoding="utf-8").read() # single-sourcing the package version using method 1 of: # https://packaging.python.org/guides/single-sourcing-package-version/ def parse_version_from(path): + version_pattern = ( + r"^__version__ = [\"\'](.*)[\"\']" # More permissive regex pattern + ) version_file = read(path) - version_match = re.search(r"^__version__ = '(.*)'", version_file, re.M) + version_match = re.search(version_pattern, version_file, re.M) if version_match is None or len(version_match.groups()) > 1: raise ValueError("couldn't parse version") return version_match.group(1) setuptools.setup( - name='eccodes-python', - version=parse_version_from('gribapi/bindings.py'), - description='Python interface to the ecCodes GRIB and BUFR decoder/encoder', - long_description=read('README.rst') + read('CHANGELOG.rst'), - author='European Centre for Medium-Range Weather Forecasts (ECMWF)', - author_email='software.support@ecmwf.int', - license='Apache License Version 2.0', - url='https://github.com/ecmwf/eccodes-python', + name="eccodes-python", + version=parse_version_from("gribapi/bindings.py"), + description="Python interface to the ecCodes GRIB and BUFR decoder/encoder", + long_description=read("README.rst") + read("CHANGELOG.rst"), + author="European Centre for Medium-Range Weather Forecasts (ECMWF)", + author_email="software.support@ecmwf.int", + license="Apache License Version 2.0", + url="https://github.com/ecmwf/eccodes-python", packages=setuptools.find_packages(), include_package_data=True, - install_requires=[ - 'attrs', - 'cffi', - 'numpy', - ], - tests_require=[ - 'pytest', - 'pytest-cov', - 'pytest-flakes', - ], - test_suite='tests', + install_requires=["attrs", "cffi", "numpy",], + tests_require=["pytest", "pytest-cov", "pytest-flakes",], + test_suite="tests", zip_safe=True, - keywords='ecCodes GRIB BUFR', + keywords="ecCodes GRIB BUFR", classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', - 'Operating System :: OS Independent', + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Operating System :: OS Independent", ], ) From 80be127f1fec4fbefe461b2879d9465f4d1804d7 Mon Sep 17 00:00:00 2001 From: Ian Vermes Date: Tue, 3 Dec 2019 12:03:45 +0000 Subject: [PATCH 5/8] Autoformat remaining *.py files --- builder.py | 9 +- docs/conf.py | 22 ++--- eccodes/__main__.py | 10 +- 
eccodes/high_level/bufr.py | 38 ++++---- eccodes/high_level/codesfile.py | 3 +- eccodes/high_level/codesmessage.py | 37 +++++--- eccodes/high_level/gribfile.py | 3 +- eccodes/high_level/gribindex.py | 8 +- eccodes/high_level/gribmessage.py | 27 ++++-- eccodes/messages.py | 75 +++++++++------ gribapi/__init__.py | 10 +- tests/test_20_main.py | 6 +- tests/test_20_messages.py | 148 +++++++++++++++-------------- tests/test_eccodes.py | 79 +++++++-------- 14 files changed, 260 insertions(+), 215 deletions(-) diff --git a/builder.py b/builder.py index 27da67a..939088e 100644 --- a/builder.py +++ b/builder.py @@ -5,14 +5,9 @@ ffibuilder = cffi.FFI() ffibuilder.set_source( - "gribapi._bindings", - '#include ', - libraries=["eccodes"], -) -ffibuilder.cdef( - open("gribapi/grib_api.h").read() + - open("gribapi/eccodes.h").read() + "gribapi._bindings", "#include ", libraries=["eccodes"], ) +ffibuilder.cdef(open("gribapi/grib_api.h").read() + open("gribapi/eccodes.h").read()) if __name__ == "__main__": try: diff --git a/docs/conf.py b/docs/conf.py index d9d5c07..80e5103 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,22 +17,22 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'eccodes-python' +project = u"eccodes-python" copyright = u"2017-2019, European Centre for Medium-Range Weather Forecasts (ECMWF)." # The version info for the project you're documenting, acts as replacement @@ -42,7 +42,7 @@ # The full version, including alpha/beta/rc tags. release = pkg_resources.get_distribution("eccodes-python").version # The short X.Y version. -version = '.'.join(release.split('.')[:2]) +version = ".".join(release.split(".")[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -56,7 +56,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -74,7 +74,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -88,7 +88,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "default" # Theme options are theme-specific and customize the look and feel of a # theme further. For a list of options available for each theme, see the @@ -119,7 +119,7 @@ # here, relative to this directory. They are copied after the builtin # static files, so a file named "default.css" will overwrite the builtin # "default.css". 
-html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. @@ -165,4 +165,4 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'cfgribdoc' +htmlhelp_basename = "cfgribdoc" diff --git a/eccodes/__main__.py b/eccodes/__main__.py index 9574962..889b27e 100644 --- a/eccodes/__main__.py +++ b/eccodes/__main__.py @@ -29,13 +29,15 @@ def selfcheck(): def main(argv=None): parser = argparse.ArgumentParser() - parser.add_argument('command') + parser.add_argument("command") args = parser.parse_args(args=argv) - if args.command == 'selfcheck': + if args.command == "selfcheck": selfcheck() else: - raise RuntimeError("Command not recognised %r. See usage with --help." % args.command) + raise RuntimeError( + "Command not recognised %r. See usage with --help." % args.command + ) -if __name__ == '__main__': # pragma: no cover +if __name__ == "__main__": # pragma: no cover main() diff --git a/eccodes/high_level/bufr.py b/eccodes/high_level/bufr.py index e333391..109f576 100644 --- a/eccodes/high_level/bufr.py +++ b/eccodes/high_level/bufr.py @@ -16,33 +16,31 @@ class BufrMessage(CodesMessage): __doc__ = "\n".join(CodesMessage.__doc__.splitlines()[4:]).format( - prod_type="BUFR", classname="BufrMessage", parent="BufrFile", - alias="bufr") + prod_type="BUFR", classname="BufrMessage", parent="BufrFile", alias="bufr" + ) product_kind = eccodes.CODES_PRODUCT_BUFR # Arguments included explicitly to support introspection # TODO: Can we get this to work with an index? - def __init__(self, codes_file=None, clone=None, sample=None, - headers_only=False): + def __init__(self, codes_file=None, clone=None, sample=None, headers_only=False): """ Open a message and inform the GRIB file that it's been incremented. The message is taken from ``codes_file``, cloned from ``clone`` or ``sample``, or taken from ``index``, in that order of precedence. 
""" - super(self.__class__, self).__init__(codes_file, clone, sample, - headers_only) - #self._unpacked = False + super(self.__class__, self).__init__(codes_file, clone, sample, headers_only) + # self._unpacked = False - #def get(self, key, ktype=None): + # def get(self, key, ktype=None): # """Return requested value, unpacking data values if necessary.""" # # TODO: Only do this if accessing arrays that need unpacking # if not self._unpacked: # self.unpacked = True # return super(self.__class__, self).get(key, ktype) - #def missing(self, key): + # def missing(self, key): # """ # Report if key is missing.# # @@ -52,15 +50,15 @@ def __init__(self, codes_file=None, clone=None, sample=None, def unpack(self): """Decode data section""" - eccodes.codes_set(self.codes_id, 'unpack', 1) + eccodes.codes_set(self.codes_id, "unpack", 1) def pack(self): """Encode data section""" - eccodes.codes_set(self.codes_id, 'pack', 1) + eccodes.codes_set(self.codes_id, "pack", 1) def keys(self, namespace=None): - #self.unpack() - #return super(self.__class__, self).keys(namespace) + # self.unpack() + # return super(self.__class__, self).keys(namespace) iterator = eccodes.codes_bufr_keys_iterator_new(self.codes_id) keys = [] while eccodes.codes_bufr_keys_iterator_next(iterator): @@ -69,16 +67,16 @@ def keys(self, namespace=None): eccodes.codes_bufr_keys_iterator_delete(iterator) return keys - #@property - #def unpacked(self): + # @property + # def unpacked(self): # return self._unpacked - #@unpacked.setter - #def unpacked(self, val): + # @unpacked.setter + # def unpacked(self, val): # eccodes.codes_set(self.codes_id, "unpack", val) # self._unpacked = val - #def __setitem__(self, key, value): + # def __setitem__(self, key, value): # """Set item and pack BUFR.""" # if not self._unpacked: # self.unpacked = True @@ -89,9 +87,11 @@ def copy_data(self, destMsg): """Copy data values from this message to another message""" return eccodes.codes_bufr_copy_data(self.codes_id, destMsg.codes_id) + class BufrFile(CodesFile): __doc__ = "\n".join(CodesFile.__doc__.splitlines()[4:]).format( - prod_type="BUFR", classname="BufrFile", alias="bufr") + prod_type="BUFR", classname="BufrFile", alias="bufr" + ) MessageClass = BufrMessage diff --git a/eccodes/high_level/codesfile.py b/eccodes/high_level/codesfile.py index 8afbbb4..8b794bf 100644 --- a/eccodes/high_level/codesfile.py +++ b/eccodes/high_level/codesfile.py @@ -8,6 +8,7 @@ from .. import eccodes import io + class CodesFile(io.FileIO): """ @@ -50,7 +51,7 @@ def __exit__(self, exception_type, exception_value, traceback): while self.open_messages: self.open_messages.pop().close() eccodes.codes_close_file(self.file_handle) - #self.file_handle.close() + # self.file_handle.close() def __len__(self): """Return total number of messages in file.""" diff --git a/eccodes/high_level/codesmessage.py b/eccodes/high_level/codesmessage.py index ae12c74..31de2ec 100644 --- a/eccodes/high_level/codesmessage.py +++ b/eccodes/high_level/codesmessage.py @@ -62,8 +62,14 @@ class CodesMessage(object): #: ecCodes enum-like PRODUCT constant product_kind = None - def __init__(self, codes_file=None, clone=None, sample=None, - headers_only=False, other_args_found=False): + def __init__( + self, + codes_file=None, + clone=None, + sample=None, + headers_only=False, + other_args_found=False, + ): """ Open a message and inform the host file that it's been incremented. 
@@ -75,16 +81,21 @@ def __init__(self, codes_file=None, clone=None, sample=None, :param clone: A valid ``CodesMessage`` :param sample: A valid sample path to create ``CodesMessage`` from """ - if not other_args_found and codes_file is None and clone is None and sample is None: - raise RuntimeError("CodesMessage initialization parameters not " - "present.") + if ( + not other_args_found + and codes_file is None + and clone is None + and sample is None + ): + raise RuntimeError("CodesMessage initialization parameters not " "present.") #: Unique ID, for ecCodes interface self.codes_id = None #: File containing message self.codes_file = None if codes_file is not None: self.codes_id = eccodes.codes_new_from_file( - codes_file.file_handle, self.product_kind, headers_only) + codes_file.file_handle, self.product_kind, headers_only + ) if self.codes_id is None: raise IOError("CodesFile %s is exhausted" % codes_file.name) self.codes_file = codes_file @@ -93,8 +104,7 @@ def __init__(self, codes_file=None, clone=None, sample=None, elif clone is not None: self.codes_id = eccodes.codes_clone(clone.codes_id) elif sample is not None: - self.codes_id = eccodes.codes_new_from_samples( - sample, self.product_kind) + self.codes_id = eccodes.codes_new_from_samples(sample, self.product_kind) def write(self, outfile=None): """Write message to file.""" @@ -116,14 +126,15 @@ def __setitem__(self, key, value): eccodes.codes_set(self.codes_id, key, value) else: if len(key) != len(value): - raise ValueError('Key array must have same size as value array') - eccodes.codes_set_key_vals(self.codes_id,",".join([str(key[i])+"="+str(value[i]) for i in range(len(key))])) - + raise ValueError("Key array must have same size as value array") + eccodes.codes_set_key_vals( + self.codes_id, + ",".join([str(key[i]) + "=" + str(value[i]) for i in range(len(key))]), + ) def keys(self, namespace=None): """Get available keys in message.""" - iterator = eccodes.codes_keys_iterator_new(self.codes_id, - namespace=namespace) + iterator = eccodes.codes_keys_iterator_new(self.codes_id, namespace=namespace) keys = [] while eccodes.codes_keys_iterator_next(iterator): key = eccodes.codes_keys_iterator_get_name(iterator) diff --git a/eccodes/high_level/gribfile.py b/eccodes/high_level/gribfile.py index 6e074fe..7f662fe 100644 --- a/eccodes/high_level/gribfile.py +++ b/eccodes/high_level/gribfile.py @@ -12,6 +12,7 @@ class GribFile(CodesFile): __doc__ = "\n".join(CodesFile.__doc__.splitlines()[4:]).format( - prod_type="GRIB", classname="GribFile", alias="grib") + prod_type="GRIB", classname="GribFile", alias="grib" + ) MessageClass = GribMessage diff --git a/eccodes/high_level/gribindex.py b/eccodes/high_level/gribindex.py index 934dd03..13627fc 100644 --- a/eccodes/high_level/gribindex.py +++ b/eccodes/high_level/gribindex.py @@ -44,8 +44,7 @@ def close(self): """Possibility to manually close index.""" self.__exit__(None, None, None) - def __init__(self, filename=None, keys=None, file_index=None, - grib_index=None): + def __init__(self, filename=None, keys=None, file_index=None, grib_index=None): """ Create new GRIB index over ``keys`` from ``filename``. @@ -68,8 +67,9 @@ def __init__(self, filename=None, keys=None, file_index=None, elif grib_index: self.iid = eccodes.codes_new_from_index(grib_index.iid) else: - raise RuntimeError("No source was supplied " - "(possibilities: grib_file, clone, sample).") + raise RuntimeError( + "No source was supplied " "(possibilities: grib_file, clone, sample)." + ) #: Indexed keys. 
Only available if GRIB is initialized from file. self.keys = keys #: Open GRIB messages diff --git a/eccodes/high_level/gribmessage.py b/eccodes/high_level/gribmessage.py index c87cfed..b16f922 100644 --- a/eccodes/high_level/gribmessage.py +++ b/eccodes/high_level/gribmessage.py @@ -17,14 +17,20 @@ class IndexNotSelectedError(Exception): class GribMessage(CodesMessage): __doc__ = "\n".join(CodesMessage.__doc__.splitlines()[4:]).format( - prod_type="GRIB", classname="GribMessage", parent="GribFile", - alias="grib") + prod_type="GRIB", classname="GribMessage", parent="GribFile", alias="grib" + ) product_kind = eccodes.CODES_PRODUCT_GRIB # Arguments included explicitly to support introspection - def __init__(self, codes_file=None, clone=None, sample=None, - headers_only=False, gribindex=None): + def __init__( + self, + codes_file=None, + clone=None, + sample=None, + headers_only=False, + gribindex=None, + ): """ Open a message and inform the GRIB file that it's been incremented. @@ -34,16 +40,19 @@ def __init__(self, codes_file=None, clone=None, sample=None, grib_args_present = True if gribindex is None: grib_args_present = False - super(self.__class__, self).__init__(codes_file, clone, sample, - headers_only, grib_args_present) + super(self.__class__, self).__init__( + codes_file, clone, sample, headers_only, grib_args_present + ) #: GribIndex referencing message self.grib_index = None if gribindex is not None: self.codes_id = eccodes.codes_new_from_index(gribindex.iid) if not self.codes_id: - raise IndexNotSelectedError("All keys must have selected " - "values before receiving message " - "from index.") + raise IndexNotSelectedError( + "All keys must have selected " + "values before receiving message " + "from index." + ) self.grib_index = gribindex gribindex.open_messages.append(self) diff --git a/eccodes/messages.py b/eccodes/messages.py index 637e5da..59322ac 100644 --- a/eccodes/messages.py +++ b/eccodes/messages.py @@ -47,13 +47,15 @@ class Message(collections.abc.MutableMapping): """Dictionary-line interface to access Message headers.""" codes_id = attr.attrib() - encoding = attr.attrib(default='ascii', type=str) + encoding = attr.attrib(default="ascii", type=str) errors = attr.attrib( - default='warn', validator=attr.validators.in_(['ignore', 'warn', 'raise']) + default="warn", validator=attr.validators.in_(["ignore", "warn", "raise"]) ) @classmethod - def from_file(cls, file, offset=None, product_kind=eccodes.CODES_PRODUCT_ANY, **kwargs): + def from_file( + cls, file, offset=None, product_kind=eccodes.CODES_PRODUCT_ANY, **kwargs + ): # type: (T.IO[bytes], int, int, T.Any) -> Message field_in_message = 0 if isinstance(offset, tuple): @@ -69,7 +71,9 @@ def from_file(cls, file, offset=None, product_kind=eccodes.CODES_PRODUCT_ANY, ** return cls(codes_id=codes_id, **kwargs) @classmethod - def from_sample_name(cls, sample_name, product_kind=eccodes.CODES_PRODUCT_GRIB, **kwargs): + def from_sample_name( + cls, sample_name, product_kind=eccodes.CODES_PRODUCT_GRIB, **kwargs + ): codes_id = eccodes.codes_new_from_samples(sample_name, product_kind) return cls(codes_id=codes_id, **kwargs) @@ -87,7 +91,7 @@ def message_get(self, item, key_type=None, default=_MARKER): try: values = eccodes.codes_get_array(self.codes_id, item, key_type) if values is None: - values = ['unsupported_key_type'] + values = ["unsupported_key_type"] except eccodes.KeyValueNotFoundError: if default is _MARKER: raise KeyError(item) @@ -101,7 +105,9 @@ def message_get(self, item, key_type=None, default=_MARKER): def 
message_set(self, item, value): # type: (str, T.Any) -> None - set_array = isinstance(value, T.Sequence) and not isinstance(value, (str, bytes)) + set_array = isinstance(value, T.Sequence) and not isinstance( + value, (str, bytes) + ) if set_array: eccodes.codes_set_array(self.codes_id, item, value) else: @@ -130,9 +136,9 @@ def __setitem__(self, item, value): try: return self.message_set(item, value) except eccodes.GribInternalError as ex: - if self.errors == 'ignore': + if self.errors == "ignore": pass - elif self.errors == 'raise': + elif self.errors == "raise": raise KeyError("failed to set key %r to %r" % (item, value)) else: if isinstance(ex, eccodes.ReadOnlyError): @@ -163,7 +169,9 @@ class ComputedKeysMessage(Message): computed_keys = attr.attrib( default={}, - type=T.Dict[str, T.Tuple[T.Callable[[Message], T.Any], T.Callable[[Message], T.Any]]], + type=T.Dict[ + str, T.Tuple[T.Callable[[Message], T.Any], T.Callable[[Message], T.Any]] + ], ) def __getitem__(self, item): @@ -197,13 +205,13 @@ class FileStream(collections.abc.Iterable): path = attr.attrib(type=str) message_class = attr.attrib(default=Message, type=Message, repr=False) errors = attr.attrib( - default='warn', validator=attr.validators.in_(['ignore', 'warn', 'raise']) + default="warn", validator=attr.validators.in_(["ignore", "warn", "raise"]) ) product_kind = attr.attrib(default=eccodes.CODES_PRODUCT_ANY) def __iter__(self): # type: () -> T.Generator[Message, None, None] - with open(self.path, 'rb') as file: + with open(self.path, "rb") as file: valid_message_found = False while True: try: @@ -214,9 +222,9 @@ def __iter__(self): raise EOFError("No valid message found in file: %r" % self.path) break except Exception: - if self.errors == 'ignore': + if self.errors == "ignore": pass - elif self.errors == 'raise': + elif self.errors == "raise": raise else: LOG.exception("skipping corrupted Message") @@ -228,7 +236,7 @@ def first(self): # type: () -> Message return next(iter(self)) - def index(self, index_keys, indexpath='{path}.{short_hash}.idx'): + def index(self, index_keys, indexpath="{path}.{short_hash}.idx"): # type: (T.List[str], str) -> FileIndex return FileIndex.from_indexpath_or_filestream(self, index_keys, indexpath) @@ -236,7 +244,7 @@ def index(self, index_keys, indexpath='{path}.{short_hash}.idx'): @contextlib.contextmanager def compat_create_exclusive(path, *args, **kwargs): fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL) - with io.open(fd, mode='wb', *args, **kwargs) as file: + with io.open(fd, mode="wb", *args, **kwargs) as file: try: yield file except Exception: @@ -247,10 +255,12 @@ def compat_create_exclusive(path, *args, **kwargs): @attr.attrs() class FileIndex(collections.abc.Mapping): - allowed_protocol_version = '1' + allowed_protocol_version = "1" filestream = attr.attrib(type=FileStream) index_keys = attr.attrib(type=T.List[str]) - offsets = attr.attrib(repr=False, type=T.List[T.Tuple[T.Tuple[T.Any, ...], T.List[int]]]) + offsets = attr.attrib( + repr=False, type=T.List[T.Tuple[T.Tuple[T.Any, ...], T.List[int]]] + ) filter_by_keys = attr.attrib(default={}, type=T.Dict[str, T.Any]) @classmethod @@ -263,11 +273,11 @@ def from_filestream(cls, filestream, index_keys): try: value = message[key] except: - value = 'undef' + value = "undef" if isinstance(value, (list, np.ndarray)): value = tuple(value) header_values.append(value) - offset = message.message_get('offset', int) + offset = message.message_get("offset", int) if offset in count_offsets: count_offsets[offset] += 1 offset_field = (offset, 
count_offsets[offset]) @@ -275,19 +285,21 @@ def from_filestream(cls, filestream, index_keys): count_offsets[offset] = 0 offset_field = offset offsets.setdefault(tuple(header_values), []).append(offset_field) - self = cls(filestream=filestream, index_keys=index_keys, offsets=list(offsets.items())) + self = cls( + filestream=filestream, index_keys=index_keys, offsets=list(offsets.items()) + ) # record the index protocol version in the instance so it is dumped with pickle self.index_protocol_version = cls.allowed_protocol_version return self @classmethod def from_indexpath(cls, indexpath): - with io.open(indexpath, 'rb') as file: + with io.open(indexpath, "rb") as file: return pickle.load(file) @classmethod def from_indexpath_or_filestream( - cls, filestream, index_keys, indexpath='{path}.{short_hash}.idx', log=LOG + cls, filestream, index_keys, indexpath="{path}.{short_hash}.idx", log=LOG ): # type: (FileStream, T.List[str], str, logging.Logger) -> FileIndex @@ -295,8 +307,10 @@ def from_indexpath_or_filestream( if not indexpath: return cls.from_filestream(filestream, index_keys) - hash = hashlib.md5(repr(index_keys).encode('utf-8')).hexdigest() - indexpath = indexpath.format(path=filestream.path, hash=hash, short_hash=hash[:5]) + hash = hashlib.md5(repr(index_keys).encode("utf-8")).hexdigest() + indexpath = indexpath.format( + path=filestream.path, hash=hash, short_hash=hash[:5] + ) try: with compat_create_exclusive(indexpath) as new_index_file: self = cls.from_filestream(filestream, index_keys) @@ -314,13 +328,16 @@ def from_indexpath_or_filestream( self = cls.from_indexpath(indexpath) allowed_protocol_version = self.allowed_protocol_version if ( - getattr(self, 'index_keys', None) == index_keys - and getattr(self, 'filestream', None) == filestream - and getattr(self, 'index_protocol_version', None) == allowed_protocol_version + getattr(self, "index_keys", None) == index_keys + and getattr(self, "filestream", None) == filestream + and getattr(self, "index_protocol_version", None) + == allowed_protocol_version ): return self else: - log.warning("Ignoring index file %r incompatible with GRIB file", indexpath) + log.warning( + "Ignoring index file %r incompatible with GRIB file", indexpath + ) else: log.warning("Ignoring index file %r older than GRIB file", indexpath) except Exception: @@ -336,7 +353,7 @@ def __len__(self): @property def header_values(self): - if not hasattr(self, '_header_values'): + if not hasattr(self, "_header_values"): self._header_values = {} for header_values, _ in self.offsets: for i, value in enumerate(header_values): diff --git a/gribapi/__init__.py b/gribapi/__init__.py index b1d1926..8404707 100644 --- a/gribapi/__init__.py +++ b/gribapi/__init__.py @@ -1,11 +1,13 @@ -from .gribapi import * # noqa +from .gribapi import * # noqa from .gribapi import __version__ from .gribapi import bindings_version # The minimum required version for the ecCodes package -min_reqd_version_str = '2.16.0' +min_reqd_version_str = "2.16.0" min_reqd_version_int = 21600 if lib.grib_get_api_version() < min_reqd_version_int: - print('Warning: ecCodes %s or higher is recommended. You are running version %s' % ( - min_reqd_version_str, __version__)) + print( + "Warning: ecCodes %s or higher is recommended. 
You are running version %s" + % (min_reqd_version_str, __version__) + ) diff --git a/tests/test_20_main.py b/tests/test_20_main.py index 92bb065..e9b986a 100644 --- a/tests/test_20_main.py +++ b/tests/test_20_main.py @@ -4,10 +4,10 @@ def test_main(capsys): - __main__.main(argv=['selfcheck']) + __main__.main(argv=["selfcheck"]) stdout, _ = capsys.readouterr() - assert 'Your system is ready.' in stdout + assert "Your system is ready." in stdout with pytest.raises(RuntimeError): - __main__.main(argv=['non-existent-command']) + __main__.main(argv=["non-existent-command"]) diff --git a/tests/test_20_messages.py b/tests/test_20_messages.py index bb6a755..43d29ae 100644 --- a/tests/test_20_messages.py +++ b/tests/test_20_messages.py @@ -6,25 +6,25 @@ from eccodes import messages -SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') -TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib') +SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), "sample-data") +TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, "era5-levels-members.grib") def _test_Message_read(): with open(TEST_DATA) as file: res1 = messages.Message.from_file(file) - assert res1.message_get('paramId') == 129 - assert res1['paramId'] == 129 - assert list(res1)[0] == 'globalDomain' - assert list(res1.message_grib_keys('time'))[0] == 'dataDate' - assert 'paramId' in res1 + assert res1.message_get("paramId") == 129 + assert res1["paramId"] == 129 + assert list(res1)[0] == "globalDomain" + assert list(res1.message_grib_keys("time"))[0] == "dataDate" + assert "paramId" in res1 assert len(res1) > 100 with pytest.raises(KeyError): - res1['non-existent-key'] + res1["non-existent-key"] - assert res1.message_get('non-existent-key', default=1) == 1 + assert res1.message_get("non-existent-key", default=1) == 1 res2 = messages.Message.from_message(res1) for (k2, v2), (k1, v1) in zip(res2.items(), res1.items()): @@ -41,81 +41,81 @@ def _test_Message_read(): def test_Message_write(tmpdir): - res = messages.Message.from_sample_name('regular_ll_pl_grib2') - assert res['gridType'] == 'regular_ll' + res = messages.Message.from_sample_name("regular_ll_pl_grib2") + assert res["gridType"] == "regular_ll" - res.message_set('Ni', 20) - assert res['Ni'] == 20 + res.message_set("Ni", 20) + assert res["Ni"] == 20 - res['iDirectionIncrementInDegrees'] = 1.0 - assert res['iDirectionIncrementInDegrees'] == 1.0 + res["iDirectionIncrementInDegrees"] = 1.0 + assert res["iDirectionIncrementInDegrees"] == 1.0 - res.message_set('gridType', 'reduced_gg') - assert res['gridType'] == 'reduced_gg' + res.message_set("gridType", "reduced_gg") + assert res["gridType"] == "reduced_gg" - res['pl'] = [2.0, 3.0] - assert np.allclose(res['pl'], [2.0, 3.0]) + res["pl"] = [2.0, 3.0] + assert np.allclose(res["pl"], [2.0, 3.0]) # warn on errors - res['centreDescription'] = 'DUMMY' - assert res['centreDescription'] != 'DUMMY' - res['edition'] = -1 - assert res['edition'] != -1 + res["centreDescription"] = "DUMMY" + assert res["centreDescription"] != "DUMMY" + res["edition"] = -1 + assert res["edition"] != -1 # ignore errors - res.errors = 'ignore' - res['centreDescription'] = 'DUMMY' - assert res['centreDescription'] != 'DUMMY' + res.errors = "ignore" + res["centreDescription"] = "DUMMY" + assert res["centreDescription"] != "DUMMY" # raise errors - res.errors = 'raise' + res.errors = "raise" with pytest.raises(KeyError): - res['centreDescription'] = 'DUMMY' + res["centreDescription"] = "DUMMY" with pytest.raises(NotImplementedError): - del 
res['gridType'] + del res["gridType"] - out = tmpdir.join('test.grib') - with open(str(out), 'wb') as file: + out = tmpdir.join("test.grib") + with open(str(out), "wb") as file: res.write(file) def _test_ComputedKeysMessage_read(): computed_keys = { - 'ref_time': (lambda m: str(m['dataDate']) + str(m['dataTime']), None), - 'error_key': (lambda m: 1 / 0, None), - 'centre': (lambda m: -1, lambda m, v: None), + "ref_time": (lambda m: str(m["dataDate"]) + str(m["dataTime"]), None), + "error_key": (lambda m: 1 / 0, None), + "centre": (lambda m: -1, lambda m, v: None), } with open(TEST_DATA) as file: res = messages.ComputedKeysMessage.from_file(file, computed_keys=computed_keys) - assert res['paramId'] == 129 - assert res['ref_time'] == '201701010' + assert res["paramId"] == 129 + assert res["ref_time"] == "201701010" assert len(res) > 100 - assert res['centre'] == -1 + assert res["centre"] == -1 with pytest.raises(ZeroDivisionError): - res['error_key'] + res["error_key"] def test_ComputedKeysMessage_write(): computed_keys = { - 'ref_time': (lambda m: '%s%04d' % (m['dataDate'], m['dataTime']), None), - 'error_key': (lambda m: 1 / 0, None), - 'centre': (lambda m: -1, lambda m, v: None), + "ref_time": (lambda m: "%s%04d" % (m["dataDate"], m["dataTime"]), None), + "error_key": (lambda m: 1 / 0, None), + "centre": (lambda m: -1, lambda m, v: None), } res = messages.ComputedKeysMessage.from_sample_name( - 'regular_ll_pl_grib2', computed_keys=computed_keys + "regular_ll_pl_grib2", computed_keys=computed_keys ) - res['dataDate'] = 20180101 - res['dataTime'] = 0 - assert res['ref_time'] == '201801010000' + res["dataDate"] = 20180101 + res["dataTime"] = 0 + assert res["ref_time"] == "201801010000" - res['centre'] = 1 + res["centre"] = 1 def test_compat_create_exclusive(tmpdir): - test_file = tmpdir.join('file.grib.idx') + test_file = tmpdir.join("file.grib.idx") try: with messages.compat_create_exclusive(str(test_file)): @@ -124,101 +124,103 @@ def test_compat_create_exclusive(tmpdir): pass with messages.compat_create_exclusive(str(test_file)) as file: - file.write(b'Hi!') + file.write(b"Hi!") with pytest.raises(OSError): with messages.compat_create_exclusive(str(test_file)) as file: - file.write(b'Hi!') + file.write(b"Hi!") def _test_FileIndex(): - res = messages.FileIndex.from_filestream(messages.FileStream(TEST_DATA), ['paramId']) - assert res['paramId'] == [129, 130] + res = messages.FileIndex.from_filestream( + messages.FileStream(TEST_DATA), ["paramId"] + ) + assert res["paramId"] == [129, 130] assert len(res) == 1 - assert list(res) == ['paramId'] + assert list(res) == ["paramId"] assert res.first() with pytest.raises(ValueError): - res.getone('paramId') + res.getone("paramId") with pytest.raises(KeyError): - res['non-existent-key'] + res["non-existent-key"] subres = res.subindex(paramId=130) - assert subres.get('paramId') == [130] - assert subres.getone('paramId') == 130 + assert subres.get("paramId") == [130] + assert subres.getone("paramId") == 130 assert len(subres) == 1 def _test_FileIndex_from_indexpath_or_filestream(tmpdir): - grib_file = tmpdir.join('file.grib') + grib_file = tmpdir.join("file.grib") - with open(TEST_DATA, 'rb') as file: + with open(TEST_DATA, "rb") as file: grib_file.write_binary(file.read()) # create index file res = messages.FileIndex.from_indexpath_or_filestream( - messages.FileStream(str(grib_file)), ['paramId'] + messages.FileStream(str(grib_file)), ["paramId"] ) assert isinstance(res, messages.FileIndex) # read index file res = 
messages.FileIndex.from_indexpath_or_filestream( - messages.FileStream(str(grib_file)), ['paramId'] + messages.FileStream(str(grib_file)), ["paramId"] ) assert isinstance(res, messages.FileIndex) # do not read nor create the index file res = messages.FileIndex.from_indexpath_or_filestream( - messages.FileStream(str(grib_file)), ['paramId'], indexpath='' + messages.FileStream(str(grib_file)), ["paramId"], indexpath="" ) assert isinstance(res, messages.FileIndex) # can't create nor read index file res = messages.FileIndex.from_indexpath_or_filestream( messages.FileStream(str(grib_file)), - ['paramId'], - indexpath=str(tmpdir.join('non-existent-folder').join('non-existent-file')), + ["paramId"], + indexpath=str(tmpdir.join("non-existent-folder").join("non-existent-file")), ) assert isinstance(res, messages.FileIndex) # trigger mtime check grib_file.remove() - with open(TEST_DATA, 'rb') as file: + with open(TEST_DATA, "rb") as file: grib_file.write_binary(file.read()) res = messages.FileIndex.from_indexpath_or_filestream( - messages.FileStream(str(grib_file)), ['paramId'] + messages.FileStream(str(grib_file)), ["paramId"] ) assert isinstance(res, messages.FileIndex) def _test_FileIndex_errors(): class MyMessage(messages.ComputedKeysMessage): - computed_keys = {'error_key': lambda m: 1 / 0} + computed_keys = {"error_key": lambda m: 1 / 0} stream = messages.FileStream(TEST_DATA, message_class=MyMessage) - res = messages.FileIndex.from_filestream(stream, ['paramId', 'error_key']) - assert res['paramId'] == [129, 130] + res = messages.FileIndex.from_filestream(stream, ["paramId", "error_key"]) + assert res["paramId"] == [129, 130] assert len(res) == 2 - assert list(res) == ['paramId', 'error_key'] - assert res['error_key'] == ['undef'] + assert list(res) == ["paramId", "error_key"] + assert res["error_key"] == ["undef"] def _test_FileStream(): res = messages.FileStream(TEST_DATA) leader = res.first() assert len(leader) > 100 - assert sum(1 for _ in res) == leader['count'] - assert len(res.index(['paramId'])) == 1 + assert sum(1 for _ in res) == leader["count"] + assert len(res.index(["paramId"])) == 1 # __file__ is not a GRIB, but contains the "GRIB" string, so it is a very tricky corner case res = messages.FileStream(str(__file__)) with pytest.raises(EOFError): res.first() - res = messages.FileStream(str(__file__), errors='ignore') + res = messages.FileStream(str(__file__), errors="ignore") with pytest.raises(EOFError): res.first() diff --git a/tests/test_eccodes.py b/tests/test_eccodes.py index 7fc1bf0..0b0e10d 100644 --- a/tests/test_eccodes.py +++ b/tests/test_eccodes.py @@ -4,40 +4,40 @@ from eccodes import * -SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') -TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib') +SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), "sample-data") +TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, "era5-levels-members.grib") # GRIB def test_grib_read(): - gid = codes_grib_new_from_samples('regular_ll_sfc_grib1') - assert codes_get(gid, 'Ni') == 16 - assert codes_get(gid, 'Nj') == 31 - assert codes_get(gid, 'const') == 1 - assert codes_get(gid, 'centre', str) == 'ecmf' - assert codes_get(gid, 'packingType', str) == 'grid_simple' - assert codes_get(gid, 'gridType', str) == 'regular_ll' + gid = codes_grib_new_from_samples("regular_ll_sfc_grib1") + assert codes_get(gid, "Ni") == 16 + assert codes_get(gid, "Nj") == 31 + assert codes_get(gid, "const") == 1 + assert codes_get(gid, "centre", str) == "ecmf" + assert 
codes_get(gid, "packingType", str) == "grid_simple" + assert codes_get(gid, "gridType", str) == "regular_ll" codes_release(gid) - gid = codes_grib_new_from_samples('sh_ml_grib2') - assert codes_get(gid, 'const') == 0 - assert codes_get(gid, 'gridType', str) == 'sh' - assert codes_get(gid, 'typeOfLevel', str) == 'hybrid' - assert codes_get_long(gid, 'avg') == 185 + gid = codes_grib_new_from_samples("sh_ml_grib2") + assert codes_get(gid, "const") == 0 + assert codes_get(gid, "gridType", str) == "sh" + assert codes_get(gid, "typeOfLevel", str) == "hybrid" + assert codes_get_long(gid, "avg") == 185 codes_release(gid) def test_grib_write(tmpdir): - gid = codes_grib_new_from_samples('GRIB2') - codes_set(gid, 'backgroundProcess', 44) - output = tmpdir.join('test_grib_write.grib') - with open(str(output), 'wb') as fout: + gid = codes_grib_new_from_samples("GRIB2") + codes_set(gid, "backgroundProcess", 44) + output = tmpdir.join("test_grib_write.grib") + with open(str(output), "wb") as fout: codes_write(gid, fout) codes_release(gid) def test_grib_keys_iterator(): - gid = codes_grib_new_from_samples('reduced_gg_pl_1280_grib1') - iterid = codes_keys_iterator_new(gid, 'ls') + gid = codes_grib_new_from_samples("reduced_gg_pl_1280_grib1") + iterid = codes_keys_iterator_new(gid, "ls") count = 0 while codes_keys_iterator_next(iterid): keyname = codes_keys_iterator_get_name(iterid) @@ -49,48 +49,53 @@ def test_grib_keys_iterator(): def test_grib_nearest(): - gid = codes_grib_new_from_samples('reduced_gg_ml_grib2') - lat,lon = 30,-20 + gid = codes_grib_new_from_samples("reduced_gg_ml_grib2") + lat, lon = 30, -20 nearest = codes_grib_find_nearest(gid, lat, lon)[0] assert nearest.index == 1770 - lat,lon = 10,0 + lat, lon = 10, 0 nearest = codes_grib_find_nearest(gid, lat, lon)[0] assert nearest.index == 2545 - lat,lon = 10,20 + lat, lon = 10, 20 nearest = codes_grib_find_nearest(gid, lat, lon, False, 4) expected_indexes = (2553, 2552, 2425, 2424) - returned_indexes = (nearest[0].index, nearest[1].index, nearest[2].index, nearest[3].index) + returned_indexes = ( + nearest[0].index, + nearest[1].index, + nearest[2].index, + nearest[3].index, + ) assert sorted(expected_indexes) == sorted(returned_indexes) codes_release(gid) # BUFR def test_bufr_read_write(tmpdir): - bid = codes_bufr_new_from_samples('BUFR4') - codes_set(bid, 'unpack', 1) - assert codes_get(bid, 'typicalYear') == 2012 - assert codes_get(bid, 'centre', str) == 'ecmf' - codes_set(bid, 'totalSunshine', 13) - codes_set(bid, 'pack', 1) - output = tmpdir.join('test_bufr_write.bufr') - with open(str(output), 'wb') as fout: + bid = codes_bufr_new_from_samples("BUFR4") + codes_set(bid, "unpack", 1) + assert codes_get(bid, "typicalYear") == 2012 + assert codes_get(bid, "centre", str) == "ecmf" + codes_set(bid, "totalSunshine", 13) + codes_set(bid, "pack", 1) + output = tmpdir.join("test_bufr_write.bufr") + with open(str(output), "wb") as fout: codes_write(bid, fout) - assert codes_get(bid, 'totalSunshine') == 13 + assert codes_get(bid, "totalSunshine") == 13 codes_release(bid) def test_bufr_keys_iterator(): - bid = codes_bufr_new_from_samples('BUFR3_local_satellite') + bid = codes_bufr_new_from_samples("BUFR3_local_satellite") # Header keys only iterid = codes_bufr_keys_iterator_new(bid) count = 0 while codes_bufr_keys_iterator_next(iterid): keyname = codes_bufr_keys_iterator_get_name(iterid) - assert '#' not in keyname + assert "#" not in keyname count += 1 assert count == 53 - codes_set(bid, 'unpack', 1) + codes_set(bid, "unpack", 1) 
     codes_bufr_keys_iterator_rewind(iterid)
     count = 0
     while codes_bufr_keys_iterator_next(iterid):

From 1b9e7f25dfb0eca05b606e0c659964c4cbce3d37 Mon Sep 17 00:00:00 2001
From: Ian Vermes
Date: Tue, 3 Dec 2019 12:14:27 +0000
Subject: [PATCH 6/8] Ignore pytype static type helper directory

---
 .gitignore | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 279da99..87f1cc5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,9 @@ __pycache__/
 *.py[cod]
 *$py.class
 
+# Static typed files
+.pytype
+
 # C extensions
 *.so
 
@@ -76,4 +79,4 @@ venv.bak/
 .docker-tox/
 tests/sample-data/cds*.grib
 tests/sample-data/cds*.nc
-*.idx
\ No newline at end of file
+*.idx

From f6e37104556582561530e643cf8586502877133a Mon Sep 17 00:00:00 2001
From: Ian Vermes
Date: Tue, 3 Dec 2019 12:15:38 +0000
Subject: [PATCH 7/8] Include code auto-formatter (Black)

---
 ci/requirements-dev.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ci/requirements-dev.txt b/ci/requirements-dev.txt
index fdec266..d1dadd2 100644
--- a/ci/requirements-dev.txt
+++ b/ci/requirements-dev.txt
@@ -11,3 +11,4 @@ tox
 tox-pyenv
 wheel
 zest.releaser
+black

From cd589d5eb0716e7d7a1c5bd5bf3e1d6c47c9f5e3 Mon Sep 17 00:00:00 2001
From: Ian Vermes
Date: Tue, 3 Dec 2019 12:16:44 +0000
Subject: [PATCH 8/8] Add step to `Get Started!` for autoformatting the code

---
 CONTRIBUTING.rst | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 6426f91..b7d63fe 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -8,7 +8,7 @@ Contributing
 Contributions are welcome, and they are greatly appreciated! Every little bit
 helps, and credit will always be given.
 
-Please note, that we have hooked a CLA assiatant to this GitHub Repo. Please accept the contributors license agreement to allow us to keep a legal track of contributions and keep this package open source for the future. 
+Please note that we have hooked a CLA assistant to this GitHub repo. Please accept the contributor license agreement so that we can keep a legal record of contributions and keep this package open source in the future.
 
 You can contribute in many ways:
 
@@ -82,13 +82,17 @@ you already have `virtualenv` and `Git` installed and ready to go.
 8. If your contribution is a bug fix or new feature, you should add a test to the
    existing test suite.
 
-9. Commit your changes and push your branch to GitHub::
+9. Format your Python code with the Black auto-formatter so that it follows the library's style. We use the default Black configuration (a maximum line length of 88 characters and `"` instead of `'` for quoting strings)::
+
+    $ black .
+
+10. Commit your changes and push your branch to GitHub::
 
     $ git add .
     $ git commit -m "Your detailed description of your changes."
     $ git push origin name-of-your-bugfix-or-feature
 
-10. Submit a pull request through the GitHub website.
+11. Submit a pull request through the GitHub website.
 
 Pull Request Guidelines
 -----------------------
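
For readers unfamiliar with Black, the short sketch below illustrates the two kinds of rewrite the patches above apply: single-quoted strings become double-quoted, and a statement longer than the default 88-character limit is split across lines. The module and helper function are hypothetical, invented only to make the example self-contained; they are not part of eccodes::

    # format_example.py -- hypothetical module, not taken from these patches.
    def some_helper(path, parameter="shortName", expected_value=None, strict=False):
        # Stand-in so the example runs; the real project defines no such helper.
        return {"path": path, "parameter": parameter, "expected": expected_value}

    # Hand-written call: one line of roughly 100 characters using single quotes:
    #   result = some_helper('era5-levels-members.grib', parameter='paramId', expected_value=129, strict=True)
    #
    # The same call after running `black .`: quotes are normalised to double
    # quotes, and the arguments move to their own indented line because the
    # original line exceeds 88 characters.
    result = some_helper(
        "era5-levels-members.grib", parameter="paramId", expected_value=129, strict=True
    )
    print(result)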