Update for Pylint 1.5

jeremyh committed Feb 6, 2016
1 parent 06c293e commit ead7c5a
Showing 9 changed files with 64 additions and 64 deletions.
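Most of the 64 changed lines fall into two recurring patterns: calls to the deprecated Logger.warn alias are switched to Logger.warning, and imports are regrouped or renamed (for example, from pathlib import Path becomes import pathlib). A minimal sketch of the logging change, using an illustrative argument rather than one of the project's real call sites:

    import logging

    _LOG = logging.getLogger(__name__)

    # Logger.warn is a deprecated alias for Logger.warning in the standard
    # library, so stricter lint runs flag it as a deprecated method.
    _LOG.warn('Station GSI not known: %r', 'XYZ')      # old spelling, flagged
    _LOG.warning('Station GSI not known: %r', 'XYZ')   # spelling used after this commit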
18 changes: 9 additions & 9 deletions eodatasets/browseimage.py
@@ -1,20 +1,20 @@
# coding=utf-8
from __future__ import absolute_import

import logging
import math
import os
import shutil
from subprocess import check_call
import math
import tempfile
from subprocess import check_call

import gdalconst
import gdal
import gdalconst
import numpy
from pathlib import Path
import pathlib

from eodatasets import serialise, drivers
import eodatasets.type as ptype

from eodatasets import serialise, drivers

GDAL_CACHE_MAX_MB = 512

@@ -68,7 +68,7 @@ def _calculate_scale_offset(nodata, band):

# From the old Jobmanager codebase: avoid divide by zero caused by some stats.
if diff_ == 0:
_LOG.warn("dfScaleSrc Min and Max are equal! Applying correction")
_LOG.warning("dfScaleSrc Min and Max are equal! Applying correction")
diff_ = 1

dfScale = (dfScaleDstMax - dfScaleDstMin) / diff_
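For context, _calculate_scale_offset maps a band's pixel range onto a destination range, and the guard above avoids a divide-by-zero when the band statistics give an empty range. A rough standalone sketch of that kind of calculation (the function and variable names other than diff_ are assumptions, not the file's actual code):

    def calculate_scale_offset(src_min, src_max, dst_min=0.0, dst_max=255.0):
        """Linear mapping of [src_min, src_max] onto [dst_min, dst_max]."""
        diff_ = src_max - src_min
        if diff_ == 0:
            # Degenerate statistics (min == max) would otherwise divide by zero.
            diff_ = 1
        scale = (dst_max - dst_min) / float(diff_)
        offset = dst_min - src_min * scale
        return scale, offset

    # e.g. an 8-bit stretch of a band whose statistics run 300..1200:
    print(calculate_scale_offset(300, 1200))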
@@ -102,7 +102,7 @@ def _create_thumbnail(red_file, green_file, blue_file, thumb_image,
nodata = int(nodata)

# GDAL calls need absolute paths.
thumbnail_path = Path(thumb_image).absolute()
thumbnail_path = pathlib.Path(thumb_image).absolute()

if thumbnail_path.exists() and not overwrite:
_LOG.warning('File already exists. Skipping creation of %s', thumbnail_path)
@@ -205,7 +205,7 @@ def _create_thumbnail(red_file, green_file, blue_file, thumb_image,

# Newer versions of GDAL create aux files due to the histogram. Clean them up.
for f in (red_file, blue_file, green_file):
f = Path(f)
f = pathlib.Path(f)
aux_file = f.with_name(f.name + '.aux.xml')
if aux_file.exists():
_LOG.info('Cleaning aux: %s', aux_file)
32 changes: 16 additions & 16 deletions eodatasets/drivers.py
@@ -1,18 +1,19 @@
# coding=utf-8
from __future__ import absolute_import

import datetime
import logging
import re
import string
import datetime
import xml.etree.cElementTree as etree

from pathlib import Path
from dateutil.parser import parse
from pathlib import Path

from eodatasets import type as ptype, metadata
from eodatasets.metadata import _GROUNDSTATION_LIST
from eodatasets.metadata import mdf, mtl, adsfolder, rccfile, \
passinfo, pds, npphdf5, image as md_image
from eodatasets.metadata import _GROUNDSTATION_LIST
from eodatasets import type as ptype, metadata

_LOG = logging.getLogger(__name__)

@@ -216,7 +217,7 @@ def _get_process_code(dataset):
if dataset.ga_level == 'P00':
return 'satellite_telemetry_data', 'P00'

_LOG.warn('No process code mapped for level/orientation: %r, %r', level, orientation)
_LOG.warning('No process code mapped for level/orientation: %r, %r', level, orientation)
return None, None


@@ -716,18 +717,17 @@ def els2date(els):

# check if the dates in the metadata file are at least as accurate as what we have
filename_time = datetime.datetime.strptime(fields["date"], "%Y%m%d")
if abs(start_time - filename_time).days == 0:
dataset.acquisition.aos = aos
dataset.acquisition.los = los
dataset.extent.center_dt = start_time + (end_time - start_time)/2
dataset.extent.from_dt = start_time
dataset.extent.to_dt = end_time
else:
dataset.acquisition.aos = filename_time.date()
dataset.acquisition.los = dataset.acquisition.aos
if dataset.extent and not dataset.extent.center_dt:
dataset.extent.center_dt = dataset.acquisition.aos
time_diff = start_time - filename_time

# Is the EODS metadata extremely off?
if abs(time_diff).days != 0:
raise ValueError('EODS time information differs too much from source files: %s' % time_diff)

dataset.acquisition.aos = aos
dataset.acquisition.los = los
dataset.extent.center_dt = start_time + (end_time - start_time)/2
dataset.extent.from_dt = start_time
dataset.extent.to_dt = end_time
return dataset
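The rework above drops the old fallback to the filename date and instead raises when the EODS metadata and the filename-embedded date disagree by a day or more. The check relies on abs(timedelta).days being zero whenever the difference is under 24 hours; a small illustration with made-up timestamps:

    import datetime

    filename_time = datetime.datetime.strptime('20141105', '%Y%m%d')

    # Same day: .days of the absolute difference is 0, so the dataset is accepted.
    start_time = datetime.datetime(2014, 11, 5, 1, 23, 45)
    assert abs(start_time - filename_time).days == 0

    # A day or more out: .days is non-zero, which now triggers the ValueError.
    late_start = datetime.datetime(2014, 11, 7, 1, 23, 45)
    assert abs(late_start - filename_time).days != 0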


2 changes: 1 addition & 1 deletion eodatasets/metadata/__init__.py
@@ -78,7 +78,7 @@ def get_groundstation(gsi):
gsi = normalise_gsi(gsi)
stations = [g for g in _GROUNDSTATION_LIST if g['code'].upper() == gsi]
if not stations:
_LOG.warn('Station GSI not known: %r', gsi)
_LOG.warning('Station GSI not known: %r', gsi)
return None
station = stations[0]
return ptype.GroundstationMetadata(
8 changes: 4 additions & 4 deletions eodatasets/metadata/passinfo.py
@@ -3,12 +3,12 @@
Metadata extraction from passinfo files.
"""
from __future__ import absolute_import
import logging

import datetime
import logging

from eodatasets import type as ptype


_log = logging.getLogger(__name__)


@@ -29,7 +29,7 @@ def extract_md(base_md, directory):
return base_md

if len(passinfos) > 1:
_log.warn('Multiple passinfo files in directory: %r', passinfos)
_log.warning('Multiple passinfo files in directory: %r', passinfos)

passinfo = passinfos[0]
_log.info("Found passinfo '%s'", passinfo)
@@ -51,7 +51,7 @@ def station_to_gsi(station):
# Hobart
gsi = 'HOA'
else:
_log.warn("Unknown station value %r. Falling back to RCC extraction.", station)
_log.warning("Unknown station value %r. Falling back to RCC extraction.", station)
gsi = None
return gsi

5 changes: 3 additions & 2 deletions eodatasets/metadata/pds.py
@@ -3,9 +3,10 @@
PDF file metadata extraction.
"""
from __future__ import absolute_import

import datetime
import logging
import re
import datetime
from subprocess import check_output

from pathlib import Path
@@ -101,7 +102,7 @@ def find_pds_file(path):
return None

if len(pds_files) > 1:
_LOG.warn('Multiple PDS files founds %s', pds_files)
_LOG.warning('Multiple PDS files founds %s', pds_files)

return pds_files[0]

9 changes: 5 additions & 4 deletions eodatasets/metadata/rccfile.py
@@ -3,9 +3,10 @@
Metadata extraction from RCC files.
"""
from __future__ import absolute_import

import datetime
import logging
import re
import datetime

from eodatasets import type as ptype

@@ -110,17 +111,17 @@ def _expand_platform_info(vehicle_char, vehicle_num, instrument_char, sensor_mod
if instrument_char == 'E':
instrument_name = 'ETM'
else:
_log.warn('Unknown LS7 sensor char: %s', instrument_char)
_log.warning('Unknown LS7 sensor char: %s', instrument_char)
elif vehicle_num == '5':
if instrument_char == 'T':
instrument_name = 'TM'
else:
_log.warn('Unknown LS4/5 sensor char: %s', instrument_char)
_log.warning('Unknown LS4/5 sensor char: %s', instrument_char)

operation_mode = _INSTRUMENT_MODES.get(sensor_mode_char)

else:
_log.warn('Unknown vehicle: %s', vehicle_char)
_log.warning('Unknown vehicle: %s', vehicle_char)
return platform_code, instrument_name, operation_mode


12 changes: 6 additions & 6 deletions eodatasets/serialise.py
@@ -1,19 +1,19 @@
# coding=utf-8
from __future__ import absolute_import

import collections
import datetime
import logging
import os
import collections
import uuid
import time
import logging
import uuid

import pathlib
import yaml
from pathlib import Path
import pathlib

from eodatasets import compat
import eodatasets.type as ptype
from eodatasets import compat


_LOG = logging.getLogger(__name__)
@@ -280,7 +280,7 @@ def namespace(k, key_prefix):
yield key_prefix, str(o)
elif isinstance(o, Path):
if not o.is_absolute():
_LOG.warn('Non-absolute path: %r', o)
_LOG.warning('Non-absolute path: %r', o)
val = o.relative_to(relative_to) if o.is_absolute() else o
yield key_prefix, str(val)
elif o is None:
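For reference, the Path branch above writes absolute paths relative to a base directory via pathlib's relative_to and only warns about (and passes through) paths that are already relative. A quick illustration of relative_to with made-up paths:

    import pathlib

    base = pathlib.Path('/data/packages/LS7_scene')
    thumb = pathlib.Path('/data/packages/LS7_scene/package/thumb.jpg')

    # Absolute paths get rewritten relative to the base directory.
    print(thumb.relative_to(base))   # package/thumb.jpg

    # A path that is not absolute is left alone (and warned about) above.
    print(pathlib.Path('package/thumb.jpg').is_absolute())   # False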
12 changes: 9 additions & 3 deletions eodatasets/type.py
@@ -2,14 +2,14 @@
# Our metadata 'classes' validly have many arguments, to match the metadata format.
# pylint: disable=too-many-arguments,too-many-instance-attributes,too-many-locals
from __future__ import absolute_import

import datetime
import inspect
import uuid
import logging
import uuid

from pathlib import Path


_LOG = logging.getLogger()


@@ -66,6 +66,8 @@ def item_defaults(cls):
(ordered output is primarily useful for readability: such as repr() or log output.)
:rtype: [(str, obj)]
"""
# inspect.signature() is not available in Python 2.7, so we use the py3-deprecated getargspec().
# pylint: disable=deprecated-method
constructor_spec = inspect.getargspec(cls.__init__)
constructor_args = constructor_spec.args[1:]
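The new comment above records why item_defaults stays on inspect.getargspec: inspect.signature is Python-3-only, while this codebase still supports Python 2.7. A self-contained sketch of reading constructor arguments and their defaults with getargspec (the example class and the zip step are illustrative, not the file's code):

    import inspect


    class Example(object):
        def __init__(self, aos=None, los=None, groundstation=None):
            pass


    spec = inspect.getargspec(Example.__init__)
    args = spec.args[1:]              # drop 'self', as the file does
    defaults = spec.defaults or ()

    # defaults line up with the *last* len(defaults) arguments.
    pairs = list(zip(args[len(args) - len(defaults):], defaults))
    print(pairs)   # [('aos', None), ('los', None), ('groundstation', None)]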

@@ -123,7 +125,7 @@ def from_dict(cls, dict_):
if key not in possible_properties:
# Reserved python words may have an underscore appended
if key + '_' not in possible_properties:
_LOG.warn('Unknown property %r in %r', key, cls.__name__)
_LOG.warning('Unknown property %r in %r', key, cls.__name__)
continue

key += '_'
@@ -655,7 +657,9 @@ def __init__(self, aos=None, los=None, groundstation=None, heading=None, platfor
"""

# Acquisition/Loss Of signal
#: :type: datetime.datetime or datetime.date
self.aos = aos
#: :type: datetime.datetime or datetime.date
self.los = los

self.groundstation = groundstation
@@ -868,12 +872,14 @@ def rebase_paths(source_path, destination_path, object_):
:type source_path: Path
:type destination_path: Path
"""

def rebase_if_path(o):
if isinstance(o, Path):
return rebase_path(source_path, destination_path, o)
return o

return map_values(rebase_if_path, object_)


# Circular reference.
LineageMetadata.PROPERTY_PARSERS['source_datasets'] = DatasetMetadata.from_named_dicts
30 changes: 11 additions & 19 deletions pylintrc
@@ -7,9 +7,6 @@
# pygtk.require().
#init-hook=

# Profiled execution.
profile=no

# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
@@ -42,7 +39,17 @@ load-plugins=
# trailing-whitespace is due to a dos line-endings bug. This is fixed in the next version of
# pylint
#disable=bare-except,no-self-use,unused-argument,duplicate-code,trailing-whitespace,abstract-class-little-used,star-args,abstract-class-not-used,missing-docstring,no-member,unused-variable,unused-import,locally-disabled
disable=no-self-use,star-args,duplicate-code,unused-argument,missing-docstring,no-member,unused-variable,unused-import,locally-disabled
disable=no-self-use,
star-args,
duplicate-code,
unused-argument,
missing-docstring,
no-member,
unused-variable,
unused-import,
locally-disabled,
# The different import orders on py 2 and 3 make this unpleasant.
wrong-import-order
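wrong-import-order is newly disabled because pylint groups imports by where they resolve in the interpreter that runs it, and this codebase targets both Python 2 and 3. Assuming, for example, that pathlib comes from the PyPI backport on 2.7, the very same file is grouped differently on each interpreter:

    # Illustrative only -- not a file from this repository.
    import logging    # standard library on both Python 2 and 3

    import pathlib    # stdlib on Python 3.4+, but a third-party backport on 2.7,
                      # so the standard/third-party ordering differs per interpreter.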


[REPORTS]
@@ -67,20 +74,13 @@ reports=yes
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no

# Template used to display messages. This is a python new-style format string
# used to format the massage information. See doc for all details
#msg-template=


[BASIC]

# Required attributes for module, separated by a comma
required-attributes=

# List of builtins function names that should not be used, separated by a comma
bad-functions=apply,input

@@ -176,10 +176,6 @@ ignore-mixin-members=yes
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject

# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
@@ -222,10 +218,6 @@ int-import-graph=

[CLASSES]

# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
