diff --git a/csv2sql/core/prefetching.py b/csv2sql/core/prefetching.py
index 114dca5..925de8b 100644
--- a/csv2sql/core/prefetching.py
+++ b/csv2sql/core/prefetching.py
@@ -4,7 +4,7 @@
 import tempfile
 
 
-class RewindableFileIterator(object):
+class RewindableFileIterator:
     """A file iterator class that can be rewinded.
     An instances of this class can create a temporary file and
     should be closed by `close()` or using `with` statement.
diff --git a/csv2sql/core/type_inference.py b/csv2sql/core/type_inference.py
index 92913af..b206e23 100644
--- a/csv2sql/core/type_inference.py
+++ b/csv2sql/core/type_inference.py
@@ -104,9 +104,8 @@ def _always_true(_):
 
 
 def _create_any_predicate(args):
-    if len(args) != 0:
+    if args:
         raise InterpretationError('Match predicate takes no argument.')
-
     return _always_true
 
 
@@ -141,11 +140,7 @@ def interpret_predicate(obj):
             'Predicate type`{0}` is invalid'.format(predicate_type))
 
     args = obj.get('args', [])  # `args` is an optional value.
-    if (
-            isinstance(args, str) or
-            isinstance(args, bytes) or
-            not hasattr(args, '__iter__')
-    ):
+    if isinstance(args, (str, bytes)) or not hasattr(args, '__iter__'):
         args = [args]
 
     predicate = predicate_generator(args)  # Can raise InterpretationError.
@@ -163,7 +158,7 @@ def interpret_patterns(obj):
     return [_interpret_one_type_pattern(item) for item in obj]
 
 
-class TypeInferrer(object):
+class TypeInferrer:
     """Infers the type while reading items."""
 
     def __init__(self, patterns, null_value=_DEFAULT_NULL_VALUE):
@@ -197,7 +192,7 @@ def type_name(self):
         return self._current[0]
 
 
-class _Inference(object):
+class _Inference:
     def __init__(self, index, patterns, null_value):
         """Initialize."""
         self._index = int(index)
diff --git a/csv2sql/main.py b/csv2sql/main.py
index 2b1fc89..48b5934 100644
--- a/csv2sql/main.py
+++ b/csv2sql/main.py
@@ -8,7 +8,7 @@
 
 import yaml
 
-import csv2sql.meta as meta
+import csv2sql.meta
 import csv2sql.queryengines.psql
 from csv2sql.core.error import InterpretationError
 from csv2sql.core.my_logging import get_logger
@@ -106,6 +106,8 @@ def _decide_patterns(args):
     try:
         with open(pattern_file_path) as pattern_file:
             return yaml.load(pattern_file)
+    # pylint: disable=try-except-raise
+    # since this flow is correct.
     except IOError:
         raise
     except TypeError as error:
@@ -131,7 +133,7 @@ def _parse_column_type(column_type):
     return index, type_name
 
 
-class _ArgsInterfaces(object):
+class _ArgsInterfaces:
     # pylint: disable=too-few-public-methods
     # since this class is an namespace.
     # readable.
@@ -221,7 +223,7 @@ def parse_args(arguments):
         description='Convert CSV data into an SQL dump.')
     parser.add_argument(
         '-v', '--version', action='version',
-        version='%(prog)s {0}'.format(meta.__version__))
+        version='%(prog)s {0}'.format(csv2sql.meta.__version__))
 
     subparsers = parser.add_subparsers(
         title='target', description='What to dump.')
diff --git a/csv2sql/queryengines/psql.py b/csv2sql/queryengines/psql.py
index fcc4ad0..178951b 100644
--- a/csv2sql/queryengines/psql.py
+++ b/csv2sql/queryengines/psql.py
@@ -84,7 +84,7 @@
 _LINE_TERMINATOR = '\n'
 
 
-class WriterWrapper(object):
+class WriterWrapper:
     """CSV writer wrapper class to escape the special strings."""
 
     def __init__(self, stream, *args, **kwargs):
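
Reviewer note (not part of the patch): the two idioms adopted in type_inference.py are behavior-preserving for the list, string, and scalar inputs those functions receive. The sketch below uses hypothetical names to show that the truthiness check `if args:` matches `len(args) != 0` for sequences, and that `isinstance(x, (str, bytes))` is the tuple form of the chained isinstance calls it replaces.

# Illustrative sketch only; the function names here are hypothetical and
# nothing in this snippet ships with the patch.

def reject_arguments(args):
    """`if args:` is the idiomatic spelling of `len(args) != 0` for sequences."""
    if args:  # a non-empty list/tuple/str is truthy, exactly like len(args) != 0
        raise ValueError('expected no arguments, got {0!r}'.format(args))


def normalize_args(args):
    """isinstance() with a tuple replaces two chained isinstance() calls."""
    if isinstance(args, (str, bytes)) or not hasattr(args, '__iter__'):
        return [args]  # wrap scalars and string-likes so callers always get a list
    return list(args)


if __name__ == '__main__':
    reject_arguments([])                     # passes: an empty list is falsy
    assert normalize_args('abc') == ['abc']  # strings are not unpacked
    assert normalize_args(42) == [42]        # non-iterables are wrapped
    assert normalize_args([1, 2]) == [1, 2]  # real sequences pass through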