Fix more flake8 warnings
Change-Id: Ibf5243fc7a41cde3abf3f95d1a38632d7c811da5
wesm committed Oct 23, 2017
1 parent 2eb8bf4 commit cd4b655
Showing 6 changed files with 25 additions and 25 deletions.
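For context, the warnings addressed here are flake8's E722 ("do not use bare 'except'") and E741 ("ambiguous variable name"). A minimal illustration with a hypothetical bad.py; the flake8 command and rule codes are standard, but the exact output text may vary by version:

    # bad.py
    l = [1, 2, 3]        # E741: 'l' reads like the digit 1 in many fonts
    try:
        total = sum(l)
    except:              # E722: a bare 'except' even catches SystemExit
        total = 0

    $ flake8 --select=E722,E741 bad.py
    bad.py:2:1: E741 ambiguous variable name 'l'
    bad.py:5:1: E722 do not use bare 'except'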
python/pyarrow/array.pxi (2 changes: 1 addition & 1 deletion)
@@ -47,7 +47,7 @@ cdef _is_array_like(obj):
     try:
         import pandas
         return isinstance(obj, (np.ndarray, pd.Series, pd.Index, Categorical))
-    except:
+    except ImportError:
         return isinstance(obj, np.ndarray)


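Narrowing the handler to ImportError matters here: with a bare except, a genuine bug inside the try block (say, an AttributeError from a misspelled pandas attribute) would be silently treated as "pandas not installed". A standalone sketch of the probe pattern, not the pyarrow source itself:

    import numpy as np

    def is_array_like(obj):
        # Only a genuinely missing pandas triggers the fallback;
        # any other error propagates and surfaces real bugs.
        try:
            import pandas as pd
            return isinstance(obj, (np.ndarray, pd.Series, pd.Index))
        except ImportError:
            return isinstance(obj, np.ndarray)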
python/pyarrow/feather.py (2 changes: 1 addition & 1 deletion)
@@ -116,7 +116,7 @@ def write_feather(df, dest):
     writer = FeatherWriter(dest)
     try:
         writer.write(df)
-    except:
+    except Exception:
         # Try to make sure the resource is closed
         import gc
         writer = None
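The handler drops the writer reference and (per the import gc visible in the hunk) forces a collection so the file handle is released before cleanup. A hedged sketch of that cleanup-and-reraise shape, with hypothetical names (write_fn, dest), not the pyarrow API:

    import gc
    import os

    def write_with_cleanup(write_fn, dest):
        # 'write_fn' and 'dest' are illustrative stand-ins.
        try:
            write_fn(dest)
        except Exception:
            # Drop references and collect so any open handle on 'dest'
            # is released, then remove the partial file and re-raise.
            gc.collect()
            if os.path.exists(dest):
                os.remove(dest)
            raise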
python/pyarrow/io-hdfs.pxi (4 changes: 2 additions & 2 deletions)
@@ -32,7 +32,7 @@ def have_libhdfs():
         with nogil:
             check_status(HaveLibHdfs())
         return True
-    except:
+    except Exception:
         return False


@@ -41,7 +41,7 @@ def have_libhdfs3():
         with nogil:
             check_status(HaveLibHdfs3())
         return True
-    except:
+    except Exception:
         return False


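except Exception satisfies E722 while staying broad enough for a feature probe: unlike a bare except, it does not trap KeyboardInterrupt or SystemExit, which derive from BaseException rather than Exception. A standalone illustration of the probe idiom, with a made-up checker:

    def have_native_backend(check_fn):
        # 'check_fn' is a stand-in for a probe such as HaveLibHdfs(); it
        # may raise any Exception when the native library is missing.
        try:
            check_fn()
            return True
        except Exception:
            # KeyboardInterrupt and SystemExit still propagate past here.
            return False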
python/pyarrow/parquet.py (2 changes: 1 addition & 1 deletion)
@@ -915,7 +915,7 @@ def write_table(table, where, row_group_size=None, version='1.0',
             use_deprecated_int96_timestamps=use_deprecated_int96_timestamps,
             **kwargs)
         writer.write_table(table, row_group_size=row_group_size)
-    except:
+    except Exception:
         if writer is not None:
             writer.close()
         if isinstance(where, six.string_types):
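The cleanup branch closes the half-finished writer and then deletes the destination only when where is a path (six.string_types covers str and unicode on Python 2); a caller-supplied file-like sink is left untouched. A hedged, Python-3-only sketch of that distinction, with illustrative names:

    import os

    def cleanup_failed_write(writer, where):
        # Close the partially written output first so it can be removed.
        if writer is not None:
            writer.close()
        # Only remove the output if we own it, i.e. we opened it from a path.
        if isinstance(where, str):
            try:
                os.remove(where)
            except OSError:
                pass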
python/pyarrow/tests/test_feather.py (2 changes: 1 addition & 1 deletion)
@@ -289,7 +289,7 @@ def test_delete_partial_file_on_error(self):
         path = random_path()
         try:
             write_feather(df, path)
-        except:
+        except Exception:
             pass
 
         assert not os.path.exists(path)
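The test only cares that a failed write leaves no partial file behind, so it swallows the exception and asserts on the filesystem. A pytest-style sketch of the same pattern with a hypothetical failing writer (tmp_path is pytest's built-in fixture):

    import os

    def test_no_partial_file_on_error(tmp_path):
        path = str(tmp_path / "out.feather")

        def broken_write(p):
            # Simulate a failure before any file is created.
            raise IOError("simulated mid-write failure")

        try:
            broken_write(path)
        except Exception:
            pass
        assert not os.path.exists(path)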
python/pyarrow/tests/test_serialization.py (38 changes: 19 additions & 19 deletions)
@@ -62,7 +62,7 @@ def assert_equal(obj1, obj2):
         # Workaround to make comparison of OrderedDicts work on Python 2.7
         if obj1 == obj2:
             return
-    except:
+    except Exception:
         pass
     if obj1.__dict__ == {}:
         print("WARNING: Empty dict in ", obj1)
@@ -374,24 +374,24 @@ def huge_memory_map(temp_dir):
     # Test that objects that are too large for Arrow throw a Python
     # exception. These tests give out of memory errors on Travis and need
     # to be run on a machine with lots of RAM.
-    l = 2 ** 29 * [1.0]
-    serialization_roundtrip(l, mmap)
-    del l
-    l = 2 ** 29 * ["s"]
-    serialization_roundtrip(l, mmap)
-    del l
-    l = 2 ** 29 * [["1"], 2, 3, [{"s": 4}]]
-    serialization_roundtrip(l, mmap)
-    del l
-    l = 2 ** 29 * [{"s": 1}] + 2 ** 29 * [1.0]
-    serialization_roundtrip(l, mmap)
-    del l
-    l = np.zeros(2 ** 25)
-    serialization_roundtrip(l, mmap)
-    del l
-    l = [np.zeros(2 ** 18) for _ in range(2 ** 7)]
-    serialization_roundtrip(l, mmap)
-    del l
+    x = 2 ** 29 * [1.0]
+    serialization_roundtrip(x, mmap)
+    del x
+    x = 2 ** 29 * ["s"]
+    serialization_roundtrip(x, mmap)
+    del x
+    x = 2 ** 29 * [["1"], 2, 3, [{"s": 4}]]
+    serialization_roundtrip(x, mmap)
+    del x
+    x = 2 ** 29 * [{"s": 1}] + 2 ** 29 * [1.0]
+    serialization_roundtrip(x, mmap)
+    del x
+    x = np.zeros(2 ** 25)
+    serialization_roundtrip(x, mmap)
+    del x
+    x = [np.zeros(2 ** 18) for _ in range(2 ** 7)]
+    serialization_roundtrip(x, mmap)
+    del x
 
 
 def test_serialization_callback_error():
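The rename from l to x is purely cosmetic: E741 flags the names l, O, and I because they are easily confused with the digits 1 and 0 in many fonts. A tiny illustration, not taken from the commit:

    # Ambiguous: is that the letter l or the digit 1?
    l = [1]
    total = l[0] + 11

    # Same logic with an unambiguous name; flake8 is satisfied.
    xs = [1]
    total = xs[0] + 11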
