62 changes: 31 additions & 31 deletions pandas/tests/io/parser/common/test_file_buffer_url.py
@@ -97,25 +97,25 @@ def test_nonexistent_path(all_parsers):
 
 @pytest.mark.skipif(WASM, reason="limited file system access on WASM")
 @td.skip_if_windows  # os.chmod does not work in windows
-def test_no_permission(all_parsers):
+def test_no_permission(all_parsers, temp_file):
     # GH 23784
     parser = all_parsers
 
     msg = r"\[Errno 13\]"
-    with tm.ensure_clean() as path:
-        os.chmod(path, 0)  # make file unreadable
+    path = temp_file
+    os.chmod(path, 0)  # make file unreadable
 
-        # verify that this process cannot open the file (not running as sudo)
-        try:
-            with open(path, encoding="utf-8"):
-                pass
-            pytest.skip("Running as sudo.")
-        except PermissionError:
+    # verify that this process cannot open the file (not running as sudo)
+    try:
+        with open(path, encoding="utf-8"):
             pass
+        pytest.skip("Running as sudo.")
+    except PermissionError:
+        pass
 
-        with pytest.raises(PermissionError, match=msg) as e:
-            parser.read_csv(path)
-        assert path == e.value.filename
+    with pytest.raises(PermissionError, match=msg) as e:
+        parser.read_csv(path)
+    assert str(path.resolve()) == e.value.filename
 
 
 @pytest.mark.parametrize(
@@ -269,19 +269,19 @@ def test_internal_eof_byte(all_parsers):
     tm.assert_frame_equal(result, expected)
 
 
-def test_internal_eof_byte_to_file(all_parsers):
+def test_internal_eof_byte_to_file(all_parsers, temp_file):
     # see gh-16559
     parser = all_parsers
     data = b'c1,c2\r\n"test \x1a test", test\r\n'
     expected = DataFrame([["test \x1a test", " test"]], columns=["c1", "c2"])
     path = f"__{uuid.uuid4()}__.csv"
 
-    with tm.ensure_clean(path) as path:
-        with open(path, "wb") as f:
-            f.write(data)
+    path2 = temp_file.parent / path
+    with open(path2, "wb") as f:
+        f.write(data)
 
-        result = parser.read_csv(path)
-        tm.assert_frame_equal(result, expected)
+    result = parser.read_csv(path2)
+    tm.assert_frame_equal(result, expected)
 
 
 def test_file_handle_string_io(all_parsers):
@@ -372,7 +372,7 @@ def test_read_csv_file_handle(all_parsers, io_class, encoding):
     assert not handle.closed
 
 
-def test_memory_map_compression(all_parsers, compression):
+def test_memory_map_compression(all_parsers, compression, temp_file):
     """
     Support memory map for compressed files.
 
@@ -381,16 +381,16 @@ def test_memory_map_compression(all_parsers, compression):
     parser = all_parsers
     expected = DataFrame({"a": [1], "b": [2]})
 
-    with tm.ensure_clean() as path:
-        expected.to_csv(path, index=False, compression=compression)
+    path = temp_file
+    expected.to_csv(path, index=False, compression=compression)
 
-        if parser.engine == "pyarrow":
-            msg = "The 'memory_map' option is not supported with the 'pyarrow' engine"
-            with pytest.raises(ValueError, match=msg):
-                parser.read_csv(path, memory_map=True, compression=compression)
-            return
+    if parser.engine == "pyarrow":
+        msg = "The 'memory_map' option is not supported with the 'pyarrow' engine"
+        with pytest.raises(ValueError, match=msg):
+            parser.read_csv(path, memory_map=True, compression=compression)
+        return
 
-        result = parser.read_csv(path, memory_map=True, compression=compression)
+    result = parser.read_csv(path, memory_map=True, compression=compression)
 
     tm.assert_frame_equal(
         result,
@@ -442,12 +442,12 @@ def test_context_manageri_user_provided(all_parsers, datapath):
 
 
 @skip_pyarrow  # ParserError: Empty CSV file
-def test_file_descriptor_leak(all_parsers):
+def test_file_descriptor_leak(all_parsers, temp_file):
    # GH 31488
     parser = all_parsers
-    with tm.ensure_clean() as path:
-        with pytest.raises(EmptyDataError, match="No columns to parse from file"):
-            parser.read_csv(path)
+    path = temp_file
+    with pytest.raises(EmptyDataError, match="No columns to parse from file"):
+        parser.read_csv(path)
 
 
 def test_memory_map(all_parsers, csv_dir_path):
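Note: the `temp_file` fixture that these tests now request is not part of this diff. A minimal sketch of what such a fixture could look like, assuming it simply wraps pytest's built-in `tmp_path` fixture and returns a `pathlib.Path` to an already-created empty file (the actual definition lives elsewhere in the pandas test suite and may differ):

import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # tmp_path is pytest's built-in per-test temporary directory (a pathlib.Path).
    # Create a uniquely named, empty file inside it so callers such as os.chmod()
    # and parser.read_csv() have an existing path to operate on.
    file_path = tmp_path / str(uuid.uuid4())
    file_path.touch()
    return file_path

Because pytest cleans up `tmp_path` after each test, the explicit cleanup that `tm.ensure_clean()` provided is no longer needed. Since the fixture yields a `Path` rather than a string, the permission test above compares against `str(path.resolve())` when checking `e.value.filename`.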