
Commit 833f192

TST: Replace ensure_clean utility with temp_file pytest fixture (2 files) (#62546)
1 parent: 6cca195

File tree: 2 files changed (+57 −63 lines)
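The `temp_file` fixture this commit switches to is defined in pandas' test conftest; its exact definition is not shown in the diff, but a minimal sketch of the idea, assuming it builds on pytest's built-in `tmp_path` fixture, looks like this:

```python
# Hedged sketch of a temp_file-style fixture (the real one lives in pandas'
# conftest and may differ): it hands each test a unique path inside pytest's
# per-test tmp_path directory, so cleanup is automatic -- pytest prunes old
# temporary directories itself, and the test body no longer needs the
# tm.ensure_clean() context manager.
import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # tmp_path is pytest's built-in per-test pathlib.Path directory.
    path = tmp_path / str(uuid.uuid4())
    path.touch()
    return path
```

Because the fixture returns a plain `pathlib.Path`, call sites simply pass it anywhere a filename was passed before.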

pandas/tests/io/json/test_compression.py (43 additions & 46 deletions)
```diff
@@ -12,22 +12,21 @@
 import pandas._testing as tm
 
 
-def test_compression_roundtrip(compression):
+def test_compression_roundtrip(compression, temp_file):
     df = pd.DataFrame(
         [[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
         index=["A", "B"],
         columns=["X", "Y", "Z"],
     )
 
-    with tm.ensure_clean() as path:
-        df.to_json(path, compression=compression)
-        tm.assert_frame_equal(df, pd.read_json(path, compression=compression))
+    df.to_json(temp_file, compression=compression)
+    tm.assert_frame_equal(df, pd.read_json(temp_file, compression=compression))
 
-        # explicitly ensure file was compressed.
-        with tm.decompress_file(path, compression) as fh:
-            result = fh.read().decode("utf8")
-            data = StringIO(result)
-            tm.assert_frame_equal(df, pd.read_json(data))
+    # explicitly ensure file was compressed.
+    with tm.decompress_file(temp_file, compression) as fh:
+        result = fh.read().decode("utf8")
+        data = StringIO(result)
+        tm.assert_frame_equal(df, pd.read_json(data))
 
 
 def test_read_zipped_json(datapath):
@@ -43,15 +42,14 @@ def test_read_zipped_json(datapath):
 @td.skip_if_not_us_locale
 @pytest.mark.single_cpu
 @pytest.mark.network
-def test_with_s3_url(compression, s3_bucket_public, s3so):
+def test_with_s3_url(compression, s3_bucket_public, s3so, temp_file):
     # Bucket created in tests/io/conftest.py
     df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
 
     key = f"{uuid.uuid4()}.json"
-    with tm.ensure_clean() as path:
-        df.to_json(path, compression=compression)
-        with open(path, "rb") as f:
-            s3_bucket_public.put_object(Key=key, Body=f)
+    df.to_json(temp_file, compression=compression)
+    with open(temp_file, "rb") as f:
+        s3_bucket_public.put_object(Key=key, Body=f)
 
     roundtripped_df = pd.read_json(
         f"s3://{s3_bucket_public.name}/{key}",
@@ -61,39 +59,35 @@ def test_with_s3_url(compression, s3_bucket_public, s3so):
     tm.assert_frame_equal(df, roundtripped_df)
 
 
-def test_lines_with_compression(compression):
-    with tm.ensure_clean() as path:
-        df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
-        df.to_json(path, orient="records", lines=True, compression=compression)
-        roundtripped_df = pd.read_json(path, lines=True, compression=compression)
-        tm.assert_frame_equal(df, roundtripped_df)
+def test_lines_with_compression(compression, temp_file):
+    df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
+    df.to_json(temp_file, orient="records", lines=True, compression=compression)
+    roundtripped_df = pd.read_json(temp_file, lines=True, compression=compression)
+    tm.assert_frame_equal(df, roundtripped_df)
 
 
-def test_chunksize_with_compression(compression):
-    with tm.ensure_clean() as path:
-        df = pd.read_json(StringIO('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}'))
-        df.to_json(path, orient="records", lines=True, compression=compression)
+def test_chunksize_with_compression(compression, temp_file):
+    df = pd.read_json(StringIO('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}'))
+    df.to_json(temp_file, orient="records", lines=True, compression=compression)
 
-        with pd.read_json(
-            path, lines=True, chunksize=1, compression=compression
-        ) as res:
-            roundtripped_df = pd.concat(res)
-        tm.assert_frame_equal(df, roundtripped_df)
+    with pd.read_json(
+        temp_file, lines=True, chunksize=1, compression=compression
+    ) as res:
+        roundtripped_df = pd.concat(res)
+    tm.assert_frame_equal(df, roundtripped_df)
 
 
-def test_write_unsupported_compression_type():
+def test_write_unsupported_compression_type(temp_file):
     df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
-    with tm.ensure_clean() as path:
-        msg = "Unrecognized compression type: unsupported"
-        with pytest.raises(ValueError, match=msg):
-            df.to_json(path, compression="unsupported")
+    msg = "Unrecognized compression type: unsupported"
+    with pytest.raises(ValueError, match=msg):
+        df.to_json(temp_file, compression="unsupported")
 
 
-def test_read_unsupported_compression_type():
-    with tm.ensure_clean() as path:
-        msg = "Unrecognized compression type: unsupported"
-        with pytest.raises(ValueError, match=msg):
-            pd.read_json(path, compression="unsupported")
+def test_read_unsupported_compression_type(temp_file):
+    msg = "Unrecognized compression type: unsupported"
+    with pytest.raises(ValueError, match=msg):
+        pd.read_json(temp_file, compression="unsupported")
 
 
 @pytest.mark.parametrize(
@@ -102,25 +96,28 @@ def test_read_unsupported_compression_type():
 @pytest.mark.parametrize("to_infer", [True, False])
 @pytest.mark.parametrize("read_infer", [True, False])
 def test_to_json_compression(
-    compression_only, read_infer, to_infer, compression_to_extension, infer_string
+    compression_only,
+    read_infer,
+    to_infer,
+    compression_to_extension,
+    infer_string,
+    tmp_path,
 ):
     with pd.option_context("future.infer_string", infer_string):
         # see gh-15008
         compression = compression_only
 
         # We'll complete file extension subsequently.
-        filename = "test."
-        filename += compression_to_extension[compression]
+        filename = tmp_path / f"test.{compression_to_extension[compression]}"
 
         df = pd.DataFrame({"A": [1]})
 
         to_compression = "infer" if to_infer else compression
         read_compression = "infer" if read_infer else compression
 
-        with tm.ensure_clean(filename) as path:
-            df.to_json(path, compression=to_compression)
-            result = pd.read_json(path, compression=read_compression)
-            tm.assert_frame_equal(result, df)
+        df.to_json(filename, compression=to_compression)
+        result = pd.read_json(filename, compression=read_compression)
+        tm.assert_frame_equal(result, df)
 
 
 def test_to_json_compression_mode(compression):
```
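Note that `test_to_json_compression` takes pytest's built-in `tmp_path` rather than `temp_file`: with `compression="infer"`, pandas picks the codec from the filename suffix, so this test has to control the name. A small standalone illustration of that inference (hypothetical test, gzip chosen for concreteness):

```python
import pandas as pd


def test_infer_gzip_from_suffix(tmp_path):
    # With compression="infer", to_json/read_json choose the codec from the
    # filename suffix, so the name matters; a random suffix-less temp file
    # would defeat the inference being exercised.
    filename = tmp_path / "test.gz"
    df = pd.DataFrame({"A": [1]})
    df.to_json(filename, compression="infer")  # .gz suffix -> gzip
    result = pd.read_json(filename, compression="infer")
    pd.testing.assert_frame_equal(result, df)
```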

pandas/tests/io/json/test_pandas.py (14 additions & 17 deletions)
```diff
@@ -806,11 +806,10 @@ def test_reconstruction_index(self):
         result = read_json(StringIO(df.to_json()))
         tm.assert_frame_equal(result, df)
 
-    def test_path(self, float_frame, int_frame, datetime_frame):
-        with tm.ensure_clean("test.json") as path:
-            for df in [float_frame, int_frame, datetime_frame]:
-                df.to_json(path)
-                read_json(path)
+    def test_path(self, float_frame, int_frame, datetime_frame, temp_file):
+        for df in [float_frame, int_frame, datetime_frame]:
+            df.to_json(temp_file)
+            read_json(temp_file)
 
     def test_axis_dates(self, datetime_series, datetime_frame):
         # frame
@@ -1423,14 +1422,13 @@ def test_read_s3_jsonl(self, s3_bucket_public_with_data, s3so):
         expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
         tm.assert_frame_equal(result, expected)
 
-    def test_read_local_jsonl(self):
+    def test_read_local_jsonl(self, temp_file):
         # GH17200
-        with tm.ensure_clean("tmp_items.json") as path:
-            with open(path, "w", encoding="utf-8") as infile:
-                infile.write('{"a": 1, "b": 2}\n{"b":2, "a" :1}\n')
-            result = read_json(path, lines=True)
-            expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
-            tm.assert_frame_equal(result, expected)
+        with open(temp_file, "w", encoding="utf-8") as infile:
+            infile.write('{"a": 1, "b": 2}\n{"b":2, "a" :1}\n')
+        result = read_json(temp_file, lines=True)
+        expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
+        tm.assert_frame_equal(result, expected)
 
     def test_read_jsonl_unicode_chars(self):
         # GH15132: non-ascii unicode characters
@@ -1526,17 +1524,16 @@ def test_to_jsonl(self):
         ],
     )
     @pytest.mark.parametrize("dtype", ["category", object])
-    def test_latin_encoding(self, dtype, val):
+    def test_latin_encoding(self, dtype, val, temp_file):
         # GH 13774
         ser = Series(
             [x.decode("latin-1") if isinstance(x, bytes) else x for x in val],
             dtype=dtype,
         )
         encoding = "latin-1"
-        with tm.ensure_clean("test.json") as path:
-            ser.to_json(path, encoding=encoding)
-            retr = read_json(StringIO(path), encoding=encoding)
-            tm.assert_series_equal(ser, retr, check_categorical=False)
+        ser.to_json(temp_file, encoding=encoding)
+        retr = read_json(StringIO(temp_file), encoding=encoding)
+        tm.assert_series_equal(ser, retr, check_categorical=False)
 
     def test_data_frame_size_after_to_json(self):
         # GH15344
```
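For contrast, the utility being retired wrapped each test body in a context manager and deleted the file itself on exit. A rough sketch of that contract (the real `tm.ensure_clean` in `pandas._testing` handles more edge cases):

```python
import uuid
from contextlib import contextmanager
from pathlib import Path


@contextmanager
def ensure_clean_sketch(filename=None):
    # Roughly the retired pattern: yield a fresh path, then delete the file
    # when the block exits, keeping cleanup inside every individual test.
    path = Path(filename if filename is not None else str(uuid.uuid4()))
    try:
        yield path
    finally:
        path.unlink(missing_ok=True)
```

Moving cleanup into the fixture removes one indentation level per test and leaves deletion to pytest's `tmp_path` retention policy.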
