diff --git a/docs/examples/notebooks/ImpactPlot.ipynb b/docs/examples/notebooks/ImpactPlot.ipynb
index aa045c124f..f5ee7fc8fa 100644
--- a/docs/examples/notebooks/ImpactPlot.ipynb
+++ b/docs/examples/notebooks/ImpactPlot.ipynb
@@ -76,14 +76,15 @@
    "outputs": [],
    "source": [
     "def make_model(channel_list):\n",
-    "    spec = json.load(\n",
-    "        open(\"1Lbb-probability-models/RegionA/BkgOnly.json\", encoding=\"utf-8\")\n",
-    "    )\n",
-    "    patchset = pyhf.PatchSet(\n",
-    "        json.load(\n",
-    "            open(\"1Lbb-probability-models/RegionA/patchset.json\", encoding=\"utf-8\")\n",
-    "        )\n",
-    "    )\n",
+    "    with open(\n",
+    "        \"1Lbb-probability-models/RegionA/BkgOnly.json\", encoding=\"utf-8\"\n",
+    "    ) as spec_file:\n",
+    "        spec = json.load(spec_file)\n",
+    "    with open(\n",
+    "        \"1Lbb-probability-models/RegionA/patchset.json\", encoding=\"utf-8\"\n",
+    "    ) as patchset_file:\n",
+    "        patchset = pyhf.PatchSet(json.load(patchset_file))\n",
+    "\n",
     "    patch = patchset[\"sbottom_750_745_60\"]\n",
     "    spec = jsonpatch.apply_patch(spec, patch)\n",
     "    spec[\"channels\"] = [c for c in spec[\"channels\"] if c[\"name\"] in channel_list]\n",
diff --git a/docs/examples/notebooks/multiBinPois.ipynb b/docs/examples/notebooks/multiBinPois.ipynb
index ec2a0c6b59..6e9073d193 100644
--- a/docs/examples/notebooks/multiBinPois.ipynb
+++ b/docs/examples/notebooks/multiBinPois.ipynb
@@ -85,7 +85,8 @@
     }
    ],
    "source": [
-    "source = json.load(open(validation_datadir + \"/1bin_example1.json\", encoding=\"utf-8\"))\n",
+    "with open(validation_datadir + \"/1bin_example1.json\", encoding=\"utf-8\") as source_file:\n",
+    "    source = json.load(source_file)\n",
     "model = uncorrelated_background(\n",
     "    source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr']\n",
     ")\n",
diff --git a/docs/examples/notebooks/multichannel-coupled-histo.ipynb b/docs/examples/notebooks/multichannel-coupled-histo.ipynb
index 3c565fd93f..16c13a77c2 100644
--- a/docs/examples/notebooks/multichannel-coupled-histo.ipynb
+++ b/docs/examples/notebooks/multichannel-coupled-histo.ipynb
@@ -167,8 +167,8 @@
    "source": [
     "with open(\n",
     "    validation_datadir + \"/2bin_2channel_coupledhisto.json\", encoding=\"utf-8\"\n",
-    ") as spec:\n",
-    "    source = json.load(spec)\n",
+    ") as source_file:\n",
+    "    source = json.load(source_file)\n",
     "\n",
     "data, pdf = prep_data(source[\"channels\"])\n",
     "\n",
diff --git a/docs/examples/notebooks/pullplot.ipynb b/docs/examples/notebooks/pullplot.ipynb
index a259ba2cc9..ae2b78a284 100644
--- a/docs/examples/notebooks/pullplot.ipynb
+++ b/docs/examples/notebooks/pullplot.ipynb
@@ -76,6 +76,7 @@
     "        \"1Lbb-probability-models/RegionA/BkgOnly.json\", encoding=\"utf-8\"\n",
     "    ) as spec_file:\n",
     "        spec = json.load(spec_file)\n",
+    "\n",
     "    spec[\"channels\"] = [c for c in spec[\"channels\"] if c[\"name\"] in channel_list]\n",
     "    spec[\"measurements\"][0][\"config\"][\"poi\"] = \"lumi\"\n",
     "\n",
diff --git a/pyproject.toml b/pyproject.toml
index 07e289515f..d5e5fd4fde 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -78,7 +78,6 @@ filterwarnings = [
     'ignore:distutils Version classes are deprecated:DeprecationWarning',  # tensorflow-probability
     'ignore:the `interpolation=` argument to percentile was renamed to `method=`, which has additional options:DeprecationWarning',  # Issue #1772
     "ignore:The interpolation= argument to 'quantile' is deprecated. Use 'method=' instead:DeprecationWarning",  # Issue #1772
-    'ignore: Exception ignored in:pytest.PytestUnraisableExceptionWarning',  #FIXME: Exception ignored in: <_io.FileIO [closed]>
     'ignore:invalid value encountered in (true_)?divide:RuntimeWarning',  #FIXME
     'ignore:invalid value encountered in add:RuntimeWarning',  #FIXME
     "ignore:In future, it will be an error for 'np.bool_' scalars to be interpreted as an index:DeprecationWarning",  #FIXME: tests/test_tensor.py::test_pdf_eval[pytorch]
diff --git a/tests/contrib/test_viz.py b/tests/contrib/test_viz.py
index 7a49eddb49..9193947abd 100644
--- a/tests/contrib/test_viz.py
+++ b/tests/contrib/test_viz.py
@@ -14,7 +14,8 @@


 def test_brazil_band_collection(datadir):
-    data = json.load(datadir.joinpath("hypotest_results.json").open(encoding="utf-8"))
+    with open(datadir.join("hypotest_results.json"), encoding="utf-8") as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -32,9 +33,10 @@ def test_brazil_band_collection(datadir):
     assert brazil_band_collection.clb is None
     assert brazil_band_collection.axes == ax

-    data = json.load(
-        datadir.joinpath("tail_probs_hypotest_results.json").open(encoding="utf-8")
-    )
+    with open(
+        datadir.join("tail_probs_hypotest_results.json"), encoding="utf-8"
+    ) as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -55,7 +57,8 @@ def test_brazil_band_collection(datadir):

 @pytest.mark.mpl_image_compare
 def test_plot_results(datadir):
-    data = json.load(datadir.joinpath("hypotest_results.json").open(encoding="utf-8"))
+    with open(datadir.join("hypotest_results.json"), encoding="utf-8") as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -73,7 +76,8 @@ def test_plot_results(datadir):
     reason="baseline image generated with matplotlib v3.6.0 which is Python 3.8+",
 )
 def test_plot_results_no_axis(datadir):
-    data = json.load(datadir.joinpath("hypotest_results.json").open(encoding="utf-8"))
+    with open(datadir.join("hypotest_results.json"), encoding="utf-8") as data_file:
+        data = json.load(data_file)

     matplotlib.use("agg")  # Use non-gui backend
     fig, ax = plt.subplots()
@@ -85,9 +89,10 @@ def test_plot_results_no_axis(datadir):

 @pytest.mark.mpl_image_compare
 def test_plot_results_components(datadir):
-    data = json.load(
-        datadir.joinpath("tail_probs_hypotest_results.json").open(encoding="utf-8")
-    )
+    with open(
+        datadir.join("tail_probs_hypotest_results.json"), encoding="utf-8"
+    ) as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -99,9 +104,10 @@ def test_plot_results_components(datadir):

 @pytest.mark.mpl_image_compare
 def test_plot_results_components_no_clb(datadir):
-    data = json.load(
-        datadir.joinpath("tail_probs_hypotest_results.json").open(encoding="utf-8")
-    )
+    with open(
+        datadir.join("tail_probs_hypotest_results.json"), encoding="utf-8"
+    ) as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -121,9 +127,10 @@ def test_plot_results_components_no_clb(datadir):

 @pytest.mark.mpl_image_compare
 def test_plot_results_components_no_clsb(datadir):
-    data = json.load(
-        datadir.joinpath("tail_probs_hypotest_results.json").open(encoding="utf-8")
-    )
+    with open(
+        datadir.join("tail_probs_hypotest_results.json"), encoding="utf-8"
+    ) as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -143,9 +150,10 @@ def test_plot_results_components_no_clsb(datadir):

 @pytest.mark.mpl_image_compare
 def test_plot_results_components_no_cls(datadir):
-    data = json.load(
-        datadir.joinpath("tail_probs_hypotest_results.json").open(encoding="utf-8")
-    )
+    with open(
+        datadir.join("tail_probs_hypotest_results.json"), encoding="utf-8"
+    ) as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
@@ -173,7 +181,8 @@ def test_plot_results_components_data_structure(datadir):
     """
     test results should have format of: [CLs_obs, [CLsb, CLb], [CLs_exp band]]
     """
-    data = json.load(datadir.joinpath("hypotest_results.json").open(encoding="utf-8"))
+    with open(datadir.join("hypotest_results.json"), encoding="utf-8") as data_file:
+        data = json.load(data_file)

     fig = Figure()
     ax = fig.subplots()
diff --git a/tests/test_export.py b/tests/test_export.py
index bba0aa224e..bfc47b0282 100644
--- a/tests/test_export.py
+++ b/tests/test_export.py
@@ -55,8 +55,8 @@ def spec_staterror():
 def spec_histosys():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -93,8 +93,8 @@ def spec_histosys():
 def spec_normsys():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -128,8 +128,8 @@ def spec_normsys():
 def spec_shapesys():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -159,8 +159,8 @@ def spec_shapesys():
 def spec_shapefactor():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
diff --git a/tests/test_modifiers.py b/tests/test_modifiers.py
index 6432e75e3b..df837ddb0d 100644
--- a/tests/test_modifiers.py
+++ b/tests/test_modifiers.py
@@ -189,8 +189,8 @@ def test_invalid_bin_wise_modifier(datadir, patch_file):

     assert pyhf.Model(spec)

-    with open(datadir.joinpath(patch_file), encoding="utf-8") as spec_file:
-        patch = JsonPatch.from_string(spec_file.read())
+    with open(datadir.joinpath(patch_file), encoding="utf-8") as read_file:
+        patch = JsonPatch.from_string(read_file.read())

     bad_spec = patch.apply(spec)
     with pytest.raises(pyhf.exceptions.InvalidModifier):
diff --git a/tests/test_patchset.py b/tests/test_patchset.py
index 64eb392350..b7abca2c30 100644
--- a/tests/test_patchset.py
+++ b/tests/test_patchset.py
@@ -33,8 +33,8 @@ def patch():
     ],
 )
 def test_patchset_invalid_spec(datadir, patchset_file):
-    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file:
-        patchsetspec = json.load(patch_file)
+    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patchset_spec_file:
+        patchsetspec = json.load(patchset_spec_file)
     with pytest.raises(pyhf.exceptions.InvalidSpecification):
         pyhf.PatchSet(patchsetspec)

@@ -48,8 +48,8 @@ def test_patchset_invalid_spec(datadir, patchset_file):
     ],
 )
 def test_patchset_bad(datadir, patchset_file):
-    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file:
-        patchsetspec = json.load(patch_file)
+    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patchset_spec_file:
+        patchsetspec = json.load(patchset_spec_file)
     with pytest.raises(pyhf.exceptions.InvalidPatchSet):
         pyhf.PatchSet(patchsetspec)

@@ -102,18 +102,18 @@ def test_patchset_repr(patchset):
 def test_patchset_verify(datadir):
     with open(
         datadir.joinpath("example_patchset.json"), encoding="utf-8"
-    ) as patch_file:
-        patchset = pyhf.PatchSet(json.load(patch_file))
-    with open(datadir.joinpath("example_bkgonly.json"), encoding="utf-8") as ws_file:
-        ws = pyhf.Workspace(json.load(ws_file))
+    ) as patchset_file:
+        patchset = pyhf.PatchSet(json.load(patchset_file))
+    with open(datadir.joinpath("example_bkgonly.json"), encoding="utf-8") as bkg_file:
+        ws = pyhf.Workspace(json.load(bkg_file))

     assert patchset.verify(ws) is None


 def test_patchset_verify_failure(datadir):
     with open(
         datadir.joinpath("example_patchset.json"), encoding="utf-8"
-    ) as patch_file:
-        patchset = pyhf.PatchSet(json.load(patch_file))
+    ) as patchset_file:
+        patchset = pyhf.PatchSet(json.load(patchset_file))
     with pytest.raises(pyhf.exceptions.PatchSetVerificationError):
         assert patchset.verify({})

@@ -121,10 +121,10 @@ def test_patchset_verify_failure(datadir):
 def test_patchset_apply(datadir):
     with open(
         datadir.joinpath("example_patchset.json"), encoding="utf-8"
-    ) as patch_file:
-        patchset = pyhf.PatchSet(json.load(patch_file))
-    with open(datadir.joinpath("example_bkgonly.json"), encoding="utf-8") as ws_file:
-        ws = pyhf.Workspace(json.load(ws_file))
+    ) as patchset_file:
+        patchset = pyhf.PatchSet(json.load(patchset_file))
+    with open(datadir.joinpath("example_bkgonly.json"), encoding="utf-8") as bkg_file:
+        ws = pyhf.Workspace(json.load(bkg_file))
     with mock.patch('pyhf.patchset.PatchSet.verify') as m:
         assert m.call_count == 0
         assert patchset.apply(ws, 'patch_channel1_signal_syst1')
@@ -149,9 +149,10 @@ def test_patch_equality(patch):

 def test_patchset_get_string_values(datadir):
     with open(
-        datadir.joinpath('patchset_good_stringvalues.json'), encoding="utf-8"
-    ) as patch_file:
-        patchset = pyhf.PatchSet(json.load(patch_file))
+        datadir.joinpath("patchset_good_stringvalues.json"), encoding="utf-8"
+    ) as patchset_file:
+        patchset = pyhf.PatchSet(json.load(patchset_file))
+
     assert patchset["Gtt_2100_5000_800"]
     assert patchset["Gbb_2200_5000_800"]
     assert patchset[[2100, 800, "Gtt"]]
diff --git a/tests/test_pdf.py b/tests/test_pdf.py
index 045575d725..d0b0808913 100644
--- a/tests/test_pdf.py
+++ b/tests/test_pdf.py
@@ -356,8 +356,8 @@ def test_pdf_integration_shapesys_zeros(backend):
 def test_pdf_integration_histosys(backend):
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -438,8 +438,8 @@ def test_pdf_integration_histosys(backend):
 def test_pdf_integration_normsys(backend):
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -502,8 +502,8 @@ def test_pdf_integration_normsys(backend):
 def test_pdf_integration_shapesys(backend):
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -628,8 +628,8 @@ def test_invalid_modifier_name_resuse():
 def test_override_paramset_defaults():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
@@ -664,8 +664,8 @@ def test_override_paramset_defaults():
 def test_override_paramsets_incorrect_num_parameters():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
-    ) as spec_file:
-        source = json.load(spec_file)
+    ) as source_file:
+        source = json.load(source_file)
     spec = {
         'channels': [
             {
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 384fcf0276..7d86c34812 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -575,9 +575,9 @@ def test_jsonpatch_fail(patch):

 @pytest.mark.parametrize('patchset_file', ['patchset_good.json'])
 def test_patchset(datadir, patchset_file):
-    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file:
-        patchset = json.load(patch_file)
-    pyhf.schema.validate(patchset, 'patchset.json')
+    with open(datadir.joinpath(patchset_file), encoding="utf-8") as read_file:
+        patchset = json.load(read_file)
+    pyhf.schema.validate(patchset, "patchset.json")


 @pytest.mark.parametrize(
@@ -596,10 +596,10 @@ def test_patchset(datadir, patchset_file):
     ],
 )
 def test_patchset_fail(datadir, patchset_file):
-    with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file:
-        patchset = json.load(patch_file)
+    with open(datadir.joinpath(patchset_file), encoding="utf-8") as read_file:
+        patchset = json.load(read_file)
     with pytest.raises(pyhf.exceptions.InvalidSpecification):
-        pyhf.schema.validate(patchset, 'patchset.json')
+        pyhf.schema.validate(patchset, "patchset.json")


 def test_defs_always_cached(
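For context: the recurring change in this diff replaces bare json.load(open(...)) calls with "with open(...)" context managers, so file handles are closed deterministically instead of whenever the object is finalized; this is presumably what lets the pytest.PytestUnraisableExceptionWarning filter for "<_io.FileIO [closed]>" be dropped from pyproject.toml. A minimal, self-contained sketch of the before/after pattern follows; the spec.json written to a temporary directory is a stand-in, not a pyhf data file.

import json
import tempfile
from pathlib import Path

# Stand-in JSON file so the example runs anywhere (not a pyhf data file).
workdir = Path(tempfile.mkdtemp())
spec_path = workdir / "spec.json"
spec_path.write_text(json.dumps({"channels": []}), encoding="utf-8")

# Old pattern: the file object returned by open() is never explicitly closed.
# Closing is left to the garbage collector, which pytest can surface as
# "Exception ignored in: <_io.FileIO [closed]>" via PytestUnraisableExceptionWarning.
spec = json.load(open(spec_path, encoding="utf-8"))

# New pattern: the context manager closes the handle as soon as the block exits.
with open(spec_path, encoding="utf-8") as spec_file:
    spec = json.load(spec_file)

assert spec == {"channels": []}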