/
muscan.py
executable file
·160 lines (133 loc) · 4.48 KB
/
muscan.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
#!/usr/bin/env python3
# muscan.py — scan the signal-strength parameter (mu) with pyhf and report
# observed/expected CLs upper limits for a signal patched onto a
# background-only likelihood.
from __future__ import annotations
import argparse
import json
import os
import jsonpatch
import numpy as np
# Point matplotlib at a writable config directory under the CWD (useful when
# $HOME is read-only, e.g. on batch nodes).  This must be set BEFORE
# matplotlib is imported, hence the mid-import assignment.
os.environ["MPLCONFIGDIR"] = os.getcwd() + "/configs/"
import matplotlib.pyplot as plt # noqa: E402
def _build_cli() -> argparse.ArgumentParser:
    """Construct the command-line interface for the mu-scan script."""
    cli = argparse.ArgumentParser(description="Process some arguments.")
    cli.add_argument("-s", "--signal", help="name of analysis")
    cli.add_argument(
        "-b", "--background", help="path to JSON background-only file"
    )
    cli.add_argument(
        "-n",
        "--tag",
        default="SUSY_13_Higgsino_101_isrinc_J125",
        help="tag for data files",
    )
    cli.add_argument(
        "-c",
        "--cpu",
        action="store_true",
        help="do not use a GPU even if available. Sets backend to NumPy instead.",
    )
    cli.add_argument(
        "-B",
        "--backend",
        default="jax",
        help="choose backend for pyhf. Jax will be used if running with a GPU.",
    )
    cli.add_argument(
        "-o",
        "--optimizer",
        default="scipy",
        help="choose optimizer. 'scipy', 'minuit' ....",
    )
    cli.add_argument(
        "-l",
        "--likelihood",
        default=None,
        help="pass in full likelihood, with both signal and background included.",
    )
    cli.add_argument(
        "-S",
        "--skipfit",
        action="store_true",
        help="only manage the json stuff, don't actually perform the fit.",
    )
    cli.add_argument(
        "-p", "--plot", action="store_true", help="make a plot of the CLs"
    )
    return cli


parser = _build_cli()
args = parser.parse_args()
# JAX chooses its device when first imported, and pyhf imports it below —
# so the CPU pin must go into the environment before `import pyhf`.
if args.cpu and args.backend == "jax":
    os.environ["JAX_PLATFORM_NAME"] = "cpu"
import pyhf # noqa: E402
from pyhf.contrib.viz import brazil # noqa: E402
tolerance = 1e-2 # 0.01 works most of the time, monojet uses 0.001
optimizer = args.optimizer
if optimizer == "minuit":
    # replace the plain string with a configured minuit optimizer object
    optimizer = pyhf.optimize.minuit_optimizer(tolerance=tolerance)
pyhf.set_backend(args.backend, optimizer)
if not args.cpu:
    # useful when running on a machine with a GPU
    # NOTE: this forces the jax backend regardless of --backend, matching the
    # documented "--backend" help text ("Jax will be used if running with a GPU").
    pyhf.set_backend("jax", optimizer)
# We can either pass in a full likelihood with signal+background already there, or pass in
# a background likelihood and a signal patch file. If we do use a signal patch file, then
# write the full likelihood out to a file for reference later.
spec = None
if args.likelihood is None:
    # Patch the signal onto the background-only workspace (JSON-patch, RFC 6902).
    with open(args.background) as f:
        bgonly = json.load(f)
    with open(args.signal) as f:
        signal = json.load(f)
    spec = jsonpatch.apply_patch(bgonly, signal)
    # Name the combined likelihood file after the signal patch file.
    ana = args.signal.replace("_patch.json", "")
    with open(ana + ".json", "w") as f:
        # json.dump streams straight to the file (same output as dumps+write)
        json.dump(spec, f, indent=4, sort_keys=True)
else:
    # Full likelihood provided directly.  Use a context manager so the handle
    # is closed promptly (the original json.load(open(...)) leaked it).
    with open(args.likelihood) as f:
        spec = json.load(f)
    ana = args.likelihood.replace(".json", "")
# we may only want to write out the json likelihood file.
if args.skipfit:
    # raise SystemExit rather than the site-module exit() builtin, so the
    # script also works under `python -S` or a frozen interpreter
    raise SystemExit(0)
# Build the statistical model from the workspace spec and run the mu scan.
ws = pyhf.Workspace(spec)
pdf = ws.model()
observations = ws.data(pdf)
poi_values = np.linspace(0.1, 2, 10)  # mu values scanned for the upper limit
init_pars = pdf.config.suggested_init()
init_pars[pdf.config.poi_index] = 1.0  # start the fit at mu = 1
# Observed and expected upper limits on mu at the CLs level given by `level`
# (0.05 -> 95% CL); return_results also hands back the per-point (scan, results).
obs_limit, exp_limits, (scan, results) = pyhf.infer.intervals.upper_limits.upper_limit(
    observations, pdf, poi_values, level=0.05, return_results=True
)
# exp_limits is ordered [-2sigma, -1sigma, median, +1sigma, +2sigma],
# as the index usage below shows.
print(f"Observed limit: {obs_limit}")
print("Expected limit: %5.3f" % exp_limits[2])
print(" -1 sigma: %5.3f" % exp_limits[1])
print(" +1 sigma: %5.3f" % exp_limits[3])
print(" -2 sigma: %5.3f" % exp_limits[0])
print(" +2 sigma: %5.3f" % exp_limits[4])
# getting some values at mu=1.
# hypotest at fixed mu=1 returns (observed CLs, expected CLs band) because of
# return_expected_set=True; "qtilde" is the bounded-POI test statistic.
cls_results = pyhf.infer.hypotest(
    1.0,
    observations,
    pdf,
    init_pars=init_pars,
    test_stat="qtilde",
    return_expected_set=True,
)
if args.plot:
    print("making plot")
    # Brazil-band plot of CLs vs mu over the scanned POI values.
    fig, ax = plt.subplots()
    brazil.plot_results(poi_values, results, ax=ax)
    fig.savefig(f"muscan_{args.tag}__{ana}.pdf")
def _summary(observed, band):
    """Return an {observed, expected, +-1/2 sigma} dict from a 5-entry band.

    `band` is ordered [-2sigma, -1sigma, median, +1sigma, +2sigma]; key
    insertion order matches the rest of the output file's convention.
    """
    m2s, m1s, median, p1s, p2s = (float(v) for v in band)
    return {
        "observed": float(observed),
        "expected": median,
        "p1sigma": p1s,
        "m1sigma": m1s,
        "p2sigma": p2s,
        "m2sigma": m2s,
    }


# Assemble everything JSON-serializable: limits on mu, CLs values at mu=1,
# the scanned POI points, and the per-point (observed, expected) results.
jsonoutput = {
    "mu": _summary(obs_limit, exp_limits),
    "cls": _summary(cls_results[0], cls_results[1]),
    "scan": pyhf.tensorlib.tolist(scan),
    "results": [
        (pyhf.tensorlib.tolist(observed), pyhf.tensorlib.tolist(expected))
        for observed, expected in results
    ],
}
# make a json output file
with open("muscan_results.json", "w") as jsonoutputfile:
    json.dump(jsonoutput, jsonoutputfile, indent=4)