Merge pull request #79 from gaia-verna/pyirf
Towards using Pyirf
HealthyPear committed Dec 11, 2020
2 parents d7a9b90 + 9f67772 commit f0d73e6
Showing 6 changed files with 80 additions and 98 deletions.
4 changes: 2 additions & 2 deletions benchmarks/DL3/benchmarks_DL3_IRFs_and_sensitivity.ipynb
@@ -253,7 +253,7 @@
"\n",
"def plot_energy_response(ax, rmf, label, energy_range, **kwargs):\n",
" \"\"\"Simple function to plot the energy resolution (Gammapy)\"\"\"\n",
" energy = rmf.e_true.nodes\n",
" energy = rmf.e_true.center\n",
" bias = rmf.get_bias(energy)\n",
" resol = rmf.get_resolution(energy)\n",
" \n",
@@ -512,7 +512,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.8"
"version": "3.7.4"
}
},
"nbformat": 4,
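
The notebook change above swaps rmf.e_true.nodes for rmf.e_true.center, following the renaming of the bin-center accessor in Gammapy's axis API. As a rough illustration of the kind of plot the notebook builds, here is a minimal, library-agnostic sketch using only NumPy and Matplotlib; the energy, bias and resolution arrays are made-up stand-ins for the values a real energy-dispersion object would provide.

import numpy as np
import matplotlib.pyplot as plt

def plot_energy_response(ax, energy, bias, resolution, label, energy_range, **kwargs):
    """Plot energy bias and resolution versus true energy (illustrative sketch)."""
    ax.plot(energy, bias, linestyle="--", label=label + " (bias)", **kwargs)
    ax.plot(energy, resolution, linestyle="-", label=label + " (resolution)", **kwargs)
    ax.set_xscale("log")
    ax.set_xlim(energy_range)
    ax.set_xlabel("True energy [TeV]")
    ax.set_ylabel("Energy response")
    ax.legend()
    return ax

# Toy input, only to exercise the function
energy = np.logspace(-2, 2, 30)             # TeV
bias = np.full_like(energy, 0.05)           # flat 5% bias
resolution = 0.05 + 0.3 / np.sqrt(energy)   # improves with energy
fig, ax = plt.subplots()
plot_energy_response(ax, energy, bias, resolution, "toy IRF", (1e-2, 1e2))
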
26 changes: 16 additions & 10 deletions protopipe/aux/example_config_files/performance.yaml
@@ -1,3 +1,4 @@

general:
# Directory with input data file
# [...] = your analysis local full path OUTSIDE the Vagrant box
@@ -18,7 +19,10 @@ analysis:

# Normalisation between ON and OFF regions
alpha: 0.2


# Radius to use for calculating bg rate
max_bg_radius: 1.

# Minimimal significance
min_sigma: 5

@@ -42,30 +46,31 @@ analysis:

particle_information:
gamma:
n_events_per_file: 1000000 # 10**5 * 10
num_use: 10
num_showers: 10**5
e_min: 0.003
e_max: 330
gen_radius: 1400
diff_cone: 0
gen_gamma: 2
gen_gamma: -2
diff_cone: 0

proton:
n_events_per_file: 4000000 # 2 * 10**5 * 20
num_use: 20
num_showers: 2 * 10**5
e_min: 0.004
e_max: 600
gen_radius: 1900
gen_gamma: -2
diff_cone: 10
gen_gamma: 2
offset_cut: 1.

electron:
n_events_per_file: 2000000 # 10**5 * 20
num_use: 20
num_showers: 10**5
e_min: 0.003
e_max: 330
gen_radius: 1900
gen_gamma: -2
diff_cone: 10
gen_gamma: 2
offset_cut: 1.

column_definition:
# Column name for true energy
@@ -77,3 +82,4 @@ column_definition:
name: 'gammaness'
range: [0, 1]
angular_distance_to_the_src: 'xi'
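
In the updated performance.yaml, the per-particle block no longer stores n_events_per_file directly: the total is reconstructed from num_showers (showers generated per file) and num_use (times each shower is reused), and gen_gamma now carries the sign of the generation spectral index. Below is a minimal sketch of reading such a block, assuming plain integer values (the example file writes expressions like 10**5, which YAML loads as strings).

import yaml

# Stand-in for the gamma block of performance.yaml, with numeric literals
# instead of the "10**5"-style expressions used in the example file.
config_text = """
particle_information:
  gamma:
    num_use: 10          # times each simulated shower is reused
    num_showers: 100000  # showers generated per input file
    e_min: 0.003         # TeV
    e_max: 330           # TeV
    gen_radius: 1400     # m
    gen_gamma: -2        # generation spectral index, now signed
    diff_cone: 0         # point-like gammas
"""

gamma = yaml.safe_load(config_text)["particle_information"]["gamma"]
events_per_file = gamma["num_showers"] * gamma["num_use"]
print(events_per_file)  # 1000000, the value the removed n_events_per_file comment quoted
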

56 changes: 12 additions & 44 deletions protopipe/perf/cut_optimisation.py
@@ -14,10 +14,8 @@
class CutsApplicator(object):
"""
Apply best cut and angular cut to events.
Apply cuts to gamma, proton and electrons that will be further used for
performance estimation (irf, sensitivity, etc.).
Parameters
----------
config: `dict`
@@ -57,7 +55,6 @@ def apply_cuts(self):
def apply_cuts_on_data(self, data):
"""
Flag particle passing angular cut and the best cutoff
Parameters
----------
data: `pandas.DataFrame`
@@ -128,7 +125,6 @@ def apply_cuts_on_data(self, data):
class CutsDiagnostic(object):
"""
Class used to get some diagnostic related to the optimal working point.
Parameters
----------
config: `dict`
@@ -376,7 +372,6 @@ class CutsOptimisation(object):
"""
Class used to find best cutoff to obtain minimal
sensitivity in a given amount of time.
Parameters
----------
config: `dict`
@@ -393,7 +388,6 @@ def __init__(self, config, evt_dict, verbose_level=0):
def weight_events(self, model_dict, colname_mc_energy):
"""
Add a weight column to the files, in order to scale simulated data to reality.
Parameters
----------
model_dict: dict
@@ -417,20 +411,13 @@ def compute_weight(self, energy, particle, model):

area_simu = (np.pi * conf_part["gen_radius"] ** 2) * u.Unit("m2")

omega_simu = (
2 * np.pi * (1 - np.cos(conf_part["diff_cone"] * np.pi / 180.0)) * u.sr
)
if particle in "gamma": # Gamma are point-like
omega_simu = 1.0

nsimu = conf_part["n_simulated"]
index_simu = conf_part["gen_gamma"]
emin = conf_part["e_min"] * u.TeV
emax = conf_part["e_max"] * u.TeV
amplitude = 1.0 * u.Unit("1 / (cm2 s TeV)")
pwl_integral = PowerLaw(index=index_simu, amplitude=amplitude).integral(
emin=emin, emax=emax
)
nsimu = conf_part['n_simulated']
index_simu = np.absolute(conf_part['gen_gamma'])
emin = conf_part['e_min'] * u.TeV
emax = conf_part['e_max'] * u.TeV
amplitude = 1. * u.Unit('1 / (cm2 s TeV)')
pwl_integral = PowerLaw(
index=index_simu, amplitude=amplitude).integral(emin=emin, emax=emax)

tsimu = nsimu / (area_simu * omega_simu * pwl_integral)
tobs = self.config["analysis"]["obs_time"]["value"] * u.Unit(
@@ -456,7 +443,6 @@ def find_best_cutoff(self, energy_values, angular_values):
of energy and theta square cut. Correct the number of events
according to the ON region which correspond to the angular cut applied to
the gamma-ray events.
Parameters
----------
energy_values: `astropy.Quantity`
@@ -525,30 +511,12 @@ def find_best_cutoff(self, energy_values, angular_values):
sel_g = g.query(th_query).copy()

# Correct number of background due to acceptance
acceptance_g = 2 * np.pi * (1 - np.cos(th_cut.to("rad").value))
acceptance_p = (
2
* np.pi
* (
1
- np.cos(
self.config["particle_information"]["proton"]["offset_cut"]
* u.deg.to("rad")
)
)
acceptance_g = 2 * np.pi * (1 - np.cos(th_cut.to('rad').value))
acceptance_p = 2 * np.pi * (
1 - np.cos(self.config["analysis"]["max_bg_radius"] * u.deg.to('rad'))
)
acceptance_e = (
2
* np.pi
* (
1
- np.cos(
self.config["particle_information"]["electron"][
"offset_cut"
]
* u.deg.to("rad")
)
)
acceptance_e = 2 * np.pi * (
1 - np.cos(self.config["analysis"]["max_bg_radius"] * u.deg.to('rad'))
)

# Add corrected weight taking into angular cuts applied to gamma-rays
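
Two things change in cut_optimisation.py: compute_weight now takes the absolute value of gen_gamma (stored signed in the new config), and the proton/electron acceptance correction is driven by the single analysis.max_bg_radius setting rather than per-particle offset_cut values. Below is a rough sketch of the arithmetic involved, with a hand-written power-law integral standing in for the Gammapy PowerLaw model used in the code; the numbers are taken from the example gamma configuration.

import numpy as np
import astropy.units as u

def powerlaw_integral(index, amplitude, e_ref, emin, emax):
    """Integral of amplitude * (E / e_ref)**(-index) between emin and emax (index != 1)."""
    prefactor = amplitude * e_ref / (1.0 - index)
    return prefactor * ((emax / e_ref) ** (1.0 - index) - (emin / e_ref) ** (1.0 - index))

gen_gamma = -2                        # signed index, as stored in the new config
index_simu = np.absolute(gen_gamma)   # the index the generator actually used
area_simu = (np.pi * 1400.0 ** 2) * u.Unit("m2")
omega_simu = 1.0                      # point-like gammas; diffuse species use 2*pi*(1 - cos(diff_cone))
nsimu = 10 * 100000 * 10              # n_files * num_showers * num_use (toy numbers)

flux_integral = powerlaw_integral(
    index_simu, 1.0 * u.Unit("1 / (cm2 s TeV)"), 1.0 * u.TeV, 0.003 * u.TeV, 330.0 * u.TeV
)
# Equivalent observation time represented by the simulated sample
tsimu = (nsimu / (area_simu * omega_simu * flux_integral)).to("s")

# Solid-angle acceptance, now computed from a single max_bg_radius for protons and electrons
max_bg_radius = 1.0 * u.deg
acceptance_bkg = 2 * np.pi * (1 - np.cos(max_bg_radius.to_value("rad")))
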
13 changes: 5 additions & 8 deletions protopipe/perf/irf_maker.py
@@ -16,7 +16,6 @@
class BkgData(object):
"""
Class storing background data in a NDDataArray object.
It's a bit hacky, but gammapy sensitivity estimator does not take individual IRF,
it takes a CTAPerf object. So i'm emulating that... We need a bkg format!!!
"""
@@ -44,7 +43,6 @@ def __init__(self, bkg, aeff, rmf):
class SensitivityMaker(object):
"""
Class which estimate sensitivity with IRF
Parameters
----------
config: `dict`
@@ -158,7 +156,6 @@ def add_sensitivity_to_irf(self):
class IrfMaker(object):
"""
Class building IRF for point-like analysis.
Parameters
----------
config: `dict`
@@ -293,7 +290,7 @@ def make_bkg_rate(self):
* (
1
- np.cos(
self.config["particle_information"]["proton"]["offset_cut"]
self.config["analysis"]["max_bg_radius"]
* u.deg.to("rad")
)
)
@@ -304,7 +301,7 @@
* (
1
- np.cos(
self.config["particle_information"]["electron"]["offset_cut"]
self.config["analysis"]["max_bg_radius"]
* u.deg.to("rad")
)
)
@@ -377,11 +374,11 @@ def make_effective_area(

# Get simulation infos
cfg_particule = self.config["particle_information"]["gamma"]
simu_index = cfg_particule["gen_gamma"]
simu_index = np.absolute(cfg_particule["gen_gamma"])
index = 1.0 - simu_index # for futur integration
nsimu_tot = float(cfg_particule["n_files"]) * float(
cfg_particule["n_events_per_file"]
)
cfg_particule["num_showers"]) * float(
cfg_particule["num_use"])
emin_simu = cfg_particule["e_min"]
emax_simu = cfg_particule["e_max"]
area_simu = (np.pi * cfg_particule["gen_radius"] ** 2) * u.Unit("m2")
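
In make_effective_area the total simulated statistics is now num_showers * num_use per file (times n_files), and the spectral index is |gen_gamma|. Here is a compact sketch of the resulting effective-area estimate, assuming the simulated events per energy bin follow the E**-index generation spectrum; the helper and its toy inputs are illustrative, not the IrfMaker implementation itself.

import numpy as np

def effective_area(e_bins, n_selected, n_simu_tot, simu_index, emin, emax, area_simu_m2):
    """A_eff per bin = A_simu * N_selected / N_simulated, where N_simulated per bin
    comes from the E**-simu_index generation spectrum (valid for simu_index != 1)."""
    index = 1.0 - simu_index  # exponent left after integrating E**-simu_index
    norm = n_simu_tot / (emax ** index - emin ** index)
    n_simu_per_bin = norm * (e_bins[1:] ** index - e_bins[:-1] ** index)
    return area_simu_m2 * n_selected / n_simu_per_bin

# Toy usage with the gamma settings from the example configuration
e_bins = np.logspace(np.log10(0.003), np.log10(330.0), 21)  # TeV
n_selected = np.full(20, 500.0)                             # made-up counts passing all cuts
aeff = effective_area(
    e_bins,
    n_selected,
    n_simu_tot=10 * 100000 * 10,      # n_files * num_showers * num_use
    simu_index=np.absolute(-2),       # |gen_gamma|
    emin=0.003,
    emax=330.0,
    area_simu_m2=np.pi * 1400.0 ** 2,
)
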
15 changes: 6 additions & 9 deletions protopipe/scripts/make_performance.py
@@ -83,19 +83,16 @@ def main():
# Apply offset cut to proton and electron
for particle in ["electron", "proton"]:
# print('Initial stat: {} {}'.format(len(evt_dict[particle]), particle))
evt_dict[particle] = evt_dict[particle].query(
"offset <= {}".format(cfg["particle_information"][particle]["offset_cut"])
evt_dict[particle] = evt_dict[particle].query('offset <= {}'.format(
cfg['analysis']['max_bg_radius'])
)

# Add required data in configuration file for future computation
for particle in particles:
cfg["particle_information"][particle]["n_files"] = len(
np.unique(evt_dict[particle]["obs_id"])
)
cfg["particle_information"][particle]["n_simulated"] = (
cfg["particle_information"][particle]["n_files"]
* cfg["particle_information"][particle]["n_events_per_file"]
)
cfg['particle_information'][particle]['n_files'] = \
len(np.unique(evt_dict[particle]['obs_id']))
cfg['particle_information'][particle]['n_simulated'] = \
cfg['particle_information'][particle]['n_files'] * cfg['particle_information'][particle]['num_showers'] * cfg['particle_information'][particle]['num_use']

# Define model for the particles
model_dict = {
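
make_performance.py now applies the offset cut from analysis.max_bg_radius and rebuilds n_simulated as n_files * num_showers * num_use, with n_files counted from the unique obs_id values in each particle table. A small self-contained sketch of both steps on a toy pandas table follows (the column names match the diff, the numbers are invented).

import numpy as np
import pandas as pd

# Toy event table standing in for one entry of evt_dict
evt = pd.DataFrame(
    {
        "obs_id": [1, 1, 2, 3, 3, 3],
        "offset": [0.3, 1.4, 0.8, 0.2, 2.0, 0.9],  # deg
    }
)

max_bg_radius = 1.0  # deg, cfg["analysis"]["max_bg_radius"]
evt = evt.query("offset <= {}".format(max_bg_radius))  # keep events inside the background region

n_files = len(np.unique(evt["obs_id"]))
num_showers, num_use = 100000, 20  # from the particle's configuration block
n_simulated = n_files * num_showers * num_use
print(n_files, n_simulated)  # 3, 6000000
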
64 changes: 39 additions & 25 deletions protopipe/scripts/write_dl2.py
@@ -201,23 +201,27 @@ class RecoEvent(tb.IsDescription):
NTels_reco_lst = tb.Int16Col(dflt=0, pos=4)
NTels_reco_mst = tb.Int16Col(dflt=0, pos=5)
NTels_reco_sst = tb.Int16Col(dflt=0, pos=6)
mc_energy = tb.Float32Col(dflt=np.nan, pos=7)
reco_energy = tb.Float32Col(dflt=np.nan, pos=8)
reco_alt = tb.Float32Col(dflt=np.nan, pos=9)
reco_az = tb.Float32Col(dflt=np.nan, pos=10)
offset = tb.Float32Col(dflt=np.nan, pos=11)
xi = tb.Float32Col(dflt=np.nan, pos=12)
ErrEstPos = tb.Float32Col(dflt=np.nan, pos=13)
ErrEstDir = tb.Float32Col(dflt=np.nan, pos=14)
gammaness = tb.Float32Col(dflt=np.nan, pos=15)
success = tb.BoolCol(dflt=False, pos=16)
score = tb.Float32Col(dflt=np.nan, pos=17)
h_max = tb.Float32Col(dflt=np.nan, pos=18)
reco_core_x = tb.Float32Col(dflt=np.nan, pos=19)
reco_core_y = tb.Float32Col(dflt=np.nan, pos=20)
mc_core_x = tb.Float32Col(dflt=np.nan, pos=21)
mc_core_y = tb.Float32Col(dflt=np.nan, pos=22)
is_valid=tb.BoolCol(dflt=False, pos=23)
pointing_az = tb.Float32Col(dflt=np.nan, pos=7)
pointing_alt = tb.Float32Col(dflt=np.nan, pos=8)
true_az = tb.Float32Col(dflt=np.nan, pos=9)
true_alt = tb.Float32Col(dflt=np.nan, pos=10)
true_energy = tb.Float32Col(dflt=np.nan, pos=11)
reco_energy = tb.Float32Col(dflt=np.nan, pos=12)
reco_alt = tb.Float32Col(dflt=np.nan, pos=13)
reco_az = tb.Float32Col(dflt=np.nan, pos=14)
offset = tb.Float32Col(dflt=np.nan, pos=15)
xi = tb.Float32Col(dflt=np.nan, pos=16)
ErrEstPos = tb.Float32Col(dflt=np.nan, pos=17)
ErrEstDir = tb.Float32Col(dflt=np.nan, pos=18)
gammaness = tb.Float32Col(dflt=np.nan, pos=19)
success = tb.BoolCol(dflt=False, pos=20)
score = tb.Float32Col(dflt=np.nan, pos=21)
h_max = tb.Float32Col(dflt=np.nan, pos=22)
reco_core_x = tb.Float32Col(dflt=np.nan, pos=23)
reco_core_y = tb.Float32Col(dflt=np.nan, pos=24)
true_core_x = tb.Float32Col(dflt=np.nan, pos=25)
true_core_y = tb.Float32Col(dflt=np.nan, pos=26)
is_valid=tb.BoolCol(dflt=False, pos=27)

reco_outfile = tb.open_file(
mode="w",
@@ -270,8 +274,16 @@ class RecoEvent(tb.IsDescription):
source, save_images=args.save_images, debug=args.debug
):

# True energy
true_energy = event.mc.energy.value

# True direction
true_az = event.mc.az
true_alt = event.mc.alt

# Angular quantities
run_array_direction = event.mcheader.run_array_direction
pointing_az, pointing_alt = run_array_direction[0], run_array_direction[1]

if good_event: # aka it has been successfully reconstructed

Expand All @@ -286,8 +298,8 @@ class RecoEvent(tb.IsDescription):
# - center of the array's FoV
# - reconstructed direction
offset = angular_separation(
run_array_direction[0], # az
run_array_direction[1], # alt
pointing_az,
pointing_alt,
reco_result.az,
reco_result.alt,
)
@@ -504,16 +516,17 @@ class RecoEvent(tb.IsDescription):
+ n_tels["SST_ASTRI_ASTRICam"]
+ n_tels["SST_GCT_CHEC"]
)
reco_event["pointing_az"] = pointing_az.to("deg").value
reco_event["pointing_alt"] = pointing_alt.to("deg").value
reco_event["reco_energy"] = reco_energy

reco_event["is_valid"] = is_valid
reco_event["reco_alt"] = alt.to("deg").value
reco_event["reco_az"] = az.to("deg").value
reco_event["offset"] = offset.to("deg").value
reco_event["xi"] = xi.to("deg").value
reco_event["h_max"] = h_max.to("m").value
reco_event["reco_core_x"] = reco_core_x.to("m").value
reco_event["reco_core_y"] = reco_core_y.to("m").value
reco_event["is_valid"] = is_valid

if use_proba_for_classifier is True:
reco_event["gammaness"] = gammaness
@@ -526,9 +539,11 @@ class RecoEvent(tb.IsDescription):
shower = event.mc
mc_core_x = shower.core_x
mc_core_y = shower.core_y
reco_event["mc_energy"] = event.mc.energy.to("TeV").value
reco_event["mc_core_x"] = mc_core_x.to("m").value
reco_event["mc_core_y"] = mc_core_y.to("m").value
reco_event["true_energy"] = event.mc.energy.to("TeV").value
reco_event["true_az"] = true_az.to("deg").value
reco_event["true_alt"] = true_alt.to("deg").value
reco_event["true_core_x"] = mc_core_x.to("m").value
reco_event["true_core_y"] = mc_core_y.to("m").value

# Fill table
reco_table.flush()
@@ -558,6 +573,5 @@

print("Job done!")


if __name__ == "__main__":
main()
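
The DL2 table now stores the array pointing (pointing_az, pointing_alt) and uses true_* column names in place of the old mc_* ones. For reference, a minimal, self-contained sketch of how a PyTables description such as RecoEvent is declared and filled, reduced to a handful of the columns visible in the diff:

import numpy as np
import tables as tb

class RecoEvent(tb.IsDescription):
    obs_id = tb.Int16Col(dflt=1, pos=0)
    pointing_az = tb.Float32Col(dflt=np.nan, pos=1)
    pointing_alt = tb.Float32Col(dflt=np.nan, pos=2)
    true_energy = tb.Float32Col(dflt=np.nan, pos=3)
    reco_energy = tb.Float32Col(dflt=np.nan, pos=4)
    is_valid = tb.BoolCol(dflt=False, pos=5)

with tb.open_file("dl2_sketch.h5", mode="w") as h5:
    table = h5.create_table("/", "reco_events", RecoEvent, "toy DL2 table")
    row = table.row
    row["obs_id"] = 1
    row["pointing_az"] = 180.0  # deg
    row["pointing_alt"] = 70.0  # deg
    row["true_energy"] = 1.2    # TeV
    row["reco_energy"] = 1.1    # TeV
    row["is_valid"] = True
    row.append()
    table.flush()
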
