/
map.py
2821 lines (2297 loc) · 92.3 KB
/
map.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
import numpy as np
import astropy.units as u
from astropy.io import fits
from astropy.table import Table
from regions import CircleSkyRegion
import matplotlib.pyplot as plt
from gammapy.data import GTI
from gammapy.irf import EDispKernelMap, EDispMap, PSFKernel, PSFMap, RecoPSFMap
from gammapy.maps import LabelMapAxis, Map, MapAxis
from gammapy.modeling.models import DatasetModels, FoVBackgroundModel
from gammapy.stats import (
CashCountsStatistic,
WStatCountsStatistic,
cash,
cash_sum_cython,
get_wstat_mu_bkg,
wstat,
)
from gammapy.utils.deprecation import deprecated_renamed_argument
from gammapy.utils.fits import HDULocation, LazyFitsData
from gammapy.utils.random import get_random_state
from gammapy.utils.scripts import make_name, make_path
from gammapy.utils.table import hstack_columns
from .core import Dataset
from .evaluator import MapEvaluator
from .utils import get_axes
__all__ = ["MapDataset", "MapDatasetOnOff", "create_map_dataset_geoms"]

log = logging.getLogger(__name__)

# Default upper bound (deg) of the PSF offset ("rad") axis.
RAD_MAX = 0.66
RAD_AXIS_DEFAULT = MapAxis.from_bounds(
    0, RAD_MAX, nbin=66, node_type="edges", name="rad", unit="deg"
)
# Default migration (energy_reco / energy_true) axis for energy dispersion maps.
MIGRA_AXIS_DEFAULT = MapAxis.from_bounds(
    0.2, 5, nbin=48, node_type="edges", name="migra"
)
# Default IRF map pixel size in degrees.
BINSZ_IRF_DEFAULT = 0.2

# Evaluation mode passed to `MapEvaluator` ("local" evaluates models on a
# cutout around each source).
EVALUATION_MODE = "local"
# Whether `MapEvaluator` caches predicted counts between calls.
USE_NPRED_CACHE = True
def create_map_dataset_geoms(
    geom,
    energy_axis_true=None,
    migra_axis=None,
    rad_axis=None,
    binsz_irf=None,
    reco_psf=False,
):
    """Build the set of geometries needed to construct a `MapDataset`.

    Parameters
    ----------
    geom : `~gammapy.maps.WcsGeom`
        Reference target geometry in reco energy, used for counts and background maps.
    energy_axis_true : `~gammapy.maps.MapAxis`, optional
        True energy axis used for IRF maps. If not given, the reco energy axis
        of ``geom`` is reused (renamed to ``energy_true``).
    migra_axis : `~gammapy.maps.MapAxis`, optional
        Migration axis for the energy dispersion map. If not set, the edisp
        geometry is built for an `EDispKernelMap` instead. Default is None.
    rad_axis : `~gammapy.maps.MapAxis`, optional
        Rad axis for the psf map. Defaults to ``RAD_AXIS_DEFAULT``.
    binsz_irf : float, optional
        IRF map pixel size in degrees. Defaults to ``BINSZ_IRF_DEFAULT``.
    reco_psf : bool
        Use reconstructed energy for the PSF geometry. Default is False.

    Returns
    -------
    geoms : dict
        Dict with map geometries ("geom", "geom_exposure", "geom_psf",
        "geom_edisp").
    """
    rad_axis = rad_axis or RAD_AXIS_DEFAULT

    if energy_axis_true is None:
        # Fall back on the reco energy binning, renamed for true energy.
        energy_axis_true = geom.axes["energy"].copy(name="energy_true")
    else:
        energy_axis_true.assert_name("energy_true")

    if binsz_irf is None:
        binsz_irf = BINSZ_IRF_DEFAULT

    geom_image = geom.to_image()
    geom_irf = geom_image.to_binsz(binsz=binsz_irf)

    # PSF geometry: reco or true energy depending on `reco_psf`.
    psf_energy_axis = geom.axes["energy"] if reco_psf else energy_axis_true
    geom_psf = geom_irf.to_cube([rad_axis, psf_energy_axis])

    # With a migra axis an EDispMap geometry is built, otherwise a kernel map
    # geometry with the reco energy axis.
    edisp_first_axis = migra_axis if migra_axis else geom.axes["energy"]
    geom_edisp = geom_irf.to_cube([edisp_first_axis, energy_axis_true])

    return {
        "geom": geom,
        "geom_exposure": geom_image.to_cube([energy_axis_true]),
        "geom_psf": geom_psf,
        "geom_edisp": geom_edisp,
    }
class MapDataset(Dataset):
    """
    Bundle together binned counts, background, IRFs, models and compute a likelihood.
    It uses the Cash statistics by default.

    For more information see :ref:`datasets`.

    Parameters
    ----------
    models : `~gammapy.modeling.models.Models`
        Source sky models.
    counts : `~gammapy.maps.WcsNDMap` or `~gammapy.utils.fits.HDULocation`
        Counts cube
    exposure : `~gammapy.maps.WcsNDMap` or `~gammapy.utils.fits.HDULocation`
        Exposure cube
    background : `~gammapy.maps.WcsNDMap` or `~gammapy.utils.fits.HDULocation`
        Background cube
    mask_fit : `~gammapy.maps.WcsNDMap` or `~gammapy.utils.fits.HDULocation`
        Mask to apply to the likelihood for fitting.
    psf : `~gammapy.irf.PSFMap` or `~gammapy.utils.fits.HDULocation`
        PSF kernel
    edisp : `~gammapy.irf.EDispMap` or `~gammapy.utils.fits.HDULocation`
        Energy dispersion kernel
    mask_safe : `~gammapy.maps.WcsNDMap` or `~gammapy.utils.fits.HDULocation`
        Mask defining the safe data range.
    gti : `~gammapy.data.GTI`
        GTI of the observation or union of GTI if it is a stacked observation
    meta_table : `~astropy.table.Table`
        Table listing information on observations used to create the dataset.
        One line per observation for stacked datasets.

    If an `HDULocation` is passed the map is loaded lazily. This means the
    map data is only loaded in memory as the corresponding data attribute
    on the MapDataset is accessed. If it was accessed once it is cached for
    the next time.

    Examples
    --------
    >>> from gammapy.datasets import MapDataset
    >>> filename = "$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz"
    >>> dataset = MapDataset.read(filename, name="cta-dataset")
    >>> print(dataset)
    MapDataset
    ----------
    <BLANKLINE>
      Name                            : cta-dataset
    <BLANKLINE>
      Total counts                    : 104317
      Total background counts         : 91507.70
      Total excess counts             : 12809.30
    <BLANKLINE>
      Predicted counts                : 91507.69
      Predicted background counts     : 91507.70
      Predicted excess counts         : nan
    <BLANKLINE>
      Exposure min                    : 6.28e+07 m2 s
      Exposure max                    : 1.90e+10 m2 s
    <BLANKLINE>
      Number of total bins            : 768000
      Number of fit bins              : 691680
    <BLANKLINE>
      Fit statistic type              : cash
      Fit statistic value (-2 log(L)) : nan
    <BLANKLINE>
      Number of models                : 0
      Number of parameters            : 0
      Number of free parameters       : 0

    See Also
    --------
    MapDatasetOnOff, SpectrumDataset, FluxPointsDataset
    """

    # Fit statistic used by this dataset class; see `gammapy.stats.cash`.
    stat_type = "cash"
    tag = "MapDataset"

    # These data members are loaded lazily when an `HDULocation` is assigned;
    # the map is read from disk on first access and then cached.
    counts = LazyFitsData(cache=True)
    exposure = LazyFitsData(cache=True)
    edisp = LazyFitsData(cache=True)
    background = LazyFitsData(cache=True)
    psf = LazyFitsData(cache=True)
    mask_fit = LazyFitsData(cache=True)
    mask_safe = LazyFitsData(cache=True)

    # Names of the lazily-loaded members above, used e.g. by I/O helpers.
    _lazy_data_members = [
        "counts",
        "exposure",
        "edisp",
        "psf",
        "mask_fit",
        "mask_safe",
        "background",
    ]
    def __init__(
        self,
        models=None,
        counts=None,
        exposure=None,
        background=None,
        psf=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        gti=None,
        meta_table=None,
        name=None,
    ):
        """Initialise the dataset; see the class docstring for parameters."""
        # `make_name` generates a unique name when none is given.
        self._name = make_name(name)
        # Model evaluators are rebuilt by the `models` setter below.
        self._evaluators = {}

        self.counts = counts
        self.exposure = exposure
        self.background = background
        # Caches used by `npred_background` to avoid re-evaluating the
        # FoV background model when its parameters have not changed.
        self._background_cached = None
        self._background_parameters_cached = None
        self.mask_fit = mask_fit

        # Validate IRF inputs early: psf and edisp must be IRF maps or
        # lazy HDU locations, not e.g. plain kernels.
        if psf and not isinstance(psf, (PSFMap, HDULocation)):
            raise ValueError(
                f"'psf' must be a 'PSFMap' or `HDULocation` object, got {type(psf)}"
            )

        self.psf = psf

        if edisp and not isinstance(edisp, (EDispMap, EDispKernelMap, HDULocation)):
            raise ValueError(
                "'edisp' must be a 'EDispMap', `EDispKernelMap` or 'HDULocation' "
                f"object, got `{type(edisp)}` instead."
            )

        self.edisp = edisp
        self.mask_safe = mask_safe
        # NOTE: gti must be set before `models`, since the models setter
        # passes it to the `MapEvaluator` instances.
        self.gti = gti
        self.models = models
        self.meta_table = meta_table
# TODO: keep or remove?
@property
def background_model(self):
try:
return self.models[f"{self.name}-bkg"]
except (ValueError, TypeError):
pass
    def __str__(self):
        """Return a human-readable summary of counts, IRFs, statistics and models."""
        str_ = f"{self.__class__.__name__}\n"
        str_ += "-" * len(self.__class__.__name__) + "\n"
        str_ += "\n"
        # NOTE: the doubled braces (e.g. "{{name}}") survive the first
        # .format() call on each line, so they become the placeholders
        # filled from `info_dict()` further down.
        str_ += "\t{:32}: {{name}} \n\n".format("Name")
        str_ += "\t{:32}: {{counts:.0f}} \n".format("Total counts")
        str_ += "\t{:32}: {{background:.2f}}\n".format("Total background counts")
        str_ += "\t{:32}: {{excess:.2f}}\n\n".format("Total excess counts")
        str_ += "\t{:32}: {{npred:.2f}}\n".format("Predicted counts")
        str_ += "\t{:32}: {{npred_background:.2f}}\n".format(
            "Predicted background counts"
        )
        str_ += "\t{:32}: {{npred_signal:.2f}}\n\n".format("Predicted excess counts")
        str_ += "\t{:32}: {{exposure_min:.2e}}\n".format("Exposure min")
        str_ += "\t{:32}: {{exposure_max:.2e}}\n\n".format("Exposure max")
        str_ += "\t{:32}: {{n_bins}} \n".format("Number of total bins")
        str_ += "\t{:32}: {{n_fit_bins}} \n\n".format("Number of fit bins")

        # likelihood section
        str_ += "\t{:32}: {{stat_type}}\n".format("Fit statistic type")
        str_ += "\t{:32}: {{stat_sum:.2f}}\n\n".format(
            "Fit statistic value (-2 log(L))"
        )

        # Fill the placeholders with the actual dataset quantities.
        info = self.info_dict()
        str_ = str_.format(**info)

        # model section
        n_models, n_pars, n_free_pars = 0, 0, 0
        if self.models is not None:
            n_models = len(self.models)
            n_pars = len(self.models.parameters)
            n_free_pars = len(self.models.parameters.free_parameters)

        str_ += "\t{:32}: {} \n".format("Number of models", n_models)
        str_ += "\t{:32}: {}\n".format("Number of parameters", n_pars)
        str_ += "\t{:32}: {}\n\n".format("Number of free parameters", n_free_pars)

        if self.models is not None:
            # Skip the first two header lines of the models' own summary.
            str_ += "\t" + "\n\t".join(str(self.models).split("\n")[2:])

        return str_.expandtabs(tabsize=2)
@property
def geoms(self):
"""Map geometries
Returns
-------
geoms : dict
Dict of map geometries involved in the dataset.
"""
geoms = {}
geoms["geom"] = self._geom
if self.exposure:
geoms["geom_exposure"] = self.exposure.geom
if self.psf:
geoms["geom_psf"] = self.psf.psf_map.geom
if self.edisp:
geoms["geom_edisp"] = self.edisp.edisp_map.geom
return geoms
    @property
    def models(self):
        """Models set on the dataset (`~gammapy.modeling.models.Models`)."""
        # Backed by `_models`, populated by the setter below.
        return self._models
    @property
    def excess(self):
        """Observed excess: counts-background"""
        return self.counts - self.background
@models.setter
def models(self, models):
"""Models setter"""
self._evaluators = {}
if models is not None:
models = DatasetModels(models)
models = models.select(datasets_names=self.name)
for model in models:
if not isinstance(model, FoVBackgroundModel):
evaluator = MapEvaluator(
model=model,
evaluation_mode=EVALUATION_MODE,
gti=self.gti,
use_cache=USE_NPRED_CACHE,
)
self._evaluators[model.name] = evaluator
self._models = models
    @property
    def evaluators(self):
        """Model evaluators (dict of model name -> `MapEvaluator`)."""
        return self._evaluators
@property
def _geom(self):
"""Main analysis geometry"""
if self.counts is not None:
return self.counts.geom
elif self.background is not None:
return self.background.geom
elif self.mask_safe is not None:
return self.mask_safe.geom
elif self.mask_fit is not None:
return self.mask_fit.geom
else:
raise ValueError(
"Either 'counts', 'background', 'mask_fit'"
" or 'mask_safe' must be defined."
)
    @property
    def data_shape(self):
        """Shape of the counts or background data (tuple)"""
        return self._geom.data_shape
    def _energy_range(self, mask_map=None):
        """Compute the energy range maps with or without the fit mask.

        Returns two spatial maps (min and max energy edge per pixel); pixels
        with no valid energy bin are set to NaN.
        """
        geom = self._geom
        energy = geom.axes["energy"].edges
        e_i = geom.axes.index_data("energy")
        geom = geom.drop("energy")

        if mask_map is not None:
            mask = mask_map.data
            if mask.any():
                # argmax along the energy axis returns the FIRST True bin,
                # i.e. the index of the lowest valid energy edge per pixel.
                idx = mask.argmax(e_i)
                energy_min = energy.value[idx]
                # Pixels with no valid bin at all get NaN.
                mask_nan = ~mask.any(e_i)
                energy_min[mask_nan] = np.nan

                # Flip along energy so argmax now finds the LAST True bin;
                # index the reversed edges to obtain the upper energy edge.
                mask = np.flip(mask, e_i)
                idx = mask.argmax(e_i)
                energy_max = energy.value[::-1][idx]
                energy_max[mask_nan] = np.nan
            else:
                # Fully masked: no valid energy range anywhere.
                energy_min = np.full(geom.data_shape, np.nan)
                energy_max = energy_min.copy()
        else:
            # No mask: the full energy axis applies everywhere.
            data_shape = geom.data_shape
            energy_min = np.full(data_shape, energy.value[0])
            energy_max = np.full(data_shape, energy.value[-1])

        map_min = Map.from_geom(geom, data=energy_min, unit=energy.unit)
        map_max = Map.from_geom(geom, data=energy_max, unit=energy.unit)
        return map_min, map_max
    @property
    def energy_range(self):
        """Energy range maps defined by the mask_safe and mask_fit."""
        # `self.mask` combines mask_safe and mask_fit (defined on `Dataset`).
        return self._energy_range(self.mask)
    @property
    def energy_range_safe(self):
        """Energy range maps defined by the mask_safe only."""
        return self._energy_range(self.mask_safe)
    @property
    def energy_range_fit(self):
        """Energy range maps defined by the mask_fit only."""
        return self._energy_range(self.mask_fit)
    @property
    def energy_range_total(self):
        """Largest energy range among all pixels, defined by mask_safe and mask_fit."""
        energy_min_map, energy_max_map = self.energy_range
        # nanmin/nanmax ignore fully-masked pixels (NaN).
        return np.nanmin(energy_min_map.quantity), np.nanmax(energy_max_map.quantity)
def npred(self):
"""Total predicted source and background counts
Returns
-------
npred : `Map`
Total predicted counts
"""
npred_total = self.npred_signal()
if self.background:
npred_total += self.npred_background()
npred_total.data[npred_total.data < 0.0] = 0
return npred_total
def npred_background(self):
"""Predicted background counts
The predicted background counts depend on the parameters
of the `FoVBackgroundModel` defined in the dataset.
Returns
-------
npred_background : `Map`
Predicted counts from the background.
"""
background = self.background
if self.background_model and background:
if self._background_parameters_changed:
values = self.background_model.evaluate_geom(geom=self.background.geom)
if self._background_cached is None:
self._background_cached = background * values
else:
self._background_cached.quantity = (
background.quantity * values.value
)
return self._background_cached
else:
return background
return background
    def _background_parameters_changed(self):
        """Return whether the background model parameter values changed since
        the last call, updating the cached values as a side effect.
        """
        values = self.background_model.parameters.value
        # TODO: possibly allow for a tolerance here?
        # `~np.all(...)` yields a numpy bool: True if any value differs
        # (also True on the first call, when the cache is still None).
        changed = ~np.all(self._background_parameters_cached == values)

        if changed:
            self._background_parameters_cached = values
        return changed
@deprecated_renamed_argument("model_name", "model_names", "1.1")
def npred_signal(self, model_names=None, stack=True):
"""Model predicted signal counts.
If a list of model name is passed, predicted counts from these components are returned.
If stack is set to True, a map of the sum of all the predicted counts is returned.
If stack is set to False, a map with an additional axis representing the models is returned.
Parameters
----------
model_names: list of str
List of name of SkyModel for which to compute the npred.
If none, all the SkyModel predicted counts are computed
stack: bool
Whether to stack the npred maps upon each other.
Returns
-------
npred_sig: `gammapy.maps.Map`
Map of the predicted signal counts
"""
npred_total = Map.from_geom(self._geom, dtype=float)
evaluators = self.evaluators
if model_names is not None:
if isinstance(model_names, str):
model_names = [model_names]
evaluators = {name: self.evaluators[name] for name in model_names}
npred_list = []
labels = []
for evaluator_name, evaluator in evaluators.items():
if evaluator.needs_update:
evaluator.update(
self.exposure,
self.psf,
self.edisp,
self._geom,
self.mask_image,
)
if evaluator.contributes:
npred = evaluator.compute_npred()
if stack:
npred_total.stack(npred)
else:
npred_geom = Map.from_geom(self._geom, dtype=float)
npred_geom.stack(npred)
labels.append(evaluator_name)
npred_list.append(npred_geom)
if npred_list != []:
label_axis = LabelMapAxis(labels=labels, name="models")
npred_total = Map.from_stack(npred_list, axis=label_axis)
return npred_total
@classmethod
def from_geoms(
cls,
geom,
geom_exposure=None,
geom_psf=None,
geom_edisp=None,
reference_time="2000-01-01",
name=None,
**kwargs,
):
"""
Create a MapDataset object with zero filled maps according to the specified geometries
Parameters
----------
geom : `Geom`
geometry for the counts and background maps
geom_exposure : `Geom`
geometry for the exposure map
geom_psf : `Geom`
geometry for the psf map
geom_edisp : `Geom`
geometry for the energy dispersion kernel map.
If geom_edisp has a migra axis, this will create an EDispMap instead.
reference_time : `~astropy.time.Time`
the reference time to use in GTI definition
name : str
Name of the returned dataset.
Returns
-------
dataset : `MapDataset` or `SpectrumDataset`
A dataset containing zero filled maps
"""
name = make_name(name)
kwargs = kwargs.copy()
kwargs["name"] = name
kwargs["counts"] = Map.from_geom(geom, unit="")
kwargs["background"] = Map.from_geom(geom, unit="")
if geom_exposure:
kwargs["exposure"] = Map.from_geom(geom_exposure, unit="m2 s")
if geom_edisp:
if "energy" in geom_edisp.axes.names:
kwargs["edisp"] = EDispKernelMap.from_geom(geom_edisp)
else:
kwargs["edisp"] = EDispMap.from_geom(geom_edisp)
if geom_psf:
if "energy_true" in geom_psf.axes.names:
kwargs["psf"] = PSFMap.from_geom(geom_psf)
elif "energy" in geom_psf.axes.names:
kwargs["psf"] = RecoPSFMap.from_geom(geom_psf)
kwargs.setdefault(
"gti", GTI.create([] * u.s, [] * u.s, reference_time=reference_time)
)
kwargs["mask_safe"] = Map.from_geom(geom, unit="", dtype=bool)
return cls(**kwargs)
@classmethod
def create(
cls,
geom,
energy_axis_true=None,
migra_axis=None,
rad_axis=None,
binsz_irf=None,
reference_time="2000-01-01",
name=None,
meta_table=None,
reco_psf=False,
**kwargs,
):
"""Create a MapDataset object with zero filled maps.
Parameters
----------
geom : `~gammapy.maps.WcsGeom`
Reference target geometry in reco energy, used for counts and background maps
energy_axis_true : `~gammapy.maps.MapAxis`
True energy axis used for IRF maps
migra_axis : `~gammapy.maps.MapAxis`
If set, this provides the migration axis for the energy dispersion map.
If not set, an EDispKernelMap is produced instead. Default is None
rad_axis : `~gammapy.maps.MapAxis`
Rad axis for the psf map
binsz_irf : float
IRF Map pixel size in degrees.
reference_time : `~astropy.time.Time`
the reference time to use in GTI definition
name : str
Name of the returned dataset.
meta_table : `~astropy.table.Table`
Table listing information on observations used to create the dataset.
One line per observation for stacked datasets.
reco_psf : bool
Use reconstructed energy for the PSF geometry. Default is False
Returns
-------
empty_maps : `MapDataset`
A MapDataset containing zero filled maps
Examples
--------
>>> from gammapy.datasets import MapDataset
>>> from gammapy.maps import WcsGeom, MapAxis
>>> energy_axis = MapAxis.from_energy_bounds(1.0, 10.0, 4, unit="TeV")
>>> energy_axis_true = MapAxis.from_energy_bounds(
0.5, 20, 10, unit="TeV", name="energy_true"
)
>>> geom = WcsGeom.create(
skydir=(83.633, 22.014),
binsz=0.02, width=(2, 2),
frame="icrs",
proj="CAR",
axes=[energy_axis]
)
>>> empty = MapDataset.create(geom=geom, energy_axis_true=energy_axis_true, name="empty")
"""
geoms = create_map_dataset_geoms(
geom=geom,
energy_axis_true=energy_axis_true,
rad_axis=rad_axis,
migra_axis=migra_axis,
binsz_irf=binsz_irf,
reco_psf=reco_psf,
)
kwargs.update(geoms)
return cls.from_geoms(
reference_time=reference_time, name=name, meta_table=meta_table, **kwargs
)
@property
def mask_safe_image(self):
"""Reduced mask safe"""
if self.mask_safe is None:
return None
return self.mask_safe.reduce_over_axes(func=np.logical_or)
@property
def mask_fit_image(self):
"""Reduced mask fit"""
if self.mask_fit is None:
return None
return self.mask_fit.reduce_over_axes(func=np.logical_or)
@property
def mask_image(self):
"""Reduced mask"""
if self.mask is None:
mask = Map.from_geom(self._geom.to_image(), dtype=bool)
mask.data |= True
return mask
return self.mask.reduce_over_axes(func=np.logical_or)
    @property
    def mask_safe_psf(self):
        """Mask safe for psf maps"""
        if self.mask_safe is None or self.psf is None:
            return None

        # Collapse the PSF geometry to single energy_true and rad bins,
        # then project the reduced spatial safe mask onto it.
        geom = self.psf.psf_map.geom.squash("energy_true").squash("rad")
        mask_safe_psf = self.mask_safe_image.interp_to_geom(geom.to_image())
        return mask_safe_psf.to_cube(geom.axes)
    @property
    def mask_safe_edisp(self):
        """Mask safe for edisp maps"""
        if self.mask_safe is None or self.edisp is None:
            return None

        # Region geometries are used as-is.
        if self.mask_safe.geom.is_region:
            return self.mask_safe

        geom = self.edisp.edisp_map.geom.squash("energy_true")

        if "migra" in geom.axes.names:
            # Full EDispMap: collapse the migra axis too and project the
            # reduced spatial safe mask onto the squashed geometry.
            geom = geom.squash("migra")
            mask_safe_edisp = self.mask_safe_image.interp_to_geom(geom.to_image())
            return mask_safe_edisp.to_cube(geom.axes)

        # Kernel map: interpolate the full 3D safe mask onto the edisp geometry.
        return self.mask_safe.interp_to_geom(geom)
def to_masked(self, name=None, nan_to_num=True):
"""Return masked dataset
Parameters
----------
name : str
Name of the masked dataset.
nan_to_num: bool
Non-finite values are replaced by zero if True (default).
Returns
-------
dataset : `MapDataset` or `SpectrumDataset`
Masked dataset
"""
dataset = self.__class__.from_geoms(**self.geoms, name=name)
dataset.stack(self, nan_to_num=nan_to_num)
return dataset
    def stack(self, other, nan_to_num=True):
        r"""Stack another dataset in place. The original dataset is modified.

        Safe mask is applied to compute the stacked counts data. Counts outside
        each dataset safe mask are lost.

        The stacking of 2 datasets is implemented as follows. Here, :math:`k`
        denotes a bin in reconstructed energy and :math:`j = {1,2}` is the dataset number

        The ``mask_safe`` of each dataset is defined as:

        .. math::

            \epsilon_{jk} =\left\{\begin{array}{cl} 1, &
            \mbox{if bin k is inside the thresholds}\\ 0, &
            \mbox{otherwise} \end{array}\right.

        Then the total ``counts`` and model background ``bkg`` are computed according to:

        .. math::

            \overline{\mathrm{n_{on}}}_k = \mathrm{n_{on}}_{1k} \cdot \epsilon_{1k} +
            \mathrm{n_{on}}_{2k} \cdot \epsilon_{2k}

            \overline{bkg}_k = bkg_{1k} \cdot \epsilon_{1k} +
            bkg_{2k} \cdot \epsilon_{2k}

        The stacked ``safe_mask`` is then:

        .. math::

            \overline{\epsilon_k} = \epsilon_{1k} OR \epsilon_{2k}

        Parameters
        ----------
        other: `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
            Map dataset to be stacked with this one. If other is an on-off
            dataset alpha * counts_off is used as a background model.
        nan_to_num: bool
            Non-finite values are replaced by zero if True (default).
        """
        # Counts are summed weighted by the other dataset's safe mask.
        if self.counts and other.counts:
            self.counts.stack(
                other.counts, weights=other.mask_safe, nan_to_num=nan_to_num
            )

        if self.exposure and other.exposure:
            self.exposure.stack(
                other.exposure, weights=other.mask_safe_image, nan_to_num=nan_to_num
            )
            # TODO: check whether this can be improved e.g. handling this in GTI
            # Accumulate livetime metadata only when the other dataset actually
            # contributes (non-empty safe mask).
            if "livetime" in other.exposure.meta and np.any(other.mask_safe_image):
                if "livetime" in self.exposure.meta:
                    self.exposure.meta["livetime"] += other.exposure.meta["livetime"]
                else:
                    self.exposure.meta["livetime"] = other.exposure.meta[
                        "livetime"
                    ].copy()

        # Only stack backgrounds for Cash statistics; the on-off subclass
        # handles its background differently.
        if self.stat_type == "cash":
            if self.background and other.background:
                background = self.npred_background()
                background.stack(
                    other.npred_background(),
                    weights=other.mask_safe,
                    nan_to_num=nan_to_num,
                )
                self.background = background

        if self.psf and other.psf:
            self.psf.stack(other.psf, weights=other.mask_safe_psf)

        if self.edisp and other.edisp:
            self.edisp.stack(other.edisp, weights=other.mask_safe_edisp)

        # Stacked safe mask is the logical OR of the two masks.
        if self.mask_safe and other.mask_safe:
            self.mask_safe.stack(other.mask_safe)

        if self.mask_fit and other.mask_fit:
            self.mask_fit.stack(other.mask_fit)
        elif other.mask_fit:
            self.mask_fit = other.mask_fit.copy()

        if self.gti and other.gti:
            self.gti.stack(other.gti)
            self.gti = self.gti.union()

        if self.meta_table and other.meta_table:
            self.meta_table = hstack_columns(self.meta_table, other.meta_table)
        elif other.meta_table:
            self.meta_table = other.meta_table.copy()
    def stat_array(self):
        """Statistic function value per bin given the current model parameters"""
        # Cash statistic per bin: comparison of observed counts and npred.
        return cash(n_on=self.counts.data, mu_on=self.npred().data)
def residuals(self, method="diff", **kwargs):
"""Compute residuals map.
Parameters
----------
method: {"diff", "diff/model", "diff/sqrt(model)"}
Method used to compute the residuals. Available options are:
- "diff" (default): data - model
- "diff/model": (data - model) / model
- "diff/sqrt(model)": (data - model) / sqrt(model)
**kwargs : dict
Keyword arguments forwarded to `Map.smooth()`
Returns
-------
residuals : `gammapy.maps.Map`
Residual map.
"""
npred, counts = self.npred(), self.counts.copy()
if self.mask:
npred = npred * self.mask
counts = counts * self.mask
if kwargs:
kwargs.setdefault("mode", "constant")
kwargs.setdefault("width", "0.1 deg")
kwargs.setdefault("kernel", "gauss")
with np.errstate(invalid="ignore", divide="ignore"):
npred = npred.smooth(**kwargs)
counts = counts.smooth(**kwargs)
if self.mask:
mask = self.mask.smooth(**kwargs)
npred /= mask
counts /= mask
residuals = self._compute_residuals(counts, npred, method=method)
if self.mask:
residuals.data[~self.mask.data] = np.nan
return residuals
def plot_residuals_spatial(
self,
ax=None,
method="diff",
smooth_kernel="gauss",
smooth_radius="0.1 deg",
**kwargs,
):
"""Plot spatial residuals.
The normalization used for the residuals computation can be controlled
using the method parameter.
Parameters
----------
ax : `~astropy.visualization.wcsaxes.WCSAxes`
Axes to plot on.
method : {"diff", "diff/model", "diff/sqrt(model)"}
Normalization used to compute the residuals, see `MapDataset.residuals`.
smooth_kernel : {"gauss", "box"}
Kernel shape.
smooth_radius: `~astropy.units.Quantity`, str or float
Smoothing width given as quantity or float. If a float is given, it
is interpreted as smoothing width in pixels.
**kwargs : dict
Keyword arguments passed to `~matplotlib.axes.Axes.imshow`.
Returns
-------
ax : `~astropy.visualization.wcsaxes.WCSAxes`
WCSAxes object.
Examples
--------
>>> from gammapy.datasets import MapDataset
>>> dataset = MapDataset.read("$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz")
>>> kwargs = {"cmap": "RdBu_r", "vmin":-5, "vmax":5, "add_cbar": True}
>>> dataset.plot_residuals_spatial(method="diff/sqrt(model)", **kwargs) # doctest: +SKIP
"""
counts, npred = self.counts.copy(), self.npred()
if counts.geom.is_region:
raise ValueError("Cannot plot spatial residuals for RegionNDMap")
if self.mask is not None:
counts *= self.mask
npred *= self.mask
counts_spatial = counts.sum_over_axes().smooth(
width=smooth_radius, kernel=smooth_kernel
)
npred_spatial = npred.sum_over_axes().smooth(
width=smooth_radius, kernel=smooth_kernel
)
residuals = self._compute_residuals(counts_spatial, npred_spatial, method)
if self.mask_safe is not None:
mask = self.mask_safe.reduce_over_axes(func=np.logical_or, keepdims=True)
residuals.data[~mask.data] = np.nan
kwargs.setdefault("add_cbar", True)
kwargs.setdefault("cmap", "coolwarm")
kwargs.setdefault("vmin", -5)
kwargs.setdefault("vmax", 5)
ax = residuals.plot(ax, **kwargs)
return ax
def plot_residuals_spectral(self, ax=None, method="diff", region=None, **kwargs):
"""Plot spectral residuals.
The residuals are extracted from the provided region, and the normalization
used for its computation can be controlled using the method parameter.
The error bars are computed using the uncertainty on the excess with a symmetric assumption.
Parameters
----------
ax : `~matplotlib.axes.Axes`
Axes to plot on.
method : {"diff", "diff/sqrt(model)"}
Normalization used to compute the residuals, see `SpectrumDataset.residuals`.
region: `~regions.SkyRegion` (required)
Target sky region.
**kwargs : dict
Keyword arguments passed to `~matplotlib.axes.Axes.errorbar`.
Returns
-------
ax : `~matplotlib.axes.Axes`
Axes object.
Examples
--------
>>> from gammapy.datasets import MapDataset
>>> dataset = MapDataset.read("$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz")
>>> kwargs = {"markerfacecolor": "blue", "markersize":8, "marker":'s'}
>>> dataset.plot_residuals_spectral(method="diff/sqrt(model)", **kwargs) # doctest: +SKIP
"""
counts, npred = self.counts.copy(), self.npred()
if self.mask is None:
mask = self.counts.copy()
mask.data = 1
else:
mask = self.mask