Use dataset GTI table in LightCurveEstimator #2561

Merged
merged 2 commits on Nov 19, 2019
4 changes: 2 additions & 2 deletions gammapy/data/gti.py
@@ -74,8 +74,8 @@ def create(cls, start, stop, reference_time="2000-01-01"):
         reference_time : `~astropy.time.Time`
             the reference time to use in GTI definition
         """
-        start = Quantity(start)
-        stop = Quantity(stop)
+        start = np.atleast_1d(Quantity(start))
+        stop = np.atleast_1d(Quantity(stop))
         reference_time = Time(reference_time)
         meta = time_ref_to_dict(reference_time)
         table = Table({"START": start.to("s"), "STOP": stop.to("s")}, meta=meta)
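A minimal usage sketch, not part of the diff, of what the np.atleast_1d change enables: GTI.create can now take scalar start/stop values, as the updated tests below do. The values here are illustrative only.

from gammapy.data import GTI

# Scalar start/stop values are promoted to length-1 arrays by
# np.atleast_1d, so a single interval no longer needs list wrapping.
gti = GTI.create("0 h", "1 h", reference_time="2010-01-01T00:00:00")
print(gti.table)  # one START/STOP row, in seconds relative to the reference time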
8 changes: 4 additions & 4 deletions gammapy/time/lightcurve_estimator.py
@@ -130,8 +130,8 @@ def run(self, e_ref, e_min, e_max, steps="all"):
         rows = []
         for dataset in self.datasets:
             row = {
-                "time_min": dataset.counts.meta["t_start"].mjd,
-                "time_max": dataset.counts.meta["t_stop"].mjd,
+                "time_min": dataset.gti.time_start[0].mjd,
+                "time_max": dataset.gti.time_stop[-1].mjd,
             }
             row.update(self.estimate_time_bin_flux(dataset, steps))
             rows.append(row)
@@ -179,8 +179,8 @@ def estimate_time_bin_flux(self, dataset, steps="all"):
             log.warning(
                 "Fit failed for time bin between {t_min} and {t_max},"
                 " setting NaN.".format(
-                    t_min=dataset.counts.meta["t_start"],
-                    t_max=dataset.counts.meta["t_stop"],
+                    t_min=dataset.gti.time_start[0].mjd,
+                    t_max=dataset.gti.time_stop[-1].mjd,
                 )
             )

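For context, a hedged sketch (again outside the diff) of how the estimator now derives the time bin edges from a dataset's GTI table rather than from counts.meta; the GTI here is built directly instead of being taken from a real dataset.

from gammapy.data import GTI

# A one-hour interval; the estimator reads the first start and the last stop.
gti = GTI.create("0 h", "1 h", reference_time="2010-01-01T00:00:00")
time_min = gti.time_start[0].mjd   # start of the first good time interval
time_max = gti.time_stop[-1].mjd   # stop of the last good time interval
print(time_min, time_max)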
26 changes: 10 additions & 16 deletions gammapy/time/tests/test_lightcurve.py
@@ -6,6 +6,7 @@
 import astropy.units as u
 from astropy.table import Column, Table
 from astropy.time import Time
+from gammapy.data import GTI
 from gammapy.modeling.models import PowerLawSpectralModel
 from gammapy.spectrum.tests.test_flux_point_estimator import (
     simulate_map_dataset,
@@ -152,16 +153,13 @@ def test_lightcurve_plot_time(lc):
 def get_spectrum_datasets():
     model = PowerLawSpectralModel()
     dataset_1 = simulate_spectrum_dataset(model=model, random_state=0)
-    dataset_1.counts.meta = {
-        "t_start": Time("2010-01-01T00:00:00"),
-        "t_stop": Time("2010-01-01T01:00:00"),
-    }
+    gti1 = GTI.create("0h", "1h", "2010-01-01T00:00:00")
+    dataset_1.gti = gti1

     dataset_2 = simulate_spectrum_dataset(model, random_state=1)
-    dataset_2.counts.meta = {
-        "t_start": Time("2010-01-01T01:00:00"),
-        "t_stop": Time("2010-01-01T02:00:00"),
-    }
+    gti2 = GTI.create("1h", "2h", "2010-01-01T00:00:00")
+    dataset_2.gti = gti2


     return [dataset_1, dataset_2]

@@ -202,16 +200,12 @@ def test_lightcurve_estimator_spectrum_datasets():

 def get_map_datasets():
     dataset_1 = simulate_map_dataset(random_state=0)
-    dataset_1.counts.meta = {
-        "t_start": Time("2010-01-01T00:00:00"),
-        "t_stop": Time("2010-01-01T01:00:00"),
-    }
+    gti1 = GTI.create("0 h", "1 h", "2010-01-01T00:00:00")
+    dataset_1.gti = gti1

     dataset_2 = simulate_map_dataset(random_state=1)
-    dataset_2.counts.meta = {
-        "t_start": Time("2010-01-01T01:00:00"),
-        "t_stop": Time("2010-01-01T02:00:00"),
-    }
+    gti2 = GTI.create("1 h", "2 h", "2010-01-01T00:00:00")
+    dataset_2.gti = gti2

     return [dataset_1, dataset_2]

6 changes: 0 additions & 6 deletions tutorials/light_curve.ipynb
@@ -227,8 +227,6 @@
 " max_radius=\"0.3 deg\",\n",
 " )\n",
 "\n",
-" stacked.counts.meta[\"t_start\"] = time_interval[0]\n",
-" stacked.counts.meta[\"t_stop\"] = time_interval[1]\n",
 " datasets.append(stacked)"
 ]
 },
@@ -402,10 +400,6 @@
 " observation, selection=[\"counts\", \"aeff\", \"edisp\"]\n",
 " )\n",
 "\n",
-" dataset.counts.meta = dict()\n",
-" dataset.counts.meta[\"t_start\"] = time_interval[0]\n",
-" dataset.counts.meta[\"t_stop\"] = time_interval[1]\n",
-"\n",
 " dataset_on_off = bkg_maker.run(dataset, observation)\n",
 " dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)\n",
 " datasets_1d.append(dataset_on_off)"