Merge pull request #37 from legend-exp/pydataobj-update
Support the latest legend-pydataobj version
gipert committed Jan 1, 2024
2 parents 2c0d1c2 + e57ced3 commit 5f5bf78
Showing 13 changed files with 41 additions and 43 deletions.
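Most of this change set is a mechanical rename of the legend-pydataobj I/O API: the `lgdo.lh5_store` module becomes `lgdo.lh5`, and `LH5Store.read_object`/`write_object` become `read`/`write`. A minimal sketch of the new call pattern, assuming legend-pydataobj >= 1.5.0a1 (file and group names are placeholders, not taken from the diff):

```python
from lgdo import lh5  # previously: import lgdo.lh5_store as lh5

store = lh5.LH5Store()

# previously: store.read_object(...)
tab, n_rows = store.read("geds/raw", "some_raw_file.lh5")

# previously: store.write_object(...)
store.write(obj=tab, name="geds/raw", lh5_file="some_copy.lh5")
```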
6 changes: 3 additions & 3 deletions docs/source/notebooks/DSPTutorial.ipynb
@@ -46,14 +46,14 @@
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"\n",
"import lgdo.lh5_store as lh5\n",
"from lgdo import lh5\n",
"from legendtestdata import LegendTestData\n",
"\n",
"# Get some sample waveforms from LEGEND test data\n",
"ldata = LegendTestData()\n",
"raw_file = ldata.get_path(\"lh5/LDQTA_r117_20200110T105115Z_cal_geds_raw.lh5\")\n",
"st = lh5.LH5Store()\n",
"tab = st.read_object(\"geds/raw\", raw_file)[0]\n",
"tab = st.read(\"geds/raw\", raw_file)[0]\n",
"wfs = tab[\"waveform\"].values.nda.astype(\"float32\")\n",
"t = tab[\"waveform\"].dt.nda.reshape((100, 1)) * np.arange(wfs.shape[1])\n",
"baselines = tab[\"baseline\"].nda\n",
@@ -731,7 +731,7 @@
"\n",
"lh5.show(\"test_dsp.lh5\")\n",
"print()\n",
"print(st.read_object(\"geds/dsp\", \"test_dsp.lh5\")[0].get_dataframe())"
"print(st.read(\"geds/dsp\", \"test_dsp.lh5\")[0].get_dataframe())"
]
},
{
4 changes: 2 additions & 2 deletions docs/source/notebooks/IntroToDSP.ipynb
@@ -21,7 +21,7 @@
"metadata": {},
"outputs": [],
"source": [
"from lgdo import LH5Store, ls, show, load_dfs\n",
"from lgdo.lh5 import LH5Store, ls, show, load_dfs\n",
"from dspeed.vis import WaveformBrowser\n",
"from dspeed import units\n",
"from dspeed import build_dsp\n",
@@ -510,7 +510,7 @@
"outputs": [],
"source": [
"store = LH5Store()\n",
"obj, nrows = store.read_object(\"geds/dsp\", \"./example_dsp_file.lh5\")\n",
"obj, nrows = store.read(\"geds/dsp\", \"./example_dsp_file.lh5\")\n",
"print(\n",
" f\"We have saved the following list of outputs from our DSP routine: {list(obj.keys())}\"\n",
")\n",
4 changes: 2 additions & 2 deletions docs/source/notebooks/WaveformBrowser.ipynb
@@ -29,7 +29,7 @@
"\n",
"u = pint.get_application_registry()\n",
"\n",
"import lgdo.lh5_store as lh5\n",
"from lgdo import lh5\n",
"from dspeed.vis.waveform_browser import WaveformBrowser\n",
"from legendtestdata import LegendTestData\n",
"\n",
@@ -132,7 +132,7 @@
"metadata": {},
"outputs": [],
"source": [
"from lgdo import load_dfs\n",
"from lgdo.lh5 import load_dfs\n",
"\n",
"df = load_dfs(dsp_file, [\"trapEmax\", \"AoE\"], \"geds/dsp\")"
]
2 changes: 1 addition & 1 deletion setup.cfg
@@ -35,7 +35,7 @@ install_requires =
colorlog
h5py>=3.2
iminuit
- legend-pydataobj~=1.0
+ legend-pydataobj>=1.5.0a1
matplotlib
numba!=0.53.*,!=0.54.*
parse
7 changes: 3 additions & 4 deletions src/dspeed/build_dsp.py
@@ -9,9 +9,8 @@
import os

import h5py
- import lgdo.lh5_store as lh5
import numpy as np
- from lgdo.lgdo_utils import expand_path
+ from lgdo import lh5
from tqdm.auto import tqdm

from .errors import DSPFatal
@@ -133,7 +132,7 @@ def build_dsp(
# get the database parameters. For now, this will just be a dict in a json
# file, but eventually we will want to interface with the metadata repo
if isinstance(database, str):
- with open(expand_path(database)) as db_file:
+ with open(lh5.utils.expand_path(database)) as db_file:
database = json.load(db_file)

if database and not isinstance(database, dict):
@@ -190,7 +189,7 @@
e.wf_range = f"{e.wf_range[0]+start_row}-{e.wf_range[1]+start_row}"
raise e

- raw_store.write_object(
+ raw_store.write(
obj=tb_out,
name=tb_name,
lh5_file=f_dsp,
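The build_dsp.py hunks above also pick up the relocated path-expansion helper: `expand_path` is no longer imported from `lgdo.lgdo_utils` but reached through `lgdo.lh5.utils`. A minimal sketch of the new usage, with a placeholder config path:

```python
import json

from lgdo import lh5

# previously: from lgdo.lgdo_utils import expand_path
with open(lh5.utils.expand_path("$HOME/dsp_config.json")) as db_file:  # placeholder path
    database = json.load(db_file)
```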
12 changes: 5 additions & 7 deletions src/dspeed/processing_chain.py
@@ -16,10 +16,8 @@
from typing import Any

import lgdo
- import lgdo.lh5_store as lh5
import numpy as np
- from lgdo import LGDO
- from lgdo.lgdo_utils import expand_path
+ from lgdo import LGDO, lh5
from numba import vectorize
from pint import Quantity, Unit

@@ -1111,7 +1109,7 @@ def _loadlh5(path_to_file, path_in_file: str) -> np.array: # noqa: N805
"""

try:
- loaded_data = sto.read_object(path_in_file, path_to_file)[0].nda
+ loaded_data = sto.read(path_in_file, path_to_file)[0].nda
except ValueError:
raise ProcessingChainError(f"LH5 file not found: {path_to_file}")

@@ -1780,8 +1778,8 @@ def build_processing_chain(
block_width: int = 16,
) -> tuple[ProcessingChain, list[str], lgdo.Table]:
"""Produces a :class:`ProcessingChain` object and an LH5
- :class:`~lgdo.table.Table` for output parameters from an input LH5
- :class:`~lgdo.table.Table` and a JSON recipe.
+ :class:`~lgdo.types.table.Table` for output parameters from an input LH5
+ :class:`~lgdo.types.table.Table` and a JSON recipe.
Parameters
----------
@@ -1863,7 +1861,7 @@
proc_chain = ProcessingChain(block_width, lh5_in.size)

if isinstance(dsp_config, str):
- with open(expand_path(dsp_config)) as f:
+ with open(lh5.utils.expand_path(dsp_config)) as f:
dsp_config = json.load(f)
elif dsp_config is None:
dsp_config = {"outputs": [], "processors": {}}
6 changes: 3 additions & 3 deletions src/dspeed/processors/wiener_filter.py
@@ -1,7 +1,7 @@
from __future__ import annotations

- import lgdo.lh5_store as lh5
import numpy as np
+ from lgdo import lh5
from numba import guvectorize

from ..errors import DSPFatal
@@ -65,10 +65,10 @@ def wiener_filter(file_name_array: list[str]) -> np.ndarray:

# Read in the data

- superpulse, _ = sto.read_object("spms/processed/superpulse", file_name)
+ superpulse, _ = sto.read("spms/processed/superpulse", file_name)
superpulse = superpulse.nda

- noise_wf, _ = sto.read_object("spms/processed/noise_wf", file_name)
+ noise_wf, _ = sto.read("spms/processed/noise_wf", file_name)
noise_wf = noise_wf.nda

# Now check that the data are valid
19 changes: 10 additions & 9 deletions src/dspeed/vis/waveform_browser.py
@@ -5,12 +5,13 @@
import string
import sys

- import lgdo.lh5_store as lh5
+ import lgdo
import matplotlib.pyplot as plt
import numpy as np
import pandas
import pint
from cycler import cycler
+ from lgdo import lh5
from matplotlib.lines import Line2D

from ..processing_chain import build_processing_chain
@@ -28,7 +29,7 @@ class WaveformBrowser:

def __init__(
self,
- files_in: str | list[str] | lgdo.LH5Iterator, # noqa: F821
+ files_in: str | list[str] | lh5.LH5Iterator, # noqa: F821
lh5_group: str | list[str] = "",
base_path: str = "",
entry_list: list[int] | list[list[int]] = None,
@@ -61,7 +62,7 @@ def __init__(
and group, they must be the same size. If a file is wild-carded,
the same group will be assigned to each file found
base_path
- base path for file. See :class:`~lgdo.lh5_store.LH5Store`.
+ base path for file. See :class:`~lgdo.lh5.LH5Store`.
entry_list
list of event indices to draw. If it is a nested list, use local
@@ -293,7 +294,7 @@
# If we still have no x_unit get it from the first waveform we can find
if self.x_unit is None:
for wf in self.lh5_out.values():
- if not isinstance(wf, lh5.WaveformTable):
+ if not isinstance(wf, lgdo.WaveformTable):
continue
self.x_unit = ureg(wf.dt_units)

@@ -390,7 +391,7 @@
ref_time = 0
elif isinstance(self.align_par, str):
data = self.lh5_out.get(self.align_par, None)
- if isinstance(data, lh5.Array):
+ if isinstance(data, lgdo.Array):
ref_time = data.nda[i_tb]
unit = data.attrs.get("units", None)
if unit and unit in ureg and ureg.is_compatible_with(unit, self.x_unit):
@@ -405,7 +406,7 @@
for name, lines in self.lines.items():
# Get the data; note this is implicitly copying it!
data = self.lh5_out.get(name, None)
- if isinstance(data, lh5.WaveformTable):
+ if isinstance(data, lgdo.WaveformTable):
y = data.values.nda[i_tb, :] / norm
dt = data.dt.nda[i_tb] * float(ureg(data.dt_units) / self.x_unit)
t0 = (
@@ -416,13 +417,13 @@
lines.append(Line2D(x, y))
self._update_auto_limit(x, y)

- elif isinstance(data, lh5.ArrayOfEqualSizedArrays):
+ elif isinstance(data, lgdo.ArrayOfEqualSizedArrays):
y = data.nda[i_tb, :] / norm
x = np.arange(len(y), dtype="float")
lines.append(Line2D(x, y))
self._update_auto_limit(x, y)

- elif isinstance(data, lh5.Array):
+ elif isinstance(data, lgdo.Array):
val = data.nda[i_tb]
unit = data.attrs.get("units", None)
if unit and unit in ureg and ureg.is_compatible_with(unit, self.x_unit):
@@ -453,7 +454,7 @@

if not data:
data = ureg.Quantity(self.aux_vals[name][entry])
- elif isinstance(data, lh5.Array):
+ elif isinstance(data, lgdo.Array):
unit = data.attrs.get("units", None)
if unit and unit in ureg:
data = data.nda[i_tb] * ureg(unit)
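The type checks changed above follow the new package layout: LGDO container types (`Array`, `WaveformTable`, `ArrayOfEqualSizedArrays`, ...) are imported from the top-level `lgdo` package, while file I/O (`LH5Store`, `LH5Iterator`) lives under `lgdo.lh5`. A rough sketch under that assumption, with a placeholder file name:

```python
import lgdo           # container types: lgdo.Array, lgdo.WaveformTable, ...
from lgdo import lh5  # I/O: lh5.LH5Store, lh5.LH5Iterator

store = lh5.LH5Store()
obj, _ = store.read("geds/raw", "some_raw_file.lh5")  # placeholder file
print(isinstance(obj, lgdo.Table), isinstance(obj["waveform"], lgdo.WaveformTable))
```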
6 changes: 3 additions & 3 deletions tests/conftest.py
@@ -11,7 +11,7 @@
import numpy as np
import pytest
from legendtestdata import LegendTestData
- from lgdo import LH5Store
+ from lgdo.lh5 import LH5Store

import dspeed.processors # noqa: F401

@@ -42,7 +42,7 @@ def lgnd_test_data():
@pytest.fixture(scope="session")
def geds_raw_tbl(lgnd_test_data):
store = LH5Store()
- obj, _ = store.read_object(
+ obj, _ = store.read(
"/geds/raw",
lgnd_test_data.get_path("lh5/LDQTA_r117_20200110T105115Z_cal_geds_raw.lh5"),
n_rows=10,
@@ -53,7 +53,7 @@ def geds_raw_tbl(lgnd_test_data):
@pytest.fixture(scope="session")
def spms_raw_tbl(lgnd_test_data):
store = LH5Store()
- obj, _ = store.read_object(
+ obj, _ = store.read(
"/ch0/raw",
lgnd_test_data.get_path("lh5/L200-comm-20211130-phy-spms.lh5"),
n_rows=10,
4 changes: 2 additions & 2 deletions tests/processors/test_histogram.py
@@ -1,6 +1,6 @@
import os

- import lgdo.lh5_store as store
+ from lgdo import lh5

from dspeed import build_dsp

@@ -28,7 +28,7 @@ def test_histogram_fixed_width(lgnd_test_data, tmptestdir):
)
assert os.path.exists(dsp_file)

df = store.load_nda(dsp_file, ["hist_weights", "hist_borders"], "geds/dsp/")
df = lh5.load_nda(dsp_file, ["hist_weights", "hist_borders"], "geds/dsp/")

assert len(df["hist_weights"][0]) + 1 == len(df["hist_borders"][0])
for i in range(2, len(df["hist_borders"][0])):
4 changes: 2 additions & 2 deletions tests/test_build_dsp.py
@@ -3,7 +3,7 @@

import lgdo
import pytest
- from lgdo.lh5_store import LH5Store, ls
+ from lgdo.lh5 import LH5Store, ls

from dspeed import build_dsp

@@ -72,7 +72,7 @@ def test_build_dsp_spms_channelwise(dsp_test_file_spm):
]

store = LH5Store()
- lh5_obj, n_rows = store.read_object("/ch0/dsp/energies", dsp_test_file_spm)
+ lh5_obj, n_rows = store.read("/ch0/dsp/energies", dsp_test_file_spm)
assert isinstance(lh5_obj, lgdo.ArrayOfEqualSizedArrays)
assert len(lh5_obj) == 5
assert len(lh5_obj.nda[0]) == 20
4 changes: 2 additions & 2 deletions tests/test_list_parsing.py
@@ -1,8 +1,8 @@
import os
from pathlib import Path

- import lgdo.lh5_store as store
import numpy as np
+ from lgdo import lh5

from dspeed import build_dsp

@@ -33,6 +33,6 @@ def test_list_parisng(lgnd_test_data, tmptestdir):
)
assert os.path.exists(dsp_file)

df = store.load_nda(dsp_file, ["wf_out"], "geds/dsp/")
df = lh5.load_nda(dsp_file, ["wf_out"], "geds/dsp/")

assert np.all(df["wf_out"][:] == np.array([7, 9, 11, 13, 15]))
6 changes: 3 additions & 3 deletions tests/test_numpy_constants_parsing.py
@@ -1,8 +1,8 @@
import os
from pathlib import Path

- import lgdo.lh5_store as store
import numpy as np
+ from lgdo import lh5

from dspeed import build_dsp

@@ -24,7 +24,7 @@ def test_build_dsp(lgnd_test_data, tmptestdir):

def test_numpy_math_constants_dsp(tmptestdir):
dsp_file = f"{tmptestdir}/LDQTA_r117_20200110T105115Z_cal_geds__numpy_test_dsp.lh5"
df = store.load_nda(dsp_file, ["timestamp", "calc1", "calc2", "calc3"], "geds/dsp/")
df = lh5.load_nda(dsp_file, ["timestamp", "calc1", "calc2", "calc3"], "geds/dsp/")

a1 = df["timestamp"] - df["timestamp"] - np.pi * df["timestamp"]
a2 = df["timestamp"] - df["timestamp"] - np.pi
@@ -41,7 +41,7 @@ def test_numpy_math_constants_dsp(tmptestdir):

def test_numpy_infinity_and_nan_dsp(tmptestdir):
dsp_file = f"{tmptestdir}/LDQTA_r117_20200110T105115Z_cal_geds__numpy_test_dsp.lh5"
df = store.load_nda(dsp_file, ["calc4", "calc5", "calc6"], "geds/dsp/")
df = lh5.load_nda(dsp_file, ["calc4", "calc5", "calc6"], "geds/dsp/")

assert (np.isnan(df["calc4"])).all()
assert (np.isneginf(df["calc5"])).all()
