Skip to content

Commit

Permalink
From databroker (#999)
Browse files Browse the repository at this point in the history
* Initial implementation of from_databroker

* Put from_databroker in docs toctree

* docstring

* Changelog

* dev requirements

* correct uid source

* `__all__`
  • Loading branch information
ksunden committed Apr 24, 2021
1 parent 25422da commit b1f5702
Show file tree
Hide file tree
Showing 7 changed files with 82 additions and 1 deletion.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).

## [Unreleased]

### Added
- `from_databroker` method to import Data objects from databroker catalogs

### Fixed
- Avoid passing both `vmin/vmax` and `norm` to `pcolor*` methods

Expand Down
2 changes: 2 additions & 0 deletions WrightTools/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from ._brunold import *
from ._colors import *
from ._data import *
from ._databroker import *
from ._jasco import *
from ._kent import *
from ._aramis import *
Expand All @@ -32,6 +33,7 @@
# From methods in alphabetic order
"from_BrunoldrRaman",
"from_COLORS",
"from_databroker",
"from_JASCO",
"from_KENT",
"from_Aramis",
Expand Down
39 changes: 39 additions & 0 deletions WrightTools/data/_databroker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
__all__ = ["from_databroker"]

from ._data import Data


def from_databroker(run, dataset="primary"):
    """Import a dataset from a bluesky run into the WrightTools Data format.

    Parameters
    ----------
    run: BlueskyRun
        The bluesky run as returned by e.g. catalog["<uid>"]
    dataset: str
        The string identifier of the stream to import from the bluesky run.
        By default "primary" is used, but e.g. "baseline" is also common

    Returns
    -------
    Data
        A new Data object named after the run's uid.  Fields that match a
        declared detector become channels; all other fields become variables.
    """
    describe = run.describe()
    md = describe["metadata"]
    start = md["start"]
    ds = run[dataset].read()
    # Fall back to a flat 1D shape when the plan did not record one.
    shape = start.get("shape", (len(ds.time),))

    detectors = start.get("detectors", [])

    data = Data(name=start["uid"])
    for var in ds:
        # Skip bookkeeping fields that carry no scientific information.
        if var == "uid":
            continue
        if var.endswith("_busy"):
            continue
        if any(var.startswith(d) for d in detectors):
            data.create_channel(var, values=ds[var].data.reshape(shape))
        else:
            # TODO units, once they are in the dataset metadata
            data.create_variable(var, values=ds[var].data.reshape(shape))

    # Use the plan's dimension hints (when present) to pick the natural axes.
    # Runs without hints are left untransformed rather than raising KeyError.
    dimensions = start.get("hints", {}).get("dimensions", [])
    transform = [x[0] for x, ds_name in dimensions if ds_name == dataset]
    if transform:
        data.transform(*transform)
    return data
1 change: 1 addition & 0 deletions docs/api/WrightTools.data.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ WrightTools\.data package
join
from_BrunoldrRaman
from_COLORS
from_databroker
from_JASCO
from_KENT
from_Aramis
Expand Down
11 changes: 10 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,16 @@ def read(fname):
],
extras_require={
"docs": docs_require,
"dev": ["black", "pre-commit", "pydocstyle", "pytest", "pytest-cov"] + docs_require,
"dev": [
"black",
"pre-commit",
"pydocstyle",
"pytest",
"pytest-cov",
"databroker>=1.2",
"msgpack",
]
+ docs_require,
},
version=version,
description="Tools for loading, processing, and plotting multidimensional spectroscopy data.",
Expand Down
Binary file not shown.
27 changes: 27 additions & 0 deletions tests/data/from_databroker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import pathlib

from databroker.v2 import temp
import msgpack

import WrightTools as wt

# Build a throwaway databroker catalog and populate it with the serialized
# bluesky documents stored alongside this test module.
catalog = temp()

__here__ = pathlib.Path(__file__).parent

data_dir = __here__ / "bluesky_data"
for msgpack_file in data_dir.iterdir():
    with msgpack_file.open("rb") as stream:
        for document in msgpack.Unpacker(stream):
            catalog.v1.insert(*document)


def test_2d_data():
    """Round-trip a known 2D grid scan through from_databroker."""
    uid = "3dbdd402-434b-4aac-b004-447a2f026d73"
    run = catalog[uid]
    data = wt.data.from_databroker(run)
    # One variable per motor field, one channel for the detector.
    assert data.shape == (10, 11)
    assert "d1_readback" in data
    assert "d1_setpoint" in data
    assert data.channel_names == ("daq_random_walk",)
    assert data.d1_readback.min() == 0
    assert data.d1_readback.max() == 1

0 comments on commit b1f5702

Please sign in to comment.