Merge pull request #1051 from wright-group/from_LabRAM
from_LabRAM
kameyer226 committed Apr 11, 2022
2 parents c19ded0 + fd8ce48 commit ebd00d4
Showing 11 changed files with 213 additions and 8 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -7,8 +7,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).

### Added
- Invalid `unit` conversions now throw a `pint` error.
- `data.from_LabRAM`: import Horiba LabRAM txt files

### Fixed
- docs: updated the from_JASCO example in the from-method documentation
- `data.from_Solis`: import works without metadata
- `unit` conversions of `None` to `None` no longer throw a warning.
- better error messages for some functions
2 changes: 2 additions & 0 deletions WrightTools/data/__init__.py
@@ -14,6 +14,7 @@
from ._data import *
from ._databroker import *
from ._jasco import *
from ._labram import *
from ._kent import *
from ._aramis import *
from ._ocean_optics import *
@@ -36,6 +37,7 @@
"from_databroker",
"from_JASCO",
"from_KENT",
"from_LabRAM",
"from_Aramis",
"from_ocean_optics",
"from_PyCMDS",
157 changes: 157 additions & 0 deletions WrightTools/data/_labram.py
@@ -0,0 +1,157 @@
# --- import --------------------------------------------------------------------------------------


import os
import pathlib
import warnings
import time

import numpy as np

from ._data import Data
from .. import exceptions as wt_exceptions
from ..kit import _timestamp as timestamp


# --- define --------------------------------------------------------------------------------------


__all__ = ["from_LabRAM"]


# --- from function -------------------------------------------------------------------------------


def from_LabRAM(filepath, name=None, parent=None, verbose=True) -> Data:
"""Create a data object from Horiba LabRAM txt file.
Parameters
----------
filepath : path-like
Path to txt file.
Can be either a local or remote file (http/ftp).
Can be compressed with gz/bz2, decompression based on file name.
name : string (optional)
Name to give to the created data object. If None, name is extracted from file.
Default is None.
parent : WrightTools.Collection (optional)
Collection to place new data object within. Default is None.
verbose : boolean (optional)
Toggle talkback. Default is True.

Returns
-------
data
New data object(s).
"""
# parse filepath
filestr = os.fspath(filepath)
filepath = pathlib.Path(filepath)

if not ".txt" in filepath.suffixes:
wt_exceptions.WrongFileTypeWarning.warn(filepath, ".txt")
# parse name
if not name:
name = filepath.name.split(".")[0]

kwargs = {"name": name, "kind": "Horiba", "source": filestr}

# create data
if parent is None:
data = Data(**kwargs)
else:
data = parent.create_data(**kwargs)

ds = np.DataSource(None)
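# np.DataSource(None) lets ds.open below handle local paths, http/ftp URLs,
# and gz/bz2-compressed files; ISO-8859-1 covers the non-ASCII characters
# (e.g. the superscript in "cm-¹") found in LabRAM headers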
f = ds.open(filestr, "rt", encoding="ISO-8859-1")

# header
header = {}
while True:
line = f.readline()
if not line.startswith("#"):
wm = np.array([np.nan if i == "" else float(i) for i in line.split("\t")])
break
key, val = [s.strip() for s in line[1:].split("=", 1)]
header[key] = val

if not header:
raise NotImplementedError(
"At this time, we require metadata to parse LabRAM data. \
Consider manually importing this data."
)

# extract key metadata
created = header["Acquired"]
created = time.strptime(created, "%d.%m.%Y %H:%M:%S")
created = timestamp.TimeStamp(time.mktime(created)).RFC3339
data.attrs["created"] = created
data.attrs.update()

try:
acquisition_time = float(header["Acq. time (s)"]) * int(header["Accumulations"])
channel_units = "cps"
except KeyError:
warnings.warn(f"{filepath.name}: could not determine signal acquisition time.")
acquisition_time = 1
channel_units = None
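# without timing metadata the signal is left as raw counts; otherwise it is
# divided by (acquisition time * accumulations) below to report counts per second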

# spectral units
k_spec = [k for k in header.keys() if k.startswith("Spectro") or k.startswith("Range (")][0]
if "cm-¹" in k_spec:
spectral_units = "wn"
elif "nm" in k_spec:
spectral_units = "nm"
else:
warnings.warn(f"spectral units are unrecognized: {k_spec}")
spectral_units = None

# dimensionality
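# the first data row holds the spectral axis, preceded by one empty cell per
# spatial column; those empty cells parsed as NaN above, so the NaN count
# gives the number of extra (non-spectral) dimensions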
extra_dims = np.isnan(wm).sum()

if extra_dims == 0: # single spectrum; we extracted wm wrong, so go back in file
f.seek(0)
wm, arr = np.genfromtxt(f, delimiter="\t", unpack=True)
f.close()
data.create_variable("wm", values=wm, units=spectral_units)
data.create_channel("signal", values=arr / acquisition_time, units=channel_units)
data.transform("wm")
else:
arr = np.genfromtxt(f, delimiter="\t")
f.close()
wm = wm[extra_dims:]

if extra_dims == 1: # spectrum vs (x or survey)
data.create_variable("wm", values=wm[:, None], units=spectral_units)
data.create_channel(
"signal", values=arr[:, 1:].T / acquisition_time, units=channel_units
)
x = arr[:, 0]
if np.all(x == np.arange(x.size) + 1): # survey
data.create_variable("index", values=x[None, :])
data.transform("wm", "index")
else: # x
data.create_variable("x", values=x[None, :], units="um")
data.transform("wm", "x")
elif extra_dims == 2: # spectrum vs x vs y
# fold to 3D
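# each row of arr is one (x, y) position: column 0 is x, column 1 is y, and
# the remaining columns are that position's spectrum; the reshape assumes
# rows are grouped by x (x stepped as the outer loop).  y can drift slightly
# between x columns, so its mean along x ("y_points") is used as the plotting axis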
x = sorted(
set(arr[:, 0]), reverse=arr[0, 0] > arr[-1, 0]
) # 0th column is stepped always (?)
x = np.array(list(x))
x = x.reshape(1, -1, 1)
y = arr[:, 1].reshape(1, x.size, -1)
ypts = y.mean(axis=1).reshape(1, 1, -1)
sig = arr[:, 2:].T.reshape(wm.size, x.size, -1) # TODO: test fold
data.create_variable("wm", values=wm[:, None, None], units=spectral_units)
data.create_variable("x", values=x, units="um")
data.create_variable("y", values=y, units="um")
data.create_variable("y_points", values=ypts, units="um")
data.create_channel("signal", values=sig / acquisition_time, units=channel_units)
data.transform("wm", "x", "y_points")

if verbose:
data.print_tree()
print(" kind: {0}".format(data.kind))

return data
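
A minimal usage sketch, assuming a local LabRAM export named `raman.txt` (the filename is illustrative, not one of the bundled datasets):

import WrightTools as wt

d = wt.data.from_LabRAM("raman.txt")  # spectrum, linescan, survey, or map txt export
d.print_tree()   # a map acquisition transforms to ("wm", "x", "y_points")
d.convert("wn")  # the spectral axis converts between nm and wn like any other axis
d.close()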
Binary file added WrightTools/datasets/LabRAM/map_nm.txt.gz
Binary file added (filename not shown)
Binary file added WrightTools/datasets/LabRAM/spectrum_nm.txt.gz
Binary file added WrightTools/datasets/LabRAM/spectrum_wn.txt.gz
Binary file added WrightTools/datasets/LabRAM/survey_nm.txt.gz
3 changes: 3 additions & 0 deletions WrightTools/datasets/__init__.py
@@ -69,6 +69,9 @@ def _from_directory(self, dirname, prefix=""):
KENT._from_directory(here / "KENT" / "LDS821 DOVE")
KENT._from_directory(here / "KENT" / "PbSe 2D delay B")

LabRAM = DatasetContainer()
LabRAM._from_files(here / "LabRAM")

ocean_optics = DatasetContainer()
ocean_optics._from_files("ocean_optics")

18 changes: 10 additions & 8 deletions docs/write_from_function.rst
@@ -34,15 +34,15 @@ We will walk through by way of example, using :meth:`~WrightTools.data.from_JASCO`
# --- define ---------------------------------------------------------------
__all__ = ["from_JASCO"]
# --- from function --------------------------------------------------------
-def from_JASCO(filepath, name=None, parent=None, *, verbose=True):
+def from_JASCO(filepath, name=None, parent=None, verbose=True) -> Data:
"""Create a data object from JASCO UV-Vis spectrometers.
Parameters
----------
filepath : path-like
Path to .txt file.
Can be either a local or remote file (http/ftp).
Can be compressed with gz/bz2, decompression based on file name.
name : string (optional)
Name to give to the created data object. If None, filename is used.
Default is None.
@@ -57,13 +57,14 @@ We will walk through by way of example, using :meth:`~WrightTools.data.from_JASCO`
New data object(s).
"""
# parse filepath
-filestr = ps.fspath(filepath)
+filestr = os.fspath(filepath)
filepath = pathlib.Path(filepath)
if not ".txt" in filepath.suffixes:
wt_exceptions.WrongFileTypeWarning.warn(filepath, ".txt")
# parse name
if not name:
-name = os.path.basename(filepath).split(".")[0]
+name = filepath.name.split(".")[0]
# create data
kwargs = {"name": name, "kind": "JASCO", "source": filestr}
if parent is None:
@@ -75,7 +76,8 @@ We will walk through by way of example, using :meth:`~WrightTools.data.from_JASCO`
f = ds.open(filestr, "rt")
arr = np.genfromtxt(f, skip_header=18).T
f.close()
-# add variable and channels
+# chew through all scans
data.create_variable(name="energy", values=arr[0], units="nm")
data.create_channel(name="signal", values=arr[1])
data.transform("energy")
@@ -121,7 +123,7 @@ Check out the existing examples for formatting, such as the example from :meth:`~WrightTools.data.from_JASCO`

.. code-block:: python
-def from_JASCO(filepath, name=None, parent=None, *, verbose=True):
+def from_JASCO(filepath, name=None, parent=None, verbose=True) -> Data:
"""Create a data object from JASCO UV-Vis spectrometers.
Parameters
39 changes: 39 additions & 0 deletions tests/data/from_LabRAM.py
@@ -0,0 +1,39 @@
import WrightTools as wt
from WrightTools.datasets import LabRAM


def test_spectral_units():
d = wt.data.from_LabRAM(LabRAM.spectrum_nm)
assert d.wm.units == "nm"
d.close()
d = wt.data.from_LabRAM(LabRAM.spectrum_wn)
assert d.wm.units == "wn"
d.close()


def test_import_1D():
d = wt.data.from_LabRAM(LabRAM.spectrum_nm)
d.close()


def test_import_2D():
d = wt.data.from_LabRAM(LabRAM.raman_linescan)
d.close()


def test_import_2D_survey():
d = wt.data.from_LabRAM(LabRAM.survey_nm)
d.close()


def test_import_3D():
d = wt.data.from_LabRAM(LabRAM.map_nm)
d.close()


if __name__ == "__main__":
test_spectral_units()
test_import_1D()
test_import_2D()
test_import_2D_survey()
test_import_3D()
