Move dataset reporting out of HDF writer into DatasetReportingPart

coretl committed Oct 10, 2016
1 parent a1b811c commit 7fdb94e
Showing 4 changed files with 109 additions and 32 deletions.
38 changes: 38 additions & 0 deletions malcolm/parts/ADCore/datasetreportingpart.py
@@ -0,0 +1,38 @@
from collections import OrderedDict, namedtuple

from malcolm.core import Part, Table
from malcolm.core.vmetas import StringArrayMeta, ChoiceArrayMeta, TableMeta
from malcolm.controllers.runnablecontroller import RunnableController

# Make a table for the dataset info we produce
columns = OrderedDict()
columns["name"] = StringArrayMeta("Dataset name")
columns["filename"] = StringArrayMeta("Filename of HDF file")
columns["type"] = ChoiceArrayMeta("Type of dataset", ["primary", "additional"])
columns["path"] = StringArrayMeta("Dataset path within HDF file")
columns["uniqueid"] = StringArrayMeta("UniqueID array path within HDF file")
dataset_table_meta = TableMeta("Datasets produced in HDF file", columns=columns)

# Produced by plugins in part_info
DatasetProducedInfo = namedtuple("DatasetProducedInfo", columns)


class DatasetReportingPart(Part):
    # Created attributes
    datasets = None

    def create_attributes(self):
        for data in super(DatasetReportingPart, self).create_attributes():
            yield data
        self.datasets = dataset_table_meta.make_attribute()
        yield "datasets", self.datasets, None

    @RunnableController.PostConfigure
    def update_datasets_table(self, _, part_info):
        # Update the dataset table
        datasets_table = Table(dataset_table_meta)
        for dataset_infos in part_info.values():
            for dataset_info in dataset_infos:
                row = list(dataset_info)
                datasets_table.append(row)
        self.datasets.set_value(datasets_table)
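Worth noting how the new part consumes part_info: each value is a list of DatasetProducedInfo namedtuples, and because the namedtuple's fields are built from the same columns OrderedDict as the table meta, list(dataset_info) yields the fields in table-column order, which is why the row append above works. A minimal sketch, assuming a malcolm checkout on the path; the values are illustrative ones borrowed from the tests below:

from malcolm.parts.ADCore.datasetreportingpart import DatasetProducedInfo

# Illustrative values only; field order matches the table columns above
info = DatasetProducedInfo(
    name="det", filename="fn1", type="primary",
    path="/p/det", uniqueid="/p/uid")
assert list(info) == ["det", "fn1", "primary", "/p/det", "/p/uid"]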
48 changes: 20 additions & 28 deletions malcolm/parts/ADCore/hdfwriterpart.py
@@ -1,27 +1,19 @@
import os
from xml.etree import cElementTree as ET
-from collections import OrderedDict, namedtuple
+from collections import namedtuple

from malcolm.compat import et_to_string
-from malcolm.core import method_takes, REQUIRED, Task, Table
-from malcolm.core.vmetas import BooleanMeta, StringMeta, PointGeneratorMeta, \
-    StringArrayMeta, ChoiceArrayMeta, TableMeta
+from malcolm.core import method_takes, REQUIRED, Task
+from malcolm.core.vmetas import BooleanMeta, StringMeta, PointGeneratorMeta
from malcolm.parts.builtin.layoutpart import LayoutPart
from malcolm.controllers.runnablecontroller import RunnableController
+from malcolm.parts.ADCore.datasetreportingpart import DatasetProducedInfo

+SUFFIXES = "NXY3456789"

-# Make a table for the dataset info we produce
-columns = OrderedDict()
-columns["name"] = StringArrayMeta("Dataset name")
-columns["filename"] = StringArrayMeta("Filename of HDF file")
-columns["type"] = ChoiceArrayMeta("Type of dataset", ["primary", "additional"])
-columns["path"] = StringArrayMeta("Dataset path within HDF file")
-columns["uniqueid"] = StringArrayMeta("UniqueID array path within HDF file")
-dataset_table_meta = TableMeta("Datsets produced in HDF file", columns=columns)
-SUFFIXES = "NXY3456789"

# Produced by plugins in part_info
-DatasetInfo = namedtuple("DatasetInfo", "name,type")
+DatasetSourceInfo = namedtuple("DatasetSourceInfo", "name,type")

class HDFWriterPart(LayoutPart):
@@ -33,22 +25,19 @@ class HDFWriterPart(LayoutPart):
    array_future = None
    done_when_reaches = 0

-    def create_attributes(self):
-        for data in super(HDFWriterPart, self).create_attributes():
-            yield data
-        self.datasets = dataset_table_meta.make_attribute()
-        yield "datasets", self.datasets, None
-
-    def _update_datasets_table(self, part_info, file_path):
+    def _create_dataset_infos(self, part_info, file_path):
        # Update the dataset table
-        datasets = Table(dataset_table_meta)
+        ret = []
        for dataset_infos in part_info.values():
            for dataset_info in dataset_infos:
                path = "/entry/%s/%s" % (dataset_info.name, dataset_info.name)
-                datasets.append([
-                    dataset_info.name, file_path, dataset_info.type, path,
-                    "/entry/NDAttributes/NDArrayUniqueId"])
-        self.datasets.set_value(datasets)
+                ret.append(DatasetProducedInfo(
+                    name=dataset_info.name,
+                    filename=file_path,
+                    type=dataset_info.type,
+                    path=path,
+                    uniqueid="/entry/NDAttributes/NDArrayUniqueId"))
+        return ret

    @RunnableController.Configuring
    @method_takes(
@@ -73,7 +62,6 @@ def configure(self, task, completed_steps, steps_to_do, part_info, params):
        futures += self._set_file_path(task, params.filePath)
        futures += self._set_dimensions(task, params.generator)
        xml = self._make_layout_xml(params.generator, part_info)
-        self._update_datasets_table(part_info, params.filePath)
        futures += task.put_async(self.child["xml"], xml)
        # Wait for the previous puts to finish
        task.wait_all(futures)
@@ -87,6 +75,9 @@ def configure(self, task, completed_steps, steps_to_do, part_info, params):
        # Start a future waiting for the first array
        self.array_future = task.when_matches_async(
            self.child["arrayCounter"], 1)
+        # Return the dataset information
+        dataset_infos = self._create_dataset_infos(part_info, params.filePath)
+        return dataset_infos
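The behavioural change in this hunk: configure() no longer fills in a local datasets attribute but returns the DatasetProducedInfo list. Presumably the RunnableController collects each part's return value into the part_info dict, keyed by part name, that later hooks such as DatasetReportingPart.update_datasets_table receive. A hedged sketch of that aggregation; gather_part_info is a made-up helper, not controller code from this commit:

from malcolm.parts.ADCore.datasetreportingpart import DatasetProducedInfo

def gather_part_info(hook_returns):
    # hook_returns: {part_name: whatever that part's hook returned}
    # Keep only the parts that actually reported dataset infos
    return dict((name, infos)
                for name, infos in hook_returns.items() if infos)

hdf_infos = [DatasetProducedInfo(
    "det", "fn1", "primary", "/entry/det/det",
    "/entry/NDAttributes/NDArrayUniqueId")]
part_info = gather_part_info({"HDF": hdf_infos, "DET": None})
assert list(part_info) == ["HDF"]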

    @RunnableController.Running
    def run(self, task, update_completed_steps):
@@ -98,6 +89,7 @@ def run(self, task, update_completed_steps):
        id_ = task.subscribe(self.child["uniqueId"], update_completed_steps)
        # TODO: what happens if we miss the last frame?
        task.when_matches(self.child["uniqueId"], self.done_when_reaches)
+        # TODO: why do we need this? Tasks should have been recreated...
        task.unsubscribe(id_)

    @RunnableController.PostRun
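The hook decorators used in these files (RunnableController.Configuring, Running and PostRun above, plus PostConfigure in datasetreportingpart.py) attach part methods to controller phases. A purely illustrative sketch of the general registering-decorator pattern, not the actual RunnableController implementation:

def make_hook(phase):
    # Tag the decorated method with its phase; a controller can then
    # scan a part's methods for this tag and call them at that phase
    def decorator(func):
        func.hook_phase = phase
        return func
    return decorator

PostConfigure = make_hook("PostConfigure")

class MyPart(object):
    @PostConfigure
    def update_datasets_table(self, task, part_info):
        return part_info

assert MyPart.update_datasets_table.hook_phase == "PostConfigure"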
48 changes: 48 additions & 0 deletions tests/test_parts/test_ADCore/test_datasetreportingpart.py
@@ -0,0 +1,48 @@
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import setup_malcolm_paths

import unittest
from mock import MagicMock

from malcolm.parts.ADCore.datasetreportingpart import DatasetReportingPart, \
    DatasetProducedInfo


class TestDatasetReportingPart(unittest.TestCase):

    def setUp(self):
        self.process = MagicMock()
        self.child = MagicMock()
        self.params = MagicMock()
        self.o = DatasetReportingPart(self.process, self.params)
        list(self.o.create_attributes())

    def test_init(self):
        self.assertEqual(self.o.datasets.meta.elements.endpoints,
                         ["name", "filename", "type", "path", "uniqueid"])

    def test_post_configure(self):
        task = MagicMock()
        part_info = dict(
            HDF=[
                DatasetProducedInfo(
                    "det", "fn1", "primary", "/p/det", "/p/uid"),
                DatasetProducedInfo(
                    "stat", "fn1", "additional", "/p/s1", "/p/uid"),
                DatasetProducedInfo(
                    "stat", "fn1", "additional", "/p/s2", "/p/uid"),
            ]
        )
        self.o.update_datasets_table(task, part_info)
        v = self.o.datasets.value
        self.assertEqual(v.name, ["det", "stat", "stat"])
        self.assertEqual(v.filename, ["fn1", "fn1", "fn1"])
        self.assertEqual(v.type, ["primary", "additional", "additional"])
        self.assertEqual(v.path, ["/p/det", "/p/s1", "/p/s2"])
        self.assertEqual(v.uniqueid, ["/p/uid", "/p/uid", "/p/uid"])


if __name__ == "__main__":
    unittest.main(verbosity=2)
7 changes: 3 additions & 4 deletions tests/test_parts/test_ADCore/test_hdfwriterpart.py
@@ -6,7 +6,7 @@
import unittest
from mock import Mock, MagicMock, call, ANY

-from malcolm.parts.ADCore.hdfwriterpart import HDFWriterPart, DatasetInfo
+from malcolm.parts.ADCore.hdfwriterpart import HDFWriterPart, DatasetSourceInfo

from scanpointgenerator import LineGenerator, CompoundGenerator, SpiralGenerator

@@ -24,7 +24,6 @@ def getitem(name):
        self.child.__getitem__.side_effect = getitem

        self.params = MagicMock()
-        self.params.merit_attr = "StatsMean"
        self.process.get_block.return_value = self.child
        self.o = HDFWriterPart(self.process, self.params)
        list(self.o.create_attributes())
@@ -43,8 +42,8 @@ def test_configure(self):
        completed_steps = 0
        steps_to_do = 38
        part_info = {
-            "DET": [DatasetInfo("detector", "primary")],
-            "STAT": [DatasetInfo("StatsTotal", "additional")],
+            "DET": [DatasetSourceInfo("detector", "primary")],
+            "STAT": [DatasetSourceInfo("StatsTotal", "additional")],
        }
        self.o.configure(task, completed_steps, steps_to_do, part_info, params)
        self.assertEqual(task.put.call_args_list, [
