diff --git a/.travis.yml b/.travis.yml
index fb7b2d8b3..0e290f93e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,20 +2,16 @@ sudo: false
language: python
python:
- "2.7"
- - "3.3"
- - "3.4"
- "3.5"
- "3.6"
env:
- - PYMALCOLM_USE_COTHREAD=YES
- - PYMALCOLM_USE_COTHREAD=NO
+ - PYMALCOLM_USE_COTHREAD=YES
+ - PYMALCOLM_USE_COTHREAD=NO
addons:
apt:
packages:
- - qt4-qmake
- - libqt4-dev
- libhdf5-serial-dev
cache:
@@ -28,13 +24,10 @@ install:
- env
- ls -al ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages
- ls -al ${VIRTUAL_ENV}/bin
- - ci/install_pyqt.sh
- - python -c "import PyQt4;print(PyQt4)"
- pip install "setuptools>=36"
- pip install -r requirements/test.txt
- pip install coveralls
- ls -al ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages
- - ldd ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/PyQt4/*.so
- ls -al ${VIRTUAL_ENV}/bin
- python setup.py bdist_egg
@@ -56,4 +49,4 @@ deploy:
tags: true
# As we are doing a source dist, only deploy for one python in matrix
python: "3.6"
- condition: $PYMALCOLM_USE_COTHREAD = YES
\ No newline at end of file
+ condition: $PYMALCOLM_USE_COTHREAD = YES
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index d0911a62c..1dbd3ca46 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -7,6 +7,9 @@ This project adheres to `Semantic Versioning `_ after 2-1.
`Unreleased`_
-------------
+Added:
+- Web gui (malcolmjs 0.4)
+
Changed:
- Nothing yet
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index e4e4c11d7..1cb1ec456 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -27,14 +27,6 @@ While 100% code coverage does not make a library bug-free, it significantly
reduces the number of easily caught bugs! Please make sure coverage is at 100%
before submitting a pull request!
-Code Quality
-------------
-
-For code quality, please run flake8::
-
- $ pip install flake8
- $ flake8 .
-
Code Styling
------------
Please arrange imports with the following style
@@ -52,7 +44,7 @@ Please arrange imports with the following style
Please follow `Google's python style`_ guide wherever possible.
-.. _Google's python style: http://google-styleguide.googlecode.com/svn/trunk/pyguide.html
+.. _Google's python style: https://google.github.io/styleguide/pyguide.html
Building the docs
-----------------
diff --git a/ci/install_pyqt.sh b/ci/install_pyqt.sh
deleted file mode 100755
index d5b6746b6..000000000
--- a/ci/install_pyqt.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/sh
-# sip
-SIP_VER=4.13.3
-SIP_DIR=sip-${SIP_VER}
-SIP_TAR=${SIP_DIR}.tar.gz
-# pyqt
-PYQT_VER=4.9.4
-PYQT_DIR=PyQt-x11-gpl-${PYQT_VER}
-PYQT_TAR=${PYQT_DIR}.tar.gz
-SITE_PACKAGES=${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages
-set -ex
-if [ ! -e ${SITE_PACKAGES}/PyQt4 ]; then
- # Install sip
- wget http://sourceforge.net/projects/pyqt/files/sip/${SIP_DIR}/${SIP_TAR}
- tar -xzf ${SIP_TAR}
- cd ${SIP_DIR}
- python ./configure.py \
- --bindir=$(pwd)/prefix/bin \
- --sipdir=$(pwd)/prefix/share/sip \
- --incdir=$(pwd)/prefix/include \
- --destdir=${SITE_PACKAGES}
- make -j 2
- make install
- cd ..
- # Install pyqt
- export PATH=$PATH:$(pwd)/${SIP_DIR}/prefix/bin
- wget http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-${PYQT_VER}/${PYQT_TAR}
- tar -xzf ${PYQT_TAR}
- rm ${PYQT_TAR}
- cd ${PYQT_DIR}
- mkdir prefix
- python ./configure.py \
- --confirm-license \
- --bindir=$(pwd)/prefix/bin \
- --destdir=${SITE_PACKAGES}
- make -j 2
- make install
- cd ..
-fi
diff --git a/docs/api/core_api.rst b/docs/api/core_api.rst
index 709ccb78c..dc40c979e 100644
--- a/docs/api/core_api.rst
+++ b/docs/api/core_api.rst
@@ -5,245 +5,3 @@ This is the core of Malcolm
.. automodule:: malcolm.core
:members:
-
-.. autoclass:: Alarm
- :members:
-
-.. autoclass:: AlarmSeverity
- :members:
-
-.. autoclass:: AlarmStatus
- :members:
-
-.. autoclass:: AttributeModel
- :members:
-
- An Attribute holds a value such as an Int, Float or Enum Table representing
- the current state of a block.
-
- Hold the current value of a piece of data of a fixed simple type
- like Int, Float, String, Enum, IntArray or Table. You can Get and Subscribe to
- changes in all Attributes, and Put to Attributes with a defined setter. In a
- client Block, Attributes will mirror the value of the Block acting as a
- server, with a Put operation being forwarded to the server Block. For example,
- the State of a Block would be an Attribute, as would the CurrentStep of a
- scan.
-
- Subclasses serialize differently.
-
-.. autoclass:: Attribute
- :members:
-
- An Attribute holds a value such as an Int, Float or Enum Table representing
- the current state of a block.
-
- Hold the current value of a piece of data of a fixed simple type
- like Int, Float, String, Enum, IntArray or Table. You can Get and Subscribe to
- changes in all Attributes, and Put to Attributes with a defined setter. In a
- client Block, Attributes will mirror the value of the Block acting as a
- server, with a Put operation being forwarded to the server Block. For example,
- the State of a Block would be an Attribute, as would the CurrentStep of a
- scan.
-
- Subclasses serialize differently.
-
-.. autoclass:: BlockMeta
- :members:
-
-.. autoclass:: Block
- :members:
-
- An object consisting of a number of Attributes and Methods.
-
-
-
- It should
- be designed to be as small and self contained as possible, and complex logic
- should be implemented by nesting Blocks. For example, a detector driver would
- be a Block, as would an HDF writer, but there would also be a higher level
- detector Block to co-ordinate the low level Blocks. Any Block may be
- synchronized among a number of Processes, the Block acting as the server will
- perform the logic, and the client copies will expose the same API as the
- server Block to the end user.
-
-.. autoclass:: BlockModel
- :members:
-
-.. autoclass:: Context
- :members:
-
-.. autoclass:: Controller
- :members:
-
- Controller: A State Machine just exposes the list of allowed transitions
- between StatefulStates. The Controller provides the logic that goes behind those
- transitions. It creates a number of Methods fixing the external interface of
- how to control the blocks, creates some Attributes for monitoring
- configuration and runtime variables, and then exposes a number of hooks that
- Parts can utilise to be executed and control transition to other states. For
- example, there will be an AreaDetectorController with hooks for
- PreRunDriverStart, PreRunPluginStart, and Running.
-
- A Controller implements the logic for changing states and contains Hooks for
- allow Parts to run any functions that are relevant to the current transition
-
-.. autoexception:: TimeoutError
-
-.. autoexception:: AbortedError
-
-.. autoexception:: ResponseError
-
-.. autoexception:: UnexpectedError
-
-.. autoexception:: BadValueError
-
-.. autoclass:: Future
- :members:
-
-.. autoclass:: Hook
- :members:
-
- Hooks are used to link a Parts' Methods to the relevant state transition of
- the controller:
-
-.. autoclass:: Info
- :members:
-
- Infos are used to report things from Hook runs
-
-.. autoclass:: Loggable
- :members:
-
-.. autoclass:: MapMeta
- :members:
-
-.. autoclass:: Map
- :members:
-
-.. autoclass:: Meta
- :members:
-
-.. autoclass:: MethodModel
- :members:
-
- A Method exposes a function call for a Block:
-
- Expose a function call. You can Call a Method with a (possibly empty)
- Map of arguments, and it will return a (possibly empty) Map of return values.
- In a client Block, the Call will be forwarded to the server Block, and the
- return value returned to the caller. For example, configure() and run() would
- be Methods of a Blocks used in a mapping scan.
-
-.. data:: REQUIRED
-
- Used to mark an argument in method_takes() or method_returns() as required
-
-.. data:: OPTIONAL
-
- Used to mark an argument in method_takes() or method_returns() as optional
-
-.. autofunction:: method_takes
-
-.. autofunction:: method_returns
-
-.. autoclass:: Method
- :members:
-
- A Method exposes a function call for a Block:
-
- Expose a function call. You can Call a Method with a (possibly empty)
- Map of arguments, and it will return a (possibly empty) Map of return values.
- In a client Block, the Call will be forwarded to the server Block, and the
- return value returned to the caller. For example, configure() and run() would
- be Methods of a Blocks used in a mapping scan.
-
-.. autoclass:: Part
- :members:
-
- These provide the logic for using a particular child Block with a
- particular Controller. It can register to use a number of hooks that the
- Controller provides, and the Controller will wait for all using that hook to
- run concurrently before moving to the next State. Parts can also create
- Attributes on the parent Block, as well as contribute Attributes that should
- be taken as arguments to Methods provided by the Controller. For example,
- there will be an HDFWriterPart that knows how to set PVs on the HDFWriter in
- the right order and expose the FilePath as an Attribute to the configure()
- method.
-
- A Part contains the logic for a Controller to interact with a child Block to
- perform more device-specific actions:
-
-
-.. autoclass:: Process
- :members:
-
- A Malcolm instance containing a number of Blocks along with various
- communication modules to communicate with other Malcolm instances. A Process
- can be a client of or a server to a number of other Processes.
-
- A Process is a host for Block instances and allows communication between them:
-
-
-
-.. autoclass:: Request
- :members:
-
- Request objects are used to interact with another block:
-
-.. autoclass:: Get
- :members:
-
-.. autoclass:: Put
- :members:
-
-.. autoclass:: Post
- :members:
-
-.. autoclass:: Subscribe
- :members:
-
-.. autoclass:: Unsubscribe
- :members:
-
-
-.. autoclass:: Response
- :members:
-
- Response objects are received when a requested action is complete:
-
-.. autoclass:: Return
- :members:
-
-.. autoclass:: Error
- :members:
-
-.. autoclass:: Update
- :members:
-
-.. autoclass:: Delta
- :members:
-
-
-.. autoclass:: Serializable
- :members:
-
- Objects that need to be sent over json implement the Serializable class:
-
-.. autoclass:: Spawned
- :members:
-
-
-.. autoclass:: StringArray
- :members:
-
-.. autoclass:: Table
- :members:
-
-.. autoclass:: TimeStamp
- :members:
-
-.. autoclass:: VArrayMeta
- :members:
-
-.. autoclass:: VMeta
- :members:
\ No newline at end of file
diff --git a/docs/api/tags_api.rst b/docs/api/tags_api.rst
deleted file mode 100644
index a4fb63016..000000000
--- a/docs/api/tags_api.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-malcolm.tags
-============
-
-Utility functions to help create tags. The meaning of the tags is documented
-in the `tags_reference` guide.
-
-.. automodule:: malcolm.tags
- :members:
diff --git a/docs/conf.py b/docs/conf.py
index 2cadc1dee..5411e0518 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# malcolm documentation build configuration file
-
+import inspect
import os
import re
import sys
@@ -32,10 +32,11 @@ def get_version():
require("ruamel.yaml")
from mock import MagicMock
+from annotypes import make_annotations
# Mock out failing imports
MOCK_MODULES = [
- "scanpointgenerator", "pvaccess", "plop", "plop.viewer", "h5py"]
+ "scanpointgenerator", "pvaccess", "plop", "plop.viewer", "h5py", "vdsgen"]
sys.modules.update((mod_name, MagicMock()) for mod_name in MOCK_MODULES)
@@ -47,32 +48,58 @@ def get_version():
# Autodoc event handlers
def skip_member(app, what, name, obj, skip, options):
- # Override @method_takes to always be documented
- if hasattr(obj, "MethodModel") and hasattr(obj.MethodModel, "takes") and \
- obj.MethodModel.takes.elements:
+ # Override @add_call_types to always be documented
+ if hasattr(obj, "call_types") or hasattr(obj, "return_type"):
return False
def process_docstring(app, what, name, obj, options, lines):
- # Add some documentation for @method_takes decorated members
- if hasattr(obj, "MethodModel") and hasattr(obj.MethodModel, "takes") and \
- obj.MethodModel.takes.elements:
- # Add a new docstring
- lines.append("params:")
- for param, vmeta in obj.MethodModel.takes.elements.items():
- lines.append(
- " - %s (%s):" % (param, vmeta.doc_type_string()))
- description = vmeta.description.strip()
- if not description[-1] in ".?!,":
- description += "."
- if param in obj.MethodModel.takes.required:
- default = "Required"
- elif param in obj.MethodModel.defaults:
- default = "Default=%r" % (obj.MethodModel.defaults[param],)
- else:
- default = "Optional"
- lines.append(" %s %s" % (description, default))
- lines.append("")
+ # Work out if we need to work out the call types and return types
+ needs_call_types = True
+ needs_return_type = True
+ for line in lines:
+ strip = line.strip()
+ if strip.startswith(":type"):
+ needs_call_types = False
+ elif strip.startswith(":rtype"):
+ needs_return_type = False
+ # If we have annotated with @add_call_types, or this is a WithCallTypes
+ # instance, and we need call_types and return_type, make them
+ if needs_call_types and hasattr(obj, "call_types"):
+ for k, anno in obj.call_types.items():
+ lines.append(":param %s: %s" % (k, anno.description))
+ typ = getattr(anno.typ, "__name__", None)
+ if typ:
+ lines.append(":type %s: %s" % (k, typ))
+ needs_call_types = False
+ if needs_return_type and hasattr(obj, "return_type"):
+ # If we have a return type and it isn't the object itself
+ rt = obj.return_type
+ if rt and rt.typ != obj:
+ typ = getattr(rt.typ, "__name__", None)
+ if typ:
+ # Don't include the return description if no type given
+ lines.append(":returns: %s" % rt.description)
+ lines.append(":rtype: %s" % typ)
+ needs_return_type = False
+ # If we have a type comment but no call_types or return_type, process it
+ if needs_call_types or needs_return_type:
+ if inspect.isclass(obj):
+ obj = obj.__init__
+ if inspect.isfunction(obj) or inspect.ismethod(obj):
+ try:
+ annotations = make_annotations(obj)
+ except Exception as e:
+ raise ValueError("Can't make annotations for %s, %s" % (obj, e))
+ else:
+ annotations = None
+ if annotations:
+ for k, v in annotations.items():
+ if k == "return":
+ if v and needs_return_type:
+ lines.append(":rtype: %s" % v)
+ elif needs_call_types:
+ lines.append(":type %s: %s" % (k, v))
def setup(app):
diff --git a/docs/contents.rst b/docs/contents.rst
index ce2aaa0c9..d90cb3a4a 100644
--- a/docs/contents.rst
+++ b/docs/contents.rst
@@ -34,7 +34,6 @@ Contents
:caption: API
api/core_api
- api/tags_api
build/modules_api
* :ref:`genindex`
diff --git a/docs/generate_api_docs.py b/docs/generate_api_docs.py
index 79543affc..841d39eae 100644
--- a/docs/generate_api_docs.py
+++ b/docs/generate_api_docs.py
@@ -43,15 +43,15 @@ def generate_docs():
documents = []
dirs = sorted(os.listdir(module_root))
# Make any parameters and defines docs
- for fname in ["parameters.py", "defines.py"]:
+ for fname in ["parameters.py", "defines.py", "hooks.py", "infos.py",
+ "util.py"]:
docname = "%s_api" % fname[:-3]
if fname in dirs and docname not in documents:
# Make document for module
section = "malcolm.modules.%s.%s" % (modulename, fname[:-3])
make_automodule_doc(section, docs_build)
documents.append(docname)
- for dirname in ["blocks", "includes", "controllers", "parts",
- "infos", "vmetas"]:
+ for dirname in ["blocks", "includes", "controllers", "parts"]:
docname = "%s_api" % dirname
if dirname in dirs and docname not in documents:
# Make document for module
@@ -69,8 +69,8 @@ def generate_docs():
def make_automodule_doc(section, docs_build):
docname = section.rsplit(".")[-1]
with open(os.path.join(docs_build, docname + "_api.rst"), "w") as f:
- f.write(docname + "\n")
- f.write("=" * len(docname) + "\n\n")
+ f.write(section + "\n")
+ f.write("=" * len(section) + "\n\n")
f.write(".. automodule:: %s\n" % section)
f.write(" :members:\n")
@@ -84,7 +84,7 @@ def make_index_doc(modulename, docs_build, doc_dirs):
f.write(" :maxdepth: 1\n")
f.write(" :caption: malcolm.modules.%s\n\n" % modulename)
for doc in doc_dirs:
- f.write(" %s\n" % doc)
+ f.write(" %s <%s>\n" % (doc[:-4], doc))
if __name__ == "__main__":
diff --git a/docs/reference/structure.rst b/docs/reference/structure.rst
index 0465e77fd..304712799 100644
--- a/docs/reference/structure.rst
+++ b/docs/reference/structure.rst
@@ -34,8 +34,7 @@ A Block looks like this::
malcolm:core/Block:1.0
BlockMeta meta
Attribute health // HealthMeta
- {Attribute }0+
- {Method }0+
+ {Attribute | Method }0+
BlockMeta :=
@@ -60,7 +59,7 @@ An Attribute looks like this::
Attribute := Scalar | ScalarArray | Table | PointGenerator
- NTScalar :=
+ Scalar :=
epics:nt/NTScalar:1.0 // Conformant but optional fields -> meta
scalar_t value
@@ -203,8 +202,6 @@ A Method looks like this::
malcolm:core/MapMeta:1.0
structure elements // Metadata for each element in map
{ArgumentMeta }0+
- string description // Description of what the map is for
- string[] tags :opt // e.g. "widget:group"
string[] required :opt // These fields will always be present
The ``takes`` structure describes the arguments that should be passed to the
diff --git a/docs/reference/tags.rst b/docs/reference/tags.rst
index 3438349c6..af0627377 100644
--- a/docs/reference/tags.rst
+++ b/docs/reference/tags.rst
@@ -10,10 +10,17 @@ widget
The gui widget.
-======================= ========================================================
+======================= ===========================================================
Tag Description
-======================= ========================================================
+======================= ===========================================================
widget:textinput Single line text input for writeable fields
widget:textupdate Single line label showing an updating field
-======================= ========================================================
+widget:led On/Off LED indicator
+widget:combo Select from a number of choice values
+widget:icon This field gives the URL for an icon for the whole Block
+widget:group Group node in a TreeView that other fields can attach to
+widget:table Table of rows. A list is a single column table
+widget:checkbox A box that can be checked or not
+widget:flowgraph Boxes with lines representing child blocks and connections
+======================= ===========================================================
diff --git a/docs/tutorials/areadetector.rst b/docs/tutorials/areadetector.rst
index 09ebcd500..a05423012 100644
--- a/docs/tutorials/areadetector.rst
+++ b/docs/tutorials/areadetector.rst
@@ -94,7 +94,7 @@ diagram:
.. note::
- There is a separation and hence and interface between `part_` and child
+ There is a separation and hence an interface between `part_` and child
`block_`. The interface goes in the child Block, and the logic goes in the
controlling Part. This is desirable because we could potentially have many
possible logic Parts that could control the same kind of child Block, and
@@ -240,7 +240,7 @@ when we `save()` the Device Block, it will write the current value of all these
Attributes of all its child Hardware Blocks to a JSON `design_` file.
The keen eyed will notice that the top level `RunnableController` has
-``configDir`` and ``initialDesign`` parameters. The first we set to
+``config_dir`` and ``initial_design`` parameters. The first we set to
``$(yamldir)/saved_designs`` which tells us where to save and load designs from.
The second we set to ``demo_design`` which is the name design we should load at
init.
diff --git a/docs/tutorials/scanning.rst b/docs/tutorials/scanning.rst
index bfaa1452e..07656d10d 100644
--- a/docs/tutorials/scanning.rst
+++ b/docs/tutorials/scanning.rst
@@ -173,7 +173,7 @@ Let's have a look at the Process definition
It looks quite similar to the one from the `generator_tutorial`, starting off
with the defines that are needed to talk to our simulated areaDetector plus
-a new one that defines a configDir variable that can be shared between our
+a new one that defines a config_dir variable that can be shared between our
Blocks. After that come the Blocks, with simDetector and ticker Blocks that we
have seen in previous tutorials, then our scan_block to sit on top.
diff --git a/examples/pandabox_onboard.py b/examples/pandabox_onboard.py
deleted file mode 100755
index 92a0c974a..000000000
--- a/examples/pandabox_onboard.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/env dls-python
-import os
-import sys
-import code
-
-from pkg_resources import require
-
-require("tornado", "numpy")
-sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-
-from malcolm.core import Process, call_with_params
-from malcolm.modules.web.controllers import HTTPServerComms
-from malcolm.modules.web.parts import WebsocketServerPart
-from malcolm.modules.pandablocks.controllers import PandABlocksManagerController
-
-# Input params
-HOSTNAME = "localhost"
-PORT = 8888
-WSPORT = 8080
-CONFIGDIR = "/tmp"
-
-# Make the top level objects
-process = Process("Process")
-
-# Add the websocket server
-part = call_with_params(WebsocketServerPart)
-controller = call_with_params(
- HTTPServerComms, process, [part], port=WSPORT, mri="WS")
-process.add_controller("WS", controller)
-
-# Add the PandABox
-controller = call_with_params(
- PandABlocksManagerController, process, [],
- configDir=CONFIGDIR, hostname=HOSTNAME, port=PORT, mri="P")
-process.add_controller("P", controller)
-
-# We daemonise the server by double forking, but we leave the controlling
-# terminal and other file connections alone.
-# Not working until we defer cothread import until later...
-if False:
- if os.fork():
- # Exit first parent
- sys.exit(0)
- # Do second fork to avoid generating zombies
- if os.fork():
- sys.exit(0)
-
-# Start the server
-process.start()
-
-# Wait for completion
-code.interact("Welcome to PandABox", local=locals())
-
-# TODO: why does this not shutdown cleanly? socket.shutdown not called right?
-process.stop(timeout=1)
diff --git a/malcolm/compat.py b/malcolm/compat.py
index 647ca72b1..5b7ce4f00 100644
--- a/malcolm/compat.py
+++ b/malcolm/compat.py
@@ -34,6 +34,7 @@
def et_to_string(element):
+ # type: (ET.Element) -> str
xml = ''
try:
xml += ET.tostring(element, encoding="unicode")
@@ -59,6 +60,20 @@ def get_pool_num_threads():
return num_threads
+# Exception handling from future.utils
+if sys.version_info < (3,):
+ exec('''
+def raise_with_traceback(exc, traceback=Ellipsis):
+ if traceback == Ellipsis:
+ _, _, traceback = sys.exc_info()
+ raise exc, None, traceback
+''')
+else:
+ def raise_with_traceback(exc, traceback=Ellipsis):
+ if traceback == Ellipsis:
+ _, _, traceback = sys.exc_info()
+ raise exc.with_traceback(traceback)
+
try:
# Python2
from thread import get_ident as get_thread_ident
diff --git a/malcolm/core/__init__.py b/malcolm/core/__init__.py
index b86038b7a..f094359b4 100644
--- a/malcolm/core/__init__.py
+++ b/malcolm/core/__init__.py
@@ -1,42 +1,35 @@
# Make a nice namespace
from .alarm import Alarm, AlarmSeverity, AlarmStatus
-from .attribute import Attribute
-from .attributemodel import AttributeModel
-from .block import Block
-from .blockmodel import BlockModel
-from .blockmeta import BlockMeta
from .context import Context
-from .controller import Controller, ABORT_TIMEOUT
+from .controller import Controller, AMri, ADescription, AUseCothread, \
+ ABORT_TIMEOUT
+from .define import Define
from .errors import AbortedError, BadValueError, TimeoutError, ResponseError, \
- UnexpectedError, YamlError
+ UnexpectedError, YamlError, FieldError
from .future import Future
from .hook import Hook
-from .importer import Importer
from .info import Info
from .loggable import Loggable
-from .map import Map
-from .mapmeta import MapMeta
-from .meta import Meta
-from .method import Method
-from .methodmodel import MethodModel, method_takes, method_returns, \
- method_writeable_in, REQUIRED, OPTIONAL, method_also_takes, \
- call_with_params, create_class_params
-from .ntscalar import NTScalar
-from .ntscalararray import NTScalarArray
-from .nttable import NTTable
-from .ntunion import NTUnion
-from .part import Part
-from .process import Process
+from .models import BlockModel, AttributeModel, MethodModel, \
+ BooleanArrayMeta, BooleanMeta, ChoiceArrayMeta, \
+ ChoiceMeta, NumberArrayMeta, NumberMeta, StringArrayMeta, StringMeta, \
+ TableMeta, VMeta, VArrayMeta, AMetaDescription, NTUnion
+from .moduleutil import submodule_all
+from .part import Part, PartRegistrar, APartName
+from .process import Process, ProcessPublishHook, ProcessStartHook, \
+ ProcessStopHook, APublished, UnpublishedInfo, UUnpublishedInfos
from .queue import Queue
-from .request import Request, Subscribe, Unsubscribe, Get, Put, Post
+from .request import Request, PathRequest, Subscribe, Unsubscribe, Get, Put, Post
from .response import Response, Delta, Update, Return, Error
from .serializable import Serializable, deserialize_object, serialize_object, \
- json_decode, json_encode, snake_to_camel, camel_to_title
+ json_decode, json_encode, snake_to_camel, camel_to_title, \
+ check_camel_case, serialize_hook
from .spawned import Spawned
-from .stringarray import StringArray
+from .stateset import StateSet
from .table import Table
+from .tags import Widget, Port, group_tag, config_tag, get_config_tag
from .timestamp import TimeStamp
-from .varraymeta import VArrayMeta
-from .vmeta import VMeta
+from .views import Attribute, Method, Block
-__all__ = ["Alarm", "AlarmSeverity", "AlarmStatus"]
+# Make a nice namespace
+__all__ = submodule_all(globals())
diff --git a/malcolm/core/alarm.py b/malcolm/core/alarm.py
index 96577b07d..d86593db8 100644
--- a/malcolm/core/alarm.py
+++ b/malcolm/core/alarm.py
@@ -1,51 +1,68 @@
+from enum import Enum
+
import numpy as np
+from annotypes import Anno
from malcolm.compat import str_
from .serializable import Serializable, deserialize_object
-def sort_names(d):
- name_d = dict((k, v) for k, v in d.items() if isinstance(v, int))
- return list(sorted(name_d, key=name_d.__getitem__))
-
-
-class AlarmSeverity(object):
+class AlarmSeverity(Enum):
+ """An alarm severity"""
NO_ALARM, MINOR_ALARM, MAJOR_ALARM, INVALID_ALARM, UNDEFINED_ALARM = \
- range(5)
- names = sort_names(locals())
+ np.arange(5, dtype=np.int32)
-class AlarmStatus(object):
+class AlarmStatus(Enum):
+ """An alarm status"""
NO_STATUS, DEVICE_STATUS, DRIVER_STATUS, RECORD_STATUS, DB_STATUS, \
- CONF_STATUS, UNDEFINED_STATUS, CLIENT_STATUS = range(8)
- names = sort_names(locals())
+ CONF_STATUS, UNDEFINED_STATUS, CLIENT_STATUS = \
+ np.arange(8, dtype=np.int32)
+
+
+with Anno("The alarm severity"):
+ AAlarmSeverity = AlarmSeverity
+with Anno("The alarm status"):
+ AAlarmStatus = AlarmStatus
+with Anno("A descriptive alarm message"):
+ AMessage = str
@Serializable.register_subclass("alarm_t")
class Alarm(Serializable):
+ """Model representing an alarm state with severity, status and message"""
+
+ __slots__ = ["severity", "status", "message"]
- endpoints = ["severity", "status", "message"]
- __slots__ = endpoints
-
- def __init__(self, severity=AlarmSeverity.NO_ALARM,
- status=AlarmStatus.NO_STATUS, message=""):
- # Set initial values
- assert int(severity) in range(len(AlarmSeverity.names)), \
- "Expected AlarmSeverity.*_ALARM, got %r" % severity
- self.severity = np.int32(severity)
- assert int(status) in range(len(AlarmStatus.names)), \
- "Expected AlarmStatus.*_STATUS, got %r" % status
- self.status = np.int32(status)
+ def __init__(self,
+ severity=AlarmSeverity.NO_ALARM, # type: AAlarmSeverity
+ status=AlarmStatus.NO_STATUS, # type: AAlarmStatus
+ message="", # type: AMessage
+ ):
+ # type: (...) -> None
+ if not isinstance(severity, AlarmSeverity):
+ severity = AlarmSeverity(severity)
+ self.severity = severity
+ if not isinstance(status, AlarmStatus):
+ status = AlarmStatus(status)
+ self.status = status
self.message = deserialize_object(message, str_)
@classmethod
def major(cls, message):
+ # type: (str) -> Alarm
return cls(
AlarmSeverity.MAJOR_ALARM, AlarmStatus.DEVICE_STATUS, message)
@classmethod
def invalid(cls, message):
+ # type: (str) -> Alarm
return cls(
AlarmSeverity.INVALID_ALARM, AlarmStatus.DEVICE_STATUS, message)
-Alarm.ok = Alarm()
\ No newline at end of file
+ def is_ok(self):
+ # type: () -> bool
+ return self.severity == AlarmSeverity.NO_ALARM
+
+
+Alarm.ok = Alarm()
diff --git a/malcolm/core/attribute.py b/malcolm/core/attribute.py
deleted file mode 100644
index cf6798ce0..000000000
--- a/malcolm/core/attribute.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from .view import View
-
-
-class Attribute(View):
- """Represents a value with type information that may be backed elsewhere"""
-
- def __init__(self, controller, context, data):
- self._do_init(controller, context, data)
-
- @property
- def meta(self):
- return self._controller.make_view(self._context, self._data, "meta")
-
- @property
- def value(self):
- return self._controller.make_view(self._context, self._data, "value")
-
- def put_value(self, value, timeout=None):
- """Put a value to the Attribute and wait for completion"""
- self._context.put(self._data.path + ["value"], value, timeout=timeout)
-
- def put_value_async(self, value):
- fs = self._context.put_async(self._data.path + ["value"], value)
- return fs
-
- @property
- def alarm(self):
- return self._controller.make_view(self._context, self._data, "alarm")
-
- @property
- def timeStamp(self):
- return self._controller.make_view(
- self._context, self._data, "timeStamp")
-
- def __repr__(self):
- return "<%s value=%r>" % (self.__class__.__name__, self.value)
diff --git a/malcolm/core/attributemodel.py b/malcolm/core/attributemodel.py
deleted file mode 100644
index 33ff8c164..000000000
--- a/malcolm/core/attributemodel.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from .model import Model
-from .serializable import deserialize_object
-from .alarm import Alarm
-from .timestamp import TimeStamp
-
-
-class AttributeModel(Model):
- """Data Model for an Attribute"""
-
- endpoints = ["meta", "value", "alarm", "timeStamp"]
-
- def __init__(self, meta, value=None, alarm=None, timeStamp=None):
- #: The `VMeta` for validating value sets
- self.meta = self.set_meta(meta)
- #: The current value of the attribute
- self.value = self.set_value(value, set_alarm_ts=False)
- #: The `Alarm` status associated with the value
- self.alarm = self.set_alarm(alarm)
- #: The `TimeStamp` that the value was last updated
- self.timeStamp = self.set_timeStamp(timeStamp)
-
- def set_notifier_path(self, notifier, path):
- super(AttributeModel, self).set_notifier_path(notifier, path)
- self.meta.set_notifier_path(notifier, self.path + ["meta"])
-
- def set_meta(self, meta):
- """Set the meta VMeta"""
- meta = deserialize_object(meta)
- # Check that the meta attribute_class is ourself
- assert hasattr(meta, "attribute_class"), \
- "Expected meta object, got %r" % meta
- assert isinstance(self, meta.attribute_class), \
- "Meta object needs to be attached to %s, we are a %s" % (
- meta.attribute_class, type(self))
- if hasattr(self, "meta"):
- self.meta.set_notifier_path(None, ())
- meta.set_notifier_path(self.notifier, self.path + ["meta"])
- return self.set_endpoint_data("meta", meta)
-
- def set_value(self, value, set_alarm_ts=True, alarm=None, ts=None):
- """Set the value"""
- value = self.meta.validate(value)
- if set_alarm_ts:
- if alarm is None:
- alarm = Alarm.ok
- else:
- alarm = deserialize_object(alarm, Alarm)
- if ts is None:
- ts = TimeStamp()
- else:
- ts = deserialize_object(ts, TimeStamp)
- self.set_value_alarm_ts(value, alarm, ts)
- else:
- self.set_endpoint_data("value", value)
- return self.value
-
- def set_value_alarm_ts(self, value, alarm, ts):
- """Set value with pre-validated alarm and timeStamp"""
- with self.notifier.changes_squashed:
- # Assume they are of the right format
- self.value = value
- self.notifier.add_squashed_change(self.path + ["value"], value)
- self.alarm = alarm
- self.notifier.add_squashed_change(self.path + ["alarm"], alarm)
- self.timeStamp = ts
- self.notifier.add_squashed_change(self.path + ["timeStamp"], ts)
-
- def set_alarm(self, alarm=None):
- """Set the Alarm"""
- if alarm is None:
- alarm = Alarm.ok
- else:
- alarm = deserialize_object(alarm, Alarm)
- return self.set_endpoint_data("alarm", alarm)
-
- def set_timeStamp(self, timeStamp=None):
- """Set the TimeStamp"""
- if timeStamp is None:
- timeStamp = TimeStamp()
- else:
- timeStamp = deserialize_object(timeStamp, TimeStamp)
- return self.set_endpoint_data("timeStamp", timeStamp)
diff --git a/malcolm/core/block.py b/malcolm/core/block.py
deleted file mode 100644
index 61475448e..000000000
--- a/malcolm/core/block.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from .methodmodel import MethodModel
-from .view import View, make_get_property
-
-
-class Block(View):
- """Object consisting of a number of Attributes and Methods"""
- @property
- def mri(self):
- return self._data.path[0]
-
- def _prepare_endpoints(self, data):
- for endpoint in data:
- if isinstance(data[endpoint], MethodModel):
- # Add _async versions of method
- self._make_async_method(endpoint)
- return super(Block, self)._prepare_endpoints(data)
-
- def _make_async_method(self, endpoint):
- def post_async(*args, **kwargs):
- child = getattr(self, endpoint)
- return child.post_async(*args, **kwargs)
-
- object.__setattr__(self, "%s_async" % endpoint, post_async)
-
- def put_attribute_values_async(self, params):
- futures = []
- if type(params) is dict:
- # If we have a plain dictionary, then sort items
- items = sorted(params.items())
- else:
- # Assume we are already ordered
- items = params.items()
- for attr, value in items:
- assert hasattr(self, attr), \
- "Block does not have attribute %s" % attr
- future = self._context.put_async(
- self._data.path + [attr, "value"], value)
- futures.append(future)
- return futures
-
- def put_attribute_values(self, params, timeout=None, event_timeout=None):
- futures = self.put_attribute_values_async(params)
- self._context.wait_all_futures(
- futures, timeout=timeout, event_timeout=event_timeout)
-
- def when_value_matches(self, attr, good_value, bad_values=None,
- timeout=None, event_timeout=None):
- future = self.when_value_matches_async(attr, good_value, bad_values)
- self._context.wait_all_futures(
- future, timeout=timeout, event_timeout=event_timeout)
-
- def when_value_matches_async(self, attr, good_value, bad_values=None):
- path = self._data.path + [attr, "value"]
- future = self._context.when_matches_async(path, good_value, bad_values)
- return future
-
- def wait_all_futures(self, futures, timeout=None, event_timeout=None):
- self._context.wait_all_futures(
- futures, timeout=timeout, event_timeout=event_timeout)
-
-
-def make_block_view(controller, context, data):
- class BlockSubclass(Block):
- def __init__(self):
- self._do_init(controller, context, data)
-
- for endpoint in data:
- # make properties for the endpoints we know about
- make_get_property(BlockSubclass, endpoint)
-
- block = BlockSubclass()
- return block
\ No newline at end of file
diff --git a/malcolm/core/blockmeta.py b/malcolm/core/blockmeta.py
deleted file mode 100644
index 0da58e0b4..000000000
--- a/malcolm/core/blockmeta.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from malcolm.compat import str_
-from .meta import Meta
-from .serializable import Serializable, deserialize_object
-from .stringarray import StringArray
-
-
-@Serializable.register_subclass("malcolm:core/BlockMeta:1.0")
-class BlockMeta(Meta):
- endpoints = ["description", "tags", "writeable", "label", "fields"]
-
- def __init__(self, description="", tags=(), writeable=False, label="",
- fields=()):
- super(BlockMeta, self).__init__(description, tags, writeable, label)
- # Set initial values
- self.fields = self.set_fields(fields)
-
- def set_fields(self, fields):
- """Set the fields StringArray"""
- fields = StringArray(deserialize_object(f, str_) for f in fields)
- return self.set_endpoint_data("fields", fields)
diff --git a/malcolm/core/blockmodel.py b/malcolm/core/blockmodel.py
deleted file mode 100644
index 739418f2e..000000000
--- a/malcolm/core/blockmodel.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from malcolm.compat import str_
-
-from .attributemodel import AttributeModel
-from .blockmeta import BlockMeta
-from .methodmodel import MethodModel
-from .model import Model
-from .serializable import Serializable, deserialize_object
-
-
-@Serializable.register_subclass("malcolm:core/Block:1.0")
-class BlockModel(Model):
- """Data Model for a Block"""
-
- def __init__(self):
- # TODO: how do we take children while preserving order?
- self.endpoints = []
- self.meta = self.set_endpoint_data("meta", BlockMeta())
-
- def set_notifier_path(self, notifier, path):
- super(BlockModel, self).set_notifier_path(notifier, path)
- for endpoint in self.endpoints:
- self[endpoint].set_notifier_path(notifier, self.path + [endpoint])
-
- def set_endpoint_data(self, name, value):
- name = deserialize_object(name, str_)
- if name == "meta":
- value = deserialize_object(value, BlockMeta)
- else:
- value = deserialize_object(value, (AttributeModel, MethodModel))
- with self.notifier.changes_squashed:
- if name in self.endpoints:
- # Stop the old endpoint notifying
- self[name].set_notifier_path(Model.notifier, ())
- else:
- self.endpoints.append(name)
- value.set_notifier_path(self.notifier, self.path + [name])
- setattr(self, name, value)
- # Tell the notifier what changed
- self.notifier.add_squashed_change(self.path + [name], value)
- self._update_fields()
- return value
-
- def _update_fields(self):
- self.meta.set_fields([x for x in self.endpoints if x != "meta"])
-
- def remove_endpoint(self, name):
- with self.notifier.changes_squashed:
- self[name].set_notifier_path(Model.notifier, ())
- self.endpoints.remove(name)
- delattr(self, name)
- self._update_fields()
- self.notifier.add_squashed_change(self.path + [name])
diff --git a/malcolm/core/context.py b/malcolm/core/context.py
index 3983fb44c..e2a2fd13d 100644
--- a/malcolm/core/context.py
+++ b/malcolm/core/context.py
@@ -1,27 +1,50 @@
import weakref
import time
+from annotypes import TYPE_CHECKING
+
from malcolm.compat import maybe_import_cothread
from .future import Future
-from .loggable import Loggable
from .request import Put, Post, Subscribe, Unsubscribe
from .response import Update, Return, Error
from .queue import Queue
-from .errors import TimeoutError, AbortedError, ResponseError, BadValueError
+from .errors import TimeoutError, AbortedError, BadValueError
+
+if TYPE_CHECKING:
+ from typing import Callable, Any, List, Union
+ from .process import Process
+ from .views import Block
class When(object):
- def __init__(self, condition_satisfied):
+ def __init__(self, good_value, bad_values):
+ # type: (Callable[[Any], bool]) -> None
+ if callable(good_value):
+ def condition_satisfied(value):
+ return good_value(value)
+ else:
+ def condition_satisfied(value):
+ if bad_values and value in bad_values:
+ raise BadValueError(
+ "Waiting for %r, got %r" % (good_value, value))
+ return value == good_value
+ condition_satisfied.__name__ = "equals_%s" % good_value
self.condition_satisfied = condition_satisfied
- self.future = None
- self.context = None
+ self.future = None # type: Future
+ self.context = None # type: Context
+ self.last = None
def set_future_context(self, future, context):
+ # type: (Future, Context) -> None
self.future = future
self.context = context
- def check_condition(self, value):
+ def __call__(self, value):
+ # type: (Any) -> None
+ # Need to check if we have a future as we might be called while the
+ # unsubscribe is taking place
if self.future:
+ self.last = value
try:
satisfied = self.condition_satisfied(value)
except Exception:
@@ -36,18 +59,17 @@ def check_condition(self, value):
self.future = None
-class Context(Loggable):
+class Context(object):
+ """Helper allowing Future style access to Block Attributes and Methods"""
+
STOP = object()
- runner = None
def __init__(self, process):
- """
- Args:
- process (Process): The process to use to find child Block
- """
+ # type: (Process) -> None
self._q = self.make_queue()
# Func to call just before requests are dispatched
self._notify_dispatch_request = None
+ self._notify_args = ()
self._process = process
self._next_id = 1
self._futures = {} # dict {int id: Future)}
@@ -59,10 +81,12 @@ def __init__(self, process):
self._cothread = maybe_import_cothread()
def make_queue(self):
+ # type: () -> Queue
return Queue()
@property
def mri_list(self):
+ # type: () -> List[str]
return self._process.mri_list
def get_controller(self, mri):
@@ -70,10 +94,11 @@ def get_controller(self, mri):
return controller
def block_view(self, mri):
+ # type: (str) -> Block
"""Get a view of a block
Args:
- mri (str): The mri of the controller hosting the block
+ mri: The mri of the controller hosting the block
Returns:
Block: The block we control
@@ -87,7 +112,7 @@ def _get_next_id(self):
self._next_id += 1
return new_id
- def set_notify_dispatch_request(self, notify_dispatch_request):
+ def set_notify_dispatch_request(self, notify_dispatch_request, *args):
"""Set function to call just before requests are dispatched
Args:
@@ -95,6 +120,7 @@ def set_notify_dispatch_request(self, notify_dispatch_request):
with request as single arg just before request is dispatched
"""
self._notify_dispatch_request = notify_dispatch_request
+ self._notify_args = args
def _dispatch_request(self, request):
future = Future(weakref.proxy(self))
@@ -102,7 +128,7 @@ def _dispatch_request(self, request):
self._requests[future] = request
controller = self.get_controller(request.path[0])
if self._notify_dispatch_request:
- self._notify_dispatch_request(request)
+ self._notify_dispatch_request(request, *self._notify_args)
controller.handle_request(request)
# Yield control to allow the request to be handled
if self._cothread:
@@ -145,7 +171,8 @@ def put_async(self, path, value):
Returns:
Future: A single Future which will resolve to the result
"""
- request = Put(self._get_next_id(), path, value, self._q.put)
+ request = Put(self._get_next_id(), path, value)
+ request.set_callback(self._q.put)
future = self._dispatch_request(request)
return future
@@ -178,7 +205,8 @@ def post_async(self, path, params=None):
Returns:
Future: as single Future that will resolve to the result
"""
- request = Post(self._get_next_id(), path, params, self._q.put)
+ request = Post(self._get_next_id(), path, params)
+ request.set_callback(self._q.put)
future = self._dispatch_request(request)
return future
@@ -189,7 +217,8 @@ def subscribe(self, path, callback, *args):
Returns:
Future: A single Future which will resolve to the result
"""
- request = Subscribe(self._get_next_id(), path, False, self._q.put)
+ request = Subscribe(self._get_next_id(), path, delta=False)
+ request.set_callback(self._q.put)
# If self is in args, then make weak version of it
saved_args = []
for arg in args:
@@ -214,7 +243,8 @@ def unsubscribe(self, future):
self._pending_unsubscribes[future] = subscribe
# Clear out the subscription
self._subscriptions.pop(subscribe.id)
- request = Unsubscribe(subscribe.id, self._q.put)
+ request = Unsubscribe(subscribe.id)
+ request.set_callback(self._q.put)
controller = self.get_controller(subscribe.path[0])
controller.handle_request(request)
@@ -229,7 +259,11 @@ def unsubscribe_all(self):
def __del__(self):
# Unsubscribe from anything that is still active
- self.unsubscribe_all()
+ try:
+ self.unsubscribe_all()
+ except ValueError:
+ # Controller has already gone, probably during tearDown
+ pass
def when_matches(self, path, good_value, bad_values=None, timeout=None,
event_timeout=None):
@@ -263,22 +297,13 @@ def when_matches_async(self, path, good_value, bad_values=None):
Future: a single Future that will resolve when the path matches
good_value or bad_values
"""
- if callable(good_value):
- def condition_satisfied(value):
- return good_value(value)
- else:
- def condition_satisfied(value):
- if bad_values and value in bad_values:
- raise BadValueError(
- "Waiting for %r, got %r" % (good_value, value))
- return value == good_value
-
- when = When(condition_satisfied)
- future = self.subscribe(path, when.check_condition)
+ when = When(good_value, bad_values)
+ future = self.subscribe(path, when)
when.set_future_context(future, weakref.proxy(self))
return future
def wait_all_futures(self, futures, timeout=None, event_timeout=None):
+ # type: (Union[List[Future], Future, None], float, float) -> None
"""Services all futures until the list 'futures' are all done
then returns. Calls relevant subscription callbacks as they
come off the queue and raises an exception on abort
@@ -302,14 +327,14 @@ def wait_all_futures(self, futures, timeout=None, event_timeout=None):
else:
futures = []
- filtered_futures = set()
+ filtered_futures = []
for f in futures:
if f.done():
if f.exception() is not None:
raise f.exception()
else:
- filtered_futures.add(f)
+ filtered_futures.append(f)
while filtered_futures:
if event_timeout is not None:
@@ -329,13 +354,42 @@ def sleep(self, seconds):
until = time.time() + seconds
try:
while True:
- self._service_futures(set(), until)
+ self._service_futures([], until)
except TimeoutError:
return
+ def _describe_futures(self, futures):
+ descriptions = []
+ for future in futures:
+ request = self._requests.get(future, None)
+ if isinstance(request, Put):
+ path = ".".join(request.path)
+ descriptions.append("%s.put_value(%s)" % (path, request.value))
+ elif isinstance(request, Subscribe):
+ path = ".".join(request.path)
+ func, _ = self._subscriptions[request.id]
+ if isinstance(func, When):
+ descriptions.append("When(%s, %s, last=%s)" % (
+ path, func.condition_satisfied.__name__, func.last))
+ else:
+ descriptions.append("Subscribe(%s)" % path)
+ elif isinstance(request, Post):
+ path = ".".join(request.path)
+ if request.parameters:
+ params = "..."
+ else:
+ params = ""
+ descriptions.append("%s(%s)" % (path, params))
+ else:
+ descriptions.append(str(request))
+ if descriptions:
+ return "[" + ", ".join(descriptions) + "]"
+ else:
+ return "[]"
+
def _service_futures(self, futures, until=None):
"""Args:
- futures (set): The futures to service
+ futures (list): The futures to service
until (float): Timestamp to wait until
"""
if until is None:
@@ -344,39 +398,40 @@ def _service_futures(self, futures, until=None):
timeout = until - time.time()
if timeout < 0:
timeout = 0
- response = self._q.get(timeout)
+ try:
+ response = self._q.get(timeout)
+ except TimeoutError:
+ raise TimeoutError(
+ "Timeout waiting for %s" % self._describe_futures(futures))
if response is self._sentinel_stop:
self._sentinel_stop = None
elif response is self.STOP:
if self._sentinel_stop is None:
# This is a stop we should listen to...
- raise AbortedError()
+ raise AbortedError(
+ "Aborted waiting for %s" % self._describe_futures(futures))
elif isinstance(response, Update):
# This is an update for a subscription
if response.id in self._subscriptions:
- (func, args) = self._subscriptions[response.id]
+ func, args = self._subscriptions[response.id]
func(response.value, *args)
elif isinstance(response, Return):
future = self._futures.pop(response.id)
- request = self._requests.pop(future)
+ del self._requests[future]
self._pending_unsubscribes.pop(future, None)
result = response.value
- # Deserialize if this was a method
- if isinstance(request, Post) and result is not None:
- controller = self.get_controller(request.path[0])
- result = controller.validate_result(request.path[1], result)
future.set_result(result)
try:
futures.remove(future)
- except KeyError:
+ except ValueError:
pass
elif isinstance(response, Error):
future = self._futures.pop(response.id)
del self._requests[future]
- future.set_exception(ResponseError(response.message))
+ future.set_exception(response.message)
try:
futures.remove(future)
- except KeyError:
+ except ValueError:
pass
else:
- raise future.exception()
+ raise response.message
diff --git a/malcolm/core/controller.py b/malcolm/core/controller.py
index 4c9c65e21..407b88c7c 100644
--- a/malcolm/core/controller.py
+++ b/malcolm/core/controller.py
@@ -1,145 +1,99 @@
from contextlib import contextmanager
-import inspect
-import weakref
-import time
+
+from annotypes import TYPE_CHECKING, Anno, Sequence, overload
from malcolm.compat import OrderedDict
-from .alarm import Alarm
-from .attribute import Attribute
-from .attributemodel import AttributeModel
-from .block import Block, make_block_view
-from .blockmodel import BlockModel
from .context import Context
-from .errors import UnexpectedError, AbortedError, WrongThreadError
-from .healthmeta import HealthMeta
-from .hook import Hook, get_hook_decorated
-from .loggable import Loggable
-from .map import Map
-from .method import Method
-from .methodmodel import MethodModel, get_method_decorated
-from .model import Model
+from .errors import UnexpectedError, WrongThreadError
+from .hook import Hookable, start_hooks, wait_hooks, Hook
+from .info import Info
+from .models import BlockModel, AttributeModel, MethodModel, Model
from .notifier import Notifier
-from .request import Get, Subscribe, Unsubscribe, Put, Post
+from .part import PartRegistrar, Part, FieldRegistry, InfoRegistry
from .queue import Queue
+from .request import Get, Subscribe, Unsubscribe, Put, Post, Request
+from .response import Response
from .rlock import RLock
-from .serializable import serialize_object, deserialize_object, camel_to_title
-from .view import make_view
+from .serializable import serialize_object, camel_to_title
+from .spawned import Spawned
+from .views import make_view, Block
+if TYPE_CHECKING:
+ from typing import List, Dict, Tuple, Union, Callable, Any
+ from .process import Process
+ Field = Union[AttributeModel, MethodModel]
+ CallbackResponses = List[Tuple[Callable[[Response], None], Response]]
+# How long should we wait for spawned functions to complete after abort
ABORT_TIMEOUT = 5.0
-class Controller(Loggable):
- use_cothread = True
+with Anno("The Malcolm Resource Identifier for the Block produced"):
+ AMri = str
+with Anno("Description of the Block produced by the controller"):
+ ADescription = str
+with Anno("Whether the Controller should use cothread for its spawns"):
+ AUseCothread = bool
- # Attributes
- health = None
- def __init__(self, process, mri, parts, description=""):
- super(Controller, self).__init__(mri=mri)
- self.process = process
+class Controller(Hookable):
+ process = None
+
+ def __init__(self, mri, description="", use_cothread=True):
+ # type: (AMri, ADescription, AUseCothread) -> None
+ self.set_logger(mri=mri)
+ self.name = mri
self.mri = mri
+ self.use_cothread = use_cothread
self._request_queue = Queue()
- # {Part: Alarm} for current faults
- self._faults = {}
- # {Hook: name}
- self._hook_names = {}
- # {Hook: {Part: func_name}}
- self._hooked_func_names = {}
- self._find_hooks()
- # {part_name: (field_name, Model, setter)
- self.part_fields = OrderedDict()
- # {name: Part}
- self.parts = OrderedDict()
+ self.parts = OrderedDict() # type: Dict[str, Part]
self._lock = RLock(self.use_cothread)
self._block = BlockModel()
self._block.meta.set_description(description)
- self.set_label(mri)
- for part in parts:
- self.add_part(part)
+ self._block.meta.set_label(mri)
self._notifier = Notifier(mri, self._lock, self._block)
self._block.set_notifier_path(self._notifier, [mri])
self._write_functions = {}
- self._add_block_fields()
+ self.field_registry = FieldRegistry()
+ self.info_registry = InfoRegistry()
- def set_label(self, label):
- """Set the label of the Block Meta object"""
- self._block.meta.set_label(label)
+ def setup(self, process):
+ # type: (Process) -> None
+ self.process = process
+ self.info_registry.set_spawn(self.spawn)
+ self.add_initial_part_fields()
def add_part(self, part):
+ # type: (Part) -> None
assert part.name not in self.parts, \
"Part %r already exists in Controller %r" % (part.name, self.mri)
- part.attach_to_controller(self)
- # Check part hooks into one of our hooks
- for func_name, part_hook, _ in get_hook_decorated(part):
- assert part_hook in self._hook_names, \
- "Part %s func %s not hooked into %s" % (
- part.name, func_name, self)
- self._hooked_func_names[part_hook][part] = func_name
- part_fields = list(part.create_attribute_models()) + \
- list(part.create_method_models())
+ part.setup(PartRegistrar(
+ self.field_registry, self.info_registry, part))
self.parts[part.name] = part
- self.part_fields[part.name] = part_fields
-
- def _find_hooks(self):
- for name, member in inspect.getmembers(self, Hook.isinstance):
- assert member not in self._hook_names, \
- "Hook %s already in %s as %s" % (
- self, name, self._hook_names[member])
- self._hook_names[member] = name
- self._hooked_func_names[member] = {}
-
- def _add_block_fields(self):
- for iterable in (self.create_attribute_models(),
- self.create_method_models(),
- self.initial_part_fields()):
- for name, child, writeable_func in iterable:
- self.add_block_field(name, child, writeable_func)
def add_block_field(self, name, child, writeable_func):
- if writeable_func:
- self._write_functions[name] = writeable_func
+ # type: (str, Field, Callable[..., Any]) -> None
if isinstance(child, AttributeModel):
- if writeable_func:
- child.meta.set_writeable(True)
- if not child.meta.label:
- child.meta.set_label(camel_to_title(name))
+ meta = child.meta
elif isinstance(child, MethodModel):
- if writeable_func:
- child.set_writeable(True)
- for k, v in child.takes.elements.items():
- v.set_writeable(True)
- if not child.label:
- child.set_label(camel_to_title(name))
+ meta = child
else:
raise ValueError("Invalid block field %r" % child)
+ if writeable_func:
+ self._write_functions[name] = writeable_func
+ meta.set_writeable(True)
+ if not meta.label:
+ meta.set_label(camel_to_title(name))
self._block.set_endpoint_data(name, child)
- def create_method_models(self):
- """Provide MethodModel instances to be attached to BlockModel
-
- Yields:
- tuple: (string name, MethodModel, callable post_function).
- """
- return get_method_decorated(self)
-
- def create_attribute_models(self):
- """Provide AttributeModel instances to be attached to BlockModel
-
- Yields:
- tuple: (string name, AttributeModel, callable put_function).
- """
- # Create read-only attribute to show error texts
- meta = HealthMeta("Displays OK or an error message")
- self.health = meta.create_attribute_model()
- yield "health", self.health, None
-
- def initial_part_fields(self):
- for part_fields in self.part_fields.values():
- for data in part_fields:
- yield data
+ def add_initial_part_fields(self):
+ # type: () -> None
+ for part_fields in self.field_registry.fields.values():
+ for name, child, writeable_func in part_fields:
+ self.add_block_field(name, child, writeable_func)
def spawn(self, func, *args, **kwargs):
+ # type: (Callable[..., Any], *Any, **Any) -> Spawned
"""Spawn a function in the right thread"""
spawned = self.process.spawn(func, args, kwargs, self.use_cothread)
return spawned
@@ -157,79 +111,41 @@ def lock_released(self):
def changes_squashed(self):
return self._notifier.changes_squashed
- def update_health(self, part, alarm=None):
- """Set the health attribute. Called from part"""
- if alarm is not None:
- alarm = deserialize_object(alarm, Alarm)
- with self.changes_squashed:
- if alarm is None or not alarm.severity:
- self._faults.pop(part, None)
- else:
- self._faults[part] = alarm
- if self._faults:
- # Sort them by severity
- faults = sorted(self._faults.values(), key=lambda a: a.severity)
- alarm = faults[-1]
- text = faults[-1].message
- else:
- alarm = None
- text = "OK"
- self.health.set_value(text, alarm=alarm)
-
- def block_view(self):
- """Get a view of the block we control
-
- Returns:
- Block: The block we control
- """
- context = Context(self.process)
- return self.make_view(context)
+ @overload
+ def make_view(self, context=None):
+ # type: (Context) -> Block
+ pass
+ @overload
def make_view(self, context, data=None, child_name=None):
+ # type: (Context, Model, str) -> Any
+ pass
+
+ def make_view(self, context=None, data=None, child_name=None):
"""Make a child View of data[child_name]"""
try:
- return self._make_view(context, data, child_name)
+ ret = self._make_view(context, data, child_name)
except WrongThreadError:
# called from wrong thread, spawn it again
result = self.spawn(self._make_view, context, data, child_name)
- return result.get()
+ ret = result.get()
+ return ret
def _make_view(self, context, data, child_name):
+ # type: (Context, Model, str) -> Any
"""Called in cothread's thread"""
with self._lock:
+ if context is None:
+ context = Context(self.process)
if data is None:
child = self._block
else:
child = data[child_name]
- child_view = self._make_appropriate_view(context, child)
+ child_view = make_view(self, context, child)
return child_view
- def _make_appropriate_view(self, context, data):
- if isinstance(data, BlockModel):
- # Make an Block View
- return make_block_view(self, context, data)
- elif isinstance(data, AttributeModel):
- # Make an Attribute View
- return Attribute(self, context, data)
- elif isinstance(data, MethodModel):
- # Make a Method View
- return Method(self, context, data)
- elif isinstance(data, Model):
- # Make a generic View of it
- return make_view(self, context, data)
- elif isinstance(data, dict):
- # Need to recurse down
- d = OrderedDict()
- for k, v in data.items():
- d[k] = self._make_appropriate_view(context, v)
- return d
- elif isinstance(data, list):
- # Need to recurse down
- return [self._make_appropriate_view(context, x) for x in data]
- else:
- return data
-
def handle_request(self, request):
+ # type: (Request) -> Spawned
"""Spawn a new thread that handles Request"""
# Put data on the queue, so if spawns are handled out of order we
# still get the most up to date data
@@ -237,6 +153,7 @@ def handle_request(self, request):
return self.spawn(self._handle_request)
def _handle_request(self):
+ # type: () -> None
responses = []
with self._lock:
# We spawned just above, so there is definitely something on the
@@ -262,14 +179,16 @@ def _handle_request(self):
for cb, response in responses:
try:
cb(response)
- except Exception as e:
+ except Exception:
self.log.exception("Exception notifying %s", response)
raise
def _handle_get(self, request):
+ # type: (Get) -> CallbackResponses
"""Called with the lock taken"""
data = self._block
- for endpoint in request.path[1:]:
+
+ for i, endpoint in enumerate(request.path[1:]):
try:
data = data[endpoint]
except KeyError:
@@ -278,27 +197,33 @@ def _handle_get(self, request):
else:
typ = type(data)
raise UnexpectedError(
- "Object of type %r has no attribute %r" % (typ, endpoint))
+ "Object %s of type %r has no attribute %r" % (request.path[:i+1], typ, endpoint))
+ # Important to serialize now with the lock so we get a consistent set
serialized = serialize_object(data)
ret = [request.return_response(serialized)]
return ret
def _handle_put(self, request):
+ # type: (Put) -> CallbackResponses
"""Called with the lock taken"""
attribute_name = request.path[1]
attribute = self._block[attribute_name]
+
assert attribute.meta.writeable, \
"Attribute %s is not writeable" % attribute_name
put_function = self._write_functions[attribute_name]
+ value = attribute.meta.validate(request.value)
with self.lock_released:
- result = put_function(request.value)
+ result = put_function(value)
+ # Don't need to serialize as the result is None, at the moment...
ret = [request.return_response(result)]
return ret
def _handle_post(self, request):
+ # type: (Post) -> CallbackResponses
"""Called with the lock taken"""
method_name = request.path[1]
if request.parameters:
@@ -306,100 +231,45 @@ def _handle_post(self, request):
else:
param_dict = {}
- method = self._block[method_name]
+ method = self._block[method_name] # type: MethodModel
assert method.writeable, \
"Method %s is not writeable" % method_name
- args = method.prepare_call_args(**param_dict)
+ args = method.validate(param_dict)
+
post_function = self._write_functions[method_name]
with self.lock_released:
- result = post_function(*args)
+ result = post_function(**args)
- result = self.validate_result(method_name, result)
+ # Don't need to serialize as the result should be immutable
ret = [request.return_response(result)]
return ret
- def validate_result(self, method_name, result):
- with self._lock:
- method = self._block[method_name]
- # Prepare output map
- if method.returns.elements:
- result = Map(method.returns, result)
- result.check_valid()
- return result
-
- def create_part_contexts(self):
- part_contexts = {}
- for part_name, part in self.parts.items():
- part_contexts[part] = Context(self.process)
- return part_contexts
-
- def run_hook(self, hook, part_contexts, *args, **params):
- hook_queue, hook_runners = self.start_hook(
- hook, part_contexts, *args, **params)
- return_dict = self.wait_hook(hook_queue, hook_runners)
- return return_dict
-
- def start_hook(self, hook, part_contexts, *args, **params):
- assert hook in self._hook_names, \
- "Hook %s doesn't appear in controller hooks %s" % (
- hook, self._hook_names)
- hook_name = self._hook_names[hook]
- self.log.debug("%s: Starting hook", hook_name)
-
- # This queue will hold (part, result) tuples
- hook_queue = Queue()
- hook_queue.hook_name = hook_name
- hook_runners = {}
-
- # now start them off
+ def run_hooks(self, hooks):
+ # type: (Sequence[Hook]) -> Dict[str, List[Info]]
+ return self.wait_hooks(*self.start_hooks(hooks))
+
+ def start_hooks(self, hooks):
+ # type: (Sequence[Hook]) -> Tuple[Queue, List[Hook]]
+ # Hooks might be a generator, so convert to a list
+ hooks = list(hooks)
+ if not hooks:
+ return Queue(), []
+ self.log.debug("%s: Starting hook", hooks[0].name)
+ for hook in hooks:
+ hook.set_spawn(self.spawn)
# Take the lock so that no hook abort can come in between now and
# the spawn of the context
with self._lock:
- for part, context in part_contexts.items():
- # context might have been aborted but have nothing servicing
- # the queue, we still want the legitimate messages on the queue
- # so just tell it to ignore stops it got before now
- context.ignore_stops_before_now()
- func_name = self._hooked_func_names[hook].get(part, None)
- if func_name:
- hook_runners[part] = part.make_hook_runner(
- hook_queue, func_name, weakref.proxy(context), *args,
- **params)
-
- return hook_queue, hook_runners
-
- def wait_hook(self, hook_queue, hook_runners):
- # Wait for them all to finish
- return_dict = {}
- start = time.time()
- while hook_runners:
- part, ret = hook_queue.get()
- hook_runner = hook_runners.pop(part)
-
- # Wait for the process to terminate
- hook_runner.wait()
- return_dict[part.name] = ret
- duration = time.time() - start
- if hook_runners:
- self.log.debug(
- "%s: Part %s returned %r after %ss. Still waiting for %s",
- hook_queue.hook_name, part.name, ret, duration,
- [p.name for p in hook_runners])
- else:
- self.log.debug(
- "%s: Part %s returned %r after %ss. Returning...",
- hook_queue.hook_name, part.name, ret, duration)
-
- if isinstance(ret, Exception):
- if not isinstance(ret, AbortedError):
- # If AbortedError, all tasks have already been stopped.
- # Got an error, so stop and wait all hook runners
- for h in hook_runners.values():
- h.stop()
- # Wait for them to finish
- for h in hook_runners.values():
- h.wait(timeout=ABORT_TIMEOUT)
- raise ret
-
+ hook_queue, hook_spawned = start_hooks(hooks)
+ return hook_queue, hook_spawned
+
+ def wait_hooks(self, hook_queue, hook_spawned):
+ # type: (Queue, List[Hook]) -> Dict[str, List[Info]]
+ if hook_spawned:
+ return_dict = wait_hooks(
+ self.log, hook_queue, hook_spawned, ABORT_TIMEOUT)
+ else:
+ self.log.debug("No Parts hooked")
+ return_dict = {}
return return_dict
diff --git a/malcolm/core/define.py b/malcolm/core/define.py
new file mode 100644
index 000000000..071174760
--- /dev/null
+++ b/malcolm/core/define.py
@@ -0,0 +1,14 @@
+from annotypes import WithCallTypes, Anno, Any
+
+
+with Anno("The name of the defined parameter"):
+ AName = str
+with Anno("The value of the defined parameter"):
+ AValue = Any
+
+
+class Define(WithCallTypes):
+ def __init__(self, name, value):
+ # type: (AName, AValue) -> None
+ self.name = name
+ self.value = value
diff --git a/malcolm/core/errors.py b/malcolm/core/errors.py
index 2e4c858d8..0e23ef8b0 100644
--- a/malcolm/core/errors.py
+++ b/malcolm/core/errors.py
@@ -35,4 +35,9 @@ class WrongThreadError(MalcolmException):
class YamlError(MalcolmException):
"""When instantiating some YAML raises an error"""
- pass
\ No newline at end of file
+ pass
+
+
+class FieldError(MalcolmException):
+ """Basically a KeyError but doesn't include quotation marks in error message"""
+ pass
diff --git a/malcolm/core/future.py b/malcolm/core/future.py
index 9c03502bd..15bfb27ef 100644
--- a/malcolm/core/future.py
+++ b/malcolm/core/future.py
@@ -82,5 +82,7 @@ def set_exception(self, exception):
Should only be used by Task and unit tests.
"""
+ assert isinstance(exception, Exception), \
+ "%r should be an Exception" % exception
self._exception = exception
self._state = self.FINISHED
diff --git a/malcolm/core/healthmeta.py b/malcolm/core/healthmeta.py
deleted file mode 100644
index bbab2def5..000000000
--- a/malcolm/core/healthmeta.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from .serializable import Serializable
-from .vmeta import VMeta
-
-
-@Serializable.register_subclass("malcolm:core/HealthMeta:1.0")
-class HealthMeta(VMeta):
- """Meta object containing information for a string"""
- _faults = None
-
- def validate(self, value):
- """
- Check if the value is None and returns None, else casts value to a
- string and returns it
-
- Args:
- value: Value to validate
-
- Returns:
- str: Value as a string [If value is not None]
- """
- if value is None:
- value = "OK"
- else:
- value = str(value)
- return value
diff --git a/malcolm/core/hook.py b/malcolm/core/hook.py
index 14cdacd04..01f3c9ad9 100644
--- a/malcolm/core/hook.py
+++ b/malcolm/core/hook.py
@@ -1,29 +1,191 @@
-import inspect
+import time
+import logging
+from annotypes import TYPE_CHECKING, Anno, WithCallTypes, Any, Generic, \
+ TypeVar, Sequence
-class Hook(object):
- def __call__(self, func):
- """Decorator function to add a Hook to a Part's function
+from malcolm.compat import OrderedDict
+from .errors import AbortedError
+from .loggable import Loggable
+from .queue import Queue
+from .spawned import Spawned
+from .info import Info
- Args:
- func: Function to decorate with Hook
+if TYPE_CHECKING:
+ from typing import Callable, List, Dict, Tuple, Type, Union, Optional
- Returns:
- Decorated function
- """
+# Create a module level logger
+log = logging.getLogger(__name__)
- if not hasattr(func, "Hooked"):
- func.Hooked = []
- func.Hooked.append(self)
- return func
- @classmethod
- def isinstance(cls, o):
- return isinstance(o, cls)
+T = TypeVar("T")
+if TYPE_CHECKING:
+ Hooked = Callable[..., T]
+ ArgsGen = Callable[[], List[str]]
-def get_hook_decorated(part):
- for name, member in inspect.getmembers(part, inspect.ismethod):
- if hasattr(member, "Hooked"):
- for hook in member.Hooked:
- yield name, hook, member
+class Hookable(Loggable, WithCallTypes):
+ name = None # type: str
+ hooked = None # type: Dict[Type[Hook], Tuple[Hooked, ArgsGen]]
+
+ def register_hooked(self,
+ hooks, # type: Union[Type[Hook], Sequence[Type[Hook]]]
+ func, # type: Hooked
+ args_gen=None # type: Optional[ArgsGen]
+ ):
+ # type: (...) -> None
+ if self.hooked is None:
+ self.hooked = {}
+ if args_gen is None:
+ args_gen = getattr(func, "call_types", {}).keys
+ if not isinstance(hooks, Sequence):
+ hooks = [hooks]
+ for hook_cls in hooks:
+ self.hooked[hook_cls] = (func, args_gen)
+
+ def on_hook(self, hook):
+ # type: (Hook) -> None
+ """Takes a hook, and optionally calls hook.run on a function"""
+ try:
+ func, args_gen = self.hooked[type(hook)]
+ except (KeyError, TypeError):
+ return
+ else:
+ hook(func, args_gen())
+
+
+with Anno("The child that the hook is being passed to"):
+ AHookable = Hookable
+
+
+class Hook(Generic[T], WithCallTypes):
+ """Something that children can register with to be called"""
+
+ def __init__(self, child, **kwargs):
+ # type: (AHookable, **Any) -> None
+ self.child = child
+ self._kwargs = kwargs
+ self._queue = None # type: Queue
+ self._spawn = None # type: Callable[..., Spawned]
+ self.spawned = None # type: Spawned
+
+ @property
+ def name(self):
+ return type(self).__name__
+
+ def set_spawn(self, spawn):
+ # type: (Callable[..., Spawned]) -> Hook
+ self._spawn = spawn
+ return self
+
+ def set_queue(self, queue):
+ # type: (Queue) -> Hook
+ self._queue = queue
+ return self
+
+ def prepare(self):
+ # type: () -> None
+ """Override this if we need to prepare before running"""
+ pass
+
+ def __call__(self, func, keys=None):
+ # type: (Callable[..., T], Optional[Sequence[str]]) -> None
+ """Spawn the function, passing kwargs specified by func.call_types or
+ keys if given"""
+ if keys is None:
+ keys = getattr(func, "call_types", {}).keys()
+ assert not self.spawned, \
+ "Hook has already spawned a function, cannot run another"
+ self.prepare()
+ # TODO: should we check the return types here?
+ kwargs = {}
+ for k in keys:
+ assert k in self._kwargs, \
+ "Hook requested argument %r not in %r" % (
+ k, list(self._kwargs))
+ kwargs[k] = self._kwargs[k]
+ self.spawned = self._spawn(self._run, func, kwargs)
+
+ def _run(self, func, kwargs):
+ # type: (Callable[..., T], Dict[str, Any]) -> None
+ try:
+ result = func(**kwargs)
+ result = self.validate_return(result)
+ except AbortedError as e:
+ log.info("%s: %s has been aborted", self.child, func)
+ result = e
+ except Exception as e: # pylint:disable=broad-except
+ log.exception("%s: %s(**%s) raised exception %s",
+ self.child, func, kwargs, e)
+ result = e
+ self._queue.put((self, result))
+
+ def stop(self):
+ # type: () -> None
+ """Override this if we can stop"""
+ raise RuntimeError("%s cannot be stopped" % self.name)
+
+ def validate_return(self, ret):
+ # type: (T) -> None
+ """Override this if the function is expected to return something to
+ validate its value"""
+ assert not ret, "Expected no return, got %s" % (ret,)
+ return None
+
+
+def start_hooks(hooks):
+ # type: (List[Hook]) -> Tuple[Queue, List[Hook]]
+ # This queue will hold (part, result) tuples
+ hook_queue = Queue()
+ hook_spawned = []
+ # now start them off
+ for hook in hooks:
+ hook.set_queue(hook_queue)
+ hook.child.on_hook(hook)
+ if hook.spawned:
+ hook_spawned.append(hook)
+ return hook_queue, hook_spawned
+
+
+def wait_hooks(logger, hook_queue, hook_spawned, timeout=None,
+ exception_check=True):
+ # type: (logging.Logger, Queue, List[Hook], float, bool) -> Dict[str, List[Info]]
+ # timeout is time to wait for spawned processes to complete on abort,
+ # not time for them to run for
+ # Wait for them all to finish
+ return_dict = OrderedDict()
+ for hook in hook_spawned:
+ return_dict[hook.child.name] = None
+ start = time.time()
+ hook_spawned = set(hook_spawned)
+ while hook_spawned:
+ hook, ret = hook_queue.get() # type: Tuple[Hook, Any]
+ hook_spawned.remove(hook)
+ # Wait for the process to terminate
+ hook.spawned.wait(timeout)
+ duration = time.time() - start
+ if hook_spawned:
+ logger.debug(
+ "%s: Child %s returned %r after %ss. Still waiting for %s",
+ hook.name, hook.child.name, ret, duration,
+ [h.child.name for h in hook_spawned])
+ else:
+ logger.debug(
+ "%s: Child %s returned %r after %ss. Returning...",
+ hook.name, hook.child.name, ret, duration)
+
+ if isinstance(ret, Exception):
+ if exception_check:
+ if not isinstance(ret, AbortedError):
+ # If AbortedError, all tasks have already been stopped.
+ # Got an error, so stop and wait all hook runners
+ for h in hook_spawned:
+ h.stop()
+ # Wait for them to finish
+ for h in hook_spawned:
+ h.spawned.wait(timeout)
+ raise ret
+ else:
+ return_dict[hook.child.name] = ret
+
+ return return_dict
diff --git a/malcolm/core/hookrunner.py b/malcolm/core/hookrunner.py
deleted file mode 100644
index 6d8ee72ef..000000000
--- a/malcolm/core/hookrunner.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import logging
-
-from malcolm.core.errors import AbortedError
-
-
-# Create a module level logger
-log = logging.getLogger(__name__)
-
-
-class HookRunner(object):
- def __init__(self, hook_queue, part, func, context, args):
- self.hook_queue = hook_queue
- self.part = part
- self.func = func
- self.context = context
- self.args = args
- self.spawned = self.part.spawn(self.func_result_on_queue)
-
- def func_result_on_queue(self):
- try:
- result = self.func(self.context, *self.args)
- except AbortedError as e:
- log.info("%s: %s has been aborted", self.part.name, self.func)
- result = e
- except Exception as e: # pylint:disable=broad-except
- log.exception(
- "%s: %s%s raised exception %s",
- self.part.name, self.func, self.args, e)
- result = e
- self.hook_queue.put((self.part, result))
-
- def stop(self):
- self.context.stop()
-
- def wait(self, timeout=None):
- self.spawned.wait(timeout=timeout)
diff --git a/malcolm/core/importer.py b/malcolm/core/importer.py
deleted file mode 100644
index 50fdd2631..000000000
--- a/malcolm/core/importer.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import os
-import sys
-import importlib
-import logging
-import types
-import imp
-
-# Create a module level logger
-log = logging.getLogger(__name__)
-
-
-class Importer(object):
- def __init__(self, ):
- self.special_names = [
- "vmetas", "infos", "controllers", "parts", "includes", "blocks"]
-
- def import_all_packages(self, root_package, root_init, globals_d):
- """Import any packages relative to self.root_dir, recursing down one
- level to specially named subdirs"""
- modules = {}
- root_dir = os.path.dirname(root_init)
- for f in os.listdir(root_dir):
- if os.path.isfile(os.path.join(root_dir, f, "__init__.py")):
- name = ".".join([root_package, f])
- modules.update(self.try_import_name(name))
- pkg_dir = os.path.join(root_dir, f)
- self.import_special_subpackages(name, pkg_dir)
- globals_d.update(modules)
- return list(modules)
-
- def import_special_subpackages(self, name, path):
- """Import specially named subpackages of name"""
- for n in self.special_names:
- sub_dir = os.path.join(path, n)
- if os.path.isdir(sub_dir) or os.path.isfile(sub_dir + ".py"):
- self.try_import_name(".".join([name, n]))
-
- def try_import_name(self, name):
- try:
- imp = importlib.import_module(name)
- except ImportError:
- log.warning("Importing %s failed", name, exc_info=True)
- return {}
- else:
- return {name: imp}
-
- def import_package_from_path(self, name, path):
- dirname, basename = os.path.abspath(path).rsplit(os.sep, 1)
- file, pathname, description = imp.find_module(basename, [dirname])
- try:
- mod = imp.load_module(name, file, pathname, description)
- finally:
- if file is not None:
- file.close()
- parent_name, attr_name = name.rsplit(".", 1)
- parent = importlib.import_module(parent_name)
- setattr(parent, attr_name, mod)
diff --git a/malcolm/core/info.py b/malcolm/core/info.py
index bfcf5cf9d..22774b6ec 100644
--- a/malcolm/core/info.py
+++ b/malcolm/core/info.py
@@ -1,6 +1,15 @@
import inspect
+from annotypes import TYPE_CHECKING, TypeVar
+
from malcolm.compat import OrderedDict
+from .errors import BadValueError
+
+if TYPE_CHECKING:
+ from typing import Type, Dict, List, Union, Sequence
+ PartInfo = Dict[str, Union[None, Sequence]]
+
+T = TypeVar("T")
class Info(object):
@@ -14,6 +23,7 @@ def __repr__(self):
@classmethod
def filter_parts(cls, part_info):
+ # type: (Type[T], PartInfo) -> Dict[str, List[T]]
"""Filter the part_info dict looking for instances of our class
Args:
@@ -34,6 +44,7 @@ def filter_parts(cls, part_info):
@classmethod
def filter_values(cls, part_info):
+ # type: (Type[T], PartInfo) -> List[T]
"""Filter the part_info dict list looking for instances of our class
Args:
@@ -47,3 +58,22 @@ def filter_values(cls, part_info):
for info_list in cls.filter_parts(part_info).values():
filtered += info_list
return filtered
+
+ @classmethod
+ def filter_single_value(cls, part_info):
+ # type: (Type[T], PartInfo) -> T
+ """Filter the part_info dict list looking for a single instance of our
+ class
+
+ Args:
+ part_info (dict): {part_name: [Info] or None} as returned from
+ Controller.run_hook()
+
+ Returns:
+ info subclass of cls
+ """
+ filtered = cls.filter_values(part_info)
+ if len(filtered) != 1:
+ raise BadValueError("Expected a single %s, got %s of them" % (
+ cls.__name__, len(filtered)))
+ return filtered[0]
diff --git a/malcolm/core/loggable.py b/malcolm/core/loggable.py
index a19209ee1..c47403e09 100644
--- a/malcolm/core/loggable.py
+++ b/malcolm/core/loggable.py
@@ -15,11 +15,9 @@ def filter(self, record):
class Loggable(object):
"""Utility class that provides a named logger for a class instance"""
+ log = None # type: logging.Logger
- def __init__(self, **fields):
- self.log = self.set_logger_extra(**fields)
-
- def set_logger_extra(self, **fields):
+ def set_logger(self, **fields):
"""Change the name of the logger that log.* should call
Args:
diff --git a/malcolm/core/map.py b/malcolm/core/map.py
deleted file mode 100644
index b4473ac1a..000000000
--- a/malcolm/core/map.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from malcolm.core.serializable import Serializable
-
-
-@Serializable.register_subclass("malcolm:core/Map:1.0")
-class Map(Serializable):
-
- def __init__(self, meta, d=None):
- self.endpoints = []
- self.meta = meta
- if d:
- self.update(d)
-
- def __setattr__(self, attr, val):
- if hasattr(self, "meta"):
- if attr not in self.meta.elements:
- raise AttributeError(
- "%s is not a valid key for given meta" % attr)
- val = self.meta.elements[attr].validate(val)
- unordered_endpoints = self.endpoints + [attr]
- object.__setattr__(
- self, "endpoints",
- [x for x in self.meta.elements if x in unordered_endpoints])
- super(Map, self).__setattr__(attr, val)
-
- def __setitem__(self, key, val):
- try:
- setattr(self, key, val)
- except AttributeError:
- raise ValueError(key)
-
- def update(self, d):
- invalid = [k for k in d
- if k not in self.meta.elements and k != "typeid"]
- if invalid:
- raise ValueError(
- "Keys %s from %s not in %s" % (
- invalid, d, list(self.meta.elements)))
- for k in d:
- if k != "typeid":
- self[k] = d[k]
-
- def check_valid(self):
- invalid = [k for k in self.meta.required if k not in self.endpoints]
- if invalid:
- raise ValueError(
- "Keys %s from %s not set" % (invalid, self.meta.required))
-
- def __repr__(self):
- elements = ", ".join("%r: %r" % kv for kv in self.items())
- return "Map({%s})" % elements
-
- def clear(self):
- while self.endpoints:
- delattr(self, self.endpoints.pop())
-
- def keys(self):
- return self.endpoints[:]
-
- def values(self):
- return [self[k] for k in self]
-
- def items(self):
- return [(k, self[k]) for k in self]
-
- def __eq__(self, rhs):
- return list(self.items()) == list(rhs.items())
-
- def __ne__(self, rhs):
- return not self == rhs
diff --git a/malcolm/core/mapmeta.py b/malcolm/core/mapmeta.py
deleted file mode 100644
index 15e38db4d..000000000
--- a/malcolm/core/mapmeta.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from malcolm.compat import str_, OrderedDict
-from .meta import Meta
-from .serializable import Serializable, deserialize_object, camel_to_title
-from .stringarray import StringArray
-from .vmeta import VMeta
-
-
-@Serializable.register_subclass("malcolm:core/MapMeta:1.0")
-class MapMeta(Meta):
- """An object containing a set of ScalarMeta objects"""
-
- endpoints = ["elements", "description", "tags", "writeable", "label",
- "required"]
-
- def __init__(self, description="", tags=(), writeable=False, label="",
- elements=None, required=()):
- super(MapMeta, self).__init__(description, tags, writeable, label)
- if elements is None:
- elements = {}
- self.elements = self.set_elements(elements)
- self.required = self.set_required(required)
-
- def set_notifier_path(self, notifier, path):
- super(MapMeta, self).set_notifier_path(notifier, path)
- for k, v in self.elements.items():
- v.set_notifier_path(notifier, self.path + ["elements", k])
-
- def set_elements(self, elements):
- """Set the elements dict from a serialized dict"""
- deserialized = OrderedDict()
- for k, v in elements.items():
- if k != "typeid":
- k = deserialize_object(k, str_)
- v = deserialize_object(v, VMeta)
- if not v.label:
- v.set_label(camel_to_title(k))
- deserialized[k] = v
- if hasattr(self, "elements"):
- # Stop old elements notifying
- for k, v in self.elements.items():
- v.set_notifier_path(None, ())
- for k, v in deserialized.items():
- v.set_notifier_path(self.notifier, self.path + ["elements", k])
- return self.set_endpoint_data("elements", deserialized)
-
- def set_required(self, required):
- """Set the required string list"""
- required = StringArray(deserialize_object(t, str_) for t in required)
- for r in required:
- assert r in self.elements, \
- "Expected one of %r, got %r" % (list(self.elements), r)
- return self.set_endpoint_data("required", required)
-
diff --git a/malcolm/core/meta.py b/malcolm/core/meta.py
deleted file mode 100644
index d647be0b7..000000000
--- a/malcolm/core/meta.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from malcolm.compat import str_
-from .model import Model
-from .serializable import deserialize_object
-from .stringarray import StringArray
-
-
-class Meta(Model):
- """Meta base class"""
-
- endpoints = ["description", "tags", "writeable", "label"]
-
- def __init__(self, description="", tags=(), writeable=False, label=""):
- # Set initial values
- self.description = self.set_description(description)
- self.tags = self.set_tags(tags)
- self.writeable = self.set_writeable(writeable)
- self.label = self.set_label(label)
- # List of state names that we are writeable in, not serializable
- self.writeable_in = []
-
- def set_description(self, description):
- """Set the description string"""
- description = deserialize_object(description, str_)
- return self.set_endpoint_data("description", description)
-
- def set_tags(self, tags):
- """Set the tags StringArray"""
- tags = StringArray(deserialize_object(t, str_) for t in tags)
- return self.set_endpoint_data("tags", tags)
-
- def set_writeable(self, writeable):
- """Set the writeable bool"""
- writeable = deserialize_object(writeable, bool)
- return self.set_endpoint_data("writeable", writeable)
-
- def set_label(self, label):
- """Set the label string"""
- label = deserialize_object(label, str_)
- return self.set_endpoint_data("label", label)
-
- def set_writeable_in(self, *states):
- """Set the states that the object is writeable in"""
- states = tuple(deserialize_object(state, str_) for state in states)
- self.writeable_in = states
diff --git a/malcolm/core/method.py b/malcolm/core/method.py
deleted file mode 100644
index 7c65084b3..000000000
--- a/malcolm/core/method.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from .view import View
-
-
-class Method(View):
- """Exposes a function with metadata for arguments and return values"""
-
- def __init__(self, controller, context, data):
- self._do_init(controller, context, data)
-
- def _add_positional_args(self, args, kwargs):
- # add any positional args into our kwargs dict
- for name, v in zip(self._data.takes.elements, args):
- assert name not in kwargs, \
- "%s specified as positional and keyword args" % (name,)
- kwargs[name] = v
- return kwargs
-
- def post(self, *args, **kwargs):
- kwargs = self._add_positional_args(args, kwargs)
- result = self._context.post(self._data.path, kwargs)
- return result
-
- __call__ = post
-
- def post_async(self, *args, **kwargs):
- kwargs = self._add_positional_args(args, kwargs)
- fs = self._context.post_async(self._data.path, kwargs)
- return fs
-
- @property
- def takes(self):
- return self._controller.make_view(self._context, self._data, "takes")
-
- @property
- def defaults(self):
- return self._controller.make_view(self._context, self._data, "defaults")
-
- @property
- def description(self):
- return self._controller.make_view(
- self._context, self._data, "description")
-
- @property
- def tags(self):
- return self._controller.make_view(self._context, self._data, "tags")
-
- @property
- def writeable(self):
- return self._controller.make_view(
- self._context, self._data, "writeable")
-
- @property
- def label(self):
- return self._controller.make_view(self._context, self._data, "label")
-
- @property
- def returns(self):
- return self._controller.make_view(self._context, self._data, "returns")
diff --git a/malcolm/core/methodmodel.py b/malcolm/core/methodmodel.py
deleted file mode 100644
index 6f45c52fe..000000000
--- a/malcolm/core/methodmodel.py
+++ /dev/null
@@ -1,267 +0,0 @@
-import inspect
-
-from malcolm.compat import str_, OrderedDict
-from .map import Map
-from .mapmeta import MapMeta
-from .meta import Meta
-from .serializable import Serializable, deserialize_object, check_camel_case, \
- serialize_object
-
-REQUIRED = object()
-OPTIONAL = object()
-
-
-@Serializable.register_subclass("malcolm:core/Method:1.0")
-class MethodModel(Meta):
- """Exposes a function with metadata for arguments and return values"""
-
- endpoints = ["takes", "defaults", "description", "tags", "writeable",
- "label", "returns"]
-
- def __init__(self, description="", tags=(), writeable=True, label="",
- takes=None, defaults=None, returns=None):
- super(MethodModel, self).__init__(description, tags, writeable, label)
- self.takes = self.set_takes(takes)
- self.returns = self.set_returns(returns)
- self.defaults = self.set_defaults(defaults)
-
- def set_notifier_path(self, notifier, path):
- super(MethodModel, self).set_notifier_path(notifier, path)
- for endpoint in ["takes", "returns"]:
- self[endpoint].set_notifier_path(notifier, self.path + [endpoint])
-
- def set_takes(self, takes):
- """Set the takes MapMeta"""
- if takes is None:
- takes = MapMeta()
- else:
- takes = deserialize_object(takes, MapMeta)
- if hasattr(self, "takes"):
- self.takes.set_notifier_path(None, ())
- takes.set_notifier_path(self.notifier, self.path + ["takes"])
- return self.set_endpoint_data("takes", takes)
-
- def set_defaults(self, defaults):
- """Set the defaults dict"""
- if defaults is None:
- defaults = {}
- for k, v in defaults.items():
- if k != "typeid":
- k = deserialize_object(k, str_)
- defaults[k] = self.takes.elements[k].validate(v)
- return self.set_endpoint_data("defaults", defaults)
-
- def set_returns(self, returns):
- """Set the returns MapMeta"""
- if returns is None:
- returns = MapMeta()
- else:
- returns = deserialize_object(returns, MapMeta)
- if hasattr(self, "returns"):
- self.returns.set_notifier_path(None, ())
- returns.set_notifier_path(self.notifier, self.path + ["returns"])
- return self.set_endpoint_data("returns", returns)
-
- def prepare_call_args(self, **param_dict):
- args = []
- # Prepare input map
- if self.takes.elements:
- params = Map(self.takes, self.defaults)
- params.update(param_dict)
- params.check_valid()
- args.append(params)
- # Prepare output map
- if self.returns.elements:
- ret = Map(self.returns)
- args.append(ret)
- return tuple(args)
-
- @classmethod
- def wrap_method(cls, func):
- """Checks if a function already has a MethodModel implementation of
- itself and if it does not, creates one.
-
- Args:
- func: Function to wrap
-
- Returns:
- callable: Function with MethodModel instance of itself as an
- attribute
- """
-
- if not hasattr(func, "MethodModel"):
- # Make a new one
- description = inspect.getdoc(func) or ""
- method = cls(description)
- else:
- # Copy it in case we are subclassing
- method = cls.from_dict(func.MethodModel.to_dict())
- method.set_writeable_in(*func.MethodModel.writeable_in)
-
- func.MethodModel = method
- return func
-
- def recreate_from_others(self, method_metas, without=()):
- defaults = OrderedDict()
- elements = OrderedDict()
- required = []
-
- # Populate the intermediate data structures
- for method_meta in method_metas:
- for element in method_meta.takes.elements:
- if element not in without:
- # Serialize it to copy it
- serialized = method_meta.takes.elements[element].to_dict()
- elements[element] = serialized
- if element in method_meta.takes.required and \
- element not in required:
- required.append(element)
- if element in method_meta.defaults:
- defaults.pop(element, None)
- defaults[element] = method_meta.defaults[element]
- # TODO: what about returns?
-
- # remove required args that are now defaulted
- required = [r for r in required if r not in defaults]
-
- # Update ourself from these structures
- takes = MapMeta()
- takes.set_elements(elements)
- takes.set_required(required)
- self.set_takes(takes)
- self.set_defaults(defaults)
-
-
-def _prepare_map_meta(args, allow_defaults, defaults=None, elements=None,
- required=None):
- # prepare some data structures that will be used for the takes MapMeta
- if defaults is None:
- defaults = OrderedDict()
- if elements is None:
- elements = OrderedDict()
- if required is None:
- required = []
- for index in range(0, len(args), 3):
- # pick out 3 arguments
- name = args[index]
- check_camel_case(name)
- meta = args[index + 1]
- default = args[index + 2]
- # store them in the right structures
- elements[name] = meta
- if default is REQUIRED:
- required.append(name)
- elif default is not OPTIONAL:
- assert allow_defaults, \
- "Defaults not allowed in this structure"
- defaults[name] = default
-
- # Setup the takes MapMeta and attach it to the function's MethodModel
- meta = MapMeta()
- meta.set_elements(elements)
- meta.set_required(required)
- return meta, defaults
-
-
-def method_takes(*args):
- """Checks if function has a MethodModel representation, calls wrap_method to
- create one if it doesn't and then adds the takes attribute to it
- from \*args
-
- Args:
- \*args(list): List of of length nparams*3. List of form:
- [name, `VMeta`, `REQUIRED`/`OPTIONAL`/default, ...]
-
- Returns:
- callable: Updated function
- """
-
- def decorator(func):
- MethodModel.wrap_method(func)
- takes_meta, defaults = _prepare_map_meta(args, allow_defaults=True)
- func.MethodModel.set_takes(takes_meta)
- func.MethodModel.set_defaults(defaults)
- return func
-
- return decorator
-
-
-def method_also_takes(*args):
- """As `method_takes`, but adds \*args to method takes instead of replacing
- """
-
- def decorator(func):
- assert inspect.isclass(func), \
- "method_also_takes() only works on a Class, not %r" % func
- MethodModel.wrap_method(func)
- takes_meta, defaults = _prepare_map_meta(
- args, allow_defaults=True,
- elements=serialize_object(func.MethodModel.takes.elements),
- defaults=func.MethodModel.defaults.copy(),
- required=list(func.MethodModel.takes.required)
- )
- func.MethodModel.set_takes(takes_meta)
- func.MethodModel.set_defaults(defaults)
- return func
-
- return decorator
-
-
-def method_returns(*args):
- """Checks if function has a MethodModel representation, calls wrap_method to
- create one if it doesn't and then adds the returns attribute to it
- from \*args
-
- Args:
- \*args(list): List of of length nparams*3. List of form:
- [name, `VMeta`, `REQUIRED`/`OPTIONAL`/default, ...]
-
- Returns:
- callable: Updated function
- """
-
- def decorator(func):
- MethodModel.wrap_method(func)
- returns_meta, _ = _prepare_map_meta(args, allow_defaults=False)
- func.MethodModel.set_returns(returns_meta)
- return func
-
- return decorator
-
-
-def method_writeable_in(*states):
- """Checks if function has a MethodModel representation, calls wrap_method to
- create one if it doesn't and then adds only_in to it from \*states
-
- Args:
- \*states(list): List of state names, like DefaultStateMachine.RESETTING
-
- Returns:
- callable: Updated function
- """
- def decorator(func):
- MethodModel.wrap_method(func)
- func.MethodModel.set_writeable_in(*states)
- return func
- return decorator
-
-
-def get_method_decorated(instance):
- for name, member in inspect.getmembers(instance, inspect.ismethod):
- if hasattr(member, "MethodModel"):
- # Copy it so we get a new one for this instance
- method_model = MethodModel.from_dict(member.MethodModel.to_dict())
- method_model.writeable_in = member.MethodModel.writeable_in
- yield name, method_model, member
-
-
-def create_class_params(cls, **kwargs):
- method_model = cls.MethodModel
- params = method_model.prepare_call_args(**kwargs)[0]
- return params
-
-
-def call_with_params(func, *args, **params):
- method_model = func.MethodModel
- args += method_model.prepare_call_args(**params)
- return func(*args)
diff --git a/malcolm/core/model.py b/malcolm/core/model.py
deleted file mode 100644
index bb18ae446..000000000
--- a/malcolm/core/model.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from contextlib import contextmanager
-
-from .serializable import Serializable
-
-
-class DummyNotifier(object):
- @property
- @contextmanager
- def changes_squashed(self):
- yield
-
- def add_squashed_change(self, path, data=None):
- pass
-
-
-class Model(Serializable):
- notifier = DummyNotifier()
- path = []
-
- def set_notifier_path(self, notifier, path):
- """Sets the notifier, and the path from the path from block root
-
- Args:
- notifier (Notifier): The Notifier to tell when endpoint data changes
- path (list): The absolute path to get to this object
- """
- self.notifier = notifier
- self.path = list(path)
-
- def set_endpoint_data(self, name, value):
- with self.notifier.changes_squashed:
- # Actually set the attribute
- assert name in self.endpoints, \
- "Endpoint %r not defined for %r" % (name, self)
- setattr(self, name, value)
- # Tell the notifier what changed
- self.notifier.add_squashed_change(self.path + [name], value)
- return value
-
-
diff --git a/malcolm/core/models.py b/malcolm/core/models.py
new file mode 100644
index 000000000..97da6b958
--- /dev/null
+++ b/malcolm/core/models.py
@@ -0,0 +1,1027 @@
+import inspect
+
+from annotypes import Array, Anno, Union, Sequence, Mapping, Any, to_array, \
+ Optional, TYPE_CHECKING, WithCallTypes, NO_DEFAULT
+import numpy as np
+from enum import Enum
+
+from malcolm.compat import OrderedDict, str_
+from .alarm import Alarm
+from .notifier import DummyNotifier, Notifier
+from .serializable import Serializable, deserialize_object, camel_to_title
+from .table import Table
+from .tags import Widget
+from .timestamp import TimeStamp
+
+if TYPE_CHECKING:
+ from typing import Tuple, Type, List, Dict, Callable
+
+
+def check_type(value, typ):
+ if typ != Any:
+ if typ == str:
+ typ = str_
+ assert isinstance(value, typ), "Expected %s, got %r" % (typ, value)
+
+
+class Model(Serializable):
+ notifier = DummyNotifier()
+ path = []
+ __slots__ = []
+
+ def set_notifier_path(self, notifier, path):
+        # type: (Union[Notifier, DummyNotifier], List[str]) -> None
+        """Sets the notifier, and the path from block root
+
+        Args:
+            notifier (Notifier): The Notifier to tell when endpoint data changes
+            path (list): The absolute path to get to this object
+        """
+ # This function should either change from the DummyNotifier or to
+ # the DummyNotifier, never between two valid notifiers
+ assert self.notifier is Model.notifier or notifier is Model.notifier, \
+ "Already have a notifier %s path %s" % (self.notifier, self.path)
+ self.notifier = notifier
+ self.path = path
+ # Tell all our children too
+ for name, ct in self.call_types.items():
+ if ct.is_mapping:
+ child = getattr(self, name)
+ if isinstance(ct.typ[1], Model) and child:
+ for k, v in getattr(self, name).items():
+ v.set_notifier_path(notifier, self.path + [name, k])
+ elif isinstance(ct.typ, Model):
+ assert not ct.is_array, \
+ "Can't deal with Arrays of Models %s" % ct
+ child = getattr(self, name)
+ child.set_notifier_path(notifier, self.path + [name])
+
+ def set_endpoint_data(self, name, value):
+ # type: (str_, Any) -> Any
+ try:
+ ct = self.call_types[name]
+ except KeyError:
+ raise ValueError("%r not in %r.call_types %r" % (
+ name, self, self.call_types))
+ else:
+ if ct.is_array:
+ # Cast to right type, this will do some cheap validation
+ value = ct(value) # type: Array
+ # Check we have the right type
+ assert not issubclass(ct.typ, Model), \
+ "Can't handle Array[Model] at the moment"
+ if isinstance(value.seq, (tuple, list)):
+ # Variable array, check types of each instance
+ # TODO: this might harm performance
+ if ct.typ == str:
+ typ = str_
+ else:
+ typ = ct.typ
+ for x in value.seq:
+ assert isinstance(x, typ), \
+ "Expected Array[%r], got %r" % (ct.typ, value.seq)
+ elif ct.is_mapping:
+ # Check it is the right type
+ ktype, vtype = ct.typ
+ for k, v in value.items():
+ check_type(k, ktype)
+ check_type(v, vtype)
+ # If we are setting structures of Models then sort notification
+ if issubclass(ct.typ[1], Model):
+ # If we have old Models then stop them notifying
+ child = getattr(self, name, {})
+ if child:
+ for k, v in child.items():
+ v.set_notifier_path(Model.notifier, [])
+ for k, v in value.items():
+ v.set_notifier_path(self.notifier,
+ self.path + [name, k])
+ else:
+ # If we are setting a Model then sort notification
+ if issubclass(ct.typ, Model):
+ # If we have an old Model then stop it notifying
+ child = getattr(self, name, None)
+ if child:
+ child.set_notifier_path(Model.notifier, [])
+ value.set_notifier_path(self.notifier, self.path)
+                # Make sure it is the right type
+ check_type(value, ct.typ)
+ with self.notifier.changes_squashed:
+ # Actually set the attribute
+ setattr(self, name, value)
+ # Tell the notifier what changed
+ self.notifier.add_squashed_change(self.path + [name], value)
+ return value
+
+
+# Types used when deserializing to the class
+with Anno("Description of what this element represents"):
+ AMetaDescription = str
+with Anno("Generic text tags for client tools to interpret"):
+ ATags = Array[str]
+with Anno("Whether this element is currently writeable"):
+ AWriteable = bool
+with Anno("A human readable label for the element"):
+ ALabel = str
+
+# A more permissive union to allow a wider range of set_* args
+UTags = Union[ATags, Sequence[str], str]
+
+
+class Meta(Model):
+ """Base class for describing Blocks, Methods and Attributes"""
+ __slots__ = ["description", "tags", "writeable", "label"]
+
+ def __init__(self, description="", tags=(), writeable=False, label=""):
+ # type: (AMetaDescription, UTags, AWriteable, ALabel) -> None
+ self.description = self.set_description(description)
+ self.tags = self.set_tags(tags)
+ self.writeable = self.set_writeable(writeable)
+ self.label = self.set_label(label)
+
+ def set_description(self, description):
+ # type: (AMetaDescription) -> AMetaDescription
+ return self.set_endpoint_data("description", description)
+
+ def set_tags(self, tags):
+ # type: (UTags) -> ATags
+ return self.set_endpoint_data("tags", tags)
+
+ def set_writeable(self, writeable):
+ # type: (AWriteable) -> AWriteable
+ return self.set_endpoint_data("writeable", writeable)
+
+ def set_label(self, label):
+ # type: (ALabel) -> ALabel
+ return self.set_endpoint_data("label", label)
+
+
+class VMeta(Meta):
+ """Abstract base class for validating the values of Attributes"""
+ attribute_class = None
+ _annotype_lookup = {} # type: Mapping[Tuple[type, bool, bool], Type[VMeta]]
+ __slots__ = []
+
+ def validate(self, value):
+ # type: (Any) -> Any
+ """Abstract function to validate a given value
+
+ Args:
+ value: Value to validate
+
+ Returns:
+ The validated value if it passes
+ """
+ raise NotImplementedError(self)
+
+ def create_attribute_model(self, initial_value=None):
+ # type: (Any) -> AttributeModel
+ """Make an AttributeModel instance of the correct type for this Meta
+
+ Args:
+ initial_value: The initial value the Attribute should take
+
+ Returns:
+ AttributeModel: The created attribute model instance
+ """
+ attr = self.attribute_class(meta=self, value=initial_value)
+ return attr
+
+ def doc_type_string(self):
+ # type: () -> str
+ """Abstract function to return the python type string.
+
+ For example, "str" or "numpy.int32"
+ """
+ raise NotImplementedError(self)
+
+ def default_widget(self):
+ # type: () -> Widget
+ """Abstract function to return the default widget type"""
+ raise NotImplementedError(self)
+
+ @classmethod
+ def from_annotype(cls, anno, writeable, **kwargs):
+ # type: (Anno, bool, **Any) -> VMeta
+ """Return an instance of this class from an Anno"""
+ ret = cls(description=anno.description, writeable=writeable, **kwargs)
+ widget = ret.default_widget()
+ if widget != Widget.NONE:
+ ret.set_tags([widget.tag()])
+ return ret
+
+ @classmethod
+ def register_annotype_converter(cls, types, is_array=False,
+ is_mapping=False):
+ # type: (Union[Sequence[type], type], bool, bool) -> Any
+ """Register this class as a converter for Anno instances"""
+ if not isinstance(types, Sequence):
+ types = [types]
+
+ def decorator(subclass):
+ for typ in types:
+ cls._annotype_lookup[(typ, is_array, is_mapping)] = subclass
+ return subclass
+
+ return decorator
+
+ @classmethod
+ def lookup_annotype_converter(cls, anno):
+ # type: (Anno) -> Type[VMeta]
+ """Look up a vmeta based on an Anno"""
+ if hasattr(anno.typ, "__bases__"):
+ # This is a proper type
+ bases = inspect.getmro(anno.typ)
+ else:
+ # This is a numpy dtype
+ bases = [anno.typ]
+ for typ in bases:
+ key = (typ, bool(anno.is_array), bool(anno.is_mapping))
+ try:
+ return cls._annotype_lookup[key]
+ except KeyError:
+ pass
+ raise KeyError(anno)
+
+
+# Types used when deserializing to the class
+with Anno("The current value of the Attribute"):
+ AValue = Any
+with Anno("The current alarm status"):
+ AAlarm = Alarm
+with Anno("The time when the value was last updated"):
+ ATimeStamp = TimeStamp
+with Anno("The validating Meta object describing our value"):
+ AVMeta = VMeta
+
+
+# Don't register this with Serializable as we never instantiate it directly,
+# only a subclass like NTScalar
+class AttributeModel(Model):
+ """Data Model for an Attribute"""
+ __slots__ = ["value", "alarm", "timeStamp", "meta"]
+
+ # noinspection PyPep8Naming
+ # timeStamp is camelCase to maintain compatibility with EPICS normative
+ # types
+ def __init__(self, value=None, alarm=None, timeStamp=None, meta=None):
+ # type: (AValue, AAlarm, ATimeStamp, AVMeta) -> None
+ self.meta = self.set_meta(meta)
+ self.value = self.set_value(value, set_alarm_ts=False)
+ self.alarm = self.set_alarm(alarm)
+ self.timeStamp = self.set_ts(timeStamp)
+
+ def set_meta(self, meta):
+ # type: (VMeta) -> VMeta
+ meta = deserialize_object(meta)
+ # Check that the meta attribute_class is ourself
+ assert hasattr(meta, "attribute_class"), \
+ "Expected meta object, got %r" % meta
+ assert isinstance(self, meta.attribute_class), \
+ "Meta object needs to be attached to %s, we are a %s" % (
+ meta.attribute_class, type(self))
+ return self.set_endpoint_data("meta", meta)
+
+ def set_value(self, value, set_alarm_ts=True, alarm=None, ts=None):
+ # type: (Any, bool, Alarm, TimeStamp) -> Any
+ """Set value, calculating alarm and ts if requested"""
+ value = self.meta.validate(value)
+ if set_alarm_ts:
+ if alarm is None:
+ alarm = Alarm.ok
+ else:
+ alarm = deserialize_object(alarm, Alarm)
+ if ts is None:
+ ts = TimeStamp()
+ else:
+ ts = deserialize_object(ts, TimeStamp)
+ self.set_value_alarm_ts(value, alarm, ts)
+ else:
+ self.set_endpoint_data("value", value)
+ return self.value
+
+ def set_value_alarm_ts(self, value, alarm, ts):
+        # type: (Any, Alarm, TimeStamp) -> None
+        """Set value with pre-validated alarm and timeStamp"""
+ with self.notifier.changes_squashed:
+ # Assume they are of the right format
+ self.value = value
+ self.notifier.add_squashed_change(self.path + ["value"], value)
+ if alarm is not self.alarm:
+ self.alarm = alarm
+ self.notifier.add_squashed_change(self.path + ["alarm"], alarm)
+ self.timeStamp = ts
+ self.notifier.add_squashed_change(self.path + ["timeStamp"], ts)
+
+ def set_alarm(self, alarm=None):
+ # type: (Alarm) -> Alarm
+ if alarm is None:
+ alarm = Alarm.ok
+ else:
+ alarm = deserialize_object(alarm, Alarm)
+ return self.set_endpoint_data("alarm", alarm)
+
+ def set_ts(self, ts=None):
+ # type: (TimeStamp) -> TimeStamp
+ if ts is None:
+ ts = TimeStamp()
+ else:
+ ts = deserialize_object(ts, TimeStamp)
+ return self.set_endpoint_data("timeStamp", ts)
+
+
+@Serializable.register_subclass("epics:nt/NTTable:1.0")
+class NTTable(AttributeModel):
+ __slots__ = []
+
+ def to_dict(self):
+ # type: () -> OrderedDict
+ d = OrderedDict()
+ d["typeid"] = self.typeid
+ # Add labels for compatibility with epics normative types
+ labels = []
+ for column_name in self.meta.elements:
+ column_meta = self.meta.elements[column_name]
+ if column_meta.label:
+ labels.append(column_meta.label)
+ else:
+ labels.append(column_name)
+ d["labels"] = labels
+ d.update(super(NTTable, self).to_dict())
+ return d
+
+ @classmethod
+ def from_dict(cls, d, ignore=()):
+ ignore += ("labels",)
+ return super(NTTable, cls).from_dict(d, ignore)
+
+
+@Serializable.register_subclass("epics:nt/NTUnion:1.0")
+class NTUnion(AttributeModel):
+ __slots__ = []
+
+
+@Serializable.register_subclass("epics:nt/NTScalarArray:1.0")
+class NTScalarArray(AttributeModel):
+ __slots__ = []
+
+
+@Serializable.register_subclass("epics:nt/NTScalar:1.0")
+class NTScalar(AttributeModel):
+ __slots__ = []
+
+
+@Serializable.register_subclass("malcolm:core/BooleanMeta:1.0")
+@VMeta.register_annotype_converter(bool)
+class BooleanMeta(VMeta):
+ """Meta object containing information for a boolean"""
+ attribute_class = NTScalar
+ __slots__ = []
+
+ def validate(self, value):
+ # type: (Any) -> bool
+ """Cast value to boolean and return it"""
+ return bool(value)
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "bool"
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.CHECKBOX
+ else:
+ return Widget.LED
+
+
+with Anno("Choices of valid strings"):
+ AChoices = Array[str]
+
+UChoices = Union[AChoices, Sequence[Enum], Sequence[str]]
+
+
+@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
+@VMeta.register_annotype_converter(Enum)
+class ChoiceMeta(VMeta):
+ """Meta object containing information for a enum"""
+ attribute_class = NTScalar
+ __slots__ = ["choices"]
+
+ def __init__(self, description="", choices=(), tags=(), writeable=False,
+ label=""):
+ # type: (AMetaDescription, UChoices, UTags, AWriteable, ALabel) -> None
+ super(ChoiceMeta, self).__init__(description, tags, writeable, label)
+ self.choices_lookup = {} # type: Dict[Any, Union[str, Enum]]
+ self.enum_cls = None
+ self.choices = self.set_choices(choices)
+
+ def set_choices(self, choices):
+ # type: (UChoices) -> AChoices
+ # Calculate a lookup from all possible entries to the choice value
+ choices_lookup = {} # type: Dict[Any, Union[str, Enum]]
+ str_choices = []
+ enum_typ = None # type: Type
+ for i, choice in enumerate(choices):
+ # If we already have an enum type it must match
+ if enum_typ is not None:
+ assert isinstance(choice, enum_typ), \
+ "Expected %s choice, got %s" % (enum_typ, choice)
+ else:
+ enum_typ = type(choice)
+ if isinstance(choice, Enum):
+ # Our choice value must be a string
+ assert isinstance(choice.value, str), \
+ "Expected Enum choice to have str value, got %r with " \
+ "value %r" % (choice, choice.value)
+ # Map the Enum instance and str to the Enum instance
+ choices_lookup[choice.value] = choice
+ choices_lookup[choice] = choice
+ str_choices.append(choice.value)
+ else:
+ assert isinstance(choice, str), \
+ "Expected string choice, got %s" % (choice,)
+ # Map the string to itself
+ choices_lookup[choice] = choice
+ str_choices.append(choice)
+ # Map the index to the choice
+ choices_lookup[i] = choice
+ if choices:
+ # Map the default value to the first choice
+ choices_lookup[None] = choices[0]
+ else:
+ # There are no choices, so the default value is the empty string
+ choices_lookup[None] = ""
+ if enum_typ is None or issubclass(enum_typ, str_):
+ # We are producing strings
+ self.enum_cls = str
+ else:
+ # We are producing enums
+ self.enum_cls = enum_typ
+ self.choices_lookup = choices_lookup
+ return self.set_endpoint_data("choices", AChoices(str_choices))
+
+ def validate(self, value):
+ # type: (Any) -> Union[Enum, str]
+ """Check if the value is valid returns it"""
+ # Our lookup table contains all the possible values
+ try:
+ return self.choices_lookup[value]
+ except KeyError:
+ raise ValueError(
+ "%r is not a valid value in %s" % (value, list(self.choices)))
+
+ def doc_type_string(self):
+ # type: () -> str
+ return " | ".join([repr(x) for x in self.choices])
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.COMBO
+ else:
+ return Widget.TEXTUPDATE
+
+ @classmethod
+ def from_annotype(cls, anno, writeable, **kwargs):
+ # type: (Anno, bool, **Any) -> VMeta
+ return super(ChoiceMeta, cls).from_annotype(
+ anno, writeable, choices=list(anno.typ))
+
+
+with Anno("Numpy dtype string"):
+ ADtype = str
+
+
+_dtype_strings = ["int8", "uint8", "int16", "uint16", "int32", "uint32", "int64",
+ "uint64", "float32", "float64"]
+_dtype_string_lookup = {getattr(np, dtype): dtype for dtype in _dtype_strings}
+_dtype_string_lookup.update({int: "int64", float: "float64"})
+
+
+@Serializable.register_subclass("malcolm:core/NumberMeta:1.0")
+@VMeta.register_annotype_converter(list(_dtype_string_lookup))
+class NumberMeta(VMeta):
+ """Meta object containing information for a numerical value"""
+ attribute_class = NTScalar
+ __slots__ = ["dtype"]
+
+ def __init__(self, dtype="float64", description="", tags=(),
+ writeable=False, label=""):
+ # type: (ADtype, AMetaDescription, UTags, AWriteable, ALabel) -> None
+ super(NumberMeta, self).__init__(description, tags, writeable, label)
+ # like np.float64
+ self._np_type = None # type: type
+ # like "float64"
+ self.dtype = self.set_dtype(dtype)
+
+ def set_dtype(self, dtype):
+ # type: (ADtype) -> ADtype
+ assert dtype in _dtype_strings, \
+            "Expected dtype to be in %s, got %s" % (_dtype_strings, dtype)
+ self._np_type = getattr(np, dtype)
+ return self.set_endpoint_data("dtype", dtype)
+
+ def validate(self, value):
+ # type: (Any) -> np.number
+ """Check if the value is valid returns it"""
+ if value is None:
+ value = 0
+ cast = self._np_type(value)
+ return cast
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "%s" % self.dtype
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.TEXTINPUT
+ else:
+ return Widget.TEXTUPDATE
+
+ @classmethod
+ def from_annotype(cls, anno, writeable, **kwargs):
+ # type: (Anno, bool, **Any) -> VMeta
+ return super(NumberMeta, cls).from_annotype(
+ anno, writeable, dtype=_dtype_string_lookup[anno.typ])
+
+
+@Serializable.register_subclass("malcolm:core/StringMeta:1.0")
+@VMeta.register_annotype_converter(str)
+class StringMeta(VMeta):
+ """Meta object containing information for a string"""
+ attribute_class = NTScalar
+ __slots__ = []
+
+ def validate(self, value):
+ # type: (Any) -> str
+ """Check if the value is valid returns it"""
+ if value is None:
+ return ""
+ else:
+ return str(value)
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "str"
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.TEXTINPUT
+ else:
+ return Widget.TEXTUPDATE
+
+
+class VArrayMeta(VMeta):
+ # intermediate class so TableMeta can say "only arrays"
+ attribute_class = NTScalarArray
+ __slots__ = []
+
+
+def to_np_array(dtype, value):
+ # Give the Array the shorthand version
+ if dtype == np.float64:
+ dtype = float
+ elif dtype == np.int64:
+ dtype = int
+ if isinstance(value, Sequence):
+ # Cast to numpy array
+ value = np.array(value, dtype=dtype)
+ return to_array(Array[dtype], value)
+
+
+@Serializable.register_subclass("malcolm:core/BooleanArrayMeta:1.0")
+@VMeta.register_annotype_converter(bool, is_array=True)
+class BooleanArrayMeta(VArrayMeta):
+ """Meta object containing information for a boolean array"""
+
+ def validate(self, value):
+ # type: (Any) -> Array[bool]
+ """Check if the value is valid returns it"""
+ return to_np_array(bool, value)
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "[bool]"
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.CHECKBOX
+ else:
+ return Widget.LED
+
+
+@Serializable.register_subclass("malcolm:core/ChoiceArrayMeta:1.0")
+@VMeta.register_annotype_converter(Enum, is_array=True)
+class ChoiceArrayMeta(ChoiceMeta, VArrayMeta):
+ """Meta object containing information for a choice array"""
+
+ def validate(self, value):
+ # type: (Any) -> Array[str]
+ """Check if the value is valid returns it"""
+ if value is None:
+ return Array[self.enum_cls]()
+ else:
+ ret = []
+ if isinstance(value, str_):
+ value = [value]
+ for i, choice in enumerate(value):
+ # Our lookup table contains all the possible values
+ try:
+ ret.append(self.choices_lookup[choice])
+ except KeyError:
+ raise ValueError(
+ "%s is not a valid value in %s for element %s" % (
+ value, self.choices, i))
+ return to_array(Array[self.enum_cls], ret)
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "[%s]" % super(ChoiceArrayMeta, self).doc_type_string()
+
+
+@Serializable.register_subclass("malcolm:core/NumberArrayMeta:1.0")
+@VMeta.register_annotype_converter(list(_dtype_string_lookup), is_array=True)
+class NumberArrayMeta(NumberMeta, VArrayMeta):
+ """Meta object containing information for an array of numerical values"""
+ def validate(self, value):
+ # type: (Any) -> Array
+ """Check if the value is valid returns it"""
+ return to_np_array(self._np_type, value)
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "[%s]" % self.dtype
+
+
+@Serializable.register_subclass("malcolm:core/StringArrayMeta:1.0")
+@VMeta.register_annotype_converter(str, is_array=True)
+class StringArrayMeta(VArrayMeta):
+ """Meta object containing information for a string array"""
+
+ def validate(self, value):
+ # type: (Any) -> Array
+ """Check if the value is valid returns it"""
+ cast = to_array(Array[str], value)
+ for v in cast:
+ assert isinstance(v, str_), "Expected Array[str], got %r" % (value,)
+ return cast
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "[str]"
+
+ def default_widget(self):
+ # type: () -> Widget
+ if self.writeable:
+ return Widget.TEXTINPUT
+ else:
+ return Widget.TEXTUPDATE
+
+
+with Anno("Elements that should appear in the table instance"):
+ ATableElements = Mapping[str, VArrayMeta]
+
+
+@Serializable.register_subclass("malcolm:core/TableMeta:1.0")
+@VMeta.register_annotype_converter(Table)
+class TableMeta(VMeta):
+ __slots__ = ["elements"]
+ attribute_class = NTTable
+
+ def __init__(self,
+ description="", # type: AMetaDescription
+ tags=(), # type: UTags
+ writeable=False, # type: AWriteable
+ label="", # type: ALabel
+ elements=None, # type: ATableElements
+ ):
+ # type: (...) -> None
+ self.table_cls = None # type: Type[Table]
+ self.elements = {}
+ super(TableMeta, self).__init__(description, tags, writeable, label)
+ # Do this after so writeable is honoured
+ self.set_elements(elements if elements else {})
+
+ def set_elements(self, elements):
+ # type: (ATableElements) -> ATableElements
+ """Set the elements dict from a serialized dict"""
+ deserialized = OrderedDict()
+ for k, v in elements.items():
+ if k != "typeid":
+ deserialized[k] = deserialize_object(v, VArrayMeta)
+ ret = self.set_endpoint_data("elements", deserialized)
+ self.set_table_cls(self.table_cls)
+ return ret
+
+ def set_table_cls(self, table_cls=None):
+ # type: (Type[Table]) -> None
+ if table_cls is None or table_cls.__name__ == "TableSubclass":
+ # Either autogenerated by this function or not set, so make one
+
+ class TableSubclass(Table):
+ def __init__(self, **kwargs):
+ # type: (**Any) -> None
+ self.__dict__.update(kwargs)
+
+ table_cls = TableSubclass
+ for k, meta in self.elements.items():
+ # We can distinguish the type by asking for the default
+ # validate value
+ default_array = meta.validate(None) # type: Array
+ anno = Anno(meta.description, default_array.typ, k)
+ anno.is_array = True
+ anno.is_mapping = False
+ table_cls.call_types[k] = anno
+ else:
+ # User supplied, check it matches element names
+ assert issubclass(table_cls, Table), \
+ "Expecting table subclass, got %s" % (table_cls,)
+ missing = set(self.elements) - set(table_cls.call_types)
+ assert not missing, "Supplied Table missing fields %s" % (missing,)
+ extra = set(table_cls.call_types) - set(self.elements)
+ assert not extra, "Supplied Table has extra fields %s" % (extra,)
+ self.table_cls = table_cls
+
+ def validate(self, value):
+ if value is None:
+ # Create an empty table
+ value = {k: None for k in self.elements}
+ elif isinstance(value, Table):
+ # Serialize it so we can type check it
+ value = value.to_dict()
+ elif not isinstance(value, dict):
+ raise ValueError(
+ "Expected Table instance or serialized, got %s" % (value,))
+ # We need to make a table instance ourselves
+ keys = set(x for x in value if x != "typeid")
+ missing = set(self.elements) - keys
+ assert not missing, "Supplied table missing fields %s" % (missing,)
+ extra = keys - set(self.elements)
+ assert not extra, "Supplied table has extra fields %s" % (extra,)
+ args = {k: meta.validate(value[k]) for k, meta in self.elements.items()}
+ value = self.table_cls(**args)
+ # Check column lengths
+ value.validate_column_lengths()
+ return value
+
+ def doc_type_string(self):
+ # type: () -> str
+ return "`Table`"
+
+ def default_widget(self):
+ # type: () -> Widget
+ return Widget.TABLE
+
+ @classmethod
+ def from_table(cls, table_cls, description, widget=None, writeable=()):
+        # type: (Type[Table], str, Widget, List[str]) -> TableMeta
+        """Create a TableMeta object, using a Table subclass as the spec
+
+        Args:
+            table_cls: The Table class to read __init__ args from
+            description: The description of the created Meta
+            widget: The widget of the created Meta
+            writeable: A list of the writeable field names. If there are any
+                writeable fields then the whole Meta is writeable
+        """
+ elements = OrderedDict()
+ for k, ct in table_cls.call_types.items():
+ subclass = cls.lookup_annotype_converter(ct)
+ elements[k] = subclass.from_annotype(ct, writeable=k in writeable)
+ ret = cls(description=description, elements=elements,
+ writeable=bool(writeable))
+ if widget is None:
+ widget = ret.default_widget()
+ ret.set_tags([widget.tag()])
+ ret.set_table_cls(table_cls)
+ return ret
+
+ @classmethod
+ def from_annotype(cls, anno, writeable, **kwargs):
+ # type: (Anno, bool, **Any) -> VMeta
+ assert issubclass(anno.typ, Table), \
+ "Expected Table, got %s" % anno.typ
+ return cls.from_table(anno.typ, anno.description, writeable=writeable)
+
+
+# Types used when deserializing to the class
+with Anno("Meta objects that are used to describe the elements in the map"):
+ AElements = Mapping[str, VMeta]
+with Anno("The required elements in the map"):
+ ARequired = Array[str]
+
+# A more permissive union to allow a wider range of set_* args
+URequired = Union[ARequired, Sequence[str], str]
+
+
+@Serializable.register_subclass("malcolm:core/MapMeta:1.0")
+class MapMeta(Model):
+ """An object containing a set of ScalarMeta objects"""
+ __slots__ = ["elements", "required"]
+
+ def __init__(self,
+ elements=None, # type: Optional[AElements]
+ required=() # type: URequired
+ ):
+ # type: (...) -> None
+ self.elements = self.set_elements(elements if elements else {})
+ self.required = self.set_required(required)
+
+ def set_elements(self, elements):
+ # type: (AElements) -> AElements
+ deserialized = OrderedDict()
+ for k, v in elements.items():
+ if k != "typeid":
+ v = deserialize_object(v, VMeta)
+ if not v.label:
+ v.set_label(camel_to_title(k))
+ deserialized[k] = v
+ return self.set_endpoint_data("elements", deserialized)
+
+ def set_required(self, required):
+ # type: (URequired) -> ARequired
+ for r in required:
+ assert r in self.elements, \
+ "Expected one of %r, got %r" % (list(self.elements), r)
+ return self.set_endpoint_data("required", ARequired(required))
+
+
+# Types used when deserializing to the class
+with Anno("Meta for describing the arguments that should be passed"):
+ ATakes = MapMeta
+with Anno("The required elements in the map"):
+ ADefaults = Mapping[str, Any]
+with Anno("Meta for describing the arguments that will be returned"):
+ AReturns = MapMeta
+
+
+@Serializable.register_subclass("malcolm:core/Method:1.0")
+class MethodModel(Meta):
+ """Exposes a function with metadata for arguments and return values"""
+ __slots__ = ["takes", "returns", "defaults"]
+
+ def __init__(self,
+ takes=None, # type: Optional[ATakes]
+ defaults=None, # type: Optional[ADefaults]
+ description="", # type: AMetaDescription
+ tags=(), # type: UTags
+ writeable=False, # type: AWriteable
+ label="", # type: ALabel
+ returns=None, # type: Optional[AReturns]
+ ):
+ # type: (...) -> None
+ self.takes = self.set_takes(takes if takes else MapMeta())
+ self.returns = self.set_returns(returns if returns else MapMeta())
+ self.defaults = self.set_defaults(defaults if defaults else {})
+ super(MethodModel, self).__init__(description, tags, writeable, label)
+
+ def set_takes(self, takes):
+ # type: (ATakes) -> ATakes
+ takes = deserialize_object(takes, MapMeta)
+ return self.set_endpoint_data("takes", takes)
+
+ def set_defaults(self, defaults):
+ # type: (ADefaults) -> ADefaults
+ for k, v in defaults.items():
+ if k != "typeid":
+ defaults[k] = self.takes.elements[k].validate(v)
+ return self.set_endpoint_data("defaults", defaults)
+
+ def set_returns(self, returns):
+ # type: (AReturns) -> AReturns
+ returns = deserialize_object(returns, MapMeta)
+ return self.set_endpoint_data("returns", returns)
+
+ def validate(self, param_dict):
+ # type: (Dict[str, Any]) -> Dict[str, Any]
+ args = {}
+ for k, v in param_dict.items():
+ assert k in self.takes.elements, \
+ "Method passed argument %r which is not in %r" % (
+ k, list(self.takes.elements))
+ args[k] = self.takes.elements[k].validate(v)
+ missing = set(self.takes.required) - set(args)
+ assert not missing, \
+ "Method requires %s but only passed %s" % (
+ list(self.takes.required), list(args))
+ return args
+
+ @classmethod
+ def from_callable(cls, func, description=None, returns=True):
+ # type: (Callable, str, bool) -> MethodModel
+ """Return an instance of this class from a Callable"""
+ if description is None:
+ if func.__doc__ is None:
+ description = ""
+ else:
+ description = func.__doc__
+ method = cls(description=description)
+ takes_elements = OrderedDict()
+ defaults = OrderedDict()
+ takes_required = []
+ for k, anno in getattr(func, "call_types", {}).items():
+ scls = VMeta.lookup_annotype_converter(anno)
+ takes_elements[k] = scls.from_annotype(anno, writeable=True)
+ if anno.default is NO_DEFAULT:
+ takes_required.append(k)
+ elif anno.default is not None:
+ defaults[k] = anno.default
+ takes = MapMeta(elements=takes_elements, required=takes_required)
+ method.set_takes(takes)
+ method.set_defaults(defaults)
+ if returns:
+ returns_elements = OrderedDict()
+ returns_required = []
+ return_type = getattr(func, "return_type", None) # type: Anno
+ if return_type is None or return_type.typ is None:
+ call_types = {}
+ elif issubclass(return_type.typ, WithCallTypes):
+ call_types = return_type.typ.call_types
+ else:
+ call_types = {"return": return_type}
+ for k, anno in call_types.items():
+ scls = VMeta.lookup_annotype_converter(anno)
+ returns_elements[k] = scls.from_annotype(anno, writeable=False)
+ if anno.default is not None:
+ returns_required.append(k)
+ returns = MapMeta(
+ elements=returns_elements, required=returns_required)
+ method.set_returns(returns)
+ return method
+
+
+# Types used when deserializing to the class
+with Anno("The list of fields currently in the Block"):
+ AFields = Array[str]
+
+# A more permissive union to allow a wider range of set_* args
+UFields = Union[AFields, Sequence[str], str]
+
+
+@Serializable.register_subclass("malcolm:core/BlockMeta:1.0")
+class BlockMeta(Meta):
+ __slots__ = ["fields"]
+
+ def __init__(self,
+ description="", # type: AMetaDescription
+ tags=(), # type: UTags
+ writeable=True, # type: AWriteable
+ label="", # type: ALabel
+ fields=(), # type: UFields
+ ):
+ # type: (...) -> None
+ super(BlockMeta, self).__init__(description, tags, writeable, label)
+ self.fields = self.set_fields(fields)
+
+ def set_fields(self, fields):
+ # type: (UFields) -> AFields
+ return self.set_endpoint_data("fields", AFields(fields))
+
+
+@Serializable.register_subclass("malcolm:core/Block:1.0")
+class BlockModel(Model):
+ """Data Model for a Block"""
+
+ def __init__(self):
+ # type: () -> None
+ # Make a new call_types dict so we don't modify for all instances
+ self.call_types = OrderedDict()
+ self.meta = self.set_endpoint_data("meta", BlockMeta())
+
+ def set_endpoint_data(self, name, value):
+ # type: (str, Union[AttributeModel, MethodModel, BlockMeta]) -> Any
+ name = deserialize_object(name, str_)
+ if name == "meta":
+ value = deserialize_object(value, BlockMeta)
+ else:
+ value = deserialize_object(value, (AttributeModel, MethodModel))
+ with self.notifier.changes_squashed:
+ if name in self.call_types:
+ # Stop the old Model notifying
+ getattr(self, name).set_notifier_path(Model.notifier, [])
+ else:
+ anno = Anno("Field", typ=type(value))
+ self.call_types[name] = anno
+ value.set_notifier_path(self.notifier, self.path + [name])
+ setattr(self, name, value)
+ # Tell the notifier what changed
+ self.notifier.add_squashed_change(self.path + [name], value)
+ self._update_fields()
+ return value
+
+ def _update_fields(self):
+ self.meta.set_fields([x for x in self.call_types if x != "meta"])
+
+ def remove_endpoint(self, name):
+ # type: (str) -> None
+ with self.notifier.changes_squashed:
+ getattr(self, name).set_notifier_path(Model.notifier, [])
+ self.call_types.pop(name)
+ delattr(self, name)
+ self._update_fields()
+ self.notifier.add_squashed_change(self.path + [name])
diff --git a/malcolm/core/moduleutil.py b/malcolm/core/moduleutil.py
new file mode 100644
index 000000000..9acdc7885
--- /dev/null
+++ b/malcolm/core/moduleutil.py
@@ -0,0 +1,11 @@
+from annotypes import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing import Dict, Any, List
+
+
+def submodule_all(globals_d):
+ # type: (Dict[str, Any]) -> List[str]
+ # Return all the classes
+ return sorted(k for k, v in globals_d.items() if isinstance(v, type))
+
diff --git a/malcolm/core/notifier.py b/malcolm/core/notifier.py
index 461244952..9b85a1e81 100644
--- a/malcolm/core/notifier.py
+++ b/malcolm/core/notifier.py
@@ -1,49 +1,62 @@
+from contextlib import contextmanager
+
+from annotypes import TYPE_CHECKING
+
from .serializable import serialize_object
from .loggable import Loggable
from .request import Subscribe, Unsubscribe
+from .response import Response, Update
+from .rlock import RLock
+
+if TYPE_CHECKING:
+ from .models import BlockModel
+ from typing import List, Tuple, Callable, Any, Dict
+ Callback = Callable[[Response], None]
+ CallbackResponses = List[Tuple[Callback, Response]]
+ SubscriptionKeys = Dict[Tuple[Callback, int], Subscribe]
+
+
+class DummyNotifier(object):
+ @property
+ @contextmanager
+ def changes_squashed(self):
+ yield
+
+ def add_squashed_change(self, path, data=None):
+ # type: (List[str], Any) -> None
+ pass
class Notifier(Loggable):
"""Object that can service callbacks on given endpoints"""
def __init__(self, mri, lock, block):
- super(Notifier, self).__init__(mri=mri)
+ # type: (str, RLock, BlockModel) -> None
+ self.set_logger(mri=mri)
self._tree = NotifierNode(block)
self._lock = lock
# Incremented every time we do with changes_squashed
self._squashed_count = 0
- self._squashed_changes = []
- # {Subscribe.generator_key(): Subscribe}
- self._subscription_keys = {}
+ self._squashed_changes = [] # type: List[List]
+ self._subscription_keys = {} # type: SubscriptionKeys
def handle_subscribe(self, request):
- """Handle a Subscribe request from outside. Called with lock taken
-
- Args:
- request (Subscribe): Request to respond to
-
- Returns:
- list: [(callback, Response)] that need to be called
- """
+ # type: (Subscribe) -> CallbackResponses
+ """Handle a Subscribe request from outside. Called with lock taken"""
ret = self._tree.handle_subscribe(request, request.path[1:])
self._subscription_keys[request.generate_key()] = request
return ret
def handle_unsubscribe(self, request):
- """Handle a Unsubscribe request from outside. Called with lock taken
-
- Args:
- request (Unsubscribe): Request to respond to
-
- Returns:
- list: [(callback, Response)] that need to be called
- """
+ # type: (Unsubscribe) -> CallbackResponses
+ """Handle a Unsubscribe request from outside. Called with lock taken"""
subscribe = self._subscription_keys.pop(request.generate_key())
ret = self._tree.handle_unsubscribe(subscribe, subscribe.path[1:])
return ret
@property
def changes_squashed(self):
+ # type: () -> Notifier
"""Context manager to allow multiple calls to notify_change() to be
made and all changes squashed into one consistent set. E.g:
@@ -54,6 +67,7 @@ def changes_squashed(self):
return self
def add_squashed_change(self, path, data=None):
+ # type: (List[str], Any) -> None
"""Call setter, then notify subscribers of change
Args:
@@ -80,13 +94,14 @@ def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
if self._squashed_count == 0:
changes = self._squashed_changes
self._squashed_changes = []
- # TODO: squash intermediate changes here?
+ # TODO: squash intermediate deltas here?
responses += self._tree.notify_changes(changes)
finally:
self._lock.release()
self._callback_responses(responses)
def _callback_responses(self, responses):
+ # type: (CallbackResponses) -> None
for cb, response in responses:
try:
cb(response)
@@ -101,17 +116,15 @@ class NotifierNode(object):
"delta_requests", "update_requests", "children", "parent", "data"]
def __init__(self, data, parent=None):
- # [Subscribe]
- self.delta_requests = []
- self.update_requests = []
- # {name: NotifierNode}
- self.children = {}
- # Leaf
+ # type: (Any, NotifierNode) -> None
+ self.delta_requests = [] # type: List[Subscribe]
+ self.update_requests = [] # type: List[Subscribe]
+ self.children = {} # type: Dict[str, NotifierNode]
self.parent = parent
- # object
self.data = data
def notify_changes(self, changes):
+ # type: (List[List]) -> CallbackResponses
"""Set our data and notify anyone listening
Args:
@@ -147,6 +160,7 @@ def notify_changes(self, changes):
return ret
def _add_child_change(self, change, child_changes):
+ # type: (List, Dict[str, List]) -> None
path = change[0]
if path:
# This is for one of our children
@@ -167,6 +181,7 @@ def _add_child_change(self, change, child_changes):
child_changes.setdefault(name, []).append(child_change)
def _update_data(self, data):
+ # type: (Any) -> Dict[str, List]
"""Set our data and notify any subscribers of children what has changed
Args:
@@ -190,6 +205,7 @@ def _update_data(self, data):
return child_change_dict
def handle_subscribe(self, request, path):
+ # type: (Subscribe, List[str]) -> CallbackResponses
"""Add to the list of request to notify, and notify the initial value of
the data held
@@ -220,10 +236,12 @@ def handle_subscribe(self, request, path):
return ret
def handle_unsubscribe(self, request, path):
+ # type: (Subscribe, List[str]) -> CallbackResponses
"""Remove from the notifier list and send a return
Args:
request (Subscribe): The original subscribe request
+ path (list): The relative path from ourself
Returns:
list: [(callback, Response)] that need to be called
@@ -244,4 +262,4 @@ def handle_unsubscribe(self, request, path):
else:
self.delta_requests.remove(request)
ret.append(request.return_response())
- return ret
\ No newline at end of file
+ return ret
diff --git a/malcolm/core/ntscalar.py b/malcolm/core/ntscalar.py
deleted file mode 100644
index 56cfaaef6..000000000
--- a/malcolm/core/ntscalar.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .attributemodel import AttributeModel
-from .serializable import Serializable
-
-
-@Serializable.register_subclass("epics:nt/NTScalar:1.0")
-class NTScalar(AttributeModel):
- pass
diff --git a/malcolm/core/ntscalararray.py b/malcolm/core/ntscalararray.py
deleted file mode 100644
index d0a89af88..000000000
--- a/malcolm/core/ntscalararray.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .attributemodel import AttributeModel
-from .serializable import Serializable
-
-
-@Serializable.register_subclass("epics:nt/NTScalarArray:1.0")
-class NTScalarArray(AttributeModel):
- pass
diff --git a/malcolm/core/nttable.py b/malcolm/core/nttable.py
deleted file mode 100644
index 99cc5f374..000000000
--- a/malcolm/core/nttable.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from malcolm.compat import OrderedDict
-from .attributemodel import AttributeModel
-from .serializable import Serializable
-
-
-@Serializable.register_subclass("epics:nt/NTTable:1.0")
-class NTTable(AttributeModel):
- def to_dict(self):
- d = OrderedDict()
- d["typeid"] = self.typeid
- # Add labels for compatibility with epics normative types
- labels = []
- for column_name in self.meta.elements:
- column_meta = self.meta.elements[column_name]
- if column_meta.label:
- labels.append(column_meta.label)
- else:
- labels.append(column_name)
- d["labels"] = labels
- d.update(super(NTTable, self).to_dict())
- return d
-
- @classmethod
- def from_dict(cls, d, ignore=()):
- ignore += ("labels",)
- return super(NTTable, cls).from_dict(d, ignore)
diff --git a/malcolm/core/ntunion.py b/malcolm/core/ntunion.py
deleted file mode 100644
index 0436a98ec..000000000
--- a/malcolm/core/ntunion.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .attributemodel import AttributeModel
-from .serializable import Serializable
-
-
-@Serializable.register_subclass("epics:nt/NTUnion:1.0")
-class NTUnion(AttributeModel):
- pass
diff --git a/malcolm/core/part.py b/malcolm/core/part.py
index f7e3aa5a3..20eb1bcee 100644
--- a/malcolm/core/part.py
+++ b/malcolm/core/part.py
@@ -1,76 +1,146 @@
-from malcolm.compat import str_
-from .controller import Controller
-from .hook import get_hook_decorated
-from .hookrunner import HookRunner
-from .loggable import Loggable
-from .methodmodel import get_method_decorated, MethodModel
+from annotypes import Anno, TYPE_CHECKING
+from malcolm.compat import OrderedDict
+from .queue import Queue
+from .hook import Hookable
+from .info import Info
+from .serializable import check_camel_case
+from .spawned import Spawned
+from .models import MethodModel, AttributeModel
+
+if TYPE_CHECKING:
+ from typing import Union, List, Tuple, Dict, Callable, Optional, Type
+ Field = Union[AttributeModel, MethodModel]
+ FieldDict = Dict[object, List[Tuple[str, Field, Callable]]]
+ Callback = Callable[[object, Info], None]
+
+with Anno("The name of the Part within the Controller"):
+ APartName = str
+
+
+class FieldRegistry(object):
+ def __init__(self):
+ # type: () -> None
+ self.fields = OrderedDict() # type: FieldDict
+
+ def get_field(self, name):
+ # type: (str) -> Field
+ for fields in self.fields.values():
+ for (n, field, _) in fields:
+ if n == name:
+ return field
+ raise ValueError("No field named %s found" % (name,))
+
+ def add_method_model(self,
+ func, # type: Callable
+ name=None, # type: Optional[str]
+ description=None, # type: Optional[str]
+ owner=None, # type: object
+ ):
+ # type: (...) -> MethodModel
+ """Register a function to be added to the block"""
+ if name is None:
+ name = func.__name__
+ method = MethodModel.from_callable(func, description)
+ self._add_field(owner, name, method, func)
+ return method
+
+ def add_attribute_model(self,
+ name, # type: str
+ attr, # type: AttributeModel
+ writeable_func=None, # type: Optional[Callable]
+ owner=None, # type: object
+ ):
+ # type: (...) -> AttributeModel
+ self._add_field(owner, name, attr, writeable_func)
+ return attr
+
+ def _add_field(self, owner, name, model, writeable_func):
+ # type: (object, str, Field, Callable) -> None
+ check_camel_case(name)
+ part_fields = self.fields.setdefault(owner, [])
+ part_fields.append((name, model, writeable_func))
+
+
+class InfoRegistry(object):
+ def __init__(self):
+ # type: (Callable[..., Spawned]) -> None
+ self._reportable_infos = {} # type: Dict[Type[Info], Callback]
+ self._spawn = None
+ self._report_queue = Queue()
+
+ def set_spawn(self, spawn):
+ # type: (Callable[..., Spawned]) -> None
+ """Called once the Controller has been attached to a Process so that
+ reports become asynchronous"""
+ self._spawn = spawn
+
+ def add_reportable(self, info, callback):
+ # type: (Type[Info], Callback) -> None
+ self._reportable_infos[info] = callback
+
+ def report(self, reporter, info):
+ # type: (object, Info) -> None
+ callback = self._reportable_infos[type(info)]
+ self._report_queue.put((callback, reporter, info))
+ if self._spawn:
+ # Spawn in case we are coming from a non-cothread to cothread thread
+ self._spawn(self._report).get()
+ else:
+ # No process yet, just run directly
+ self._report()
+
+ def _report(self):
+ callback, reporter, info = self._report_queue.get()
+ callback(reporter, info)
+
+
+class Part(Hookable):
+ registrar = None # type: PartRegistrar
-class Part(Loggable):
def __init__(self, name):
- super(Part, self).__init__()
- assert isinstance(name, str_), \
- "Expected name to be a string, got %s. Did you forget to " \
- "subclass __init__ in %s?" % (name, self)
- self.controller = None
- self.use_cothread = False
- self.process = None
+ # type: (APartName) -> None
+ self.set_logger(part_name=name)
self.name = name
- self.method_models = {}
-
- def notify_dispatch_request(self, request):
- """Will be called when a context passed to a hooked function is about
- to dispatch a request"""
- pass
-
- def attach_to_controller(self, controller):
- """Attach this part to a controller
-
- Args:
- controller (Controller): The controller to attach to
- """
- self.set_logger_extra(mri=controller.mri, part=self.name)
- self.controller = controller
- self.process = controller.process
- self.use_cothread = controller.use_cothread
-
- def spawn(self, func, *args, **kwargs):
- """Spawn a function in the right thread"""
- spawned = self.process.spawn(func, args, kwargs, self.use_cothread)
- return spawned
-
- def update_part_health(self, alarm=None):
- """Set the health attribute"""
- self.controller.update_health(self, alarm)
-
- def make_hook_runner(self, hook_queue, func_name, context, *args, **params):
- # TODO: add phase information
- func = getattr(self, func_name)
- method_model = self.method_models.get(func_name, MethodModel())
- filtered_params = {k: v for k, v in params.items()
- if k in method_model.takes.elements}
- args += method_model.prepare_call_args(**filtered_params)
- runner = HookRunner(hook_queue, self, func, context, args)
- return runner
-
- def create_method_models(self):
- hooked = [name for (name, _, _) in get_hook_decorated(self)]
- for name, method_model, func in get_method_decorated(self):
- self.method_models[name] = method_model
- if name not in hooked:
- yield name, method_model, func
-
- def create_attribute_models(self):
- """Should be implemented in subclasses to yield any Attributes that
- should be attached to the Block
-
- Yields:
- tuple: (attribute_name, attribute, set_function), where:
-
- - attribute_name is the name of the Attribute within the Block
- - attribute is the Attribute to be attached
- - set_function is a callable if the Attribute should be
- writeable, or None if not
- """
- return iter(())
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ """Use the given Registrar to populate the hooks and fields"""
+ self.registrar = registrar
+
+
+class PartRegistrar(object):
+ def __init__(self, field_registry, info_registry, part):
+ # type: (FieldRegistry, InfoRegistry, Part) -> None
+ self._field_registry = field_registry
+ self._info_registry = info_registry
+ self._part = part
+ self._info_queue = Queue()
+
+ def get_fields(self):
+ # type: () -> List[Tuple[str, Field, Callable]]
+ """Get the field list that we have added"""
+ return self._field_registry.fields[self]
+
+ def add_method_model(self,
+ func, # type: Callable
+ name=None, # type: Optional[str]
+ description=None, # type: Optional[str]
+ ):
+ # type: (...) -> MethodModel
+ """Register a function to be added to the block"""
+ return self._field_registry.add_method_model(
+ func, name, description, self._part)
+
+ def add_attribute_model(self,
+ name, # type: str
+ attr, # type: AttributeModel
+ writeable_func=None, # type: Optional[Callable]
+ ):
+ # type: (...) -> AttributeModel
+ return self._field_registry.add_attribute_model(
+ name, attr, writeable_func, self._part)
+
+ def report(self, info):
+ # type: (Info) -> None
+ self._info_registry.report(self._part, info)
diff --git a/malcolm/core/process.py b/malcolm/core/process.py
index e58b4d222..f3d47768e 100644
--- a/malcolm/core/process.py
+++ b/malcolm/core/process.py
@@ -1,58 +1,86 @@
from multiprocessing.pool import ThreadPool
-import inspect
+
+from annotypes import Anno, Array, TYPE_CHECKING, Union, Sequence
from malcolm.compat import OrderedDict, maybe_import_cothread, \
- get_pool_num_threads
+ get_pool_num_threads, str_
from .context import Context
-from .hook import Hook, get_hook_decorated
+from .controller import Controller
+from .errors import WrongThreadError
+from .hook import Hook, start_hooks, AHookable, wait_hooks
+from .info import Info
from .loggable import Loggable
-from .spawned import Spawned
from .rlock import RLock
-from .errors import WrongThreadError
+from .spawned import Spawned
+from .views import Block
+
+if TYPE_CHECKING:
+ from typing import List, Callable, Dict, Any, Tuple, TypeVar
+
+ T = TypeVar("T")
# Clear spawned handles after how many spawns?
SPAWN_CLEAR_COUNT = 1000
+# States for how far in start procedure we've got
+STOPPED = 0
+STARTING = 1
+STARTED = 2
+
+
+with Anno("The list of currently published Controller mris"):
+ APublished = Array[str]
-class Process(Loggable):
- """Hosts a number of Controllers and provides spawn capabilities"""
- Init = Hook()
+class UnpublishedInfo(Info):
+ def __init__(self, mri):
+ # type: (str) -> None
+ self.mri = mri
+
+
+class ProcessPublishHook(Hook[None]):
+ """Called when a new block is added"""
+ def __init__(self, child, published):
+ # type: (AHookable, APublished) -> None
+ super(ProcessPublishHook, self).__init__(child, published=published)
+
+
+with Anno("Each of these reports that the controller should not be published"):
+ AUnpublishedInfos = Array[UnpublishedInfo]
+UUnpublishedInfos = Union[AUnpublishedInfos, Sequence[UnpublishedInfo],
+ UnpublishedInfo, None]
+
+
+class ProcessStartHook(Hook[None]):
"""Called at start() to start all child controllers"""
- Publish = Hook()
- """Called when a new block is added
+ def validate_return(self, ret):
+ # type: (UUnpublishedInfos) -> AUnpublishedInfos
+ """Check that all returns are UnpublishedInfo objects indicating
+ that the controller shouldn't be published via any server comms"""
+ return AUnpublishedInfos(ret)
- Args:
- published (list): [mri] list of published Controller mris
- """
- Halt = Hook()
+class ProcessStopHook(Hook[None]):
"""Called at stop() to gracefully stop all child controllers"""
+
+class Process(Loggable):
+ """Hosts a number of Controllers and provides spawn capabilities"""
+
def __init__(self, name):
- super(Process, self).__init__(process=name)
+ # type: (str_) -> None
+ self.set_logger(process_name=name)
self.name = name
self._cothread = maybe_import_cothread()
self._controllers = OrderedDict() # mri -> Controller
- self._published = [] # [mri] for publishable controllers
- self.started = False
+ self._unpublished = set() # [mri] for unpublishable controllers
+ self.state = STOPPED
self._spawned = []
self._spawn_count = 0
self._thread_pool = None
self._lock = RLock()
- self._hooked_func_names = {}
- self._hook_names = {}
- self._find_hooks()
-
- def _find_hooks(self):
- for name, member in inspect.getmembers(self, Hook.isinstance):
- assert member not in self._hook_names, \
- "Hook %s already in %s as %s" % (
- self, name, self._hook_names[member])
- self._hook_names[member] = name
- self._hooked_func_names[member] = {}
def start(self, timeout=None):
"""Start the process going
@@ -61,26 +89,45 @@ def start(self, timeout=None):
timeout (float): Maximum amount of time to wait for each spawned
process. None means forever
"""
- assert not self.started, "Process already started"
- self.started = True
- self._run_hook(self.Init, timeout=timeout)
- self._run_hook(
- self.Publish, args=(self._published,), timeout=timeout)
-
- def _run_hook(self, hook, controller_list=None, args=(), timeout=None):
+ assert not self.state, "Process already started"
+ self.state = STARTING
+ should_publish = self._start_controllers(
+ self._controllers.values(), timeout)
+ if should_publish:
+ self._publish_controllers(timeout)
+ self.state = STARTED
+
+ def _start_controllers(self, controller_list, timeout=None):
+ # type: (List[Controller], float) -> bool
+ # Start just the given controller_list
+ infos = self._run_hook(ProcessStartHook, controller_list,
+ timeout=timeout)
+ new_unpublished = set(
+ info.mri for info in UnpublishedInfo.filter_values(infos))
+ with self._lock:
+ self._unpublished |= new_unpublished
+ if len(controller_list) > len(new_unpublished):
+ return True
+ else:
+ return False
+
+ def _publish_controllers(self, timeout):
+ # New controllers to publish
+ published = [mri for mri in self._controllers
+ if mri not in self._unpublished]
+ self._run_hook(ProcessPublishHook,
+ timeout=timeout, published=published)
+
+ def _run_hook(self, hook, controller_list=None, timeout=None, **kwargs):
# Run the given hook waiting til all hooked functions are complete
# but swallowing any errors
if controller_list is None:
controller_list = self._controllers.values()
-
- spawned = []
- for controller in controller_list:
- func_name = self._hooked_func_names[hook].get(controller, None)
- if func_name:
- func = getattr(controller, func_name)
- spawned.append(controller.spawn(func, *args))
- for s in spawned:
- s.wait(timeout)
+ hooks = [hook(controller, **kwargs).set_spawn(controller.spawn)
+ for controller in controller_list]
+ hook_queue, hook_spawned = start_hooks(hooks)
+ return wait_hooks(
+ self.log, hook_queue, hook_spawned, timeout, exception_check=False)
def stop(self, timeout=None):
"""Stop the process and wait for it to finish
@@ -89,22 +136,23 @@ def stop(self, timeout=None):
timeout (float): Maximum amount of time to wait for each spawned
object. None means forever
"""
- assert self.started, "Process not started"
+ assert self.state == STARTED, "Process not started"
# Allow every controller a chance to clean up
- self._run_hook(self.Halt, timeout=timeout)
+ self._run_hook(ProcessStopHook, timeout=timeout)
for s in self._spawned:
self.log.debug("Waiting for %s", s._function)
s.wait(timeout=timeout)
self._spawned = []
self._controllers = OrderedDict()
- self._published = []
- self.started = False
+ self._unpublished = set()
+ self.state = STOPPED
if self._thread_pool:
self._thread_pool.close()
self._thread_pool.join()
self._thread_pool = None
def spawn(self, function, args, kwargs, use_cothread):
+ # type: (Callable[..., Any], Tuple, Dict, bool) -> Spawned
"""Runs the function in a worker thread, returning a Result object
Args:
@@ -117,10 +165,12 @@ def spawn(self, function, args, kwargs, use_cothread):
Spawned: Something you can call wait(timeout) on to see when it's
finished executing
"""
- return self._call_in_right_thread(
+ ret = self._call_in_right_thread(
self._spawn, function, args, kwargs, use_cothread)
+ return ret
def _call_in_right_thread(self, func, *args):
+ # type: (Callable[..., T], *Any) -> T
try:
return func(*args)
except WrongThreadError:
@@ -128,8 +178,9 @@ def _call_in_right_thread(self, func, *args):
return self._cothread.CallbackResult(func, *args)
def _spawn(self, function, args, kwargs, use_cothread):
+ # type: (Callable[..., Any], Tuple, Dict, bool) -> Spawned
with self._lock:
- assert self.started, "Can't spawn before process started"
+ assert self.state, "Can't spawn before process started"
if self._thread_pool is None:
if not self._cothread or not use_cothread:
self._thread_pool = ThreadPool(get_pool_num_threads())
@@ -143,89 +194,50 @@ def _spawn(self, function, args, kwargs, use_cothread):
return spawned
def _clear_spawn_list(self):
+ # type: () -> None
self._spawn_count = 0
self._spawned = [s for s in self._spawned if not s.ready()]
- def add_controller(self, mri, controller, publish=True, timeout=None):
+ def add_controller(self, controller, timeout=None):
+ # type: (Controller, float) -> None
"""Add a controller to be hosted by this process
Args:
- mri (str): The malcolm resource id for the controller
controller (Controller): Its controller
- publish (bool): Whether to notify other controllers about its
- existence
timeout (float): Maximum amount of time to wait for each spawned
object. None means forever
"""
self._call_in_right_thread(
- self._add_controller, mri, controller, publish, timeout)
-
- def _add_controller(self, mri, controller, publish, timeout):
- with self._lock:
- assert mri not in self._controllers, \
- "Controller already exists for %s" % mri
- self._controllers[mri] = controller
- for func_name, hook, _ in get_hook_decorated(controller):
- assert hook in self._hook_names, \
- "Controller %s func %s not hooked into %s" % (
- mri, func_name, self)
- self._hooked_func_names[hook][controller] = func_name
- if publish:
- self._published.append(mri)
- if self.started:
- self._run_hook(self.Init, [controller], timeout=timeout)
- self._run_hook(self.Publish, args=(self._published,),
- timeout=timeout)
-
- def remove_controller(self, mri, timeout=None):
- """Remove a controller that is hosted by this process
-
- Args:
- mri (str): The malcolm resource id for the controller
- timeout (float): Maximum amount of time to wait for each spawned
- object. None means forever
- """
- self._call_in_right_thread(self._remove_controller, mri, timeout)
+ self._add_controller, controller, timeout)
- def _remove_controller(self, mri, timeout):
+ def _add_controller(self, controller, timeout):
+ # type: (Controller, float) -> None
with self._lock:
- controller = self._controllers.pop(mri)
- for d in self._hooked_func_names.values():
- d.pop(controller, None)
- if mri in self._published:
- self._published.remove(mri)
- if self.started:
- self._run_hook(self.Publish, args=(self._published,),
- timeout=timeout)
- self._run_hook(self.Halt, [controller], timeout=timeout)
+ assert controller.mri not in self._controllers, \
+ "Controller already exists for %s" % controller.mri
+ self._controllers[controller.mri] = controller
+ controller.setup(self)
+ if self.state:
+ should_publish = self._start_controllers([controller], timeout)
+ if self.state == STARTED and should_publish:
+ self._publish_controllers(timeout)
@property
def mri_list(self):
+ # type: () -> List[str]
return list(self._controllers)
def get_controller(self, mri):
- """Get controller from mri
-
- Args:
- mri (str): The malcolm resource id for the controller
-
- Returns:
- Controller: the controller
- """
+ # type: (str) -> Controller
+ """Get controller which can make Block views for this mri"""
try:
return self._controllers[mri]
except KeyError:
- raise ValueError("No controller registered for mri %r" % mri)
+ raise ValueError("No controller registered for mri '%s'" % mri)
def block_view(self, mri):
- """Get a Block view from a Controller
-
- Args:
- mri (str): The malcolm resource id for the block
-
- Returns:
- Block: the block view
- """
+ # type: (str) -> Block
+ """Get a Block view from a Controller with given mri"""
controller = self.get_controller(mri)
context = Context(self)
block = controller.make_view(context)
diff --git a/malcolm/core/request.py b/malcolm/core/request.py
index 8ee2f300d..901075085 100644
--- a/malcolm/core/request.py
+++ b/malcolm/core/request.py
@@ -1,221 +1,143 @@
import logging
-from malcolm.compat import OrderedDict, str_
-from .response import Return, Error, Update, Delta
-from .serializable import Serializable, deserialize_object, serialize_object, \
- json_encode
+from annotypes import Anno, Array, Any, TYPE_CHECKING, Mapping, Union, Sequence
+
+from .response import Return, Error, Update, Delta, Response
+from .serializable import Serializable, serialize_object
+
+if TYPE_CHECKING:
+ from typing import Callable, Tuple, List
+ Callback = Callable[[Response], None]
# Create a module level logger
log = logging.getLogger(__name__)
+with Anno("ID that should be used for any responses"):
+ AId = int
+with Anno("Path to target Block substructure"):
+ APath = Array[str]
+with Anno("Value to put"):
+ AValue = Any
+with Anno("Parameters to use in a method Post"):
+ AParameters = Mapping[str, Any]
+with Anno("Notify of differences only"):
+ ADifferences = bool
+UPath = Union[APath, Sequence[str], str]
+
+
class Request(Serializable):
"""Request object that registers a callback for when action is complete."""
+ __slots__ = ["id", "callback"]
- endpoints = ["id"]
- __slots__ = []
-
- id = None
- callback = None
-
- def __init__(self, id=None, callback=None):
- """
- Args:
- id (int): ID that context(): Context of request
- callback (callable): Callback for when the response is available
-
- callback(response) will be called when the request is completed
- """
- self.set_id(id)
- self.set_callback(callback)
-
- def __repr__(self):
- d = self.to_dict()
- if self.callback:
- d["callback"] = repr(self.callback)
- return json_encode(d)
-
- def set_id(self, id):
- """Set the identifier for the request
-
- Args:
- id (int): Unique identifier for request
- """
- if id is not None:
- id = deserialize_object(id, int)
+ # Allow id to shadow builtin id so id is a key in the serialized dict
+ # noinspection PyShadowingBuiltins
+ def __init__(self, id=0):
+ # type: (AId) -> None
self.id = id
+ def callback(_):
+ # type: (Response) -> None
+ pass
+
+ self.callback = callback
+
def set_callback(self, callback):
+ # type: (Callback) -> None
"""Set the callback to be called on response"""
- if callback is None:
- def callback(value):
- pass
self.callback = callback
def return_response(self, value=None):
- """Create a Return Response object to signal a return value
-
- Args:
- value (object): Return value
- """
+ # type: (Any) -> Tuple[Callback, Return]
+ """Create a Return Response object to signal a return value"""
response = Return(id=self.id, value=value)
return self.callback, response
def error_response(self, exception):
- """Create an Error Response object to signal an error
-
- Args:
- exception (exceptions.Exception): Message explaining error
- """
- message = "%s: %s" % (exception.__class__.__name__, exception)
- response = Error(id=self.id, message=message)
- log.info("Exception raised for request %s", self, exc_info=True)
+ # type: (Exception) -> Tuple[Callback, Error]
+ """Create an Error Response object to signal an error"""
+ response = Error(id=self.id, message=exception)
+ log.exception("Exception raised for request %s", self)
return self.callback, response
def generate_key(self):
"""A key that will uniquely identify this request, for matching
Subscribes up to Unsubscribes"""
+ # type: () -> Tuple[Callback, int]
key = (self.callback, self.id)
return key
class PathRequest(Request):
- """Create a Get Request object"""
-
- endpoints = ["id", "path"]
- __slots__ = []
-
- path = None
-
- def __init__(self, id=None, path=None, callback=None):
- """
- Args:
- id (int): Unique identifier for request
- path (list): [`str`] Path to target Block substructure
- callback (callable): Callback for when the response is available
- """
- super(PathRequest, self).__init__(id, callback)
- self.set_path(path)
+ __slots__ = ["path"]
- def set_path(self, path):
- """Set the path to the endpoint to operate on
+ # Allow id to shadow builtin id so id is a key in the serialized dict
+ # noinspection PyShadowingBuiltins
+ def __init__(self, id=0, path=None):
+ # type: (AId, UPath) -> None
+ super(PathRequest, self).__init__(id)
+ if path:
+ assert isinstance(path, (list, tuple)), "Path must be given as a list or tuple"
+ self.path = APath(path)
- Args:
- path (list): [`str`] Path to target Block substructure
- """
- self.path = [deserialize_object(e, str_) for e in path]
@Serializable.register_subclass("malcolm:core/Get:1.0")
class Get(PathRequest):
"""Create a Get Request object"""
+ __slots__ = []
@Serializable.register_subclass("malcolm:core/Put:1.0")
class Put(PathRequest):
"""Create a Put Request object"""
+ __slots__ = ["value"]
- endpoints = ["id", "path", "value"]
- __slots__ = []
-
- value = None
-
- def __init__(self, id=None, path=(), value=None, callback=None):
- """
- Args:
- id (int): Unique identifier for request
- path (list): [`str`] Path to target Block substructure
- value: Value to put to path
- callback (callable): Callback for when the response is available
- """
- super(Put, self).__init__(id, path, callback)
- self.set_value(value)
-
- def set_value(self, value):
- """Value to Put to endpoint
-
- Args:
- value: Value to put to path
- """
+ # Allow id to shadow builtin id so id is a key in the serialized dict
+ # noinspection PyShadowingBuiltins
+ def __init__(self, id=0, path=None, value=None):
+ # type: (AId, UPath, AValue) -> None
+ super(Put, self).__init__(id, path)
self.value = serialize_object(value)
@Serializable.register_subclass("malcolm:core/Post:1.0")
class Post(PathRequest):
"""Create a Post Request object"""
+ __slots__ = ["parameters"]
- endpoints = ["id", "path", "parameters"]
- __slots__ = []
-
- parameters = None
-
- def __init__(self, id=None, path=(), parameters=None, callback=None):
- """
- Args:
- id (int): Unique identifier for request
- path (list): [`str`] Path to target Block substructure
- parameters: Parameters to Post
- callback (callable): Callback for when the response is available
- """
- super(Post, self).__init__(id, path, callback)
- self.set_parameters(parameters)
-
- def set_parameters(self, parameters):
- """Parameters to Post to endpoint
-
- Args:
- parameters: Value to post to path
- """
+ # Allow id to shadow builtin id so id is a key in the serialized dict
+ # noinspection PyShadowingBuiltins
+ def __init__(self, id=0, path=None, parameters=None):
+ # type: (AId, UPath, AParameters) -> None
+ super(Post, self).__init__(id, path)
if parameters is not None:
- parameters = OrderedDict(
- (deserialize_object(k, str_), serialize_object(v))
- for k, v in parameters.items())
+ for k, v in parameters.items():
+ parameters[k] = serialize_object(v)
self.parameters = parameters
@Serializable.register_subclass("malcolm:core/Subscribe:1.0")
class Subscribe(PathRequest):
"""Create a Subscribe Request object"""
+ __slots__ = ["delta"]
- endpoints = ["id", "path", "delta"]
- __slots__ = []
-
- delta = None
-
- def __init__(self, id=None, path=(), delta=False, callback=None):
- """Args:
- id (int): Unique identifier for request
- path (list): [`str`] Path to target Block substructure
- delta (bool): Notify of differences only (default False)
- callback (callable): Callback for when the response is available
- """
-
- super(Subscribe, self).__init__(id, path, callback)
- self.set_delta(delta)
-
- def set_delta(self, delta):
- """Whether to ask for delta responses or not
-
- Args:
- delta: If true then request Delta responses, otherwise Update
- """
- self.delta = deserialize_object(delta, bool)
+ # Allow id to shadow builtin id so id is a key in the serialized dict
+ # noinspection PyShadowingBuiltins
+ def __init__(self, id=0, path=None, delta=False):
+ # type: (AId, UPath, ADifferences) -> None
+ super(Subscribe, self).__init__(id, path)
+ self.delta = delta
def update_response(self, value):
- """Create an Update Response object to handle the request
-
- Args:
- value: Serialized new value
- """
+ # type: (Any) -> Tuple[Callback, Update]
+ """Create an Update Response object to handle the request"""
response = Update(id=self.id, value=value)
return self.callback, response
def delta_response(self, changes):
- """Create a Delta Response object to handle the request
-
- Args:
- changes (list): list of [[path], value] pairs for changed values
- """
+ # type: (List[List[List[str], Any]]) -> Tuple[Callback, Delta]
+        """Create a Delta Response object to handle the request"""
response = Delta(id=self.id, changes=changes)
return self.callback, response
@@ -223,3 +145,4 @@ def delta_response(self, changes):
@Serializable.register_subclass("malcolm:core/Unsubscribe:1.0")
class Unsubscribe(Request):
"""Create an Unsubscribe Request object"""
+ __slots__ = []
diff --git a/malcolm/core/response.py b/malcolm/core/response.py
index 2a27e3b68..fb1cab11b 100644
--- a/malcolm/core/response.py
+++ b/malcolm/core/response.py
@@ -1,109 +1,64 @@
-from malcolm.compat import str_
-from .serializable import Serializable, deserialize_object, serialize_object
+from annotypes import Anno, Any
+from .serializable import Serializable
-class Response(Serializable):
- """Represents a response to a Request"""
- endpoints = ["id"]
- __slots__ = []
+with Anno("ID that the Request was sent with"):
+ AId = int
+with Anno("Return value of the request"):
+ AValue = Any
+with Anno("Error message exception"):
+ AMessage = Exception
+with Anno("List of [[path], value] pairs for changed values"):
+ AChanges = Any
- id = None
- def __init__(self, id=None):
- """Args:
- id (int): ID that the Request was sent with
- """
- self.set_id(id)
+class Response(Serializable):
+ """Represents a response to a Request"""
- def set_id(self, id):
- """Set the identifier for the response
+ __slots__ = ["id"]
- Args:
- id (int): Unique identifier for response
- """
- if id is not None:
- id = deserialize_object(id, int)
+ def __init__(self, id=0):
+ # type: (AId) -> None
self.id = id
@Serializable.register_subclass("malcolm:core/Return:1.0")
class Return(Response):
+ """Represents a return from a Put or Post"""
- endpoints = ["id", "value"]
- __slots__ = []
-
- value = None
+ __slots__ = ["value"]
- def __init__(self, id=None, value=None):
- """
- Args:
- id (int): ID that the Request was sent with
- value: Return value of the Request
- """
+ def __init__(self, id=0, value=None):
+ # type: (AId, AValue) -> None
super(Return, self).__init__(id)
- self.set_value(value)
-
- def set_value(self, value):
- """Set the return value of the Request
-
- Args:
- value: Serialized value
- """
- self.value = serialize_object(value)
+ # TODO: we used to serialize here, but I think its unnecessary
+ # This will be serialized by to_dict() if needed
+ self.value = value
@Serializable.register_subclass("malcolm:core/Error:1.0")
class Error(Response):
"""Create an Error Response object with the provided parameters"""
- endpoints = ["id", "message"]
- __slots__ = []
-
- message = None
+ __slots__ = ["message"]
- def __init__(self, id=None, message=""):
- """
- Args:
- id (int): ID that the Request was sent with
- message(str): Error message
- """
+ def __init__(self, id=0, message=""):
+ # type: (AId, AMessage) -> None
super(Error, self).__init__(id)
- self.set_message(message)
-
- def set_message(self, message):
- """Set the error message of the Response
-
- Args:
- message (str): Error message
- """
- self.message = deserialize_object(message, str_)
+ self.message = message
@Serializable.register_subclass("malcolm:core/Update:1.0")
class Update(Response):
"""Create an Update Response object with the provided parameters"""
- endpoints = ["id", "value"]
- __slots__ = []
-
- value = None
+ __slots__ = ["value"]
- def __init__(self, id=None, value=None):
- """
- Args:
- id (int): ID that the Request was sent with
- value: Serialized state of update object
- """
+ def __init__(self, id=0, value=None):
+ # type: (AId, AValue) -> None
super(Update, self).__init__(id)
- self.set_value(value)
-
- def set_value(self, value):
- """Set the return value of the Request. Should already be serialized
-
- Args:
- value: Serialized value
- """
+ # Should already be serialized
self.value = value
@@ -111,27 +66,12 @@ def set_value(self, value):
class Delta(Response):
"""Create a Delta Response object with the provided parameters"""
- endpoints = ["id", "changes"]
- __slots__ = []
-
- changes = None
-
- def __init__(self, id=None, changes=None):
- """
- Args:
- id (int): ID that the Request was sent with
- changes (list): list of [[path], value] pairs for changed values
- """
+ __slots__ = ["changes"]
+ def __init__(self, id=0, changes=None):
+ # type: (AId, AChanges) -> None
super(Delta, self).__init__(id)
- self.set_changes(changes)
-
- def set_changes(self, changes):
- """Set the change set for the Request, should already be serialized
-
- Args:
- changes (list): list of [[path], value] pairs for changed values
- """
+ # Should already be serialized
self.changes = changes
def apply_changes_to(self, d):
diff --git a/malcolm/core/serializable.py b/malcolm/core/serializable.py
index 83792ec2f..63a8a4c7e 100644
--- a/malcolm/core/serializable.py
+++ b/malcolm/core/serializable.py
@@ -2,9 +2,13 @@
import logging
import json
-import numpy as np
+from annotypes import WithCallTypes, TypeVar, Any, TYPE_CHECKING, Array
+from enum import Enum
from malcolm.compat import OrderedDict
+from .errors import FieldError
+if TYPE_CHECKING:
+ from typing import Type, Union, Sequence
# Create a module level logger
log = logging.getLogger(__name__)
@@ -18,18 +22,27 @@ def json_encode(o, indent=None):
def json_decode(s):
- o = json.loads(s, object_pairs_hook=OrderedDict)
- return o
+ try:
+ o = json.loads(s, object_pairs_hook=OrderedDict)
+ assert isinstance(o, OrderedDict), "didn't return OrderedDict"
+ return o
+ except Exception as e:
+ raise ValueError("Error decoding JSON object (%s)" % str(e))
def serialize_hook(o):
o = serialize_object(o)
- if isinstance(o, (np.number, np.bool_)):
- return o.tolist()
- elif isinstance(o, np.ndarray):
- assert len(o.shape) == 1, "Expected 1d array, got {}".format(o.shape)
+ if isinstance(o, Array):
+ # Unwrap the array as it might be a list, tuple or numpy array
+ o = o.seq
+ if hasattr(o, "tolist"):
+ # Numpy bools, numbers and arrays all have a tolist function
return o.tolist()
+ elif isinstance(o, Exception):
+ # Exceptions should be stringified
+ return "%s: %s" % (type(o).__name__, str(o))
else:
+ # Everything else should be serializable already
return o
@@ -82,59 +95,41 @@ def serialize_object(o):
elif isinstance(o, list):
# Need to recurse down
return [serialize_object(x) for x in o]
+ elif isinstance(o, Enum):
+ return o.value
else:
# Hope it's serializable!
return o
-def repr_object(o):
- if hasattr(o, "to_dict"):
- # This will do all the sub layers for us
- return repr(o)
- elif isinstance(o, dict):
- # Need to recurse down
- text = ", ".join("%r: %s" % (k, repr_object(v)) for k, v in o.items())
- return "{%s}" % text
- elif isinstance(o, list):
- # Need to recurse down
- text = ", ".join(repr_object(x) for x in o)
- return "[%s]" % text
- else:
- # Hope it's serializable!
- return repr(o)
+T = TypeVar("T")
def deserialize_object(ob, type_check=None):
+ # type: (Any, Union[Type[T], Sequence[Type[T]]]) -> T
if isinstance(ob, dict):
subclass = Serializable.lookup_subclass(ob)
ob = subclass.from_dict(ob)
if type_check is not None:
assert isinstance(ob, type_check), \
- "Expected %s, got %r" % (type_check, ob)
+ "Expected %s, got %r" % (type_check, type(ob))
return ob
-class Serializable(object):
- """Mixin class for serializable objects"""
+class Serializable(WithCallTypes):
+ """Base class for serializable objects"""
# This will be set by subclasses calling cls.register_subclass()
typeid = None
- # List of endpoint strings for to_dict()
- endpoints = ()
-
# dict mapping typeid name -> cls
_subcls_lookup = {}
- def __len__(self):
- return len(self.endpoints)
-
- def __iter__(self):
- return iter(self.endpoints)
+ __slots__ = []
def __getitem__(self, item):
- """Dictionary access to endpoint data"""
- if item in self.endpoints:
+ """Dictionary access to attr data"""
+ if item in self.call_types:
try:
return getattr(self, item)
except (AttributeError, TypeError):
@@ -142,7 +137,11 @@ def __getitem__(self, item):
else:
raise KeyError(item)
+ def __iter__(self):
+ return iter(self.call_types)
+
def to_dict(self):
+ # type: () -> OrderedDict
"""Create a dictionary representation of object attributes
Returns:
@@ -150,35 +149,15 @@ def to_dict(self):
"""
d = OrderedDict()
- d["typeid"] = self.typeid
+ if self.typeid:
+ d["typeid"] = self.typeid
- for endpoint in self.endpoints:
- check_camel_case(endpoint)
- d[endpoint] = serialize_object(getattr(self, endpoint))
+ for k in self.call_types:
+ # check_camel_case(k)
+ d[k] = serialize_object(getattr(self, k))
return d
- def __repr__(self):
- fields = [(endpoint, repr_object(getattr(self, endpoint)))
- for endpoint in self.endpoints]
- fields = " ".join("%s=%s" % f for f in fields)
- s = "<%s %s>" % (self.__class__.__name__, fields)
- return s
-
- def __eq__(self, other):
- if hasattr(other, "to_dict"):
- return self.to_dict() == other.to_dict()
- else:
- return self.to_dict() == other
-
- def __ne__(self, other):
- return not self == other
-
- def __hash__(self):
- # This is not technically correct, but will do...
- # https://stackoverflow.com/a/1608888
- return id(self)
-
@classmethod
def from_dict(cls, d, ignore=()):
"""Create an instance from a serialized version of cls
@@ -198,8 +177,11 @@ def from_dict(cls, d, ignore=()):
(v, cls, cls.typeid)
elif k not in ignore:
filtered[k] = v
-
- inst = cls(**filtered)
+ try:
+ inst = cls(**filtered)
+ except TypeError as e:
+ # raise TypeError("%s(**%s) raised error: %s" % (type(cls), filtered, str(e)))
+ raise TypeError("%s raised error: %s" % (cls.typeid, str(e)))
return inst
@classmethod
@@ -225,6 +207,13 @@ def lookup_subclass(cls, d):
Returns:
Serializable subclass
"""
- typeid = d["typeid"]
- subclass = cls._subcls_lookup[typeid]
- return subclass
+ try:
+ typeid = d["typeid"]
+ except KeyError:
+ raise FieldError("typeid field not present in dictionary ( d.keys() = %s )" % list(d))
+
+ subclass = cls._subcls_lookup.get(typeid, None)
+ if not subclass:
+ raise FieldError("'%s' not a valid typeid" % typeid)
+ else:
+ return subclass
\ No newline at end of file
diff --git a/malcolm/core/spawned.py b/malcolm/core/spawned.py
index 6ad88eeab..7189b2d11 100644
--- a/malcolm/core/spawned.py
+++ b/malcolm/core/spawned.py
@@ -1,8 +1,15 @@
import logging
+from multiprocessing.pool import ThreadPool
+
+from annotypes import TYPE_CHECKING
from malcolm.compat import maybe_import_cothread, get_thread_ident
from .queue import Queue
+if TYPE_CHECKING:
+ from typing import Callable, TypeVar, Tuple, Dict, Any, Union
+ T = TypeVar("T")
+
# Create a module level logger
log = logging.getLogger(__name__)
@@ -11,14 +18,14 @@
class Spawned(object):
NO_RESULT = object()
- def __init__(self, function, args, kwargs, use_cothread=True,
- thread_pool=None):
+ def __init__(self, func, args, kwargs, use_cothread=True, thread_pool=None):
+ # type: (Callable[..., Any], Tuple, Dict, bool, ThreadPool) -> None
self.cothread = maybe_import_cothread()
if use_cothread and not self.cothread:
use_cothread = False
self._result_queue = Queue()
- self._result = self.NO_RESULT
- self._function = function
+ self._result = self.NO_RESULT # type: Union[T, Exception]
+ self._function = func
self._args = args
self._kwargs = kwargs
@@ -42,17 +49,25 @@ def catching_function(self):
"Exception calling %s(*%s, **%s)",
self._function, self._args, self._kwargs, exc_info=True)
self._result = e
+ # We finished running the function, so remove the reference to it
+ # in case it's stopping garbage collection
+ self._function = None
self._result_queue.put(None)
def wait(self, timeout=None):
+ # type: (float) -> None
# Only one person can wait on this at a time
if self._result == self.NO_RESULT:
self._result_queue.get(timeout)
def ready(self):
+ # type: () -> bool
+ """Return True if the spawned result has returned or errored"""
return self._result != self.NO_RESULT
def get(self, timeout=None):
+ # type: (float) -> T
+ """Return the result or raise the error the function has produced"""
self.wait(timeout)
if isinstance(self._result, Exception):
raise self._result
diff --git a/malcolm/core/stateset.py b/malcolm/core/stateset.py
new file mode 100644
index 000000000..167fb15f1
--- /dev/null
+++ b/malcolm/core/stateset.py
@@ -0,0 +1,25 @@
+from malcolm.compat import OrderedDict
+
+
+class StateSet(object):
+ def __init__(self):
+ # type: () -> None
+ self._allowed = OrderedDict()
+ # These are all the states we can possibly be in
+ self.possible_states = []
+
+ def transition_allowed(self, initial_state, target_state):
+ # type: (str, str) -> bool
+ """Check if a transition between two states is allowed"""
+ assert initial_state in self._allowed, \
+ "%s is not in %s" % (initial_state, list(self._allowed))
+ return target_state in self._allowed[initial_state]
+
+ def set_allowed(self, initial_state, *allowed_states):
+ # type: (str, *str) -> None
+ """Add an allowed transition from initial_state to allowed_states"""
+ allowed_states = list(allowed_states)
+ self._allowed.setdefault(initial_state, set()).update(allowed_states)
+ for state in allowed_states + [initial_state]:
+ if state not in self.possible_states:
+ self.possible_states.append(state)
diff --git a/malcolm/core/stringarray.py b/malcolm/core/stringarray.py
deleted file mode 100644
index 6fb70d040..000000000
--- a/malcolm/core/stringarray.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from malcolm.compat import str_
-
-
-def error_message(*args):
- formatted_args = ", ".join(repr(a) for a in args)
- message = "Expected StringArray(s1, s2, ...) or StringArray(seq). " \
- "Got StringArray(%s)" % formatted_args
- return message
-
-
-class StringArray(tuple):
- def __new__(cls, seq=(), *more):
- if isinstance(seq, str_):
- # First element is a string, so assume *more is seq of strings
- seq = (seq,) + more
- else:
- # Assume seq is iterable, so there should be no *more
- if more:
- raise ValueError(error_message(seq, *more))
-
- inst = tuple.__new__(StringArray, seq)
- for item in inst:
- if not isinstance(item, str_):
- raise ValueError(error_message(seq, *more))
- return inst
diff --git a/malcolm/core/table.py b/malcolm/core/table.py
index f6ac5e20e..b49f6b061 100644
--- a/malcolm/core/table.py
+++ b/malcolm/core/table.py
@@ -1,92 +1,33 @@
-import numpy as np
-
-from malcolm.core.serializable import Serializable
-from malcolm.core.stringarray import StringArray
+from .serializable import Serializable
@Serializable.register_subclass("malcolm:core/Table:1.0")
class Table(Serializable):
# real data stored as attributes
# getitem supported for row by row operations
-
- def __init__(self, meta, d=None):
- self.meta = meta
- if d is None:
- d = {}
- for e in meta.elements:
- v = d[e] if e in d else []
- setattr(self, e, v)
-
- @property
- def endpoints(self):
- return list(self.meta.elements)
-
- def verify_column_lengths(self):
- if len(self.meta.elements) == 0:
- return True
- lengths = [len(getattr(self, e)) for e in self.meta.elements]
- assert len(set(lengths)) == 1, \
+ def validate_column_lengths(self):
+ lengths = {a: len(getattr(self, a)) for a in self.call_types}
+ assert len(set(lengths.values())) == 1, \
"Column lengths %s don't match" % lengths
- return lengths[0]
- def __getitem__(self, idx):
- """Get row"""
- if isinstance(idx, int):
- self.verify_column_lengths()
- columns = len(self.meta.elements)
- row = [None] * columns
- for i in range(columns):
- row[i] = getattr(self, list(self.meta.elements)[i])[idx]
- return row
+ def __getitem__(self, item):
+ if isinstance(item, int):
+ self.validate_column_lengths()
+ return [getattr(self, a)[item] for a in self.call_types]
else:
- return getattr(self, idx)
-
- def __setitem__(self, idx, row):
- """Set row for int, column for string"""
- if isinstance(idx, int):
- # set row from index
- length = self.verify_column_lengths()
- # Check length in range
- if idx >= length:
- raise IndexError("Index %s >= table length %s" % (idx, length))
- if len(row) != len(self.meta.elements):
- raise ValueError(
- "Row %s does not specify correct number of values" % row)
- for e, v in zip(self.meta.elements, row):
- column = getattr(self, e)
- if isinstance(column, StringArray):
- new_column = StringArray(
- column[:idx] + (v,) + column[idx+1:])
- else:
- # numpy array
- v = self.meta.elements[e].validate([v])
- new_column = np.concatenate(
- (column[:idx], v, column[idx+1:]))
- setattr(self, e, new_column)
- else:
- setattr(self, idx, row)
-
- def __setattr__(self, attr, value):
- """Set column"""
- if hasattr(self, "meta"):
- if attr not in self.meta.elements:
- raise AttributeError(
- "Attr %s not in %s" % (attr, self.meta.elements))
- column_meta = self.meta.elements[attr]
- value = column_meta.validate(value)
- object.__setattr__(self, attr, value)
-
- def append(self, row):
- self.verify_column_lengths()
- if len(row) != len(self.meta.elements):
- raise ValueError(
- "Row %s does not specify correct number of values" % row)
- for e, v in zip(self.meta.elements, row):
- column = getattr(self, e)
- if isinstance(column, StringArray):
- new_column = StringArray(column + (v,))
- else:
- v = self.meta.elements[e].validate([v])
- new_column = np.concatenate((column, v))
- setattr(self, e, new_column)
-
+ return super(Table, self).__getitem__(item)
+
+ @classmethod
+ def from_rows(cls, rows):
+ attrs = {k: [] for k in cls.call_types}
+ for row in rows:
+ for key, data in zip(cls.call_types, row):
+ attrs[key].append(data)
+ attrs = {k: cls.call_types[k](v) for k, v in attrs.items()}
+ return cls(**attrs)
+
+ def rows(self):
+ self.validate_column_lengths()
+ data = [getattr(self, a) for a in self.call_types]
+ for row in zip(*data):
+ yield list(row)
diff --git a/malcolm/core/tags.py b/malcolm/core/tags.py
new file mode 100644
index 000000000..bd4077b66
--- /dev/null
+++ b/malcolm/core/tags.py
@@ -0,0 +1,66 @@
+from enum import Enum
+from annotypes import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing import Sequence, Union
+
+
+def group_tag(group_name):
+ # type: (str) -> str
+ """Marks this field as belonging to a group"""
+ tag = "group:%s" % group_name
+ return tag
+
+
+def config_tag(iteration=1):
+ # type: (int) -> str
+ """Marks this field as a value that should be saved and loaded at config
+
+ Args:
+ iteration: All iterations are sorted in increasing order and done in
+ batches of the same iteration number
+ """
+ tag = "config:%d" % iteration
+ return tag
+
+
+def get_config_tag(tags):
+ # type: (Sequence[str]) -> Union[str, None]
+ """Get the config_tag from tags or return None"""
+ for tag in tags:
+ if tag.startswith("config:"):
+ return tag
+
+
+class Widget(Enum):
+ """Enum with all the known widget tags to appear on Attribute Metas"""
+ NONE = "" # Force no widget
+ TEXTINPUT = "textinput" # Editable text input box
+ TEXTUPDATE = "textupdate" # Read only text update
+ LED = "led" # On/Off LED indicator
+ COMBO = "combo" # Select from a number of choice values
+ ICON = "icon" # This field gives the URL for an icon for the whole Block
+ GROUP = "group" # Group node in a TreeView that other fields can attach to
+ TABLE = "table" # Table of rows. A list is a single column table
+ CHECKBOX = "checkbox" # A box that can be checked or not
+ FLOWGRAPH = "flowgraph" # Boxes with lines for child block connections
+ TITLE = "title" # This widget should be used as the title of the page
+
+ def tag(self):
+ assert self != Widget.NONE, "Widget.NONE has no widget tag"
+ return "widget:%s" % self.value
+
+
+class Port(Enum):
+ """Enum with all the known flowgraph port tags to appear on Attribute
+ Metas"""
+ BOOL = "bool" # Boolean
+ INT32 = "int32" # 32-bit signed integer
+ NDARRAY = "NDArray" # areaDetector NDArray port
+ MOTOR = "motor" # motor record connection to CS or controller
+
+ def inport_tag(self, disconnected_value):
+ return "inport:%s:%s" % (self.value, disconnected_value)
+
+ def outport_tag(self, connected_value):
+ return "outport:%s:%s" % (self.value, connected_value)
diff --git a/malcolm/core/timestamp.py b/malcolm/core/timestamp.py
index b0f836080..9ddc2af14 100644
--- a/malcolm/core/timestamp.py
+++ b/malcolm/core/timestamp.py
@@ -1,19 +1,32 @@
import time
import numpy as np
+from annotypes import Anno
from .serializable import Serializable
+
+with Anno("Seconds since Jan 1, 1970 00:00:00 UTC"):
+ ASecondsPastEpoch = np.int64
+with Anno("Nanoseconds relative to the secondsPastEpoch field"):
+ ANanoseconds = np.int32
+with Anno("An integer value whose interpretation is deliberately undefined"):
+ AUserTag = np.int32
+
+
zero32 = np.int32(0)
@Serializable.register_subclass("time_t")
class TimeStamp(Serializable):
- endpoints = ["secondsPastEpoch", "nanoseconds", "userTag"]
- __slots__ = endpoints
+ __slots__ = ["secondsPastEpoch", "nanoseconds", "userTag"]
+    # secondsPastEpoch and userTag are camelCase to maintain compatibility with
+    # EPICS normative types
+    # noinspection PyPep8Naming
def __init__(self, secondsPastEpoch=None, nanoseconds=None, userTag=zero32):
+ # type: (ASecondsPastEpoch, ANanoseconds, AUserTag) -> None
# Set initial values
if secondsPastEpoch is None or nanoseconds is None:
now = time.time()
@@ -25,4 +38,5 @@ def __init__(self, secondsPastEpoch=None, nanoseconds=None, userTag=zero32):
self.userTag = userTag
def to_time(self):
+ # type: () -> float
return self.secondsPastEpoch + 1e-9 * self.nanoseconds
diff --git a/malcolm/core/varraymeta.py b/malcolm/core/varraymeta.py
deleted file mode 100644
index 19c311ea3..000000000
--- a/malcolm/core/varraymeta.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .vmeta import VMeta
-from .ntscalararray import NTScalarArray
-
-
-class VArrayMeta(VMeta):
- # intermediate class so TableMeta can say "only arrays"
- attribute_class = NTScalarArray
diff --git a/malcolm/core/view.py b/malcolm/core/view.py
deleted file mode 100644
index d586b6fa8..000000000
--- a/malcolm/core/view.py
+++ /dev/null
@@ -1,90 +0,0 @@
-class View(object):
- """View of a Model to allow Put, Get, Subscribe etc."""
- _controller = None
- _context = None
- _data = None
- _endpoints = ()
-
- def __init__(self):
- raise NotImplementedError("View must be instantiated with make_view()")
-
- def _do_init(self, controller, context, data):
- # This will be called by the subclass created in make_view
- object.__setattr__(self, "_controller", controller)
- object.__setattr__(self, "_context", context)
- object.__setattr__(self, "_data", data)
- if hasattr(data, "typeid"):
- object.__setattr__(self, "typeid", data.typeid)
- self._prepare_endpoints(data)
-
- def _prepare_endpoints(self, data):
- object.__setattr__(self, "_endpoints", tuple(data))
- for endpoint in self._endpoints:
- # Add _subscribe methods for each endpoint
- self._make_subscribe_method(endpoint)
-
- def __iter__(self):
- return iter(self._endpoints)
-
- def to_dict(self):
- return self._data.to_dict()
-
- def __getitem__(self, item):
- try:
- return getattr(self, item)
- except AttributeError:
- return KeyError(item)
-
- def __setattr__(self, name, value):
- raise NameError("Cannot set attribute %s on view" % name)
-
- def _make_subscribe_method(self, endpoint):
- # Make subscribe_endpoint method
- def subscribe_child(callback, *args, **kwargs):
- return self._context.subscribe(
- self._data.path + [endpoint], callback, *args, **kwargs)
-
- object.__setattr__(self, "subscribe_%s" % endpoint, subscribe_child)
-
-
-def make_get_property(cls, endpoint):
- @property
- def make_child_view(self):
- # Get the child of self._data. Needs to be done by the controller to
- # make sure lock is taken and we get consistent data
- child = self._controller.make_view(self._context, self._data, endpoint)
- return child
-
- setattr(cls, endpoint, make_child_view)
-
-
-def make_view(controller, context, data):
- """Make a View subclass containing properties specific for given data
-
- Args:
- controller (Controller): The child controller that hosts the data
- context (Context): The context the parent has made that the View should
- use for manipulating the data
- data (Model): The actual data that context will be manipulating
-
- Returns:
- View: A View subclass instance that provides a user-focused API to
- the given data
- """
- # Properties can only be set on classes, so make subclass that we can use
-
- class ViewSubclass(View):
- def __init__(self):
- self._do_init(controller, context, data)
-
- for endpoint in data:
- # make properties for the endpoints we know about
- make_get_property(ViewSubclass, endpoint)
-
- view = ViewSubclass()
- return view
-
-
-
-
-
diff --git a/malcolm/core/views.py b/malcolm/core/views.py
new file mode 100644
index 000000000..c623aff93
--- /dev/null
+++ b/malcolm/core/views.py
@@ -0,0 +1,262 @@
+from annotypes import TYPE_CHECKING
+
+from malcolm.compat import OrderedDict
+from .context import Context
+from .models import BlockModel, MethodModel, AttributeModel
+from malcolm.core.models import Model
+
+if TYPE_CHECKING:
+ from typing import Any
+ from .controller import Controller
+
+
+class View(object):
+ """View of a Model to allow Put, Get, Subscribe etc."""
+ _controller = None # type: Controller
+ _context = None # type: Context
+ _data = None # type: Model
+ typeid = None # type: str
+
+ def __init__(self, controller, context, data):
+ # type: (Controller, Context, Model) -> None
+ object.__setattr__(self, "typeid", data.typeid)
+ object.__setattr__(self, "_controller", controller)
+ object.__setattr__(self, "_context", context)
+ object.__setattr__(self, "_data", data)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def to_dict(self):
+ return self._data.to_dict()
+
+ def __getitem__(self, item):
+ try:
+ return getattr(self, item)
+ except AttributeError:
+            raise KeyError(item)
+
+ def __setattr__(self, name, value):
+ raise NameError("Cannot set attribute %s on view" % name)
+
+
+def _make_get_property(cls, endpoint):
+ @property
+ def make_child_view(self):
+ # Get the child of self._data. Needs to be done by the controller to
+ # make sure lock is taken and we get consistent data
+ child = self._controller.make_view(self._context, self._data, endpoint)
+ return child
+
+ setattr(cls, endpoint, make_child_view)
+
+
+def _make_view_subclass(cls, controller, context, data):
+ # Properties can only be set on classes, so make subclass that we can use
+ class ViewSubclass(cls):
+ pass
+
+ for endpoint in data:
+ # make properties for the endpoints we know about
+ _make_get_property(ViewSubclass, endpoint)
+
+ view = ViewSubclass(controller, context, data)
+ return view
+
+
+class Attribute(View):
+ """Represents a value with type information that may be backed elsewhere"""
+
+ @property
+ def meta(self):
+ return self._controller.make_view(self._context, self._data, "meta")
+
+ @property
+ def value(self):
+ return self._controller.make_view(self._context, self._data, "value")
+
+ def put_value(self, value, timeout=None):
+ """Put a value to the Attribute and wait for completion"""
+ self._context.put(self._data.path + ["value"], value, timeout=timeout)
+
+ def put_value_async(self, value):
+ fs = self._context.put_async(self._data.path + ["value"], value)
+ return fs
+
+ def subscribe_value(self, callback, *args):
+ return self._context.subscribe(
+ self._data.path + ["value"], callback, *args)
+
+ @property
+ def alarm(self):
+ return self._controller.make_view(self._context, self._data, "alarm")
+
+    # timeStamp is camelCase to maintain compatibility with EPICS normative
+    # types
+    # noinspection PyPep8Naming
+ @property
+ def timeStamp(self):
+ return self._controller.make_view(
+ self._context, self._data, "timeStamp")
+
+ def __repr__(self):
+ return "<%s value=%r>" % (self.__class__.__name__, self.value)
+
+
+class Method(View):
+ """Exposes a function with metadata for arguments and return values"""
+
+ def _add_positional_args(self, args, kwargs):
+ # add any positional args into our kwargs dict
+ for name, v in zip(self._data.takes.elements, args):
+ assert name not in kwargs, \
+ "%s specified as positional and keyword args" % (name,)
+ kwargs[name] = v
+ return kwargs
+
+ def post(self, *args, **kwargs):
+ kwargs = self._add_positional_args(args, kwargs)
+ result = self._context.post(self._data.path, kwargs)
+ return result
+
+ __call__ = post
+
+ def post_async(self, *args, **kwargs):
+ kwargs = self._add_positional_args(args, kwargs)
+ fs = self._context.post_async(self._data.path, kwargs)
+ return fs
+
+ @property
+ def takes(self):
+ return self._controller.make_view(self._context, self._data, "takes")
+
+ @property
+ def defaults(self):
+ return self._controller.make_view(self._context, self._data, "defaults")
+
+ @property
+ def description(self):
+ return self._controller.make_view(
+ self._context, self._data, "description")
+
+ @property
+ def tags(self):
+ return self._controller.make_view(self._context, self._data, "tags")
+
+ @property
+ def writeable(self):
+ return self._controller.make_view(
+ self._context, self._data, "writeable")
+
+ @property
+ def label(self):
+ return self._controller.make_view(self._context, self._data, "label")
+
+ @property
+ def returns(self):
+ return self._controller.make_view(self._context, self._data, "returns")
+
+
+class Block(View):
+ """Object consisting of a number of Attributes and Methods"""
+ def __init__(self, controller, context, data):
+ super(Block, self).__init__(controller, context, data)
+ for endpoint in self._data:
+ if isinstance(data[endpoint], MethodModel):
+ # Add _async versions of method
+ self._make_async_method(endpoint)
+
+ def __getattr__(self, item):
+ # type: (str) -> View
+ # Get the child of self._data. Needs to be done by the controller to
+ # make sure lock is taken and we get consistent data
+ child = self._controller.make_view(self._context, self._data, item)
+ return child
+
+ @property
+ def mri(self):
+ return self._data.path[0]
+
+ def _make_async_method(self, endpoint):
+ def post_async(*args, **kwargs):
+ child = getattr(self, endpoint) # type: Method
+ return child.post_async(*args, **kwargs)
+
+ object.__setattr__(self, "%s_async" % endpoint, post_async)
+
+ def put_attribute_values_async(self, params):
+ futures = []
+ if type(params) is dict:
+ # If we have a plain dictionary, then sort items
+ items = sorted(params.items())
+ else:
+ # Assume we are already ordered
+ items = params.items()
+ for attr, value in items:
+ assert hasattr(self, attr), \
+ "Block does not have attribute %s" % attr
+ future = self._context.put_async(
+ self._data.path + [attr, "value"], value)
+ futures.append(future)
+ return futures
+
+ def put_attribute_values(self, params, timeout=None, event_timeout=None):
+ futures = self.put_attribute_values_async(params)
+ self._context.wait_all_futures(
+ futures, timeout=timeout, event_timeout=event_timeout)
+
+ def when_value_matches(self, attr, good_value, bad_values=None,
+ timeout=None, event_timeout=None):
+ future = self.when_value_matches_async(attr, good_value, bad_values)
+ self._context.wait_all_futures(
+ future, timeout=timeout, event_timeout=event_timeout)
+
+ def when_value_matches_async(self, attr, good_value, bad_values=None):
+ path = self._data.path + [attr, "value"]
+ future = self._context.when_matches_async(path, good_value, bad_values)
+ return future
+
+ def wait_all_futures(self, futures, timeout=None, event_timeout=None):
+ self._context.wait_all_futures(
+ futures, timeout=timeout, event_timeout=event_timeout)
+
+
+def make_view(controller, context, data):
+ # type: (Controller, Context, Any) -> Any
+ """Make a View subclass containing properties specific for given data
+
+ Args:
+ controller (Controller): The child controller that hosts the data
+ context (Context): The context the parent has made that the View should
+ use for manipulating the data
+ data (Model): The actual data that context will be manipulating
+
+ Returns:
+ View: A View subclass instance that provides a user-focused API to
+ the given data
+ """
+ if isinstance(data, BlockModel):
+ # Make an Block View
+ view = _make_view_subclass(Block, controller, context, data)
+ elif isinstance(data, AttributeModel):
+ # Make an Attribute View
+ view = Attribute(controller, context, data)
+ elif isinstance(data, MethodModel):
+ # Make a Method View
+ view = Method(controller, context, data)
+ elif isinstance(data, Model):
+ # Make a generic View
+ view = _make_view_subclass(View, controller, context, data)
+ elif isinstance(data, dict):
+ # Make a dict of Views
+ d = OrderedDict()
+ for k, v in data.items():
+ d[k] = make_view(controller, context, v)
+ view = d
+ elif isinstance(data, list):
+ # Need to recurse down
+ view = [make_view(controller, context, x) for x in data]
+ else:
+ # Just return the data unwrapped as it should be immutable
+ view = data
+ return view
diff --git a/malcolm/core/vmeta.py b/malcolm/core/vmeta.py
deleted file mode 100644
index 1deee52a4..000000000
--- a/malcolm/core/vmeta.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from .meta import Meta
-from .ntscalar import NTScalar
-from .attributemodel import AttributeModel
-
-
-class VMeta(Meta):
- """Abstract base class for validating the values of Attributes"""
- attribute_class = NTScalar
-
- def validate(self, value):
- """Abstract function to validate a given value
-
- Args:
- value: Value to validate
- """
- raise NotImplementedError(self)
-
- def create_attribute_model(self, initial_value=None):
- """Make an AttributeModel instance of the correct type for this Meta
-
- Args:
- initial_value: The initial value the Attribute should take
-
- Returns:
- AttributeModel: The created attribute model instance
- """
- attr = self.attribute_class(self, initial_value)
- return attr
-
- def doc_type_string(self):
- """Abstract function to return the python type string.
-
- For example, "str" or "numpy.int32"
- """
- raise NotImplementedError(self)
diff --git a/malcolm/gui/attributeitem.py b/malcolm/gui/attributeitem.py
deleted file mode 100644
index 4c35e290d..000000000
--- a/malcolm/gui/attributeitem.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from malcolm.core import Error, Return, Put
-from malcolm.gui.baseitem import BaseItem
-
-
-class AttributeItem(BaseItem):
-
- def get_label(self):
- if self.ref.meta.label:
- return self.ref.meta.label
- else:
- return super(AttributeItem, self).get_label()
-
- def get_value(self):
- return str(self.ref.value)
-
- def get_writeable(self):
- return self.ref.meta.writeable
-
- def set_value(self, value):
- self._state = self.RUNNING
- request = Put(path=self.endpoint + ("value",), value=str(value),
- callback=self.handle_response)
- return request
-
- def handle_response(self, response):
- if isinstance(response, Error):
- self._state = self.ERROR
- elif isinstance(response, Return):
- self._state = self.IDLE
- else:
- raise TypeError(type(response))
diff --git a/malcolm/gui/baseitem.py b/malcolm/gui/baseitem.py
deleted file mode 100644
index dce2c00cf..000000000
--- a/malcolm/gui/baseitem.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from PyQt4.Qt import QStyle, QApplication
-
-
-class BaseItem(object):
- IDLE = "Idle"
- RUNNING = "Run"
- ERROR = "Error"
- CHANGED = "Changed"
- icons = {
- IDLE: QStyle.SP_FileDialogInfoView,
- RUNNING: QStyle.SP_BrowserReload,
- ERROR: QStyle.SP_MessageBoxCritical,
- CHANGED: QStyle.SP_FileDialogStart,
- }
- # map endpoint -> BlockItem
- items = {}
-
- def __init__(self, endpoint, ref):
- # The Block or Method or Attribute or AttributeMeta ref
- self.ref = ref
- # parent BlockItem
- self.parent_item = None
- # any BlockItem children
- self.children = []
- # endpoint list for this node
- self.endpoint = tuple(endpoint)
- # add to items
- self.items[self.endpoint] = self
- # current state
- self._state = self.IDLE
-
- def get_icon(self):
- icon = self.icons[self.get_state()]
- return QApplication.style().standardIcon(icon)
-
- def get_label(self):
- return self.endpoint[-1]
-
- def get_value(self):
- return None
-
- def get_writeable(self):
- return False
-
- def get_state(self):
- return self._state
-
- def parent_row(self):
- if self.parent_item:
- assert self in self.parent_item.children, \
- "%s is not in %s" % (self, self.parent_item.children)
- return self.parent_item.children.index(self)
- return 0
-
- def add_child(self, item):
- item.parent_item = self
- self.children.append(item)
- self.items[item.endpoint] = item
-
- def remove_child(self, item):
- self.children.remove(item)
- self.items.pop(item.endpoint)
-
- def ref_children(self):
- return 0
-
- def create_children(self):
- return
-
- def set_value(self, value):
- raise NotImplementedError()
-
diff --git a/malcolm/gui/blockgui.py b/malcolm/gui/blockgui.py
deleted file mode 100644
index 4f67819fb..000000000
--- a/malcolm/gui/blockgui.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import os
-
-from PyQt4.Qt import QTreeView, QSize, QIcon
-
-from malcolm.gui.guimodel import GuiModel
-from malcolm.gui.delegate import Delegate
-from malcolm.gui.attributeitem import AttributeItem
-
-
-class BlockGui(QTreeView):
-
- def __init__(self, process, block):
- QTreeView.__init__(self)
- model = GuiModel(process, block)
- self.setModel(model)
- self.setWindowTitle("%s: imalcolm" % model.block.mri)
- root = os.path.join(os.path.dirname(__file__), "..", "..")
- icon_path = os.path.join(root, "docs", "malcolm-logo.svg")
- self.setWindowIcon(QIcon(icon_path))
- self.setColumnWidth(0, 160)
- self.setColumnWidth(1, 180)
- self.setColumnWidth(2, 25)
- self.resize(QSize(370, 500))
- self.setItemDelegateForColumn(1, Delegate())
- self.setEditTriggers(self.AllEditTriggers)
- self.expanded.connect(self.write_expanded)
- self.collapsed.connect(self.write_collapsed)
-
- def write_expanded(self, index):
- self._write_group(index, "expanded")
-
- def write_collapsed(self, index):
- self._write_group(index, "collapsed")
-
- def dataChanged(self, topLeft, bottomRight):
- model = self.model()
- for row in range(model.rowCount()):
- str_data = str(model.index(row, 1).data().toString())
- index = model.index(row, 0)
- if str_data == "expanded":
- self.setExpanded(index, True)
- elif str_data == "collapsed":
- self.setExpanded(index, False)
- super(BlockGui, self).dataChanged(topLeft, bottomRight)
-
- def _write_group(self, index, value):
- item = index.internalPointer()
- if isinstance(item, AttributeItem):
- model = self.model()
- index = model.index(index.row(), 1, index.parent())
- model.setData(index, value)
-
-
-
-
diff --git a/malcolm/gui/blockitem.py b/malcolm/gui/blockitem.py
deleted file mode 100644
index e04a5d0a8..000000000
--- a/malcolm/gui/blockitem.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from malcolm.core import Method, Attribute
-from malcolm.gui.baseitem import BaseItem
-from malcolm.gui.methoditem import MethodItem
-from malcolm.gui.attributeitem import AttributeItem
-
-
-class BlockItem(BaseItem):
-
- def _get_group_name(self, attr):
- meta = getattr(attr, "meta", None)
- if meta:
- tags = getattr(meta, "tags", [])
- else:
- tags = []
- groups = [x for x in tags if x.startswith("group:")]
- if groups:
- group_name = groups[0][len("group:"):]
- return group_name
-
- def ref_children(self):
- """Number of child objects our ref has"""
- nchildren = 0
- for name in self.ref:
- child = self.ref[name]
- # check for group, otherwise put it in place
- if self._get_group_name(child) is None:
- nchildren += 1
- return nchildren
-
- def create_children(self):
- for name in self.ref:
- child = self.ref[name]
- if isinstance(child, Attribute):
- attr = child
- item = AttributeItem(self.endpoint + (name,), attr)
- group_name = self._get_group_name(attr)
- if group_name is None:
- parent_item = self
- else:
- parent_endpoint = self.endpoint + (group_name,)
- parent_item = self.items[parent_endpoint]
- parent_item.add_child(item)
- item.create_children()
- elif isinstance(child, Method):
- method = child
- item = MethodItem(self.endpoint + (name,), method)
- self.add_child(item)
- item.create_children()
diff --git a/malcolm/gui/delegate.py b/malcolm/gui/delegate.py
deleted file mode 100644
index c4bb7dabf..000000000
--- a/malcolm/gui/delegate.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from PyQt4.Qt import QStyledItemDelegate, QStyle, QStyleOptionButton, \
- QApplication, QEvent, QPushButton, QComboBox, QLineEdit, QVariant, Qt
-
-from malcolm.gui.attributeitem import AttributeItem
-from malcolm.gui.parameteritem import ParameterItem
-from malcolm.gui.methoditem import MethodItem
-
-
-class Delegate(QStyledItemDelegate):
-
- def createEditor(self, parent, option, index):
- if index.isValid() and index.column() == 1:
- item = index.internalPointer()
- if isinstance(item, (AttributeItem, ParameterItem)):
- if False: #item.meta["metaOf"] == "malcolm:core/Enum:1.0":
- editor = SpecialComboBox(parent)
- editor.delegate = self
- editor.setEditable(True)
- editor.addItems(item.meta["oneOf"])
- elif False: #item.meta["metaOf"] == "malcolm:core/Boolean:1.0":
- editor = SpecialComboBox(parent)
- editor.delegate = self
- editor.setEditable(True)
- editor.addItems(["False", "True"])
- else:
- editor = QLineEdit(parent)
- return editor
-
- def setEditorData(self, editor, index):
- if isinstance(editor, QComboBox):
- i = editor.findText(index.data(Qt.EditRole).toString())
- if i > -1:
- editor.setCurrentIndex(i)
- else:
- editor.setEditText(index.data(Qt.EditRole).toString())
- editor.lineEdit().selectAll()
- else:
- return QStyledItemDelegate.setEditorData(self, editor, index)
-
- def setModelData(self, editor, model, index):
- if isinstance(editor, QComboBox):
- model.setData(index, QVariant(editor.currentText()), Qt.EditRole)
- else:
- return QStyledItemDelegate.setModelData(self, editor, model, index)
-
- def is_method_button(self, index):
- if index.isValid() and isinstance(index.internalPointer(), MethodItem):
- column = index.column()
- if column == 1:
- return True
- return False
-
- def paint(self, painter, option, index):
- # If we are looking at a method then draw a button
- # http://www.qtcentre.org/threads/26916-inserting-custom-Widget-to-listview?p=128623#post128623
- if self.is_method_button(index):
- item = index.internalPointer()
- opt = QStyleOptionButton()
- style = QApplication.style()
- # If method is running, draw sunken
- if item.get_state() == item.RUNNING:
- opt.state |= QStyle.State_Enabled
- opt.state |= QStyle.State_Sunken
- # if method is allowed, draw blue
- elif item.get_writeable():
- opt.state |= QStyle.State_Enabled
- # if we are hovering, draw highlight
- if option.state & QStyle.State_MouseOver:
- opt.state |= QStyle.State_MouseOver
- opt.rect = option.rect
- opt.text = item.get_label()
- style.drawControl(QStyle.CE_PushButton, opt, painter, QPushButton())
- else:
- if option.state & QStyle.State_Selected:
- # Don't show delegates as highlighted
- option.state = option.state ^ QStyle.State_Selected
- QStyledItemDelegate.paint(self, painter, option, index)
-
- def editorEvent(self, event, model, option, index):
- if self.is_method_button(index):
- # TODO: Drag seems to do the wrong thing here...
- if event.type() in [QEvent.MouseButtonPress,
- QEvent.MouseButtonDblClick]:
- return model.setData(index, None)
- return QStyledItemDelegate.editorEvent(self, event, model, option, index)
-
-
-class SpecialComboBox(QComboBox):
- # Qt outputs an activated signal if you start typing then mouse click on the
- # down arrow. By delaying the activated event until after the mouse click
- # we avoid this problem
- def closeEvent(self, i):
- self.delegate.commitData.emit(self)
- self.delegate.closeEditor.emit(self, QStyledItemDelegate.SubmitModelCache)
-
- def mousePressEvent(self, event):
- QComboBox.mousePressEvent(self, event)
- self.activated.connect(self.closeEvent)
diff --git a/malcolm/gui/guimodel.py b/malcolm/gui/guimodel.py
deleted file mode 100644
index aad4a5ecd..000000000
--- a/malcolm/gui/guimodel.py
+++ /dev/null
@@ -1,177 +0,0 @@
-from PyQt4.Qt import QAbstractItemModel, QModelIndex, Qt
-from PyQt4.QtCore import pyqtSignal
-
-from malcolm.core import Subscribe, Delta
-from malcolm.gui.blockitem import BlockItem
-
-
-class GuiModel(QAbstractItemModel):
-
- response_received = pyqtSignal(object)
-
- def __init__(self, process, block):
- QAbstractItemModel.__init__(self)
- self.controller = process.get_controller(block.mri)
- self.block = block
- self.id_ = 1
- self.root_item = BlockItem((self.block.mri,), block)
- # map id -> item
- self.item_lookup = {}
- # TODO: unsubscribe when done
- self.response_received.connect(self.handle_response)
- self.send_request(Subscribe(path=[self.block.mri], delta=True)).wait()
-
- def send_request(self, request, item=None):
- request.set_id(self.id_)
- request.set_callback(self.response_received.emit)
- self.item_lookup[self.id_] = item
- self.id_ += 1
- return self.controller.handle_request(request)
-
- def get_index(self, item, column):
- return self.createIndex(item.parent_row(), column, item)
-
- def delete_child_endpoints(self, endpoint):
- """Delete all children of item"""
- endpoints = [e for e in BlockItem.items if e!= endpoint and
- e[:len(endpoint)] == endpoint]
- # sort by reversed length so we get children first
- endpoints.sort(key=len, reverse=True)
- for e in endpoints:
- item = BlockItem.items[e]
- parent_index = self.get_index(item.parent_item, 0)
- row = item.parent_row()
- self.beginRemoveRows(parent_index, row, row)
- item.parent_item.remove_child(item)
- self.endRemoveRows()
-
- def handle_response(self, response):
- if isinstance(response, Delta):
- self.handle_changes(response.changes)
- else:
- item = self.item_lookup[response.id]
- item.handle_response(response)
- index = self.get_index(item, 2)
- self.dataChanged.emit(index, index)
-
- def handle_changes(self, changes):
- # create and update children where necessary
- for change in changes:
- path = [self.block.mri] + change[0]
- # See if we can find an item to update
- item, path = self.find_item(path)
- # this path is the biggest thing that has to change, so delete
- # all child endpoints of this
- self.delete_child_endpoints(item.endpoint)
- # now we can update the item, creating all it's children
- self.create_children(item)
-
- def create_children(self, item):
- index = self.get_index(item, 0)
- nchildren = item.ref_children()
- if nchildren:
- # Added rows
- self.beginInsertRows(index, 0, nchildren - 1)
- item.create_children()
- self.endInsertRows()
- endindex = self.get_index(item, 2)
- self.dataChanged.emit(index, endindex)
-
- def find_item(self, endpoint):
- """Find the smallest item up from endpoint"""
- endpoint = tuple(endpoint)
- path = []
- # Keep popping the last item off the endpoint path until we get to the
- # smallest structure that we have as an item
- while endpoint:
- if endpoint in BlockItem.items:
- return BlockItem.items[endpoint], path
- path.insert(0, endpoint[-1])
- endpoint = endpoint[:-1]
-
- # Needed to make this a concrete model
- def index(self, row, column, parent=QModelIndex()):
- # If index out of bounds then return
- if not self.hasIndex(row, column, parent):
- return QModelIndex()
- # If valid parent item use that
- if parent.isValid():
- parent_item = parent.internalPointer()
- else:
- # otherwise we're being asked for our root item
- parent_item = self.root_item
- child_item = parent_item.children[row]
- # Now make an index
- index = self.createIndex(row, column, child_item)
- return index
-
- def parent(self, index):
- # Check valid index
- if not index.isValid():
- return QModelIndex()
- child_item = index.internalPointer()
- parent_item = child_item.parent_item
- # Check child's parent exists
- if parent_item is self.root_item or parent_item is None:
- return QModelIndex()
- # Return an index for us
- index = self.createIndex(parent_item.parent_row(), 0, parent_item)
- return index
-
- def rowCount(self, parent=QModelIndex()):
- if parent.column() > 0:
- return 0
- if parent.isValid():
- parent_item = parent.internalPointer()
- else:
- parent_item = self.root_item
- rows = len(parent_item.children)
- return rows
-
- def columnCount(self, parent=QModelIndex()):
- return 3
-
- def flags(self, index):
- flags = QAbstractItemModel.flags(self, index)
- if index.isValid() and index.column() == 1:
- item = index.internalPointer()
- if item.get_writeable():
- flags |= Qt.ItemIsEditable
- return flags
-
- def data(self, index, role):
- if not index.isValid():
- return None
-
- # Get the item
- item = index.internalPointer()
- if role in (Qt.DisplayRole, Qt.DecorationRole, Qt.EditRole):
- if index.column() == 0:
- return item.get_label()
- elif index.column() == 1:
- return str(item.get_value())
- elif index.column() == 2:
- return item.get_icon()
-
- def setData(self, index, value, role=Qt.EditRole):
- if role == Qt.EditRole and index.isValid() and index.column() == 1:
- item = index.internalPointer()
- if item.get_writeable():
- if hasattr(value, "toString"):
- value = value.toString()
- request = item.set_value(value)
- if request:
- self.send_request(request, item)
- self.dataChanged.emit(index, index)
- if item.children:
- start = self.get_index(item.children[0], 0)
- end = self.get_index(item.children[-1], 2)
- self.dataChanged.emit(start, end)
- return True
- return False
-
- def headerData(self, section, orientation, role=Qt.DisplayRole):
- if orientation == Qt.Horizontal:
- if role == Qt.DisplayRole:
- label = ["name", "value", ""][section]
- return label
diff --git a/malcolm/gui/guiopener.py b/malcolm/gui/guiopener.py
deleted file mode 100644
index f2f0d2c48..000000000
--- a/malcolm/gui/guiopener.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import time
-import logging
-
-from PyQt4.QtCore import QObject, pyqtSignal
-
-from malcolm.gui.blockgui import BlockGui
-
-# Create a module level logger
-log = logging.getLogger(__name__)
-
-
-class GuiOpener(QObject):
- open_gui_signal = pyqtSignal(object, object)
-
- def __init__(self):
- QObject.__init__(self)
- self.guis = {}
- self.open_gui_signal.connect(self._open_gui)
-
- def open_gui(self, block, process):
- self.open_gui_signal.emit(block, process)
-
- def _open_gui(self, block, process):
- try:
- if block not in self.guis:
- self.guis[block] = BlockGui(process, block)
- # If we don't sleep here we get a blank GUI, very strange...
- time.sleep(0.1)
- self.guis[block].show()
- except:
- log.exception("Error opening gui %s" % block)
- raise
diff --git a/malcolm/gui/methoditem.py b/malcolm/gui/methoditem.py
deleted file mode 100644
index d4926f1da..000000000
--- a/malcolm/gui/methoditem.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from malcolm.core import Post, Error, Return
-from malcolm.gui.baseitem import BaseItem
-from malcolm.gui.parameteritem import ParameterItem
-
-
-class MethodItem(BaseItem):
- def get_label(self):
- if self.ref.label:
- return self.ref.label
- else:
- return super(MethodItem, self).get_label()
-
- def get_writeable(self):
- return self.ref.writeable
-
- def ref_children(self):
- """Number of child objects our ref has"""
- return len(self.ref.takes.elements)
-
- def create_children(self):
- for name in self.ref.takes.elements:
- meta = self.ref.takes.elements[name]
- default = self.ref.defaults.get(name, None)
- endpoint = self.endpoint + ("takes", "elements", name)
- item = ParameterItem(endpoint, meta, default)
- self.add_child(item)
- item.create_children()
-
- def set_value(self, value):
- args = {}
- for item in self.children:
- args[item.endpoint[-1]] = item.get_value()
- item.reset_value()
- self._state = self.RUNNING
- request = Post(path=self.endpoint, parameters=args,
- callback=self.handle_response)
- return request
-
- def handle_response(self, response):
- if isinstance(response, Error):
- print("Error: %s" % response.message)
- self._state = self.ERROR
- elif isinstance(response, Return):
- print("Return: %s" % response.value)
- self._state = self.IDLE
- else:
- raise TypeError(type(response))
diff --git a/malcolm/gui/parameteritem.py b/malcolm/gui/parameteritem.py
deleted file mode 100644
index e8b8699fe..000000000
--- a/malcolm/gui/parameteritem.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from malcolm.gui.baseitem import BaseItem
-
-
-class ParameterItem(BaseItem):
- def __init__(self, endpoint, ref, default):
- super(ParameterItem, self).__init__(endpoint, ref)
- self._value = default
- self.default = default
-
- def get_label(self):
- if self.ref.label:
- return self.ref.label
- else:
- return super(ParameterItem, self).get_label()
-
- def reset_value(self):
- self._value = self.default
- self._state = self.IDLE
-
- def set_value(self, value):
- try:
- self._value = self.ref.validate(str(value))
- except Exception:
- self._state = self.ERROR
- else:
- self._state = self.CHANGED
-
- def get_value(self):
- return self._value
-
- def get_writeable(self):
- return getattr(self.ref, "writeable", True)
diff --git a/malcolm/imalcolm.py b/malcolm/imalcolm.py
index 460799f6a..fcf35e50c 100755
--- a/malcolm/imalcolm.py
+++ b/malcolm/imalcolm.py
@@ -75,15 +75,15 @@ def make_process():
"stream": "ext://sys.stdout"
},
- # "local_file_handler": {
- # "class": "logging.handlers.RotatingFileHandler",
- # "level": "DEBUG",
- # "formatter": "extended",
- # "filename": "/tmp/debug.log",
- # "maxBytes": 100048576,
- # "backupCount": 4,
- # "encoding": "utf8"
- # },
+ "local_file_handler": {
+ "class": "logging.handlers.RotatingFileHandler",
+ "level": "DEBUG",
+ "formatter": "extended",
+ "filename": "/tmp/malcolm-debug.log",
+ "maxBytes": 100048576,
+ "backupCount": 4,
+ "encoding": "utf8"
+ },
"graylog_gelf": {
"class": "pygelf.GelfTcpHandler",
@@ -119,7 +119,7 @@ def make_process():
"root": {
"level": "DEBUG",
- "handlers": ["graylog_gelf", "console"],
+ "handlers": ["graylog_gelf", "console", "local_file_handler"],
}
}
@@ -138,36 +138,6 @@ def make_process():
listener.start()
atexit.register(listener.stop)
- # Setup Qt gui, must be done before any malcolm imports otherwise cothread
- # starts in the wrong thread
- try:
- os.environ['DISPLAY']
- # If this environment variable doesn't exist then there is probably no
- # X server for us to talk to.
- except KeyError:
- qt_thread = None
- else:
- from PyQt4.Qt import QApplication
-
- # Start qt
- def start_qt():
- app = QApplication(sys.argv)
- app.setQuitOnLastWindowClosed(False)
- locals_d["app"] = app
- from malcolm.gui.guiopener import GuiOpener
- global opener
- opener = GuiOpener()
- app.exec_()
-
- qt_thread = threading.Thread(target=start_qt)
- qt_thread.setDaemon(True)
-
- def gui(block):
- global opener
- opener.open_gui(block, proc)
-
- locals_d["gui"] = gui
-
# Setup profiler dir
try:
from malcolm.modules.profiling.parts import ProfilingViewerPart
@@ -179,17 +149,19 @@ def gui(block):
os.mkdir(args.profiledir)
ProfilingViewerPart.profiledir = args.profiledir
locals_d["profiler"] = Profiler(args.profiledir)
- #locals_d["profiler"].start()
+ locals_d["profiler"].start()
- from malcolm.core import Process, call_with_params, Context, Queue
+ from malcolm.core import Context, Queue, Process
from malcolm.modules.builtin.blocks import proxy_block
from malcolm.yamlutil import make_include_creator
if args.yaml:
proc_name = os.path.basename(args.yaml).split(".")[-2]
proc = Process(proc_name)
- assembly = make_include_creator(args.yaml)
- call_with_params(assembly, proc)
+ controllers, parts = make_include_creator(args.yaml)()
+ assert not parts, "%s defines parts" % (args.yaml,)
+ for controller in controllers:
+ proc.add_controller(controller)
proc_name = "%s - imalcolm" % proc_name
else:
proc = Process("Process")
@@ -201,17 +173,16 @@ def gui(block):
if args.client.startswith("ws://"):
from malcolm.modules.web.controllers import WebsocketClientComms
hostname, port = args.client[5:].split(":")
- comms = call_with_params(
- WebsocketClientComms, proc, [],
+ comms = WebsocketClientComms(
mri="%s:%s" % (hostname, port), hostname=hostname,
port=int(port))
elif args.client == "pva":
from malcolm.modules.pva.controllers import PvaClientComms
- comms = call_with_params(PvaClientComms, proc, [], mri="pva")
+ comms = PvaClientComms(mri="pva")
else:
raise ValueError(
"Don't know how to create client to %s" % args.client)
- proc.add_controller(comms.mri, comms)
+ proc.add_controller(comms)
class UserContext(Context):
def make_queue(self):
@@ -225,12 +196,10 @@ def post(self, path, params=None, timeout=None):
raise
def make_proxy(self, comms, mri):
- call_with_params(proxy_block, proc, comms=comms, mri=mri)
+ proc.add_controller(proxy_block(comms=comms, mri=mri)[-1])
locals_d["self"] = UserContext(proc)
- if qt_thread:
- qt_thread.start()
- proc.start()
+ proc.start(timeout=60)
locals_d["process"] = proc
return locals_d
@@ -253,7 +222,7 @@ def main():
or
-self.make_proxy("localhost:8080", "HELLO")
+self.make_proxy("localhost:8008", "HELLO")
print self.block_view("HELLO").greet("me")
""" % (locals_d["self"].mri_list,)
@@ -265,8 +234,6 @@ def main():
else:
locals().update(locals_d)
IPython.embed(header=header)
- if "app" in locals_d:
- locals_d["app"].quit()
if "profiler" in locals_d:
if locals_d["profiler"].started:
locals_d["profiler"].stop()
@@ -285,12 +252,10 @@ def main():
from pkg_resources import require
- #sys.path.insert(0,
- # "/dls_sw/work/tools/RHEL6-x86_64/odin/venv/lib/python2.7/"
- # "site-packages")
require("tornado", "numpy", "ruamel.yaml", "cothread==2.14", "vdsgen==0.2",
"pygelf==0.3.1", "scanpointgenerator==2.1.1", "plop", "h5py==2.7.1")
#sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "cothread"))
+ sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "annotypes"))
#sys.path.append(
# "/home/tmc43/virtualenvs/pymalcolm/lib/python2.7/site-packages")
sys.path.append(
diff --git a/malcolm/modules/ADAndor/__init__.py b/malcolm/modules/ADAndor/__init__.py
index e69de29bb..f6ea27363 100644
--- a/malcolm/modules/ADAndor/__init__.py
+++ b/malcolm/modules/ADAndor/__init__.py
@@ -0,0 +1 @@
+from . import parts
diff --git a/malcolm/modules/ADAndor/blocks/__init__.py b/malcolm/modules/ADAndor/blocks/__init__.py
index 67a4beb91..bd2abab4b 100644
--- a/malcolm/modules/ADAndor/blocks/__init__.py
+++ b/malcolm/modules/ADAndor/blocks/__init__.py
@@ -1,9 +1,9 @@
from malcolm.yamlutil import make_block_creator, check_yaml_names
-andor_detector_driver_block = make_block_creator(
- __file__, "andor_detector_driver_block.yaml")
+andor_driver_block = make_block_creator(
+ __file__, "andor_driver_block.yaml")
-andor_detector_runnable_block = make_block_creator(
- __file__, "andor_detector_runnable_block.yaml")
+andor_runnable_block = make_block_creator(
+ __file__, "andor_runnable_block.yaml")
__all__ = check_yaml_names(globals())
diff --git a/malcolm/modules/ADAndor/blocks/andor_detector_driver_block.yaml b/malcolm/modules/ADAndor/blocks/andor_driver_block.yaml
similarity index 100%
rename from malcolm/modules/ADAndor/blocks/andor_detector_driver_block.yaml
rename to malcolm/modules/ADAndor/blocks/andor_driver_block.yaml
diff --git a/malcolm/modules/ADAndor/blocks/andor_detector_runnable_block.yaml b/malcolm/modules/ADAndor/blocks/andor_runnable_block.yaml
similarity index 50%
rename from malcolm/modules/ADAndor/blocks/andor_detector_runnable_block.yaml
rename to malcolm/modules/ADAndor/blocks/andor_runnable_block.yaml
index 00bc7185e..1dce734d0 100644
--- a/malcolm/modules/ADAndor/blocks/andor_detector_runnable_block.yaml
+++ b/malcolm/modules/ADAndor/blocks/andor_runnable_block.yaml
@@ -1,46 +1,45 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.defines.docstring:
value: |
Device block corresponding to ADAndor + stat + pos + hdf writer.
- - Detector driver should have pv prefix $(pvPrefix):CAM
- - Pos should have pv prefix $(pvPrefix):POS
- - Stat should have pv prefix $(pvPrefix):STAT
- - HDF should have pv prefix $(pvPrefix):HDF5
+ - Detector driver should have pv prefix $(pv_prefix):CAM
+ - Pos should have pv prefix $(pv_prefix):POS
+ - Stat should have pv prefix $(pv_prefix):STAT
+ - HDF should have pv prefix $(pv_prefix):HDF5
- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
description: $(docstring)
-- ADAndor.blocks.andor_detector_driver_block:
- mri: $(mriPrefix):DRV
- prefix: $(pvPrefix):CAM
+- ADAndor.blocks.andor_driver_block:
+ mri: $(mri_prefix):DRV
+ prefix: $(pv_prefix):CAM
- ADAndor.parts.AndorDriverPart:
name: DRV
- mri: $(mriPrefix):DRV
- readoutTime: 13e-3
+ mri: $(mri_prefix):DRV
- ADCore.blocks.stats_plugin_block:
- mri: $(mriPrefix):STAT
- prefix: $(pvPrefix):STAT
+ mri: $(mri_prefix):STAT
+ prefix: $(pv_prefix):STAT
- ADCore.parts.StatsPluginPart:
name: STAT
- mri: $(mriPrefix):STAT
+ mri: $(mri_prefix):STAT
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/ADAndor/parts/__init__.py b/malcolm/modules/ADAndor/parts/__init__.py
index 8efdf12ec..7d2391dbf 100644
--- a/malcolm/modules/ADAndor/parts/__init__.py
+++ b/malcolm/modules/ADAndor/parts/__init__.py
@@ -1,4 +1,6 @@
from .andordriverpart import AndorDriverPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/ADAndor/parts/andordriverpart.py b/malcolm/modules/ADAndor/parts/andordriverpart.py
index 09382774a..f7ccd90a8 100644
--- a/malcolm/modules/ADAndor/parts/andordriverpart.py
+++ b/malcolm/modules/ADAndor/parts/andordriverpart.py
@@ -1,12 +1,33 @@
-from malcolm.modules.ADCore.parts import ExposureDetectorDriverPart
+from annotypes import add_call_types, Any
+from malcolm.modules import ADCore, scanning
-class AndorDriverPart(ExposureDetectorDriverPart):
- def setup_detector(self, child, completed_steps, steps_to_do, params=None):
- fs = super(AndorDriverPart, self).setup_detector(
- child, completed_steps, steps_to_do, params)
- child.wait_all_futures(fs)
+
+class AndorDriverPart(ADCore.parts.DetectorDriverPart):
+ """Part for using andor_driver_block in a scan"""
+ def __init__(self,
+ name, # type: ADCore.parts.APartName
+ mri, # type: ADCore.parts.AMri
+ ):
+ # type: (...) -> None
+ super(AndorDriverPart, self).__init__(name, mri)
+
+ @add_call_types
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ generator, # type: scanning.hooks.AGenerator
+ **kwargs # type: **Any
+ ):
+ # type: (...) -> None
+ self.actions.setup_detector(
+ context, completed_steps, steps_to_do, imageMode="Fixed",
+ exposure=generator.duration - 13e-3, **kwargs)
# Need to reset acquirePeriod as it's sometimes wrong
- fs = child.acquirePeriod.put_value_async(
- child.exposure.value + self.readout_time.value)
- return fs
+ child = context.block_view(self.mri)
+ child.acquirePeriod.put_value(generator.duration)
+ # Start now if we are hardware triggered
+ # self.is_hardware_triggered = child.triggerMode == "Hardware"
+ if self.is_hardware_triggered:
+ self.actions.arm_detector(context)
diff --git a/malcolm/modules/ADCore/__init__.py b/malcolm/modules/ADCore/__init__.py
index e69de29bb..a01b8cb73 100644
--- a/malcolm/modules/ADCore/__init__.py
+++ b/malcolm/modules/ADCore/__init__.py
@@ -0,0 +1 @@
+from . import parts, infos, util
diff --git a/malcolm/modules/ADCore/blocks/hdf_writer_block.yaml b/malcolm/modules/ADCore/blocks/hdf_writer_block.yaml
index 48b6bcab8..346264948 100644
--- a/malcolm/modules/ADCore/blocks/hdf_writer_block.yaml
+++ b/malcolm/modules/ADCore/blocks/hdf_writer_block.yaml
@@ -24,243 +24,210 @@
name: fileWriteMode
description: Write single, capture then write, or stream as captured
pv: $(prefix):FileWriteMode
- rbvSuff: _RBV
- widget: combo
+ rbv_suff: _RBV
- ca.parts.CABooleanPart:
name: swmrMode
description: Whether to write single writer multiple reader files
pv: $(prefix):SWMRMode
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
- ca.parts.CABooleanPart:
name: positionMode
description: Whether to write in block got from attributes PosName
pv: $(prefix):PositionMode
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
- ca.parts.CABooleanPart:
name: dimAttDatasets
description: Whether to write attributes in same dimensionality as data
pv: $(prefix):DimAttDatasets
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
- ca.parts.CABooleanPart:
name: lazyOpen
description: If true then don't require a dummy frame to get dims
pv: $(prefix):LazyOpen
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: numCapture
description: Number of frames to capture
pv: $(prefix):NumCapture
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: flushDataPerNFrames
description: Number of frames to capture between HDF dataset flushes
pv: $(prefix):NumFramesFlush
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: flushAttrPerNFrames
description: Number of frames to capture between HDF attribute flushes
pv: $(prefix):NDAttributeChunk
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CACharArrayPart:
name: xml
description: XML for layout
pv: $(prefix):XMLFileName
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
# Filename
- ca.parts.CACharArrayPart:
name: filePath
description: Directory to write files into
pv: $(prefix):FilePath
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CACharArrayPart:
name: fileName
description: Filename within directory
pv: $(prefix):FileName
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CACharArrayPart:
name: fileTemplate
description: File template of full file path
pv: $(prefix):FileTemplate
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
# Dimensionality
- ca.parts.CALongPart:
name: numExtraDims
description: How many extra dimensions. 0=(N,..), 1=(X,N,..), 2=(Y,X,N,..)
pv: $(prefix):NumExtraDims
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
# Where to get dimensional indexes
- ca.parts.CAStringPart:
name: posNameDimN
description: NDAttribute that position labeller will write DimN index into
pv: $(prefix):PosNameDimN
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDimX
description: NDAttribute that position labeller will write DimX index into
pv: $(prefix):PosNameDimX
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDimY
description: NDAttribute that position labeller will write DimY index into
pv: $(prefix):PosNameDimY
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim3
description: NDAttribute that position labeller will write Dim3 index into
pv: $(prefix):PosNameDim3
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim4
description: NDAttribute that position labeller will write Dim4 index into
pv: $(prefix):PosNameDim4
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim5
description: NDAttribute that position labeller will write Dim5 index into
pv: $(prefix):PosNameDim5
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim6
description: NDAttribute that position labeller will write Dim6 index into
pv: $(prefix):PosNameDim6
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim7
description: NDAttribute that position labeller will write Dim7 index into
pv: $(prefix):PosNameDim7
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim8
description: NDAttribute that position labeller will write Dim8 index into
pv: $(prefix):PosNameDim8
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAStringPart:
name: posNameDim9
description: NDAttribute that position labeller will write Dim9 index into
pv: $(prefix):PosNameDim9
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
# Size of dimension
- ca.parts.CALongPart:
name: extraDimSizeN
description: Size of HDF extra dimension N
pv: $(prefix):ExtraDimSizeN
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSizeX
description: Size of HDF extra dimension X
pv: $(prefix):ExtraDimSizeX
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSizeY
description: Size of HDF extra dimension Y
pv: $(prefix):ExtraDimSizeY
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize3
description: Size of HDF extra dimension 3
pv: $(prefix):ExtraDimSize3
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize4
description: Size of HDF extra dimension 4
pv: $(prefix):ExtraDimSize4
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize5
description: Size of HDF extra dimension 5
pv: $(prefix):ExtraDimSize5
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize6
description: Size of HDF extra dimension 6
pv: $(prefix):ExtraDimSize6
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize7
description: Size of HDF extra dimension 7
pv: $(prefix):ExtraDimSize7
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize8
description: Size of HDF extra dimension 8
pv: $(prefix):ExtraDimSize8
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: extraDimSize9
description: Size of HDF extra dimension 9
pv: $(prefix):ExtraDimSize9
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAActionPart:
name: start
description: Demand for starting acquisition
pv: $(prefix):Capture
- statusPv: $(prefix):WriteStatus
- goodStatus: Write OK
+ status_pv: $(prefix):WriteStatus
+ good_status: Write OK
- ca.parts.CAActionPart:
name: stop
@@ -273,10 +240,8 @@
name: running
description: If detector is currently acquiring
rbv: $(prefix):Capture_RBV
- widget: led
- ca.parts.CACharArrayPart:
name: writeMessage
description: Error message if in error
rbv: $(prefix):WriteMessage
- widget: textupdate
diff --git a/malcolm/modules/ADCore/blocks/position_labeller_block.yaml b/malcolm/modules/ADCore/blocks/position_labeller_block.yaml
index 0dae16a39..d28dc36af 100644
--- a/malcolm/modules/ADCore/blocks/position_labeller_block.yaml
+++ b/malcolm/modules/ADCore/blocks/position_labeller_block.yaml
@@ -30,22 +30,19 @@
name: xml
description: Filename of xml or xml text
pv: $(prefix):Filename
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
config: False
- ca.parts.CALongPart:
name: idStart
description: First uid value to look for
pv: $(prefix):IDStart
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: qty
description: Number of positions left in the buffer
rbv: $(prefix):Qty_RBV
- widget: textupdate
- ca.parts.CAActionPart:
name: start
@@ -63,4 +60,3 @@
name: running
description: If detector is currently acquiring
rbv: $(prefix):Running_RBV
- widget: led
diff --git a/malcolm/modules/ADCore/blocks/stats_plugin_block.yaml b/malcolm/modules/ADCore/blocks/stats_plugin_block.yaml
index d0a5c6d45..d5ac7d72e 100644
--- a/malcolm/modules/ADCore/blocks/stats_plugin_block.yaml
+++ b/malcolm/modules/ADCore/blocks/stats_plugin_block.yaml
@@ -24,5 +24,4 @@
name: computeStatistics
description: Compute min, max, mean, tot, sigma, net
pv: $(prefix):ComputeStatistics
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
diff --git a/malcolm/modules/ADCore/includes/adbase_parts.yaml b/malcolm/modules/ADCore/includes/adbase_parts.yaml
index 8abcbf5ef..b0326bf59 100644
--- a/malcolm/modules/ADCore/includes/adbase_parts.yaml
+++ b/malcolm/modules/ADCore/includes/adbase_parts.yaml
@@ -9,22 +9,20 @@
name: imageMode
description: Whether to take 1, many, or unlimited images at start
pv: $(prefix):ImageMode
- rbvSuff: _RBV
- widget: combo
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: numImages
description: Number of images to take if imageMode=Multiple
pv: $(prefix):NumImages
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAActionPart:
name: start
description: Demand for starting acquisition
pv: $(prefix):Acquire
- statusPv: $(prefix):DetectorState_RBV
- goodStatus: Idle
+ status_pv: $(prefix):DetectorState_RBV
+ good_status: Idle
- ca.parts.CAActionPart:
name: stop
@@ -37,26 +35,21 @@
name: acquiring
description: If detector is currently acquiring
rbv: $(prefix):Acquire
- widget: led
- ca.parts.CAChoicePart:
name: triggerMode
description: What is triggering the detector to take frames
pv: $(prefix):TriggerMode
- rbvSuff: _RBV
- widget: combo
+ rbv_suff: _RBV
- ca.parts.CADoublePart:
name: exposure
description: Exposure time for each frame
pv: $(prefix):AcquireTime
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CADoublePart:
name: acquirePeriod
description: Duration of each frame including readout
pv: $(prefix):AcquirePeriod
- rbvSuff: _RBV
- widget: textinput
-
+ rbv_suff: _RBV
diff --git a/malcolm/modules/ADCore/includes/filewriting_collection.yaml b/malcolm/modules/ADCore/includes/filewriting_collection.yaml
index bf6a99793..ca9a8d89e 100644
--- a/malcolm/modules/ADCore/includes/filewriting_collection.yaml
+++ b/malcolm/modules/ADCore/includes/filewriting_collection.yaml
@@ -1,26 +1,26 @@
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for all the other plugins
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id prefix for all created blocks
- ADCore.parts.DatasetTablePart:
name: DSET
- ADCore.blocks.position_labeller_block:
- mri: $(mriPrefix):POS
- prefix: $(pvPrefix):POS
+ mri: $(mri_prefix):POS
+ prefix: $(pv_prefix):POS
- ADCore.parts.PositionLabellerPart:
name: POS
- mri: $(mriPrefix):POS
+ mri: $(mri_prefix):POS
- ADCore.blocks.hdf_writer_block:
- mri: $(mriPrefix):HDF5
- prefix: $(pvPrefix):HDF5
+ mri: $(mri_prefix):HDF5
+ prefix: $(pv_prefix):HDF5
- ADCore.parts.HDFWriterPart:
name: HDF5
- mri: $(mriPrefix):HDF5
+ mri: $(mri_prefix):HDF5
diff --git a/malcolm/modules/ADCore/includes/ndarraybase_parts.yaml b/malcolm/modules/ADCore/includes/ndarraybase_parts.yaml
index 78816bfe8..75643dbab 100644
--- a/malcolm/modules/ADCore/includes/ndarraybase_parts.yaml
+++ b/malcolm/modules/ADCore/includes/ndarraybase_parts.yaml
@@ -12,29 +12,24 @@
name: arrayCallbacks
description: Whether to produce images or not
pv: $(prefix):ArrayCallbacks
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: arrayCounter
description: Reset unique id for next frame
pv: $(prefix):ArrayCounter
- widget: textinput
- ca.parts.CALongPart:
name: arrayCounterReadback
description: Current unique id number for frame
rbv: $(prefix):ArrayCounter_RBV
- widget: textupdate
- ca.parts.CALongPart:
name: uniqueId
description: Current unique id number for frame
rbv: $(prefix):UniqueId_RBV
- widget: textupdate
- ca.parts.CACharArrayPart:
name: attributesFile
description: Filename for NDAttributes
pv: $(prefix):NDAttributesFile
- widget: textinput
diff --git a/malcolm/modules/ADCore/includes/ndpluginbase_parts.yaml b/malcolm/modules/ADCore/includes/ndpluginbase_parts.yaml
index 3c9ec255b..0a0e7955b 100644
--- a/malcolm/modules/ADCore/includes/ndpluginbase_parts.yaml
+++ b/malcolm/modules/ADCore/includes/ndpluginbase_parts.yaml
@@ -9,13 +9,11 @@
name: inp
description: Array inport name
pv: $(prefix):NDArrayPort
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
inport: NDArray
- ca.parts.CABooleanPart:
name: enableCallbacks
description: Enable plugin to run when we get a new frame
pv: $(prefix):EnableCallbacks
- rbvSuff: _RBV
- widget: checkbox
+ rbv_suff: _RBV
diff --git a/malcolm/modules/ADCore/infos.py b/malcolm/modules/ADCore/infos.py
new file mode 100644
index 000000000..3e8cd0f50
--- /dev/null
+++ b/malcolm/modules/ADCore/infos.py
@@ -0,0 +1,93 @@
+from malcolm.core import Info
+
+from .util import DatasetType, AttributeDatasetType
+
+
+class ExposureDeadtimeInfo(Info):
+ """Detector exposure time should be generator.duration - deadtime
+
+ Args:
+ readout_time: The per frame readout time of the detector
+ frequency_accuracy: The crystal accuracy in ppm
+ """
+ def __init__(self, readout_time, frequency_accuracy):
+ # type: (float, float) -> None
+ self.readout_time = readout_time
+ self.frequency_accuracy = frequency_accuracy
+
+ def calculate_exposure(self, duration):
+ # type: (float) -> float
+ """Calculate the exposure to set the detector to given the duration of
+ the frame and the readout_time and frequency_accuracy"""
+ exposure = duration - self.frequency_accuracy * duration / 1000000.0 - \
+ self.readout_time
+ assert exposure > 0.0, \
+ "Exposure time %s too small when deadtime taken into account" % (
+ exposure,)
+ return exposure
+
+
+class NDArrayDatasetInfo(Info):
+ """Declare the NDArray data this produces as being a useful dataset to store
+ to file
+
+ Args:
+ rank: The rank of the dataset, e.g. 2 for a 2D detector
+ """
+ def __init__(self, rank):
+ # type: (int) -> None
+ self.rank = rank
+
+
+class CalculatedNDAttributeDatasetInfo(Info):
+ """Declare that we have calculated some statistics from the main dataset
+ and these will be available
+
+ Args:
+ name: Dataset name that should be written to
+ attr: NDAttribute name to get data from
+ """
+ def __init__(self, name, attr):
+ # type: (str, str) -> None
+ self.name = name
+ self.attr = attr
+
+
+class NDAttributeDatasetInfo(Info):
+ """Declare an NDAttribute attached to this NDArray to produce a useful
+ dataset to store to file
+
+ Args:
+ name: Dataset name that should be written to
+ type: What NeXuS dataset type it produces
+ attr: NDAttribute name to get data from
+ rank: The rank of the dataset
+ """
+ def __init__(self, name, type, attr, rank):
+ # type: (str, AttributeDatasetType, str, int) -> None
+ self.name = name
+ self.type = type
+ self.attr = attr
+ self.rank = rank
+
+
+class DatasetProducedInfo(Info):
+ """Declare that we will write the following dataset to file
+
+ Args:
+ name: Dataset name
+ filename: Filename relative to the fileDir we were given
+ type: What NeXuS dataset type it produces
+ rank: The rank of the dataset including generator dims
+ path: The path of the dataset within the file
+ uniqueid: The path of the UniqueID dataset within the file
+ """
+ def __init__(self, name, filename, type, rank, path, uniqueid):
+ # type: (str, str, DatasetType, int, str, str) -> None
+ self.name = name
+ self.filename = filename
+ self.type = type
+ self.rank = rank
+ self.path = path
+ self.uniqueid = uniqueid
+
diff --git a/malcolm/modules/ADCore/infos/__init__.py b/malcolm/modules/ADCore/infos/__init__.py
deleted file mode 100644
index 9680d3df8..000000000
--- a/malcolm/modules/ADCore/infos/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .calculatedndattributedatasetinfo import CalculatedNDAttributeDatasetInfo
-from .datasetproducedinfo import DatasetProducedInfo, dataset_types
-from .ndarraydatasetinfo import NDArrayDatasetInfo
-from .ndattributedatasetinfo import NDAttributeDatasetInfo, \
- attribute_dataset_types
-from .uniqueidinfo import UniqueIdInfo
-
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
diff --git a/malcolm/modules/ADCore/infos/calculatedndattributedatasetinfo.py b/malcolm/modules/ADCore/infos/calculatedndattributedatasetinfo.py
deleted file mode 100644
index 59204cca0..000000000
--- a/malcolm/modules/ADCore/infos/calculatedndattributedatasetinfo.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from malcolm.core import Info
-
-
-class CalculatedNDAttributeDatasetInfo(Info):
- def __init__(self, name, attr):
- self.name = name
- self.attr = attr
diff --git a/malcolm/modules/ADCore/infos/datasetproducedinfo.py b/malcolm/modules/ADCore/infos/datasetproducedinfo.py
deleted file mode 100644
index 976803f24..000000000
--- a/malcolm/modules/ADCore/infos/datasetproducedinfo.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from malcolm.core import Info
-
-dataset_types = [
- "primary", "secondary", "monitor", "position_set", "position_value"]
-
-
-class DatasetProducedInfo(Info):
- def __init__(self, name, filename, type, rank, path, uniqueid):
- self.name = name
- self.filename = filename
- assert type in dataset_types, \
- "Dataset type %s not in %s" % (type, dataset_types)
- self.type = type
- self.rank = rank
- self.path = path
- self.uniqueid = uniqueid
-
-
diff --git a/malcolm/modules/ADCore/infos/ndarraydatasetinfo.py b/malcolm/modules/ADCore/infos/ndarraydatasetinfo.py
deleted file mode 100644
index 8498db1fb..000000000
--- a/malcolm/modules/ADCore/infos/ndarraydatasetinfo.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from malcolm.core import Info
-
-
-class NDArrayDatasetInfo(Info):
- """Declare the NDArray data this produces as being a useful dataset
-
- Args:
- rank (int): The rank of the data, e.g. 2 for a 2D detector
- """
- def __init__(self, rank):
- self.rank = rank
diff --git a/malcolm/modules/ADCore/infos/ndattributedatasetinfo.py b/malcolm/modules/ADCore/infos/ndattributedatasetinfo.py
deleted file mode 100644
index 9443db7aa..000000000
--- a/malcolm/modules/ADCore/infos/ndattributedatasetinfo.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from malcolm.core import Info
-
-
-attribute_dataset_types = ["detector", "monitor", "position"]
-
-
-class NDAttributeDatasetInfo(Info):
- def __init__(self, name, type, attr, rank):
- self.name = name
- assert type in attribute_dataset_types, \
- "Dataset type %s not in %s" % (type, attribute_dataset_types)
- self.type = type
- self.attr = attr
- self.rank = rank
-
-
diff --git a/malcolm/modules/ADCore/infos/uniqueidinfo.py b/malcolm/modules/ADCore/infos/uniqueidinfo.py
deleted file mode 100644
index afa59f6f6..000000000
--- a/malcolm/modules/ADCore/infos/uniqueidinfo.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from malcolm.core import Info
-
-
-class UniqueIdInfo(Info):
- """Report the current value of the UniqueId array counter"""
- def __init__(self, value):
- self.value = value
diff --git a/malcolm/modules/ADCore/parts/__init__.py b/malcolm/modules/ADCore/parts/__init__.py
index 9bf215ccf..f0163b4ac 100644
--- a/malcolm/modules/ADCore/parts/__init__.py
+++ b/malcolm/modules/ADCore/parts/__init__.py
@@ -1,10 +1,14 @@
from .datasetrunnablechildpart import DatasetRunnableChildPart
from .datasettablepart import DatasetTablePart
-from .detectordriverpart import DetectorDriverPart, configure_args
-from .exposuredetectordriverpart import ExposureDetectorDriverPart
-from .hdfwriterpart import HDFWriterPart
+from .detectordriverpart import DetectorDriverPart, APartName, AMri, \
+ AHardwareTriggered, AMainDatasetUseful
+from .exposuredeadtimepart import ExposureDeadtimePart, AInitialAccuracy, \
+ AInitialReadoutTime
+from .hdfwriterpart import HDFWriterPart, AFileDir, AFileTemplate, AFormatName
from .positionlabellerpart import PositionLabellerPart
from .statspluginpart import StatsPluginPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/ADCore/parts/datasetrunnablechildpart.py b/malcolm/modules/ADCore/parts/datasetrunnablechildpart.py
index ed52951cc..b4193ec31 100644
--- a/malcolm/modules/ADCore/parts/datasetrunnablechildpart.py
+++ b/malcolm/modules/ADCore/parts/datasetrunnablechildpart.py
@@ -1,49 +1,48 @@
-from malcolm.core import method_takes
-from malcolm.modules.ADCore.infos import DatasetProducedInfo
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.scanning.parts import RunnableChildPart
+from annotypes import add_call_types, Any
+
+from malcolm.core import APartName
+from malcolm.modules.scanning.parts import RunnableChildPart, AMri, \
+ AInitialVisibility
+from malcolm.modules import scanning
+from ..infos import DatasetProducedInfo
class DatasetRunnableChildPart(RunnableChildPart):
"""Part controlling a configure/run child Block with a dataset table"""
- def update_part_configure_args(self, response, without=()):
- # Decorate validate and configure with the sum of its parts
- super(DatasetRunnableChildPart, self).update_part_configure_args(
- response, without=without + ("formatName",))
- def _params_with_format_name(self, params):
- new_params = dict(formatName=self.name)
- new_params.update(params)
- return new_params
+ def __init__(self, name, mri, initial_visibility=False):
+ # type: (APartName, AMri, AInitialVisibility) -> None
+ super(DatasetRunnableChildPart, self).__init__(
+ name, mri, initial_visibility, ignore_configure_args="formatName")
- # Method will be filled in by update_configure_validate_args
- @RunnableController.Validate
- @method_takes()
- def validate(self, context, part_info, params):
- params = self._params_with_format_name(params)
- return super(DatasetRunnableChildPart, self).validate(
- context, part_info, params)
+ @add_call_types
+ def validate(self,
+ context, # type: scanning.hooks.AContext
+ **kwargs # type: **Any
+ ):
+ # type: (...) -> scanning.hooks.UParameterTweakInfos
+ child = context.block_view(self.mri)
+ # Add formatName in if the child wants it
+ if "formatName" in child.configure.takes.elements:
+ kwargs["formatName"] = self.name
+ return super(DatasetRunnableChildPart, self).validate(context, **kwargs)
- # Method will be filled in at update_configure_validate_args
- @RunnableController.Configure
- @method_takes()
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- child = context.block_view(self.params.mri)
+ @add_call_types
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ **kwargs # type: **Any
+ ):
+ # type: (...) -> scanning.hooks.UInfos
+ child = context.block_view(self.mri)
+ # Add formatName in if the child wants it
if "formatName" in child.configure.takes.elements:
- params = self._params_with_format_name(params)
- super(DatasetRunnableChildPart, self).configure(
- context, completed_steps, steps_to_do, part_info, params)
+ kwargs["formatName"] = self.name
+ # Run the configure command
+ super(DatasetRunnableChildPart, self).configure(context, **kwargs)
info_list = []
+ # Report back any datasets the child has to our parent
if hasattr(child, "datasets"):
datasets_table = child.datasets.value
- for i in range(len(datasets_table.name)):
- info = DatasetProducedInfo(
- name=datasets_table.name[i],
- filename=datasets_table.filename[i],
- type=datasets_table.type[i],
- rank=datasets_table.rank[i],
- path=datasets_table.path[i],
- uniqueid=datasets_table.uniqueid[i])
- info_list.append(info)
+ for row in datasets_table.rows():
+ info_list.append(DatasetProducedInfo(*row))
return info_list
diff --git a/malcolm/modules/ADCore/parts/datasettablepart.py b/malcolm/modules/ADCore/parts/datasettablepart.py
index fdb6fa529..a3660f99b 100644
--- a/malcolm/modules/ADCore/parts/datasettablepart.py
+++ b/malcolm/modules/ADCore/parts/datasettablepart.py
@@ -1,44 +1,39 @@
-from malcolm.compat import OrderedDict
-from malcolm.core import Part, Table, method_takes, REQUIRED
-from malcolm.modules.ADCore.infos import DatasetProducedInfo, dataset_types
-from malcolm.modules.builtin.vmetas import StringArrayMeta, ChoiceArrayMeta, \
- TableMeta, NumberArrayMeta, StringMeta
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.tags import widget
+from annotypes import add_call_types
+
+from malcolm.core import Part, PartRegistrar, APartName, TableMeta
+from malcolm.modules import scanning
+from ..infos import DatasetProducedInfo
+from ..util import DatasetTable
-# Make a table for the dataset info we produce
-columns = OrderedDict()
-columns["name"] = StringArrayMeta("Dataset name")
-columns["filename"] = StringArrayMeta(
- "Filename of HDF file relative to fileDir")
-columns["type"] = ChoiceArrayMeta("Type of dataset", dataset_types)
-columns["rank"] = NumberArrayMeta("int32", "Rank (number of dimensions)")
-columns["path"] = StringArrayMeta("Dataset path within HDF file")
-columns["uniqueid"] = StringArrayMeta("UniqueID array path within HDF file")
-dataset_table_meta = TableMeta(
- "Datsets produced in HDF file", elements=columns, tags=[widget("table")])
-@method_takes(
- "name", StringMeta("Name of the Part within the controller"), REQUIRED)
class DatasetTablePart(Part):
"""Exposes an Attribute that reports the datasets that will be written
during a scan"""
- def __init__(self, params):
- # Created attributes
- self.datasets = None
- super(DatasetTablePart, self).__init__(params.name)
+ def __init__(self, name):
+ # type: (APartName) -> None
+ super(DatasetTablePart, self).__init__(name)
+ self.datasets = TableMeta.from_table(
+ DatasetTable, "Datasets produced in HDF file"
+ ).create_attribute_model()
+ self.register_hooked(scanning.hooks.PostConfigureHook,
+ self.post_configure)
- def create_attribute_models(self):
- # Create read-only attribute showing the datasets we are creating
- self.datasets = dataset_table_meta.create_attribute_model()
- yield "datasets", self.datasets, None
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model("datasets", self.datasets)
- @RunnableController.PostConfigure
- def update_datasets_table(self, context, part_info):
+ @add_call_types
+ def post_configure(self, part_info):
+ # type: (scanning.hooks.APartInfo) -> None
# Update the dataset table
- datasets_table = Table(dataset_table_meta)
+ name, filename, typ, rank, path, uid = [], [], [], [], [], []
for i in DatasetProducedInfo.filter_values(part_info):
- if i.name not in datasets_table.name:
- row = [i.name, i.filename, i.type, i.rank, i.path, i.uniqueid]
- datasets_table.append(row)
+ if i.name not in name:
+ name.append(i.name)
+ filename.append(i.filename)
+ typ.append(i.type)
+ rank.append(i.rank)
+ path.append(i.path)
+ uid.append(i.uniqueid)
+ datasets_table = DatasetTable(name, filename, typ, rank, path, uid)
self.datasets.set_value(datasets_table)
diff --git a/malcolm/modules/ADCore/parts/detectordriverpart.py b/malcolm/modules/ADCore/parts/detectordriverpart.py
index f643847e4..7f9bceae1 100644
--- a/malcolm/modules/ADCore/parts/detectordriverpart.py
+++ b/malcolm/modules/ADCore/parts/detectordriverpart.py
@@ -1,102 +1,84 @@
-from malcolm.core import method_takes, TimeoutError, REQUIRED
-from malcolm.modules.ADCore.infos import UniqueIdInfo
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta
-
-
-# Args for configure()
-configure_args = (
- "generator", PointGeneratorMeta("Generator instance"), REQUIRED
-)
-
-
-class DetectorDriverPart(StatefulChildPart):
- # Attributes
- trigger_mode = None
-
- # Stored futures
- start_future = None
-
- # How many we are waiting for
- done_when_reaches = None
-
- # The offset we should apply to the arrayCounter to give us completedSteps
- completed_offset = None
-
- @RunnableController.Reset
+from annotypes import Anno, add_call_types, Any
+
+from malcolm.core import APartName, BadValueError
+from malcolm.modules.builtin.parts import AMri, ChildPart
+from malcolm.modules.scanning.hooks import ReportStatusHook, \
+ ConfigureHook, PostRunArmedHook, SeekHook, RunHook, ResumeHook, PauseHook, \
+ AbortHook, AContext, UInfos, AStepsToDo, ACompletedSteps, APartInfo
+from malcolm.modules.scanning.util import AGenerator
+from ..infos import NDArrayDatasetInfo, ExposureDeadtimeInfo
+from ..util import ADBaseActions
+
+with Anno("Is detector hardware triggered?"):
+ AHardwareTriggered = bool
+with Anno("Is main detector dataset useful to publish in DatasetTable?"):
+ AMainDatasetUseful = bool
+
+
+class DetectorDriverPart(ChildPart):
+ def __init__(self,
+ name, # type: APartName
+ mri, # type: AMri
+ is_hardware_triggered=True, # type: AHardwareTriggered
+ main_dataset_useful=True, # type: AMainDatasetUseful
+ ):
+ # type: (...) -> None
+ super(DetectorDriverPart, self).__init__(name, mri)
+ self.is_hardware_triggered = is_hardware_triggered
+ self.main_dataset_useful = main_dataset_useful
+ self.actions = ADBaseActions(mri)
+ # Hooks
+ self.register_hooked(ReportStatusHook, self.report_status)
+ self.register_hooked((ConfigureHook, PostRunArmedHook, SeekHook),
+ self.configure)
+ self.register_hooked((RunHook, ResumeHook), self.run)
+ self.register_hooked((PauseHook, AbortHook), self.abort)
+
+ @add_call_types
def reset(self, context):
+ # type: (AContext) -> None
super(DetectorDriverPart, self).reset(context)
- self.abort(context)
-
- @RunnableController.ReportStatus
- def report_configuration(self, context):
- child = context.block_view(self.params.mri)
- infos = [UniqueIdInfo(child.arrayCounterReadback.value)]
- return infos
-
- @RunnableController.Configure
- @RunnableController.PostRunArmed
- @RunnableController.Seek
- @method_takes(*configure_args)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- context.unsubscribe_all()
- child = context.block_view(self.params.mri)
- fs = self.setup_detector(child, completed_steps, steps_to_do, params)
- context.wait_all_futures(fs)
- self.done_when_reaches = child.arrayCounterReadback.value + steps_to_do
- self.completed_offset = completed_steps - child.arrayCounterReadback.value
- if self.is_hardware_triggered(child):
- # Start now if we are hardware triggered
- self.start_future = child.start_async()
-
- def is_hardware_triggered(self, child):
- return True
-
- def setup_detector(self, child, completed_steps, steps_to_do, params=None):
- if completed_steps == 0:
- # This is an initial configure, so reset arrayCounter to 0
- values = dict(arrayCounter=0)
+ self.actions.abort_detector(context)
+
+ @add_call_types
+ def report_status(self):
+ # type: () -> UInfos
+ if self.main_dataset_useful:
+ return NDArrayDatasetInfo(rank=2)
+
+ @add_call_types
+ def configure(self,
+ context, # type: AContext
+ completed_steps, # type: ACompletedSteps
+ steps_to_do, # type: AStepsToDo
+ part_info, # type: APartInfo
+ generator, # type: AGenerator
+ **kwargs # type: **Any
+ ):
+ # type: (...) -> None
+ try:
+ exposure_info = ExposureDeadtimeInfo.filter_single_value(part_info)
+ except BadValueError:
+ # This is allowed, no exposure required
+ pass
else:
- # Leave the arrayCounter where it is, just start from here
- values = {}
-
- # Not all areaDetector drivers support the imageMode of Multiple
- if "Multiple" in child.imageMode.meta.choices:
- values.update(dict(imageMode="Multiple"))
-
- values.update(dict(
- numImages=steps_to_do,
- arrayCallbacks=True))
- fs = child.put_attribute_values_async(values)
- return fs
-
- def update_completed_steps(self, value, update_completed_steps):
- completed_steps = value + self.completed_offset
- update_completed_steps(completed_steps, self)
+ kwargs["exposure"] = exposure_info.calculate_exposure(
+ generator.duration)
+ self.actions.setup_detector(
+ context, completed_steps, steps_to_do, **kwargs)
+ if self.is_hardware_triggered:
+ # Start now if we are hardware triggered
+ self.actions.arm_detector(context)
- @RunnableController.Run
- @RunnableController.Resume
- def run(self, context, update_completed_steps):
- child = context.block_view(self.params.mri)
- child.arrayCounterReadback.subscribe_value(
- self.update_completed_steps, update_completed_steps)
- if not self.is_hardware_triggered(child):
- # Start now
- self.start_future = child.start_async()
- context.wait_all_futures(self.start_future)
- # Now wait for up to 2*minDelta time to make sure any
- # update_completed_steps come in
- try:
- child.when_value_matches(
- "arrayCounterReadback", self.done_when_reaches, timeout=5.0)
- except TimeoutError:
- raise ValueError("Detector %r didn't produce %s frames in time" % (
- self.params.mri, self.done_when_reaches))
+ @add_call_types
+ def run(self, context):
+ # type: (AContext) -> None
+ if not self.is_hardware_triggered:
+ # Start now if we are software triggered
+ self.actions.arm_detector(context)
+ self.actions.wait_for_detector(context, self.registrar)
- @RunnableController.Abort
- @RunnableController.Pause
+ @add_call_types
def abort(self, context):
- child = context.block_view(self.params.mri)
- child.stop()
+ # type: (AContext) -> None
+ self.actions.abort_detector(context)
diff --git a/malcolm/modules/ADCore/parts/exposuredeadtimepart.py b/malcolm/modules/ADCore/parts/exposuredeadtimepart.py
new file mode 100644
index 000000000..f05b6cce0
--- /dev/null
+++ b/malcolm/modules/ADCore/parts/exposuredeadtimepart.py
@@ -0,0 +1,67 @@
+from __future__ import division
+
+from annotypes import Anno, add_call_types
+
+from malcolm.core import Part, NumberMeta, Widget, config_tag, APartName, \
+ PartRegistrar
+from malcolm.modules import scanning
+from ..infos import ExposureDeadtimeInfo
+
+readout_desc = \
+ "Subtract this time from frame duration when calculating exposure"
+with Anno(readout_desc):
+ AInitialReadoutTime = float
+frequency_accuracy_desc = \
+ "In ppm. Subtract duration*this/1e6 when calculating exposure"
+with Anno(frequency_accuracy_desc):
+ AInitialAccuracy = float
+
+
+class ExposureDeadtimePart(Part):
+ def __init__(self,
+ name, # type: APartName
+ initial_readout_time=0.0, # type: AInitialReadoutTime
+ initial_frequency_accuracy=50.0 # type: AInitialAccuracy
+ ):
+ # type: (...) -> None
+ super(ExposureDeadtimePart, self).__init__(name)
+ self.readout_time = NumberMeta(
+ "float64", readout_desc,
+ tags=[Widget.TEXTINPUT.tag(), config_tag()]
+ ).create_attribute_model(initial_readout_time)
+ self.frequency_accuracy = NumberMeta(
+ "float64", frequency_accuracy_desc,
+ tags=[Widget.TEXTINPUT.tag(), config_tag()]
+ ).create_attribute_model(initial_frequency_accuracy)
+ # Hooks
+ self.register_hooked(
+ scanning.hooks.ReportStatusHook, self.report_status)
+ self.register_hooked(
+ scanning.hooks.ValidateHook, self.validate)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(ExposureDeadtimePart, self).setup(registrar)
+ # Attributes
+ registrar.add_attribute_model(
+ "readoutTime", self.readout_time, self.readout_time.set_value)
+ registrar.add_attribute_model(
+ "frequencyAccuracy", self.frequency_accuracy,
+ self.frequency_accuracy.set_value)
+
+ @add_call_types
+ def validate(self, generator):
+ # type: (scanning.hooks.AGenerator) -> None
+ info = ExposureDeadtimeInfo(
+ self.readout_time.value, self.frequency_accuracy.value)
+ assert generator.duration > 0, \
+ "Duration %s for generator must be >0 to signify constant " \
+ "exposure" % (generator.duration,)
+ info.calculate_exposure(generator.duration)
+
+ @add_call_types
+ def report_status(self):
+ # type: () -> scanning.hooks.UInfos
+ info = ExposureDeadtimeInfo(
+ self.readout_time.value, self.frequency_accuracy.value)
+ return info
diff --git a/malcolm/modules/ADCore/parts/exposuredetectordriverpart.py b/malcolm/modules/ADCore/parts/exposuredetectordriverpart.py
deleted file mode 100644
index 287872b35..000000000
--- a/malcolm/modules/ADCore/parts/exposuredetectordriverpart.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from malcolm.core import method_also_takes, method_takes
-from malcolm.modules.ADCore.infos import NDArrayDatasetInfo
-from malcolm.modules.builtin.vmetas import NumberMeta
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.tags import widget, config
-from .detectordriverpart import DetectorDriverPart, configure_args
-
-
-@method_also_takes(
- "readoutTime", NumberMeta(
- "float64", "Default time taken to readout detector"), 7e-5)
-class ExposureDetectorDriverPart(DetectorDriverPart):
- # Attributes
- readout_time = None
-
- def create_attribute_models(self):
- for data in super(
- ExposureDetectorDriverPart, self).create_attribute_models():
- yield data
- # Create writeable attribute for how long we should allow for detector
- # read out
- meta = NumberMeta(
- "float64", "Time taken to readout detector",
- tags=[widget("textinput"), config()])
- self.readout_time = meta.create_attribute_model(self.params.readoutTime)
- yield "readoutTime", self.readout_time, self.readout_time.set_value
-
- @RunnableController.ReportStatus
- def report_configuration(self, context):
- infos = super(ExposureDetectorDriverPart, self).report_configuration(
- context) + [NDArrayDatasetInfo(rank=2)]
- return infos
-
- @RunnableController.Validate
- @method_takes(*configure_args)
- def validate(self, context, part_info, params):
- exposure = params.generator.duration
- assert exposure > 0, \
- "Duration %s for generator must be >0 to signify constant exposure"\
- % exposure
- # TODO: should really get this from an Info from pmac trajectory part...
- exposure -= self.readout_time.value
- assert exposure > 0.0, \
- "Exposure time %s too small when readoutTime taken into account" % (
- exposure)
-
- def setup_detector(self, child, completed_steps, steps_to_do, params=None):
- fs = super(ExposureDetectorDriverPart, self).setup_detector(
- child, completed_steps, steps_to_do, params)
- exposure = params.generator.duration - self.readout_time.value
- fs.append(child.exposure.put_value_async(exposure))
- return fs
diff --git a/malcolm/modules/ADCore/parts/hdfwriterpart.py b/malcolm/modules/ADCore/parts/hdfwriterpart.py
index a174ab8f5..7ad896e65 100644
--- a/malcolm/modules/ADCore/parts/hdfwriterpart.py
+++ b/malcolm/modules/ADCore/parts/hdfwriterpart.py
@@ -2,15 +2,20 @@
import math
from xml.etree import cElementTree as ET
+from annotypes import add_call_types, Anno, TYPE_CHECKING
+from scanpointgenerator import CompoundGenerator, Dimension
+
from malcolm.compat import et_to_string
-from malcolm.core import method_takes, REQUIRED
-from malcolm.modules.ADCore.infos import CalculatedNDAttributeDatasetInfo, \
+from malcolm.core import APartName, Future, Info, Block, PartRegistrar
+from malcolm.modules import builtin, scanning
+from ..infos import CalculatedNDAttributeDatasetInfo, DatasetType, \
DatasetProducedInfo, NDArrayDatasetInfo, NDAttributeDatasetInfo, \
- attribute_dataset_types, UniqueIdInfo
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.builtin.vmetas import StringMeta
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta
+ AttributeDatasetType
+
+if TYPE_CHECKING:
+ from typing import Iterator, List, Dict
+
+ PartInfo = Dict[str, List[Info]]
SUFFIXES = "NXY3456789"
@@ -18,111 +23,271 @@
# stalled and raise
FRAME_TIMEOUT = 60
-
-class HDFWriterPart(StatefulChildPart):
- """Part for controlling an `hdf_writer_block` in a Device"""
- # Attributes
- datasets = None
-
- # Future for the start action
- start_future = None
- array_future = None
- done_when_reaches = 0
-
- # The offset we should apply to the uniqueId to give us completedSteps
- completed_offset = None
-
- # The HDF5 layout file we write to say where the datasets go
- layout_filename = None
-
- def _create_dataset_infos(self, name, part_info, generator, filename):
- # Update the dataset table
- uniqueid = "/entry/NDAttributes/NDArrayUniqueId"
- generator_rank = len(generator.dimensions)
-
- # Get the detector name from the primary source
- ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
- assert len(ndarray_infos) in (0, 1), \
- "More than one NDArrayDatasetInfo defined %s" % ndarray_infos
-
- # Add the primary datasource
- if ndarray_infos:
- ndarray_info = ndarray_infos[0]
+with Anno("Directory to write data to"):
+ AFileDir = str
+with Anno("Argument for fileTemplate, normally filename without extension"):
+ AFormatName = str
+with Anno("""Printf style template to generate filename relative to fileDir.
+Arguments are:
+ 1) %s: the value of formatName"""):
+ AFileTemplate = str
+
+
+def greater_than_zero(v):
+ # type: (int) -> bool
+ return v > 0
+
+
+def create_dataset_infos(name, part_info, generator, filename):
+ # type: (str, PartInfo, CompoundGenerator, str) -> Iterator[Info]
+ # Update the dataset table
+ uniqueid = "/entry/NDAttributes/NDArrayUniqueId"
+ generator_rank = len(generator.dimensions)
+
+ # Get the detector name from the primary source
+ ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
+ assert len(ndarray_infos) in (0, 1), \
+ "More than one NDArrayDatasetInfo defined %s" % ndarray_infos
+
+ # Add the primary datasource
+ if ndarray_infos:
+ ndarray_info = ndarray_infos[0]
+ yield DatasetProducedInfo(
+ name="%s.data" % name,
+ filename=filename,
+ type=DatasetType.PRIMARY,
+ rank=ndarray_info.rank + generator_rank,
+ path="/entry/detector/detector",
+ uniqueid=uniqueid)
+
+ # Add any secondary datasources
+ for calculated_info in \
+ CalculatedNDAttributeDatasetInfo.filter_values(part_info):
yield DatasetProducedInfo(
- name="%s.data" % name, filename=filename,
- type="primary", rank=ndarray_info.rank + generator_rank,
- path="/entry/detector/detector",
+ name="%s.%s" % (name, calculated_info.name),
+ filename=filename,
+ type=DatasetType.SECONDARY,
+ rank=ndarray_info.rank + generator_rank,
+ path="/entry/%s/%s" % (
+ calculated_info.name, calculated_info.name),
uniqueid=uniqueid)
- # Add any secondary datasources
- for calculated_info in \
- CalculatedNDAttributeDatasetInfo.filter_values(part_info):
- yield DatasetProducedInfo(
- name="%s.%s" % (name, calculated_info.name),
- filename=filename, type="secondary",
- rank=ndarray_info.rank + generator_rank,
- path="/entry/%s/%s" % (
- calculated_info.name, calculated_info.name),
- uniqueid=uniqueid)
-
- # Add all the other datasources
- for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
- if dataset_info.type == "detector":
- # Something like I0
- name = "%s.data" % dataset_info.name
- type = "primary"
- elif dataset_info.type == "monitor":
- # Something like Iref
- name = "%s.data" % dataset_info.name
- type = "monitor"
- elif dataset_info.type == "position":
- # Something like x
- name = "%s.value" % dataset_info.name
- type = "position_value"
+ # Add all the other datasources
+ for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
+ if dataset_info.type is AttributeDatasetType.DETECTOR:
+ # Something like I0
+ name = "%s.data" % dataset_info.name
+ type = DatasetType.PRIMARY
+ elif dataset_info.type is AttributeDatasetType.MONITOR:
+ # Something like Iref
+ name = "%s.data" % dataset_info.name
+ type = DatasetType.MONITOR
+ elif dataset_info.type is AttributeDatasetType.POSITION:
+ # Something like x
+ name = "%s.value" % dataset_info.name
+ type = DatasetType.POSITION_VALUE
+ else:
+ raise AttributeError("Bad dataset type %r, should be a %s" % (
+ dataset_info.type, AttributeDatasetType))
+ yield DatasetProducedInfo(
+ name=name,
+ filename=filename,
+ type=type,
+ rank=dataset_info.rank + generator_rank,
+ path="/entry/%s/%s" % (dataset_info.name, dataset_info.name),
+ uniqueid=uniqueid)
+
+ # Add any setpoint dimensions
+ for dim in generator.axes:
+ yield DatasetProducedInfo(
+ name="%s.value_set" % dim,
+ filename=filename,
+ type=DatasetType.POSITION_SET,
+ rank=1,
+ path="/entry/detector/%s_set" % dim, uniqueid="")
+
+
+def set_dimensions(child, generator):
+ # type: (Block, CompoundGenerator) -> List[Future]
+ num_dims = len(generator.dimensions)
+ assert num_dims <= 10, \
+ "Can only do 10 dims, you gave me %s" % num_dims
+ attr_dict = dict(numExtraDims=num_dims - 1)
+ # Fill in dim name and size
+ # NOTE: HDF writer has these filled with fastest moving first
+ # while dimensions is slowest moving first
+ for i in range(10):
+ suffix = SUFFIXES[i]
+ if i < num_dims:
+ forward_i = num_dims - i - 1
+ index_name = "d%d" % forward_i
+ index_size = generator.dimensions[forward_i].size
+ else:
+ index_name = ""
+ index_size = 1
+ attr_dict["posNameDim%s" % suffix] = index_name
+ attr_dict["extraDimSize%s" % suffix] = index_size
+ futures = child.put_attribute_values_async(attr_dict)
+ return futures
+
+
+def make_set_points(dimension, axis, data_el, units):
+ # type: (Dimension, str, ET.Element, str) -> None
+ axis_vals = ["%.12g" % p for p in dimension.get_positions(axis)]
+ axis_el = ET.SubElement(
+ data_el, "dataset", name="%s_set" % axis, source="constant",
+ type="float", value=",".join(axis_vals))
+ ET.SubElement(axis_el, "attribute", name="units", source="constant",
+ value=units, type="string")
+
+
+def make_nxdata(name, rank, entry_el, generator, link=False):
+ # type: (str, int, ET.Element, CompoundGenerator, bool) -> ET.Element
+ # Make a dataset for the data
+ data_el = ET.SubElement(entry_el, "group", name=name)
+ ET.SubElement(data_el, "attribute", name="signal", source="constant",
+ value=name, type="string")
+ pad_dims = []
+ for d in generator.dimensions:
+ if len(d.axes) == 1:
+ pad_dims.append("%s_set" % d.axes[0])
+ else:
+ pad_dims.append(".")
+
+ pad_dims += ["."] * rank
+ ET.SubElement(data_el, "attribute", name="axes", source="constant",
+ value=",".join(pad_dims), type="string")
+ ET.SubElement(data_el, "attribute", name="NX_class", source="constant",
+ value="NXdata", type="string")
+ # Add in the indices into the dimensions array that our axes refer to
+ for i, d in enumerate(generator.dimensions):
+ for axis in d.axes:
+ ET.SubElement(data_el, "attribute",
+ name="%s_set_indices" % axis,
+ source="constant", value=str(i), type="string")
+ if link:
+ ET.SubElement(data_el, "hardlink",
+ name="%s_set" % axis,
+ target="/entry/detector/%s_set" % axis)
else:
- raise AttributeError("Bad dataset type %r, should be in %s" % (
- dataset_info.type, attribute_dataset_types))
- yield DatasetProducedInfo(
- name=name, filename=filename, type=type,
- rank=dataset_info.rank + generator_rank,
- path="/entry/%s/%s" % (dataset_info.name, dataset_info.name),
- uniqueid=uniqueid)
-
- # Add any setpoint dimensions
- for dim in generator.axes:
- yield DatasetProducedInfo(
- name="%s.value_set" % dim, filename=filename,
- type="position_set", rank=1,
- path="/entry/detector/%s_set" % dim, uniqueid="")
+ make_set_points(
+ d, axis, data_el, generator.units[axis])
+ return data_el
+
+
+def make_layout_xml(generator, part_info):
+ # type: (CompoundGenerator, PartInfo) -> str
+ # Make a root element with an NXEntry
+ root_el = ET.Element("hdf5_layout")
+ entry_el = ET.SubElement(root_el, "group", name="entry")
+ ET.SubElement(entry_el, "attribute", name="NX_class",
+ source="constant", value="NXentry", type="string")
+
+ # Check that there is only one primary source of detector data
+ ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
+ if not ndarray_infos:
+ # Still need to put the data in the file, so manufacture something
+ primary_rank = 1
+ else:
+ primary_rank = ndarray_infos[0].rank
+
+ # Make an NXData element with the detector data in it in
+ # /entry/detector/detector
+ data_el = make_nxdata(
+ "detector", primary_rank, entry_el, generator)
+ det_el = ET.SubElement(data_el, "dataset", name="detector",
+ source="detector", det_default="true")
+ ET.SubElement(det_el, "attribute", name="NX_class",
+ source="constant", value="SDS", type="string")
+
+ # Now add any calculated sources of data
+ for dataset_info in \
+ CalculatedNDAttributeDatasetInfo.filter_values(part_info):
+ # if we are a secondary source, use the same rank as the det
+ attr_el = make_nxdata(
+ dataset_info.name, primary_rank, entry_el, generator, link=True)
+ ET.SubElement(attr_el, "dataset", name=dataset_info.name,
+ source="ndattribute", ndattribute=dataset_info.attr)
+
+ # And then any other attribute sources of data
+ for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
+ # if we are a secondary source, use the same rank as the det
+ attr_el = make_nxdata(dataset_info.name, dataset_info.rank,
+ entry_el, generator, link=True)
+ ET.SubElement(attr_el, "dataset", name=dataset_info.name,
+ source="ndattribute", ndattribute=dataset_info.attr)
+
+ # Add a group for attributes
+ NDAttributes_el = ET.SubElement(entry_el, "group", name="NDAttributes",
+ ndattr_default="true")
+ ET.SubElement(NDAttributes_el, "attribute", name="NX_class",
+ source="constant", value="NXcollection", type="string")
+ xml = et_to_string(root_el)
+ return xml
+
+
+class HDFWriterPart(builtin.parts.ChildPart):
+ """Part for controlling an `hdf_writer_block` in a Device"""
- @RunnableController.Reset
+ def __init__(self, name, mri):
+ # type: (APartName, scanning.parts.AMri) -> None
+ super(HDFWriterPart, self).__init__(name, mri)
+ # Future for the start action
+ self.start_future = None # type: Future
+ self.array_future = None # type: Future
+ self.done_when_reaches = 0
+ # CompletedSteps = arrayCounter + self.uniqueid_offset
+ self.uniqueid_offset = 0
+ # The HDF5 layout file we write to say where the datasets go
+ self.layout_filename = None # type: str
+ # Hooks
+ self.register_hooked(scanning.hooks.ConfigureHook, self.configure)
+ self.register_hooked((scanning.hooks.PostRunArmedHook,
+ scanning.hooks.SeekHook), self.seek)
+ self.register_hooked((scanning.hooks.RunHook,
+ scanning.hooks.ResumeHook), self.run)
+ self.register_hooked(scanning.hooks.PostRunReadyHook,
+ self.post_run_ready)
+ self.register_hooked(scanning.hooks.AbortHook, self.abort)
+
+ @add_call_types
def reset(self, context):
+ # type: (scanning.hooks.AContext) -> None
super(HDFWriterPart, self).reset(context)
self.abort(context)
- @RunnableController.Configure
- @method_takes(
- "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
- "fileDir", StringMeta("Directory to write hdf file to"), REQUIRED,
- "formatName", StringMeta(
- "Argument for fileTemplate, normally filename without extension"),
- "det",
- "fileTemplate", StringMeta(
- """Printf style template to generate filename relative to fileDir.
- Arguments are:
- 1) %s: the value of formatName"""), "%s.h5")
- def configure(self, context, completed_steps, steps_to_do, part_info, params):
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(HDFWriterPart, self).setup(registrar)
+ # Tell the controller to expose some extra configure parameters
+ registrar.report(scanning.hooks.ConfigureHook.create_info(
+ self.configure))
+
+ # Allow CamelCase as these parameters will be serialized
+ # noinspection PyPep8Naming
+ @add_call_types
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ part_info, # type: scanning.hooks.APartInfo
+ generator, # type: scanning.hooks.AGenerator
+ fileDir, # type: AFileDir
+ formatName="det", # type: AFormatName
+ fileTemplate="%s.h5", # type: AFileTemplate
+ ):
+ # type: (...) -> scanning.hooks.UInfos
# On initial configure, expect to get the demanded number of frames
self.done_when_reaches = completed_steps + steps_to_do
- self.completed_offset = 0
- child = context.block_view(self.params.mri)
+ self.uniqueid_offset = 0
+ child = context.block_view(self.mri)
# For first run then open the file
# Enable position mode before setting any position related things
child.positionMode.put_value(True)
# Setup our required settings
# TODO: this should be different for windows detectors
- file_dir = params.fileDir.rstrip(os.sep)
- filename = params.fileTemplate % params.formatName
+ file_dir = fileDir.rstrip(os.sep)
+ filename = fileTemplate % formatName
assert "." in filename, \
"File extension for %r should be supplied" % filename
futures = child.put_attribute_values_async(dict(
@@ -134,28 +299,28 @@ def configure(self, context, completed_steps, steps_to_do, part_info, params):
lazyOpen=True,
arrayCounter=0,
filePath=file_dir + os.sep,
- fileName=params.formatName,
- fileTemplate="%s" + params.fileTemplate))
- futures += self._set_dimensions(child, params.generator)
- xml = self._make_layout_xml(params.generator, part_info)
+ fileName=formatName,
+ fileTemplate="%s" + fileTemplate))
+ futures += set_dimensions(child, generator)
+ xml = make_layout_xml(generator, part_info)
self.layout_filename = os.path.join(
- file_dir, "%s-layout.xml" % self.params.mri)
+ file_dir, "%s-layout.xml" % self.mri)
with open(self.layout_filename, "w") as f:
f.write(xml)
# We want the HDF writer to flush this often:
flush_time = 1 # seconds
# (In particular this means that HDF files can be read cleanly by
# SciSoft at the start of a scan.)
- assert params.generator.duration > 0, \
- "Duration %s for generator must be >0 to signify constant exposure"\
- % params.generator.duration
- if params.generator.duration > flush_time:
+ assert generator.duration > 0, \
+ "Duration %s for generator must be >0 to signify fixed exposure" \
+ % generator.duration
+ if generator.duration > flush_time:
# We are going slower than 1/flush_time Hz, so flush every frame
n_frames_between_flushes = 1
else:
# Limit update rate to be every flush_time seconds
n_frames_between_flushes = int(math.ceil(
- flush_time / params.generator.duration))
+ flush_time / generator.duration))
# But make sure we flush in this round of frames
n_frames_between_flushes = min(
steps_to_do, n_frames_between_flushes)
@@ -171,167 +336,56 @@ def configure(self, context, completed_steps, steps_to_do, part_info, params):
self.start_future = child.start_async()
# Start a future waiting for the first array
self.array_future = child.when_value_matches_async(
- "arrayCounterReadback", self._greater_than_zero)
+ "arrayCounterReadback", greater_than_zero)
# Return the dataset information
- dataset_infos = list(self._create_dataset_infos(
- params.formatName, part_info, params.generator, filename))
+ dataset_infos = list(create_dataset_infos(
+ formatName, part_info, generator, filename))
return dataset_infos
- def _greater_than_zero(self, v):
- return v > 0
-
- @RunnableController.PostRunArmed
- @RunnableController.Seek
- def seek(self, context, completed_steps, steps_to_do, part_info):
- # The detector has been setup differently, so work out what the last
- # frame it will produce is called
- infos = UniqueIdInfo.filter_values(part_info)
- assert len(infos) == 1, \
- "Expected one uniqueId reporter, got %r" % (infos,)
- self.done_when_reaches = infos[0].value + steps_to_do
- self.completed_offset = completed_steps - infos[0].value
- child = context.block_view(self.params.mri)
+ @add_call_types
+ def seek(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ ):
+ # type: (...) -> None
+ # This is rewinding or setting up for another batch, so the detector
+ # will skip to a uniqueID that has not been produced yet
+ self.uniqueid_offset = completed_steps - self.done_when_reaches
+ self.done_when_reaches += steps_to_do
+ child = context.block_view(self.mri)
# Just reset the array counter_block
child.arrayCounter.put_value(0)
# Start a future waiting for the first array
self.array_future = child.when_value_matches_async(
- "arrayCounterReadback", self._greater_than_zero)
-
- def update_completed_steps(self, value, update_completed_steps):
- completed_steps = value + self.completed_offset
- update_completed_steps(completed_steps, self)
+ "arrayCounterReadback", greater_than_zero)
- @RunnableController.Run
- @RunnableController.Resume
- def run(self, context, update_completed_steps):
+ @add_call_types
+ def run(self, context):
+ # type: (scanning.hooks.AContext) -> None
context.wait_all_futures(self.array_future)
context.unsubscribe_all()
- child = context.block_view(self.params.mri)
- child.uniqueId.subscribe_value(
- self.update_completed_steps, update_completed_steps)
+ child = context.block_view(self.mri)
+ child.uniqueId.subscribe_value(self.update_completed_steps)
# TODO: what happens if we miss the last frame?
child.when_value_matches(
"uniqueId", self.done_when_reaches, event_timeout=FRAME_TIMEOUT)
- @RunnableController.PostRunReady
+ @add_call_types
def post_run_ready(self, context):
+ # type: (scanning.hooks.AContext) -> None
# If this is the last one, wait until the file is closed
context.wait_all_futures(self.start_future)
# Delete the layout XML file
os.remove(self.layout_filename)
- @RunnableController.Abort
+ @add_call_types
def abort(self, context):
- child = context.block_view(self.params.mri)
+ # type: (scanning.hooks.AContext) -> None
+ child = context.block_view(self.mri)
child.stop()
- def _set_dimensions(self, child, generator):
- num_dims = len(generator.dimensions)
- assert num_dims <= 10, \
- "Can only do 10 dims, you gave me %s" % num_dims
- attr_dict = dict(numExtraDims=num_dims-1)
- # Fill in dim name and size
- # NOTE: HDF writer has these filled with fastest moving first
- # while dimensions is slowest moving first
- for i in range(10):
- suffix = SUFFIXES[i]
- if i < num_dims:
- forward_i = num_dims - i - 1
- index_name = "d%d" % forward_i
- index_size = generator.dimensions[forward_i].size
- else:
- index_name = ""
- index_size = 1
- attr_dict["posNameDim%s" % suffix] = index_name
- attr_dict["extraDimSize%s" % suffix] = index_size
- futures = child.put_attribute_values_async(attr_dict)
- return futures
-
- def _make_nxdata(self, name, rank, entry_el, generator, link=False):
- # Make a dataset for the data
- data_el = ET.SubElement(entry_el, "group", name=name)
- ET.SubElement(data_el, "attribute", name="signal", source="constant",
- value=name, type="string")
- pad_dims = []
- for d in generator.dimensions:
- if len(d.axes) == 1:
- pad_dims.append("%s_set" % d.axes[0])
- else:
- pad_dims.append(".")
-
- pad_dims += ["."] * rank
- ET.SubElement(data_el, "attribute", name="axes", source="constant",
- value=",".join(pad_dims), type="string")
- ET.SubElement(data_el, "attribute", name="NX_class", source="constant",
- value="NXdata", type="string")
- # Add in the indices into the dimensions array that our axes refer to
- for i, d in enumerate(generator.dimensions):
- for axis in d.axes:
- ET.SubElement(data_el, "attribute",
- name="%s_set_indices" % axis,
- source="constant", value=str(i), type="string")
- if link:
- ET.SubElement(data_el, "hardlink",
- name="%s_set" % axis,
- target="/entry/detector/%s_set" % axis)
- else:
- self._make_set_points(
- d, axis, data_el, generator.units[axis])
- return data_el
-
- def _make_set_points(self, dimension, axis, data_el, units):
- axis_vals = ["%.12g" % p for p in dimension.get_positions(axis)]
- axis_el = ET.SubElement(
- data_el, "dataset", name="%s_set" % axis, source="constant",
- type="float", value=",".join(axis_vals))
- ET.SubElement(axis_el, "attribute", name="units", source="constant",
- value=units, type="string")
-
- def _make_layout_xml(self, generator, part_info):
- # Make a root element with an NXEntry
- root_el = ET.Element("hdf5_layout")
- entry_el = ET.SubElement(root_el, "group", name="entry")
- ET.SubElement(entry_el, "attribute", name="NX_class",
- source="constant", value="NXentry", type="string")
-
- # Check that there is only one primary source of detector data
- ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
- if not ndarray_infos:
- # Still need to put the data in the file, so manufacture something
- primary_rank = 1
- else:
- primary_rank = ndarray_infos[0].rank
-
- # Make an NXData element with the detector data in it in
- # /entry/detector/detector
- data_el = self._make_nxdata(
- "detector", primary_rank, entry_el, generator)
- det_el = ET.SubElement(data_el, "dataset", name="detector",
- source="detector", det_default="true")
- ET.SubElement(det_el, "attribute", name="NX_class",
- source="constant", value="SDS", type="string")
-
- # Now add any calculated sources of data
- for dataset_info in \
- CalculatedNDAttributeDatasetInfo.filter_values(part_info):
- # if we are a secondary source, use the same rank as the det
- attr_el = self._make_nxdata(
- dataset_info.name, primary_rank, entry_el, generator, link=True)
- ET.SubElement(attr_el, "dataset", name=dataset_info.name,
- source="ndattribute", ndattribute=dataset_info.attr)
-
- # And then any other attribute sources of data
- for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
- # if we are a secondary source, use the same rank as the det
- attr_el = self._make_nxdata(dataset_info.name, dataset_info.rank,
- entry_el, generator, link=True)
- ET.SubElement(attr_el, "dataset", name=dataset_info.name,
- source="ndattribute", ndattribute=dataset_info.attr)
-
- # Add a group for attributes
- NDAttributes_el = ET.SubElement(entry_el, "group", name="NDAttributes",
- ndattr_default="true")
- ET.SubElement(NDAttributes_el, "attribute", name="NX_class",
- source="constant", value="NXcollection", type="string")
- xml = et_to_string(root_el)
- return xml
+ def update_completed_steps(self, value):
+ # type: (int) -> None
+ completed_steps = value + self.uniqueid_offset
+ self.registrar.report(scanning.infos.RunProgressInfo(completed_steps))
diff --git a/malcolm/modules/ADCore/parts/positionlabellerpart.py b/malcolm/modules/ADCore/parts/positionlabellerpart.py
index efc7b8065..8b7222f62 100644
--- a/malcolm/modules/ADCore/parts/positionlabellerpart.py
+++ b/malcolm/modules/ADCore/parts/positionlabellerpart.py
@@ -1,11 +1,10 @@
from xml.etree import cElementTree as ET
+from annotypes import TYPE_CHECKING, add_call_types, Any
+
from malcolm.compat import et_to_string
-from malcolm.core import method_takes, REQUIRED
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta
-from malcolm.modules.ADCore.infos import UniqueIdInfo
+from malcolm.core import APartName, Hook
+from malcolm.modules import builtin, scanning
# How big an XML file can the EPICS waveform receive?
XML_MAX_SIZE = 1000000 - 2
@@ -16,21 +15,105 @@
# How far to load ahead
N_LOAD_AHEAD = 4
+if TYPE_CHECKING:
+ from typing import Tuple
+
+
+class PositionLabellerPart(builtin.parts.ChildPart):
+ """Part for controlling a `position_labeller_block` in a scan"""
+
+ def __init__(self, name, mri):
+ # type: (APartName, builtin.parts.AMri) -> None
+ super(PositionLabellerPart, self).__init__(name, mri)
+ # Stored generator for positions
+ self.generator = None
+ # The last index we have loaded
+ self.end_index = 0
+ # Where we should stop loading points
+ self.steps_up_to = 0
+ # Future for plugin run
+ self.start_future = None
+ # If we are currently loading then block loading more points
+ self.loading = False
+ # When arrayCounter gets to here we are done
+ self.done_when_reaches = 0
+ # Hooks
+ self.register_hooked((scanning.hooks.ConfigureHook,
+ scanning.hooks.PostRunArmedHook,
+ scanning.hooks.SeekHook), self.configure)
+ self.register_hooked((scanning.hooks.RunHook,
+ scanning.hooks.ResumeHook), self.run)
+ self.register_hooked((scanning.hooks.AbortHook,
+ scanning.hooks.PauseHook), self.abort)
+
+ @add_call_types
+ def reset(self, context):
+ # type: (scanning.hooks.AContext) -> None
+ super(PositionLabellerPart, self).reset(context)
+ self.abort(context)
+
+ @add_call_types
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ generator, # type: scanning.hooks.AGenerator
+ ):
+ # type: (...) -> None
+ # clear out old subscriptions
+ context.unsubscribe_all()
+ self.generator = generator
+ # Work out the offset between the generator index and uniqueID
+ if completed_steps == 0:
+ # The detector will reset, so the first uniqueId (for index 0)
+ # will be 1
+ id_start = 1
+ self.done_when_reaches = steps_to_do
+ else:
+ # This is rewinding or setting up for another batch, so the detector
+ # will skip to a uniqueID that has not been produced yet
+ id_start = self.done_when_reaches + 1
+ self.done_when_reaches += steps_to_do
+ # Delete any remaining old positions
+ child = context.block_view(self.mri)
+ futures = [child.delete_async()]
+ futures += child.put_attribute_values_async(dict(
+ enableCallbacks=True,
+ idStart=id_start))
+ self.steps_up_to = completed_steps + steps_to_do
+ xml, self.end_index = self._make_xml(completed_steps)
+ # Wait for the previous puts to finish
+ context.wait_all_futures(futures)
+ # Put the xml
+ child.xml.put_value(xml)
+ # Start the plugin
+ self.start_future = child.start_async()
+
+ @add_call_types
+ def run(self, context):
+ # type: (scanning.hooks.AContext) -> None
+ self.loading = False
+ child = context.block_view(self.mri)
+ child.qty.subscribe_value(self.load_more_positions, child)
+ context.wait_all_futures(self.start_future)
-class PositionLabellerPart(StatefulChildPart):
- """Part for controlling a `position_labeller_block` in a Device"""
- # Stored generator for positions
- generator = None
- # The last index we have loaded
- end_index = 0
- # Where we should stop loading points
- steps_up_to = 0
- # Future for plugin run
- start_future = None
- # If we are currently loading then block loading more points
- loading = False
+ @add_call_types
+ def abort(self, context):
+ # type: (scanning.hooks.AContext) -> None
+ child = context.block_view(self.mri)
+ child.stop()
+
+ def load_more_positions(self, number_left, child):
+ # type: (int, Any) -> None
+ if not self.loading and self.end_index < self.steps_up_to and \
+ number_left < POSITIONS_PER_XML * N_LOAD_AHEAD:
+ self.loading = True
+ xml, self.end_index = self._make_xml(self.end_index)
+ child.xml.put_value(xml)
+ self.loading = False
def _make_xml(self, start_index):
+ # type: (int) -> Tuple[str, int]
# Make xml root
root_el = ET.Element("pos_layout")
@@ -66,67 +149,3 @@ def _make_xml(self, start_index):
xml_length = len(xml)
assert xml_length < XML_MAX_SIZE, "XML size %d too big" % xml_length
return xml, end_index
-
- @RunnableController.Reset
- def reset(self, context):
- super(PositionLabellerPart, self).reset(context)
- self.abort(context)
-
- @RunnableController.Configure
- @RunnableController.PostRunArmed
- @RunnableController.Seek
- @method_takes(
- "generator", PointGeneratorMeta("Generator instance"), REQUIRED)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- # clear out old subscriptions
- context.unsubscribe_all()
- self.generator = params.generator
- # Work out the offset between the generator index and uniqueID
- if completed_steps == 0:
- # The detector will reset, so the first uniqueId (for index 0)
- # will be 1
- idStart = 1
- else:
- # The detector will report the last frame it produced, so the
- # first ID will be that number plus 1
- infos = UniqueIdInfo.filter_values(part_info)
- assert len(infos) == 1, \
- "Expected one uniqueId reporter, got %r" % (infos,)
- idStart = infos[0].value + 1
- # Delete any remaining old positions
- child = context.block_view(self.params.mri)
- futures = [child.delete_async()]
- futures += child.put_attribute_values_async(dict(
- enableCallbacks=True,
- idStart=idStart))
- self.steps_up_to = completed_steps + steps_to_do
- xml, self.end_index = self._make_xml(completed_steps)
- # Wait for the previous puts to finish
- context.wait_all_futures(futures)
- # Put the xml
- child.xml.put_value(xml)
- # Start the plugin
- self.start_future = child.start_async()
-
- @RunnableController.Run
- @RunnableController.Resume
- def run(self, context, update_completed_steps):
- self.loading = False
- child = context.block_view(self.params.mri)
- child.qty.subscribe_value(self.load_more_positions, child)
- context.wait_all_futures(self.start_future)
-
- def load_more_positions(self, number_left, child):
- if not self.loading and self.end_index < self.steps_up_to and \
- number_left < POSITIONS_PER_XML * N_LOAD_AHEAD:
- self.loading = True
- xml, self.end_index = self._make_xml(self.end_index)
- child.xml.put_value(xml)
- self.loading = False
-
- @RunnableController.Abort
- @RunnableController.Pause
- def abort(self, context):
- child = context.block_view(self.params.mri)
- child.stop()
diff --git a/malcolm/modules/ADCore/parts/statspluginpart.py b/malcolm/modules/ADCore/parts/statspluginpart.py
index d924d137d..916ea04dc 100644
--- a/malcolm/modules/ADCore/parts/statspluginpart.py
+++ b/malcolm/modules/ADCore/parts/statspluginpart.py
@@ -1,76 +1,76 @@
import os
from xml.etree import cElementTree as ET
-from malcolm.compat import et_to_string, OrderedDict
-from malcolm.core import REQUIRED, method_takes, method_also_takes
-from malcolm.modules.ADCore.infos import CalculatedNDAttributeDatasetInfo
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.builtin.vmetas import StringMeta, ChoiceMeta
-from malcolm.modules.scanning.controllers import RunnableController
+from annotypes import Anno, add_call_types
+from malcolm.compat import et_to_string
+from malcolm.core import APartName, Hook
+from malcolm.modules import builtin, scanning
+from ..infos import CalculatedNDAttributeDatasetInfo
+from ..util import StatisticsName
-statistics = OrderedDict()
+with Anno("Which statistic to capture"):
+ AStatisticsName = StatisticsName
+with Anno("Directory to write data to"):
+ AFileDir = str
-statistics["min"] = "MIN_VALUE" # Minimum counts in any element
-statistics["min_x"] = "MIN_X" # X position of minimum counts
-statistics["min_y"] = "MIN_Y" # Y position of minimum counts
-statistics["max"] = "MAX_VALUE" # Maximum counts in any element
-statistics["max_x"] = "MAX_X" # X position of maximum counts
-statistics["max_y"] = "MAX_Y" # Y position of maximum counts
-statistics["mean"] = "MEAN_VALUE" # Mean counts of all elements
-statistics["sigma"] = "SIGMA_VALUE" # Sigma of all elements
-statistics["sum"] = "TOTAL" # Sum of all elements
-statistics["net"] = "NET" # Sum of all elements not in background region
-
-@method_also_takes(
- "statistic", ChoiceMeta("Which statistic to capture", statistics), "sum")
-class StatsPluginPart(StatefulChildPart):
+class StatsPluginPart(builtin.parts.ChildPart):
"""Part for controlling a `stats_plugin_block` in a Device"""
- # The NDAttributes file we write to say what to capture
- attributes_filename = None
- @RunnableController.ReportStatus
- def report_info(self, _):
- statistic, _, attr = self._get_statistic_source_attr()
- return [CalculatedNDAttributeDatasetInfo(name=statistic, attr=attr)]
+ def __init__(self, name, mri, statistic=StatisticsName.SUM):
+ # type: (APartName, builtin.parts.AMri, AStatisticsName) -> None
+ super(StatsPluginPart, self).__init__(name, mri)
+ self.statistic = statistic
+ # The NDAttributes file we write to say what to capture
+ self.attributes_filename = None # type: str
+ # Hooks
+ self.register_hooked(scanning.hooks.ReportStatusHook,
+ self.report_status)
+ self.register_hooked(scanning.hooks.ConfigureHook, self.configure)
+ self.register_hooked(scanning.hooks.PostRunReadyHook,
+ self.post_run_ready)
+
+ @add_call_types
+ def report_status(self):
+ # type: () -> scanning.hooks.UInfos
+ return [CalculatedNDAttributeDatasetInfo(
+ name=self.statistic.name.lower(), attr=self.statistic_attr())]
- def _get_statistic_source_attr(self):
- statistic = self.params.statistic
- source = statistics[statistic]
- attr = "STATS_%s" % source
- return statistic, source, attr
+ def statistic_attr(self):
+ # type: () -> str
+ return "STATS_%s" % self.statistic.value
def _make_attributes_xml(self):
# Make a root element with an NXEntry
root_el = ET.Element("Attributes")
- statistic, source, attr = self._get_statistic_source_attr()
ET.SubElement(
root_el, "Attribute", addr="0", datatype="DOUBLE", type="PARAM",
- description="%s of the array" % statistic.title(), name=attr,
- source=source)
+ description="%s of the array" % self.statistic.name.title(),
+ name=self.statistic_attr(), source=self.statistic.value)
xml = et_to_string(root_el)
return xml
- @RunnableController.Configure
- @method_takes(
- "fileDir", StringMeta("File directory to write data to"), REQUIRED)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- child = context.block_view(self.params.mri)
+ # Allow CamelCase as these parameters will be serialized
+ # noinspection PyPep8Naming
+ @add_call_types
+ def configure(self, context, fileDir):
+ # type: (scanning.hooks.AContext, AFileDir) -> None
+ child = context.block_view(self.mri)
fs = child.put_attribute_values_async(dict(
enableCallbacks=True,
computeStatistics=True))
xml = self._make_attributes_xml()
self.attributes_filename = os.path.join(
- params.fileDir, "%s-attributes.xml" % self.params.mri)
+ fileDir, "%s-attributes.xml" % self.mri)
with open(self.attributes_filename, "w") as f:
f.write(xml)
fs.append(
child.attributesFile.put_value_async(self.attributes_filename))
context.wait_all_futures(fs)
- @RunnableController.PostRunReady
- def post_run_ready(self, context):
+ @add_call_types
+ def post_run_ready(self):
+ # type: () -> None
# Delete the attribute XML file
os.remove(self.attributes_filename)
diff --git a/malcolm/modules/ADCore/util.py b/malcolm/modules/ADCore/util.py
new file mode 100644
index 000000000..058335a15
--- /dev/null
+++ b/malcolm/modules/ADCore/util.py
@@ -0,0 +1,143 @@
+from annotypes import Anno, Array, Union, Sequence, TYPE_CHECKING
+from enum import Enum
+import numpy as np
+
+from malcolm.core import Table, Future, Context, TimeoutError, PartRegistrar
+from malcolm.modules import scanning
+
+if TYPE_CHECKING:
+ from typing import List, Any
+
+
+class AttributeDatasetType(Enum):
+ DETECTOR = "detector"
+ MONITOR = "monitor"
+ POSITION = "position"
+
+
+class DatasetType(Enum):
+ PRIMARY = "primary"
+ SECONDARY = "secondary"
+ MONITOR = "monitor"
+ POSITION_SET = "position_set"
+ POSITION_VALUE = "position_value"
+
+
+class StatisticsName(Enum):
+ MIN = "MIN_VALUE" # Minimum counts in any element
+ MIN_X = "MIN_X" # X position of minimum counts
+ MIN_Y = "MIN_Y" # Y position of minimum counts
+ MAX = "MAX_VALUE" # Maximum counts in any element
+ MAX_X = "MAX_X" # X position of maximum counts
+ MAX_Y = "MAX_Y" # Y position of maximum counts
+ MEAN = "MEAN_VALUE" # Mean counts of all elements
+ SIGMA = "SIGMA_VALUE" # Sigma of all elements
+ SUM = "TOTAL" # Sum of all elements
+ NET = "NET" # Sum of all elements not in background region
+
+
+with Anno("Dataset names"):
+ ANameArray = Array[str]
+with Anno("Filenames of HDF files relative to fileDir"):
+ AFilenameArray = Array[str]
+with Anno("Types of dataset"):
+ ATypeArray = Array[DatasetType]
+with Anno("Rank (number of dimensions) of the dataset"):
+ ARankArray = Array[np.int32]
+with Anno("Dataset paths within HDF files"):
+ APathArray = Array[str]
+with Anno("UniqueID array paths within HDF files"):
+ AUniqueIDArray = Array[str]
+UNameArray = Union[ANameArray, Sequence[str]]
+UFilenameArray = Union[AFilenameArray, Sequence[str]]
+UTypeArray = Union[ATypeArray, Sequence[DatasetType]]
+URankArray = Union[ARankArray, Sequence[np.int32]]
+UPathArray = Union[APathArray, Sequence[str]]
+UUniqueIDArray = Union[AUniqueIDArray, Sequence[str]]
+
+
+class DatasetTable(Table):
+ # This will be serialized so we need type to be called that
+ # noinspection PyShadowingBuiltins
+ def __init__(self,
+ name, # type: UNameArray
+ filename, # type: UFilenameArray
+ type, # type: UTypeArray
+ rank, # type: URankArray
+ path, # type: UPathArray
+ uniqueid, # type: UUniqueIDArray
+ ):
+ # type: (...) -> None
+ self.name = ANameArray(name)
+ self.filename = AFilenameArray(filename)
+ self.type = ATypeArray(type)
+ self.rank = ARankArray(rank)
+ self.path = APathArray(path)
+ self.uniqueid = AUniqueIDArray(uniqueid)
+
+
+class ADBaseActions(object):
+ def __init__(self, mri):
+ # type: (str) -> None
+ self.mri = mri
+ # When arrayCounter gets to here we are done
+ self.done_when_reaches = 0
+ # CompletedSteps = arrayCounter + self.uniqueid_offset
+ self.uniqueid_offset = 0
+ # A future that completes when detector start calls back
+ self.start_future = None # type: Future
+
+ def setup_detector_async(self, context, completed_steps, steps_to_do,
+ **kwargs):
+ # type: (Context, int, int, **Any) -> List[Future]
+ context.unsubscribe_all()
+ child = context.block_view(self.mri)
+ if completed_steps == 0:
+ # This is an initial configure, so reset arrayCounter to 0
+ array_counter = 0
+ self.done_when_reaches = steps_to_do
+ else:
+ # This is rewinding or setting up for another batch,
+ # skip to a uniqueId that has not been produced yet
+ array_counter = self.done_when_reaches
+ self.done_when_reaches += steps_to_do
+ self.uniqueid_offset = completed_steps - array_counter
+ for k, v in dict(
+ arrayCounter=array_counter,
+ imageMode="Multiple",
+ numImages=steps_to_do,
+ arrayCallbacks=True).items():
+ if k not in kwargs and k in child:
+ kwargs[k] = v
+ fs = child.put_attribute_values_async(kwargs)
+ return fs
+
+ def setup_detector(self, context, completed_steps, steps_to_do, **kwargs):
+ # type: (Context, int, int, **Any) -> None
+ fs = self.setup_detector_async(
+ context, completed_steps, steps_to_do, **kwargs)
+ context.wait_all_futures(fs)
+
+ def arm_detector(self, context):
+ # type: (Context) -> None
+ self.start_future = context.block_view(self.mri).start_async()
+
+ def wait_for_detector(self, context, registrar):
+ # type: (Context, PartRegistrar) -> None
+ child = context.block_view(self.mri)
+ child.arrayCounterReadback.subscribe_value(
+ self.update_completed_steps, registrar)
+ context.wait_all_futures(self.start_future)
+ # Now wait to make sure any update_completed_steps calls have come in
+ child.when_value_matches(
+ "arrayCounterReadback", self.done_when_reaches, timeout=5.0)
+
+ def abort_detector(self, context):
+ # type: (Context) -> None
+ child = context.block_view(self.mri)
+ child.stop()
+
+ def update_completed_steps(self, value, registrar):
+ # type: (int, PartRegistrar) -> None
+ completed_steps = value + self.uniqueid_offset
+ registrar.report(scanning.infos.RunProgressInfo(completed_steps))
diff --git a/malcolm/modules/ADPandABlocks/__init__.py b/malcolm/modules/ADPandABlocks/__init__.py
index e69de29bb..f6ea27363 100644
--- a/malcolm/modules/ADPandABlocks/__init__.py
+++ b/malcolm/modules/ADPandABlocks/__init__.py
@@ -0,0 +1 @@
+from . import parts
diff --git a/malcolm/modules/ADPandABlocks/blocks/pandablocks_runnable_block.yaml b/malcolm/modules/ADPandABlocks/blocks/pandablocks_runnable_block.yaml
index c5d3950e4..ad56514a8 100644
--- a/malcolm/modules/ADPandABlocks/blocks/pandablocks_runnable_block.yaml
+++ b/malcolm/modules/ADPandABlocks/blocks/pandablocks_runnable_block.yaml
@@ -1,9 +1,9 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
@@ -17,16 +17,16 @@
default: 8888
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- ADPandABlocks.controllers.PandABlocksRunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
hostname: $(hostname)
port: $(port)
- areaDetectorPrefix: $(pvPrefix):DRV
+ prefix: $(pv_prefix):DRV
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/ADPandABlocks/controllers/__init__.py b/malcolm/modules/ADPandABlocks/controllers/__init__.py
index 27951f390..b038c7f07 100644
--- a/malcolm/modules/ADPandABlocks/controllers/__init__.py
+++ b/malcolm/modules/ADPandABlocks/controllers/__init__.py
@@ -1,4 +1,6 @@
from .pandablocksrunnablecontroller import PandABlocksRunnableController
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/ADPandABlocks/controllers/pandablocksrunnablecontroller.py b/malcolm/modules/ADPandABlocks/controllers/pandablocksrunnablecontroller.py
index 758a8cfcf..69b60675c 100644
--- a/malcolm/modules/ADPandABlocks/controllers/pandablocksrunnablecontroller.py
+++ b/malcolm/modules/ADPandABlocks/controllers/pandablocksrunnablecontroller.py
@@ -1,24 +1,40 @@
-from malcolm.core import method_also_takes, REQUIRED, call_with_params, \
- snake_to_camel
+from annotypes import Anno
+
+from malcolm.core import snake_to_camel
from malcolm.modules.ADCore.includes import adbase_parts
-from malcolm.modules.ADCore.infos import attribute_dataset_types
-from malcolm.modules.ADPandABlocks.parts import PandABlocksDriverPart, \
- PandABlocksChildPart
+from malcolm.modules.ADCore.util import AttributeDatasetType
from malcolm.modules.builtin.controllers import StatefulController
from malcolm.modules.builtin.parts import StringPart, ChoicePart
-from malcolm.modules.builtin.vmetas import StringMeta, StringArrayMeta
-from malcolm.modules.pandablocks.controllers import PandABlocksManagerController
+from malcolm.modules.pandablocks.controllers import \
+ PandABlocksManagerController, AMri, AConfigDir, AHostname, APort, \
+ AInitialDesign, ADescription, AUseCothread, AUseGit
from malcolm.modules.scanning.controllers import RunnableController
+from ..parts import PandABlocksDriverPart, PandABlocksChildPart
+
+
+with Anno("Prefix for areaDetector records"):
+ APrefix = str
-@method_also_takes(
- "areaDetectorPrefix", StringMeta(
- "Prefix for areaDetector records"), REQUIRED,
- "axesToMove", StringArrayMeta("Default value for configure() axesToMove"),
- []
-)
class PandABlocksRunnableController(PandABlocksManagerController,
RunnableController):
+ def __init__(self,
+ mri, # type: AMri
+ config_dir, # type: AConfigDir
+ prefix, # type: APrefix
+ hostname="localhost", # type: AHostname
+ port=8888, # type: APort
+ initial_design="", # type: AInitialDesign
+ description="", # type: ADescription
+ use_cothread=True, # type: AUseCothread
+ use_git=True, # type: AUseGit
+ ):
+ # type: (...) -> None
+ super(PandABlocksRunnableController, self).__init__(
+ mri, config_dir, hostname, port, initial_design, description,
+ use_cothread, use_git)
+ self.prefix = prefix
+
def _make_child_controller(self, parts, mri):
# Add some extra parts to determine the dataset name and type for
# any CAPTURE field part
@@ -30,10 +46,10 @@ def _make_child_controller(self, parts, mri):
part_name = existing_part.name.replace(
".CAPTURE", ".DATASET_NAME")
attr_name = snake_to_camel(part_name.replace(".", "_"))
- new_parts.append(call_with_params(
- StringPart, name=attr_name, widget="textinput",
+ new_parts.append(StringPart(
+ name=attr_name,
description="Name of the captured dataset in HDF file",
- writeable=True, config=True))
+ writeable=True))
# Make a choice part to hold the type of the dataset
part_name = existing_part.name.replace(
".CAPTURE", ".DATASET_TYPE")
@@ -42,26 +58,29 @@ def _make_child_controller(self, parts, mri):
initial = "position"
else:
initial = "monitor"
- new_parts.append(call_with_params(
- ChoicePart, name=attr_name, widget="combo",
+ new_parts.append(ChoicePart(
+ name=attr_name,
description="Type of the captured dataset in HDF file",
- writeable=True, choices=attribute_dataset_types,
- initialValue=initial))
+ writeable=True, choices=list(AttributeDatasetType),
+ value=initial))
if mri.endswith("PCAP"):
- new_parts += call_with_params(
- adbase_parts, self.process,
- prefix=self.params.areaDetectorPrefix)
- controller = call_with_params(
- StatefulController, self.process, new_parts, mri=mri)
+ cs, ps = adbase_parts(prefix=self.prefix)
+ controller = StatefulController(mri=mri)
+ for p in new_parts + ps:
+ controller.add_part(p)
+ for c in cs:
+ self.process.add_controller(c)
else:
controller = super(PandABlocksRunnableController, self).\
_make_child_controller(new_parts, mri)
return controller
+
+
def _make_corresponding_part(self, block_name, mri):
if block_name == "PCAP":
- part_cls = PandABlocksDriverPart
+ part = PandABlocksDriverPart(name=block_name, mri=mri)
else:
- part_cls = PandABlocksChildPart
- part = call_with_params(part_cls, name=block_name, mri=mri)
+ part = PandABlocksChildPart(
+ name=block_name, mri=mri, stateful=False)
return part
diff --git a/malcolm/modules/ADPandABlocks/parts/__init__.py b/malcolm/modules/ADPandABlocks/parts/__init__.py
index 04c8e1526..61f3e1408 100644
--- a/malcolm/modules/ADPandABlocks/parts/__init__.py
+++ b/malcolm/modules/ADPandABlocks/parts/__init__.py
@@ -1,5 +1,7 @@
from .pandablocksdriverpart import PandABlocksDriverPart
from .pandablockschildpart import PandABlocksChildPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/ADPandABlocks/parts/pandablockschildpart.py b/malcolm/modules/ADPandABlocks/parts/pandablockschildpart.py
index cc2f24043..73d33d816 100644
--- a/malcolm/modules/ADPandABlocks/parts/pandablockschildpart.py
+++ b/malcolm/modules/ADPandABlocks/parts/pandablockschildpart.py
@@ -1,45 +1,53 @@
import re
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.ADCore.infos import NDAttributeDatasetInfo
-from malcolm.modules.builtin.parts import ChildPart
-
-
-class PandABlocksChildPart(ChildPart):
- # Stored futures
- start_future = None
-
- def _is_capture_field(self, child, attr_name):
- if attr_name.endswith("Capture"):
- attr = child[attr_name]
- if attr.value.lower() != "no":
- return True
-
- def _dataset_info(self, child, attr_name):
- dataset_name_attr = attr_name + "DatasetName"
- dataset_type_attr = attr_name + "DatasetType"
- if dataset_name_attr in child and dataset_type_attr in child:
- dataset_name = child[dataset_name_attr].value
- if dataset_name == "":
- return
- assert "." not in dataset_name, \
- "Dataset name should not contain '.'"
- dataset_type = child[dataset_type_attr].value
- uppercase_attr = re.sub("([A-Z])", r"_\1", attr_name).upper()
- return NDAttributeDatasetInfo(
- name=dataset_name,
- type=dataset_type,
- rank=2,
- attr="%s.%s" % (self.name, uppercase_attr))
-
- @RunnableController.ReportStatus
- def report_configuration(self, context):
+from annotypes import add_call_types
+
+from malcolm.core import Hook, PartRegistrar
+from malcolm.modules import scanning, ADCore, builtin
+from malcolm.modules.ADCore.util import AttributeDatasetType
+
+
+def is_capture_field(child, attr_name):
+ if attr_name.endswith("Capture"):
+ attr = child[attr_name]
+ if attr.value.lower() != "no":
+ return True
+
+
+def dataset_info(name, child, attr_name):
+ dataset_name_attr = attr_name + "DatasetName"
+ dataset_type_attr = attr_name + "DatasetType"
+ if dataset_name_attr in child and dataset_type_attr in child:
+ dataset_name = child[dataset_name_attr].value
+ if dataset_name == "":
+ return
+ assert "." not in dataset_name, \
+ "Dataset name should not contain '.'"
+ dataset_type = AttributeDatasetType(child[dataset_type_attr].value)
+ uppercase_attr = re.sub("([A-Z])", r"_\1", attr_name).upper()
+ return ADCore.infos.NDAttributeDatasetInfo(
+ name=dataset_name,
+ type=dataset_type,
+ rank=2,
+ attr="%s.%s" % (name, uppercase_attr))
+
+
+class PandABlocksChildPart(builtin.parts.ChildPart):
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(PandABlocksChildPart, self).setup(registrar)
+ self.register_hooked(scanning.hooks.ReportStatusHook,
+ self.report_status)
+
+ @add_call_types
+ def report_status(self, context):
+ # type: (scanning.hooks.AContext) -> scanning.hooks.UInfos
ret = []
- child = context.block_view(self.params.mri)
+ child = context.block_view(self.mri)
for attr_name in child:
- if self._is_capture_field(child, attr_name):
- dataset_info = self._dataset_info(
- child, attr_name[:-len("Capture")])
- if dataset_info:
- ret.append(dataset_info)
+ if is_capture_field(child, attr_name):
+ info = dataset_info(
+ self.name, child, attr_name[:-len("Capture")])
+ if info:
+ ret.append(info)
return ret
diff --git a/malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py b/malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
index 14e800227..ac22ca565 100644
--- a/malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
+++ b/malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
@@ -1,6 +1,10 @@
-from malcolm.modules.ADCore.parts import DetectorDriverPart
+from malcolm.modules import ADCore
from .pandablockschildpart import PandABlocksChildPart
-class PandABlocksDriverPart(DetectorDriverPart, PandABlocksChildPart):
- pass
+class PandABlocksDriverPart(ADCore.parts.DetectorDriverPart,
+ PandABlocksChildPart):
+ def __init__(self, name, mri):
+ # type: (ADCore.parts.APartName, ADCore.parts.AMri) -> None
+ super(PandABlocksDriverPart, self).__init__(
+ name, mri, main_dataset_useful=False)
diff --git a/malcolm/modules/ADSimDetector/blocks/sim_detector_driver_block.yaml b/malcolm/modules/ADSimDetector/blocks/sim_detector_driver_block.yaml
index a48c77eca..87f78191f 100644
--- a/malcolm/modules/ADSimDetector/blocks/sim_detector_driver_block.yaml
+++ b/malcolm/modules/ADSimDetector/blocks/sim_detector_driver_block.yaml
@@ -23,12 +23,11 @@
name: gainX
description: Gain in the X direction for generating image
pv: $(prefix):GainX
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CADoublePart:
name: gainY
description: Gain in the Y direction for generating image
pv: $(prefix):GainY
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
+
diff --git a/malcolm/modules/ADSimDetector/blocks/sim_detector_runnable_block.yaml b/malcolm/modules/ADSimDetector/blocks/sim_detector_runnable_block.yaml
index c3f8cb845..f8994b907 100644
--- a/malcolm/modules/ADSimDetector/blocks/sim_detector_runnable_block.yaml
+++ b/malcolm/modules/ADSimDetector/blocks/sim_detector_runnable_block.yaml
@@ -1,22 +1,22 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.parameters.string:
- name: initialDesign
+ name: initial_design
description: Design to load at init
default:
- builtin.parameters.string:
- name: drvSuffix
+ name: drv_suffix
description: PV suffix for detector driver
default: DET
@@ -24,34 +24,37 @@
value: |
Device Block corresponding to SimDetector + stat + pos + hdf writer.
- - Detector driver should have pv prefix $(pvPrefix):$(drvSuffix)
- - Pos should have pv prefix $(pvPrefix):POS
- - Stat should have pv prefix $(pvPrefix):STAT
- - HDF should have pv prefix $(pvPrefix):HDF5
+ - Detector driver should have pv prefix $(pv_prefix):$(drv_suffix)
+ - Pos should have pv prefix $(pv_prefix):POS
+ - Stat should have pv prefix $(pv_prefix):STAT
+ - HDF should have pv prefix $(pv_prefix):HDF5
- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
description: $(docstring)
- initialDesign: $(initialDesign)
+ initial_design: $(initial_design)
- ADSimDetector.blocks.sim_detector_driver_block:
- mri: $(mriPrefix):DRV
- prefix: $(pvPrefix):$(drvSuffix)
+ mri: $(mri_prefix):DRV
+ prefix: $(pv_prefix):$(drv_suffix)
-- ADSimDetector.parts.SimDetectorDriverPart:
+- ADCore.parts.DetectorDriverPart:
name: DRV
- mri: $(mriPrefix):DRV
- readoutTime: 40e-6
+ mri: $(mri_prefix):DRV
+ is_hardware_triggered: False
+
+- ADCore.parts.ExposureDeadtimePart:
+ name: DEADTIME
- ADCore.blocks.stats_plugin_block:
- mri: $(mriPrefix):STAT
- prefix: $(pvPrefix):STAT
+ mri: $(mri_prefix):STAT
+ prefix: $(pv_prefix):STAT
- ADCore.parts.StatsPluginPart:
name: STAT
- mri: $(mriPrefix):STAT
+ mri: $(mri_prefix):STAT
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/ADSimDetector/parts/__init__.py b/malcolm/modules/ADSimDetector/parts/__init__.py
deleted file mode 100644
index e377e8d45..000000000
--- a/malcolm/modules/ADSimDetector/parts/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from .simdetectordriverpart import SimDetectorDriverPart
-
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
\ No newline at end of file
diff --git a/malcolm/modules/ADSimDetector/parts/simdetectordriverpart.py b/malcolm/modules/ADSimDetector/parts/simdetectordriverpart.py
deleted file mode 100644
index fb1164dcd..000000000
--- a/malcolm/modules/ADSimDetector/parts/simdetectordriverpart.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from malcolm.modules.ADCore.parts import ExposureDetectorDriverPart
-
-
-class SimDetectorDriverPart(ExposureDetectorDriverPart):
- """Part for controlling a `sim_detector_driver_block` in a Device"""
-
- def is_hardware_triggered(self, child):
- return False
-
diff --git a/malcolm/modules/__init__.py b/malcolm/modules/__init__.py
index e0e368100..e69de29bb 100644
--- a/malcolm/modules/__init__.py
+++ b/malcolm/modules/__init__.py
@@ -1,5 +0,0 @@
-from malcolm.core import Importer
-
-__all__ = Importer().import_all_packages(__name__, __file__, globals())
-
-del Importer
diff --git a/malcolm/modules/adUtil/__init__.py b/malcolm/modules/adUtil/__init__.py
index e69de29bb..f6ea27363 100644
--- a/malcolm/modules/adUtil/__init__.py
+++ b/malcolm/modules/adUtil/__init__.py
@@ -0,0 +1 @@
+from . import parts
diff --git a/malcolm/modules/adUtil/blocks/reframe_plugin_block.yaml b/malcolm/modules/adUtil/blocks/reframe_plugin_block.yaml
index 59231f6ed..b4261e376 100644
--- a/malcolm/modules/adUtil/blocks/reframe_plugin_block.yaml
+++ b/malcolm/modules/adUtil/blocks/reframe_plugin_block.yaml
@@ -20,27 +20,26 @@
name: imageMode
description: Whether to take 1, many, or unlimited images at start
pv: $(prefix):TriggerMode
- widget: combo
+ rbv_suff: _RBV
- ca.parts.CALongPart:
name: numImages
description: Number of images to take if imageMode=Multiple
pv: $(prefix):TriggerCount
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CADoublePart:
name: postCount
description: How many samples to take after each trigger
pv: $(prefix):POSTCOUNT
- widget: textinput
+ rbv_suff: _RBV
- ca.parts.CAActionPart:
name: start
description: Demand for starting acquisition
pv: $(prefix):Capture
- statusPv: $(prefix):Mode_RBV
- goodStatus: Idle
+ status_pv: $(prefix):Mode_RBV
+ good_status: Idle
- ca.parts.CAActionPart:
name: stop
diff --git a/malcolm/modules/adUtil/parts/__init__.py b/malcolm/modules/adUtil/parts/__init__.py
index ebb50bf68..e3318d893 100644
--- a/malcolm/modules/adUtil/parts/__init__.py
+++ b/malcolm/modules/adUtil/parts/__init__.py
@@ -1,4 +1,6 @@
from .reframepluginpart import ReframePluginPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/adUtil/parts/reframepluginpart.py b/malcolm/modules/adUtil/parts/reframepluginpart.py
index 48489754c..ed0d94e54 100644
--- a/malcolm/modules/adUtil/parts/reframepluginpart.py
+++ b/malcolm/modules/adUtil/parts/reframepluginpart.py
@@ -1,35 +1,41 @@
-from malcolm.core import method_also_takes, method_takes
-from malcolm.modules.ADCore.parts import DetectorDriverPart, configure_args
-from malcolm.modules.ADCore.infos import NDArrayDatasetInfo
-from malcolm.modules.builtin.vmetas import NumberMeta
-from malcolm.modules.scanning.controllers import RunnableController
+from annotypes import Anno, add_call_types, Any
+from malcolm.modules import ADCore, scanning
-@method_also_takes(
- "sampleFreq", NumberMeta(
- "int32", "Sample frequency of ADC signal in Hz"), 10000)
-class ReframePluginPart(DetectorDriverPart):
+with Anno("Sample frequency of ADC signal in Hz"):
+ ASampleFreq = float
- @RunnableController.ReportStatus
- def report_configuration(self, context):
- infos = super(ReframePluginPart, self).report_configuration(
- context) + [NDArrayDatasetInfo(rank=2)]
- return infos
- @RunnableController.Validate
- @method_takes(*configure_args)
- def validate(self, context, part_info, params):
- exposure = params.generator.duration
+class ReframePluginPart(ADCore.parts.DetectorDriverPart):
+ def __init__(self, name, mri, sample_freq=10000.0):
+ # type: (ADCore.parts.APartName, ADCore.parts.AMri, ASampleFreq) -> None
+ super(ReframePluginPart, self).__init__(name, mri)
+ self.sample_freq = sample_freq
+ # Hooks
+ self.register_hooked(scanning.hooks.ValidateHook, self.validate)
+
+ @add_call_types
+ def validate(self, generator):
+ # type: (scanning.hooks.AGenerator) -> None
+ exposure = generator.duration
assert exposure > 0, \
- "Duration %s for generator must be >0 to signify constant exposure"\
+ "Duration %s for generator must be >0 to signify fixed exposure" \
% exposure
- nsamples = int(exposure * self.params.sampleFreq) - 1
+ nsamples = int(exposure * self.sample_freq) - 1
assert nsamples > 0, \
"Duration %s for generator gives < 1 ADC sample" % exposure
- def setup_detector(self, child, completed_steps, steps_to_do, params=None):
- fs = super(ReframePluginPart, self).setup_detector(
- child, completed_steps, steps_to_do, params)
- nsamples = int(params.generator.duration * self.params.sampleFreq) - 1
- fs.append(child.postCount.put_value_async(nsamples))
- return fs
+ @add_call_types
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ part_info, # type: scanning.hooks.APartInfo
+ generator, # type: scanning.hooks.AGenerator
+ **kwargs # type: **Any
+ ):
+ # type: (...) -> None
+ nsamples = int(generator.duration * self.sample_freq) - 1
+ super(ReframePluginPart, self).configure(
+ context, completed_steps, steps_to_do, part_info, generator,
+ postCount=nsamples, **kwargs)
diff --git a/malcolm/modules/aravisGigE/blocks/aravisGigE_driver_block.yaml b/malcolm/modules/aravisGigE/blocks/aravisGigE_driver_block.yaml
index 916f352cb..fd3352925 100644
--- a/malcolm/modules/aravisGigE/blocks/aravisGigE_driver_block.yaml
+++ b/malcolm/modules/aravisGigE/blocks/aravisGigE_driver_block.yaml
@@ -15,18 +15,3 @@
- ADCore.includes.adbase_parts:
prefix: $(prefix)
-
-- ca.parts.CADoublePart:
- name: exposure
- description: Exposure time for each frame
- pv: $(prefix):AcquireTime
- rbvSuff: _RBV
- widget: textinput
-
-- ca.parts.CADoublePart:
- name: acquirePeriod
- description: Duration of each frame including readout
- pv: $(prefix):AcquirePeriod
- rbvSuff: _RBV
- widget: textinput
-
diff --git a/malcolm/modules/aravisGigE/blocks/aravisGigE_runnable_block.yaml b/malcolm/modules/aravisGigE/blocks/aravisGigE_runnable_block.yaml
index b2bc789bd..df6f9ce03 100644
--- a/malcolm/modules/aravisGigE/blocks/aravisGigE_runnable_block.yaml
+++ b/malcolm/modules/aravisGigE/blocks/aravisGigE_runnable_block.yaml
@@ -1,46 +1,49 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.defines.docstring:
value: |
Device block corresponding to ADAndor + stat + pos + hdf writer.
- - Detector driver should have pv prefix $(pvPrefix):DET
- - Pos should have pv prefix $(pvPrefix):POS
- - Stat should have pv prefix $(pvPrefix):STAT
- - HDF should have pv prefix $(pvPrefix):HDF5
+ - Detector driver should have pv prefix $(pv_prefix):DET
+ - Pos should have pv prefix $(pv_prefix):POS
+ - Stat should have pv prefix $(pv_prefix):STAT
+ - HDF should have pv prefix $(pv_prefix):HDF5
- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
description: $(docstring)
-- ADAndor.blocks.andor_detector_driver_block:
- mri: $(mriPrefix):DRV
- prefix: $(pvPrefix):DET
+- aravisGigE.blocks.aravisGigE_driver_block:
+ mri: $(mri_prefix):DRV
+ prefix: $(pv_prefix):DET
-- ADCore.parts.ExposureDetectorDriverPart:
+- ADCore.parts.DetectorDriverPart:
name: DRV
- mri: $(mriPrefix):DRV
- readoutTime: 200e-6
+ mri: $(mri_prefix):DRV
+
+- ADCore.parts.ExposureDeadtimePart:
+ name: DEADTIME
+ initial_readout_time: 200e-6
- ADCore.blocks.stats_plugin_block:
- mri: $(mriPrefix):STAT
- prefix: $(pvPrefix):STAT
+ mri: $(mri_prefix):STAT
+ prefix: $(pv_prefix):STAT
- ADCore.parts.StatsPluginPart:
name: STAT
- mri: $(mriPrefix):STAT
+ mri: $(mri_prefix):STAT
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/asyn/__init__.py b/malcolm/modules/asyn/__init__.py
index e69de29bb..f6ea27363 100644
--- a/malcolm/modules/asyn/__init__.py
+++ b/malcolm/modules/asyn/__init__.py
@@ -0,0 +1 @@
+from . import parts
diff --git a/malcolm/modules/asyn/parts/__init__.py b/malcolm/modules/asyn/parts/__init__.py
index 82e37cd2b..c27a7c6cb 100644
--- a/malcolm/modules/asyn/parts/__init__.py
+++ b/malcolm/modules/asyn/parts/__init__.py
@@ -1,4 +1,6 @@
from .asynoutportpart import AsynOutportPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/asyn/parts/asynoutportpart.py b/malcolm/modules/asyn/parts/asynoutportpart.py
index 0ca839af0..5cd2ba0e9 100644
--- a/malcolm/modules/asyn/parts/asynoutportpart.py
+++ b/malcolm/modules/asyn/parts/asynoutportpart.py
@@ -1,30 +1,42 @@
-from malcolm.modules.builtin.controllers import StatefulController
-from malcolm.core import method_takes, REQUIRED
-from malcolm.modules.ca.parts import CAStringPart
-from malcolm.tags import port_types, outport, widget
-from malcolm.modules.builtin.vmetas import StringMeta, ChoiceMeta
-
-
-@method_takes(
- "name", StringMeta("Name of the created attribute"), REQUIRED,
- "description", StringMeta("Desc of created attribute"), REQUIRED,
- "rbv", StringMeta("Full pv of demand and default for rbv"), REQUIRED,
- "outport", ChoiceMeta("Outport type", port_types), REQUIRED)
-class AsynOutportPart(CAStringPart):
- def __init__(self, params):
- args = CAStringPart.MethodModel.prepare_call_args(
- name=params.name, description=params.description, rbv=params.rbv)
- super(AsynOutportPart, self).__init__(*args)
-
- def create_tags(self):
- tags = super(AsynOutportPart, self).create_tags()
- tags.append(widget("textupdate"))
- return tags
-
- @StatefulController.Reset
- def reset(self, context=None):
- super(AsynOutportPart, self).reset(context)
+from annotypes import Anno, Any
+
+from malcolm.core import Part, PartRegistrar, StringMeta, Port
+from malcolm.modules import ca
+
+
+with Anno("Outport type"):
+ AOutport = Port
+
+
+class AsynOutportPart(Part):
+ """Defines a string `Attribute` representing a asyn port that should be
+ depicted as an outport on a Block"""
+
+ def __init__(self,
+ name, # type: ca.util.APartName
+ description, # type: ca.util.AMetaDescription
+ rbv, # type: ca.util.ARbv
+ outport, # type: AOutport
+ group=None # type: ca.util.AGroup
+ ):
+ # type: (...) -> None
+ super(AsynOutportPart, self).__init__(name)
+ self.outport = outport
+ self.meta = StringMeta(description)
+ catools = ca.util.CaToolsHelper.instance()
+ self.caa = ca.util.CAAttribute(
+ self.meta, catools.DBR_STRING, rbv=rbv, group=group,
+ on_connect=self.update_tags)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked)
+
+ def update_tags(self, value):
+ # type: (Any) -> None
# Add the outport tags
- tags = [t for t in self.attr.meta.tags if not t.startswith("outport:")]
- tags.append(outport(self.outport_type, self.attr.value))
- self.attr.meta.set_tags(tags)
+ old_tags = self.meta.tags
+ new_tags = [t for t in old_tags if not t.startswith("outport:")]
+ new_tags.append(self.outport.outport_tag(connected_value=value))
+ if old_tags != new_tags:
+ self.meta.set_tags(new_tags)
diff --git a/malcolm/modules/builtin/__init__.py b/malcolm/modules/builtin/__init__.py
index 8b1378917..881586ccb 100644
--- a/malcolm/modules/builtin/__init__.py
+++ b/malcolm/modules/builtin/__init__.py
@@ -1 +1 @@
-
+from . import controllers, parts, defines, hooks, infos, parameters, util
diff --git a/malcolm/modules/builtin/blocks/proxyblock.py b/malcolm/modules/builtin/blocks/proxyblock.py
index 5e2761eb7..139a8e40a 100644
--- a/malcolm/modules/builtin/blocks/proxyblock.py
+++ b/malcolm/modules/builtin/blocks/proxyblock.py
@@ -1,16 +1,11 @@
-from malcolm.modules.builtin.controllers import ProxyController
-from malcolm.core import method_takes, REQUIRED
-from malcolm.modules.builtin.vmetas import StringMeta, BooleanMeta
+from annotypes import Any
+from malcolm.modules.builtin.controllers import ProxyController, AMri, AComms, \
+ APublish
-# This is done in python rather than YAML so that we can choose whether or not
-# to publish this block via the process
-@method_takes(
- "comms", StringMeta("MRI for the comms block"), REQUIRED,
- "mri", StringMeta("MRI for the client block"), REQUIRED,
- "publish", BooleanMeta("Whether to publish this block"), False
-)
-def proxy_block(process, params):
- controller = ProxyController(process, (), params)
- process.add_controller(params.mri, controller, params.publish)
- return controller
+
+# This is done in python rather than YAML so that we can re-use Annos
+def proxy_block(mri, comms, publish=False):
+ # type: (AMri, AComms, APublish) -> Any
+ controller = ProxyController(mri, comms, publish)
+ return [controller]
diff --git a/malcolm/modules/builtin/controllers/__init__.py b/malcolm/modules/builtin/controllers/__init__.py
index c778556ff..ddb483095 100644
--- a/malcolm/modules/builtin/controllers/__init__.py
+++ b/malcolm/modules/builtin/controllers/__init__.py
@@ -1,9 +1,12 @@
-from .basiccontroller import BasicController
-from .statefulcontroller import StatefulController, StatefulStates
-from .managercontroller import ManagerController, ManagerStates
+from .basiccontroller import BasicController, AMri, ADescription, AUseCothread
+from .statefulcontroller import StatefulController
+from .managercontroller import ManagerController, AConfigDir, AInitialDesign, \
+ AUseGit
from .clientcomms import ClientComms
-from .proxycontroller import ProxyController
+from .proxycontroller import ProxyController, AComms, APublish
from .servercomms import ServerComms
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/builtin/controllers/basiccontroller.py b/malcolm/modules/builtin/controllers/basiccontroller.py
index 68f26b01e..c80ae282b 100644
--- a/malcolm/modules/builtin/controllers/basiccontroller.py
+++ b/malcolm/modules/builtin/controllers/basiccontroller.py
@@ -1,13 +1,43 @@
-from malcolm.core import Controller, method_takes, REQUIRED
-from malcolm.modules.builtin.vmetas import StringMeta
+from malcolm.core import Controller, StringMeta, AMri, ADescription, \
+ AUseCothread, Widget
+from ..infos import TitleInfo, HealthInfo
-@method_takes(
- "mri", StringMeta("Malcolm resource id of created block"), REQUIRED,
- "description", StringMeta("Description for the created block"), "")
class BasicController(Controller):
- """Basic Controller"""
- def __init__(self, process, parts, params):
- self.params = params
- super(BasicController, self).__init__(
- process, params.mri, parts, params.description)
+ """Basic Controller with Health and Title updating"""
+ def __init__(self, mri, description="", use_cothread=True):
+ # type: (AMri, ADescription, AUseCothread) -> None
+ super(BasicController, self).__init__(mri, description, use_cothread)
+ self._faults = {} # Dict[Part, Alarm]
+ self.info_registry.add_reportable(TitleInfo, self.update_title)
+ self.info_registry.add_reportable(HealthInfo, self.update_health)
+ self.health = StringMeta(
+ "Displays OK or an error message", tags=[Widget.TEXTUPDATE.tag()]
+ ).create_attribute_model("OK")
+ self.field_registry.add_attribute_model("health", self.health)
+
+ def update_title(self, _, info):
+ # type: (object, TitleInfo) -> None
+ """Set the label of the Block Meta object"""
+ with self._lock:
+ self._block.meta.set_label(info.title)
+
+ def update_health(self, reporter, info):
+ # type: (object, HealthInfo) -> None
+ """Set the health attribute. Called from part"""
+ with self.changes_squashed:
+ alarm = info.alarm
+ if alarm.is_ok():
+ self._faults.pop(reporter, None)
+ else:
+ self._faults[reporter] = alarm
+ if self._faults:
+ # Sort them by severity
+ faults = sorted(self._faults.values(),
+ key=lambda a: a.severity.value)
+ alarm = faults[-1]
+ text = faults[-1].message
+ else:
+ alarm = None
+ text = "OK"
+ self.health.set_value(text, alarm=alarm)
diff --git a/malcolm/modules/builtin/controllers/managercontroller.py b/malcolm/modules/builtin/controllers/managercontroller.py
index ec6ba36d8..99555c57b 100644
--- a/malcolm/modules/builtin/controllers/managercontroller.py
+++ b/malcolm/modules/builtin/controllers/managercontroller.py
@@ -2,86 +2,55 @@
import subprocess
import numpy as np
+from annotypes import Anno, add_call_types, TYPE_CHECKING
from malcolm.compat import OrderedDict
-from malcolm.core import method_writeable_in, method_takes, Hook, Table, \
- json_encode, json_decode, method_also_takes, REQUIRED, Unsubscribe, \
- Subscribe, deserialize_object, Delta, Context, AttributeModel, Alarm, \
- AlarmSeverity, AlarmStatus
-from malcolm.modules.builtin.infos import LayoutInfo
-from malcolm.modules.builtin.vmetas import StringArrayMeta, NumberArrayMeta, \
- BooleanArrayMeta, TableMeta, StringMeta, ChoiceMeta, ChoiceArrayMeta, \
- BooleanMeta
-from malcolm.tags import widget, config
-from .statefulcontroller import StatefulController, StatefulStates
-
-
-class ManagerStates(StatefulStates):
- SAVING = "Saving"
- LOADING = "Loading"
-
- def create_block_transitions(self):
- super(ManagerStates, self).create_block_transitions()
- self.set_allowed(self.READY, self.SAVING)
- self.set_allowed(self.SAVING, self.READY)
- self.set_allowed(self.READY, self.LOADING)
- self.set_allowed(self.LOADING, self.READY)
-
+from malcolm.core import json_encode, json_decode, Unsubscribe, Subscribe, \
+ deserialize_object, Delta, Context, AttributeModel, Alarm, AlarmSeverity, \
+ AlarmStatus, Part, BooleanMeta, get_config_tag, Widget, ChoiceArrayMeta, \
+ TableMeta, serialize_object, ChoiceMeta, config_tag, Put, Request
+from malcolm.modules.builtin.infos import PortInfo
+from malcolm.modules.builtin.util import ManagerStates
+from ..hooks import LayoutHook, LoadHook, SaveHook
+from ..infos import LayoutInfo, PartExportableInfo, PartModifiedInfo
+from ..util import LayoutTable, ExportTable
+from .statefulcontroller import StatefulController, AMri, \
+ ADescription, AUseCothread
+
+if TYPE_CHECKING:
+ from typing import Dict, List, Set
ss = ManagerStates
-@method_also_takes(
- "configDir", StringMeta("Directory to write save/load config to"), REQUIRED,
- "initialDesign", StringMeta("Design to load at init"), "",
-)
+with Anno("Directory to write save/load config to"):
+ AConfigDir = str
+with Anno("Design to load at init"):
+ AInitialDesign = str
+with Anno("Use git to manage to saved config files"):
+ AUseGit = bool
+with Anno("Name of design to save, if different from current design"):
+ ASaveDesign = str
+
+
class ManagerController(StatefulController):
"""RunnableDevice implementer that also exposes GUI for child parts"""
- stateSet = ss()
-
- Layout = Hook()
- """Called when layout table set and at init to update child layout
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- part_info (dict): {part_name: [Info]} returned from Layout hook
- layout_table (Table): A possibly partial set of changes to the layout
- table that should be acted on
-
- Returns:
- [`LayoutInfo`] - the child layout resulting from this change
- """
-
- Load = Hook()
- """Called at load() or revert() to load child settings from a structure
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- structure (dict): {part_name: part_structure} where part_structure is
- the return from Save hook
- """
-
- Save = Hook()
- """Called at save() to serialize child settings into a dict structure
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
-
- Returns:
- dict: serialized version of the child that could be loaded from
- """
-
- # Attributes
- layout = None
- design = None
- exports = None
- modified = None
-
- def __init__(self, process, parts, params):
- super(ManagerController, self).__init__(process, parts, params)
+ state_set = ss()
+
+ def __init__(self,
+ mri, # type: AMri
+ config_dir, # type: AConfigDir
+ initial_design="", # type: AInitialDesign
+ description="", # type: ADescription
+ use_cothread=True, # type: AUseCothread
+ use_git=True, # type: AUseGit
+ ):
+ # type: (...) -> None
+ super(ManagerController, self).__init__(mri, description, use_cothread)
+ assert os.path.isdir(config_dir), "%s is not a directory" % config_dir
+ self.config_dir = config_dir
+ self.initial_design = initial_design
+ self.use_git = use_git
# last saved layout and exports
self.saved_visibility = None
self.saved_exports = None
@@ -90,110 +59,106 @@ def __init__(self, process, parts, params):
# [Subscribe]
self._subscriptions = []
# {part_name: [PortInfo]}
- self.port_info = {}
+ self.port_info = {} # type: Dict[str, List[PortInfo]]
# {part: [attr_name]}
self.part_exportable = {}
- # {part: Alarm}
- self.part_modified = {}
+ self.context_modified = {} # type: Dict[Part, Set[str]]
+ self.part_modified = {} # type: Dict[Part, PartModifiedInfo]
+ # The attributes our part has published
+ self.our_config_attributes = {} # type: Dict[str, AttributeModel]]
# Whether to do updates
self._do_update = True
+ # The reportable infos we are listening for
+ self.info_registry.add_reportable(
+ PartModifiedInfo, self.update_modified)
+ # Update queue of exportable fields
+ self.info_registry.add_reportable(
+ PartExportableInfo, self.update_exportable)
+ # Create a layout table attribute for setting block positions
+ self.layout = TableMeta.from_table(
+ LayoutTable, "Layout of child blocks", Widget.FLOWGRAPH,
+ writeable=["x", "y", "visible"]
+ ).create_attribute_model()
+ self.set_writeable_in(self.layout, ss.READY)
+ self.field_registry.add_attribute_model(
+ "layout", self.layout, self.set_layout)
+ # Create a design attribute for loading an existing layout
+ self.design = ChoiceMeta(
+ "Design name to load", tags=[config_tag(), Widget.COMBO.tag()]
+ ).create_attribute_model()
+ self.field_registry.add_attribute_model(
+ "design", self.design, self.set_design)
+ self.set_writeable_in(self.design, ss.READY)
+ # Create an export table for mirroring exported fields
+ self.exports = TableMeta.from_table(
+ ExportTable, "Exported fields of child blocks",
+ writeable=list(ExportTable.call_types)
+ ).create_attribute_model()
+ # Overwrite the sources meta to be a ChoiceArrayMeta
+ self.exports.meta.elements["source"] = ChoiceArrayMeta(
+ "Name of the block.field to export", tags=[Widget.COMBO.tag()])
+ self.set_writeable_in(self.exports, ss.READY)
+ self.field_registry.add_attribute_model(
+ "exports", self.exports, self.set_exports)
+ # Create read-only indicator for when things are modified
+ self.modified = BooleanMeta(
+ "Whether the design is modified", tags=[Widget.LED.tag()]
+ ).create_attribute_model()
+ self.field_registry.add_attribute_model("modified", self.modified)
+ # Create the save method
+ self.set_writeable_in(
+ self.field_registry.add_method_model(self.save), ss.READY)
def _run_git_cmd(self, *args):
# Run git command, don't care if it fails, logging the output
- try:
- output = subprocess.check_output(
- ("git",) + args, cwd=self.params.configDir)
- except subprocess.CalledProcessError as e:
- self.log.warning("Git command failed: %s\n%s", e, e.output)
- else:
- self.log.debug("Git command completed: %s", output)
-
- def create_attribute_models(self):
- for data in super(ManagerController, self).create_attribute_models():
- yield data
- assert os.path.isdir(self.params.configDir), \
- "%s is not a directory" % self.params.configDir
- if not os.path.isdir(os.path.join(self.params.configDir, ".git")):
- # Try and make it a git repo, don't care if it fails
- self._run_git_cmd("init")
- self._run_git_cmd("commit", "--allow-empty", "-m", "Created repo")
- # Create writeable attribute table for the layout info we need
- elements = OrderedDict()
- elements["name"] = StringArrayMeta("Name of layout part")
- elements["mri"] = StringArrayMeta("Malcolm full name of child block")
- elements["x"] = NumberArrayMeta(
- "float64", "X Coordinate of child block")
- elements["y"] = NumberArrayMeta(
- "float64", "Y Coordinate of child block")
- elements["visible"] = BooleanArrayMeta("Whether child block is visible")
- layout_table_meta = TableMeta(
- "Layout of child blocks", elements=elements,
- tags=[widget("flowgraph")])
- layout_table_meta.set_writeable_in(ss.READY)
- self.layout = layout_table_meta.create_attribute_model()
- yield "layout", self.layout, self.set_layout
- # Create writeable attribute for loading an existing layout
- design_meta = ChoiceMeta(
- "Design name to load", tags=[config(), widget("combo")])
- design_meta.set_writeable_in(ss.READY)
- self.design = design_meta.create_attribute_model()
- yield "design", self.design, self.set_design
- # Create writeable attribute table for the exported fields
- elements = OrderedDict()
- elements["name"] = ChoiceArrayMeta("Name of exported block.field")
- elements["exportName"] = StringArrayMeta(
- "Name of the field within current block")
- exports_table_meta = TableMeta(
- "Exported fields of child blocks", tags=[widget("table")],
- elements=elements)
- exports_table_meta.set_writeable_in(ss.READY)
- self.exports = exports_table_meta.create_attribute_model()
- yield "exports", self.exports, self.set_exports
- # Create read-only indicator for when things are modified
- modified_meta = BooleanMeta(
- "Whether the design is modified", tags=[widget("led")])
- self.modified = modified_meta.create_attribute_model()
- yield "modified", self.modified, None
+ if self.use_git and os.path.isdir(
+ os.path.join(self.config_dir, ".git")):
+ try:
+ output = subprocess.check_output(
+ ("git",) + args, cwd=self.config_dir)
+ except subprocess.CalledProcessError as e:
+ self.log.warning("Git command failed: %s\n%s", e, e.output)
+ else:
+ self.log.debug("Git command completed: %s", output)
def do_init(self):
- # This will do an initial poll of the exportable parts,
- # so don't update here
super(ManagerController, self).do_init()
- # List the configDir and add to choices
+ # Try and make it a git repo, don't care if it fails
+ self._run_git_cmd("init")
+ self._run_git_cmd("commit", "--allow-empty", "-m", "Created repo")
+ # List the config_dir and add to choices
self._set_layout_names()
- # This will trigger all parts to report their layout, making sure
- # the layout table has a valid value. This will also call
- # self._update_block_endpoints()
- self.set_layout(Table(self.layout.meta))
# If given a default config, load this
- if self.params.initialDesign:
- self.do_load(self.params.initialDesign)
+ if self.initial_design:
+ self.do_load(self.initial_design)
+ else:
+ # This will trigger all parts to report their layout, making sure
+ # the layout table has a valid value. This will also call
+ # self._update_block_endpoints()
+ self.set_layout(LayoutTable([], [], [], [], []))
def set_layout(self, value):
"""Set the layout table value. Called on attribute put"""
- # If it isn't a table, make it one
- if not isinstance(value, Table):
- value = Table(self.layout.meta, value)
# Can't do this with changes_squashed as it will call update_modified
- # from another thread and deadlock
- part_info = self.run_hook(
- self.Layout, self.create_part_contexts(only_visible=False),
- self.port_info, value)
+ # from another thread and deadlock. Need RLock.is_owned() from update_*
+ part_info = self.run_hooks(
+ LayoutHook(p, c, self.port_info, value)
+ for p, c in self.create_part_contexts(only_visible=False).items())
with self.changes_squashed:
- layout_table = Table(self.layout.meta)
layout_parts = LayoutInfo.filter_parts(part_info)
- for name, layout_infos in layout_parts.items():
- assert len(layout_infos) == 1, \
- "%s returned more than 1 layout infos" % name
- layout_parts[name] = layout_infos[0]
- layout_table.name = list(layout_parts)
- layout_table.mri = [i.mri for i in layout_parts.values()]
- layout_table.x = [i.x for i in layout_parts.values()]
- layout_table.y = [i.y for i in layout_parts.values()]
- layout_table.visible = [i.visible for i in layout_parts.values()]
+ name, mri, x, y, visible = [], [], [], [], []
+ for part_name, layout_infos in layout_parts.items():
+ for layout_info in layout_infos:
+ name.append(part_name)
+ mri.append(layout_info.mri)
+ x.append(layout_info.x)
+ y.append(layout_info.y)
+ visible.append(layout_info.visible)
+ layout_table = LayoutTable(name, mri, x, y, visible)
try:
+ # Compare the Array seq to get at the numpy array
np.testing.assert_equal(
- layout_table.visible, self.layout.value.visible)
+ layout_table.visible.seq, self.layout.value.visible.seq)
except AssertionError:
visibility_changed = True
else:
@@ -203,40 +168,49 @@ def set_layout(self, value):
# First write of table, set layout and exports saves
self.saved_visibility = layout_table.visible
self.saved_exports = self.exports.value.to_dict()
+ # Force visibility changed so we update_block_endpoints
+ # even if there weren't any visible
+ visibility_changed = True
if visibility_changed:
self.update_modified()
self.update_exportable()
# Part visibility changed, might have attributes or methods
# that we need to hide or show
- self._update_block_endpoints()
+ self.update_block_endpoints()
def set_exports(self, value):
with self.changes_squashed:
self.exports.set_value(value)
self.update_modified()
- self._update_block_endpoints()
+ self.update_block_endpoints()
- def update_modified(self, part=None, alarm=None):
+ def update_modified(self, part=None, info=None):
+ # type: (Part, PartModifiedInfo) -> None
with self.changes_squashed:
- # Update the alarm for the given part
if part:
- self.part_modified[part] = alarm
+ # Update the alarm for the given part
+ self.part_modified[part] = info
# Find the modified alarms for each visible part
message_list = []
only_modified_by_us = True
for part_name, visible in zip(
self.layout.value.name, self.layout.value.visible):
- if visible:
- alarm = self.part_modified.get(self.parts[part_name], None)
- if alarm:
- # Part flagged as been modified, is it by us?
- if alarm.severity:
+ part = self.parts[part_name]
+ info = self.part_modified.get(part, None)
+ if visible and info:
+ for name, message in sorted(info.modified.items()):
+ # Attribute flagged as been modified, is it by the
+ # context we passed to the part?
+ if name in self.context_modified.get(part, {}):
+ message = "(We modified) %s" % (message,)
+ else:
only_modified_by_us = False
- message_list.append(alarm.message)
+ message_list.append(message)
# Add in any modification messages from the layout and export tables
try:
+ # Compare the Array seq to get at the numpy array
np.testing.assert_equal(
- self.layout.value.visible, self.saved_visibility)
+ self.layout.value.visible.seq, self.saved_visibility)
except AssertionError:
message_list.append("layout changed")
only_modified_by_us = False
@@ -257,11 +231,12 @@ def update_modified(self, part=None, alarm=None):
else:
self.modified.set_value(False)
- def update_exportable(self, part=None, fields=None, port_infos=None):
+ def update_exportable(self, part=None, info=None):
+ # type: (Part, PartExportableInfo) -> None
with self.changes_squashed:
if part:
- self.part_exportable[part] = fields
- self.port_info[part.name] = port_infos
+ self.part_exportable[part] = info.names
+ self.port_info[part.name] = info.port_infos
# Find the exportable fields for each visible part
names = []
for part in self.parts.values():
@@ -269,16 +244,16 @@ def update_exportable(self, part=None, fields=None, port_infos=None):
for attr_name in fields:
names.append("%s.%s" % (part.name, attr_name))
changed_names = set(names).symmetric_difference(
- self.exports.meta.elements["name"].choices)
+ self.exports.meta.elements["source"].choices)
changed_exports = changed_names.intersection(
- self.exports.value.name)
- self.exports.meta.elements["name"].set_choices(names)
+ self.exports.value.source)
+ self.exports.meta.elements["source"].set_choices(names)
# Update the block endpoints if anything currently exported is
# added or deleted
if changed_exports:
- self._update_block_endpoints()
+ self.update_block_endpoints()
- def _update_block_endpoints(self):
+ def update_block_endpoints(self):
if self._current_part_fields:
for name, child, _ in self._current_part_fields:
self._block.remove_endpoint(name)
@@ -288,15 +263,37 @@ def _update_block_endpoints(self):
for name, child, writeable_func in self._current_part_fields:
self.add_block_field(name, child, writeable_func)
- def initial_part_fields(self):
- # Don't return any fields to start with, these will be added on load()
- return iter(())
+ def notify_dispatch_request(self, request, part):
+ # type: (Request, Part) -> None
+ """Will be called when a context passed to a hooked function is about
+ to dispatch a request"""
+ if isinstance(request, Put):
+ # This means the context we were passed has just made a Put request
+ # so mark the field as "we_modified" so it doesn't screw up the
+ # modified led
+ self.context_modified.setdefault(part, set()).add(request.path[-2])
+
+ def add_initial_part_fields(self):
+ # Only add our own fields to start with, the rest will be added on load
+ for name, child, writeable_func in self.field_registry.fields[None]:
+ self.add_block_field(name, child, writeable_func)
+ for part in self.parts.values():
+ for name, field, writeable_func in self.field_registry.fields.get(
+ part, []):
+ if isinstance(field, AttributeModel):
+ tag = get_config_tag(field.meta.tags)
+ if tag:
+ # Strip off the "config" tags from attributes
+ field.meta.set_tags(
+ [x for x in field.meta.tags if x != tag])
+ self.our_config_attributes[name] = field
def _get_current_part_fields(self):
# Clear out the current subscriptions
for subscription in self._subscriptions:
controller = self.process.get_controller(subscription.path[0])
- unsubscribe = Unsubscribe(subscription.id, subscription.callback)
+ unsubscribe = Unsubscribe(subscription.id)
+ unsubscribe.set_callback(subscription.callback)
controller.handle_request(unsubscribe)
self._subscriptions = []
@@ -313,15 +310,14 @@ def _get_current_part_fields(self):
invisible.add(part_name)
# Add fields from parts that aren't invisible
- for part_name in self.parts:
+ for part_name, part in self.parts.items():
if part_name not in invisible:
- for data in self.part_fields[part_name]:
+ for data in self.field_registry.fields.get(part, []):
yield data
# Add exported fields from visible parts
- for name, export_name in zip(
- self.exports.value.name, self.exports.value.exportName):
- part_name, attr_name = name.rsplit(".", 1)
+ for source, export_name in self.exports.value.rows():
+ part_name, attr_name = source.rsplit(".", 1)
part = self.parts[part_name]
# If part is visible, get its mri
mri = mris.get(part_name, None)
@@ -361,7 +357,8 @@ def setter(*args):
ob = ob[p]
getattr(ob, "set_%s" % cp[-1])(value)
- subscription = Subscribe(path=path, delta=True, callback=update_field)
+ subscription = Subscribe(path=path, delta=True)
+ subscription.set_callback(update_field)
self._subscriptions.append(subscription)
# When we have waited for the subscription, the first update_field
# will have been called
@@ -373,68 +370,65 @@ def create_part_contexts(self, only_visible=True):
if only_visible:
for part_name, visible in zip(
self.layout.value.name, self.layout.value.visible):
+ part = self.parts[part_name]
if not visible:
- part_contexts.pop(self.parts[part_name])
+ part_contexts.pop(part)
+ else:
+ part_contexts[part].set_notify_dispatch_request(
+ self.notify_dispatch_request, part)
+
return part_contexts
- @method_writeable_in(ss.READY)
- @method_takes(
- "design", StringMeta(
- "Name of design to save, if different from current design"), "")
- def save(self, params):
+ @add_call_types
+ def save(self, design=""):
+ # type: (ASaveDesign) -> None
"""Save the current design to file"""
- self.try_stateful_function(
- ss.SAVING, ss.READY, self.do_save, params.design)
+ self.try_stateful_function(ss.SAVING, ss.READY, self.do_save, design)
def do_save(self, design=""):
if not design:
design = self.design.value
assert design, "Please specify save design name when saving from new"
structure = OrderedDict()
+ attributes = structure.setdefault("attributes", OrderedDict())
# Add the layout table
- part_layouts = {}
- for name, x, y, visible in sorted(
- zip(self.layout.value.name, self.layout.value.x,
- self.layout.value.y, self.layout.value.visible)):
+ layout = attributes.setdefault("layout", OrderedDict())
+ for name, mri, x, y, visible in self.layout.value.rows():
layout_structure = OrderedDict()
layout_structure["x"] = x
layout_structure["y"] = y
layout_structure["visible"] = visible
- part_layouts[name] = layout_structure
- structure["layout"] = OrderedDict()
- for part_name in self.parts:
- if part_name in part_layouts:
- structure["layout"][part_name] = part_layouts[part_name]
+ layout[name] = layout_structure
# Add the exports table
- structure["exports"] = OrderedDict()
- for name, export_name in sorted(
- zip(self.exports.value.name, self.exports.value.exportName)):
- structure["exports"][name] = export_name
+ exports = attributes.setdefault("exports", OrderedDict())
+ for source, export in self.exports.value.rows():
+ exports[source] = export
+ # Add other attributes
+ for name, attribute in self.our_config_attributes.items():
+ attributes[name] = serialize_object(attribute.value)
# Add any structure that a child part wants to save
- part_structures = self.run_hook(
- self.Save, self.create_part_contexts(only_visible=False))
- for part_name, part_structure in sorted(part_structures.items()):
- structure[part_name] = part_structure
+ structure["children"] = self.run_hooks(
+ SaveHook(p, c)
+ for p, c in self.create_part_contexts(only_visible=False).items())
text = json_encode(structure, indent=2)
filename = self._validated_config_filename(design)
with open(filename, "w") as f:
f.write(text)
- if os.path.isdir(os.path.join(self.params.configDir, ".git")):
- # Try and commit the file to git, don't care if it fails
- self._run_git_cmd("add", filename)
- msg = "Saved %s %s" % (self.mri, design)
- self._run_git_cmd("commit", "--allow-empty", "-m", msg, filename)
+ # Try and commit the file to git, don't care if it fails
+ self._run_git_cmd("add", filename)
+ msg = "Saved %s %s" % (self.mri, design)
+ self._run_git_cmd("commit", "--allow-empty", "-m", msg, filename)
self._mark_clean(design)
def _set_layout_names(self, extra_name=None):
names = [""]
- if extra_name:
- names.append(extra_name)
dir_name = self._make_config_dir()
for f in os.listdir(dir_name):
if os.path.isfile(
os.path.join(dir_name, f)) and f.endswith(".json"):
names.append(f.split(".json")[0])
+ if extra_name and str(extra_name) not in names:
+ names.append(str(extra_name))
self.design.meta.set_choices(names)
def _validated_config_filename(self, name):
@@ -444,14 +438,14 @@ def _validated_config_filename(self, name):
name (str): Filename without dir or extension
Returns:
- str: Full path including extensio
+ str: Full path including extension
"""
dir_name = self._make_config_dir()
filename = os.path.join(dir_name, name.split(".json")[0] + ".json")
return filename
def _make_config_dir(self):
- dir_name = os.path.join(self.params.configDir, self.mri)
+ dir_name = os.path.join(self.config_dir, self.mri)
try:
os.mkdir(dir_name)
except OSError:
@@ -465,26 +459,40 @@ def set_design(self, value):
ss.LOADING, ss.READY, self.do_load, value)
def do_load(self, design):
- filename = self._validated_config_filename(design)
- with open(filename, "r") as f:
- text = f.read()
- structure = json_decode(text)
+ if design:
+ filename = self._validated_config_filename(design)
+ with open(filename, "r") as f:
+ text = f.read()
+ structure = json_decode(text)
+ else:
+ structure = {}
+ # Attributes and Children used to be merged, support this
+ attributes = structure.get("attributes", structure)
+ children = structure.get("children", structure)
# Set the layout table
- layout_table = Table(self.layout.meta)
- for part_name, part_structure in structure.get("layout", {}).items():
- layout_table.append([
- part_name, "", part_structure["x"], part_structure["y"],
- part_structure["visible"]])
- self.set_layout(layout_table)
+ name, mri, x, y, visible = [], [], [], [], []
+ for part_name, d in attributes.get("layout", {}).items():
+ name.append(part_name)
+ mri.append("")
+ x.append(d["x"])
+ y.append(d["y"])
+ visible.append(d["visible"])
+ self.set_layout(LayoutTable(name, mri, x, y, visible))
# Set the exports table
- exports_table = Table(self.exports.meta)
- for name, export_name in structure.get("exports", {}).items():
- exports_table.append([name, export_name])
- self.exports.set_value(exports_table)
+ source, export = [], []
+ for source_name, export_name in attributes.get("exports", {}).items():
+ source.append(source_name)
+ export.append(export_name)
+ self.exports.set_value(ExportTable(source, export))
+ # Set other attributes
+ our_values = {k: v for k, v in attributes.items()
+ if k in self.our_config_attributes}
+ block = self.make_view()
+ block.put_attribute_values(our_values)
# Run the load hook to get parts to load their own structure
- self.run_hook(self.Load,
- self.create_part_contexts(only_visible=False),
- structure)
+ self.run_hooks(
+ LoadHook(p, c, children.get(p.name, {}))
+ for p, c in self.create_part_contexts(only_visible=False).items())
self._mark_clean(design)
def _mark_clean(self, design):
@@ -492,8 +500,8 @@ def _mark_clean(self, design):
self.saved_visibility = self.layout.value.visible
self.saved_exports = self.exports.value.to_dict()
# Now we are clean, modified should clear
- self.part_modified = OrderedDict()
+ self.part_modified = {}
self.update_modified()
self._set_layout_names(design)
self.design.set_value(design)
- self._update_block_endpoints()
+ self.update_block_endpoints()
diff --git a/malcolm/modules/builtin/controllers/proxycontroller.py b/malcolm/modules/builtin/controllers/proxycontroller.py
index be6eabfbf..ded2c630b 100644
--- a/malcolm/modules/builtin/controllers/proxycontroller.py
+++ b/malcolm/modules/builtin/controllers/proxycontroller.py
@@ -1,33 +1,48 @@
-from malcolm.core import Post, Subscribe, Put, Controller, method_takes, \
- REQUIRED, Alarm, Process, Unsubscribe, Delta, Queue
-from malcolm.modules.builtin.vmetas import StringMeta
+from annotypes import Anno
+from malcolm.core import Post, Subscribe, Put, Hook, Alarm, Unsubscribe, \
+ Delta, Queue, ProcessStopHook, ProcessStartHook, deserialize_object, \
+ Response, UnpublishedInfo, UUnpublishedInfos
+from .basiccontroller import BasicController, AMri
+from ..infos import HealthInfo
-@method_takes(
- "comms", StringMeta("Malcolm resource id of client comms"), REQUIRED,
- "mri", StringMeta("Malcolm resource id of created block"), REQUIRED)
-class ProxyController(Controller):
+with Anno("Malcolm resource id of client comms"):
+ AComms = str
+with Anno("Whether to re-publish this block via server comms"):
+ APublish = bool
+
+
+class ProxyController(BasicController):
"""Sync a local block with a given remote block"""
- def __init__(self, process, parts, params):
- self.params = params
- super(ProxyController, self).__init__(process, params.mri, parts)
- self.client_comms = process.get_controller(params.comms)
- self.update_health(self, Alarm.invalid("Uninitialized"))
+
+ def __init__(self, mri, comms, publish=False):
+ # type: (AMri, AComms, APublish) -> None
+ super(ProxyController, self).__init__(mri)
+ self.comms = comms
+ self.publish = publish
+ self.client_comms = None
+ self.update_health(self, HealthInfo(Alarm.invalid("Uninitialized")))
self._response_queue = Queue()
self._notify_response = True
self._first_response_queue = Queue()
+ # Hooks
+ self.register_hooked(ProcessStartHook, self.init)
+ self.register_hooked(ProcessStopHook, self.halt)
- @Process.Init
def init(self):
- subscribe = Subscribe(
- path=[self.params.mri], delta=True, callback=self.handle_response)
+ # type: () -> UUnpublishedInfos
+ self.client_comms = self.process.get_controller(self.comms)
+ subscribe = Subscribe(path=[self.mri], delta=True)
+ subscribe.set_callback(self.handle_response)
self.client_comms.send_to_server(subscribe)
# Wait until connected
self._first_response_queue.get(timeout=5)
+ if not self.publish:
+ return UnpublishedInfo(self.mri)
- @Process.Halt
def halt(self):
- unsubscribe = Unsubscribe(callback=self.handle_response)
+ unsubscribe = Unsubscribe()
+ unsubscribe.set_callback(self.handle_response)
self.client_comms.send_to_server(unsubscribe)
def handle_request(self, request):
@@ -38,46 +53,61 @@ def handle_request(self, request):
return super(ProxyController, self).handle_request(request)
def handle_response(self, response):
+ # type: (Response) -> None
self._response_queue.put(response)
- return self.spawn(self._handle_response)
+ self.spawn(self._handle_response)
def _handle_response(self):
with self.changes_squashed:
if self._notify_response:
self._first_response_queue.put(True)
self._notify_response = False
- response = self._response_queue.get(timeout=0)
+ response = self._response_queue.get(timeout=0) # type: Response
if not isinstance(response, Delta):
# Return or Error is the end of our subscription, log and ignore
self.log.debug("Proxy got response %r", response)
return
for change in response.changes:
- path = change[0]
- if len(path) == 0:
- assert len(change) == 2, \
- "Can't delete root block with change %r" % (change,)
- self._regenerate_block(change[1])
- elif len(path) == 1 and path[0] not in ("health", "meta"):
- if len(change) == 1:
- # Delete a field
- self._block.remove_endpoint(path[1])
- else:
- # Change a single field of the block
- self._block.set_endpoint_data(path[1], change[1])
- elif len(path) == 2 and path[:1] == ["health", "alarm"]:
- # If we got an alarm update for health
- assert len(change) == 2, "Can't delete health alarm"
- self.update_health(self, change[1])
- elif path[0] not in ("health", "meta"):
- # Update a child of the block
- assert len(change) == 2, \
- "Can't delete entries in Attributes or Methods"
- ob = self._block
- for p in path[:-1]:
- ob = ob[p]
- getattr(ob, "set_%s" % path[-1])(change[1])
- else:
- raise ValueError("Bad response %s" % response)
+ try:
+ self._handle_change(change)
+ except Exception:
+ self.log.exception("Error handling %s", response)
+ raise
+
+ def _handle_change(self, change):
+ path = change[0]
+ if len(path) == 0:
+ assert len(change) == 2, \
+ "Can't delete root block with change %r" % (change,)
+ self._regenerate_block(change[1])
+ elif len(path) == 1 and path[0] not in ("health", "meta"):
+ if len(change) == 1:
+ # Delete a field
+ self._block.remove_endpoint(path[1])
+ else:
+ # Change a single field of the block
+ self._block.set_endpoint_data(path[1], change[1])
+ elif path[0] not in ("health", "meta"):
+ # Update a child of the block
+ assert len(change) == 2, \
+ "Can't delete entries in Attributes or Methods"
+ ob = self._block
+ for p in path[:-1]:
+ ob = ob[p]
+ # special case attribute values and timeStamps
+ if len(path) == 2 and path[-1] == "value":
+ ob.set_value(change[1], set_alarm_ts=False)
+ elif len(path) == 2 and path[-1] == "timeStamp":
+ ob.set_ts(change[1])
+ else:
+ getattr(ob, "set_%s" % path[-1])(change[1])
+ elif len(path) == 2 and path[:1] == ["health", "alarm"]:
+ # If we got an alarm update for health
+ assert len(change) == 2, "Can't delete health alarm"
+ alarm = deserialize_object(change[1], Alarm)
+ self.update_health(self, HealthInfo(alarm))
+ else:
+ raise ValueError("Bad change %s" % (change,))
def _regenerate_block(self, d):
for field in list(self._block):
@@ -85,10 +115,8 @@ def _regenerate_block(self, d):
self._block.remove_endpoint(field)
for field, value in d.items():
if field == "health":
- if value["alarm"]["severity"]:
- self.update_health(self, value["alarm"])
- else:
- self.update_health(self, None)
+ alarm = deserialize_object(value["alarm"], Alarm)
+ self.update_health(self, HealthInfo(alarm))
elif field == "meta":
# TODO: set meta
pass
diff --git a/malcolm/modules/builtin/controllers/servercomms.py b/malcolm/modules/builtin/controllers/servercomms.py
index c1189603c..ea6ca0fe3 100644
--- a/malcolm/modules/builtin/controllers/servercomms.py
+++ b/malcolm/modules/builtin/controllers/servercomms.py
@@ -1,6 +1,18 @@
-from .statefulcontroller import StatefulController
+from malcolm.core import Part
+from .statefulcontroller import StatefulController, AMri, ADescription, \
+ AUseCothread
+from ..infos import RequestInfo
class ServerComms(StatefulController):
"""Abstract class for dealing with requests from outside"""
- pass
+ def __init__(self, mri, description="", use_cothread=True):
+ # type: (AMri, ADescription, AUseCothread) -> None
+ super(ServerComms, self).__init__(mri, description, use_cothread)
+ self.info_registry.add_reportable(
+ RequestInfo, self.update_request_received)
+
+ def update_request_received(self, _, info):
+ # type: (Part, RequestInfo) -> None
+ controller = self.process.get_controller(info.mri)
+ controller.handle_request(info.request).get()
diff --git a/malcolm/modules/builtin/controllers/statefulcontroller.py b/malcolm/modules/builtin/controllers/statefulcontroller.py
index 453eacd60..b918a487d 100644
--- a/malcolm/modules/builtin/controllers/statefulcontroller.py
+++ b/malcolm/modules/builtin/controllers/statefulcontroller.py
@@ -1,70 +1,17 @@
-from malcolm.compat import OrderedDict
-from malcolm.core import Hook, method_writeable_in, method_takes, Alarm, \
- MethodModel, AttributeModel, Process
-from malcolm.modules.builtin.vmetas import ChoiceMeta
-from .basiccontroller import BasicController
-
-
-class StatefulStates(object):
- """The most basic Malcolm state machine"""
-
- RESETTING = "Resetting"
- DISABLED = "Disabled"
- DISABLING = "Disabling"
- FAULT = "Fault"
- READY = "Ready"
-
- def __init__(self):
- self._allowed = OrderedDict()
- # These are all the states we can possibly be in
- self.possible_states = []
- self.create_block_transitions()
- self.create_error_disable_transitions()
-
- def create_block_transitions(self):
- self.set_allowed(self.RESETTING, self.READY)
-
- def create_error_disable_transitions(self):
- block_states = self.possible_states[:]
-
- # Set transitions for standard states
- for state in block_states:
- self.set_allowed(state, self.FAULT)
- self.set_allowed(state, self.DISABLING)
- self.set_allowed(self.FAULT, [self.RESETTING, self.DISABLING])
- self.set_allowed(self.DISABLING, [self.FAULT, self.DISABLED])
- self.set_allowed(self.DISABLED, self.RESETTING)
-
- def transition_allowed(self, initial_state, target_state):
- """
- Check if a transition between two states is allowed
-
- Args:
- initial_state(str): Initial state
- target_state(str): Target state
-
- Returns:
- bool: True if allowed, False if not
- """
- assert initial_state in self._allowed, \
- "%s is not in %s" % (initial_state, list(self._allowed))
- return target_state in self._allowed[initial_state]
+from annotypes import TYPE_CHECKING
- def set_allowed(self, initial_state, allowed_states):
- """Add an allowed transition state
-
- Args:
- initial_state (str): Initial state
- allowed_states (str or list): state or list of states that
- initial_state can transition to
- """
- if not isinstance(allowed_states, list):
- allowed_states = [allowed_states]
+from malcolm.compat import OrderedDict
+from malcolm.core import Alarm, MethodModel, AttributeModel, ProcessStartHook, \
+ ProcessStopHook, ChoiceMeta, Hook, Widget, Context, Part, Request
+from malcolm.modules.builtin.util import StatefulStates
+from .basiccontroller import BasicController, AMri, ADescription, AUseCothread
+from ..hooks import InitHook, ResetHook, DisableHook, HaltHook
+from ..infos import HealthInfo
- self._allowed.setdefault(initial_state, set()).update(allowed_states)
- for state in allowed_states + [initial_state]:
- if state not in self.possible_states:
- self.possible_states.append(state)
+if TYPE_CHECKING:
+ from typing import Union, Dict, Callable
+ Field = Union[AttributeModel, MethodModel]
+ ChildrenWriteable = Dict[str, Dict[Field, bool]]
ss = StatefulStates
@@ -72,89 +19,67 @@ def set_allowed(self, initial_state, allowed_states):
class StatefulController(BasicController):
"""A controller that implements `StatefulStates`"""
- # The stateSet that this controller implements
- stateSet = ss()
- # {state (str): {Meta/MethodMeta/Attribute: writeable (bool)}
- _children_writeable = None
- # Attributes
- state = None
-
- Init = Hook()
- """Called when this controller is told to start by the process
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- """
-
- Halt = Hook()
- """Called when this controller is told to halt
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- """
-
- Reset = Hook()
- """Called at reset() to reset all parts to a known good state
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- """
-
- Disable = Hook()
- """Called at disable() to stop all parts updating their attributes
-
- Args:
- context (Context): The context that should be used to perform operations
- on child blocks
- """
-
- def __init__(self, process, parts, params):
- self._children_writeable = {}
- super(StatefulController, self).__init__(process, parts, params)
+ # The state_set that this controller implements
+ state_set = ss()
+
+ def __init__(self, mri, description="", use_cothread=True):
+ # type: (AMri, ADescription, AUseCothread) -> None
+ super(StatefulController, self).__init__(mri, description, use_cothread)
+ self._children_writeable = {} # type: ChildrenWriteable
+ self.state = ChoiceMeta(
+ "StateMachine State of Block", self.state_set.possible_states,
+ tags=[Widget.TEXTUPDATE.tag()]).create_attribute_model(ss.DISABLING)
+ self.field_registry.add_attribute_model("state", self.state)
+ self.field_registry.add_method_model(self.disable)
+ self.set_writeable_in(self.field_registry.add_method_model(self.reset),
+ ss.DISABLED, ss.FAULT)
self.transition(ss.DISABLED)
+ self.register_hooked(ProcessStartHook, self.init)
+ self.register_hooked(ProcessStopHook, self.halt)
+
+ def set_writeable_in(self, field, *states):
+ # Field has defined when it should be writeable, just check that
+ # this is valid for this state_set
+ for state in states:
+ assert state in self.state_set.possible_states, \
+ "State %s is not one of the valid states %s" % \
+ (state, self.state_set.possible_states)
+ for state in self.state_set.possible_states:
+ state_writeable = self._children_writeable.setdefault(state, {})
+ state_writeable[field] = state in states
- def create_attribute_models(self):
- """MethodModel that should provide Attribute instances for Block
+ def create_part_contexts(self):
+ # type: () -> Dict[Part, Context]
+ part_contexts = OrderedDict()
+ for part in self.parts.values():
+ part_contexts[part] = Context(self.process)
+ return part_contexts
- Yields:
- tuple: (string name, Attribute, callable put_function).
- """
- for y in super(StatefulController, self).create_attribute_models():
- yield y
- # Create read-only attribute for current state string
- meta = ChoiceMeta(
- "State of Block", self.stateSet.possible_states, label="State")
- self.state = meta.create_attribute_model(ss.DISABLING)
- yield "state", self.state, None
-
- @Process.Init
def init(self):
self.try_stateful_function(ss.RESETTING, ss.READY, self.do_init)
def do_init(self):
- self.run_hook(self.Init, self.create_part_contexts())
+ self.run_hooks(InitHook(part, context)
+ for part, context in self.create_part_contexts().items())
- @Process.Halt
def halt(self):
- self.run_hook(self.Halt, self.create_part_contexts())
+ self.run_hooks(HaltHook(part, context)
+ for part, context in self.create_part_contexts().items())
self.disable()
- @method_takes()
def disable(self):
self.try_stateful_function(ss.DISABLING, ss.DISABLED, self.do_disable)
def do_disable(self):
- self.run_hook(self.Disable, self.create_part_contexts())
+ self.run_hooks(DisableHook(part, context)
+ for part, context in self.create_part_contexts().items())
- @method_writeable_in(ss.DISABLED, ss.FAULT)
def reset(self):
self.try_stateful_function(ss.RESETTING, ss.READY, self.do_reset)
def do_reset(self):
- self.run_hook(self.Reset, self.create_part_contexts())
+ self.run_hooks(ResetHook(part, context)
+ for part, context in self.create_part_contexts().items())
def go_to_error_state(self, exception):
if self.state.value != ss.FAULT:
@@ -169,7 +94,7 @@ def transition(self, state, message=""):
"""
with self.changes_squashed:
initial_state = self.state.value
- if self.stateSet.transition_allowed(
+ if self.state_set.transition_allowed(
initial_state=initial_state, target_state=state):
self.log.debug(
"Transitioning from %s to %s", initial_state, state)
@@ -179,7 +104,7 @@ def transition(self, state, message=""):
alarm = Alarm.major(message)
else:
alarm = Alarm()
- self.update_health(self, alarm)
+ self.update_health(self, HealthInfo(alarm))
self.state.set_value(state)
self.state.set_alarm(alarm)
for child, writeable in self._children_writeable[state].items():
@@ -187,8 +112,6 @@ def transition(self, state, message=""):
child.meta.set_writeable(writeable)
elif isinstance(child, MethodModel):
child.set_writeable(writeable)
- for element in child.takes.elements.values():
- element.set_writeable(writeable)
else:
raise TypeError("Cannot transition from %s to %s" %
(initial_state, state))
@@ -200,33 +123,28 @@ def try_stateful_function(self, start_state, end_state, func, *args,
func(*args, **kwargs)
self.transition(end_state)
except Exception as e: # pylint:disable=broad-except
+ self.log.exception(
+ "Exception running %s %s %s transitioning from %s to %s",
+ func, args, kwargs, start_state, end_state)
self.go_to_error_state(e)
raise
def add_block_field(self, name, child, writeable_func):
super(StatefulController, self).add_block_field(
name, child, writeable_func)
- # Set children_writeable dict
- if isinstance(child, AttributeModel):
- states = child.meta.writeable_in
- else:
- states = child.writeable_in
- if states:
- # Field has defined when it should be writeable, just check that
- # this is valid for this stateSet
- for state in states:
- assert state in self.stateSet.possible_states, \
- "State %s is not one of the valid states %s" % \
- (state, self.stateSet.possible_states)
- elif writeable_func is not None:
- # Field is writeable but has not defined when it should be
- # writeable, so calculate it from the possible states
- states = [
- state for state in self.stateSet.possible_states
- if state not in (ss.DISABLING, ss.DISABLED)]
- else:
- # Field is never writeable, so will never need to change state
+ # If we don't have a writeable func it can never be writeable
+ if writeable_func is None:
return
- for state in self.stateSet.possible_states:
+ # If we have already registered an explicit set then we are done
+ for state in self.state_set.possible_states:
+ state_writeable = self._children_writeable.get(state, {})
+ if child in state_writeable:
+ return
+ # Field is writeable but has not defined when it should be
+ # writeable, so calculate it from the possible states
+ states = [
+ state for state in self.state_set.possible_states
+ if state not in (ss.DISABLING, ss.DISABLED)]
+ for state in self.state_set.possible_states:
state_writeable = self._children_writeable.setdefault(state, {})
state_writeable[child] = state in states
diff --git a/malcolm/modules/builtin/defines.py b/malcolm/modules/builtin/defines.py
index 12ff7f739..6abdffe6b 100644
--- a/malcolm/modules/builtin/defines.py
+++ b/malcolm/modules/builtin/defines.py
@@ -1,86 +1,119 @@
import os
import subprocess
-import types
-import sys
-
-from malcolm.core import method_takes, REQUIRED, Importer
-from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta
-
-
-@method_takes(
- "name", StringMeta("The name of the defined parameter"), REQUIRED,
- "value", StringMeta("The value of the defined parameter"), REQUIRED)
-def string(params):
+import imp
+import importlib
+
+from annotypes import Anno, add_call_types
+import numpy as np
+
+from malcolm.core import Define
+
+
+def import_package_from_path(name, path):
+ dirname, basename = os.path.abspath(path).rsplit(os.sep, 1)
+ file, pathname, description = imp.find_module(basename, [dirname])
+ try:
+ mod = imp.load_module(name, file, pathname, description)
+ finally:
+ if file is not None:
+ file.close()
+ parent_name, attr_name = name.rsplit(".", 1)
+ parent = importlib.import_module(parent_name)
+ setattr(parent, attr_name, mod)
+
+
+with Anno("The name of the defined parameter"):
+ AName = str
+with Anno("The value of the defined parameter"):
+ AStringValue = str
+with Anno("The value of the defined parameter"):
+ AFloat64Value = np.float64
+with Anno("The value of the defined parameter"):
+ AInt32Value = np.int32
+with Anno("The Define that has been created"):
+ ADefine = Define
+
+
+@add_call_types
+def string(name, value):
+ # type: (AName, AStringValue) -> ADefine
"""Define a string parameter to be used within this YAML file"""
- return {params.name: params.value}
+ return Define(name, value)
-@method_takes(
- "name", StringMeta("The name of the defined parameter"), REQUIRED,
- "value", NumberMeta(
- "float64", "The value of the defined parameter"), REQUIRED)
-def float64(params):
- """Define a string parameter to be used within this YAML file"""
- return {params.name: params.value}
+@add_call_types
+def float64(name, value):
+ # type: (AName, AFloat64Value) -> ADefine
+ """Define a float64 parameter to be used within this YAML file"""
+ return Define(name, value)
-@method_takes(
- "name", StringMeta("The name of the defined parameter"), REQUIRED,
- "value", NumberMeta(
- "int32", "The value of the defined parameter"), REQUIRED)
-def int32(params):
- """Define a string parameter to be used within this YAML file"""
- return {params.name: params.value}
+@add_call_types
+def int32(name, value):
+ # type: (AName, AInt32Value) -> ADefine
+ """Define an int32 parameter to be used within this YAML file"""
+ return Define(name, value)
-@method_takes(
- "value", StringMeta("The docstring value"), REQUIRED)
-def docstring(params):
+@add_call_types
+def docstring(value):
+ # type: (AStringValue) -> ADefine
"""Define the docstring for the YAML file"""
- return {"docstring": params.value}
+ return Define("docstring", value)
+
+
+with Anno("The environment variable name to get the value from"):
+ AEnvSource = str
-@method_takes(
- "name", StringMeta("The name of the defined parameter"), REQUIRED,
- "env", StringMeta(
- "The environment variable name to get the value from"), REQUIRED)
-def env_string(params):
+@add_call_types
+def env_string(name, env):
+ # type: (AName, AEnvSource) -> ADefine
"""Define a string parameter coming from the environment to be used within
this YAML file"""
- return {params.name: os.environ[params.env]}
+ return Define(name, os.environ[env])
-@method_takes(
- "name", StringMeta("The name of the defined parameter"), REQUIRED,
- "cmd", StringMeta(
- "The shell command to run to get the value from"), REQUIRED)
-def cmd_string(params):
+with Anno("The shell command to run to get the value from"):
+ ACmd = str
+
+
+@add_call_types
+def cmd_string(name, cmd):
+ # type: (AName, ACmd) -> ADefine
"""Define a string parameter coming from a shell command to be used within
this YAML file. Trailing newlines will be stripped."""
- value = subprocess.check_output(params.cmd, shell=True).rstrip("\n")
- return {params.name: value}
+ value = subprocess.check_output(cmd, shell=True).rstrip("\n")
+ return Define(name, value)
+
+with Anno("The environment variable name to set"):
+ AEnvName = str
+with Anno("The value of the exported environment variable"):
+ AEnvValue = str
-@method_takes(
- "name", StringMeta(
- "The environment variable name to set"), REQUIRED,
- "value", StringMeta(
- "The value of the exported environment variable"), REQUIRED)
-def export_env_string(params):
+
+@add_call_types
+def export_env_string(name, value):
+ # type: (AEnvName, AEnvValue) -> ADefine
"""Exports an environment variable with the given value"""
- os.environ[params.name] = params.value
- return {params.name: params.value}
+ os.environ[name] = value
+ return Define(name, value)
+
+
+with Anno("The name of the exported module"):
+ AModuleName = str
+with Anno("The path of a python package dir to insert as "
+ "malcolm.modules."):
+ AModulePath = str
-@method_takes(
- "name", StringMeta("The name of the exported module"), REQUIRED,
- "path", StringMeta("The path of a python package dir to insert as "
- "malcolm.modules."), REQUIRED)
-def module_path(params):
+@add_call_types
+def module_path(name, path):
+ # type: (AModuleName, AModulePath) -> ADefine
"""Load an external malcolm module (e.g. ADCore/etc/malcolm)"""
- importer = Importer()
- assert os.path.isdir(params.path), "%r doesn't exist" % params.path
- name = "malcolm.modules.%s" % params.name
- importer.import_package_from_path(name, params.path)
- importer.import_special_subpackages(name, params.path)
- return {params.name: params.path}
+ define = Define(name, path)
+ assert os.path.isdir(path), "%r doesn't exist" % path
+ name = "malcolm.modules.%s" % name
+ import_package_from_path(name, path)
+ return define
diff --git a/malcolm/modules/builtin/docs/controllers_api.rst b/malcolm/modules/builtin/docs/util_api.rst
similarity index 87%
rename from malcolm/modules/builtin/docs/controllers_api.rst
rename to malcolm/modules/builtin/docs/util_api.rst
index f3644335d..de8ea6e1c 100644
--- a/malcolm/modules/builtin/docs/controllers_api.rst
+++ b/malcolm/modules/builtin/docs/util_api.rst
@@ -1,18 +1,7 @@
-controllers
-===========
+util
+====
-.. module:: malcolm.modules.builtin.controllers
-
-.. autoclass:: ServerComms
- :members:
-
-.. autoclass:: ClientComms
- :members:
-
-.. autoclass:: BasicController
- :members:
-
-.. autoclass:: ProxyController
+.. automodule:: malcolm.modules.builtin.util
:members:
.. autoclass:: StatefulStates
@@ -46,9 +35,6 @@ controllers
Disabling -> Disabled
Disabled -> Resetting [label="reset()"]
-.. autoclass:: StatefulController
- :members:
-
.. autoclass:: ManagerStates
:members:
@@ -84,5 +70,3 @@ controllers
Disabling -> Disabled
Disabled -> Resetting [label="reset()"]
-.. autoclass:: ManagerController
- :members:
diff --git a/malcolm/modules/builtin/hooks.py b/malcolm/modules/builtin/hooks.py
new file mode 100644
index 000000000..d875d1483
--- /dev/null
+++ b/malcolm/modules/builtin/hooks.py
@@ -0,0 +1,100 @@
+import weakref
+
+from annotypes import Anno, Any, Mapping, Sequence, Array, Union, TypeVar
+
+from malcolm.core import Hook, Part, Context
+from .infos import PortInfo, LayoutInfo
+from .util import LayoutTable
+
+with Anno("The part that has attached to the Hook"):
+ APart = Part
+with Anno("Context that should be used to perform operations on child blocks"):
+ AContext = Context
+
+T = TypeVar("T")
+
+
+class ControllerHook(Hook[T]):
+ """A hook that takes Part and Context for use in controllers"""
+
+ def __init__(self, part, context, **kwargs):
+ # type: (APart, AContext, **Any) -> None
+ # Pass a weak reference to our children
+ super(ControllerHook, self).__init__(
+ part, context=weakref.proxy(context), **kwargs)
+ # But hold a strong reference here to stop it disappearing
+ self.context = context
+
+ def prepare(self):
+ # type: () -> None
+ # context might have been aborted but have nothing servicing
+ # the queue, we still want the legitimate messages on the
+ # queue so just tell it to ignore stops it got before now
+ self.context.ignore_stops_before_now()
+
+ def stop(self):
+ # type: () -> None
+ self.context.stop()
+
+
+class InitHook(ControllerHook[None]):
+ """Called when this controller is told to start by the process"""
+
+
+class ResetHook(ControllerHook[None]):
+ """Called at reset() to reset all parts to a known good state"""
+
+
+class HaltHook(ControllerHook[None]):
+ """Called when this controller is told to halt"""
+
+
+class DisableHook(ControllerHook[None]):
+ """Called at disable() to stop all parts updating their attributes"""
+
+
+with Anno("The PortInfos for all the parts"):
+ APortMap = Mapping[str, Array[PortInfo]]
+with Anno("A possibly partial set of changes to the layout table that "
+ "should be acted on"):
+ ALayoutTable = LayoutTable
+with Anno("The current layout information"):
+ ALayoutInfos = Array[LayoutInfo]
+ULayoutInfos = Union[ALayoutInfos, Sequence[LayoutInfo], LayoutInfo, None]
+
+
+class LayoutHook(ControllerHook[ULayoutInfos]):
+ """Called when layout table set and at init to update child layout"""
+
+ def __init__(self, part, context, ports, layout):
+ # type: (APart, AContext, APortMap, ALayoutTable) -> None
+ super(LayoutHook, self).__init__(
+ part, context, ports=ports, layout=layout)
+
+ def validate_return(self, ret):
+ # type: (ULayoutInfos) -> ALayoutInfos
+ """Check that all returned infos are LayoutInfos"""
+ return ALayoutInfos(ret)
+
+
+with Anno("The serialized structure to load"):
+ AStructure = Mapping[str, Mapping]
+
+
+class LoadHook(ControllerHook[None]):
+ """Called at load() to load child settings from a structure"""
+
+ def __init__(self, part, context, structure):
+ # type: (APart, AContext, AStructure) -> None
+ super(LoadHook, self).__init__(part, context, structure=structure)
+
+
+class SaveHook(ControllerHook[AStructure]):
+ """Called at save() to serialize child settings into a dict structure"""
+
+ def validate_return(self, ret):
+ # type: (AStructure) -> AStructure
+ """Check that a serialized structure is returned"""
+ assert isinstance(ret, dict), \
+ "Expected a structure, got %r" % (ret,)
+ return ret
diff --git a/malcolm/modules/builtin/infos.py b/malcolm/modules/builtin/infos.py
new file mode 100644
index 000000000..469666b31
--- /dev/null
+++ b/malcolm/modules/builtin/infos.py
@@ -0,0 +1,128 @@
+from annotypes import TYPE_CHECKING
+
+from malcolm.core import Info, Alarm, Port, Request
+
+if TYPE_CHECKING:
+ from typing import Callable, List, Any, Dict
+
+
+class TitleInfo(Info):
+ """Used to tell the Controller the title of the Block should change
+
+ Args:
+ title: The new title of the Block
+ """
+ def __init__(self, title):
+ # type: (str) -> None
+ self.title = title
+
+
+class HealthInfo(Info):
+ """Used to tell the Controller a part has an alarm or not
+
+ Args:
+ alarm: The alarm that should be used for the health of the block
+ """
+ def __init__(self, alarm):
+ # type: (Alarm) -> None
+ self.alarm = alarm
+
+
+class PortInfo(Info):
+ """Info about a port to be used by other child parts for connection
+
+ Args:
+ name: The name of the attribute
+ port: The type of the port
+ """
+ def __init__(self, name, port):
+ # type: (str, Port) -> None
+ self.name = name
+ self.port = port
+
+
+class InPortInfo(PortInfo):
+ """Info about an inport from the attribute tag
+
+ Args:
+ name: The name of the attribute
+ port: The type of the port
+ disconnected_value: The value that will be set when the inport is
+ disconnected, E.g. '' or 'ZERO'
+ value: Initial value of the attribute
+ """
+ def __init__(self, name, port, disconnected_value, value):
+ # type: (str, Port, str, Any) -> None
+ super(InPortInfo, self).__init__(name, port)
+ self.disconnected_value = disconnected_value
+ self.value = value
+
+
+class OutPortInfo(PortInfo):
+ """Info about an outport from the attribute tag
+
+ Args:
+ name: The name of the attribute
+ port: The type of the port
+ connected_value: The value that an inport will be set to when connected
+ to this outport, E.g. 'PCOMP1.OUT' or 'DET.STATS'
+ """
+ def __init__(self, name, port, connected_value):
+ # type: (str, Port, str) -> None
+ super(OutPortInfo, self).__init__(name, port)
+ self.connected_value = connected_value
+
+
+class LayoutInfo(Info):
+ """Info about the position and visibility of a child block in a layout
+
+ Args:
+ mri: Malcolm full name of child block
+ x: X Coordinate of child block
+ y: Y Coordinate of child block
+ visible: Whether child block is visible
+ """
+ def __init__(self, mri, x, y, visible):
+ # type: (str, float, float, bool) -> None
+ self.mri = mri
+ self.x = x
+ self.y = y
+ self.visible = visible
+
+
+class PartExportableInfo(Info):
+ """Info about the exportable fields and port infos for a Part
+
+ Args:
+ names: The list of fields that the Part thinks are exportable
+ port_infos: The list of PortInfo objects that the Part exposes
+ """
+ def __init__(self, names, port_infos):
+ # type: (List[str], List[PortInfo]) -> None
+ self.names = names
+ self.port_infos = port_infos
+
+
+class PartModifiedInfo(Info):
+ """Info about whether the part was modified or not
+
+ Args:
+ modified: {attr_name: message} for all attributes that have been
+ modified from the saved value
+ """
+ def __init__(self, modified):
+ # type: (Dict[str, str]) -> None
+ self.modified = modified
+
+
+class RequestInfo(Info):
+ """Info saying that the part has received a request that needs servicing
+
+ Args:
+ request: The request that needs servicing, with callback filled in
+ mri: The mri of the controller that should handle it
+ """
+ def __init__(self, request, mri):
+ # type: (Request, str) -> None
+ self.request = request
+ self.mri = mri
diff --git a/malcolm/modules/builtin/infos/__init__.py b/malcolm/modules/builtin/infos/__init__.py
deleted file mode 100644
index dcf34c0d7..000000000
--- a/malcolm/modules/builtin/infos/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from .layoutinfo import LayoutInfo
-from .portinfo import PortInfo
-
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
diff --git a/malcolm/modules/builtin/infos/layoutinfo.py b/malcolm/modules/builtin/infos/layoutinfo.py
deleted file mode 100644
index a038833b0..000000000
--- a/malcolm/modules/builtin/infos/layoutinfo.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from malcolm.core import Info
-
-
-class LayoutInfo(Info):
- """Info about the position and visibility of a child block in a layout
-
- Args:
- mri (str): Malcolm full name of child block
- x (float): X Coordinate of child block
- y (float): Y Coordinate of child block
- visible (bool): Whether child block is visible
- """
- def __init__(self, mri, x, y, visible):
- self.mri = mri
- self.x = x
- self.y = y
- self.visible = visible
diff --git a/malcolm/modules/builtin/infos/portinfo.py b/malcolm/modules/builtin/infos/portinfo.py
deleted file mode 100644
index 3f9a001be..000000000
--- a/malcolm/modules/builtin/infos/portinfo.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from malcolm.core import Info
-from malcolm.tags import port_types
-
-
-class PortInfo(Info):
- """Info about a port and its value in a class
-
- Args:
- name (str): The name of the attribute
- value: Initial value
- direction (str): Direction of the port e.g. "in" or "out"
- type (str): Type of the port, e.g. "bool" or "NDArray"
- extra (str): For outports, value that will be set when port is selected,
- e.g. "PCOMP1.OUT" or "DET.STATS". For inports, value that will be
- set when port is disconnected, e.g. "" or "ZERO"
- """
- def __init__(self, name, value, direction, type, extra):
- self.name = name
- self.value = value
- assert direction in ("in", "out"), \
- "Direction should be 'in' or 'out', got %r" % direction
- self.direction = direction
- assert type in port_types, \
- "Type should be in %s, got %r" % (port_types, type)
- self.type = type
- self.extra = extra
diff --git a/malcolm/modules/builtin/parameters.py b/malcolm/modules/builtin/parameters.py
index f0d7daee6..b98685355 100644
--- a/malcolm/modules/builtin/parameters.py
+++ b/malcolm/modules/builtin/parameters.py
@@ -1,48 +1,53 @@
-from malcolm.core import method_takes, REQUIRED, OPTIONAL
-from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta
-
-
-def args_for_takes(params, meta_cls, *meta_args):
- meta_args += (params.description,)
- meta = meta_cls(*meta_args)
- if hasattr(params, "default"):
- return [params.name, meta, params.default]
- else:
- return [params.name, meta, REQUIRED]
+from annotypes import Anno, add_call_types
default_desc = "Default value for parameter. If not specified, parameter is " \
"required"
-@method_takes(
- "name", StringMeta(
- "Specify that this class will take a parameter name"), REQUIRED,
- "description", StringMeta(
- "Description of this parameter"), REQUIRED,
- "default", StringMeta(default_desc), OPTIONAL)
-def string(params):
+with Anno("Specify that this class will take a parameter name"):
+ AName = str
+with Anno("Description of this parameter"):
+ ADescription = str
+with Anno(default_desc):
+ AStringDefault = str
+with Anno(default_desc):
+ AFloat64Default = float
+with Anno(default_desc):
+ AInt32Default = int
+with Anno("The Anno representing the parameter"):
+ AAnno = Anno
+
+
+def common_args(name, default):
+ for s in name.split("_"):
+ # Only support UPP3R or l0wer case for each _ section
+ assert s.islower() or s.isupper(), \
+ "Parameter %s should be snake_case" % (name,)
+ ret = dict(name=name)
+ if default is not None:
+ ret["default"] = default
+ return ret
+
+
+@add_call_types
+def string(name, description, default=None):
+ # type: (AName, ADescription, AStringDefault) -> AAnno
"""Add a string parameter to be passed when instantiating this YAML file"""
- return args_for_takes(params, StringMeta)
+ args = common_args(name, default)
+ return Anno(description, typ=str, **args)
-@method_takes(
- "name", StringMeta(
- "Specify that this class will take a parameter name"), REQUIRED,
- "description", StringMeta(
- "Description of this parameter"), REQUIRED,
- "default", NumberMeta("float64", default_desc), OPTIONAL)
-def float64(params):
+@add_call_types
+def float64(name, description, default=None):
+ # type: (AName, ADescription, AFloat64Default) -> AAnno
"""Add a float64 parameter to be passed when instantiating this YAML file"""
- return args_for_takes(params, NumberMeta, "float64")
+ args = common_args(name, default)
+ return Anno(description, typ=float, **args)
-@method_takes(
- "name", StringMeta(
- "Specify that this class will take a parameter name"), REQUIRED,
- "description", StringMeta(
- "Description of this parameter"), REQUIRED,
- "default", NumberMeta("int32", default_desc), OPTIONAL)
-def int32(params):
+@add_call_types
+def int32(name, description, default=None):
+ # type: (AName, ADescription, AInt32Default) -> AAnno
"""Add an int32 parameter to be passed when instantiating this YAML file"""
- return args_for_takes(params, NumberMeta, "int32")
-
+ args = common_args(name, default)
+ return Anno(description, typ=int, **args)
diff --git a/malcolm/modules/builtin/parts/__init__.py b/malcolm/modules/builtin/parts/__init__.py
index 40168b4aa..b4902827e 100644
--- a/malcolm/modules/builtin/parts/__init__.py
+++ b/malcolm/modules/builtin/parts/__init__.py
@@ -1,11 +1,12 @@
-from .childpart import ChildPart
+from .childpart import ChildPart, APartName, AMri, AInitialVisibility
from .choicepart import ChoicePart
from .float64part import Float64Part
from .grouppart import GroupPart
from .iconpart import IconPart
-from .labelpart import LabelPart
-from .statefulchildpart import StatefulChildPart
+from .titlepart import TitlePart
from .stringpart import StringPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/builtin/parts/attributepart.py b/malcolm/modules/builtin/parts/attributepart.py
deleted file mode 100644
index 10000012e..000000000
--- a/malcolm/modules/builtin/parts/attributepart.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from malcolm.core import Part, method_takes, REQUIRED
-from malcolm.tags import widget_types, widget, config, group
-from malcolm.modules.builtin.vmetas import StringMeta, BooleanMeta, ChoiceMeta
-
-
-@method_takes(
- "name", StringMeta("Name of the created attribute"), REQUIRED,
- "description", StringMeta("Desc of created attribute"), REQUIRED,
- "widget", ChoiceMeta("Widget type", [""] + widget_types), "",
- "writeable", BooleanMeta("Is the attribute writeable?"), False,
- "group", StringMeta("If given, which GUI group should we attach to"), "",
- "config", BooleanMeta(
- "If writeable, should this field be loaded/saved?"), True)
-class AttributePart(Part):
- def __init__(self, params):
- # The created attribute
- self.attr = None
- # Store params
- self.params = params
- super(AttributePart, self).__init__(params.name)
-
- def create_attribute_models(self):
- # Find the tags
- tags = self.create_tags()
- # Make a meta object for our attribute
- meta = self.create_meta(self.params.description, tags)
- # The attribute we will be publishing
- initial_value = self.get_initial_value()
- self.attr = meta.create_attribute_model(initial_value)
- if self.is_writeable():
- writeable_func = self.get_writeable_func()
- else:
- writeable_func = None
- yield self.params.name, self.attr, writeable_func
-
- def create_meta(self, description, tags):
- raise NotImplementedError()
-
- def is_writeable(self):
- return self.params.writeable
-
- def get_writeable_func(self):
- return self.attr.set_value
-
- def create_tags(self):
- tags = []
- if self.params.widget:
- tags.append(widget(self.params.widget))
- if self.params.config and self.is_writeable():
- # If we have a writeable func we can be a config param
- tags.append(config())
- if self.params.group:
- # If we have a group then add the tag
- tags.append(group(self.params.group))
- return tags
-
- def get_initial_value(self):
- """Implement this to override the attribute's initial value at creation
- """
- return None
diff --git a/malcolm/modules/builtin/parts/childpart.py b/malcolm/modules/builtin/parts/childpart.py
index b01ceed4d..653af8df7 100644
--- a/malcolm/modules/builtin/parts/childpart.py
+++ b/malcolm/modules/builtin/parts/childpart.py
@@ -1,77 +1,185 @@
import re
import numpy as np
+from annotypes import Anno, add_call_types, TYPE_CHECKING
from malcolm.compat import OrderedDict
-from malcolm.core import Part, REQUIRED, method_takes, serialize_object, \
- Attribute, Subscribe, Unsubscribe, Put, Alarm, AlarmSeverity, AlarmStatus, \
- Queue
-from malcolm.modules.builtin.controllers import ManagerController
-from malcolm.modules.builtin.infos import PortInfo, LayoutInfo
-from malcolm.modules.builtin.vmetas import StringMeta
-from malcolm.tags import config
+from malcolm.core import Part, serialize_object, Attribute, Subscribe, \
+ Unsubscribe, APartName, Port, Controller, Response, ABORT_TIMEOUT, \
+ get_config_tag
+from ..infos import PortInfo, LayoutInfo, OutPortInfo, InPortInfo, \
+ PartExportableInfo, PartModifiedInfo
+from ..hooks import InitHook, HaltHook, ResetHook, LayoutHook, DisableHook, \
+ AContext, APortMap, ALayoutTable, LoadHook, SaveHook, AStructure, \
+ ULayoutInfos
+from ..util import StatefulStates
+
+if TYPE_CHECKING:
+ from typing import Dict, Any, List, Type, TypeVar, Tuple
+
+ TP = TypeVar("TP", bound=PortInfo)
+
+
+with Anno("Malcolm resource id of child object"):
+ AMri = str
+with Anno("Whether the part is initially visible with no config loaded, None "
+ "means only if child in/outports are connected to another Block"):
+ AInitialVisibility = bool
+with Anno("If the child is a StatefulController then this should be True"):
+ AStateful = bool
port_tag_re = re.compile(r"(in|out)port:(.*):(.*)")
-@method_takes(
- "name", StringMeta("Name of the Part within the controller"), REQUIRED,
- "mri", StringMeta("Malcolm resource id of child object"), REQUIRED)
+ss = StatefulStates
+
+
class ChildPart(Part):
- def __init__(self, params):
- # Layout options
- self.x = 0
- self.y = 0
- self.visible = None
+ def __init__(self, name, mri, initial_visibility=None, stateful=True):
+ # type: (APartName, AMri, AInitialVisibility, AStateful) -> None
+ super(ChildPart, self).__init__(name)
+ self.stateful = stateful
+ self.mri = mri
+ self.x = 0.0 # type: float
+ self.y = 0.0 # type: float
+ self.visible = initial_visibility # type: bool
# {part_name: visible} saying whether part_name is visible
- self.part_visible = {}
+ self.part_visibility = {} # type: Dict[str, bool]
# {attr_name: attr_value} of last saved/loaded structure
- self.saved_structure = {}
+ self.saved_structure = {} # type: Dict[str, Any]
# {attr_name: modified_message} of current values
- self.modified_messages = {}
+ self.modified_messages = {} # type: Dict[str, str]
# The controller hosting our child
- self.child_controller = None
+ self.child_controller = None # type: Controller
# {id: Subscribe} for subscriptions to config tagged fields
- self.config_subscriptions = {}
- # set(attribute_name) where the attribute is a config tagged field
- # we are modifying
- self.we_modified = set()
- # Update queue of modified alarms
- self.modified_update_queue = Queue()
- # Update queue of exportable fields
- self.exportable_update_queue = Queue()
+ self.config_subscriptions = {} # type: Dict[int, Subscribe]
# {attr_name: PortInfo}
- self.port_infos = {}
- # Store params
- self.params = params
- super(ChildPart, self).__init__(params.name)
-
- def notify_dispatch_request(self, request):
- """Will be called when a context passed to a hooked function is about
- to dispatch a request"""
- if isinstance(request, Put):
- self.we_modified.add(request.path[-2])
-
- @ManagerController.Init
+ self.port_infos = {} # type: Dict[str, PortInfo]
+ # Hooks
+ self.register_hooked(InitHook, self.init)
+ self.register_hooked(HaltHook, self.halt)
+ self.register_hooked(LayoutHook, self.layout)
+ self.register_hooked(LoadHook, self.load)
+ self.register_hooked(SaveHook, self.save)
+ self.register_hooked(DisableHook, self.disable)
+ self.register_hooked(ResetHook, self.reset)
+
+ @add_call_types
def init(self, context):
+ # type: (AContext) -> None
+ self.child_controller = context.get_controller(self.mri)
+ if self.stateful:
+ # Wait for a while until the child is ready as it changes the
+ # save state
+ context.when_matches(
+ [self.mri, "state", "value"], ss.READY,
+ [ss.FAULT, ss.DISABLED], timeout=ABORT_TIMEOUT)
# Save what we have
self.save(context)
- # Monitor the child configure for changes
- self.child_controller = context.get_controller(self.params.mri)
- subscribe = Subscribe(path=[self.params.mri, "meta", "fields"],
- callback=self.update_part_exportable)
+ subscribe = Subscribe(path=[self.mri, "meta", "fields"])
+ subscribe.set_callback(self.update_part_exportable)
# Wait for the first update to come in
self.child_controller.handle_request(subscribe).wait()
- @ManagerController.Halt
- def halt(self, context):
- unsubscribe = Unsubscribe(callback=self.update_part_exportable)
+ @add_call_types
+ def disable(self, context):
+ # type: (AContext) -> None
+ # TODO: do we actually want to disable children on disable?
+ child = context.block_view(self.mri)
+ if self.stateful and child.disable.writeable:
+ child.disable()
+
+ @add_call_types
+ def reset(self, context):
+ # type: (AContext) -> None
+ child = context.block_view(self.mri)
+ if self.stateful and child.reset.writeable:
+ child.reset()
+
+ @add_call_types
+ def halt(self):
+ # type: () -> None
+ unsubscribe = Unsubscribe()
+ unsubscribe.set_callback(self.update_part_exportable)
self.child_controller.handle_request(unsubscribe)
+ @add_call_types
+ def layout(self, context, ports, layout):
+ # type: (AContext, APortMap, ALayoutTable) -> ULayoutInfos
+ first_call = not self.part_visibility
+ for i, name in enumerate(layout.name):
+ visible = layout.visible[i]
+ if name == self.name:
+ if self.visible and not visible:
+ self.sever_inports(context, ports)
+ self.x = layout.x[i]
+ self.y = layout.y[i]
+ self.visible = visible
+ else:
+ was_visible = self.part_visibility.get(name, False)
+ if was_visible and not visible:
+ self.sever_inports(context, ports, name)
+ self.part_visibility[name] = visible
+ # If this is the first call work out which parts are visible if not
+ # specified in the initial layout table
+ if first_call:
+ self.calculate_part_visibility(ports)
+ # If not specified then take our own visibility from this same dict
+ if self.visible is None:
+ self.visible = self.part_visibility.get(self.name, False)
+ ret = LayoutInfo(
+ mri=self.mri, x=self.x, y=self.y, visible=self.visible)
+ return [ret]
+
+ @add_call_types
+ def load(self, context, structure):
+ # type: (AContext, AStructure) -> None
+ child = context.block_view(self.mri)
+ iterations = {} # type: Dict[int, Dict[str, Tuple[Attribute, Any]]]
+ for k, v in structure.items():
+ try:
+ attr = getattr(child, k)
+ except AttributeError:
+ self.log.warning("Cannot restore non-existant attr %s" % k)
+ else:
+ tag = get_config_tag(attr.meta.tags)
+ if tag:
+ iteration = int(tag.split(":")[1])
+ iterations.setdefault(iteration, {})[k] = (attr, v)
+ else:
+ self.log.warning("Attr %s is not config tagged" % k)
+ # Do this first so that any callbacks that happen in the put know
+ # not to notify controller
+ self.saved_structure = structure
+ # TODO: only load design if visible and not at init
+ for _, params in sorted(iterations.items()):
+ # Call each iteration as a separate operation, only putting the
+ # ones that need to change
+ to_set = {}
+ for k, (attr, v) in params.items():
+ try:
+ np.testing.assert_equal(serialize_object(attr.value), v)
+ except AssertionError:
+ to_set[k] = v
+ child.put_attribute_values(to_set)
+
+ @add_call_types
+ def save(self, context):
+ # type: (AContext) -> AStructure
+ child = context.block_view(self.mri)
+ part_structure = OrderedDict()
+ for k in child:
+ attr = getattr(child, k)
+ if isinstance(attr, Attribute) and get_config_tag(attr.meta.tags):
+ part_structure[k] = serialize_object(attr.value)
+ self.saved_structure = part_structure
+ return part_structure
+
def update_part_exportable(self, response):
+ # type: (Response) -> None
# Get a child context to check if we have a config field
- child = self.child_controller.block_view()
+ child = self.child_controller.make_view()
spawned = []
if response.value:
new_fields = response.value
@@ -82,7 +190,8 @@ def update_part_exportable(self, response):
for subscribe in self.config_subscriptions.values():
attr_name = subscribe.path[-2]
if attr_name not in new_fields:
- unsubscribe = Unsubscribe(subscribe.id, subscribe.callback)
+ unsubscribe = Unsubscribe(subscribe.id)
+ unsubscribe.set_callback(subscribe.callback)
spawned.append(
self.child_controller.handle_request(unsubscribe))
self.port_infos.pop(attr_name, None)
@@ -96,18 +205,23 @@ def update_part_exportable(self, response):
for tag in attr.meta.tags:
match = port_tag_re.match(tag)
if match:
- d, type, extra = match.groups()
- self.port_infos[field] = PortInfo(
- name=field, value=attr.value, direction=d,
- type=type, extra=extra)
- if isinstance(attr, Attribute) and config() in attr.meta.tags:
+ d, port, extra = match.groups()
+ if d == "out":
+ info = OutPortInfo(name=field, port=Port(port),
+ connected_value=extra)
+ else:
+ info = InPortInfo(name=field, port=Port(port),
+ disconnected_value=extra,
+ value=attr.value)
+ self.port_infos[field] = info
+ if isinstance(attr, Attribute) and get_config_tag(attr.meta.tags):
if self.config_subscriptions:
new_id = max(self.config_subscriptions) + 1
else:
new_id = 1
subscribe = Subscribe(id=new_id,
- path=[self.params.mri, field, "value"],
- callback=self.update_part_modified)
+ path=[self.mri, field, "value"])
+ subscribe.set_callback(self.update_part_modified)
self.config_subscriptions[new_id] = subscribe
# Signal that any change we get is a difference
if field not in self.saved_structure:
@@ -123,16 +237,10 @@ def update_part_exportable(self, response):
# still get the most up to date data
port_infos = [
self.port_infos[f] for f in new_fields if f in self.port_infos]
- self.exportable_update_queue.put((new_fields, port_infos))
- self.spawn(self._update_part_exportable).wait()
-
- def _update_part_exportable(self):
- # We spawned just above, so there is definitely something on the
- # queue
- fields, port_infos = self.exportable_update_queue.get(timeout=0)
- self.controller.update_exportable(self, fields, port_infos)
+ self.registrar.report(PartExportableInfo(new_fields, port_infos))
def update_part_modified(self, response):
+ # type: (Response) -> None
subscribe = self.config_subscriptions[response.id]
name = subscribe.path[-2]
original_value = self.saved_structure[name]
@@ -141,172 +249,87 @@ def update_part_modified(self, response):
except AssertionError:
message = "%s.%s.value = %r not %r" % (
self.name, name, response.value, original_value)
- if name in self.we_modified:
- message = "(We modified) " + message
- self.modified_messages[name] = message
else:
- self.modified_messages.pop(name, None)
- message_list = []
- only_modified_by_us = True
- # Tell the controller what has changed
- for name, message in self.modified_messages.items():
- if name not in self.we_modified:
- only_modified_by_us = False
- message_list.append(message)
- if message_list:
- if only_modified_by_us:
- severity = AlarmSeverity.NO_ALARM
- else:
- severity = AlarmSeverity.MINOR_ALARM
- alarm = Alarm(
- severity, AlarmStatus.CONF_STATUS, "\n".join(message_list))
- else:
- alarm = None
- # Put data on the queue, so if spawns are handled out of order we
- # still get the most up to date data
- self.modified_update_queue.put(alarm)
- self.spawn(self._update_part_modified).wait()
-
- def _update_part_modified(self):
- # We spawned just above, so there is definitely something on the
- # queue
- alarm = self.modified_update_queue.get(timeout=0)
- self.controller.update_modified(self, alarm)
-
- @ManagerController.Layout
- def layout(self, context, part_info, layout_table):
- # if this is the first call, we need to calculate if we are visible
- # or not
- if self.visible is None:
- self.visible = self.child_connected(part_info)
- for i, name in enumerate(layout_table.name):
- x = layout_table.x[i]
- y = layout_table.y[i]
- visible = layout_table.visible[i]
- if name == self.name:
- if self.visible and not visible:
- self.sever_inports(context, part_info)
- self.x = x
- self.y = y
- self.visible = visible
+ message = None
+ last_message = self.modified_messages.get(name, None)
+ if message != last_message:
+ # Tell the controller if something has changed
+ if message:
+ self.modified_messages[name] = message
else:
- was_visible = self.part_visible.get(name, False)
- if was_visible and not visible:
- self.sever_inports(context, part_info, name)
- self.part_visible[name] = visible
- ret = LayoutInfo(
- mri=self.params.mri, x=self.x, y=self.y, visible=self.visible)
- return [ret]
+ self.modified_messages.pop(name, None)
+ info = PartModifiedInfo(self.modified_messages.copy())
+ self.registrar.report(info)
- @ManagerController.Load
- def load(self, context, structure):
- child = context.block_view(self.params.mri)
- part_structure = structure.get(self.name, {})
- params = {}
- for k, v in part_structure.items():
- try:
- attr = getattr(child, k)
- except AttributeError:
- self.log.warning("Cannot restore non-existant attr %s" % k)
- else:
- try:
- np.testing.assert_equal(serialize_object(attr.value), v)
- except AssertionError:
- params[k] = v
- # Do this first so that any callbacks that happen in the put know
- # not to notify controller
- self.saved_structure = part_structure
- if params:
- child.put_attribute_values(params)
-
- @ManagerController.Save
- def save(self, context):
- child = context.block_view(self.params.mri)
- part_structure = OrderedDict()
- for k in child:
- attr = getattr(child, k)
- if isinstance(attr, Attribute) and "config" in attr.meta.tags:
- part_structure[k] = serialize_object(attr.value)
- self.saved_structure = part_structure
- return part_structure
+ def _get_flowgraph_ports(self, ports, typ):
+ # type: (APortMap, Type[TP]) -> Dict[str, TP]
+ ret = {}
+ for port_info in ports.get(self.name, []):
+ if isinstance(port_info, typ):
+ ret[port_info.name] = port_info
+ return ret
- def _get_flowgraph_ports(self, part_info, direction):
- # {attr_name: port_info}
- ports = {}
- for port_info in part_info.get(self.name, []):
- if port_info.direction == direction:
- ports[port_info.name] = port_info
- return ports
-
- def _outport_lookup(self, port_infos):
+ def _outport_lookup(self, info_list):
+ # type: (List[PortInfo]) -> Dict[str, Port]
outport_lookup = {}
- for outport_info in port_infos:
- if outport_info.direction == "out":
- outport_lookup[outport_info.extra] = outport_info.type
+ for info in info_list:
+ if isinstance(info, OutPortInfo):
+ outport_lookup[info.connected_value] = info.port
return outport_lookup
- def sever_inports(self, context, part_info, connected_to=None):
+ def sever_inports(self, context, ports, connected_to=None):
+ # type: (AContext, APortMap, str) -> None
"""Conditionally sever inports of the child. If connected_to is then
None then sever all, otherwise restrict to connected_to's outports
Args:
context (Context): The context to use
- part_info (dict): {part_name: [PortInfo]}
+ ports (dict): {part_name: [PortInfo]}
connected_to (str): Restrict severing to this part
"""
# Find the outports to connect to
if connected_to:
# Calculate a lookup of the outport "name" to type
outport_lookup = self._outport_lookup(
- part_info.get(connected_to, []))
+ ports.get(connected_to, []))
else:
outport_lookup = True
# Find our inports
- inports = self._get_flowgraph_ports(part_info, "in")
+ inports = self._get_flowgraph_ports(ports, InPortInfo)
# If we have inports that need to be disconnected then do so
if inports and outport_lookup:
- child = context.block_view(self.params.mri)
+ child = context.block_view(self.mri)
attribute_values = {}
for name, port_info in inports.items():
if outport_lookup is True or outport_lookup.get(
- child[name].value, None) == port_info.type:
- attribute_values[name] = port_info.extra
+ child[name].value, None) == port_info.port:
+ attribute_values[name] = port_info.disconnected_value
child.put_attribute_values(attribute_values)
- def child_connected(self, part_info):
- """Calculate if anything is connected to us or we are connected to
- anything else
+ def calculate_part_visibility(self, ports):
+ # type: (APortMap) -> None
+ """Calculate what is connected to what
Args:
- part_info (dict): {part_name: [PortInfo]} from other ports
-
- Returns:
- bool: True if we are connected or have nothing to connect
+ ports: {part_name: [PortInfo]} from other ports
"""
- has_ports = False
- # See if our inports are connected to anything
- inports = self._get_flowgraph_ports(part_info, "in")
- for name, inport_info in inports.items():
- disconnected_value = inport_info.extra
- has_ports = True
- if inport_info.value != disconnected_value:
- return True
- # Calculate a lookup of outport "name" to their types
- outport_lookup = self._outport_lookup(part_info.get(self.name, []))
- if outport_lookup:
- has_ports = True
- # See if anything is connected to one of our outports
- for inport_info in PortInfo.filter_values(part_info):
- if inport_info.direction == "in":
- if outport_lookup.get(
- inport_info.value, None) == inport_info.type:
- return True
- # If we have ports and they haven't been connected to anything then
- # we are disconnected
- if has_ports:
- return False
- # otherwise, treat a block with no ports as connected
- else:
- return True
+ # Calculate a lookup of outport connected_value to part_name
+ outport_lookup = {}
+ for part_name, port_infos in OutPortInfo.filter_parts(ports).items():
+ for port_info in port_infos:
+ outport_lookup[port_info.connected_value] = (
+ part_name, port_info.port)
+ # Look through all the inports, and set both ends of the connection
+ # to visible if they aren't specified
+ for part_name, port_infos in InPortInfo.filter_parts(ports).items():
+ for port_info in port_infos:
+ if port_info.value != port_info.disconnected_value:
+ conn_part, port = outport_lookup.get(
+ port_info.value, (None, None))
+ if conn_part and port == port_info.port:
+ if conn_part not in self.part_visibility:
+ self.part_visibility[conn_part] = True
+ if part_name not in self.part_visibility:
+ self.part_visibility[part_name] = True
diff --git a/malcolm/modules/builtin/parts/choicepart.py b/malcolm/modules/builtin/parts/choicepart.py
index a84a29a80..b7f331e99 100644
--- a/malcolm/modules/builtin/parts/choicepart.py
+++ b/malcolm/modules/builtin/parts/choicepart.py
@@ -1,16 +1,38 @@
-from malcolm.core import method_also_takes, REQUIRED
-from malcolm.modules.builtin.vmetas import ChoiceMeta, StringMeta, StringArrayMeta
-from .attributepart import AttributePart
+from annotypes import Anno, Array, Sequence, Union
+from enum import Enum
+from malcolm.core import Part, PartRegistrar, ChoiceMeta, APartName, \
+ AMetaDescription
+from ..util import set_tags, AWriteable, AConfig, AGroup, AWidget
-@method_also_takes(
- "choices", StringArrayMeta("Possible choices for this attribute"), REQUIRED,
- "initialValue", StringMeta("Initial value of attribute"), REQUIRED,
-)
-class ChoicePart(AttributePart):
- def get_initial_value(self):
- return self.params.initialValue
- def create_meta(self, description, tags):
- return ChoiceMeta(
- choices=self.params.choices, description=description, tags=tags)
+with Anno("Possible choices for this attribute"):
+ AChoices = Array[str]
+with Anno("Initial value of the created attribute"):
+ AValue = str
+UChoices = Union[AChoices, Sequence[Enum], Sequence[str], str]
+
+
+class ChoicePart(Part):
+ """Create a single choice Attribute on the Block"""
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ choices, # type: UChoices
+ value, # type: AValue
+ writeable=False, # type: AWriteable
+ config=1, # type: AConfig
+ group=None, # type: AGroup
+ widget=None, # type: AWidget
+ ):
+ # type: (...) -> None
+ super(ChoicePart, self).__init__(name)
+ meta = ChoiceMeta(description, choices)
+ set_tags(meta, writeable, config, group, widget)
+ self.attr = meta.create_attribute_model(value)
+ self.writeable_func = self.attr.set_value if writeable else None
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model(self.name, self.attr, self.writeable_func)
+
diff --git a/malcolm/modules/builtin/parts/float64part.py b/malcolm/modules/builtin/parts/float64part.py
index 1c44bc896..d8483670f 100644
--- a/malcolm/modules/builtin/parts/float64part.py
+++ b/malcolm/modules/builtin/parts/float64part.py
@@ -1,14 +1,31 @@
-from malcolm.core import method_also_takes
-from malcolm.modules.builtin.vmetas import NumberMeta
-from .attributepart import AttributePart
+from annotypes import Anno
+from malcolm.core import Part, PartRegistrar, NumberMeta, APartName, \
+ AMetaDescription
+from ..util import set_tags, AWriteable, AConfig, AGroup, AWidget
-@method_also_takes(
- "initialValue", NumberMeta("float64", "Initial value of attribute"), 0.0,
-)
-class Float64Part(AttributePart):
- def get_initial_value(self):
- return self.params.initialValue
+with Anno("Initial value of the created attribute"):
+ Value = float
- def create_meta(self, description, tags):
- return NumberMeta("float64", description=description, tags=tags)
+
+class Float64Part(Part):
+ """Create a single float64 Attribute on the Block"""
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ writeable=False, # type: AWriteable
+ config=1, # type: AConfig
+ group=None, # type: AGroup
+ widget=None, # type: AWidget
+ value=0.0, # type: Value
+ ):
+ # type: (...) -> None
+ super(Float64Part, self).__init__(name)
+ meta = NumberMeta("float64", description)
+ set_tags(meta, writeable, config, group, widget)
+ self.attr = meta.create_attribute_model(value)
+ self.writeable_func = self.attr.set_value if writeable else None
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model(self.name, self.attr, self.writeable_func)
diff --git a/malcolm/modules/builtin/parts/grouppart.py b/malcolm/modules/builtin/parts/grouppart.py
index dd84b5953..e1ff55f18 100644
--- a/malcolm/modules/builtin/parts/grouppart.py
+++ b/malcolm/modules/builtin/parts/grouppart.py
@@ -1,23 +1,17 @@
-from malcolm.core import method_takes, REQUIRED, create_class_params
-from malcolm.modules.builtin.vmetas import ChoiceMeta, StringMeta
-from .attributepart import AttributePart
+from malcolm.core import Part, PartRegistrar, ChoiceMeta, APartName, \
+ AMetaDescription, Widget
+from ..util import set_tags
-@method_takes(
- "name", StringMeta("Name of the created attribute"), REQUIRED,
- "description", StringMeta("Desc of created attribute"), REQUIRED)
-class GroupPart(AttributePart):
- """Part representing a GUI group other attributes attach to"""
- def __init__(self, params):
- params = create_class_params(
- super(GroupPart, self),
- widget="group", writeable=True, config=True, **params)
- super(GroupPart, self).__init__(params)
+class GroupPart(Part):
+ """Part representing a GUI group other Attributes attach to"""
+ def __init__(self, name, description):
+ # type: (APartName, AMetaDescription) -> None
+ super(GroupPart, self).__init__(name)
+ meta = ChoiceMeta(description, ["expanded", "collapsed"])
+ set_tags(meta, writeable=True, widget=Widget.GROUP)
+ self.attr = meta.create_attribute_model("expanded")
- def get_initial_value(self):
- return "expanded"
-
- def create_meta(self, description, tags):
- return ChoiceMeta(
- choices=["expanded", "collapsed"],
- description=description, tags=tags)
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model(self.name, self.attr, self.attr.set_value)
diff --git a/malcolm/modules/builtin/parts/iconpart.py b/malcolm/modules/builtin/parts/iconpart.py
index db4e137fd..1bae50742 100644
--- a/malcolm/modules/builtin/parts/iconpart.py
+++ b/malcolm/modules/builtin/parts/iconpart.py
@@ -1,26 +1,27 @@
-from malcolm.core import method_takes, create_class_params
-from malcolm.modules.builtin.vmetas import StringMeta
-from .attributepart import AttributePart
+from annotypes import Anno
+from malcolm.core import Part, PartRegistrar, StringMeta, Widget
+from ..util import set_tags
-@method_takes(
- "svg", StringMeta("If given, path to svg for initial value"), "")
-class IconPart(AttributePart):
+
+with Anno("If given, path to svg for initial value"):
+ Svg = str
+
+
+class IconPart(Part):
"""Part representing a the icon a GUI should display"""
- def __init__(self, params):
+ def __init__(self, svg=""):
+ # type: (Svg) -> None
+ super(IconPart, self).__init__("icon")
+ meta = StringMeta("SVG icon for the Block")
+ set_tags(meta, widget=Widget.ICON)
try:
- with open(params.svg) as f:
- self.svg_text = f.read()
+ with open(svg) as f:
+ svg_text = f.read()
except IOError:
- self.svg_text = ""
- params = create_class_params(
- super(IconPart, self), name="icon",
- description="SVG icon for Block", widget="icon", writeable=False,
- config=False)
- super(IconPart, self).__init__(params)
-
- def get_initial_value(self):
- return self.svg_text
+ svg_text = ""
+ self.attr = meta.create_attribute_model(svg_text)
- def create_meta(self, description, tags):
- return StringMeta(description=description, tags=tags)
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model(self.name, self.attr)
diff --git a/malcolm/modules/builtin/parts/labelpart.py b/malcolm/modules/builtin/parts/labelpart.py
deleted file mode 100644
index 31bf5fbe2..000000000
--- a/malcolm/modules/builtin/parts/labelpart.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from malcolm.core import method_takes, REQUIRED, create_class_params
-from malcolm.modules.builtin.vmetas import StringMeta
-from .attributepart import AttributePart
-
-
-@method_takes(
- "initialValue", StringMeta("Initial value of Block label"), REQUIRED,
- "group", StringMeta("If given, which GUI group should we attach to"), "",
-)
-class LabelPart(AttributePart):
- """Part representing a the icon a GUI should display"""
- def __init__(self, params):
- self.initial_value = params.initialValue
- params = create_class_params(
- super(LabelPart, self), name="label",
- description="Label for created block", widget="textinput",
- group=params.group, writeable=True, config=True)
- super(LabelPart, self).__init__(params)
-
- def get_initial_value(self):
- self.controller.set_label(self.initial_value)
- return self.initial_value
-
- def create_meta(self, description, tags):
- return StringMeta(description=description, tags=tags)
-
- def get_writeable_func(self):
- return self.set_label
-
- def set_label(self, value):
- with self.controller.changes_squashed:
- self.controller.set_label(value)
- self.attr.set_value(value)
diff --git a/malcolm/modules/builtin/parts/statefulchildpart.py b/malcolm/modules/builtin/parts/statefulchildpart.py
deleted file mode 100644
index cca052fa6..000000000
--- a/malcolm/modules/builtin/parts/statefulchildpart.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from malcolm.modules.builtin.controllers import ManagerController
-from .childpart import ChildPart
-
-
-ss = ManagerController.stateSet
-
-
-class StatefulChildPart(ChildPart):
- @ManagerController.Init
- def init(self, context):
- # Wait for a while until the child is ready as it changes the save state
- context.when_matches(
- [self.params.mri, "state", "value"], ss.READY,
- [ss.FAULT, ss.DISABLED])
- super(StatefulChildPart, self).init(context)
-
- @ManagerController.Disable
- def disable(self, context):
- child = context.block_view(self.params.mri)
- if child.disable.writeable:
- child.disable()
-
- @ManagerController.Reset
- def reset(self, context):
- child = context.block_view(self.params.mri)
- if child.reset.writeable:
- child.reset()
diff --git a/malcolm/modules/builtin/parts/stringpart.py b/malcolm/modules/builtin/parts/stringpart.py
index 9fb83b127..7072f2daf 100644
--- a/malcolm/modules/builtin/parts/stringpart.py
+++ b/malcolm/modules/builtin/parts/stringpart.py
@@ -1,15 +1,31 @@
-from malcolm.core import method_also_takes
-from malcolm.modules.builtin.vmetas import StringMeta
-from .attributepart import AttributePart
+from annotypes import Anno
+from malcolm.core import Part, PartRegistrar, StringMeta, APartName, \
+ AMetaDescription
+from ..util import set_tags, AWriteable, AConfig, AGroup, AWidget
-@method_also_takes(
- "initialValue", StringMeta("Initial value of attribute"), "",
-)
-class StringPart(AttributePart):
- def get_initial_value(self):
- return self.params.initialValue
+with Anno("Initial value of the created attribute"):
+ Value = str
- def create_meta(self, description, tags):
- return StringMeta(description=description, tags=tags)
+class StringPart(Part):
+ """Create a single string Attribute on the Block"""
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ writeable=False, # type: AWriteable
+ config=1, # type: AConfig
+ group=None, # type: AGroup
+ widget=None, # type: AWidget
+ value="", # type: Value
+ ):
+ # type: (...) -> None
+ super(StringPart, self).__init__(name)
+ meta = StringMeta(description)
+ set_tags(meta, writeable, config, group, widget)
+ self.attr = meta.create_attribute_model(value)
+ self.writeable_func = self.attr.set_value if writeable else None
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ registrar.add_attribute_model(self.name, self.attr, self.writeable_func)
diff --git a/malcolm/modules/builtin/parts/titlepart.py b/malcolm/modules/builtin/parts/titlepart.py
new file mode 100644
index 000000000..8a1be465e
--- /dev/null
+++ b/malcolm/modules/builtin/parts/titlepart.py
@@ -0,0 +1,31 @@
+from annotypes import Anno
+
+from malcolm.core import Part, PartRegistrar, Widget, StringMeta
+from ..infos import TitleInfo
+from ..util import set_tags
+
+
+with Anno("Initial value of Block label"):
+ Value = str
+
+
+class TitlePart(Part):
+ """Part representing a the title of the Block a GUI should display"""
+ def __init__(self, value):
+ # type: (Value) -> None
+ super(TitlePart, self).__init__("label")
+ meta = StringMeta("Label for the block")
+ set_tags(meta, writeable=True, widget=Widget.TITLE)
+ self.attr = meta.create_attribute_model()
+ self.registrar = None # type: PartRegistrar
+ self.initial_value = value
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.registrar = registrar
+ registrar.add_attribute_model(self.name, self.attr, self.set_label)
+ self.set_label(self.initial_value)
+
+ def set_label(self, value):
+ self.attr.set_value(value)
+ self.registrar.report(TitleInfo(value))
diff --git a/malcolm/modules/builtin/util.py b/malcolm/modules/builtin/util.py
new file mode 100644
index 000000000..73a156a8f
--- /dev/null
+++ b/malcolm/modules/builtin/util.py
@@ -0,0 +1,122 @@
+from annotypes import Anno, Array, Union, Sequence
+
+from malcolm.core import VMeta, Widget, group_tag, config_tag, Port, Table, \
+ StateSet
+
+with Anno("Is the attribute writeable?"):
+ AWriteable = bool
+with Anno("If writeable, which iteration should this field be loaded/saved in?"
+ " 0 means do not restore"):
+ AConfig = int
+with Anno("If given, which GUI group should we attach to"):
+ AGroup = str
+with Anno("If given, use this widget instead of the default"):
+ AWidget = Widget
+with Anno("If given, mark this as an inport of the given type"):
+ AInPort = Port
+
+
+def set_tags(meta, # type: VMeta
+ writeable=False, # type: AWriteable
+ config=1, # type: AConfig
+ group=None, # type: AGroup
+ widget=None, # type: AWidget
+ inport=None, # type: AInPort
+ ):
+ # type: (...) -> None
+ tags = []
+ meta.set_writeable(writeable)
+ if widget is None:
+ widget = meta.default_widget()
+ if widget is not Widget.NONE:
+ tags.append(widget.tag())
+ if config and writeable:
+ # We only allow config tags on writeable functions
+ tags.append(config_tag(config))
+ if group:
+ # If we have a group then add the tag
+ tags.append(group_tag(group))
+ if inport:
+ tags.append(inport.inport_tag(disconnected_value=""))
+ meta.set_tags(tags)
+
+
+with Anno("Names of the layout parts"):
+ ANameArray = Array[str]
+with Anno("Malcolm full names of child blocks"):
+ AMriArray = Array[str]
+with Anno("X Coordinates of child blocks"):
+ AXArray = Array[float]
+with Anno("Y Coordinates of child blocks"):
+ AYArray = Array[float]
+with Anno("Whether child blocks are visible"):
+ AVisibleArray = Array[bool]
+UNameArray = Union[ANameArray, Sequence[str]]
+UMriArray = Union[AMriArray, Sequence[str]]
+UXArray = Union[AXArray, Sequence[float]]
+UYArray = Union[AYArray, Sequence[float]]
+UVisibleArray = Union[AVisibleArray, Sequence[bool]]
+
+
+class LayoutTable(Table):
+ def __init__(self, name, mri, x, y, visible):
+ # type: (UNameArray, UMriArray, UXArray, UYArray, UVisibleArray) -> None
+ self.name = ANameArray(name)
+ self.mri = AMriArray(mri)
+ self.x = AXArray(x)
+ self.y = AYArray(y)
+ self.visible = AVisibleArray(visible)
+
+
+with Anno("Name of the block.field to export"):
+ ASourceNameArray = Array[str]
+with Anno("Name of the field to export as"):
+ AExportNameArray = Array[str]
+USourceNameArray = Union[ASourceNameArray, Sequence[str]]
+UExportNameArray = Union[AExportNameArray, Sequence[str]]
+
+
+class ExportTable(Table):
+ def __init__(self, source, export):
+ # type: (USourceNameArray, UExportNameArray) -> None
+ self.source = ASourceNameArray(source)
+ self.export = AExportNameArray(export)
+
+
+class StatefulStates(StateSet):
+ RESETTING = "Resetting"
+ DISABLED = "Disabled"
+ DISABLING = "Disabling"
+ FAULT = "Fault"
+ READY = "Ready"
+
+ def __init__(self):
+ super(StatefulStates, self).__init__()
+ self.create_block_transitions()
+ self.create_error_disable_transitions()
+
+ def create_block_transitions(self):
+ self.set_allowed(self.RESETTING, self.READY)
+
+ def create_error_disable_transitions(self):
+ block_states = self.possible_states[:]
+
+ # Set transitions for standard states
+ for state in block_states:
+ self.set_allowed(state, self.FAULT)
+ self.set_allowed(state, self.DISABLING)
+ self.set_allowed(self.FAULT, self.RESETTING, self.DISABLING)
+ self.set_allowed(self.DISABLING, self.FAULT, self.DISABLED)
+ self.set_allowed(self.DISABLED, self.RESETTING)
+
+
+class ManagerStates(StatefulStates):
+ SAVING = "Saving"
+ LOADING = "Loading"
+
+ def create_block_transitions(self):
+ super(ManagerStates, self).create_block_transitions()
+ self.set_allowed(self.READY, self.SAVING)
+ self.set_allowed(self.SAVING, self.READY)
+ self.set_allowed(self.READY, self.LOADING)
+ self.set_allowed(self.LOADING, self.READY)
\ No newline at end of file
diff --git a/malcolm/modules/builtin/vmetas/__init__.py b/malcolm/modules/builtin/vmetas/__init__.py
deleted file mode 100644
index 18510d7a4..000000000
--- a/malcolm/modules/builtin/vmetas/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from .booleanarraymeta import BooleanArrayMeta
-from .booleanmeta import BooleanMeta
-from .choicearraymeta import ChoiceArrayMeta
-from .choicemeta import ChoiceMeta
-from .numberarraymeta import NumberArrayMeta
-from .numbermeta import NumberMeta
-from .stringarraymeta import StringArrayMeta
-from .stringmeta import StringMeta
-from .tablemeta import TableMeta
-
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
diff --git a/malcolm/modules/builtin/vmetas/booleanarraymeta.py b/malcolm/modules/builtin/vmetas/booleanarraymeta.py
deleted file mode 100644
index b45bc1a95..000000000
--- a/malcolm/modules/builtin/vmetas/booleanarraymeta.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import numpy as np
-
-from malcolm.core import Serializable, VArrayMeta
-from .numberarraymeta import validate_array
-
-
-@Serializable.register_subclass("malcolm:core/BooleanArrayMeta:1.0")
-class BooleanArrayMeta(VArrayMeta):
- """Meta object containing information for a boolean array"""
-
- def validate(self, value):
- """Cast value to boolean array and return it
-
- Args:
- value: Value to validate
-
- Returns:
- `numpy.ndarray` Value as a boolean numpy array
- """
- return validate_array(value, np.bool_)
-
- def doc_type_string(self):
- return "[bool]"
diff --git a/malcolm/modules/builtin/vmetas/booleanmeta.py b/malcolm/modules/builtin/vmetas/booleanmeta.py
deleted file mode 100644
index c2defae12..000000000
--- a/malcolm/modules/builtin/vmetas/booleanmeta.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from malcolm.core import Serializable, VMeta
-
-
-@Serializable.register_subclass("malcolm:core/BooleanMeta:1.0")
-class BooleanMeta(VMeta):
- """Meta object containing information for a boolean"""
-
- def validate(self, value):
- """Cast value to boolean and return it
-
- Args:
- value: Value to validate
-
- Returns:
- bool: Value as a boolean
- """
- return bool(value)
-
- def doc_type_string(self):
- return "bool"
diff --git a/malcolm/modules/builtin/vmetas/choicearraymeta.py b/malcolm/modules/builtin/vmetas/choicearraymeta.py
deleted file mode 100644
index b1a14b3c1..000000000
--- a/malcolm/modules/builtin/vmetas/choicearraymeta.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from malcolm.compat import str_
-from malcolm.core import Serializable, StringArray, VArrayMeta
-from .choicemeta import ChoiceMeta
-
-
-@Serializable.register_subclass("malcolm:core/ChoiceArrayMeta:1.0")
-class ChoiceArrayMeta(ChoiceMeta, VArrayMeta):
- """Meta object containing information for a choice array"""
-
- def validate(self, value):
- """Verify value can be iterated and cast elements to choices
-
- Args:
- value (list): Value to be validated
-
- Returns:
- StringArray: The validated value
- """
- if value is None:
- return StringArray()
- elif isinstance(value, str_):
- raise ValueError("Expected iterable of strings, got %r" % value)
- else:
- for i, choice in enumerate(value):
- if choice not in self.choices:
- raise ValueError("%s is not a valid value for element %s" %
- (choice, i))
- return StringArray(value)
-
- def doc_type_string(self):
- return "[%s]" % super(ChoiceArrayMeta, self).doc_type_string()
diff --git a/malcolm/modules/builtin/vmetas/choicemeta.py b/malcolm/modules/builtin/vmetas/choicemeta.py
deleted file mode 100644
index 5a6daa7da..000000000
--- a/malcolm/modules/builtin/vmetas/choicemeta.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from malcolm.compat import str_
-from malcolm.core import Serializable, deserialize_object, VMeta, StringArray
-
-
-@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
-class ChoiceMeta(VMeta):
- """Meta object containing information for a enum"""
-
- endpoints = ["description", "choices", "tags", "writeable", "label"]
-
- def __init__(self, description="", choices=(), tags=(), writeable=False,
- label=""):
- super(ChoiceMeta, self).__init__(description, tags, writeable, label)
- self.choices = self.set_choices(choices)
-
- def set_choices(self, choices):
- """Set the choices list"""
- choices = StringArray(deserialize_object(c, str_) for c in choices)
- # TODO: what if the value is no longer in the list?
- return self.set_endpoint_data("choices", choices)
-
- def validate(self, value):
- """Check if the value is valid returns it
-
- Args:
- value: Value to validate
-
- Returns:
- str: Value if it is valid
- Raises:
- exceptions.ValueError: If value not valid
- """
- if value is None:
- if self.choices:
- return self.choices[0]
- else:
- return ""
- elif value in self.choices:
- return value
- elif isinstance(value, int) and value < len(self.choices):
- return self.choices[value]
- else:
- raise ValueError(
- "%s is not a valid value in %s" % (value, self.choices))
-
- def doc_type_string(self):
- return " | ".join([repr(x) for x in self.choices])
diff --git a/malcolm/modules/builtin/vmetas/numberarraymeta.py b/malcolm/modules/builtin/vmetas/numberarraymeta.py
deleted file mode 100644
index 4d86d2b6b..000000000
--- a/malcolm/modules/builtin/vmetas/numberarraymeta.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import numpy as np
-
-from malcolm.core import Serializable, VArrayMeta
-from .numbermeta import NumberMeta
-
-
-def validate_array(value, dtype):
- if value is None:
- # Make an empty array
- cast = np.array([], dtype=dtype)
- elif type(value) == list:
- # Cast to numpy array
- cast = np.array(value, dtype=dtype)
- else:
- # Check we are given a numpy array
- if not hasattr(value, 'dtype'):
- raise TypeError("Expected numpy array or list, got %s"
- % type(value))
- if value.dtype != np.dtype(dtype):
- raise TypeError("Expected %s, got %s" %
- (np.dtype(dtype), value.dtype))
- cast = value
- cast.setflags(write=False)
- return cast
-
-
-@Serializable.register_subclass("malcolm:core/NumberArrayMeta:1.0")
-class NumberArrayMeta(NumberMeta, VArrayMeta):
- """Meta object containing information for an array of numerical values"""
- def validate(self, value):
- return validate_array(value, self.dtype)
-
- def doc_type_string(self):
- return "[%s]" % self.dtype
diff --git a/malcolm/modules/builtin/vmetas/numbermeta.py b/malcolm/modules/builtin/vmetas/numbermeta.py
deleted file mode 100644
index b9069bda6..000000000
--- a/malcolm/modules/builtin/vmetas/numbermeta.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import numpy as np
-
-from malcolm.core import Serializable, VMeta
-
-
-@Serializable.register_subclass("malcolm:core/NumberMeta:1.0")
-class NumberMeta(VMeta):
- """Meta object containing information for a numerical value"""
-
- endpoints = ["dtype", "description", "tags", "writeable", "label"]
- _dtypes = ["int8", "uint8", "int16", "uint16", "int32", "uint32", "int64",
- "uint64", "float32", "float64"]
-
- def __init__(self, dtype="float64", description="", tags=(),
- writeable=False, label=""):
- super(NumberMeta, self).__init__(description, tags, writeable, label)
- # like np.float64
- self._np_dtype = None
- # like "float64"
- self.dtype = self.set_dtype(dtype)
-
- def set_dtype(self, dtype):
- """Set the dtype string"""
- assert dtype in self._dtypes, \
- "Expected dtype to be in %s, got %s" % (self._dtypes, dtype)
- self._np_dtype = getattr(np, dtype)
- return self.set_endpoint_data("dtype", dtype)
-
- def validate(self, value):
- if value is None:
- value = 0
- cast = self._np_dtype(value)
- return cast
-
- def doc_type_string(self):
- return "%s" % self.dtype
diff --git a/malcolm/modules/builtin/vmetas/stringarraymeta.py b/malcolm/modules/builtin/vmetas/stringarraymeta.py
deleted file mode 100644
index 18de1cbd6..000000000
--- a/malcolm/modules/builtin/vmetas/stringarraymeta.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from malcolm.core import StringArray, Serializable, VArrayMeta
-
-
-@Serializable.register_subclass("malcolm:core/StringArrayMeta:1.0")
-class StringArrayMeta(VArrayMeta):
- """Meta object containing information for a string array"""
-
- def validate(self, value):
- """Verify value can be iterated and cast elements to strings
-
- Args:
- value (list): value to be validated
-
- Returns:
- StringArray: the validated value
- """
- if value is None:
- return StringArray()
- else:
- return StringArray(value)
-
- def doc_type_string(self):
- return "[str]"
diff --git a/malcolm/modules/builtin/vmetas/stringmeta.py b/malcolm/modules/builtin/vmetas/stringmeta.py
deleted file mode 100644
index 9a80a4296..000000000
--- a/malcolm/modules/builtin/vmetas/stringmeta.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from malcolm.core import Serializable, VMeta
-
-
-@Serializable.register_subclass("malcolm:core/StringMeta:1.0")
-class StringMeta(VMeta):
- """Meta object containing information for a string"""
-
- def validate(self, value):
- """Check if the value is None and returns "", else casts value to a
- string and returns it
-
- Args:
- value: Value to validate
-
- Returns:
- str: Value as a string [If value is not None]
- """
-
- if value is None:
- return ""
- else:
- return str(value)
-
- def doc_type_string(self):
- return "str"
diff --git a/malcolm/modules/builtin/vmetas/tablemeta.py b/malcolm/modules/builtin/vmetas/tablemeta.py
deleted file mode 100644
index 50eee3c08..000000000
--- a/malcolm/modules/builtin/vmetas/tablemeta.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from malcolm.compat import str_, OrderedDict
-from malcolm.core import NTTable, Serializable, deserialize_object, Table, \
- VMeta, VArrayMeta
-
-
-@Serializable.register_subclass("malcolm:core/TableMeta:1.0")
-class TableMeta(VMeta):
-
- endpoints = ["elements", "description", "tags", "writeable", "label"]
- attribute_class = NTTable
-
- def __init__(self, description="", tags=(), writeable=False, label="",
- elements=None):
- super(TableMeta, self).__init__(description, tags, writeable, label)
- if elements is None:
- elements = {}
- self.elements = self.set_elements(elements)
-
- def set_elements(self, elements):
- """Set the elements dict from a serialized dict"""
- deserialized = OrderedDict()
- for k, v in elements.items():
- if k != "typeid":
- k = deserialize_object(k, str_)
- deserialized[k] = deserialize_object(v, VArrayMeta)
- return self.set_endpoint_data("elements", deserialized)
-
- def validate(self, value):
- if value is None:
- value = {}
- if isinstance(value, Table):
- if self != value.meta:
- # Make a table using ourself as the meta
- value = value.to_dict()
- value.pop("typeid", None)
- value = Table(self, value)
- else:
- # Should be a dict
- value = Table(self, value)
- # Check column lengths
- value.verify_column_lengths()
- return value
-
- def doc_type_string(self):
- return "`Table`"
diff --git a/malcolm/modules/ca/__init__.py b/malcolm/modules/ca/__init__.py
index e69de29bb..d8bce8cb6 100644
--- a/malcolm/modules/ca/__init__.py
+++ b/malcolm/modules/ca/__init__.py
@@ -0,0 +1 @@
+from . import parts, util
diff --git a/malcolm/modules/ca/docs/parts_api.rst b/malcolm/modules/ca/docs/parts_api.rst
index d7949c5f5..289f15e59 100644
--- a/malcolm/modules/ca/docs/parts_api.rst
+++ b/malcolm/modules/ca/docs/parts_api.rst
@@ -1,35 +1,19 @@
parts
=====
-.. module:: malcolm.modules.ca.parts
-
-.. autoclass:: CAActionPart
+.. autoclass:: malcolm.modules.ca.parts.CAActionPart
:members:
Commonly a group of pvs are used to represent a method call like this::
caput(pv, wait=True)
- assert caget(statusPv) == goodStatus
-
- This `Part` wraps up this design pattern as a Malcolm method
-
-.. autoclass:: CABooleanPart
- :members:
+ assert caget(status_pv) == good_status, \
+ "Action failed with message: %s" % caget(message_pv)
-.. autoclass:: CACharArrayPart
- :members:
+ This `Part` wraps up this design pattern as a Malcolm `Method`
-.. autoclass:: CAChoicePart
- :members:
-.. autoclass:: CADoubleArrayPart
+.. automodule:: malcolm.modules.ca.parts
:members:
+ :exclude-members: CAActionPart
-.. autoclass:: CADoublePart
- :members:
-
-.. autoclass:: CALongPart
- :members:
-
-.. autoclass:: CAStringPart
- :members:
diff --git a/malcolm/modules/ca/parts/__init__.py b/malcolm/modules/ca/parts/__init__.py
index f08bb7ac7..cc929f8f0 100644
--- a/malcolm/modules/ca/parts/__init__.py
+++ b/malcolm/modules/ca/parts/__init__.py
@@ -8,5 +8,7 @@
from .calongpart import CALongPart
from .castringpart import CAStringPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/ca/parts/caactionpart.py b/malcolm/modules/ca/parts/caactionpart.py
index b49ef1177..13a7b0d28 100644
--- a/malcolm/modules/ca/parts/caactionpart.py
+++ b/malcolm/modules/ca/parts/caactionpart.py
@@ -1,65 +1,79 @@
-from malcolm.modules.builtin.controllers import StatefulController
-from malcolm.core import Part, method_takes, REQUIRED, MethodModel
-from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta, BooleanMeta
-from .catoolshelper import CaToolsHelper
+from annotypes import Anno
+
+from malcolm.core import Part, PartRegistrar
+from malcolm.modules import builtin
+from ..util import CaToolsHelper, APartName, AMetaDescription, APv
+
+with Anno("Status pv to see if successful"):
+ StatusPv = str
+with Anno("Good value for status pv"):
+ GoodStatus = str
+with Anno("PV containing error message if unsuccessful"):
+ MessagePv = str
+with Anno("Value to write to pv when method called"):
+ Value = int
+with Anno("Wait for caput callback?"):
+ Wait = bool
-@method_takes(
- "name", StringMeta("Name of the created method"), REQUIRED,
- "description", StringMeta("desc of created method"), REQUIRED,
- "pv", StringMeta("full pv to write to when method called"), REQUIRED,
- "statusPv", StringMeta("Status pv to see if successful"), "",
- "goodStatus", StringMeta("Good value for status pv"), "",
- "messagePv", StringMeta("PV containing error message if unsuccessful"), "",
- "value", NumberMeta("int32", "value to write to pv when method called"), 1,
- "wait", BooleanMeta("Wait for caput callback?"), True)
class CAActionPart(Part):
"""Group a number of PVs together that represent a method like acquire()"""
- def __init__(self, params):
- """
- Args:
- params (Map): The params to initialize with
- """
- self.method = None
- self.params = params
+
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ status_pv="", # type: StatusPv
+ good_status="", # type: GoodStatus
+ message_pv="", # type: MessagePv
+ value=1, # type: Value
+ wait=True, # type: Wait
+ ):
+ # type: (...) -> None
+ super(CAActionPart, self).__init__(name)
self.catools = CaToolsHelper.instance()
- super(CAActionPart, self).__init__(params.name)
+ self.description = description
+ self.pv = pv
+ self.status_pv = status_pv
+ self.good_status = good_status
+ self.message_pv = message_pv
+ self.value = value
+ self.wait = wait
+ # Hooks
+ self.register_hooked((builtin.hooks.InitHook,
+ builtin.hooks.ResetHook), self.connect_pvs)
- def create_method_models(self):
- # Method instance
- self.method = MethodModel(self.params.description)
- # TODO: set widget tag?
- yield self.params.name, self.method, self.caput
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(CAActionPart, self).setup(registrar)
+ registrar.add_method_model(self.caput, self.name, self.description)
- @StatefulController.Reset
- def connect_pvs(self, _):
- pvs = [self.params.pv]
- if self.params.statusPv:
- pvs.append(self.params.statusPv)
+ def connect_pvs(self):
+ pvs = [self.pv]
+ if self.status_pv:
+ pvs.append(self.status_pv)
+ if self.message_pv:
+ pvs.append(self.message_pv)
ca_values = self.catools.caget(pvs)
# check connection is ok
for i, v in enumerate(ca_values):
assert v.ok, "CA connect failed with %s" % v.state_strings[v.state]
def caput(self):
- if self.params.wait:
- cmd = "caput -c -w 1000"
- else:
- cmd = "caput"
- self.log.info("%s %s %s", cmd, self.params.pv, self.params.value)
+ self.log.info("caput %s %s", self.pv, self.value)
self.catools.caput(
- self.params.pv, self.params.value,
- wait=self.params.wait, timeout=None)
- if self.params.statusPv:
+ self.pv, self.value,
+ wait=self.wait, timeout=None)
+ if self.status_pv:
status = self.catools.caget(
- self.params.statusPv,
+ self.status_pv,
datatype=self.catools.DBR_STRING)
- if self.params.messagePv:
+ if self.message_pv:
message = " %s:" % self.catools.caget(
- self.params.messagePv,
+ self.message_pv,
datatype=self.catools.DBR_CHAR_STR)
else:
message = ""
- assert status == self.params.goodStatus, \
- "Status %s:%s while performing '%s %s %s'" % (
- status, message, cmd, self.params.pv, self.params.value)
+ assert status == self.good_status, \
+ "Status %s:%s while performing 'caput %s %s'" % (
+ status, message, self.pv, self.value)
diff --git a/malcolm/modules/ca/parts/caarraypart.py b/malcolm/modules/ca/parts/caarraypart.py
deleted file mode 100644
index 6d326cf3d..000000000
--- a/malcolm/modules/ca/parts/caarraypart.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from .capart import CAPart
-
-
-class CAArrayPart(CAPart):
- """Abstract class with better logging for CAParts with array types"""
-
- def format_caput_value(self, value):
- l = len(value)
- v = " ".join(str(x) for x in value)
- self.log.info("caput -c -w %s %s -a %d %s",
- self.params.timeout, self.params.pv, l, v)
- return value
diff --git a/malcolm/modules/ca/parts/cabooleanpart.py b/malcolm/modules/ca/parts/cabooleanpart.py
index 0f11467f0..052a686f6 100644
--- a/malcolm/modules/ca/parts/cabooleanpart.py
+++ b/malcolm/modules/ca/parts/cabooleanpart.py
@@ -1,16 +1,34 @@
-from malcolm.modules.builtin.vmetas import BooleanMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, BooleanMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CABooleanPart(CAPart):
+class CABooleanPart(Part):
"""Defines a boolean `Attribute` that talks to a DBR_LONG longout PV"""
- def create_meta(self, description, tags):
- return BooleanMeta(description=description, tags=tags)
-
- def get_datatype(self):
- return self.catools.DBR_LONG
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CABooleanPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ BooleanMeta(description), catools.DBR_LONG, pv, rbv, rbv_suff,
+ min_delta, timeout, inport, widget, group, config)
def caput(self, value):
- value = int(value)
- super(CABooleanPart, self).caput(value)
+ self.caa.caput(int(value))
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked, self.caput)
diff --git a/malcolm/modules/ca/parts/cachararraypart.py b/malcolm/modules/ca/parts/cachararraypart.py
index 1b4af2321..26520d02b 100644
--- a/malcolm/modules/ca/parts/cachararraypart.py
+++ b/malcolm/modules/ca/parts/cachararraypart.py
@@ -1,17 +1,31 @@
-from malcolm.modules.builtin.vmetas import StringMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, StringMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CACharArrayPart(CAPart):
+class CACharArrayPart(Part):
"""Defines a string `Attribute` that talks to a DBR_CHAR_STR waveform PV"""
- def create_meta(self, description, tags):
- return StringMeta(description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CACharArrayPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ StringMeta(description), catools.DBR_CHAR_STR, pv, rbv, rbv_suff,
+ min_delta, timeout, inport, widget, group, config)
- def get_datatype(self):
- return self.catools.DBR_CHAR_STR
-
- def format_caput_value(self, value):
- self.log.info("caput -c -w %s -S %s %r",
- self.params.timeout, self.params.pv, value)
- return value
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked)
diff --git a/malcolm/modules/ca/parts/cachoicepart.py b/malcolm/modules/ca/parts/cachoicepart.py
index 0e89092d5..f75a5cc5a 100644
--- a/malcolm/modules/ca/parts/cachoicepart.py
+++ b/malcolm/modules/ca/parts/cachoicepart.py
@@ -1,34 +1,41 @@
-from malcolm.compat import long_
-from malcolm.modules.builtin.vmetas import ChoiceMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, ChoiceMeta
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CAChoicePart(CAPart):
- """Defines a string `Attribute` that talks to a DBR_ENUM mbbo PV"""
+class CAChoicePart(Part):
+ """Defines a choice `Attribute` that talks to a DBR_ENUM mbbo PV"""
- def create_meta(self, description, tags):
- return ChoiceMeta(description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CAChoicePart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.meta = ChoiceMeta(description)
+ self.caa = CAAttribute(
+ self.meta, catools.DBR_ENUM, pv, rbv, rbv_suff, min_delta, timeout,
+ inport, widget, group, config, self.on_connect)
- def get_datatype(self):
- return self.catools.DBR_ENUM
-
- def set_initial_metadata(self, value):
- self.attr.meta.set_choices(value.enums)
+ def on_connect(self, value):
+ self.meta.set_choices(value.enums)
def caput(self, value):
- if isinstance(value, int) or isinstance(value, long_):
- # Already have the index, so validate it.
- if value < len(self.attr.meta.choices):
- pass
- else:
- raise ValueError("Provided index %d exceeds list length %d"
- % (value, len(self.attr.meta.choices)))
- else:
- # Validate that value is in the choices list; if so, get its index
- if value in self.attr.meta.choices:
- value = self.attr.meta.choices.index(value)
- else:
- raise ValueError("Provided value \"%s\" invalid selection from"
- " choices [%s]" %
- (value, ", ".join(self.attr.meta.choices)))
- super(CAChoicePart, self).caput(value)
+ # Turn the string value int the index of the choice list. We are
+ # passed a validated value, so it is guaranteed to be in choices
+ value = self.meta.choices.index(value)
+ self.caa.caput(value)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked, self.caput)
diff --git a/malcolm/modules/ca/parts/cadoublearraypart.py b/malcolm/modules/ca/parts/cadoublearraypart.py
index 458332f9f..a08fe6a81 100644
--- a/malcolm/modules/ca/parts/cadoublearraypart.py
+++ b/malcolm/modules/ca/parts/cadoublearraypart.py
@@ -1,12 +1,40 @@
-from malcolm.modules.builtin.vmetas import NumberArrayMeta
-from .caarraypart import CAArrayPart
+from annotypes import Array
+from malcolm.core import Part, PartRegistrar, NumberArrayMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CADoubleArrayPart(CAArrayPart):
+
+class CADoubleArrayPart(Part):
"""Defines a float64[] `Attribute` that talks to a DBR_DOUBLE waveform PV"""
- def create_meta(self, description, tags):
- return NumberArrayMeta("float64", description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CADoubleArrayPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ NumberArrayMeta("float64", description), catools.DBR_DOUBLE, pv,
+ rbv, rbv_suff, min_delta, timeout, inport, widget, group, config)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked, self.caput)
- def get_datatype(self):
- return self.catools.DBR_DOUBLE
+ def caput(self, value):
+ if isinstance(value, Array):
+ # Unwrap the array before passing to numpy in case it was already
+ # a numpy array
+ value = value.seq
+ self.caa.caput(value)
diff --git a/malcolm/modules/ca/parts/cadoublepart.py b/malcolm/modules/ca/parts/cadoublepart.py
index 3a842dfd5..3b01794ea 100644
--- a/malcolm/modules/ca/parts/cadoublepart.py
+++ b/malcolm/modules/ca/parts/cadoublepart.py
@@ -1,12 +1,31 @@
-from malcolm.modules.builtin.vmetas import NumberMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, NumberMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CADoublePart(CAPart):
+class CADoublePart(Part):
"""Defines a float64 `Attribute` that talks to a DBR_DOUBLE ao PV"""
- def create_meta(self, description, tags):
- return NumberMeta("float64", description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CADoublePart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ NumberMeta("float64", description), catools.DBR_DOUBLE, pv, rbv,
+ rbv_suff, min_delta, timeout, inport, widget, group, config)
- def get_datatype(self):
- return self.catools.DBR_DOUBLE
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked)
diff --git a/malcolm/modules/ca/parts/calongarraypart.py b/malcolm/modules/ca/parts/calongarraypart.py
index bdf7d2336..be5faa9b0 100644
--- a/malcolm/modules/ca/parts/calongarraypart.py
+++ b/malcolm/modules/ca/parts/calongarraypart.py
@@ -1,12 +1,40 @@
-from malcolm.modules.builtin.vmetas import NumberArrayMeta
-from .caarraypart import CAArrayPart
+from annotypes import Array
+from malcolm.core import Part, PartRegistrar, NumberArrayMeta
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CALongArrayPart(CAArrayPart):
- """Defines a float64[] `Attribute` that talks to a DBR_LONG waveform PV"""
- def create_meta(self, description, tags):
- return NumberArrayMeta("int32", description=description, tags=tags)
+class CALongArrayPart(Part):
+ """Defines an int32[] `Attribute` that talks to a DBR_LONG waveform PV"""
- def get_datatype(self):
- return self.catools.DBR_LONG
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CALongArrayPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ NumberArrayMeta("int32", description), catools.DBR_LONG, pv, rbv,
+ rbv_suff, min_delta, timeout, inport, widget, group, config)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked, self.caput)
+
+ def caput(self, value):
+ if isinstance(value, Array):
+ # Unwrap the array before passing to numpy in case it was already
+ # a numpy array
+ value = value.seq
+ self.caa.caput(value)
diff --git a/malcolm/modules/ca/parts/calongpart.py b/malcolm/modules/ca/parts/calongpart.py
index 341f7eafc..ab3f42357 100644
--- a/malcolm/modules/ca/parts/calongpart.py
+++ b/malcolm/modules/ca/parts/calongpart.py
@@ -1,12 +1,32 @@
-from malcolm.modules.builtin.vmetas import NumberMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, NumberMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CALongPart(CAPart):
+class CALongPart(Part):
"""Defines an int32 `Attribute` that talks to a DBR_LONG longout PV"""
- def create_meta(self, description, tags):
- return NumberMeta("int32", description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CALongPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ NumberMeta("int32", description), catools.DBR_LONG, pv, rbv,
+ rbv_suff, min_delta, timeout, inport, widget, group, config)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked)
- def get_datatype(self):
- return self.catools.DBR_LONG
diff --git a/malcolm/modules/ca/parts/capart.py b/malcolm/modules/ca/parts/capart.py
deleted file mode 100644
index a20ba1dba..000000000
--- a/malcolm/modules/ca/parts/capart.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import time
-
-from malcolm.modules.builtin.controllers import StatefulController
-from malcolm.core import method_takes, REQUIRED, Alarm, AlarmStatus, TimeStamp
-from malcolm.modules.builtin.parts.attributepart import AttributePart
-from malcolm.tags import widget_types, inport, port_types
-from malcolm.modules.builtin.vmetas import StringMeta, ChoiceMeta, \
- BooleanMeta, NumberMeta
-from .catoolshelper import CaToolsHelper
-
-
-@method_takes(
- "name", StringMeta("Name of the created attribute"), REQUIRED,
- "description", StringMeta("Description of created attribute"), REQUIRED,
- "pv", StringMeta("Full pv of demand and default for rbv"), "",
- "rbv", StringMeta("Override for rbv"), "",
- "rbvSuff", StringMeta("Set rbv to pv + rbv_suff"), "",
- "widget", ChoiceMeta("Widget type", [""] + widget_types), "",
- "inport", ChoiceMeta("Inport type", [""] + port_types), "",
- "group", StringMeta("If given, which GUI group should we attach to"), "",
- "config", BooleanMeta("Should this field be loaded/saved?"), True,
- "minDelta", NumberMeta(
- "float64", "Minimum time between attribute updates in seconds"), 0.05,
- "timeout", NumberMeta(
- "float64", "Max time to wait for puts to complete, <0 is forever"), 5.0)
-class CAPart(AttributePart):
- """Abstract class for exposing PVs as `Attribute` instances"""
- def __init__(self, params):
- if not params.rbv and not params.pv:
- raise ValueError('Must pass pv or rbv')
- if not params.rbv:
- if params.rbvSuff:
- params.rbv = params.pv + params.rbvSuff
- else:
- params.rbv = params.pv
- # Camonitor subscription
- self.monitor = None
- self.catools = CaToolsHelper.instance()
- self._update_after = 0
- super(CAPart, self).__init__(params)
-
- def is_writeable(self):
- return bool(self.params.pv)
-
- def get_writeable_func(self):
- return self.caput
-
- def create_tags(self):
- tags = super(CAPart, self).create_tags()
- if self.params.inport:
- tags.append(inport(self.params.inport, ""))
- return tags
-
- def get_datatype(self):
- raise NotImplementedError
-
- def set_initial_metadata(self, value):
- """Implement this to set some metadata on the attribute from the initial
- CA connect before the first update_value()"""
- pass
-
- @StatefulController.Init
- @StatefulController.Reset
- def reset(self, context=None):
- # release old monitor
- self.close_monitor()
- # make the connection in cothread's thread, use caget for initial value
- pvs = [self.params.rbv]
- if self.params.pv:
- pvs.append(self.params.pv)
- ca_values = self.catools.caget(
- pvs, format=self.catools.FORMAT_CTRL, datatype=self.get_datatype())
- # check connection is ok
- for i, v in enumerate(ca_values):
- assert v.ok, "CA connect failed with %s" % v.state_strings[v.state]
- self.set_initial_metadata(ca_values[0])
- self.update_value(ca_values[0])
- # now setup monitor on rbv
- self.monitor = self.catools.camonitor(
- self.params.rbv, self.monitor_callback,
- format=self.catools.FORMAT_TIME, datatype=self.get_datatype(),
- notify_disconnect=True)
-
- @StatefulController.Disable
- def close_monitor(self, context=None):
- if self.monitor is not None:
- self.monitor.close()
- self.monitor = None
-
- def format_caput_value(self, value):
- self.log.info("caput -c -w %s %s %s",
- self.params.timeout, self.params.pv, value)
- return value
-
- def caput(self, value):
- value = self.format_caput_value(value)
- if self.params.timeout < 0:
- timeout = None
- else:
- timeout = self.params.timeout
- self.catools.caput(
- self.params.pv, value, wait=True, timeout=timeout,
- datatype=self.get_datatype())
- # now do a caget
- value = self.catools.caget(
- self.params.rbv,
- format=self.catools.FORMAT_TIME, datatype=self.get_datatype())
- self.update_value(value)
-
- def monitor_callback(self, value):
- now = time.time()
- delta = now - self._update_after
- self.update_value(value)
- # See how long to sleep for to make sure we don't get more than one
- # update at < minDelta interval
- if delta > self.params.minDelta:
- # If we were more than minDelta late then reset next update time
- self._update_after = now + self.params.minDelta
- elif delta < 0:
- # If delta is less than zero sleep for a bit
- self.catools.cothread.Sleep(-delta)
- else:
- # If we were within the delta window just increment next update
- self._update_after += self.params.minDelta
-
- def update_value(self, value):
- if not value.ok:
- self.attr.set_value(None, alarm=Alarm.invalid("PV disconnected"))
- else:
- if value.severity:
- alarm = Alarm(severity=value.severity,
- status=AlarmStatus.RECORD_STATUS,
- message="PV in alarm state")
- else:
- alarm = Alarm.ok
- # We only have a raw_stamp attr on monitor, the initial
- # caget with CTRL doesn't give us a timestamp
- ts = TimeStamp(*getattr(value, "raw_stamp", (None, None)))
- value = self.attr.meta.validate(value)
- self.attr.set_value_alarm_ts(value, alarm, ts)
diff --git a/malcolm/modules/ca/parts/castringpart.py b/malcolm/modules/ca/parts/castringpart.py
index d576cf0f9..20c921184 100644
--- a/malcolm/modules/ca/parts/castringpart.py
+++ b/malcolm/modules/ca/parts/castringpart.py
@@ -1,12 +1,32 @@
-from malcolm.modules.builtin.vmetas import StringMeta
-from .capart import CAPart
+from malcolm.core import Part, PartRegistrar, StringMeta, Hook
+from ..util import CaToolsHelper, CAAttribute, APartName, AMetaDescription, \
+ APv, ARbv, ARbvSuff, AMinDelta, ATimeout, AInPort, AWidget, AGroup, AConfig
-class CAStringPart(CAPart):
+class CAStringPart(Part):
"""Defines a string `Attribute` that talks to a DBR_STRING stringout PV"""
- def create_meta(self, description, tags):
- return StringMeta(description=description, tags=tags)
+ def __init__(self,
+ name, # type: APartName
+ description, # type: AMetaDescription
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=True, # type: AConfig
+ ):
+ # type: (...) -> None
+ super(CAStringPart, self).__init__(name)
+ catools = CaToolsHelper.instance()
+ self.caa = CAAttribute(
+ StringMeta(description), catools.DBR_STRING, pv, rbv, rbv_suff,
+ min_delta, timeout, inport, widget, group, config)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ self.caa.setup(registrar, self.name, self.register_hooked)
- def get_datatype(self):
- return self.catools.DBR_STRING
diff --git a/malcolm/modules/ca/parts/catoolshelper.py b/malcolm/modules/ca/parts/catoolshelper.py
deleted file mode 100644
index 8bdc8af34..000000000
--- a/malcolm/modules/ca/parts/catoolshelper.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import threading
-
-from malcolm.core import Queue
-from malcolm.compat import maybe_import_cothread
-
-
-def _import_cothread(q):
- import cothread
- from cothread import catools
- from cothread.input_hook import _install_readline_hook
- _install_readline_hook(None)
- q.put((cothread, catools))
- # Wait forever
- cothread.Event().Wait()
-
-
-class CaToolsHelper(object):
- _instance = None
-
- def __init__(self):
- assert not self._instance, \
- "Can't create more than one instance of Singleton. Use instance()"
- self.cothread = maybe_import_cothread()
- if self.cothread:
- # We can use it in this thread
- from cothread import catools
- self.in_cothread_thread = True
- else:
- # We need our own thread to run it in
- q = Queue()
- threading.Thread(target=_import_cothread, args=(q,)).start()
- self.cothread, catools = q.get()
- self.in_cothread_thread = False
- self.catools = catools
- self.DBR_STRING = catools.DBR_STRING
- self.DBR_LONG = catools.DBR_LONG
- self.DBR_DOUBLE = catools.DBR_DOUBLE
- self.FORMAT_CTRL = catools.FORMAT_CTRL
- self.FORMAT_TIME = catools.FORMAT_TIME
- self.DBR_ENUM = catools.DBR_ENUM
- self.DBR_CHAR_STR = catools.DBR_CHAR_STR
-
- def caget(self, *args, **kwargs):
- if self.in_cothread_thread:
- return self.catools.caget(*args, **kwargs)
- else:
- return self.cothread.CallbackResult(
- self.catools.caget, *args, **kwargs)
-
- def caput(self, *args, **kwargs):
- if self.in_cothread_thread:
- return self.catools.caput(*args, **kwargs)
- else:
- return self.cothread.CallbackResult(
- self.catools.caput, *args, **kwargs)
-
- def camonitor(self, *args, **kwargs):
- if self.in_cothread_thread:
- return self.catools.camonitor(*args, **kwargs)
- else:
- return self.cothread.CallbackResult(
- self.catools.camonitor, *args, **kwargs)
-
- @classmethod
- def instance(cls):
- if not cls._instance:
- cls._instance = CaToolsHelper()
- return cls._instance
-
diff --git a/malcolm/modules/ca/util.py b/malcolm/modules/ca/util.py
new file mode 100644
index 000000000..75f78dab3
--- /dev/null
+++ b/malcolm/modules/ca/util.py
@@ -0,0 +1,227 @@
+import threading
+import time
+
+from annotypes import Anno, TYPE_CHECKING
+
+from malcolm.compat import maybe_import_cothread
+from malcolm.core import Queue, VMeta, Alarm, AlarmStatus, TimeStamp, \
+ Loggable, APartName, AMetaDescription, Hook, PartRegistrar
+from malcolm.modules.builtin.util import set_tags, AWidget, AGroup, AConfig, \
+ AInPort
+from malcolm.modules.builtin.hooks import InitHook, ResetHook, DisableHook
+
+if TYPE_CHECKING:
+ from typing import Callable, Any, Union, Type, Sequence, Optional, List
+
+ Hooks = Union[Type[Hook], Sequence[Type[Hook]]]
+ ArgsGen = Callable[(), List[str]]
+ Register = Callable[(Hooks, Callable, Optional[ArgsGen]), None]
+
+
+# Store them here for re-export
+APartName = APartName
+AMetaDescription = AMetaDescription
+
+
+with Anno("Full pv of demand and default for rbv"):
+ APv = str
+with Anno("Override for rbv"):
+ ARbv = str
+with Anno("Set rbv to pv + rbv_suff"):
+ ARbvSuff = str
+with Anno("Minimum time between attribute updates in seconds"):
+ AMinDelta = float
+with Anno("Max time to wait for puts to complete, <0 is forever"):
+ ATimeout = float
+
+
+class CAAttribute(Loggable):
+ def __init__(self,
+ meta, # type: VMeta
+ datatype, # type: Any
+ pv="", # type: APv
+ rbv="", # type: ARbv
+ rbv_suff="", # type: ARbvSuff
+ min_delta=0.05, # type: AMinDelta
+ timeout=5.0, # type: ATimeout
+ inport=None, # type: AInPort
+ widget=None, # type: AWidget
+ group=None, # type: AGroup
+ config=1, # type: AConfig
+ on_connect=None, # type: Callable[[Any], None]
+ ):
+ # type: (...) -> None
+ self.set_logger(pv=pv, rbv=rbv)
+ writeable = bool(pv)
+ set_tags(meta, writeable, config, group, widget, inport)
+ if not rbv and not pv:
+ raise ValueError('Must pass pv or rbv')
+ if not rbv:
+ if rbv_suff:
+ rbv = pv + rbv_suff
+ else:
+ rbv = pv
+ self.pv = pv
+ self.rbv = rbv
+ self.datatype = datatype
+ self.min_delta = min_delta
+ self.timeout = timeout
+ self.on_connect = on_connect
+ self.attr = meta.create_attribute_model()
+ # Camonitor subscription
+ self.monitor = None
+ self.catools = CaToolsHelper.instance()
+ self._update_after = 0
+
+ def reconnect(self):
+ # release old monitor
+ self.disconnect()
+ # make the connection in cothread's thread, use caget for initial value
+ pvs = [self.rbv]
+ if self.pv and self.pv != self.rbv:
+ pvs.append(self.pv)
+ ca_values = self.catools.checking_caget(
+ pvs, format=self.catools.FORMAT_CTRL, datatype=self.datatype)
+ if self.on_connect:
+ self.on_connect(ca_values[0])
+ self._update_value(ca_values[0])
+ # now setup monitor on rbv
+ self.monitor = self.catools.camonitor(
+ self.rbv, self._monitor_callback,
+ format=self.catools.FORMAT_TIME, datatype=self.datatype,
+ notify_disconnect=True)
+
+ def disconnect(self):
+ if self.monitor is not None:
+ self.monitor.close()
+ self.monitor = None
+
+ def caput(self, value):
+ if self.timeout < 0:
+ timeout = None
+ else:
+ timeout = self.timeout
+ self.log.info("caput %s %s", self.pv, value)
+ self.catools.caput(
+ self.pv, value, wait=True, timeout=timeout,
+ datatype=self.datatype)
+ # now do a caget
+ value = self.catools.caget(
+ self.rbv,
+ format=self.catools.FORMAT_TIME, datatype=self.datatype)
+ self._update_value(value)
+
+ def _monitor_callback(self, value):
+ now = time.time()
+ delta = now - self._update_after
+ self._update_value(value)
+ # See how long to sleep for to make sure we don't get more than one
+ # update at < min_delta interval
+ if delta > self.min_delta:
+ # If we were more than min_delta late then reset next update time
+ self._update_after = now + self.min_delta
+ elif delta < 0:
+ # If delta is less than zero sleep for a bit
+ self.catools.cothread.Sleep(-delta)
+ else:
+ # If we were within the delta window just increment next update
+ self._update_after += self.min_delta
+
+ def _update_value(self, value):
+ if not value.ok:
+ self.attr.set_value(None, alarm=Alarm.invalid("PV disconnected"))
+ else:
+ if value.severity:
+ alarm = Alarm(severity=value.severity,
+ status=AlarmStatus.RECORD_STATUS,
+ message="PV in alarm state")
+ else:
+ alarm = Alarm.ok
+ # We only have a raw_stamp attr on monitor, the initial
+ # caget with CTRL doesn't give us a timestamp
+ ts = TimeStamp(*getattr(value, "raw_stamp", (None, None)))
+ value = self.attr.meta.validate(value)
+ self.attr.set_value_alarm_ts(value, alarm, ts)
+
+ def setup(self, registrar, name, register_hooked, writeable_func=None):
+ # type: (PartRegistrar, str, Register, Callable[[Any], None]) -> None
+ if self.pv:
+ if writeable_func is None:
+ writeable_func = self.caput
+ else:
+ writeable_func = None
+ registrar.add_attribute_model(name, self.attr, writeable_func)
+ register_hooked(DisableHook, self.disconnect)
+ register_hooked((InitHook, ResetHook), self.reconnect)
+
+
+def _import_cothread(q):
+ import cothread
+ from cothread import catools
+ from cothread.input_hook import _install_readline_hook
+ _install_readline_hook(None)
+ q.put((cothread, catools))
+ # Wait forever
+ cothread.Event().Wait()
+
+
+class CaToolsHelper(object):
+ _instance = None
+
+ def __init__(self):
+ assert not self._instance, \
+ "Can't create more than one instance of Singleton. Use instance()"
+ self.cothread = maybe_import_cothread()
+ if self.cothread:
+ # We can use it in this thread
+ from cothread import catools
+ self.in_cothread_thread = True
+ else:
+ # We need our own thread to run it in
+ q = Queue()
+ threading.Thread(target=_import_cothread, args=(q,)).start()
+ self.cothread, catools = q.get()
+ self.in_cothread_thread = False
+ self.catools = catools
+ self.DBR_STRING = catools.DBR_STRING
+ self.DBR_LONG = catools.DBR_LONG
+ self.DBR_DOUBLE = catools.DBR_DOUBLE
+ self.FORMAT_CTRL = catools.FORMAT_CTRL
+ self.FORMAT_TIME = catools.FORMAT_TIME
+ self.DBR_ENUM = catools.DBR_ENUM
+ self.DBR_CHAR_STR = catools.DBR_CHAR_STR
+
+ def caget(self, *args, **kwargs):
+ if self.in_cothread_thread:
+ return self.catools.caget(*args, **kwargs)
+ else:
+ return self.cothread.CallbackResult(
+ self.catools.caget, *args, **kwargs)
+
+ def caput(self, *args, **kwargs):
+ if self.in_cothread_thread:
+ return self.catools.caput(*args, **kwargs)
+ else:
+ return self.cothread.CallbackResult(
+ self.catools.caput, *args, **kwargs)
+
+ def camonitor(self, *args, **kwargs):
+ if self.in_cothread_thread:
+ return self.catools.camonitor(*args, **kwargs)
+ else:
+ return self.cothread.CallbackResult(
+ self.catools.camonitor, *args, **kwargs)
+
+ def checking_caget(self, values, *args, **kwargs):
+ ca_values = self.caget(values, *args, **kwargs)
+ # check connection is ok
+ for i, v in enumerate(ca_values):
+ assert v.ok, "CA connect failed with %s" % v.state_strings[v.state]
+ return ca_values
+
+ @classmethod
+ def instance(cls):
+ if not cls._instance:
+ cls._instance = CaToolsHelper()
+ return cls._instance
+
diff --git a/malcolm/modules/demo/DEMO-AREADETECTOR.yaml b/malcolm/modules/demo/DEMO-AREADETECTOR.yaml
index 57243f75d..7d9878c87 100644
--- a/malcolm/modules/demo/DEMO-AREADETECTOR.yaml
+++ b/malcolm/modules/demo/DEMO-AREADETECTOR.yaml
@@ -13,13 +13,12 @@
# Create some Blocks
- ADSimDetector.blocks.sim_detector_runnable_block:
- mriPrefix: DETECTOR
- configDir: $(yamldir)/saved_designs
- pvPrefix: $(hostname)-AD-SIM-01
- drvSuffix: CAM
- initialDesign: demo_design
+ mri_prefix: DETECTOR
+ config_dir: $(yamldir)/saved_designs
+ pv_prefix: $(hostname)-AD-SIM-01
+ drv_suffix: CAM
+ initial_design: demo_design
# Add a webserver
- web.blocks.web_server_block:
mri: WEB
- port: 8090
diff --git a/malcolm/modules/demo/DEMO-HELLO.yaml b/malcolm/modules/demo/DEMO-HELLO.yaml
index ac7a5dfc9..d1a827f69 100644
--- a/malcolm/modules/demo/DEMO-HELLO.yaml
+++ b/malcolm/modules/demo/DEMO-HELLO.yaml
@@ -11,4 +11,3 @@
# Add a webserver
- web.blocks.web_server_block:
mri: WEB
- port: 8080
diff --git a/malcolm/modules/demo/DEMO-SCANNING.yaml b/malcolm/modules/demo/DEMO-SCANNING.yaml
index ec405d2d1..4ee92d73c 100644
--- a/malcolm/modules/demo/DEMO-SCANNING.yaml
+++ b/malcolm/modules/demo/DEMO-SCANNING.yaml
@@ -12,28 +12,27 @@
value: 6065
- builtin.defines.string:
- name: configDir
+ name: config_dir
value: $(yamldir)/saved_designs
# Create some Blocks
- ADSimDetector.blocks.sim_detector_runnable_block:
- mriPrefix: DETECTOR
- configDir: $(configDir)
- pvPrefix: $(hostname)-AD-SIM-01
- drvSuffix: CAM
- initialDesign: demo_design
+ mri_prefix: DETECTOR
+ config_dir: $(config_dir)
+ pv_prefix: $(hostname)-AD-SIM-01
+ drv_suffix: CAM
+ initial_design: demo_design
- demo.blocks.ticker_block:
mri: TICKER
- configDir: $(configDir)
+ config_dir: $(config_dir)
- demo.blocks.scan_block:
mri: SCAN
- configDir: $(configDir)
+ config_dir: $(config_dir)
det: DETECTOR
motors: TICKER
# Add a webserver
- web.blocks.web_server_block:
mri: WEB
- port: 8090
diff --git a/malcolm/modules/demo/DEMO-TICKER.yaml b/malcolm/modules/demo/DEMO-TICKER.yaml
index b0497c5c7..dd4325d19 100644
--- a/malcolm/modules/demo/DEMO-TICKER.yaml
+++ b/malcolm/modules/demo/DEMO-TICKER.yaml
@@ -1,9 +1,8 @@
# Create some Blocks
- demo.blocks.ticker_block:
mri: TICKER
- configDir: /tmp
+ config_dir: /tmp
# Add a webserver
- web.blocks.web_server_block:
mri: WEB
- port: 8080
diff --git a/malcolm/modules/demo/__init__.py b/malcolm/modules/demo/__init__.py
index e69de29bb..f6ea27363 100644
--- a/malcolm/modules/demo/__init__.py
+++ b/malcolm/modules/demo/__init__.py
@@ -0,0 +1 @@
+from . import parts
diff --git a/malcolm/modules/demo/blocks/scan_block.yaml b/malcolm/modules/demo/blocks/scan_block.yaml
index a259999e6..842308b86 100644
--- a/malcolm/modules/demo/blocks/scan_block.yaml
+++ b/malcolm/modules/demo/blocks/scan_block.yaml
@@ -11,7 +11,7 @@
description: MRI for the motor controller
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.defines.docstring:
@@ -20,18 +20,21 @@
- scanning.controllers.RunnableController:
mri: $(mri)
- axesToMove: [x, y]
- configDir: $(configDir)
+ config_dir: $(config_dir)
description: $(docstring)
- ADCore.parts.DatasetTablePart:
name: DSET
+- scanning.parts.SimultaneousAxesPart:
+ value: [x, y]
+
- ADCore.parts.DatasetRunnableChildPart:
name: DET
mri: $(det)
+ initial_visibility: True
- scanning.parts.RunnableChildPart:
name: MOTORS
mri: $(motors)
-
+ initial_visibility: True
diff --git a/malcolm/modules/demo/blocks/ticker_block.yaml b/malcolm/modules/demo/blocks/ticker_block.yaml
index e18510fe1..b42e9b082 100644
--- a/malcolm/modules/demo/blocks/ticker_block.yaml
+++ b/malcolm/modules/demo/blocks/ticker_block.yaml
@@ -4,7 +4,7 @@
description: Malcolm resource id of the Block
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.defines.docstring:
@@ -14,7 +14,7 @@
- scanning.controllers.RunnableController:
mri: $(mri)
- configDir: $(configDir)
+ config_dir: $(config_dir)
description: $(docstring)
# Add the Counter blocks
diff --git a/malcolm/modules/demo/parts/__init__.py b/malcolm/modules/demo/parts/__init__.py
index 539d06019..922bfc978 100644
--- a/malcolm/modules/demo/parts/__init__.py
+++ b/malcolm/modules/demo/parts/__init__.py
@@ -2,5 +2,7 @@
from .counterpart import CounterPart
from .scantickerpart import ScanTickerPart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/demo/parts/counterpart.py b/malcolm/modules/demo/parts/counterpart.py
index 9150052ec..f09a2fe0e 100644
--- a/malcolm/modules/demo/parts/counterpart.py
+++ b/malcolm/modules/demo/parts/counterpart.py
@@ -1,31 +1,26 @@
-from malcolm.core import method_takes, Part, REQUIRED
-from malcolm.modules.builtin.vmetas import NumberMeta, StringMeta
-from malcolm.tags import config
+from malcolm.core import Part, config_tag, NumberMeta, PartRegistrar
-@method_takes(
- "name", StringMeta("Name of the Part within the controller"), REQUIRED)
class CounterPart(Part):
"""Defines a counter `Attribute` with zero and increment `Method` objects"""
- #: `AttributeModel` that will hold the counter value
counter = None
+ """Holds the current counter value"""
- def __init__(self, params):
- super(CounterPart, self).__init__(params.name)
-
- def create_attribute_models(self):
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
# Create writeable attribute for current counter value
- meta = NumberMeta("float64", "A counter", tags=[config()])
- self.counter = meta.create_attribute_model()
- yield "counter", self.counter, self.counter.set_value
+ self.counter = NumberMeta(
+ "float64", "The current value of the counter", tags=[config_tag()]
+ ).create_attribute_model()
+ registrar.add_attribute_model(
+ "counter", self.counter, self.counter.set_value)
+ registrar.add_method_model(self.zero)
+ registrar.add_method_model(self.increment)
- @method_takes()
def zero(self):
"""Zero the counter attribute"""
self.counter.set_value(0)
- @method_takes()
def increment(self):
"""Add one to the counter attribute"""
self.counter.set_value(self.counter.value + 1)
-
diff --git a/malcolm/modules/demo/parts/hellopart.py b/malcolm/modules/demo/parts/hellopart.py
index 74d34d788..b43d176cd 100644
--- a/malcolm/modules/demo/parts/hellopart.py
+++ b/malcolm/modules/demo/parts/hellopart.py
@@ -1,33 +1,35 @@
from __future__ import print_function
-
import time
-from malcolm.core import Part, method_takes, method_returns, REQUIRED
-from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta
+from annotypes import Anno, add_call_types
+
+from malcolm.core import Part
+
+
+with Anno("The name of the person to greet"):
+ AName = str
+with Anno("Time to wait before returning"):
+ ASleep = float
+with Anno("The manufactured greeting"):
+ AGreeting = str
-@method_takes(
- "name", StringMeta("Name of the Part within the controller"), REQUIRED)
class HelloPart(Part):
"""Defines greet and error `Method` objects on a `Block`"""
- def __init__(self, params):
- super(HelloPart, self).__init__(params.name)
-
- @method_takes(
- "name", StringMeta("a name"), REQUIRED,
- "sleep", NumberMeta("float64", "Time to wait before returning"), 0,
- )
- @method_returns(
- "greeting", StringMeta(description="a greeting"), REQUIRED
- )
- def greet(self, parameters, return_map):
+
+ def setup(self, registrar):
+ registrar.add_method_model(self.greet)
+ registrar.add_method_model(self.error)
+
+ @add_call_types
+ def greet(self, name, sleep=0):
+ # type: (AName, ASleep) -> AGreeting
"""Optionally sleep seconds, then return a greeting to """
print("Manufacturing greeting...")
- time.sleep(parameters.sleep)
- return_map.greeting = "Hello %s" % parameters.name
- return return_map
+ time.sleep(sleep)
+ greeting = "Hello %s" % name
+ return greeting
- @method_takes()
def error(self):
"""Raise an error"""
raise RuntimeError("You called method error()")
diff --git a/malcolm/modules/demo/parts/scantickerpart.py b/malcolm/modules/demo/parts/scantickerpart.py
index d420cac0b..919c71b96 100644
--- a/malcolm/modules/demo/parts/scantickerpart.py
+++ b/malcolm/modules/demo/parts/scantickerpart.py
@@ -1,69 +1,89 @@
import time
-from malcolm.modules.scanning.controllers import \
- RunnableController
-from malcolm.core import REQUIRED, method_takes
-from malcolm.modules.builtin.parts import ChildPart
-from malcolm.modules.builtin.vmetas import StringArrayMeta, NumberMeta
-from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta
+from annotypes import Anno, add_call_types
+from malcolm.core import APartName, PartRegistrar
+from malcolm.modules import builtin, scanning
-class ScanTickerPart(ChildPart):
+with Anno("If >0, raise an exception at the end of this step"):
+ AExceptionStep = int
+
+
+class ScanTickerPart(builtin.parts.ChildPart):
"""Provides control of a `counter_block` within a `RunnableController`"""
- # Generator instance
- generator = None
- # Where to start
- completed_steps = None
- # How many steps to do
- steps_to_do = None
- # When to blow up
- exception_step = None
- @RunnableController.Configure
- @RunnableController.PostRunArmed
- @RunnableController.Seek
- @method_takes(
- "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
- "axesToMove", StringArrayMeta(
- "List of axes in inner dimension of generator that should be moved"
- ), REQUIRED,
- "exceptionStep", NumberMeta(
- "int32", "If >0, raise an exception at the end of this step"), 0)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
+ def __init__(self, name, mri):
+ # type: (APartName, builtin.parts.AMri) -> None
+ super(ScanTickerPart, self).__init__(
+ name, mri, initial_visibility=True, stateful=False)
+ # Generator instance
+ self.generator = None # type: scanning.hooks.AGenerator
+ # Where to start
+ self.completed_steps = None # type: int
+ # How many steps to do
+ self.steps_to_do = None # type: int
+ # When to blow up
+ self.exception_step = None # type: int
+ # Hooks
+ self.register_hooked((scanning.hooks.ConfigureHook,
+ scanning.hooks.PostRunArmedHook,
+ scanning.hooks.SeekHook), self.configure)
+ self.register_hooked((scanning.hooks.RunHook,
+ scanning.hooks.ResumeHook), self.run)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(ScanTickerPart, self).setup(registrar)
+ # Tell the controller to expose some extra configure parameters
+ registrar.report(scanning.hooks.ConfigureHook.create_info(
+ self.configure))
+
+ # Allow CamelCase for arguments as they will be serialized by parent
+ # noinspection PyPep8Naming
+ @add_call_types
+ def configure(self,
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ # The following were passed from the user calling configure()
+ generator, # type: scanning.hooks.AGenerator
+ axesToMove, # type: scanning.hooks.AAxesToMove
+ exceptionStep=0, # type: AExceptionStep
+ ):
+ # type: (...) -> None
# If we are being asked to move
- if self.name in params.axesToMove:
+ if self.name in axesToMove:
# Just store the generator and place we need to start
- self.generator = params.generator
+ self.generator = generator
self.completed_steps = completed_steps
self.steps_to_do = steps_to_do
- self.exception_step = params.exceptionStep
+ self.exception_step = exceptionStep
else:
# Flag nothing to do
self.generator = None
- @RunnableController.Run
- @RunnableController.Resume
- def run(self, context, update_completed_steps):
+ @add_call_types
+ def run(self, context):
+ # type: (scanning.hooks.AContext) -> None
+ if not self.generator:
+ return
# Start time so everything is relative
point_time = time.time()
- if self.generator:
- child = context.block_view(self.params.mri)
- for i in range(self.completed_steps,
- self.completed_steps + self.steps_to_do):
- self.log.debug("Starting point %s", i)
- # Get the point we are meant to be scanning
- point = self.generator.get_point(i)
- # Update the child counter_block to be the demand position
- position = point.positions[self.name]
- child.counter.put_value(position)
- # Wait until the next point is due
- point_time += point.duration
- wait_time = point_time - time.time()
- self.log.debug("%s Sleeping %s", self.name, wait_time)
- context.sleep(wait_time)
- # Update the point as being complete
- update_completed_steps(i + 1, self)
- # If this is the exception step then blow up
- assert i + 1 != self.exception_step, \
- "Raising exception at step %s" % self.exception_step
+ child = context.block_view(self.mri)
+ for i in range(self.completed_steps,
+ self.completed_steps + self.steps_to_do):
+ self.log.debug("Starting point %s", i)
+ # Get the point we are meant to be scanning
+ point = self.generator.get_point(i)
+ # Update the child counter_block to be the demand position
+ position = point.positions[self.name]
+ child.counter.put_value(position)
+ # Wait until the next point is due
+ point_time += point.duration
+ wait_time = point_time - time.time()
+ self.log.debug("%s Sleeping %s", self.name, wait_time)
+ context.sleep(wait_time)
+ # Update the point as being complete
+ self.registrar.report(scanning.infos.RunProgressInfo(i + 1))
+ # If this is the exception step then blow up
+ assert i + 1 != self.exception_step, \
+ "Raising exception at step %s" % self.exception_step
diff --git a/malcolm/modules/demo/saved_designs/DETECTOR/demo_design.json b/malcolm/modules/demo/saved_designs/DETECTOR/demo_design.json
index 9254bed4b..87fb4871c 100644
--- a/malcolm/modules/demo/saved_designs/DETECTOR/demo_design.json
+++ b/malcolm/modules/demo/saved_designs/DETECTOR/demo_design.json
@@ -1,93 +1,98 @@
{
- "layout": {
+ "attributes": {
+ "layout": {
+ "DRV": {
+ "x": 0.0,
+ "y": 0.0,
+ "visible": true
+ },
+ "STAT": {
+ "x": 0.0,
+ "y": 0.0,
+ "visible": true
+ },
+ "POS": {
+ "x": 0.0,
+ "y": 0.0,
+ "visible": true
+ },
+ "HDF5": {
+ "x": 0.0,
+ "y": 0.0,
+ "visible": true
+ }
+ },
+ "exports": {},
+ "readoutTime": 4.0000000000000003e-05
+ },
+ "children": {
"DRV": {
- "x": 0.0,
- "y": 0.0,
- "visible": true
+ "arrayCallbacks": true,
+ "arrayCounter": 0,
+ "attributesFile": "",
+ "imageMode": "Multiple",
+ "numImages": 30,
+ "triggerMode": "Internal",
+ "exposure": 0.49996000000000002,
+ "acquirePeriod": 0.0050000000000000001,
+ "gainX": 1.0,
+ "gainY": 1.0
},
- "HDF5": {
- "x": 0.0,
- "y": 0.0,
- "visible": true
+ "STAT": {
+ "arrayCallbacks": true,
+ "arrayCounter": 2571,
+ "attributesFile": "/tmp/DETECTOR:STAT-attributes.xml",
+ "inp": "ADSIM.POS",
+ "enableCallbacks": true,
+ "computeStatistics": true
},
"POS": {
- "x": 0.0,
- "y": 0.0,
- "visible": true
+ "arrayCallbacks": true,
+ "arrayCounter": 2571,
+ "attributesFile": "",
+ "inp": "ADSIM.CAM",
+ "enableCallbacks": true,
+ "idStart": 1
},
- "STAT": {
- "x": 0.0,
- "y": 0.0,
- "visible": true
+ "HDF5": {
+ "arrayCallbacks": false,
+ "arrayCounter": 0,
+ "attributesFile": "",
+ "inp": "ADSIM.stat",
+ "enableCallbacks": true,
+ "fileWriteMode": "Stream",
+ "swmrMode": true,
+ "positionMode": true,
+ "dimAttDatasets": true,
+ "lazyOpen": true,
+ "numCapture": 0,
+ "flushDataPerNFrames": 2,
+ "flushAttrPerNFrames": 2,
+ "xml": "/tmp/DETECTOR:HDF5-layout.xml",
+ "filePath": "/tmp/",
+ "fileName": "det",
+ "fileTemplate": "%s%s.h5",
+ "numExtraDims": 1,
+ "posNameDimN": "d1",
+ "posNameDimX": "d0",
+ "posNameDimY": "",
+ "posNameDim3": "",
+ "posNameDim4": "",
+ "posNameDim5": "",
+ "posNameDim6": "",
+ "posNameDim7": "",
+ "posNameDim8": "",
+ "posNameDim9": "",
+ "extraDimSizeN": 5,
+ "extraDimSizeX": 6,
+ "extraDimSizeY": 1,
+ "extraDimSize3": 1,
+ "extraDimSize4": 1,
+ "extraDimSize5": 1,
+ "extraDimSize6": 1,
+ "extraDimSize7": 1,
+ "extraDimSize8": 1,
+ "extraDimSize9": 1
}
- },
- "exports": {},
- "DRV": {
- "arrayCallbacks": true,
- "arrayCounter": 0,
- "attributesFile": "",
- "imageMode": "Multiple",
- "numImages": 30,
- "triggerMode": "Internal",
- "exposure": 0.49996000000000002,
- "acquirePeriod": 0.0050000000000000001,
- "gainX": 1.0,
- "gainY": 1.0
- },
- "HDF5": {
- "arrayCallbacks": false,
- "arrayCounter": 0,
- "attributesFile": "",
- "inp": "ADSIM.stat",
- "enableCallbacks": true,
- "fileWriteMode": "Stream",
- "swmrMode": true,
- "positionMode": true,
- "dimAttDatasets": true,
- "lazyOpen": true,
- "numCapture": 0,
- "flushDataPerNFrames": 2,
- "flushAttrPerNFrames": 2,
- "xml": "/tmp/DETECTOR:HDF5-layout.xml",
- "filePath": "/tmp/",
- "fileName": "det",
- "fileTemplate": "%s%s.h5",
- "numExtraDims": 1,
- "posNameDimN": "d1",
- "posNameDimX": "d0",
- "posNameDimY": "",
- "posNameDim3": "",
- "posNameDim4": "",
- "posNameDim5": "",
- "posNameDim6": "",
- "posNameDim7": "",
- "posNameDim8": "",
- "posNameDim9": "",
- "extraDimSizeN": 5,
- "extraDimSizeX": 6,
- "extraDimSizeY": 1,
- "extraDimSize3": 1,
- "extraDimSize4": 1,
- "extraDimSize5": 1,
- "extraDimSize6": 1,
- "extraDimSize7": 1,
- "extraDimSize8": 1,
- "extraDimSize9": 1
- },
- "POS": {
- "arrayCallbacks": true,
- "arrayCounter": 2571,
- "attributesFile": "",
- "inp": "ADSIM.CAM",
- "enableCallbacks": true,
- "idStart": 1
- },
- "STAT": {
- "arrayCallbacks": true,
- "arrayCounter": 2571,
- "attributesFile": "/tmp/DETECTOR:STAT-attributes.xml",
- "inp": "ADSIM.POS",
- "enableCallbacks": true,
- "computeStatistics": true
}
}
\ No newline at end of file
diff --git a/malcolm/modules/dtacq/blocks/dtacq_runnable_block.yaml b/malcolm/modules/dtacq/blocks/dtacq_runnable_block.yaml
index 25b98335b..e24adbbe1 100644
--- a/malcolm/modules/dtacq/blocks/dtacq_runnable_block.yaml
+++ b/malcolm/modules/dtacq/blocks/dtacq_runnable_block.yaml
@@ -1,55 +1,55 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- builtin.defines.docstring:
value: |
Device block corresponding to dtacq + reframe + stat + pos + hdf writer.
- - Detector driver should have pv prefix $(pvPrefix):ADC
- - Reframe plugin should have pv prefix $(pvPrefix):TRIG
- - Pos should have pv prefix $(pvPrefix):POS
- - Stat should have pv prefix $(pvPrefix):STAT
- - HDF should have pv prefix $(pvPrefix):HDF5
+ - Detector driver should have pv prefix $(pv_prefix):ADC
+ - Reframe plugin should have pv prefix $(pv_prefix):TRIG
+ - Pos should have pv prefix $(pv_prefix):POS
+ - Stat should have pv prefix $(pv_prefix):STAT
+ - HDF should have pv prefix $(pv_prefix):HDF5
- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
description: $(docstring)
- dtacq.blocks.dtacq_driver_block:
- mri: $(mriPrefix):DRV
- prefix: $(pvPrefix):ADC
+ mri: $(mri_prefix):DRV
+ prefix: $(pv_prefix):ADC
#- ADCore.parts.ExposureDetectorDriverPart:
# name: DRV
-# mri: $(mriPrefix):DRV
+# mri: $(mri_prefix):DRV
# readoutTime: 200e-6
- adUtil.blocks.reframe_plugin_block:
- mri: $(mriPrefix):TRIG
- prefix: $(pvPrefix):TRIG
+ mri: $(mri_prefix):TRIG
+ prefix: $(pv_prefix):TRIG
- adUtil.parts.ReframePluginPart:
name: TRIG
- mri: $(mriPrefix):TRIG
+ mri: $(mri_prefix):TRIG
- ADCore.blocks.stats_plugin_block:
- mri: $(mriPrefix):STAT
- prefix: $(pvPrefix):STAT
+ mri: $(mri_prefix):STAT
+ prefix: $(pv_prefix):STAT
- ADCore.parts.StatsPluginPart:
name: STAT
- mri: $(mriPrefix):STAT
+ mri: $(mri_prefix):STAT
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/excalibur/blocks/__init__.py b/malcolm/modules/excalibur/blocks/__init__.py
index a0c6b258f..f5252f05d 100644
--- a/malcolm/modules/excalibur/blocks/__init__.py
+++ b/malcolm/modules/excalibur/blocks/__init__.py
@@ -1,21 +1,14 @@
from malcolm.yamlutil import make_block_creator, check_yaml_names
-excalibur_detector_driver_block = make_block_creator(
- __file__, "excalibur_detector_driver_block.yaml")
-
-excalibur_detector_runnable_block = make_block_creator(
- __file__, "excalibur_detector_runnable_block.yaml")
-
-fem_detector_driver_block = make_block_creator(
- __file__, "fem_detector_driver_block.yaml")
-
-fem_detector_runnable_block = make_block_creator(
- __file__, "fem_detector_runnable_block.yaml")
-
+excalibur_driver_block = make_block_creator(
+ __file__, "excalibur_driver_block.yaml")
+excalibur_runnable_block = make_block_creator(
+ __file__, "excalibur_runnable_block.yaml")
+fem_driver_block = make_block_creator(
+ __file__, "fem_driver_block.yaml")
+fem_runnable_block = make_block_creator(
+ __file__, "fem_runnable_block.yaml")
gap_plugin_block = make_block_creator(
__file__, "gap_plugin_block.yaml")
-
-excalibur_munge_block = make_block_creator(
- __file__, "excalibur_munge_block.yaml")
__all__ = check_yaml_names(globals())
diff --git a/malcolm/modules/excalibur/blocks/excalibur_detector_runnable_block.yaml b/malcolm/modules/excalibur/blocks/excalibur_detector_runnable_block.yaml
deleted file mode 100644
index 23a448249..000000000
--- a/malcolm/modules/excalibur/blocks/excalibur_detector_runnable_block.yaml
+++ /dev/null
@@ -1,96 +0,0 @@
-- builtin.parameters.string:
- name: mriPrefix
- description: Malcolm resource id of the Block and prefix for children
-
-- builtin.parameters.string:
- name: pvPrefix
- description: PV prefix for driver and all plugins
-
-- builtin.parameters.string:
- name: configDir
- description: Where to store saved configs
-
-- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):1
- pvPrefix: $(pvPrefix):1
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):2
- pvPrefix: $(pvPrefix):2
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):3
- pvPrefix: $(pvPrefix):3
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):4
- pvPrefix: $(pvPrefix):4
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):5
- pvPrefix: $(pvPrefix):5
- configDir: $(configDir)
-
-- excalibur.blocks.fem_detector_runnable_block:
- mriPrefix: $(mriPrefix):6
- pvPrefix: $(pvPrefix):6
- configDir: $(configDir)
-
-- excalibur.parts.FemChildPart:
- name: FEM1
- mri: $(mriPrefix):1
-
-- excalibur.parts.FemChildPart:
- name: FEM2
- mri: $(mriPrefix):2
-
-- excalibur.parts.FemChildPart:
- name: FEM3
- mri: $(mriPrefix):3
-
-- excalibur.parts.FemChildPart:
- name: FEM4
- mri: $(mriPrefix):4
-
-- excalibur.parts.FemChildPart:
- name: FEM5
- mri: $(mriPrefix):5
-
-- excalibur.parts.FemChildPart:
- name: FEM6
- mri: $(mriPrefix):6
-
-- excalibur.blocks.excalibur_detector_driver_block:
- mri: $(mriPrefix):MASTER
- prefix: $(pvPrefix):CONFIG:ACQUIRE
-
-- excalibur.parts.ExcaliburDriverPart:
- name: CONFIG
- mri: $(mriPrefix):MASTER
- readoutTime: 12e-6
-
-- excalibur.parts.VDSWrapperPart:
- name: VDS
- dataType: int32
- stripeHeight: 259
- stripeWidth: 2069
-
-- ADCore.parts.DatasetTablePart:
- name: DSET
-
-- excalibur.blocks.excalibur_munge_block:
- mri: $(mriPrefix):MUNGE
- prefix: $(pvPrefix):MUNGE
-
-- excalibur.parts.ExcaliburFileMungePart:
- name: MUNGE
- mri: $(mriPrefix):MUNGE
-
diff --git a/malcolm/modules/excalibur/blocks/excalibur_detector_driver_block.yaml b/malcolm/modules/excalibur/blocks/excalibur_driver_block.yaml
similarity index 100%
rename from malcolm/modules/excalibur/blocks/excalibur_detector_driver_block.yaml
rename to malcolm/modules/excalibur/blocks/excalibur_driver_block.yaml
diff --git a/malcolm/modules/excalibur/blocks/excalibur_munge_block.yaml b/malcolm/modules/excalibur/blocks/excalibur_munge_block.yaml
deleted file mode 100644
index 851381b70..000000000
--- a/malcolm/modules/excalibur/blocks/excalibur_munge_block.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-- builtin.parameters.string:
- name: mri
- description: Malcolm resource id of the Block
-
-- builtin.parameters.string:
- name: prefix
- description: The root PV for the all records
-
-- builtin.controllers.StatefulController:
- mri: $(mri)
- description: Stupif block used to fake an HDF5 writer
diff --git a/malcolm/modules/excalibur/blocks/excalibur_runnable_block.yaml b/malcolm/modules/excalibur/blocks/excalibur_runnable_block.yaml
new file mode 100644
index 000000000..f2070128e
--- /dev/null
+++ b/malcolm/modules/excalibur/blocks/excalibur_runnable_block.yaml
@@ -0,0 +1,90 @@
+- builtin.parameters.string:
+ name: mri_prefix
+ description: Malcolm resource id of the Block and prefix for children
+
+- builtin.parameters.string:
+ name: pv_prefix
+ description: PV prefix for driver and all plugins
+
+- builtin.parameters.string:
+ name: config_dir
+ description: Where to store saved configs
+
+- scanning.controllers.RunnableController:
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):1
+ pv_prefix: $(pv_prefix):1
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):2
+ pv_prefix: $(pv_prefix):2
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):3
+ pv_prefix: $(pv_prefix):3
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):4
+ pv_prefix: $(pv_prefix):4
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):5
+ pv_prefix: $(pv_prefix):5
+ config_dir: $(config_dir)
+
+- excalibur.blocks.fem_runnable_block:
+ mri_prefix: $(mri_prefix):6
+ pv_prefix: $(pv_prefix):6
+ config_dir: $(config_dir)
+
+- excalibur.parts.FemChildPart:
+ name: FEM1
+ mri: $(mri_prefix):1
+
+- excalibur.parts.FemChildPart:
+ name: FEM2
+ mri: $(mri_prefix):2
+
+- excalibur.parts.FemChildPart:
+ name: FEM3
+ mri: $(mri_prefix):3
+
+- excalibur.parts.FemChildPart:
+ name: FEM4
+ mri: $(mri_prefix):4
+
+- excalibur.parts.FemChildPart:
+ name: FEM5
+ mri: $(mri_prefix):5
+
+- excalibur.parts.FemChildPart:
+ name: FEM6
+ mri: $(mri_prefix):6
+
+- excalibur.blocks.excalibur_driver_block:
+ mri: $(mri_prefix):MASTER
+ prefix: $(pv_prefix):CONFIG:ACQUIRE
+
+- excalibur.parts.ExcaliburDriverPart:
+ name: CONFIG
+ mri: $(mri_prefix):MASTER
+
+- ADCore.parts.ExposureDeadtimePart:
+ name: DEADTIME
+ initial_readout_time: 12e-6
+
+- excalibur.parts.VDSWrapperPart:
+ name: VDS
+ data_type: int32
+ stripe_height: 259
+ stripe_width: 2069
+
+- ADCore.parts.DatasetTablePart:
+ name: DSET
diff --git a/malcolm/modules/excalibur/blocks/fem_detector_driver_block.yaml b/malcolm/modules/excalibur/blocks/fem_driver_block.yaml
similarity index 100%
rename from malcolm/modules/excalibur/blocks/fem_detector_driver_block.yaml
rename to malcolm/modules/excalibur/blocks/fem_driver_block.yaml
diff --git a/malcolm/modules/excalibur/blocks/fem_detector_runnable_block.yaml b/malcolm/modules/excalibur/blocks/fem_runnable_block.yaml
similarity index 56%
rename from malcolm/modules/excalibur/blocks/fem_detector_runnable_block.yaml
rename to malcolm/modules/excalibur/blocks/fem_runnable_block.yaml
index e85471f9a..e3bfc0596 100644
--- a/malcolm/modules/excalibur/blocks/fem_detector_runnable_block.yaml
+++ b/malcolm/modules/excalibur/blocks/fem_runnable_block.yaml
@@ -1,35 +1,35 @@
- builtin.parameters.string:
- name: mriPrefix
+ name: mri_prefix
description: Malcolm resource id of the Block and prefix for children
- builtin.parameters.string:
- name: pvPrefix
+ name: pv_prefix
description: PV prefix for driver and all plugins
- builtin.parameters.string:
- name: configDir
+ name: config_dir
description: Where to store saved configs
- scanning.controllers.RunnableController:
- mri: $(mriPrefix)
- configDir: $(configDir)
+ mri: $(mri_prefix)
+ config_dir: $(config_dir)
-- excalibur.blocks.fem_detector_driver_block:
- mri: $(mriPrefix):FEM
- prefix: $(pvPrefix):FEM
+- excalibur.blocks.fem_driver_block:
+ mri: $(mri_prefix):FEM
+ prefix: $(pv_prefix):FEM
- excalibur.parts.FemDriverPart:
name: FEM
- mri: $(mriPrefix):FEM
+ mri: $(mri_prefix):FEM
- ADCore.blocks.stats_plugin_block:
- mri: $(mriPrefix):STAT
- prefix: $(pvPrefix):STAT
+ mri: $(mri_prefix):STAT
+ prefix: $(pv_prefix):STAT
- ADCore.parts.StatsPluginPart:
name: STAT
- mri: $(mriPrefix):STAT
+ mri: $(mri_prefix):STAT
- ADCore.includes.filewriting_collection:
- pvPrefix: $(pvPrefix)
- mriPrefix: $(mriPrefix)
+ pv_prefix: $(pv_prefix)
+ mri_prefix: $(mri_prefix)
diff --git a/malcolm/modules/excalibur/blocks/gap_plugin_block.yaml b/malcolm/modules/excalibur/blocks/gap_plugin_block.yaml
index 8c3c7a779..3549c800f 100644
--- a/malcolm/modules/excalibur/blocks/gap_plugin_block.yaml
+++ b/malcolm/modules/excalibur/blocks/gap_plugin_block.yaml
@@ -17,5 +17,4 @@
name: fillValue
description: Fill value for gaps
pv: $(prefix):GapFillConstant
- rbvSuff: _RBV
- widget: textinput
+ rbv_suff: _RBV
diff --git a/malcolm/modules/excalibur/parts/__init__.py b/malcolm/modules/excalibur/parts/__init__.py
index a33f5cb49..9010095c8 100644
--- a/malcolm/modules/excalibur/parts/__init__.py
+++ b/malcolm/modules/excalibur/parts/__init__.py
@@ -3,7 +3,8 @@
from .femdriverpart import FemDriverPart
from .gappluginpart import GapPluginPart
from .vdswrapperpart import VDSWrapperPart
-from .excaliburfilemungepart import ExcaliburFileMungePart
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/excalibur/parts/excaliburdriverpart.py b/malcolm/modules/excalibur/parts/excaliburdriverpart.py
index 98e3c2f6c..716589d1a 100644
--- a/malcolm/modules/excalibur/parts/excaliburdriverpart.py
+++ b/malcolm/modules/excalibur/parts/excaliburdriverpart.py
@@ -1,8 +1,16 @@
-from malcolm.modules.ADCore.parts import ExposureDetectorDriverPart
-
-
-class ExcaliburDriverPart(ExposureDetectorDriverPart):
- def is_hardware_triggered(self, child):
- return child.triggerMode.value != "Internal"
+from malcolm.modules import ADCore, scanning
+class ExcaliburDriverPart(ADCore.parts.DetectorDriverPart):
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ completed_steps, # type: scanning.hooks.ACompletedSteps
+ steps_to_do, # type: scanning.hooks.AStepsToDo
+ generator, # type: scanning.hooks.AGenerator
+ **kwargs # type: **scanning.hooks.Any
+ ):
+ # type: (...) -> None
+ child = context.block_view(self.mri)
+ self.is_hardware_triggered = child.triggerMode.value != "Internal"
+ super(ExcaliburDriverPart, self).configure(
+ context, completed_steps, steps_to_do, generator, **kwargs)
diff --git a/malcolm/modules/excalibur/parts/excaliburfilemungepart.py b/malcolm/modules/excalibur/parts/excaliburfilemungepart.py
deleted file mode 100644
index 8fc038271..000000000
--- a/malcolm/modules/excalibur/parts/excaliburfilemungepart.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from malcolm.core import method_takes
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.builtin.vmetas import StringMeta
-from malcolm.modules.scanning.controllers import RunnableController
-
-class ExcaliburFileMungePart(StatefulChildPart):
- @RunnableController.Configure
- @method_takes(
- "formatName", StringMeta(
- "Argument for fileTemplate, normally filename without extension"),
- "det")
- def configure(self, context, completed_steps, steps_to_do, part_info, params):
- pass
diff --git a/malcolm/modules/excalibur/parts/femchildpart.py b/malcolm/modules/excalibur/parts/femchildpart.py
index 6cb8e4b30..399003b21 100644
--- a/malcolm/modules/excalibur/parts/femchildpart.py
+++ b/malcolm/modules/excalibur/parts/femchildpart.py
@@ -1,22 +1,15 @@
-from malcolm.core import method_takes, REQUIRED
-from malcolm.modules.builtin.vmetas import StringMeta
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.ADCore.parts import DatasetRunnableChildPart
+from malcolm.modules import scanning, ADCore
-class FemChildPart(DatasetRunnableChildPart):
-
- # MethodMeta will be filled in at reset()
- @RunnableController.Configure
- @method_takes(
- "fileDir", StringMeta("File dir to write HDF files into"), REQUIRED)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
+class FemChildPart(ADCore.parts.DatasetRunnableChildPart):
+ def configure(self,
+ context, # type: scanning.hooks.AContext
+ **kwargs # type: **scanning.hooks.Any
+ ):
+ # type: (...) -> None
# Throw away the dataset info the superclass returns
- super(FemChildPart, self).configure(
- context, completed_steps, steps_to_do, part_info, params)
- # Sleep after configuration - recommended to allow at least 1s after starting Excalibur before taking first frame
- # following testing on J13. Otherwise FEM1 may not be ready and will drop a frame.
- print("Sleeping...")
+ super(FemChildPart, self).configure(context, **kwargs)
+ # Sleep after configuration - recommended to allow at least 1s after
+ # starting Excalibur before taking first frame following testing on J13.
+ # Otherwise FEM1 may not be ready and will drop a frame.
context.sleep(1.0)
- print("Slept")
diff --git a/malcolm/modules/excalibur/parts/femdriverpart.py b/malcolm/modules/excalibur/parts/femdriverpart.py
index f79622d5f..71e20563a 100644
--- a/malcolm/modules/excalibur/parts/femdriverpart.py
+++ b/malcolm/modules/excalibur/parts/femdriverpart.py
@@ -1,13 +1,15 @@
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.modules.ADCore.infos import NDArrayDatasetInfo, UniqueIdInfo
+from malcolm.modules import scanning, builtin, ADCore
-class FemDriverPart(StatefulChildPart):
+class FemDriverPart(builtin.parts.ChildPart):
+ def setup(self, registrar):
+ super(FemDriverPart, self).setup(registrar)
+ # Hooks
+ self.register_hooked(scanning.hooks.ReportStatusHook,
+ self.report_status)
+
# Only need to report that we will make a dataset, top level will do all
# control
- @RunnableController.ReportStatus
- def report_configuration(self, context):
- child = context.block_view(self.params.mri)
- return [
- NDArrayDatasetInfo(rank=2), UniqueIdInfo(child.arrayCounter.value)]
+ def report_status(self):
+ # type: () -> scanning.hooks.UInfos
+ return ADCore.infos.NDArrayDatasetInfo(rank=2)
diff --git a/malcolm/modules/excalibur/parts/gappluginpart.py b/malcolm/modules/excalibur/parts/gappluginpart.py
index 4076d65f0..73c07713e 100644
--- a/malcolm/modules/excalibur/parts/gappluginpart.py
+++ b/malcolm/modules/excalibur/parts/gappluginpart.py
@@ -1,15 +1,19 @@
-from malcolm.core import method_takes
-from malcolm.modules.builtin.parts import StatefulChildPart
-from malcolm.modules.builtin.vmetas import NumberMeta
-from malcolm.modules.scanning.controllers import RunnableController
+from malcolm.core import PartRegistrar
+from malcolm.modules import builtin, scanning
+from ..util import AFillValue
-class GapPluginPart(StatefulChildPart):
+class GapPluginPart(builtin.parts.ChildPart):
"""Gap plugin for setting the fill value"""
- @RunnableController.Configure
- @method_takes(
- "fillValue", NumberMeta("int32", "Fill value for stripe spacing"), 0)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- child = context.block_view(self.params.mri)
- child.fillValue.put_value(params.fillValue)
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(GapPluginPart, self).setup(registrar)
+ self.register_hooked(scanning.hooks.ConfigureHook, self.configure)
+ # Tell the controller to expose some extra configure parameters
+ registrar.report(scanning.hooks.ConfigureHook.create_info(
+ self.configure))
+
+ def configure(self, context, fill_value=0):
+ # type: (scanning.hooks.AContext, AFillValue) -> None
+ child = context.block_view(self.mri)
+ child.fillValue.put_value(fill_value)
diff --git a/malcolm/modules/excalibur/parts/vdswrapperpart.py b/malcolm/modules/excalibur/parts/vdswrapperpart.py
index 044142531..d758215a1 100644
--- a/malcolm/modules/excalibur/parts/vdswrapperpart.py
+++ b/malcolm/modules/excalibur/parts/vdswrapperpart.py
@@ -1,292 +1,134 @@
import os
+from annotypes import Anno, add_call_types
import numpy as np
import h5py as h5
from vdsgen.subframevdsgenerator import SubFrameVDSGenerator
-from malcolm.modules.scanning.controllers import RunnableController
-from malcolm.core import method_takes, REQUIRED, Part
-from malcolm.modules.ADCore.infos import DatasetProducedInfo
-from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta
-from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta
+from malcolm.core import Part, APartName, PartRegistrar
+from malcolm.modules import ADCore, scanning
+from ..util import AFillValue
-#import cProfile
+
+with Anno("Data type of dataset"):
+ ADataType = str
+with Anno("Height of stripes"):
+ AStripeHeight = int
+with Anno("Width of stripes"):
+ AStripeWidth = int
+
+
+def _create_dataset_infos(generator, filename):
+ uniqueid_path = "/entry/NDAttributes/NDArrayUniqueId"
+ data_path = "/entry/detector/detector"
+ generator_rank = len(generator.axes)
+ # Create the main detector data
+ yield ADCore.infos.DatasetProducedInfo(
+ name="EXCALIBUR.data",
+ filename=filename,
+ type=ADCore.util.DatasetType.PRIMARY,
+ rank=2 + generator_rank,
+ path=data_path,
+ uniqueid=uniqueid_path)
+
+ # Add any setpoint dimensions
+ for axis in generator.axes:
+ yield ADCore.infos.DatasetProducedInfo(
+ name="%s.value_set" % axis, filename=filename,
+ type=ADCore.util.DatasetType.POSITION_SET, rank=1,
+ path="/entry/detector/%s_set" % axis, uniqueid="")
-@method_takes(
- "name", StringMeta("Name of part"), REQUIRED,
- "dataType", StringMeta("Data type of dataset"), REQUIRED,
- "stripeHeight", NumberMeta("int16", "Height of stripes"), REQUIRED,
- "stripeWidth", NumberMeta("int16", "Width of stripes"), REQUIRED)
class VDSWrapperPart(Part):
# Constants for class
RAW_FILE_TEMPLATE = "FEM{}"
- OUTPUT_FILE = "EXCALIBUR"
CREATE = "w"
APPEND = "a"
READ = "r"
- ID = "/entry/NDAttributes/NDArrayUniqueId"
- SUM = "/entry/sum/sum"
+ NUM_VDS_AXES = 2
required_nodes = ["/entry/detector", "/entry/sum", "/entry/NDAttributes"]
set_bases = ["/entry/detector", "/entry/sum"]
- default_node_tree = ["/entry/detector/axes", "/entry/detector/signal",
- "/entry/sum/axes", "/entry/sum/signal"]
+ default_node_tree = ["/entry/sum/axes", "/entry/sum/signal"]
- def __init__(self, params):
- self.params = params
- super(VDSWrapperPart, self).__init__(params.name)
-
- self.current_id = None
- self.done_when_reaches = None
- self.generator = None
+ def __init__(self, name, data_type, stripe_height, stripe_width):
+ # type: (APartName, ADataType, AStripeHeight, AStripeWidth) -> None
+ super(VDSWrapperPart, self).__init__(name)
self.fems = [1, 2, 3, 4, 5, 6]
- self.vds_path = ""
- self.vds = None
- self.command = []
- self.raw_paths = []
- self.raw_datasets = []
- self.data_type = params.dataType
- self.stripe_height = params.stripeHeight
- self.stripe_width = params.stripeWidth
-
- @RunnableController.Abort
- @RunnableController.Reset
- @RunnableController.PostRunReady
- def abort(self, context):
- self.close_files()
-
- def close_files(self):
- for file_ in self.raw_datasets + [self.vds]:
- if file_ is not None and file_.id.valid:
- self.log.info("Closing file %s", file_)
- file_.close()
- self.raw_datasets = []
- self.vds = None
-
- def _create_dataset_infos(self, generator, filename):
- uniqueid_path = "/entry/NDAttributes/NDArrayUniqueId"
- data_path = "/entry/detector/detector"
- sum_path = "/entry/sum/sum"
- generator_rank = len(generator.axes)
- # Create the main detector data
- yield DatasetProducedInfo(
- name="EXCALIBUR.data",
- filename=filename,
- type="primary",
- rank=2 + generator_rank,
- path=data_path,
- uniqueid=uniqueid_path)
-
- # # And the sum
- # yield DatasetProducedInfo(
- # name="EXCALIBUR.sum",
- # filename=filename,
- # type="secondary",
- # rank=2 + generator_rank,
- # path=sum_path,
- # uniqueid=uniqueid_path)
-
- # Add any setpoint dimensions
- for axis in generator.axes:
- yield DatasetProducedInfo(
- name="%s.value_set" % axis, filename=filename,
- type="position_set", rank=1,
- path="/entry/detector/%s_set" % axis, uniqueid="")
-
- @RunnableController.Configure
- @method_takes(
- "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
- "fileDir", StringMeta("File dir to write HDF files into"), REQUIRED,
- "fileTemplate", StringMeta(
- """Printf style template to generate filename relative to fileDir.
- Arguments are:
- 1) %s: EXCALIBUR"""), "%s.h5",
- "fillValue", NumberMeta("int32", "Fill value for stripe spacing"), 0)
- def configure(self, context, completed_steps, steps_to_do, part_info,
- params):
- self.generator = params.generator
- self.current_id = completed_steps
- self.done_when_reaches = completed_steps + steps_to_do
- self.vds_path = os.path.join(params.fileDir,
- params.fileTemplate % self.OUTPUT_FILE)
- raw_file_path = params.fileTemplate % self.RAW_FILE_TEMPLATE.format(1)
+ self.data_type = data_type
+ self.stripe_height = stripe_height
+ self.stripe_width = stripe_width
+ # Hooks
+ self.register_hooked(scanning.hooks.ConfigureHook, self.configure)
+
+ def setup(self, registrar):
+ # type: (PartRegistrar) -> None
+ super(VDSWrapperPart, self).setup(registrar)
+ # Tell the controller to expose some extra configure parameters
+ registrar.report(scanning.hooks.ConfigureHook.create_info(
+ self.configure))
+
+ # Allow CamelCase as these parameters will be serialized
+ # noinspection PyPep8Naming
+ @add_call_types
+ def configure(self,
+ generator, # type: scanning.hooks.AGenerator
+ fileDir, # type: ADCore.parts.AFileDir
+ formatName="EXCALIBUR", # type: ADCore.parts.AFormatName
+ fileTemplate="%s.h5", # type: ADCore.parts.AFileTemplate
+ fillValue=0, # type: AFillValue
+ ):
+ # type: (...) -> scanning.hooks.UInfos
+ vds_path = os.path.join(fileDir, fileTemplate % formatName)
+ raw_file_path = fileTemplate % self.RAW_FILE_TEMPLATE.format(1)
node_tree = list(self.default_node_tree)
- for axis in params.generator.axes:
+ for axis in generator.axes:
for base in self.set_bases:
node_tree.append(base + "/{}_set".format(axis))
- node_tree.append(base + "/{}_set_indices".format(axis))
- with h5.File(self.vds_path, self.CREATE, libver="latest") as self.vds:
+ pad_dims = []
+ for d in generator.dimensions:
+ if len(d.axes) == 1:
+ pad_dims.append("%s_set" % d.axes[0])
+ else:
+ pad_dims.append(".")
+
+ pad_dims += ["."] * self.NUM_VDS_AXES
+
+ with h5.File(vds_path, self.CREATE, libver="latest") as vds:
for node in self.required_nodes:
- self.vds.require_group(node)
+ vds.require_group(node)
for node in node_tree:
- self.vds[node] = h5.ExternalLink(raw_file_path, node)
+ vds[node] = h5.ExternalLink(raw_file_path, node)
- # Create placeholder id and sum datasets
- initial_dims = tuple([1 for _ in params.generator.shape])
- initial_shape = initial_dims + (1, 1)
- max_shape = params.generator.shape + (1, 1)
- self.vds.create_dataset(self.ID, initial_shape,
- maxshape=max_shape, dtype="int32")
- self.vds.create_dataset(self.SUM, initial_shape,
- maxshape=max_shape, dtype="float64",
- fillvalue=np.nan)
- files = [params.fileTemplate % self.RAW_FILE_TEMPLATE.format(fem)
- for fem in self.fems]
- shape = params.generator.shape + (self.stripe_height, self.stripe_width)
+ vds["/entry/detector"].attrs["axes"] = ",".join(pad_dims)
+ vds["/entry/detector"].attrs["signal"] = "detector"
+ for i, d in enumerate(generator.dimensions):
+ for axis in d.axes:
+ name = "%s_set_indices" % axis
+ vds["/entry/detector"].attrs[name] = str(i)
# Create the VDS using vdsgen
+ files = [fileTemplate % self.RAW_FILE_TEMPLATE.format(fem)
+ for fem in self.fems]
+ shape = generator.shape + (self.stripe_height, self.stripe_width)
fgen = SubFrameVDSGenerator(
- params.fileDir,
- prefix=None,
+ fileDir,
files=files,
- output=params.fileTemplate % self.OUTPUT_FILE,
+ output=fileTemplate % formatName,
source=dict(shape=shape, dtype=self.data_type),
source_node="/entry/detector/detector",
target_node="/entry/detector/detector",
stripe_spacing=0,
module_spacing=121,
- fill_value=params.fillValue,
- log_level=1 # DEBUG
+ fill_value=fillValue,
+ log_level=1 # DEBUG
)
fgen.generate_vds()
- # Store required attributes
- self.raw_paths = [os.path.abspath(os.path.join(params.fileDir, file_))
- for file_ in files]
-
- # Open the VDS
- self.vds = h5.File(
- self.vds_path, self.APPEND, libver="latest", swmr=True)
# Return the dataset information
- dataset_infos = list(self._create_dataset_infos(
- params.generator, params.fileTemplate % self.OUTPUT_FILE))
+ dataset_infos = list(
+ _create_dataset_infos(generator, fileTemplate % formatName))
return dataset_infos
-
- # @RunnableController.PostRunArmed
- # @RunnableController.Seek
- # def seek(self, context, completed_steps, steps_to_do, part_info):
- # self.current_id = completed_steps
- # self.done_when_reaches = completed_steps + steps_to_do
- #
- # @RunnableController.Run
- # @RunnableController.Resume
- # def run(self, context, update_completed_steps):
- # self.log.info("VDS part running")
- # if not self.raw_datasets:
- # for path_ in self.raw_paths:
- # self.log.info("Waiting for file %s to be created", path_)
- # while not os.path.exists(path_):
- # context.sleep(1)
- # self.raw_datasets.append(
- # h5.File(path_, self.READ, libver="latest", swmr=True))
- # for dataset in self.raw_datasets:
- # self.log.info("Waiting for id in file %s", dataset)
- # while self.ID not in dataset:
- # context.sleep(0.1)
- # # here I should grab the handles to the vds dataset, id and all the swmr datasets and ids.
- # if self.vds.id.valid and self.ID in self.vds:
- # self.vds.swmr_mode = True
- # self.vds_sum = self.vds[self.SUM]
- # self.vds_id = self.vds[self.ID]
- # self.fems_sum = [ix[self.SUM] for ix in self.raw_datasets]
- # self.fems_id = [ix[self.ID] for ix in self.raw_datasets]
- # else:
- # self.log.warning("File %s does not exist or does not have a "
- # "UniqueIDArray, returning 0", file_)
- # return 0
- #
- # self.previous_idx = 0
- # # does this on every run
- # try:
- # self.log.info("Monitoring raw files until ID reaches %s",
- # self.done_when_reaches)
- # while self.current_id < self.done_when_reaches: # monitor the output of the vds id. When it counts up then we have finished.
- # context.sleep(0.1) # Allow while loop to be aborted
- # #cProfile.runctx('self.maybe_update_datasets()', globals(), locals(), filename="/dls/tmp/qvr31998/VDSstats")
- # self.maybe_update_datasets()
- #
- # except Exception as error:
- # self.log.exception("Error in run. Message:\n%s", error.message)
- # self.close_files()
- # raise
-
- def maybe_update_datasets(self):
- #self.log.info("VDS: updating")
- id_shapes = []
- sum_shapes = []
-
- #self.log.info("VDS: fems ids: %s", self.fems_id)
- # First update the id datasets and store their shapes
- for id in self.fems_id:
- id.refresh()
- id_shapes.append(np.array(id.shape))
-
- # Only refresh once, this should move to resize_vds when we are guaranteed id updates AFTER sum
- for s in self.fems_sum:
- s.refresh()
- sum_shapes.append(np.array(s.shape))
-
- #self.log.info("Shapes: %s", shapes)
- # Now iterate through the indexes, updating ids and sums if needed
- ###TODO: This doesn't seem to actually iterate - just does the last one.
- indexes = self.get_indexes_to_check()
- self.log.info("VDS: Indexes to checK: %s", indexes)
- need_updates = True
-
- for index in indexes:
- # For some reason, at certain point all the ids come back as zeroes.
- for i, id in enumerate(self.fems_id):
- if not self.index_in_range(index, id_shapes[i]):
- self.log.info("VDS: ID Index out of range: %s", index)
- return
- elif not self.index_in_range(index, sum_shapes[i]):
- self.log.info("VDS: SUM Index out of range: %s", index)
- return
- else:
- fem_id = id[index]
- if fem_id == 0:
- self.log.info("VDS: FEM%d not written data %s yet", i + 1, index)
- return
- else:
- assert fem_id == self.current_id + 1, \
- "VDS: FEM%d wrote %d in index %s when expecting %s" % (
- i + 1, fem_id, index, self.current_id + 1)
-
- #self.log.info("VDS: processing id: %s", self.current_id + 1)
- #self.log.info("VDS: index %s", index)
- if need_updates:
- self.resize_vds(id_shapes[0])
- need_updates = False
- self.update_id_sum(index)
- self.current_id += 1
- #self.log.info("ID reached: %s", self.current_id)
- self.flush_id_sum()
-
- def resize_vds(self, shape):
- self.vds_sum.resize(shape)
- self.vds_id.resize(shape) # source and target are now the same shape
-
- def update_id_sum(self, index):
- self.vds_sum[index] = sum(s[index] for s in self.fems_sum)
- self.vds_id[index] = self.current_id + 1
-
- def flush_id_sum(self):
- self.vds_sum.flush()
- self.vds_id.flush() # flush to disc
-
- def index_in_range(self, index, shape):
- # check the given index is valid for the shape of the array
- in_range = index < np.array(shape)[:len(index)]
- return np.all(in_range)
-
- def get_indexes_to_check(self):
- # returns the indexes that we should check for updates
- #self.log.info("VDS:Checking in range %s, %s", self.current_id, self.done_when_reaches)
- for idx in range(self.current_id, self.done_when_reaches):
- index = tuple(self.generator.get_point(idx).indexes)
- #self.log.info("VDS: Yielding %s", index)
- yield index
diff --git a/malcolm/modules/excalibur/util.py b/malcolm/modules/excalibur/util.py
new file mode 100644
index 000000000..6b02afaa0
--- /dev/null
+++ b/malcolm/modules/excalibur/util.py
@@ -0,0 +1,5 @@
+from annotypes import Anno
+
+
+with Anno("Fill value for stripe spacing"):
+ AFillValue = int
diff --git a/malcolm/modules/pandablocks/DEV-PANDA.yaml b/malcolm/modules/pandablocks/DEV-PANDA.yaml
new file mode 100644
index 000000000..5b4344e56
--- /dev/null
+++ b/malcolm/modules/pandablocks/DEV-PANDA.yaml
@@ -0,0 +1,7 @@
+- pandablocks.blocks.pandablocks_manager_block:
+ config_dir: /tmp
+ mri: PANDA
+ hostname: 172.23.252.201
+
+- web.blocks.web_server_block:
+ mri: WEB
diff --git a/malcolm/modules/pandablocks/__init__.py b/malcolm/modules/pandablocks/__init__.py
index e69de29bb..30b39bd88 100644
--- a/malcolm/modules/pandablocks/__init__.py
+++ b/malcolm/modules/pandablocks/__init__.py
@@ -0,0 +1 @@
+from . import controllers, parts
diff --git a/malcolm/modules/pandablocks/blocks/__init__.py b/malcolm/modules/pandablocks/blocks/__init__.py
new file mode 100644
index 000000000..46e71e44a
--- /dev/null
+++ b/malcolm/modules/pandablocks/blocks/__init__.py
@@ -0,0 +1,6 @@
+from malcolm.yamlutil import make_block_creator, check_yaml_names
+
+pandablocks_manager_block = make_block_creator(
+ __file__, "pandablocks_manager_block.yaml")
+
+__all__ = check_yaml_names(globals())
diff --git a/malcolm/modules/pandablocks/blocks/pandablocks_manager_block.yaml b/malcolm/modules/pandablocks/blocks/pandablocks_manager_block.yaml
new file mode 100644
index 000000000..0227a844d
--- /dev/null
+++ b/malcolm/modules/pandablocks/blocks/pandablocks_manager_block.yaml
@@ -0,0 +1,23 @@
+- builtin.parameters.string:
+ name: mri
+ description: Malcolm resource id of the Block and prefix for children
+
+- builtin.parameters.string:
+ name: hostname
+ description: Hostname of the box
+ default: localhost
+
+- builtin.parameters.int32:
+ name: port
+ description: Port number of the server control
+ default: 8888
+
+- builtin.parameters.string:
+ name: config_dir
+ description: Where to store saved configs
+
+- pandablocks.controllers.PandABlocksManagerController:
+ mri: $(mri)
+ config_dir: $(config_dir)
+ hostname: $(hostname)
+ port: $(port)
diff --git a/malcolm/modules/pandablocks/controllers/__init__.py b/malcolm/modules/pandablocks/controllers/__init__.py
index bffe9cc51..329906c5c 100644
--- a/malcolm/modules/pandablocks/controllers/__init__.py
+++ b/malcolm/modules/pandablocks/controllers/__init__.py
@@ -1,4 +1,8 @@
-from .pandablocksmanagercontroller import PandABlocksManagerController
+from .pandablocksmanagercontroller import PandABlocksManagerController, \
+ AMri, AConfigDir, AHostname, APort, AInitialDesign, ADescription, \
+ AUseGit, AUseCothread
-# Expose all the classes
-__all__ = sorted(k for k, v in globals().items() if type(v) == type)
+# Expose a nice namespace
+from malcolm.core import submodule_all
+
+__all__ = submodule_all(globals())
diff --git a/malcolm/modules/pandablocks/controllers/pandablocksmanagercontroller.py b/malcolm/modules/pandablocks/controllers/pandablocksmanagercontroller.py
index 828a85413..af2d2db68 100644
--- a/malcolm/modules/pandablocks/controllers/pandablocksmanagercontroller.py
+++ b/malcolm/modules/pandablocks/controllers/pandablocksmanagercontroller.py
@@ -3,50 +3,58 @@
import operator
from xml.etree import cElementTree as ET
+from annotypes import Anno
+
from malcolm.compat import OrderedDict, maybe_import_cothread, et_to_string
-from malcolm.core import method_also_takes, Queue, TimeoutError, \
- call_with_params
+from malcolm.core import Queue, TimeoutError, BooleanMeta, TableMeta
from malcolm.modules.builtin.controllers import BasicController, \
- ManagerController
+ ManagerController, AMri, AConfigDir, AInitialDesign, ADescription, \
+ AUseCothread, AUseGit
from malcolm.modules.builtin.parts import ChildPart
-from malcolm.modules.builtin.vmetas import BooleanMeta, TableMeta, StringMeta, \
- NumberMeta
-from malcolm.modules.pandablocks.parts.pandablocksmaker import \
- PandABlocksMaker, SVG_DIR
-from .pandablocksclient import PandABlocksClient
+from ..parts.pandablocksmaker import PandABlocksMaker, SVG_DIR
+from ..parts.pandablocksactionpart import PandABlocksActionPart
+from ..pandablocksclient import PandABlocksClient
LUT_CONSTANTS = dict(
A=0xffff0000, B=0xff00ff00, C=0xf0f0f0f0, D=0xcccccccc, E=0xaaaaaaaa)
+# Time between polls for *CHANGES
+POLL_PERIOD = 0.1
+
+with Anno("Hostname of the box"):
+ AHostname = str
+with Anno("Port number of the TCP server control port"):
+ APort = int
+
-@method_also_takes(
- "hostname", StringMeta("Hostname of the box"), "localhost",
- "port", NumberMeta("uint32", "Port number of the server client"), 8888)
class PandABlocksManagerController(ManagerController):
- def __init__(self, process, parts, params):
+ def __init__(self,
+ mri, # type: AMri
+ config_dir, # type: AConfigDir
+ hostname="localhost", # type: AHostname
+ port=8888, # type: APort
+ initial_design="", # type: AInitialDesign
+ description="", # type: ADescription
+ use_cothread=True, # type: AUseCothread
+ use_git=True, # type: AUseGit
+ ):
+ # type: (...) -> None
super(PandABlocksManagerController, self).__init__(
- process, parts, params)
+ mri, config_dir, initial_design, description, use_cothread, use_git)
# {block_name: BlockData}
self._blocks_data = {}
# {block_name: {field_name: Part}}
self._blocks_parts = OrderedDict()
# src_attr -> [dest_attr]
self._listening_attrs = {}
- # (block_name, src_field_name) -> [dest_field_name]
- self._scale_offset_fields = {}
- # full_src_field -> [full_dest_field]
- self._mirrored_fields = {}
- # fields that need to inherit UNITS, SCALE and OFFSET from upstream
- self._inherit_scale = {}
- self._inherit_offset = {}
# lut elements to be displayed or not
# {fnum: {id: visible}}
self._lut_elements = {}
# changes left over from last time
self.changes = OrderedDict()
# The PandABlock client that does the comms
- self.client = PandABlocksClient(params.hostname, params.port, Queue)
+ self.client = PandABlocksClient(hostname, port, Queue)
# Filled in on reset
self._stop_queue = None
self._poll_spawned = None
@@ -83,10 +91,10 @@ def do_reset(self):
super(PandABlocksManagerController, self).do_reset()
def _poll_loop(self):
- """At 10Hz poll for changes"""
+ """At POLL_PERIOD poll for changes"""
next_poll = time.time()
while True:
- next_poll += 0.1
+ next_poll += POLL_PERIOD
timeout = next_poll - time.time()
if timeout < 0:
timeout = 0
@@ -130,70 +138,45 @@ def _make_blocks_parts(self):
assert not self.changes, "There are still changes %s" % self.changes
def _make_child_controller(self, parts, mri):
- controller = call_with_params(
- BasicController, self.process, parts, mri=mri)
+ controller = BasicController(mri=mri)
+ if mri.endswith("PCAP"):
+ parts.append(PandABlocksActionPart(
+ self.client, "*PCAP", "ARM", "Arm position capture", []))
+ parts.append(PandABlocksActionPart(
+ self.client, "*PCAP", "DISARM", "Disarm position capture", []))
+ for part in parts:
+ controller.add_part(part)
return controller
def _make_corresponding_part(self, block_name, mri):
- part = call_with_params(ChildPart, name=block_name, mri=mri)
+ part = ChildPart(name=block_name, mri=mri, stateful=False)
return part
def _make_parts(self, block_name, block_data):
- mri = "%s:%s" % (self.params.mri, block_name)
+ mri = "%s:%s" % (self.mri, block_name)
# Defer creation of parts to a block maker
maker = PandABlocksMaker(self.client, block_name, block_data)
# Make the child controller and add it to the process
controller = self._make_child_controller(maker.parts.values(), mri)
- self.process.add_controller(mri, controller)
+ self.process.add_controller(controller, timeout=5)
# Store the parts so we can update them with the poller
self._blocks_parts[block_name] = maker.parts
- # setup param pos on a block with pos_out to inherit SCALE OFFSET UNITS
- pos_fields = []
- pos_out_fields = []
- pos_mux_inp_fields = []
- for field_name, field_data in block_data.fields.items():
- if field_name == "INP" and field_data.field_type == "pos_mux":
- pos_mux_inp_fields.append(field_name)
- elif field_data.field_type == "pos_out":
- pos_out_fields.append(field_name)
- elif field_data.field_subtype in ("pos", "relative_pos"):
- pos_fields.append(field_name)
-
- # Make sure pos_fields can get SCALE from somewhere
- if pos_fields:
- sources = pos_mux_inp_fields + pos_out_fields
- assert len(sources) == 1, \
- "Expected one source of SCALE and OFFSET for %s, got %s" % (
- pos_fields, sources)
- for field_name in pos_fields:
- self._map_scale_offset(block_name, sources[0], field_name)
-
# Make the corresponding part for us
child_part = self._make_corresponding_part(block_name, mri)
self.add_part(child_part)
- def _map_scale_offset(self, block_name, src_field, dest_field):
- self._scale_offset_fields.setdefault(
- (block_name, src_field), []).append(dest_field)
- if src_field == "INP":
- # mapping based on what it is connected to, defer
- return
- for suff in ("SCALE", "OFFSET", "UNITS"):
- full_src_field = "%s.%s.%s" % (block_name, src_field, suff)
- full_dest_field = "%s.%s.%s" % (block_name, dest_field, suff)
- self._mirrored_fields.setdefault(full_src_field, []).append(
- full_dest_field)
-
def _set_lut_icon(self, block_name):
icon_attr = self._blocks_parts[block_name]["icon"].attr
with open(os.path.join(SVG_DIR, "LUT.svg")) as f:
svg_text = f.read()
- fnum = int(self.client.get_field(block_name, "FUNC.RAW"))
+ fnum = int(self.client.get_field(block_name, "FUNC.RAW"), 0)
invis = self._get_lut_icon_elements(fnum)
+ # https://stackoverflow.com/a/8998773
+ ET.register_namespace('', "http://www.w3.org/2000/svg")
root = ET.fromstring(svg_text)
for i in invis:
# Find the first parent which has a child with id i
@@ -259,11 +242,26 @@ def _calc_visibility(self, func, op, nargs, permutations):
def handle_changes(self, changes):
for k, v in changes.items():
self.changes[k] = v
+ block_changes = OrderedDict()
for full_field, val in list(self.changes.items()):
- # If we have a mirrored field then fire off a request
- for dest_field in self._mirrored_fields.get(full_field, []):
- self.client.send("%s=%s\n" % (dest_field, val))
block_name, field_name = full_field.split(".", 1)
+ block_changes.setdefault(block_name, []).append((
+ field_name, full_field, val))
+ for block_name, field_changes in block_changes.items():
+ # Squash changes
+ block_mri = "%s:%s" % (self.mri, block_name)
+ try:
+ block_controller = self.process.get_controller(block_mri)
+ except ValueError:
+ self.log.debug("Block %s not known", block_name)
+ for _, full_field, _ in field_changes:
+ self.changes.pop(full_field)
+ else:
+ with block_controller.changes_squashed:
+ self.do_field_changes(block_name, field_changes)
+
+ def do_field_changes(self, block_name, field_changes):
+ for field_name, full_field, val in field_changes:
ret = self.update_attribute(block_name, field_name, val)
if ret is not None:
self.changes[full_field] = ret
@@ -275,9 +273,6 @@ def handle_changes(self, changes):
def update_attribute(self, block_name, field_name, val):
ret = None
- if block_name not in self._blocks_parts:
- self.log.debug("Block %s not known", block_name)
- return
parts = self._blocks_parts[block_name]
if field_name not in parts:
self.log.debug("Block %s has no field %s", block_name, field_name)
@@ -308,51 +303,24 @@ def update_attribute(self, block_name, field_name, val):
current_part = parts[field_name + ".CURRENT"]
current_attr = current_part.attr
self._update_current_attr(current_attr, val)
- if field_data.field_type == "pos_mux" and field_name == "INP":
- # all param pos fields should inherit scale and offset
- for dest_field_name in self._scale_offset_fields.get(
- (block_name, field_name), []):
- self._update_scale_offset_mapping(
- block_name, dest_field_name, val)
- return ret
-
- def _update_scale_offset_mapping(self, block_name, field_name, mux_val):
- # Find the fields that depend on this input
- field_data = self._blocks_data[block_name].fields.get(field_name, None)
- if field_data.field_subtype == "relative_pos":
- suffs = ("SCALE", "UNITS")
- else:
- suffs = ("SCALE", "OFFSET", "UNITS")
-
- for suff in suffs:
- full_src_field = "%s.%s" % (mux_val, suff)
- full_dest_field = "%s.%s.%s" % (block_name, field_name, suff)
- # Remove mirrored fields that are already in lists
- for field_list in self._mirrored_fields.values():
- try:
- field_list.remove(full_dest_field)
- except ValueError:
- pass
-
- self._mirrored_fields.setdefault(full_src_field, []).append(
- full_dest_field)
- # update it to the right value
- if mux_val == "ZERO":
- value = dict(SCALE=1, OFFSET=0, UNITS="")[suff]
- else:
- mon_block_name, mon_field_name = mux_val.split(".", 1)
- mon_parts = self._blocks_parts[mon_block_name]
- src_attr = mon_parts["%s.%s" % (mon_field_name, suff)].attr
- value = src_attr.value
- self.client.send("%s=%s\n" % (full_dest_field, value))
+ # if we changed a pos_out, its SCALE or OFFSET, update its scaled value
+ root_field_name = field_name.split(".")[0]
+ field_data = self._blocks_data[block_name].fields[root_field_name]
+ if field_data.field_type == "pos_out":
+ scale = parts[root_field_name + ".SCALE"].attr.value
+ offset = parts[root_field_name + ".OFFSET"].attr.value
+ value = parts[root_field_name].attr.value
+ scaled = value * scale + offset
+ parts[root_field_name + ".SCALED"].attr.set_value(scaled)
+ return ret
def _update_current_attr(self, current_attr, mux_val):
# Remove the old current_attr from all lists
- for mux_list in self._listening_attrs.values():
+ for mux_set in self._listening_attrs.values():
try:
- mux_list.remove(current_attr)
- except ValueError:
+ mux_set.remove(current_attr)
+ except KeyError:
pass
# add it to the list of things that need to update
if mux_val == "ZERO":
@@ -363,6 +331,6 @@ def _update_current_attr(self, current_attr, mux_val):
mon_block_name, mon_field_name = mux_val.split(".", 1)
mon_parts = self._blocks_parts[mon_block_name]
out_attr = mon_parts[mon_field_name].attr
- self._listening_attrs.setdefault(out_attr, []).append(current_attr)
+ self._listening_attrs.setdefault(out_attr, set()).add(current_attr)
# update it to the right value
current_attr.set_value(out_attr.value)
diff --git a/malcolm/modules/pandablocks/icons/CLOCKS.svg b/malcolm/modules/pandablocks/icons/CLOCKS.svg
index 01949a45e..6afd92f7c 100644
--- a/malcolm/modules/pandablocks/icons/CLOCKS.svg
+++ b/malcolm/modules/pandablocks/icons/CLOCKS.svg
@@ -1,103 +1,7 @@
-
-
-
-