diff --git a/docs/topic_guides/fluorescence_detectors.rst b/docs/topic_guides/fluorescence_detectors.rst
index fdd7efe8..d9d6fff1 100644
--- a/docs/topic_guides/fluorescence_detectors.rst
+++ b/docs/topic_guides/fluorescence_detectors.rst
@@ -5,193 +5,127 @@ Fluorescence Detectors
 .. contents:: Table of Contents
    :depth: 3

-Specifying Detectors in Configuration
-=====================================
-
-To add new detectors to the beamline, new sections should be added the
-*iconfig.toml* file. Each section should be labeled
-``[<class>.<name>]``, where ``<class>`` specifies which interface is
-present (``"dxp"`` for XIA DXP or ``"xspress"`` for Xspress3), and
-``<name>`` becomes the device name. *prefix* is the PV prefix for the
-EPICS IOC, and *num_elements* specifies the number of detector
-elements.
-
-.. code-block:: toml
-
-    [dxp.vortex_me4]
+.. warning::
-       prefix = "20xmap4b"
-       num_elements = 4
+   Fluorescence detectors are in the process of being transitioned
+   from the older, threaded Ophyd library to *ophyd-async*. The
+   documentation below **should be accurate for Xspress3** devices,
+   but **not for DXP**-based devices, as those have not been
+   transitioned to ophyd-async yet.
-    [xspress.vortex_ex]
+Haven supports two varieties of fluorescence detector:
-       prefix = "dp_xsp3_2"
-       num_elements = 1
+- Xspress3
+- DXP (XIA's XMAP, Saturn, and Mercury)
+
+The support for these two types of electronics is very different, but
+the basic idea is the same. To acquire a frame, each detector will
+first configure its file writer, then trigger the detector. This will
+result in a file on disk with the measured spectra from all elements
+in the detector. The data can then be retrieved with something like
+Tiled, which can open the data file and serve the enclosed data. All
+these steps happen out of sight of the user, provided the detector is
+used with the updated ophyd-async devices.

-The device can then be retrieved from the instrument registry for use
-in bluesky plans:
-.. code-block:: python
-
-    import haven
-
-    # Get individual fluorescence detectors
-    my_detector = haven.registry.find(name="vortex_me4")
-    another_detector = haven.registry.find(name="vortex_ex")
+Specifying Detectors in Configuration
+=====================================
-    # Get all fluorescence detectors of any kind (e.g. DXP, Xspress3, etc.)
-    detectors = haven.registry.findall(label="fluorescence_detectors")
+To add new detectors to the beamline, new sections should be added to
+the *iconfig.toml* file. Each section should be labeled
+``[[ <class> ]]``, where ``<class>`` specifies which interface is
+present (``"dxp"`` for XIA DXP/XMAP or ``"xspress3"`` for Xspress3).
+The following parameters can then be included:
-Common Behavior
-===============
+*name*
+  The python-friendly name to use for this device.
+*prefix*
+  The PV prefix for the EPICS IOC, including the trailing colon.
-Fluorescence detectors are implemented as
-:py:class:`~haven.devices.xspress.Xspress3Detector` and
-:py:class:`~haven.devices.dxp.DxpDetector` Ophyd device
-classes. They are written to have a common Ophyd interface so that
-clients (e.g. Firefly) can use fluorescence detectors interchangeably.
+.. code-block:: toml
+   :caption: example_iconfig.toml
+
+   [[ dxp ]]
+   prefix = "20xmap4b:"
+   name = "vortex_me4"
-Creating Devices
-----------------
+   [[ xspress3 ]]
+   prefix = "dp_xsp3_2:"
+   name = "vortex_ex"
-By default, devices created from these device classes include one MCA
-element, available on the ``mcas`` attribute. The **recommended way to
-create a fluorescence detector** device directly is with the
-:py:func:`~haven.devices.dxp.load_xspress()` and
-:py:func:`~haven.devices.dxp.load_dxp()` factory functions:
+The device can then be retrieved by its name for use in Bluesky plans.
 .. code-block:: python
-
-    from haven import load_xspress
-
-    det = load_xspress(name="vortex_me4",
-                       prefix="20xmap4b",
-                       num_elements=4)
-    det.wait_for_connection()
-
-Alternately, to make a dedicated subclass with a specific number of
-elements, override the ``mcas`` attributes:
-
-.. code-block:: python
-
-    from haven.devices import xspress
+    import haven
-    class Xspress4Element(xspress.Xspress3Detector):
-        mcas = xspress.DDC(
-            xspress.add_mcas(range_=range(4)),
-            kind=(Kind.normal | Kind.config),
-            default_read_attrs=["mca0", "mca1", "mca2", "mca3"],
-            default_configuration_attrs=["mca0", "mca1", "mca2", "mca3"],
-        )
+    # Get individual fluorescence detectors
+    vortex_4elem = haven.beamline.devices["vortex_me4"]
+    vortex_1elem = haven.beamline.devices["vortex_ex"]
-Managing Elements and ROIs
+    # Get all fluorescence detectors of any kind (e.g. DXP, Xspress3, etc.)
+    detectors = haven.beamline.devices.findall(label="fluorescence_detectors")
+
+
+Why can't I…
+############
+
+Previously, some steps were performed during data acquisition by the
+IOC that have now been moved to other parts of the system. These
+decisions were made largely to simplify data acquisition and ensure
+this process happens smoothly.
+
+…set regions of interest (ROIs)?
+--------------------------------
+
+ROIs should now be set during analysis, prior to visualization, using
+tools like xray-larch.
+
+ROIs are typically set so that each one roughly corresponds to the
+intensity of a given emission line (e.g. Fe–K). Doing this during data
+acquisition is convenient for later visualization, since no specialized
+plotting tools are needed. However, there are a few drawbacks.
+
+Setting ROIs during acquisition mixes measured data with processed
+data, giving the impression that the Fe–K emission was actually
+measured, when in reality a rough approximation was performed. This
+also gives the impression that no further analysis is needed. In
+reality, a full spectrum analysis such as that available in xray-larch
+is required to properly derive estimates of the elemental emission
+signals. This analysis will account for background subtraction and
+multiple overlapping peaks, among other things.
+
+Additionally, calculating ROIs adds extra time to each detector
+frame acquisition. This may introduce a race condition. If plugins are
+not set to block, then the PVs for the various plugins may not be
+updated by the time the data acquisition system thinks the frame is
+done. The only reliable means to ensure plugins have completed
+processing is to set them to block, which adds additional time to each
+acquisition. Given that ROI calculations are trivial for a full
+dataset, this is best left to the analysis and visualization phases
+of the measurement.
+
+…disable individual elements?
+-----------------------------
+
+Ophyd-async does not consider the elements of the detector
+individually. The detector is responsible for collecting its own data
+and saving it to disk. As a consequence, it is not possible to enable
+or disable individual elements during acquisition. Since no data
+reduction or analysis takes place during acquisition, this should not
+have any impact on the results.
Instead, the entire spectrum for each +element is saved to disk using the IOCs file writer plugins. **Whether +to include a given element** is then a decision that must be made +during analysis and visualization. + +…view the summed spectrum? -------------------------- -.. note:: - - Not all fluorescence detector IOCs agree on how to number MCAs and - ROIs. To maintain a unified interface, Haven uses the convention to - start counting from 0 regardless of the IOC. As such, the haven - device signals may be misaligned with the PVs they map to. - - For example on a DXP-based IOC, an ophyd signal - ``det.mcas.mca1.rois.roi1`` will have a PV like - ``xmap_4b:MCA1.R0``. - -By default all elements (MCAs) will collect spectra, and **all ROIs -will save aggregated values**. While this setup ensures that no data -are lost, it also creates a large number of signals in the database -and may make analysis tedious. Most likely, only some ROIs are -meaningful, so those signals can be identified by giving them the -``hinted`` kind. - -https://blueskyproject.io/ophyd/user/reference/signals.html#kind - -During the staging phase (in its -:py:meth:`~have.instrument.fluorescence_detector.ROIMixin.stage()` -method), each ROI will check this signal and if it is true, then it -**will change its kind** to ``hinted``. When unstaging, the signal is -reset to its original value. - -Individual **ROIs can be marked for hinting** by setting the -:py:attr:`~haven.devices.xspress.ROI.use` signal: - -.. code-block:: python - - from haven import load_xspress - - # Create a Xspress3-based fluorescence detector - det = load_xspress(name="vortex_me4", - prefix="20xmap4b", - num_elements=4) - - # Mark the 3rd element, 2nd ROI (0-indexed) - det.mcas.mca2.rois.roi1.use.set(1) - -Behind the scenes, to track the state of -:py:attr:`~haven.devices.xspress.ROI.use` we add a "~" to the start -of the value in the -:py:meth:`~have.instrument.fluorescence_detector.label` signal if -:py:meth:`~have.instrument.fluorescence_detector.use` is false. - - -Marking multiple ROIs on multiple elements is possible using the -following methods on the -:py:class:`~haven.devices.fluorescence_detector.XRFMixin` object: - -- :py:meth:`~haven.devices.fluorescence_detector.XRFMixin.enable_rois` -- :py:meth:`~haven.devices.fluorescence_detector.XRFMixin.disable_rois` - -These methods accepts an optional sequence of integers for the indices -of the elements or ROIs to enable/disable. If not ROIs or elements are -specified, the methods will operate on all ROIs or elements -(e.g. ``det.disables_rois()`` will disable all ROIs on all elements. - -.. code-block:: python - - from haven import load_xspress - - # Create a Xspress3-based fluorescence detector - det = load_xspress(name="vortex_me4", - prefix="20xmap4b", - num_elements=4) - - # Mark all ROIs on the third and fifth elements - det.enable_rois(elements=[2, 4]) - - # Unmark the first, eight, and fifteeth elements - det.enable_rois(rois=[0, 7, 14]) - - # Unmark the third ROI on the second element - det.enable_rois(rois=[2], elements=[1]) - -Xspress 3 -========= - -Support for Quantum Detectors' Xspress3 Family of detectors is -provided by the :py:class:`~haven.devices.xspress.Xspress3Detector` -base class. The EPICS support for Xspress3 detectors is based on the -EPICS area detector module, and so the -:py:class:`~haven.devices.xspress.Xspress3Detector` is a customized -:py:class:`ophyd.DetectorBase`. 
- -XIA DXP (XMAP) -============== - -DXP (XMAP, Mercury, Saturn) electronics use the bluesky multi-channel -analyzer (MCA) device, packaged in Haven as the -:py:class:`~haven.devices.dxp.DxpDetector` class. - -The DXP electronics are **not yet compatible** with :doc:`fly-scanning -`. The :py:class:`~haven.devices.dxp.DxpDetector` -does implement the -:py:meth:`~haven.devices.dxp.DxpDetector.kickoff()` and -:py:meth:`~haven.devices.dxp.DxpDetector.complete()` methods, but -does not yet handle data collection. This is because the data are -reported as a byte stream that must first be decoded. The DXP manual -describes the structure of this byte-stream, so in principle it is -possible to parse this in the -:py:meth:`~haven.devices.dxp.DxpDetector.collect()` method. - +Since the data coming from the fluorescence detector are effectively +an area detector image, it is simple to calculate the summed spectrum +from all the spectra of the individual elements. While the EPICS IOCs +typically include a PV for this summed spectrum, it is not trivial to +include this summed spectrum in the resulting HDF5 file. Instead, +plotting tools, like Haven's run browser, should include a feature for +dimensionality reduction. diff --git a/src/haven/devices/detectors/aravis.py b/src/haven/devices/detectors/aravis.py index e865e444..5ca41563 100644 --- a/src/haven/devices/detectors/aravis.py +++ b/src/haven/devices/detectors/aravis.py @@ -1,8 +1,8 @@ -from ophyd_async.core import SubsetEnum +from ophyd_async.core import PathProvider, SubsetEnum from ophyd_async.epics.adaravis import AravisDetector as DetectorBase from ophyd_async.epics.core import epics_signal_rw_rbv -from .area_detectors import HavenDetector +from .area_detectors import HavenDetector, default_path_provider class AravisTriggerSource(SubsetEnum): @@ -13,8 +13,12 @@ class AravisTriggerSource(SubsetEnum): class AravisDetector(HavenDetector, DetectorBase): _ophyd_labels_ = {"cameras", "detectors"} - def __init__(self, prefix, *args, **kwargs): - super().__init__(*args, prefix=prefix, **kwargs) + def __init__( + self, prefix, *args, path_provider: PathProvider | None = None, **kwargs + ): + if path_provider is None: + path_provider = default_path_provider() + super().__init__(*args, prefix=prefix, path_provider=path_provider, **kwargs) # Replace a signal that has different enum options self.drv.trigger_source = epics_signal_rw_rbv( AravisTriggerSource, # type: ignore diff --git a/src/haven/devices/detectors/area_detectors.py b/src/haven/devices/detectors/area_detectors.py index acbabcb4..0f5482c0 100644 --- a/src/haven/devices/detectors/area_detectors.py +++ b/src/haven/devices/detectors/area_detectors.py @@ -5,22 +5,17 @@ from ..._iconfig import load_config -class HavenDetector: - def __init__(self, *args, writer_path=None, **kwargs): - # Create a path provider based on the path given - if writer_path is None: - writer_path = default_path() - path_provider = YMDPathProvider( - filename_provider=UUIDFilenameProvider(), - base_directory_path=writer_path, - create_dir_depth=-4, - ) - super().__init__(*args, path_provider=path_provider, **kwargs) +class HavenDetector: ... 
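+
+
+# A minimal usage sketch (not part of the original module): detector classes
+# such as ``AravisDetector`` fall back to this provider when no
+# ``path_provider`` argument is given, e.g.::
+#
+#     path_provider = default_path_provider()
+#     det = AravisDetector("255idgigeA:", name="camera", path_provider=path_provider)
+#
+# The PV prefix above is hypothetical; files are written into date-based
+# (year/month/day) subdirectories under ``area_detector_root_path`` from
+# *iconfig.toml*.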
-def default_path(config=None): +def default_path_provider(path: Path = None, config=None): if config is None: config = load_config() - # Generate a default path provider - root_dir = Path(config.get("area_detector_root_path", "/tmp")) - return root_dir + if path is None: + path = Path(config.get("area_detector_root_path", "/tmp")) + path_provider = YMDPathProvider( + filename_provider=UUIDFilenameProvider(), + base_directory_path=path, + create_dir_depth=-4, + ) + return path_provider diff --git a/src/haven/devices/detectors/dxp.py b/src/haven/devices/detectors/dxp.py new file mode 100644 index 00000000..ddd4e718 --- /dev/null +++ b/src/haven/devices/detectors/dxp.py @@ -0,0 +1,87 @@ +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator, Sequence + +from bluesky.protocols import StreamAsset +from event_model import DataKey +from ophyd_async.core import ( + DEFAULT_TIMEOUT, + AsyncStatus, + DatasetDescriber, + DetectorController, + DetectorWriter, + Device, + HDFDataset, + HDFFile, + NameProvider, + PathProvider, + SignalR, + StandardDetector, + StrictEnum, + TriggerInfo, + observe_value, + set_and_wait_for_value, + wait_for_value, +) +from ophyd_async.epics import adcore +from ophyd_async.epics.adcore._core_io import NDPluginBaseIO +from ophyd_async.epics.core import ( + epics_signal_r, + epics_signal_rw, + epics_signal_rw_rbv, + epics_signal_x, +) + +from ..synApps import ScanInterval +from .area_detectors import HavenDetector, default_path_provider +from .dxp_controller import DXPController +from .dxp_io import DXPDriverIO +from .netcdf import NetCDFWriter, NDFileNetCDFIO +# from ._utils import ( +# FileWriteMode, +# convert_param_dtype_to_np, +# convert_pv_dtype_to_np, +# ) + + +class DXPDetector(HavenDetector, StandardDetector): + """An ophyd-async detector for XIA's DXP-based detectors. + + E.g. XMAP, Saturn, and Mercury. 
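+
+    A minimal construction sketch, mirroring the mocked setup used in the
+    test suite (the PV prefix here is an example, not a default)::
+
+        det = DXPDetector("255id_dxp:", name="vortex_me4")
+        await det.connect(mock=True)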
+ + """ + + _controller: DetectorController + _writer: adcore.ADHDFWriter + + def __init__( + self, + prefix: str, + path_provider: PathProvider | None = None, + netcdf_suffix="netCDF1:", + name: str = "", + config_sigs: Sequence[SignalR] = (), + ): + self.drv = DXPDriverIO(prefix) + self.netcdf = NDFileNetCDFIO(prefix + netcdf_suffix) + + if path_provider is None: + path_provider = default_path_provider() + super().__init__( + DXPController(self.drv), + NetCDFWriter( + self.netcdf, + path_provider, + lambda: self.name, + adcore.ADBaseDatasetDescriber(self.drv), + ), + config_sigs=( + self.drv.preset_mode, + self.drv.preset_live_time, + self.drv.preset_real_time, + self.drv.preset_events, + self.drv.preset_triggers, + self.drv.collect_mode, + *config_sigs, + ), + name=name, + ) diff --git a/src/haven/devices/detectors/dxp_controller.py b/src/haven/devices/detectors/dxp_controller.py new file mode 100644 index 00000000..98ed8ea7 --- /dev/null +++ b/src/haven/devices/detectors/dxp_controller.py @@ -0,0 +1,115 @@ +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator, Sequence + +from bluesky.protocols import StreamAsset +from event_model import DataKey +from ophyd_async.core import ( + DEFAULT_TIMEOUT, + AsyncStatus, + DatasetDescriber, + DetectorController, + DetectorWriter, + Device, + HDFDataset, + HDFFile, + NameProvider, + PathProvider, + SignalR, + StandardDetector, + StrictEnum, + TriggerInfo, + observe_value, + set_and_wait_for_value, + wait_for_value, +) +from ophyd_async.epics import adcore +from ophyd_async.epics.adcore._core_io import NDPluginBaseIO +from ophyd_async.epics.core import ( + epics_signal_r, + epics_signal_rw, + epics_signal_rw_rbv, + epics_signal_x, +) + +from ..synApps import ScanInterval +from .area_detectors import HavenDetector, default_path_provider + +# from ._utils import ( +# FileWriteMode, +# convert_param_dtype_to_np, +# convert_pv_dtype_to_np, +# ) + +import asyncio +from typing import Literal + +from ophyd_async.core import ( + AsyncStatus, + DetectorController, + DetectorTrigger, + TriggerInfo, + set_and_wait_for_value, +) +from ophyd_async.epics import adcore + +from .dxp_io import DXPDriverIO, DXPTriggerMode, DXPTriggerSource + + +# Need to figure out what the highest deadtime for a DXP detector is +_HIGHEST_POSSIBLE_DEADTIME = 1961e-6 + + +class DXPController(DetectorController): + def __init__(self, driver: DXPDriverIO) -> None: + self._drv = driver + self._arm_status: AsyncStatus | None = None + + def get_deadtime(self, exposure: float | None) -> float: + return _HIGHEST_POSSIBLE_DEADTIME + + async def prepare(self, trigger_info: TriggerInfo): + if trigger_info.total_number_of_triggers == 0: + image_mode = adcore.ImageMode.CONTINUOUS + else: + image_mode = adcore.ImageMode.MULTIPLE + if (exposure := trigger_info.livetime) is not None: + await self._drv.acquire_time.set(exposure) + + trigger_mode, trigger_source = self._get_trigger_info(trigger_info.trigger) + # trigger mode must be set first and on it's own! 
+ await self._drv.trigger_mode.set(trigger_mode) + + await asyncio.gather( + self._drv.trigger_source.set(trigger_source), + self._drv.num_images.set(trigger_info.total_number_of_triggers), + self._drv.image_mode.set(image_mode), + ) + + async def arm(self): + self._arm_status = await set_and_wait_for_value(self._drv.acquire, True) + + async def wait_for_idle(self): + if self._arm_status: + await self._arm_status + + def _get_trigger_info( + self, trigger: DetectorTrigger + ) -> tuple[DXPTriggerMode, DXPTriggerSource]: + supported_trigger_types = ( + DetectorTrigger.CONSTANT_GATE, + DetectorTrigger.EDGE_TRIGGER, + DetectorTrigger.INTERNAL, + ) + if trigger not in supported_trigger_types: + raise ValueError( + f"{self.__class__.__name__} only supports the following trigger " + f"types: {supported_trigger_types} but was asked to " + f"use {trigger}" + ) + if trigger == DetectorTrigger.INTERNAL: + return DXPTriggerMode.OFF, DXPTriggerSource.FREERUN + else: + return (DXPTriggerMode.ON, f"Line{self.gpio_number}") # type: ignore + + async def disarm(self): + await adcore.stop_busy_record(self._drv.acquire, False, timeout=1) diff --git a/src/haven/devices/detectors/dxp_io.py b/src/haven/devices/detectors/dxp_io.py new file mode 100644 index 00000000..ee9d00af --- /dev/null +++ b/src/haven/devices/detectors/dxp_io.py @@ -0,0 +1,125 @@ +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator, Sequence + +from bluesky.protocols import StreamAsset +from event_model import DataKey +from ophyd_async.core import ( + DEFAULT_TIMEOUT, + AsyncStatus, + DatasetDescriber, + DetectorController, + DetectorWriter, + Device, + HDFDataset, + HDFFile, + NameProvider, + PathProvider, + SignalR, + StandardDetector, + StrictEnum, + SubsetEnum, + TriggerInfo, + observe_value, + set_and_wait_for_value, + wait_for_value, +) +from ophyd_async.epics import adcore +from ophyd_async.epics.adcore._core_io import NDPluginBaseIO +from ophyd_async.epics.core import ( + epics_signal_r, + epics_signal_rw, + epics_signal_rw_rbv, + epics_signal_x, +) + +from ..synApps import ScanInterval +from .area_detectors import HavenDetector, default_path_provider + +# from ._utils import ( +# FileWriteMode, +# convert_param_dtype_to_np, +# convert_pv_dtype_to_np, +# ) + + + +class ScanRate: + pass + + +class DXPTriggerMode(StrictEnum): + """DXP triggering mode.""" + + ON = "On" + OFF = "Off" + + +class DXPTriggerSource(SubsetEnum): + """A minimal set of TriggerSources that must be supported by the + underlying record. 
+ + """ + FREERUN = "Freerun" + LINE1 = "Line1" + + + +class DXPDriverIO(Device): + + class PresetMode(StrictEnum): + NO_PRESET = "No preset" + REAL_TIME = "Real time" + LIVE_TIME = "Live time" + EVENTS = "Events" + TRIGGERS = "Triggers" + + class CollectMode(StrictEnum): + MCA_SPECTRA = "MCA spectra" + MCA_MAPPING = "MCA mapping" + SCA_MAPPING = "SCA mapping" + LIST_MAPPING = "List mapping" + + def __init__(self, prefix: str, name: str = "") -> None: + # SNL status records + self.snl_connected = epics_signal_r(bool, f"{prefix}SNL_Connected") + # Acquisition control records + self.erase = epics_signal_x(f"{prefix}EraseAll") + self.start = epics_signal_x(f"{prefix}StartAll") + self.stop = epics_signal_x(f"{prefix}StopAll") + # Preset control records + self.preset_mode = epics_signal_rw(self.PresetMode, f"{prefix}PresetMode") + self.preset_live_time = epics_signal_rw(float, f"{prefix}PresetLive") + self.preset_real_time = epics_signal_rw(float, f"{prefix}PresetReal") + self.preset_events = epics_signal_rw(int, f"{prefix}PresetEvents") + self.preset_triggers = epics_signal_rw(int, f"{prefix}PresetTriggers") + # Status/statistics records + self.status_scan_rate = epics_signal_rw(ScanInterval, f"{prefix}StatusAll.SCAN") + self.reading_scan_rate = epics_signal_rw(ScanInterval, f"{prefix}ReadAll.SCAN") + self.acquiring = epics_signal_r(bool, f"{prefix}Acquiring") + self.elapsed_real_time = epics_signal_r(float, f"{prefix}ElapsedReal") + self.elapsed_live_time = epics_signal_r(float, f"{prefix}ElapsedLive") + self.accumulated_dead_time = epics_signal_r(float, f"{prefix}DeadTime") + self.instantaneous_dead_time = epics_signal_r(float, f"{prefix}IDeadTime") + # Low-level parameters + self.low_level_params_scan_rate = epics_signal_rw( + ScanInterval, f"{prefix}ReadLLParams.SCAN" + ) + # Trace and diagnostic records + self.baseline_histograms_read_scan_rate = epics_signal_rw( + ScanInterval, f"{prefix}ReadBaselineHistograms.SCAN" + ) + self.traces_scan_rate = epics_signal_rw( + ScanInterval, f"{prefix}ReadTraces.SCAN" + ) + self.baseline_histogram_scan_rate = epics_signal_rw( + ScanInterval, f"{prefix}dxp1:BaselineHistogram.SCAN" + ) + self.trace_data_scan_rate = epics_signal_rw( + ScanInterval, f"{prefix}dxp1:TraceData.SCAN" + ) + # Mapping mode control records + self.collect_mode = epics_signal_rw_rbv( + self.CollectMode, f"{prefix}CollectMode" + ) + + super().__init__(name=name) diff --git a/src/haven/devices/detectors/netcdf.py b/src/haven/devices/detectors/netcdf.py new file mode 100644 index 00000000..6747cc12 --- /dev/null +++ b/src/haven/devices/detectors/netcdf.py @@ -0,0 +1,106 @@ +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator, Sequence + +from bluesky.protocols import StreamAsset +from event_model import DataKey +from ophyd_async.core import ( + DEFAULT_TIMEOUT, + AsyncStatus, + DatasetDescriber, + DetectorController, + DetectorWriter, + Device, + HDFDataset, + HDFFile, + NameProvider, + PathProvider, + SignalR, + StandardDetector, + StrictEnum, + TriggerInfo, + observe_value, + set_and_wait_for_value, + wait_for_value, +) +from ophyd_async.epics import adcore +from ophyd_async.epics.adcore._core_io import NDPluginBaseIO +from ophyd_async.epics.core import ( + epics_signal_r, + epics_signal_rw, + epics_signal_rw_rbv, + epics_signal_x, +) + +from ..synApps import ScanInterval +from .area_detectors import HavenDetector, default_path_provider +from .dxp_controller import DXPController + +# from ._utils import ( +# FileWriteMode, +# convert_param_dtype_to_np, +# 
convert_pv_dtype_to_np, +# ) + + +class NDFileNetCDFIO(NDPluginBaseIO): + def __init__(self, prefix: str, name="") -> None: + # Define some signals + self.file_path = epics_signal_rw_rbv(str, prefix + "FilePath") + self.file_name = epics_signal_rw_rbv(str, prefix + "FileName") + self.file_path_exists = epics_signal_r(bool, prefix + "FilePathExists_RBV") + self.file_template = epics_signal_rw_rbv(str, prefix + "FileTemplate") + self.full_file_name = epics_signal_r(str, prefix + "FullFileName_RBV") + self.file_write_mode = epics_signal_rw_rbv( + adcore.FileWriteMode, prefix + "FileWriteMode" + ) + self.num_capture = epics_signal_rw_rbv(int, prefix + "NumCapture") + self.num_captured = epics_signal_r(int, prefix + "NumCaptured_RBV") + self.lazy_open = epics_signal_rw_rbv(bool, prefix + "LazyOpen") + self.capture = epics_signal_rw_rbv(bool, prefix + "Capture") + self.array_size0 = epics_signal_r(int, prefix + "ArraySize0") + self.array_size1 = epics_signal_r(int, prefix + "ArraySize1") + self.create_directory = epics_signal_rw_rbv(int, prefix + "CreateDirectory") + super().__init__(prefix, name) + + +class NetCDFWriter(DetectorWriter): + _multiplier = 1 + + def __init__( + self, + netcdf: NDFileNetCDFIO, + path_provider: PathProvider, + name_provider: NameProvider, + dataset_describer: DatasetDescriber, + *plugins: adcore.NDArrayBaseIO, + ) -> None: + self.netcdf = netcdf + self._path_provider = path_provider + self._name_provider = name_provider + self._dataset_describer = dataset_describer + + self._plugins = plugins + self._capture_status: AsyncStatus | None = None + + async def open(self, multiplier: int = 1) -> dict[str, DataKey]: + assert multiplier == 1 + raise NotImplementedError() + + async def close(self): + raise NotImplementedError() + + async def observe_indices_written( + self, timeout=DEFAULT_TIMEOUT + ) -> AsyncGenerator[int, None]: + """Wait until a specific index is ready to be collected""" + async for num_captured in observe_value(self.netcdf.num_captured, timeout): + yield num_captured // self._multiplier + + async def get_indices_written(self) -> int: + num_captured = await self.netcdf.num_captured.get_value() + return num_captured // self._multiplier + + async def collect_stream_docs( + self, indices_written: int + ) -> AsyncIterator[StreamAsset]: + raise NotImplementedError() diff --git a/src/haven/devices/detectors/xspress.py b/src/haven/devices/detectors/xspress.py new file mode 100644 index 00000000..083ea8c9 --- /dev/null +++ b/src/haven/devices/detectors/xspress.py @@ -0,0 +1,104 @@ +import asyncio +from collections.abc import Sequence + +from ophyd_async.core import ( + AsyncStatus, + DetectorController, + PathProvider, + SignalR, + StandardDetector, + StrictEnum, + TriggerInfo, +) +from ophyd_async.epics import adcore +from ophyd_async.epics.core import epics_signal_rw, epics_signal_x + +from .area_detectors import HavenDetector, default_path_provider + + +class XspressTriggerMode(StrictEnum): + SOFTWARE = "Software" + INTERNAL = "Internal" + IDC = "IDC" + TTL_VETO_ONLY = "TTL Veto Only" + TTL_BOTH = "TTL Both" + LVDS_VETO_ONLY = "LVDS Veto Only" + LVDS_BOTH = "LVDS Both" + SOFTWARE_INTERNAL = "Software + Internal" + + +class XspressDriverIO(adcore.ADBaseIO): + def __init__(self, prefix, name=""): + self.trigger_mode = epics_signal_rw(XspressTriggerMode, f"{prefix}TriggerMode") + self.erase_on_start = epics_signal_rw(bool, f"{prefix}EraseOnStart") + self.erase = epics_signal_x(f"{prefix}ERASE") + super().__init__(prefix=prefix, name=name) + + +class 
XspressController(DetectorController): + def __init__(self, driver: adcore.ADBaseIO) -> None: + self._drv = driver + + def get_deadtime(self, exposure: float) -> float: + # Arbitrary value. To-do: fill this in when we know what to + # include + return 0.001 + + @AsyncStatus.wrap + async def prepare(self, trigger_info: TriggerInfo): + await asyncio.gather( + self._drv.num_images.set(trigger_info.total_number_of_triggers), + self._drv.image_mode.set(adcore.ImageMode.multiple), + self._drv.trigger_mode.set(XspressTriggerMode.INTERNAL), + ) + + async def wait_for_idle(self): + if self._arm_status: + await self._arm_status + + async def arm(self): + self._arm_status = await adcore.start_acquiring_driver_and_ensure_status( + self._drv + ) + + async def disarm(self): + await adcore.stop_busy_record(self._drv.acquire, False, timeout=1) + + +class Xspress3Detector(HavenDetector, StandardDetector): + _controller: DetectorController + _writer: adcore.ADHDFWriter + + def __init__( + self, + prefix: str, + path_provider: PathProvider | None = None, + drv_suffix="det1:", + hdf_suffix="HDF1:", + name: str = "", + config_sigs: Sequence[SignalR] = (), + ): + self.drv = XspressDriverIO(prefix + drv_suffix) + self.hdf = adcore.NDFileHDFIO(prefix + hdf_suffix) + + if path_provider is None: + path_provider = default_path_provider() + super().__init__( + XspressController(self.drv), + adcore.ADHDFWriter( + self.hdf, + path_provider, + lambda: self.name, + adcore.ADBaseDatasetDescriber(self.drv), + ), + config_sigs=(self.drv.acquire_period, self.drv.acquire_time, *config_sigs), + name=name, + ) + + @AsyncStatus.wrap + async def stage(self) -> None: + await asyncio.gather( + super().stage(), + self.drv.erase_on_start.set(False), + self.drv.erase.trigger(), + ) diff --git a/src/haven/devices/synApps.py b/src/haven/devices/synApps.py index 62071cdc..7d175375 100644 --- a/src/haven/devices/synApps.py +++ b/src/haven/devices/synApps.py @@ -42,6 +42,21 @@ class AlarmSeverity(StrictEnum): INVALID = "INVALID" +# More valid options are specific to the record type +# Subclasses may override this attribute +class ScanInterval(SubsetEnum): + PASSIVE = "Passive" + EVENT = "Event" + IO_INTR = "I/O Intr" + SCAN_10 = "10 second" + SCAN_5 = "5 second" + SCAN_2 = "2 second" + SCAN_1 = "1 second" + SCAN_0_5 = ".5 second" + SCAN_0_2 = ".2 second" + SCAN_0_1 = ".1 second" + + class EpicsRecordDeviceCommonAll(StandardReadable): """ Many of the fields common to all EPICS records. @@ -50,19 +65,7 @@ class EpicsRecordDeviceCommonAll(StandardReadable): an EPICS client or are already provided in other support. 
""" - # More valid options are specific to the record type - # Subclasses may override this attribute - class ScanInterval(SubsetEnum): - PASSIVE = "Passive" - EVENT = "Event" - IO_INTR = "I/O Intr" - SCAN_10 = "10 second" - SCAN_5 = "5 second" - SCAN_2 = "2 second" - SCAN_1 = "1 second" - SCAN_0_5 = ".5 second" - SCAN_0_2 = ".2 second" - SCAN_0_1 = ".1 second" + ScanInterval = ScanInterval # Config signals def __init__(self, prefix: str, name: str = ""): diff --git a/src/haven/iconfig_testing.toml b/src/haven/iconfig_testing.toml index 4caa94f1..523ca090 100644 --- a/src/haven/iconfig_testing.toml +++ b/src/haven/iconfig_testing.toml @@ -289,10 +289,9 @@ iocs = {ioc255idb = "ioc255idb:", ioc255idc = "ioc255idc:"} # prefix = "20xmap8:" # num_elements = 4 -[[ xspress ]] -name = "vortex_me4_xsp" +[[ xspress3 ]] +name = "vortex_me4" prefix = "vortex_me4_xsp:" -num_elements = 4 # Filter boxes diff --git a/src/haven/instrument.py b/src/haven/instrument.py index fea42894..69c6ba5b 100644 --- a/src/haven/instrument.py +++ b/src/haven/instrument.py @@ -20,6 +20,7 @@ from .devices.beamline_manager import BeamlineManager from .devices.detectors.aravis import AravisDetector from .devices.detectors.sim_detector import SimDetector +from .devices.detectors.xspress import Xspress3Detector from .devices.dxp import make_dxp_device from .devices.energy_positioner import EnergyPositioner from .devices.heater import CapillaryHeater @@ -34,7 +35,6 @@ from .devices.stage import XYStage from .devices.table import Table from .devices.xia_pfcu import PFCUFilterBank -from .devices.xspress import make_xspress_device from .exceptions import InvalidConfiguration log = logging.getLogger(__name__) @@ -414,6 +414,7 @@ async def load( "sim_detector": SimDetector, "camera": AravisDetector, "pss_shutter": PssShutter, + "xspress3": Xspress3Detector, # Threaded ophyd devices "blade_slits": BladeSlits, "aperture_slits": ApertureSlits, @@ -422,7 +423,6 @@ async def load( "synchrotron": ApsMachine, "robot": Robot, "pfcu4": PFCUFilterBank, # <-- fails if mocked - "xspress": make_xspress_device, "dxp": make_dxp_device, "beamline_manager": BeamlineManager, "area_detector": make_area_detector, diff --git a/src/haven/tests/test_dxp.py b/src/haven/tests/test_dxp.py new file mode 100644 index 00000000..e097364e --- /dev/null +++ b/src/haven/tests/test_dxp.py @@ -0,0 +1,148 @@ +import asyncio +from pathlib import Path + +import pytest +from ophyd_async.core import TriggerInfo, get_mock_put, set_mock_value + +from haven.devices.detectors.dxp import DXPDetector + +this_dir = Path(__file__).parent + + +@pytest.fixture() +async def detector(): + det = DXPDetector("255id_dxp:", name="vortex_me4") + await det.connect(mock=True) + set_mock_value(det.netcdf.file_path_exists, True) + return det + + +def test_signals(detector): + # Spot-check some PVs + # SNL status records + assert detector.drv.snl_connected.source == "mock+ca://255id_dxp:SNL_Connected" + # Acquisition control records + assert detector.drv.erase.source == "mock+ca://255id_dxp:EraseAll" + assert detector.drv.start.source == "mock+ca://255id_dxp:StartAll" + assert detector.drv.stop.source == "mock+ca://255id_dxp:StopAll" + # Preset control records + assert detector.drv.preset_mode.source == "mock+ca://255id_dxp:PresetMode" + assert detector.drv.preset_live_time.source == "mock+ca://255id_dxp:PresetLive" + assert detector.drv.preset_real_time.source == "mock+ca://255id_dxp:PresetReal" + assert detector.drv.preset_events.source == "mock+ca://255id_dxp:PresetEvents" + assert 
detector.drv.preset_triggers.source == "mock+ca://255id_dxp:PresetTriggers" + # Status/statistics records + assert detector.drv.status_scan_rate.source == "mock+ca://255id_dxp:StatusAll.SCAN" + assert detector.drv.reading_scan_rate.source == "mock+ca://255id_dxp:ReadAll.SCAN" + assert detector.drv.acquiring.source == "mock+ca://255id_dxp:Acquiring" + assert detector.drv.elapsed_real_time.source == "mock+ca://255id_dxp:ElapsedReal" + assert detector.drv.elapsed_live_time.source == "mock+ca://255id_dxp:ElapsedLive" + assert detector.drv.accumulated_dead_time.source == "mock+ca://255id_dxp:DeadTime" + assert ( + detector.drv.instantaneous_dead_time.source == "mock+ca://255id_dxp:IDeadTime" + ) + # Low-level parameters + assert ( + detector.drv.low_level_params_scan_rate.source + == "mock+ca://255id_dxp:ReadLLParams.SCAN" + ) + # Trace and diagnostic records + assert ( + detector.drv.baseline_histograms_read_scan_rate.source + == "mock+ca://255id_dxp:ReadBaselineHistograms.SCAN" + ) + assert detector.drv.traces_scan_rate.source == "mock+ca://255id_dxp:ReadTraces.SCAN" + assert ( + detector.drv.baseline_histogram_scan_rate.source + == "mock+ca://255id_dxp:dxp1:BaselineHistogram.SCAN" + ) + assert ( + detector.drv.trace_data_scan_rate.source + == "mock+ca://255id_dxp:dxp1:TraceData.SCAN" + ) + # Mapping mode control records + assert detector.drv.collect_mode.source == "mock+ca://255id_dxp:CollectMode_RBV" + # NetCDF file writer + assert detector.netcdf.file_path.source == "mock+ca://255id_dxp:netCDF1:FilePath_RBV" + assert detector.netcdf.file_name.source == "mock+ca://255id_dxp:netCDF1:FileName_RBV" + assert detector.netcdf.file_path_exists.source == "mock+ca://255id_dxp:netCDF1:FilePathExists_RBV" + assert detector.netcdf.file_template.source == "mock+ca://255id_dxp:netCDF1:FileTemplate_RBV" + assert detector.netcdf.full_file_name.source == "mock+ca://255id_dxp:netCDF1:FullFileName_RBV" + assert detector.netcdf.file_write_mode.source == "mock+ca://255id_dxp:netCDF1:FileWriteMode_RBV" + assert detector.netcdf.num_capture.source == "mock+ca://255id_dxp:netCDF1:NumCapture_RBV" + assert detector.netcdf.num_captured.source == "mock+ca://255id_dxp:netCDF1:NumCaptured_RBV" + assert detector.netcdf.lazy_open.source == "mock+ca://255id_dxp:netCDF1:LazyOpen_RBV" + assert detector.netcdf.capture.source == "mock+ca://255id_dxp:netCDF1:Capture_RBV" + assert detector.netcdf.array_size0.source == "mock+ca://255id_dxp:netCDF1:ArraySize0_RBV" + assert detector.netcdf.array_size1.source == "mock+ca://255id_dxp:netCDF1:ArraySize1_RBV" + assert detector.netcdf.create_directory.source == "mock+ca://255id_dxp:netCDF1:CreateDirectory_RBV" + +async def test_config_signals(detector): + desc = await detector.describe_configuration() + print(desc) + # assert False, "Write test for this" + assert detector.drv.preset_mode.name in desc + assert detector.drv.preset_live_time.name in desc + assert detector.drv.preset_real_time.name in desc + assert detector.drv.preset_events.name in desc + assert detector.drv.preset_triggers.name in desc + assert detector.drv.collect_mode.name in desc + + +async def test_prepare(detector): + """These records should be set to SCAN="Passive" to avoid slowdowns: + + StatusAll + ReadAll <- unless we're in mapping mode, then 2 sec + ReadLLParams + ReadBaselineHistograms + ReadTraces + dxp1:BaselineHistogram + dxp1:TraceData + + collect_mode -> MCA spectra + """ + await detector.prepare(TriggerInfo(number_of_triggers=1)) + + +# async def test_trigger(detector): +# trigger_info = 
TriggerInfo(number_of_triggers=1) +# status = detector.trigger() +# await asyncio.sleep(0.1) # Let the event loop turn +# set_mock_value(detector.hdf.num_captured, 1) +# await status +# # Check that signals were set +# get_mock_put(detector.drv.num_images).assert_called_once_with(1, wait=True) + + +# async def test_stage(detector): +# assert not get_mock_put(detector.drv.erase).called +# await detector.stage() +# get_mock_put(detector.drv.erase_on_start).assert_called_once_with(False, wait=True) +# assert get_mock_put(detector.drv.erase).called + + +# ----------------------------------------------------------------------------- +# :author: Mark Wolfman +# :email: wolfman@anl.gov +# :copyright: Copyright © 2024, UChicago Argonne, LLC +# +# Distributed under the terms of the 3-Clause BSD License +# +# The full license is in the file LICENSE, distributed with this software. +# +# DISCLAIMER +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# ----------------------------------------------------------------------------- diff --git a/src/haven/tests/test_xspress.py b/src/haven/tests/test_xspress.py index cc2bfe23..879aa361 100644 --- a/src/haven/tests/test_xspress.py +++ b/src/haven/tests/test_xspress.py @@ -1,45 +1,46 @@ -import pytest +import asyncio +from pathlib import Path -from haven.devices.xspress import Xspress3Detector +import pytest +from ophyd_async.core import TriggerInfo, get_mock_put, set_mock_value +from haven.devices.detectors.xspress import Xspress3Detector -def test_num_elements(xspress): - assert xspress.num_elements == 4 +this_dir = Path(__file__).parent -def test_num_rois(xspress): - assert xspress.num_rois == 16 +@pytest.fixture() +async def detector(): + det = Xspress3Detector("255id_xsp:", name="vortex_me4") + await det.connect(mock=True) + set_mock_value(det.hdf.file_path_exists, True) + return det -@pytest.mark.skip( - reason="This test can't instantiate the device without having an IOC present" -) -def test_mca_signals(): - xsp = Xspress3Detector("255id_xsp:", name="spcxsp") - assert not xsp.connected +def test_mca_signals(detector): # Spot-check some PVs - assert xsp.cam.acquire_time._write_pv.pvname == "255id_xsp:det1:AcquireTime" - assert xsp.cam.acquire._write_pv.pvname == "255id_xsp:det1:Acquire" - assert xsp.cam.acquire._read_pv.pvname == "255id_xsp:det1:Acquire_RBV" + # print(list(detector.drv.children())) assert ( - xsp.mcas.mca0.rois.roi0.total_count._read_pv.pvname - == "255id_xsp:MCA1ROI:1:Total_RBV" + detector.drv.acquire_time.source == "mock+ca://255id_xsp:det1:AcquireTime_RBV" ) + assert detector.drv.acquire.source == "mock+ca://255id_xsp:det1:Acquire_RBV" + + +async def test_trigger(detector): + trigger_info = TriggerInfo(number_of_triggers=1) + status = detector.trigger() + await asyncio.sleep(0.1) # Let the event loop 
turn + set_mock_value(detector.hdf.num_captured, 1) + await status + # Check that signals were set + get_mock_put(detector.drv.num_images).assert_called_once_with(1, wait=True) -def test_roi_size(xspress): - """Do the signals for max/size auto-update.""" - roi = xspress.mcas.mca0.rois.roi0 - roi.lo_chan.set(10).wait() - # Update the size and check the maximum - roi.size.set(7).wait() - assert roi.hi_chan.get() == 17 - # Update the maximum and check the size - roi.hi_chan.set(28).wait() - assert roi.size.get() == 18 - # Update the minimum and check the size - roi.lo_chan.set(25).wait() - assert roi.size.get() == 3 +async def test_stage(detector): + assert not get_mock_put(detector.drv.erase).called + await detector.stage() + get_mock_put(detector.drv.erase_on_start).assert_called_once_with(False, wait=True) + assert get_mock_put(detector.drv.erase).called # -----------------------------------------------------------------------------