diff --git a/docs/api-reference/index.md b/docs/api-reference/index.md
index 5da7f25..40f2176 100644
--- a/docs/api-reference/index.md
+++ b/docs/api-reference/index.md
@@ -34,5 +34,6 @@
types
mtz_io
scaling
+ configurations
```
diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md
index 550cfb7..83cdd88 100644
--- a/docs/user-guide/index.md
+++ b/docs/user-guide/index.md
@@ -5,6 +5,7 @@
maxdepth: 1
---
+workflow
mcstas_workflow
mcstas_workflow_chunk
scaling_workflow
diff --git a/docs/user-guide/workflow.ipynb b/docs/user-guide/workflow.ipynb
new file mode 100644
index 0000000..af3d2fc
--- /dev/null
+++ b/docs/user-guide/workflow.ipynb
@@ -0,0 +1,171 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# NMX Reduction Workflow\n",
+ "\n",
+ "> NMX does not expect users to use the Python interface directly.\n",
+ "This documentation is mostly intended for instrument data scientists or instrument scientists."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## TL;DR"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import reduction\n",
+ "from ess.nmx.data import get_small_nmx_nexus\n",
+ "from ess.nmx.configurations import (\n",
+ " ReductionConfig,\n",
+ " OutputConfig,\n",
+ " InputConfig,\n",
+ " WorkflowConfig,\n",
+ " TimeBinCoordinate,\n",
+ ")\n",
+ "\n",
+ "# Build Configuration\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(\n",
+ " input_file=[get_small_nmx_nexus().as_posix()],\n",
+ " detector_ids=[0, 1, 2],\n",
+ " ),\n",
+ " output=OutputConfig(\n",
+ " output_file=\"scipp_output.hdf\", skip_file_output=False, overwrite=True\n",
+ " ),\n",
+ " workflow=WorkflowConfig(\n",
+ " time_bin_coordinate=TimeBinCoordinate.time_of_flight,\n",
+ " nbins=10,\n",
+ " tof_simulation_num_neutrons=1_000_000,\n",
+ " tof_simulation_min_wavelength=1.8,\n",
+ " tof_simulation_max_wavelength=3.6,\n",
+ " tof_simulation_seed=42,\n",
+ " ),\n",
+ ")\n",
+ "\n",
+ "# Run Reduction\n",
+ "reduction(config=config, display=display)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configuration\n",
+ "\n",
+ "`essnmx` provides a command-line data reduction tool.\n",
+ "The `essnmx-reduce` interface will reduce a `nexus` file\n",
+ "and save the results into `NXlauetof` (not exactly, but very close) format for `dials`.\n",
+ "\n",
+ "For convenience and safety, all configuration options are wrapped in a nested pydantic model.\n",
+ "Here is the Python API you can use to build the configuration and turn it into command line arguments.\n",
+ "\n",
+ "**The configuration object is a pydantic model, and thus enforces strict checks on the types of the arguments.**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.configurations import (\n",
+ " ReductionConfig,\n",
+ " OutputConfig,\n",
+ " InputConfig,\n",
+ " WorkflowConfig,\n",
+ " TimeBinCoordinate,\n",
+ " to_command_arguments,\n",
+ ")\n",
+ "\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(\n",
+ " input_file=[\"PATH_TO_THE_NEXUS_FILE.hdf\"],\n",
+ " detector_ids=[0, 1, 2], # Detector index to be reduced in alphabetical order.\n",
+ " ),\n",
+ " output=OutputConfig(output_file=\"scipp_output.hdf\", skip_file_output=True),\n",
+ " workflow=WorkflowConfig(\n",
+ " time_bin_coordinate=TimeBinCoordinate.time_of_flight,\n",
+ " nbins=10,\n",
+ " tof_simulation_num_neutrons=1_000_000,\n",
+ " tof_simulation_min_wavelength=1.8,\n",
+ " tof_simulation_max_wavelength=3.6,\n",
+ " tof_simulation_seed=42,\n",
+ " ),\n",
+ ")\n",
+ "\n",
+ "display(config)\n",
+ "print(to_command_arguments(config=config, one_line=True))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Reduce Nexus File(s)\n",
+ "\n",
+ "`OutputConfig` has an option called `skip_file_output` if you want to reduce the file and use the results only in memory.\n",
+ "Then you can use the `save_results` function to explicitly save the results."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import reduction\n",
+ "from ess.nmx.data import get_small_nmx_nexus\n",
+ "\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(input_file=[get_small_nmx_nexus().as_posix()]),\n",
+ " output=OutputConfig(skip_file_output=True),\n",
+ ")\n",
+ "results = reduction(config=config, display=display)\n",
+ "results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import save_results\n",
+ "\n",
+ "output_config = OutputConfig(output_file=\"scipp_output.hdf\", overwrite=True)\n",
+ "save_results(results=results, output_config=output_config)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "nmx-dev-313",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.13.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/pyproject.toml b/pyproject.toml
index 6d67ab0..676d956 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,7 +43,7 @@ dependencies = [
"defusedxml>=0.7.1",
"bitshuffle>=0.5.2",
"msgpack>=1.0.8",
- "tof>=25.12.0",
+ "tof>=25.12.1",
]
dynamic = ["version"]
diff --git a/requirements/base.in b/requirements/base.in
index 7f14a72..017d437 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -15,4 +15,4 @@ gemmi>=0.6.6
defusedxml>=0.7.1
bitshuffle>=0.5.2
msgpack>=1.0.8
-tof>=25.12.0
+tof>=25.12.1
diff --git a/requirements/base.txt b/requirements/base.txt
index 12864a3..6ab65ed 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,4 +1,4 @@
-# SHA1:60fc64eb056a0e8e93d0a6ebf0875a2e341193b5
+# SHA1:f587b0729a7479dd1077a91433877224e995175b
#
# This file was generated by pip-compile-multi.
# To update, run:
@@ -23,7 +23,7 @@ cyclebane==24.10.0
# via sciline
cycler==0.12.1
# via matplotlib
-cython==3.2.1
+cython==3.2.2
# via bitshuffle
dask==2025.11.0
# via -r base.in
@@ -33,11 +33,11 @@ dnspython==2.8.0
# via email-validator
email-validator==2.3.0
# via scippneutron
-essreduce==25.11.5
+essreduce==25.12.1
# via -r base.in
-fonttools==4.60.1
+fonttools==4.61.0
# via matplotlib
-fsspec==2025.10.0
+fsspec==2025.12.0
# via dask
gemmi==0.7.4
# via -r base.in
@@ -52,8 +52,6 @@ idna==3.11
# via
# email-validator
# requests
-importlib-metadata==8.7.0
- # via dask
kiwisolver==1.4.9
# via matplotlib
lazy-loader==0.4
@@ -71,7 +69,7 @@ mpltoolbox==25.10.0
# via scippneutron
msgpack==1.1.2
# via -r base.in
-networkx==3.6
+networkx==3.6.1
# via cyclebane
numpy==2.3.5
# via
@@ -95,7 +93,7 @@ partd==1.4.2
# via dask
pillow==12.0.0
# via matplotlib
-platformdirs==4.5.0
+platformdirs==4.5.1
# via pooch
plopp==25.11.0
# via
@@ -134,7 +132,7 @@ scipp==25.11.0
# scippneutron
# scippnexus
# tof
-scippneutron==25.11.0
+scippneutron==25.11.2
# via essreduce
scippnexus==25.11.0
# via
@@ -147,7 +145,7 @@ scipy==1.16.3
# scippnexus
six==1.17.0
# via python-dateutil
-tof==25.12.0
+tof==25.12.1
# via -r base.in
toolz==1.1.0
# via
@@ -162,10 +160,8 @@ typing-inspection==0.4.2
# via pydantic
tzdata==2025.2
# via pandas
-urllib3==2.5.0
+urllib3==2.6.1
# via requests
-zipp==3.23.0
- # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools
diff --git a/requirements/basetest.txt b/requirements/basetest.txt
index acbe0b3..e9fde5f 100644
--- a/requirements/basetest.txt
+++ b/requirements/basetest.txt
@@ -13,5 +13,5 @@ pluggy==1.6.0
# via pytest
pygments==2.19.2
# via pytest
-pytest==9.0.1
+pytest==9.0.2
# via -r basetest.in
diff --git a/requirements/ci.txt b/requirements/ci.txt
index 4b49511..5e597a7 100644
--- a/requirements/ci.txt
+++ b/requirements/ci.txt
@@ -32,7 +32,7 @@ packaging==25.0
# -r ci.in
# pyproject-api
# tox
-platformdirs==4.5.0
+platformdirs==4.5.1
# via
# tox
# virtualenv
@@ -46,7 +46,7 @@ smmap==5.0.2
# via gitdb
tox==4.32.0
# via -r ci.in
-urllib3==2.5.0
+urllib3==2.6.1
# via requests
virtualenv==20.35.4
# via tox
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 6c15809..b3ae8b9 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -12,7 +12,7 @@
-r static.txt
-r test.txt
-r wheels.txt
-anyio==4.11.0
+anyio==4.12.0
# via
# httpx
# jupyter-server
@@ -73,8 +73,6 @@ lark==1.3.1
# via rfc3987-syntax
notebook-shim==0.2.4
# via jupyterlab
-overrides==7.7.0
- # via jupyter-server
pip-compile-multi==3.2.2
# via -r dev.in
pip-tools==7.5.2
@@ -101,8 +99,6 @@ rfc3987-syntax==1.1.0
# via jsonschema
send2trash==1.8.3
# via jupyter-server
-sniffio==1.3.1
- # via anyio
terminado==0.18.1
# via
# jupyter-server
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 4971a08..9eef163 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -24,7 +24,7 @@ babel==2.17.0
# via
# pydata-sphinx-theme
# sphinx
-beautifulsoup4==4.14.2
+beautifulsoup4==4.14.3
# via
# nbconvert
# pydata-sphinx-theme
@@ -54,7 +54,7 @@ ipydatawidgets==4.3.5
# via pythreejs
ipykernel==7.1.0
# via -r docs.in
-ipython==9.7.0
+ipython==9.8.0
# via
# -r docs.in
# ipykernel
@@ -121,7 +121,7 @@ nbformat==5.10.4
# nbclient
# nbconvert
# nbsphinx
-nbsphinx==0.9.7
+nbsphinx==0.9.8
# via -r docs.in
nest-asyncio==1.6.0
# via ipykernel
@@ -163,7 +163,9 @@ referencing==0.37.0
# via
# jsonschema
# jsonschema-specifications
-rpds-py==0.29.0
+roman-numerals-py==3.1.0
+ # via sphinx
+rpds-py==0.30.0
# via
# jsonschema
# referencing
@@ -171,7 +173,7 @@ snowballstemmer==3.0.1
# via sphinx
soupsieve==2.8
# via beautifulsoup4
-sphinx==8.1.3
+sphinx==8.2.3
# via
# -r docs.in
# autodoc-pydantic
@@ -181,7 +183,7 @@ sphinx==8.1.3
# sphinx-autodoc-typehints
# sphinx-copybutton
# sphinx-design
-sphinx-autodoc-typehints==3.0.1
+sphinx-autodoc-typehints==3.5.2
# via -r docs.in
sphinx-copybutton==0.5.2
# via -r docs.in
diff --git a/requirements/mypy.txt b/requirements/mypy.txt
index 74e2fd2..1884b2a 100644
--- a/requirements/mypy.txt
+++ b/requirements/mypy.txt
@@ -6,7 +6,9 @@
# requirements upgrade
#
-r test.txt
-mypy==1.18.2
+librt==0.7.3
+ # via mypy
+mypy==1.19.0
# via -r mypy.in
mypy-extensions==1.1.0
# via mypy
diff --git a/requirements/nightly.in b/requirements/nightly.in
index 29b77c0..b579c0a 100644
--- a/requirements/nightly.in
+++ b/requirements/nightly.in
@@ -10,7 +10,7 @@ gemmi>=0.6.6
defusedxml>=0.7.1
bitshuffle>=0.5.2
msgpack>=1.0.8
-tof>=25.12.0
+tof>=25.12.1
pytest>=7.0
scipp
--index-url=https://pypi.anaconda.org/scipp-nightly-wheels/simple/
diff --git a/requirements/nightly.txt b/requirements/nightly.txt
index 625fda3..c17a115 100644
--- a/requirements/nightly.txt
+++ b/requirements/nightly.txt
@@ -1,4 +1,4 @@
-# SHA1:82815c7a739545007523e0e1a48d57f0f7e427db
+# SHA1:ffed931b80e632af913de357166a7bc05bb84e8c
#
# This file was generated by pip-compile-multi.
# To update, run:
@@ -26,7 +26,7 @@ cyclebane==24.10.0
# via sciline
cycler==0.12.1
# via matplotlib
-cython==3.2.1
+cython==3.2.2
# via bitshuffle
dask==2025.11.0
# via -r nightly.in
@@ -36,11 +36,11 @@ dnspython==2.8.0
# via email-validator
email-validator==2.3.0
# via scippneutron
-essreduce==25.11.5
+essreduce==25.12.1
# via -r nightly.in
-fonttools==4.60.1
+fonttools==4.61.0
# via matplotlib
-fsspec==2025.10.0
+fsspec==2025.12.0
# via dask
gemmi==0.7.4
# via -r nightly.in
@@ -55,8 +55,6 @@ idna==3.11
# via
# email-validator
# requests
-importlib-metadata==8.7.0
- # via dask
iniconfig==2.3.0
# via pytest
kiwisolver==1.4.10rc0
@@ -76,9 +74,9 @@ mpltoolbox==25.10.0
# via scippneutron
msgpack==1.1.2
# via -r nightly.in
-networkx==3.6
+networkx==3.6.1
# via cyclebane
-numpy==2.3.5
+numpy==2.4.0rc1
# via
# bitshuffle
# contourpy
@@ -95,13 +93,13 @@ packaging==25.0
# matplotlib
# pooch
# pytest
-pandas==2.3.3
+pandas==3.0.0rc0
# via -r nightly.in
partd==1.4.2
# via dask
pillow==12.0.0
# via matplotlib
-platformdirs==4.5.0
+platformdirs==4.5.1
# via pooch
plopp @ git+https://github.com/scipp/plopp@main
# via
@@ -122,15 +120,13 @@ pygments==2.19.2
# via pytest
pyparsing==3.3.0b1
# via matplotlib
-pytest==9.0.1
+pytest==9.0.2
# via -r nightly.in
python-dateutil==2.9.0.post0
# via
# matplotlib
# pandas
# scippneutron
-pytz==2025.2
- # via pandas
pyyaml==6.0.3
# via dask
requests==2.32.5
@@ -146,7 +142,7 @@ scipp==100.0.0.dev0
# scippneutron
# scippnexus
# tof
-scippneutron==25.11.0
+scippneutron==25.11.2
# via essreduce
scippnexus @ git+https://github.com/scipp/scippnexus@main
# via
@@ -159,7 +155,7 @@ scipy==1.16.3
# scippnexus
six==1.17.0
# via python-dateutil
-tof==25.12.0
+tof==25.12.1
# via -r nightly.in
toolz==1.1.0
# via
@@ -174,10 +170,8 @@ typing-inspection==0.4.2
# via pydantic
tzdata==2025.2
# via pandas
-urllib3==2.5.0
+urllib3==2.6.1
# via requests
-zipp==3.23.0
- # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools
diff --git a/requirements/static.txt b/requirements/static.txt
index 4cc95f1..2a110d8 100644
--- a/requirements/static.txt
+++ b/requirements/static.txt
@@ -15,7 +15,7 @@ identify==2.6.15
# via pre-commit
nodeenv==1.9.1
# via pre-commit
-platformdirs==4.5.0
+platformdirs==4.5.1
# via virtualenv
pre-commit==4.5.0
# via -r static.in
diff --git a/src/ess/nmx/_executable_helper.py b/src/ess/nmx/_executable_helper.py
index afa1843..425c494 100644
--- a/src/ess/nmx/_executable_helper.py
+++ b/src/ess/nmx/_executable_helper.py
@@ -174,42 +174,6 @@ def reduction_config_from_args(args: argparse.Namespace) -> ReductionConfig:
)
-def to_command_arguments(
- config: ReductionConfig, one_line: bool = True
-) -> list[str] | str:
- """Convert the config to a list of command line arguments.
-
- Parameters
- ----------
- one_line:
- If True, return a single string with all arguments joined by spaces.
- If False, return a list of argument strings.
-
- """
- args = {}
- for instance in config._children:
- args.update(instance.model_dump(mode='python'))
- args = {f"--{k.replace('_', '-')}": v for k, v in args.items() if v is not None}
-
- arg_list = []
- for k, v in args.items():
- if not isinstance(v, bool):
- arg_list.append(k)
- if isinstance(v, list):
- arg_list.extend(str(item) for item in v)
- elif isinstance(v, enum.StrEnum):
- arg_list.append(v.value)
- else:
- arg_list.append(str(v))
- elif v is True:
- arg_list.append(k)
-
- if one_line:
- return ' '.join(arg_list)
- else:
- return arg_list
-
-
def build_logger(args: argparse.Namespace | OutputConfig) -> logging.Logger:
logger = logging.getLogger(__name__)
if args.verbose:
diff --git a/src/ess/nmx/configurations.py b/src/ess/nmx/configurations.py
index dbfa65a..2470f6b 100644
--- a/src/ess/nmx/configurations.py
+++ b/src/ess/nmx/configurations.py
@@ -98,6 +98,11 @@ class WorkflowConfig(BaseModel):
"If None, the lookup table will be computed on-the-fly.",
default=None,
)
+ tof_simulation_num_neutrons: int = Field(
+ title="Number of Neutrons for TOF Simulation",
+ description="Number of neutrons to simulate for TOF lookup table calculation.",
+ default=1_000_000,
+ )
tof_simulation_min_wavelength: float = Field(
title="TOF Simulation Minimum Wavelength",
description="Minimum wavelength for TOF simulation in Angstrom.",
@@ -135,11 +140,22 @@ class OutputConfig(BaseModel):
default=False,
)
# File output
+ skip_file_output: bool = Field(
+ title="Skip File Output",
+ description="If True, the output file will not be written.",
+ default=False,
+ )
output_file: str = Field(
title="Output File",
- description="Path to the output file.",
+ description="Path to the output file. "
+ "It will be overwritten if ``overwrite`` is True.",
default="scipp_output.h5",
)
+ overwrite: bool = Field(
+ title="Overwrite Output File",
+ description="If True, overwrite the output file if ``output_file`` exists.",
+ default=False,
+ )
compression: Compression = Field(
title="Compression",
description="Compress option of reduced output file.",
@@ -157,3 +173,45 @@ class ReductionConfig(BaseModel):
@property
def _children(self) -> list[BaseModel]:
return [self.inputs, self.workflow, self.output]
+
+
+def to_command_arguments(
+ *, config: ReductionConfig, one_line: bool = True, separator: str = '\\\n'
+) -> list[str] | str:
+ """Convert the config to a list of command line arguments.
+
+ Parameters
+ ----------
+ one_line:
+ If True, return a single string with all arguments joined by spaces.
+ If False, return a list of argument strings.
+
+ """
+ args = {}
+ for instance in config._children:
+ args.update(instance.model_dump(mode='python'))
+ args = {f"--{k.replace('_', '-')}": v for k, v in args.items() if v is not None}
+
+ arg_list = []
+ for k, v in args.items():
+ if not isinstance(v, bool):
+ arg_list.append(k)
+ if isinstance(v, list):
+ arg_list.extend(str(item) for item in v)
+ elif isinstance(v, enum.StrEnum):
+ arg_list.append(v.value)
+ else:
+ arg_list.append(str(v))
+ elif v is True:
+ arg_list.append(k)
+
+ if one_line:
+ # Default separator is backslash + newline for better readability
+ # Users can directly copy-paste the output in a terminal or a script.
+ return (
+ (separator + '--')
+ .join(" ".join(arg_list).split('--'))
+ .removeprefix(separator)
+ )
+ else:
+ return arg_list
diff --git a/src/ess/nmx/executables.py b/src/ess/nmx/executables.py
index 0aed085..2d68928 100644
--- a/src/ess/nmx/executables.py
+++ b/src/ess/nmx/executables.py
@@ -16,7 +16,7 @@
collect_matching_input_files,
reduction_config_from_args,
)
-from .configurations import ReductionConfig
+from .configurations import OutputConfig, ReductionConfig
from .nexus import (
export_detector_metadata_as_nxlauetof,
export_monitor_metadata_as_nxlauetof,
@@ -117,22 +117,13 @@ def reduction(
base_wf[TimeOfFlightLookupTable] = base_wf.compute(TimeOfFlightLookupTable)
metadatas = base_wf.compute((NMXSampleMetadata, NMXSourceMetadata))
- export_static_metadata_as_nxlauetof(
- sample_metadata=metadatas[NMXSampleMetadata],
- source_metadata=metadatas[NMXSourceMetadata],
- output_file=config.output.output_file,
- )
tof_das = sc.DataGroup()
detector_metas = sc.DataGroup()
for detector_name in detector_names:
cur_wf = base_wf.copy()
cur_wf[NeXusName[snx.NXdetector]] = detector_name
results = cur_wf.compute((TofDetector[SampleRun], NMXDetectorMetadata))
- detector_meta: NMXDetectorMetadata = results[NMXDetectorMetadata]
- export_detector_metadata_as_nxlauetof(
- detector_metadata=detector_meta, output_file=config.output.output_file
- )
- detector_metas[detector_name] = detector_meta
+ detector_metas[detector_name] = results[NMXDetectorMetadata]
# Binning into 1 bin and getting final tof bin edges later.
tof_das[detector_name] = results[TofDetector[SampleRun]]
@@ -154,28 +145,54 @@ def reduction(
data=sc.ones_like(tof_bin_edges[:-1]),
),
)
- export_monitor_metadata_as_nxlauetof(
- monitor_metadata=monitor_metadata, output_file=config.output.output_file
- )
# Histogram detector counts
tof_histograms = sc.DataGroup()
for detector_name, tof_da in tof_das.items():
- det_meta: NMXDetectorMetadata = detector_metas[detector_name]
histogram = tof_da.hist(tof=tof_bin_edges)
tof_histograms[detector_name] = histogram
- export_reduced_data_as_nxlauetof(
- detector_name=det_meta.detector_name,
- da=histogram,
- output_file=config.output.output_file,
- compress_mode=config.output.compression,
- )
- return sc.DataGroup(
- metadata=detector_metas,
+ results = sc.DataGroup(
histogram=tof_histograms,
+ detector=detector_metas,
+ sample=metadatas[NMXSampleMetadata],
+ source=metadatas[NMXSourceMetadata],
+ monitor=monitor_metadata,
lookup_table=base_wf.compute(TimeOfFlightLookupTable),
)
+ if not config.output.skip_file_output:
+ save_results(results=results, output_config=config.output)
+
+ return results
+
+
+def save_results(*, results: sc.DataGroup, output_config: OutputConfig) -> None:
+ # Validate if results have expected fields
+ for mandatory_key in ['histogram', 'detector', 'sample', 'source', 'monitor']:
+ if mandatory_key not in results:
+ raise ValueError(f"Missing '{mandatory_key}' in results to save.")
+
+ export_static_metadata_as_nxlauetof(
+ sample_metadata=results['sample'],
+ source_metadata=results['source'],
+ output_file=output_config.output_file,
+ overwrite=output_config.overwrite,
+ )
+ export_monitor_metadata_as_nxlauetof(
+ monitor_metadata=results['monitor'],
+ output_file=output_config.output_file,
+ )
+ for detector_name, detector_meta in results['detector'].items():
+ export_detector_metadata_as_nxlauetof(
+ detector_metadata=detector_meta,
+ output_file=output_config.output_file,
+ )
+ export_reduced_data_as_nxlauetof(
+ detector_name=detector_name,
+ da=results['histogram'][detector_name],
+ output_file=output_config.output_file,
+ compress_mode=output_config.compression,
+ )
def main() -> None:
diff --git a/src/ess/nmx/nexus.py b/src/ess/nmx/nexus.py
index eb233b2..8de79d4 100644
--- a/src/ess/nmx/nexus.py
+++ b/src/ess/nmx/nexus.py
@@ -210,9 +210,11 @@ def _add_arbitrary_metadata(
def export_static_metadata_as_nxlauetof(
+ *,
sample_metadata: NMXSampleMetadata,
source_metadata: NMXSourceMetadata,
output_file: str | pathlib.Path | io.BytesIO,
+ overwrite: bool = False,
**arbitrary_metadata: sc.Variable,
) -> None:
"""Export the metadata to a NeXus file with the LAUE_TOF application definition.
@@ -237,7 +239,7 @@ def export_static_metadata_as_nxlauetof(
Arbitrary metadata that does not fit into the existing metadata objects.
"""
- _check_file(output_file, overwrite=True)
+ _check_file(output_file, overwrite=overwrite)
with h5py.File(output_file, "w") as f:
f.attrs["NX_class"] = "NXlauetof"
nx_entry = _create_lauetof_data_entry(f)
diff --git a/src/ess/nmx/workflows.py b/src/ess/nmx/workflows.py
index d8aaac4..f4e9bd3 100644
--- a/src/ess/nmx/workflows.py
+++ b/src/ess/nmx/workflows.py
@@ -56,7 +56,12 @@ def _simulate_fixed_wavelength_tof(
----------
"""
source = tof.Source(
- facility="ess", neutrons=neutrons, pulses=2, seed=seed, wmax=wmax, wmin=wmin
+ facility="ess",
+ neutrons=neutrons,
+ pulses=1,
+ seed=seed,
+ wmax=wmax,
+ wmin=wmin,
)
nmx_det = tof.Detector(distance=max(ltotal_range), name="detector")
model = tof.Model(source=source, choppers=[], detectors=[nmx_det])
diff --git a/tests/executable_test.py b/tests/executable_test.py
index d385d13..4fba9c6 100644
--- a/tests/executable_test.py
+++ b/tests/executable_test.py
@@ -10,6 +10,7 @@
import pytest
import scipp as sc
import scippnexus as snx
+from scipp.testing import assert_identical
from ess.nmx._executable_helper import (
InputConfig,
@@ -18,9 +19,8 @@
WorkflowConfig,
build_reduction_argument_parser,
reduction_config_from_args,
- to_command_arguments,
)
-from ess.nmx.configurations import TimeBinCoordinate, TimeBinUnit
+from ess.nmx.configurations import TimeBinCoordinate, TimeBinUnit, to_command_arguments
from ess.nmx.executables import reduction
from ess.nmx.types import Compression
@@ -93,6 +93,7 @@ def test_reduction_config() -> None:
max_time_bin=100_000,
time_bin_coordinate=TimeBinCoordinate.time_of_flight,
time_bin_unit=TimeBinUnit.us,
+ tof_simulation_num_neutrons=700_000,
tof_simulation_max_wavelength=5.0,
tof_simulation_min_wavelength=1.0,
tof_simulation_min_ltotal=140.0,
@@ -100,7 +101,11 @@ def test_reduction_config() -> None:
tof_simulation_seed=12345,
)
output_options = OutputConfig(
- output_file='test-output.h5', compression=Compression.NONE, verbose=True
+ output_file='test-output.h5',
+ compression=Compression.NONE,
+ verbose=True,
+ skip_file_output=True,
+ overwrite=True,
)
expected_config = ReductionConfig(
inputs=input_options, workflow=workflow_options, output=output_options
@@ -112,7 +117,7 @@ def test_reduction_config() -> None:
arg_list = _build_arg_list_from_pydantic_instance(
input_options, workflow_options, output_options
)
- assert arg_list == to_command_arguments(expected_config, one_line=False)
+ assert arg_list == to_command_arguments(config=expected_config, one_line=False)
# Parse arguments and build config from them.
parser = build_reduction_argument_parser()
@@ -189,7 +194,9 @@ def reduction_config(
# only properly works in linux so we set it to NONE here
# for convenience of testing on all platforms.
output_config = OutputConfig(
- output_file=temp_output_file.as_posix(), compression=Compression.NONE
+ output_file=temp_output_file.as_posix(),
+ compression=Compression.NONE,
+ skip_file_output=True, # No need to write output file for most tests.
)
return ReductionConfig(inputs=input_config, output=output_config)
@@ -213,3 +220,47 @@ def test_reduction_only_number_of_time_bins(reduction_config: ReductionConfig) -
# Check that the number of time bins is as expected.
assert len(hist.coords['tof']) == 21 # nbins + 1 edges
+
+
+@pytest.fixture
+def tof_lut_file_path(tmp_path: pathlib.Path):
+ """Fixture to provide the path to the small NMX NeXus file."""
+ from ess.nmx.workflows import initialize_nmx_workflow
+ from ess.reduce.time_of_flight import TimeOfFlightLookupTable
+
+ # Simply use the default workflow for testing.
+ workflow = initialize_nmx_workflow(config=WorkflowConfig())
+ tof_lut: sc.DataArray = workflow.compute(TimeOfFlightLookupTable)
+
+ # Change the tof range a bit for testing.
+ tof_lut *= 2
+
+ lut_file_path = tmp_path / "nmx_tof_lookup_table.h5"
+ tof_lut.save_hdf5(lut_file_path.as_posix())
+ yield lut_file_path
+ if lut_file_path.exists():
+ lut_file_path.unlink()
+
+
+def test_reduction_with_tof_lut_file(
+ reduction_config: ReductionConfig, tof_lut_file_path: pathlib.Path
+) -> None:
+ # Make sure the config uses no TOF lookup table file initially.
+ assert reduction_config.workflow.tof_lookup_table_file_path is None
+ with known_warnings():
+ default_results = reduction(config=reduction_config)
+
+ # Update config to use the TOF lookup table file.
+ reduction_config.workflow.tof_lookup_table_file_path = tof_lut_file_path.as_posix()
+ with known_warnings():
+ results = reduction(config=reduction_config)
+
+ for default_hist, hist in zip(
+ default_results['histogram'].values(),
+ results['histogram'].values(),
+ strict=True,
+ ):
+ tof_edges_default = default_hist.coords['tof']
+ tof_edges = hist.coords['tof']
+ assert_identical(default_hist.data, hist.data)
+ assert_identical(tof_edges_default * 2, tof_edges)