diff --git a/docs/api-reference/index.md b/docs/api-reference/index.md
index 5da7f25..40f2176 100644
--- a/docs/api-reference/index.md
+++ b/docs/api-reference/index.md
@@ -34,5 +34,6 @@
types
mtz_io
scaling
+ configurations
```
diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md
index 550cfb7..83cdd88 100644
--- a/docs/user-guide/index.md
+++ b/docs/user-guide/index.md
@@ -5,6 +5,7 @@
maxdepth: 1
---
+workflow
mcstas_workflow
mcstas_workflow_chunk
scaling_workflow
diff --git a/docs/user-guide/workflow.ipynb b/docs/user-guide/workflow.ipynb
new file mode 100644
index 0000000..bd28d20
--- /dev/null
+++ b/docs/user-guide/workflow.ipynb
@@ -0,0 +1,171 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# NMX Reduction Workflow\n",
+ "\n",
+ "> NMX does not expect users to use the python interface directly.\n",
+ "This documentation is mostly for instrument data scientists or instrument scientists."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## TL;DR"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import reduction\n",
+ "from ess.nmx.data import get_small_nmx_nexus\n",
+ "from ess.nmx.configurations import (\n",
+ " ReductionConfig,\n",
+ " OutputConfig,\n",
+ " InputConfig,\n",
+ " WorkflowConfig,\n",
+ " TimeBinCoordinate,\n",
+ ")\n",
+ "\n",
+ "# Build Configuration\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(\n",
+ " input_file=[get_small_nmx_nexus().as_posix()],\n",
+ " detector_ids=[0, 1, 2], # Detector index to be reduced in alphabetical order.\n",
+ " ),\n",
+ " output=OutputConfig(\n",
+ " output_file=\"scipp_output.hdf\", skip_file_output=False, overwrite=True\n",
+ " ),\n",
+ " workflow=WorkflowConfig(\n",
+ " time_bin_coordinate=TimeBinCoordinate.time_of_flight,\n",
+ " nbins=10,\n",
+ " tof_simulation_num_neutrons=1_000_000,\n",
+ " tof_simulation_min_wavelength=1.8,\n",
+ " tof_simulation_max_wavelength=3.6,\n",
+ " tof_simulation_seed=42,\n",
+ " ),\n",
+ ")\n",
+ "\n",
+ "# Run Reduction\n",
+ "reduction(config=config, display=display)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configuration\n",
+ "\n",
+ "`essnmx` provides a command line data reduction tool for the reduction between `nexus` and `dials`.\n",
+ "The `essnmx-reduce` interface will reduce a `nexus` file\n",
+ "and save the results into `NXlauetof` (not exactly but very close) format for `dials`.\n",
+ "\n",
+ "Argument options could be exhaustive, therefore we wrapped them into a nested pydantic model.\n",
+ "Here is a python API you can use to build the configuration and turn it into command line arguments.\n",
+ "\n",
+ "**The configuration object is a pydantic model, so it strictly checks the types of the arguments.**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.configurations import (\n",
+ " ReductionConfig,\n",
+ " OutputConfig,\n",
+ " InputConfig,\n",
+ " WorkflowConfig,\n",
+ " TimeBinCoordinate,\n",
+ " to_command_arguments,\n",
+ ")\n",
+ "\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(\n",
+ " input_file=[\"PATH_TO_THE_NEXUS_FILE.hdf\"],\n",
+ " detector_ids=[0, 1, 2], # Detector index to be reduced in alphabetical order.\n",
+ " ),\n",
+ " output=OutputConfig(output_file=\"scipp_output.hdf\", skip_file_output=True),\n",
+ " workflow=WorkflowConfig(\n",
+ " time_bin_coordinate=TimeBinCoordinate.time_of_flight,\n",
+ " nbins=10,\n",
+ " tof_simulation_num_neutrons=1_000_000,\n",
+ " tof_simulation_min_wavelength=1.8,\n",
+ " tof_simulation_max_wavelength=3.6,\n",
+ " tof_simulation_seed=42,\n",
+ " ),\n",
+ ")\n",
+ "\n",
+ "display(config)\n",
+ "print(to_command_arguments(config=config, one_line=True))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Reduce Nexus File(s)\n",
+ "\n",
+ "`OutputConfig` has an option called `skip_file_output` if you want to reduce the file and use it only in memory.\n",
+ "Then you can use `save_results` function to explicitly save the results."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import reduction\n",
+ "from ess.nmx.data import get_small_nmx_nexus\n",
+ "\n",
+ "config = ReductionConfig(\n",
+ " inputs=InputConfig(input_file=[get_small_nmx_nexus().as_posix()]),\n",
+ " output=OutputConfig(skip_file_output=True),\n",
+ ")\n",
+ "results = reduction(config=config, display=display)\n",
+ "results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ess.nmx.executables import save_results\n",
+ "\n",
+ "output_config = OutputConfig(output_file=\"scipp_output.hdf\", overwrite=True)\n",
+ "save_results(results=results, output_config=output_config)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "nmx-dev-313",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.13.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/src/ess/nmx/_executable_helper.py b/src/ess/nmx/_executable_helper.py
index afa1843..425c494 100644
--- a/src/ess/nmx/_executable_helper.py
+++ b/src/ess/nmx/_executable_helper.py
@@ -174,42 +174,6 @@ def reduction_config_from_args(args: argparse.Namespace) -> ReductionConfig:
)
-def to_command_arguments(
- config: ReductionConfig, one_line: bool = True
-) -> list[str] | str:
- """Convert the config to a list of command line arguments.
-
- Parameters
- ----------
- one_line:
- If True, return a single string with all arguments joined by spaces.
- If False, return a list of argument strings.
-
- """
- args = {}
- for instance in config._children:
- args.update(instance.model_dump(mode='python'))
- args = {f"--{k.replace('_', '-')}": v for k, v in args.items() if v is not None}
-
- arg_list = []
- for k, v in args.items():
- if not isinstance(v, bool):
- arg_list.append(k)
- if isinstance(v, list):
- arg_list.extend(str(item) for item in v)
- elif isinstance(v, enum.StrEnum):
- arg_list.append(v.value)
- else:
- arg_list.append(str(v))
- elif v is True:
- arg_list.append(k)
-
- if one_line:
- return ' '.join(arg_list)
- else:
- return arg_list
-
-
def build_logger(args: argparse.Namespace | OutputConfig) -> logging.Logger:
logger = logging.getLogger(__name__)
if args.verbose:
diff --git a/src/ess/nmx/configurations.py b/src/ess/nmx/configurations.py
index 3db753f..4956310 100644
--- a/src/ess/nmx/configurations.py
+++ b/src/ess/nmx/configurations.py
@@ -130,11 +130,22 @@ class OutputConfig(BaseModel):
default=False,
)
# File output
+ skip_file_output: bool = Field(
+ title="Skip File Output",
+ description="If True, the output file will not be written.",
+ default=False,
+ )
output_file: str = Field(
title="Output File",
- description="Path to the output file.",
+ description="Path to the output file. "
+ "It will be overwritten if ``overwrite`` is True.",
default="scipp_output.h5",
)
+ overwrite: bool = Field(
+ title="Overwrite Output File",
+ description="If True, overwrite the output file if ``output_file`` exists.",
+ default=False,
+ )
compression: Compression = Field(
title="Compression",
description="Compress option of reduced output file.",
@@ -152,3 +163,45 @@ class ReductionConfig(BaseModel):
@property
def _children(self) -> list[BaseModel]:
return [self.inputs, self.workflow, self.output]
+
+
+def to_command_arguments(
+ *, config: ReductionConfig, one_line: bool = True, separator: str = '\\\n'
+) -> list[str] | str:
+ """Convert the config to a list of command line arguments.
+
+ Parameters
+ ----------
+ one_line:
+ If True, return a single string with all arguments joined by spaces.
+ If False, return a list of argument strings.
+
+ """
+ args = {}
+ for instance in config._children:
+ args.update(instance.model_dump(mode='python'))
+ args = {f"--{k.replace('_', '-')}": v for k, v in args.items() if v is not None}
+
+ arg_list = []
+ for k, v in args.items():
+ if not isinstance(v, bool):
+ arg_list.append(k)
+ if isinstance(v, list):
+ arg_list.extend(str(item) for item in v)
+ elif isinstance(v, enum.StrEnum):
+ arg_list.append(v.value)
+ else:
+ arg_list.append(str(v))
+ elif v is True:
+ arg_list.append(k)
+
+ if one_line:
+ # Default separator is backslash + newline for better readability
+ # Users can directly copy-paste the output in a terminal or a script.
+ return (
+ (separator + '--')
+ .join(" ".join(arg_list).split('--'))
+ .removeprefix(separator)
+ )
+ else:
+ return arg_list
diff --git a/src/ess/nmx/executables.py b/src/ess/nmx/executables.py
index 4cd16d7..fdc7b00 100644
--- a/src/ess/nmx/executables.py
+++ b/src/ess/nmx/executables.py
@@ -17,7 +17,7 @@
collect_matching_input_files,
reduction_config_from_args,
)
-from .configurations import ReductionConfig, WorkflowConfig
+from .configurations import OutputConfig, ReductionConfig, WorkflowConfig
from .nexus import (
export_detector_metadata_as_nxlauetof,
export_monitor_metadata_as_nxlauetof,
@@ -160,27 +160,17 @@ def reduction(
display=display,
)
metadatas = base_wf.compute((NMXSampleMetadata, NMXSourceMetadata))
- export_static_metadata_as_nxlauetof(
- sample_metadata=metadatas[NMXSampleMetadata],
- source_metadata=metadatas[NMXSourceMetadata],
- output_file=config.output.output_file,
- )
tof_das = sc.DataGroup()
detector_metas = sc.DataGroup()
for detector_name in detector_names:
cur_wf = base_wf.copy()
cur_wf[NeXusName[snx.NXdetector]] = detector_name
results = cur_wf.compute((TofDetector[SampleRun], NMXDetectorMetadata))
- detector_meta: NMXDetectorMetadata = results[NMXDetectorMetadata]
- export_detector_metadata_as_nxlauetof(
- detector_metadata=detector_meta, output_file=config.output.output_file
- )
- detector_metas[detector_name] = detector_meta
+ detector_metas[detector_name] = results[NMXDetectorMetadata]
# Binning into 1 bin and getting final tof bin edges later.
tof_das[detector_name] = results[TofDetector[SampleRun]].bin(tof=1)
tof_bin_edges = _finalize_tof_bin_edges(tof_das=tof_das, config=config.workflow)
-
monitor_metadata = NMXMonitorMetadata(
tof_bin_coord='tof',
# TODO: Use real monitor data
@@ -190,28 +180,54 @@ def reduction(
data=sc.ones_like(tof_bin_edges[:-1]),
),
)
- export_monitor_metadata_as_nxlauetof(
- monitor_metadata=monitor_metadata, output_file=config.output.output_file
- )
# Histogram detector counts
tof_histograms = sc.DataGroup()
for detector_name, tof_da in tof_das.items():
- det_meta: NMXDetectorMetadata = detector_metas[detector_name]
histogram = tof_da.hist(tof=tof_bin_edges)
tof_histograms[detector_name] = histogram
- export_reduced_data_as_nxlauetof(
- detector_name=det_meta.detector_name,
- da=histogram,
- output_file=config.output.output_file,
- compress_mode=config.output.compression,
- )
- return sc.DataGroup(
- metadata=detector_metas,
+ results = sc.DataGroup(
histogram=tof_histograms,
+ detector=detector_metas,
+ sample=metadatas[NMXSampleMetadata],
+ source=metadatas[NMXSourceMetadata],
+ monitor=monitor_metadata,
lookup_table=base_wf.compute(TimeOfFlightLookupTable),
)
+ if not config.output.skip_file_output:
+ save_results(results=results, output_config=config.output)
+
+ return results
+
+
+def save_results(*, results: sc.DataGroup, output_config: OutputConfig) -> None:
+ # Validate if results have expected fields
+ for mandatory_key in ['histogram', 'detector', 'sample', 'source', 'monitor']:
+ if mandatory_key not in results:
+ raise ValueError(f"Missing '{mandatory_key}' in results to save.")
+
+ export_static_metadata_as_nxlauetof(
+ sample_metadata=results['sample'],
+ source_metadata=results['source'],
+ output_file=output_config.output_file,
+ overwrite=output_config.overwrite,
+ )
+ export_monitor_metadata_as_nxlauetof(
+ monitor_metadata=results['monitor'],
+ output_file=output_config.output_file,
+ )
+ for detector_name, detector_meta in results['detector'].items():
+ export_detector_metadata_as_nxlauetof(
+ detector_metadata=detector_meta,
+ output_file=output_config.output_file,
+ )
+ export_reduced_data_as_nxlauetof(
+ detector_name=detector_name,
+ da=results['histogram'][detector_name],
+ output_file=output_config.output_file,
+ compress_mode=output_config.compression,
+ )
def main() -> None:
diff --git a/src/ess/nmx/nexus.py b/src/ess/nmx/nexus.py
index eb233b2..8de79d4 100644
--- a/src/ess/nmx/nexus.py
+++ b/src/ess/nmx/nexus.py
@@ -210,9 +210,11 @@ def _add_arbitrary_metadata(
def export_static_metadata_as_nxlauetof(
+ *,
sample_metadata: NMXSampleMetadata,
source_metadata: NMXSourceMetadata,
output_file: str | pathlib.Path | io.BytesIO,
+ overwrite: bool = False,
**arbitrary_metadata: sc.Variable,
) -> None:
"""Export the metadata to a NeXus file with the LAUE_TOF application definition.
@@ -237,7 +239,7 @@ def export_static_metadata_as_nxlauetof(
Arbitrary metadata that does not fit into the existing metadata objects.
"""
- _check_file(output_file, overwrite=True)
+ _check_file(output_file, overwrite=overwrite)
with h5py.File(output_file, "w") as f:
f.attrs["NX_class"] = "NXlauetof"
nx_entry = _create_lauetof_data_entry(f)
diff --git a/tests/executable_test.py b/tests/executable_test.py
index 03e3880..e2b5e65 100644
--- a/tests/executable_test.py
+++ b/tests/executable_test.py
@@ -18,9 +18,8 @@
WorkflowConfig,
build_reduction_argument_parser,
reduction_config_from_args,
- to_command_arguments,
)
-from ess.nmx.configurations import TimeBinCoordinate, TimeBinUnit
+from ess.nmx.configurations import TimeBinCoordinate, TimeBinUnit, to_command_arguments
from ess.nmx.executables import reduction
from ess.nmx.types import Compression
@@ -99,7 +98,11 @@ def test_reduction_config() -> None:
tof_simulation_seed=12345,
)
output_options = OutputConfig(
- output_file='test-output.h5', compression=Compression.NONE, verbose=True
+ output_file='test-output.h5',
+ compression=Compression.NONE,
+ verbose=True,
+ skip_file_output=True,
+ overwrite=True,
)
expected_config = ReductionConfig(
inputs=input_options, workflow=workflow_options, output=output_options
@@ -111,7 +114,7 @@ def test_reduction_config() -> None:
arg_list = _build_arg_list_from_pydantic_instance(
input_options, workflow_options, output_options
)
- assert arg_list == to_command_arguments(expected_config, one_line=False)
+ assert arg_list == to_command_arguments(config=expected_config, one_line=False)
# Parse arguments and build config from them.
parser = build_reduction_argument_parser()
@@ -188,7 +191,9 @@ def reduction_config(
# only properly works in linux so we set it to NONE here
# for convenience of testing on all platforms.
output_config = OutputConfig(
- output_file=temp_output_file.as_posix(), compression=Compression.NONE
+ output_file=temp_output_file.as_posix(),
+ compression=Compression.NONE,
+ skip_file_output=True, # No need to write output file for most tests.
)
return ReductionConfig(inputs=input_config, output=output_config)