diff --git a/docs/user-guide/estia/estia-advanced-mcstas-reduction.ipynb b/docs/user-guide/estia/estia-advanced-mcstas-reduction.ipynb index abe221af..325eb15f 100644 --- a/docs/user-guide/estia/estia-advanced-mcstas-reduction.ipynb +++ b/docs/user-guide/estia/estia-advanced-mcstas-reduction.ipynb @@ -76,7 +76,7 @@ "\n", "# If you want to use the ground truth wavelengths from the McStas simulation\n", "# instead of the wavelengths from the frame unwrapping, uncomment the lines below:\n", - "#from ess.estia.conversions import mcstas_wavelength_coordinate_transformation_graph\n", + "#from ess.estia.mcstas import mcstas_wavelength_coordinate_transformation_graph\n", "#wf.insert( mcstas_wavelength_coordinate_transformation_graph )\n", "\n", "wf[TimeOfFlightLookupTableFilename] = estia_tof_lookup_table()\n", @@ -448,7 +448,7 @@ "\n", "# If you want to use the ground truth wavelengths from the McStas simulation\n", "# instead of the wavelengths from the frame unwrapping, uncomment the lines below:\n", - "#from ess.estia.conversions import mcstas_wavelength_coordinate_transformation_graph\n", + "#from ess.estia.mcstas import mcstas_wavelength_coordinate_transformation_graph\n", "#wf.insert( mcstas_wavelength_coordinate_transformation_graph )\n", "\n", "wf[TimeOfFlightLookupTableFilename] = estia_tof_lookup_table()\n", diff --git a/docs/user-guide/estia/simulated-spin-flip-sample.ipynb b/docs/user-guide/estia/simulated-spin-flip-sample.ipynb index ba81692b..b9ab6d82 100644 --- a/docs/user-guide/estia/simulated-spin-flip-sample.ipynb +++ b/docs/user-guide/estia/simulated-spin-flip-sample.ipynb @@ -53,7 +53,7 @@ " estia_tof_lookup_table,\n", ")\n", "from ess.reflectometry.figures import wavelength_z_figure\n", - "from ess.estia.conversions import mcstas_wavelength_coordinate_transformation_graph\n", + "from ess.estia.mcstas import mcstas_wavelength_coordinate_transformation_graph\n", "\n", "from ess.polarization import ( # noqa: F401\n", " Up, Down, ReducedSampleDataBySpinChannel, SupermirrorEfficiencyFunction, Polarizer, Analyzer, PolarizationAnalysisWorkflow,\n", @@ -353,6 +353,8 @@ "mask = sc.isnan(I0).sum(('blade', 'wire')) > sc.scalar(0, unit=None)\n", "wf[ReducedReference] = I0.assign_masks(nopolcal=mask.data).assign_coords(wavelength=wf.compute(WavelengthBins))\n", "\n", + "# Required to read sample rotation / similar parameters associated with the reference measurement\n", + "wf[RawDetector] = sc.io.load_hdf5(estia_mcstas_spin_flip_example('supermirror', 'offoff'))\n", "\n", "sample_name = 'spin_flip_sample'\n", "\n", diff --git a/src/ess/amor/conversions.py b/src/ess/amor/conversions.py index 29935802..d531a763 100644 --- a/src/ess/amor/conversions.py +++ b/src/ess/amor/conversions.py @@ -6,13 +6,24 @@ from ..reflectometry.conversions import reflectometry_q from ..reflectometry.types import ( BeamDivergenceLimits, + BeamSize, CoordTransformationGraph, + DetectorRotation, + RunType, + SampleRotation, + SampleSize, WavelengthBins, YIndexLimits, ZIndexLimits, ) from .geometry import Detector -from .types import GravityToggle +from .types import ( + ChopperDistance, + ChopperFrequency, + ChopperPhase, + ChopperSeparation, + GravityToggle, +) def theta(wavelength, pixel_divergence_angle, L2, sample_rotation, detector_rotation): @@ -135,7 +146,17 @@ def wavelength( return out.to(unit='angstrom', copy=False) -def coordinate_transformation_graph(gravity: GravityToggle) -> CoordTransformationGraph: +def coordinate_transformation_graph( + gravity: GravityToggle, + detector_rotation: 
DetectorRotation[RunType], + sample_rotation: SampleRotation[RunType], + chopper_phase: ChopperPhase[RunType], + chopper_frequency: ChopperFrequency[RunType], + chopper_distance: ChopperDistance[RunType], + chopper_separation: ChopperSeparation[RunType], + sample_size: SampleSize[RunType], + beam_size: BeamSize[RunType], +) -> CoordTransformationGraph[RunType]: return { "wavelength": wavelength, "theta": theta if gravity else theta_no_gravity, @@ -143,9 +164,50 @@ def coordinate_transformation_graph(gravity: GravityToggle) -> CoordTransformati "Q": reflectometry_q, "L1": lambda chopper_distance: sc.abs(chopper_distance), "L2": lambda distance_in_detector: distance_in_detector + Detector.distance, + "sample_rotation": lambda: sample_rotation.to(unit='rad'), + "detector_rotation": lambda: detector_rotation.to(unit='rad'), + "chopper_phase": lambda: chopper_phase, + "chopper_frequency": lambda: chopper_frequency, + "chopper_separation": lambda: chopper_separation, + "chopper_distance": lambda: chopper_distance, + "sample_size": lambda: sample_size, + "beam_size": lambda: beam_size, } +def add_coords( + da: sc.DataArray, + graph: dict, +) -> sc.DataArray: + "Adds scattering coordinates to the raw detector data." + return da.transform_coords( + ( + "wavelength", + "theta", + "divergence_angle", + "Q", + "L1", + "L2", + "blade", + "wire", + "strip", + "z_index", + "sample_rotation", + "detector_rotation", + "sample_size", + "beam_size", + "chopper_phase", + "chopper_frequency", + "chopper_separation", + "chopper_distance", + ), + graph, + rename_dims=False, + keep_intermediate=False, + keep_aliases=False, + ) + + def _not_between(v, a, b): return (v < a) | (v > b) diff --git a/src/ess/amor/load.py b/src/ess/amor/load.py index 5d6efe59..3fcd8ffd 100644 --- a/src/ess/amor/load.py +++ b/src/ess/amor/load.py @@ -9,7 +9,6 @@ from ..reflectometry.load import load_nx from ..reflectometry.types import ( Beamline, - BeamSize, DetectorRotation, Filename, Measurement, @@ -20,9 +19,7 @@ RawDetector, RawSampleRotation, RunType, - SampleRotation, SampleRun, - SampleSize, ) from .geometry import pixel_coordinates_in_detector_system from .types import ( @@ -41,14 +38,6 @@ def load_detector( def load_events( detector: NeXusComponent[snx.NXdetector, RunType], - detector_rotation: DetectorRotation[RunType], - sample_rotation: SampleRotation[RunType], - chopper_phase: ChopperPhase[RunType], - chopper_frequency: ChopperFrequency[RunType], - chopper_distance: ChopperDistance[RunType], - chopper_separation: ChopperSeparation[RunType], - sample_size: SampleSize[RunType], - beam_size: BeamSize[RunType], ) -> RawDetector[RunType]: event_data = detector["data"] if 'event_time_zero' in event_data.coords: @@ -69,14 +58,6 @@ def load_events( "data" ].data.values - data.coords["sample_rotation"] = sample_rotation.to(unit='rad') - data.coords["detector_rotation"] = detector_rotation.to(unit='rad') - data.coords["chopper_phase"] = chopper_phase - data.coords["chopper_frequency"] = chopper_frequency - data.coords["chopper_separation"] = chopper_separation - data.coords["chopper_distance"] = chopper_distance - data.coords["sample_size"] = sample_size - data.coords["beam_size"] = beam_size return RawDetector[RunType](data) diff --git a/src/ess/amor/normalization.py b/src/ess/amor/normalization.py index 9c0b1d8a..405bae0d 100644 --- a/src/ess/amor/normalization.py +++ b/src/ess/amor/normalization.py @@ -14,6 +14,7 @@ ReducedReference, ReducibleData, Reference, + ReferenceRun, Sample, SampleRun, ) @@ -71,7 +72,7 @@ def 
evaluate_reference_at_sample_coords( reference: ReducedReference, sample: ReducibleData[SampleRun], detector_spatial_resolution: DetectorSpatialResolution[SampleRun], - graph: CoordTransformationGraph, + graph: CoordTransformationGraph[ReferenceRun], ) -> Reference: """ Adds a :math:`Q` and :math:`Q`-resolution coordinate to each bin of the ideal diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py index 0c1dcdf3..576d7359 100644 --- a/src/ess/amor/workflow.py +++ b/src/ess/amor/workflow.py @@ -1,6 +1,5 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from ..reflectometry.conversions import ( - add_coords, add_proton_current_coord, add_proton_current_mask, ) @@ -16,7 +15,7 @@ YIndexLimits, ZIndexLimits, ) -from .conversions import add_masks +from .conversions import add_coords, add_masks def add_coords_masks_and_apply_corrections( @@ -26,7 +25,7 @@ def add_coords_masks_and_apply_corrections( bdlim: BeamDivergenceLimits, wbins: WavelengthBins, proton_current: ProtonCurrent[RunType], - graph: CoordTransformationGraph, + graph: CoordTransformationGraph[RunType], ) -> ReducibleData[RunType]: """ Computes coordinates, masks and corrections that are diff --git a/src/ess/estia/beamline.py b/src/ess/estia/beamline.py index f4ec4656..e09b536f 100644 --- a/src/ess/estia/beamline.py +++ b/src/ess/estia/beamline.py @@ -6,9 +6,9 @@ DETECTOR_BANK_SIZES = { "multiblade_detector": { + "strip": 64, "blade": 48, "wire": 32, - "strip": 64, }, } diff --git a/src/ess/estia/conversions.py b/src/ess/estia/conversions.py index f68403b6..d033cf7b 100644 --- a/src/ess/estia/conversions.py +++ b/src/ess/estia/conversions.py @@ -2,13 +2,16 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc from scippneutron.conversion.tof import wavelength_from_tof +from scippnexus import NXsample, NXsource + +from ess.reduce.nexus.types import DetectorBankSizes, Position from ..reflectometry.conversions import reflectometry_q from ..reflectometry.types import ( CoordTransformationGraph, - DetectorLtotal, - RawDetector, + DetectorRotation, RunType, + SampleRotation, ) @@ -62,34 +65,64 @@ def divergence_angle( return sc.atan2(y=p.fields.x, x=p.fields.z) - detector_rotation.to(unit='rad') -def detector_ltotal_from_raw( - da: RawDetector[RunType], graph: CoordTransformationGraph -) -> DetectorLtotal[RunType]: - return da.transform_coords(['Ltotal'], graph=graph).coords['Ltotal'] - - -def coordinate_transformation_graph() -> CoordTransformationGraph: +def coordinate_transformation_graph( + source_position: Position[NXsource, RunType], + sample_position: Position[NXsample, RunType], + sample_rotation: SampleRotation[RunType], + detector_rotation: DetectorRotation[RunType], + detector_bank_sizes: DetectorBankSizes, +) -> CoordTransformationGraph[RunType]: + bank = detector_bank_sizes['multiblade_detector'] return { - "wavelength": wavelength_from_tof, "theta": theta, "divergence_angle": divergence_angle, "Q": reflectometry_q, "L1": lambda source_position, sample_position: sc.norm( - sample_position - source_position + sample_position - source_position.to(unit=sample_position.unit) ), # + extra correction for guides? 
- "L2": lambda position, sample_position: sc.norm(position - sample_position), - "Ltotal": lambda L1, L2: L1 + L2, - } - - -def mcstas_wavelength_coordinate_transformation_graph() -> CoordTransformationGraph: - return { - **coordinate_transformation_graph(), - "wavelength": lambda wavelength_from_mcstas: wavelength_from_mcstas, + "L2": lambda position, sample_position: sc.norm( + position - sample_position.to(unit=position.unit) + ), + "Ltotal": lambda L1, L2: L1.to(unit=L2.unit) + L2, + 'sample_size': lambda: sc.scalar(20.0, unit='mm'), + 'blade': lambda: sc.arange('blade', bank['blade'] - 1, -1, -1), + 'wire': lambda: sc.arange('wire', bank['wire'] - 1, -1, -1), + 'strip': lambda: sc.arange('strip', bank['strip'] - 1, -1, -1), + 'z_index': lambda blade, wire: blade * wire, + "wavelength": wavelength_from_tof, + 'sample_rotation': lambda: sample_rotation, + 'detector_rotation': lambda: detector_rotation, + 'source_position': lambda: source_position, + 'sample_position': lambda: sample_position, } -providers = ( - coordinate_transformation_graph, - detector_ltotal_from_raw, -) +def add_coords( + da: sc.DataArray, + graph: dict, +) -> sc.DataArray: + "Adds scattering coordinates to the raw detector data." + return da.transform_coords( + ( + "wavelength", + "theta", + "divergence_angle", + "Q", + "L1", + "L2", + "blade", + "wire", + "strip", + "z_index", + "sample_rotation", + "detector_rotation", + "sample_size", + ), + graph, + rename_dims=False, + keep_intermediate=False, + keep_aliases=False, + ) + + +providers = (coordinate_transformation_graph,) diff --git a/src/ess/estia/corrections.py b/src/ess/estia/corrections.py index b5a93965..d440ab30 100644 --- a/src/ess/estia/corrections.py +++ b/src/ess/estia/corrections.py @@ -3,7 +3,6 @@ import scipp as sc from ..reflectometry.conversions import ( - add_coords, add_proton_current_coord, add_proton_current_mask, ) @@ -19,6 +18,7 @@ YIndexLimits, ZIndexLimits, ) +from .conversions import add_coords from .maskings import add_masks @@ -29,7 +29,7 @@ def add_coords_masks_and_apply_corrections( bdlim: BeamDivergenceLimits, wbins: WavelengthBins, proton_current: ProtonCurrent[RunType], - graph: CoordTransformationGraph, + graph: CoordTransformationGraph[RunType], ) -> ReducibleData[RunType]: """ Computes coordinates, masks and corrections that are diff --git a/src/ess/estia/load.py b/src/ess/estia/load.py index 40cd5227..e60f6d78 100644 --- a/src/ess/estia/load.py +++ b/src/ess/estia/load.py @@ -1,103 +1,26 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) -import h5py -import scipp as sc +from scippnexus import NXdetector, NXsample + +from ess.reduce.nexus.types import NeXusComponent from ..reflectometry.types import ( - Filename, - RawDetector, + DetectorRotation, + RawSampleRotation, RunType, - SampleRotationOffset, ) -from .beamline import DETECTOR_BANK_SIZES -from .mcstas import parse_events_ascii, parse_events_h5 - - -def load_mcstas_events( - filename: Filename[RunType], - sample_rotation_offset: SampleRotationOffset[RunType], -) -> RawDetector[RunType]: - """ - Load event data from a McStas run and reshape it - to look like what we would expect if - we loaded data from a real experiment file. 
- """ - if h5py.is_hdf5(filename): - with h5py.File(filename) as f: - da = parse_events_h5(f) - else: - with open(filename) as f: - da = parse_events_ascii(f) - - da.coords['sample_rotation'] = sc.scalar( - float(da.coords['omegaa'].value), unit='deg' - ) - da.coords['detector_rotation'] = 2 * da.coords['sample_rotation'] + sc.scalar( - 1.65, unit='deg' - ) - da.coords['sample_rotation'] += sample_rotation_offset.to( - unit=da.coords['sample_rotation'].unit - ) - - nblades = DETECTOR_BANK_SIZES['multiblade_detector']['blade'] - nwires = DETECTOR_BANK_SIZES['multiblade_detector']['wire'] - nstrips = DETECTOR_BANK_SIZES['multiblade_detector']['strip'] - xbins = sc.linspace('x', -0.25, 0.25, nblades * nwires + 1) - ybins = sc.linspace('y', -0.13, 0.13, nstrips + 1) - da = da.bin(y=ybins, x=xbins).rename_dims({'y': 'strip'}) - da.coords['strip'] = sc.arange('strip', 0, nstrips) - da.coords['z_index'] = sc.arange('x', nblades * nwires - 1, -1, -1) - - # Information is not available in the mcstas output files, therefore it's hardcoded - da.coords['sample_position'] = sc.vector([0.264298, -0.427595, 35.0512], unit='m') - da.coords['source_position'] = sc.vector([0, 0, 0.0], unit='m') - da.coords['detector_position'] = sc.vector( - tuple(map(float, da.coords['position'].value.split(' '))), unit='m' - ) - - rotation_by_detector_rotation = sc.spatial.rotation( - value=[ - sc.scalar(0.0), - sc.sin(da.coords['detector_rotation'].to(unit='rad')), - sc.scalar(0.0), - sc.cos(da.coords['detector_rotation'].to(unit='rad')), - ] - ) - position = sc.spatial.as_vectors( - x=sc.midpoints(da.coords['x']) * sc.scalar(1.0, unit='m'), - y=sc.midpoints(da.coords['y']) * sc.scalar(1.0, unit='m'), - z=sc.scalar(0.0, unit='m'), - ).transpose(da.dims) - da.coords['position'] = ( - da.coords['detector_position'] + rotation_by_detector_rotation * position - ) - da.bins.coords['event_time_zero'] = ( - sc.scalar(0, unit='s') * da.bins.coords['t'] - ).to(unit='ns') - da.bins.coords['event_time_offset'] = ( - sc.scalar(1, unit='s') * da.bins.coords['t'] - ).to(unit='ns') % sc.scalar(1 / 14, unit='s').to(unit='ns') - da.bins.coords['wavelength'] = sc.scalar(1, unit='angstrom') * da.bins.coords['L'] +def load_sample_rotation( + sample: NeXusComponent[NXsample, RunType], +) -> RawSampleRotation[RunType]: + return sample['sample_rotation'][0].data - da.coords["sample_size"] = sc.scalar(1.0, unit='m') * float( - da.coords['sample_length'].value - ) - da.coords["beam_size"] = sc.scalar(2.0, unit='mm') - da = da.fold( - 'x', - sizes={ - k: v - for k, v in DETECTOR_BANK_SIZES['multiblade_detector'].items() - if k in ('blade', 'wire') - }, - ) - da.bins.coords.pop('L') - da.bins.coords.pop('t') - da.bins.coords['wavelength_from_mcstas'] = da.bins.coords.pop('wavelength') - return RawDetector[RunType](da) +def load_detector_rotation( + detector: NeXusComponent[NXdetector, RunType], +) -> DetectorRotation[RunType]: + return detector['transformations']['detector_rotation'].value[0].data -providers = () +providers = (load_sample_rotation, load_detector_rotation) diff --git a/src/ess/estia/mcstas.py b/src/ess/estia/mcstas.py index f1a07f99..ebb773c5 100644 --- a/src/ess/estia/mcstas.py +++ b/src/ess/estia/mcstas.py @@ -1,9 +1,28 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import pathlib + import h5py +import numpy as np import pandas as pd import scipp as sc +from scippnexus import NXsample, NXsource + +from ess.reduce.nexus.types import DetectorBankSizes, Position -from ess.reflectometry.load import load_h5 
+from ..reflectometry.load import load_h5 +from ..reflectometry.types import ( + CoordTransformationGraph, + DetectorLtotal, + DetectorRotation, + Filename, + RawDetector, + RawSampleRotation, + RunType, + SampleRotation, + SampleRotationOffset, +) +from .beamline import DETECTOR_BANK_SIZES +from .conversions import coordinate_transformation_graph def parse_metadata_ascii(lines): @@ -63,7 +82,7 @@ def parse_events_ascii(lines): raise ValueError('Could not parse the file as a list of events.') -def parse_events_h5(f): +def parse_events_h5(f, events_to_sample_per_unit_weight=None): if isinstance(f, str): with h5py.File(f) as ff: return parse_events_h5(ff) @@ -75,23 +94,200 @@ def parse_events_h5(f): 'NXentry/simulation/Param', ) events = events[()] - da = sc.DataArray( - # The squares on the variances is the correct way to load weighted event data. - # Consult the McStas documentation - # (section 2.2.1) https://www.mcstas.org/documentation/manual/ - # for more information. - sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), - ) + if events_to_sample_per_unit_weight is None: + da = sc.DataArray( + # The squares on the variances is the correct way to load + # weighted event data. + # Consult the McStas documentation + # (section 2.2.1) https://www.mcstas.org/documentation/manual/ + # for more information. + sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), + ) + for i, label in enumerate(data.attrs["ylabel"].decode().strip().split(' ')): + if label == 'p': + continue + da.coords[label] = sc.array(dims=['events'], values=events[:, i]) + else: + weights = events[:, 0] + total_weight = weights.sum() + nevents_to_sample = round(events_to_sample_per_unit_weight * total_weight) + inds = np.random.choice( + np.arange(len(weights)), + nevents_to_sample, + p=weights / total_weight, + ) + da = sc.DataArray( + sc.ones(dims=['events'], shape=(nevents_to_sample,), with_variances=True), + ) + for i, label in enumerate(data.attrs["ylabel"].decode().strip().split(' ')): + if label == 'p': + continue + da.coords[label] = sc.array(dims=['events'], values=events[inds, i]) + for name, value in data.attrs.items(): da.coords[name] = sc.scalar(value.decode()) - for i, label in enumerate(data.attrs["ylabel"].decode().strip().split(' ')): - if label == 'p': - continue - da.coords[label] = sc.array(dims=['events'], values=events[:, i]) for k, v in params.items(): v = v[0] if isinstance(v, bytes): v = v.decode() da.coords[k] = sc.scalar(v) return da + + +def load_mcstas_provider( + filename: Filename[RunType], + sample_rotation_offset: SampleRotationOffset[RunType], +) -> RawDetector[RunType]: + """See :py:`load_mcstas`.""" + da = load_mcstas(filename) + da.coords['sample_rotation'] += sample_rotation_offset.to( + unit=da.coords['sample_rotation'].unit + ) + return RawDetector[RunType](da) + + +def load_mcstas( + filename: str | pathlib.Path | sc.DataArray, +) -> sc.DataArray: + """ + Load event data from a McStas run and reshape it + to look like what we would expect if + we loaded data from a real experiment file. 
+ """ + if isinstance(filename, sc.DataArray): + da = filename + elif h5py.is_hdf5(filename): + with h5py.File(filename) as f: + da = parse_events_h5(f) + else: + with open(filename) as f: + da = parse_events_ascii(f) + + da.coords['sample_rotation'] = sc.scalar( + float(da.coords['omegaa'].value), unit='deg' + ) + da.coords['detector_rotation'] = 2 * da.coords['sample_rotation'] + sc.scalar( + 1.65, unit='deg' + ) + + nblades = DETECTOR_BANK_SIZES['multiblade_detector']['blade'] + nwires = DETECTOR_BANK_SIZES['multiblade_detector']['wire'] + nstrips = DETECTOR_BANK_SIZES['multiblade_detector']['strip'] + xbins = sc.linspace('x', -0.25, 0.25, nblades * nwires + 1) + ybins = sc.linspace('y', -0.13, 0.13, nstrips + 1) + da = da.bin(y=ybins, x=xbins).rename_dims({'y': 'strip'}) + da.coords['strip'] = sc.arange('strip', 0, nstrips) + da.coords['z_index'] = sc.arange('x', nblades * nwires - 1, -1, -1) + + # Information is not available in the mcstas output files, therefore it's hardcoded + da.coords['sample_position'] = sc.vector([0.264298, -0.427595, 35.0512], unit='m') + da.coords['source_position'] = sc.vector([0, 0, 0.0], unit='m') + da.coords['detector_position'] = sc.vector( + tuple(map(float, da.coords['position'].value.split(' '))), unit='m' + ) + + rotation_by_detector_rotation = sc.spatial.rotation( + value=[ + sc.scalar(0.0), + sc.sin(da.coords['detector_rotation'].to(unit='rad')), + sc.scalar(0.0), + sc.cos(da.coords['detector_rotation'].to(unit='rad')), + ] + ) + + position = sc.spatial.as_vectors( + x=sc.midpoints(da.coords['x']) * sc.scalar(1.0, unit='m'), + y=sc.midpoints(da.coords['y']) * sc.scalar(1.0, unit='m'), + z=sc.scalar(0.0, unit='m'), + ).transpose(da.dims) + da.coords['position'] = ( + da.coords['detector_position'] + rotation_by_detector_rotation * position + ) + + da.bins.coords['event_time_zero'] = ( + sc.scalar(0, unit='s') * da.bins.coords['t'] + ).to(unit='ns') + da.bins.coords['event_time_offset'] = ( + sc.scalar(1, unit='s') * da.bins.coords['t'] + ).to(unit='ns') % sc.scalar(1 / 14, unit='s').to(unit='ns') + da.bins.coords['wavelength_from_mcstas'] = ( + sc.scalar(1.0, unit='angstrom') * da.bins.coords['L'] + ) + + da.coords["sample_size"] = sc.scalar(1.0, unit='m') * float( + da.coords['sample_length'].value + ) + da.coords["beam_size"] = sc.scalar(2.0, unit='mm') + + da = da.fold( + 'x', + sizes={ + k: v + for k, v in DETECTOR_BANK_SIZES['multiblade_detector'].items() + if k in ('blade', 'wire') + }, + ) + da.bins.coords.pop('L') + da.bins.coords.pop('t') + return da + + +def load_sample_rotation(da: RawDetector[RunType]) -> RawSampleRotation[RunType]: + return da.coords['sample_rotation'] + + +def load_detector_rotation( + da: RawDetector[RunType], +) -> DetectorRotation[RunType]: + return da.coords['detector_rotation'] + + +def load_source_position( + da: RawDetector[RunType], +) -> Position[NXsource, RunType]: + return da.coords['source_position'] + + +def load_sample_position( + da: RawDetector[RunType], +) -> Position[NXsample, RunType]: + return da.coords['sample_position'] + + +def detector_ltotal_from_raw( + da: RawDetector[RunType], graph: CoordTransformationGraph[RunType] +) -> DetectorLtotal[RunType]: + return da.transform_coords( + ['Ltotal'], + graph=graph, + ).coords['Ltotal'] + + +def mcstas_wavelength_coordinate_transformation_graph( + source_position: Position[NXsource, RunType], + sample_position: Position[NXsample, RunType], + sample_rotation: SampleRotation[RunType], + detector_rotation: DetectorRotation[RunType], + detector_bank_sizes: 
DetectorBankSizes, +) -> CoordTransformationGraph[RunType]: + return { + **coordinate_transformation_graph( + source_position, + sample_position, + sample_rotation, + detector_rotation, + detector_bank_sizes, + ), + "wavelength": lambda wavelength_from_mcstas: wavelength_from_mcstas, + } + + +providers = ( + load_mcstas_provider, + load_sample_position, + load_source_position, + load_detector_rotation, + load_sample_rotation, + detector_ltotal_from_raw, +) diff --git a/src/ess/estia/normalization.py b/src/ess/estia/normalization.py index 939704e5..82c99def 100644 --- a/src/ess/estia/normalization.py +++ b/src/ess/estia/normalization.py @@ -14,6 +14,7 @@ ReducedReference, ReducibleData, Reference, + ReferenceRun, Sample, SampleRun, ) @@ -66,7 +67,7 @@ def mask_events_where_supermirror_does_not_cover( def evaluate_reference( reference: ReducedReference, sample: ReducibleData[SampleRun], - graph: CoordTransformationGraph, + graph: CoordTransformationGraph[ReferenceRun], detector_spatial_resolution: DetectorSpatialResolution[SampleRun], ) -> Reference: """ diff --git a/src/ess/estia/workflow.py b/src/ess/estia/workflow.py index c1bcf5a2..afc79eb9 100644 --- a/src/ess/estia/workflow.py +++ b/src/ess/estia/workflow.py @@ -13,7 +13,16 @@ RunType, SampleRotationOffset, ) -from . import beamline, conversions, corrections, load, maskings, normalization, orso +from . import ( + beamline, + conversions, + corrections, + load, + maskings, + mcstas, + normalization, + orso, +) _general_providers = ( *reflectometry_providers, @@ -22,12 +31,12 @@ *maskings.providers, *normalization.providers, *orso.providers, + *load.providers, ) mcstas_providers = ( *_general_providers, - *load.providers, - load.load_mcstas_events, + *mcstas.providers, ) """List of providers for setting up a Sciline pipeline for McStas data. 
@@ -60,6 +69,7 @@ def mcstas_default_parameters() -> dict: def default_parameters() -> dict: return { NeXusDetectorName: "multiblade_detector", + SampleRotationOffset[RunType]: sc.scalar(0.0, unit='deg'), } diff --git a/src/ess/offspec/conversions.py b/src/ess/offspec/conversions.py index 5534e8cf..cc373068 100644 --- a/src/ess/offspec/conversions.py +++ b/src/ess/offspec/conversions.py @@ -3,10 +3,10 @@ from scippneutron.conversion.graph import beamline, tof from ..reflectometry.types import ( + CoordTransformationGraph, ReferenceRun, SampleRun, ) -from .types import CoordTransformationGraph def coordinate_transformation_graph_sample() -> CoordTransformationGraph[SampleRun]: diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py index b98a0f75..c617a580 100644 --- a/src/ess/offspec/load.py +++ b/src/ess/offspec/load.py @@ -2,8 +2,14 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc -from ..reflectometry.types import Filename, RawDetector, ReferenceRun, RunType -from .types import CoordTransformationGraph, MonitorData, NeXusMonitorName +from ..reflectometry.types import ( + CoordTransformationGraph, + Filename, + RawDetector, + ReferenceRun, + RunType, +) +from .types import MonitorData, NeXusMonitorName def load_offspec_events( diff --git a/src/ess/offspec/types.py b/src/ess/offspec/types.py index 4bb4cf89..8f6e0966 100644 --- a/src/ess/offspec/types.py +++ b/src/ess/offspec/types.py @@ -14,10 +14,6 @@ BackgroundMinWavelength = NewType("BackgroundMinWavelength", sc.Variable) -class CoordTransformationGraph(sciline.Scope[RunType, dict], dict): - """Coordinate transformation for the runtype""" - - class MonitorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """ "Monitor data from the run file, with background subtracted""" diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py index 741ec6c9..499b083a 100644 --- a/src/ess/offspec/workflow.py +++ b/src/ess/offspec/workflow.py @@ -4,6 +4,7 @@ from ..reflectometry import providers as reflectometry_providers from ..reflectometry.types import ( + CoordTransformationGraph, RawDetector, ReducibleData, ReferenceRun, @@ -16,7 +17,6 @@ from .maskings import add_masks from .types import ( BackgroundMinWavelength, - CoordTransformationGraph, MonitorData, NeXusMonitorName, SpectrumLimits, diff --git a/src/ess/reflectometry/conversions.py b/src/ess/reflectometry/conversions.py index 7c05b278..d2866394 100644 --- a/src/ess/reflectometry/conversions.py +++ b/src/ess/reflectometry/conversions.py @@ -64,18 +64,4 @@ def add_proton_current_mask(da: sc.DataArray) -> sc.DataArray: return da -def add_coords( - da: sc.DataArray, - graph: dict, -) -> sc.DataArray: - "Adds scattering coordinates to the raw detector data." 
- return da.transform_coords( - ("wavelength", "theta", "divergence_angle", "Q", "L1", "L2"), - graph, - rename_dims=False, - keep_intermediate=False, - keep_aliases=False, - ) - - providers = () diff --git a/src/ess/reflectometry/types.py b/src/ess/reflectometry/types.py index 51c6cad3..905f48e0 100644 --- a/src/ess/reflectometry/types.py +++ b/src/ess/reflectometry/types.py @@ -29,7 +29,8 @@ DetectorLtotal = tof_t.DetectorLtotal -CoordTransformationGraph = NewType("CoordTransformationGraph", dict) +class CoordTransformationGraph(sciline.Scope[RunType, dict], dict): + pass class RawChopper(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup): diff --git a/tests/estia/mcstas_data_test.py b/tests/estia/mcstas_data_test.py index acd586af..415c1db0 100644 --- a/tests/estia/mcstas_data_test.py +++ b/tests/estia/mcstas_data_test.py @@ -12,13 +12,12 @@ from orsopy import fileio from ess.estia import EstiaMcStasWorkflow -from ess.estia.conversions import mcstas_wavelength_coordinate_transformation_graph from ess.estia.data import ( estia_mcstas_reference_run, estia_mcstas_sample_run, estia_tof_lookup_table, ) -from ess.estia.load import load_mcstas_events +from ess.estia.mcstas import mcstas_wavelength_coordinate_transformation_graph from ess.reflectometry import orso from ess.reflectometry.types import ( BeamDivergenceLimits, @@ -39,7 +38,6 @@ @pytest.fixture def estia_mcstas_pipeline() -> sciline.Pipeline: wf = EstiaMcStasWorkflow() - wf.insert(load_mcstas_events) wf[Filename[ReferenceRun]] = estia_mcstas_reference_run() wf[YIndexLimits] = sc.scalar(35), sc.scalar(64) diff --git a/tools/mcstas_to_nexus.ipynb b/tools/mcstas_to_nexus.ipynb new file mode 100644 index 00000000..91c0ffc4 --- /dev/null +++ b/tools/mcstas_to_nexus.ipynb @@ -0,0 +1,239 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# McStas to NeXus for ESTIA\n", + "\n", + "This notebook converts data from a McStas simulation output (`.h5` filetype) to a NeXus file that uses a file for the Estia instrument (written by CODA) as a template for the geometry information.\n", + "\n", + "It adds\n", + "\n", + "* events to the `multiblade_detector` detector,\n", + "* a single value to the `proton_current` log,\n", + "* a single detector rotation value to the `detector_rotation` log,\n", + "* a single sample rotation value to the `sample_rotation` log." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import scipp as sc\n", + "import h5py\n", + "import shutil\n", + "\n", + "from ess.estia.mcstas import parse_events_h5\n", + "from ess.estia.load import load_mcstas" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "def replace_dataset(entry, name, values):\n", + " attrs = dict(entry[name].attrs)\n", + " del entry[name]\n", + " dset = entry.create_dataset(name, data=values)\n", + " dset.attrs.update(attrs)\n", + "\n", + "\n", + "def mcstas_to_nexus(\n", + " mcstas_data_file: str,\n", + " template_nexus_file: str,\n", + " outfile: str,\n", + " nevents_per_weight: float = 1,\n", + "):\n", + " \"\"\"\n", + " Store the events from a McStas Estia simulation in a NeXus CODA file.\n", + "\n", + " Parameters\n", + " ----------\n", + " mcstas_data_file:\n", + " Data file containing simulated McStas events.\n", + " template_nexus_file:\n", + " NeXus file containing geometry and instrument info, used as a template.\n", + " outfile:\n", + " Output file to be written.\n", + " nevents_per_weight:\n", + " Number of events to have in the output file\n", + " per unit weight in the McStas data.\n", + " \"\"\"\n", + " detector_entry_path = '/entry/instrument/multiblade_detector'\n", + "\n", + " with h5py.File(template_nexus_file, \"r\") as f:\n", + " det_numbers = f[f\"{detector_entry_path}/detector_number\"][()]\n", + "\n", + " with h5py.File(mcstas_data_file) as f:\n", + " events = parse_events_h5(f, events_to_sample_per_unit_weight=nevents_per_weight)\n", + " binned = load_mcstas(events).transpose(['strip', 'blade', 'wire', ])\n", + "\n", + " toa = (binned.bins.coords[\"event_time_zero\"] + binned.bins.coords[\"event_time_offset\"]).bins.concat().value\n", + "\n", + " det_numbers_matching_order_of_binned = det_numbers.reshape(binned.shape)\n", + " det_numbers_matching_order_of_binned = det_numbers_matching_order_of_binned[::-1, ::-1, ::-1]\n", + " # IMPORTANT! 
we need to sort the arrays below according to toa,\n", + " # so that the event_index does not get messed up!\n", + " event_id = sc.sort(\n", + " (\n", + " sc.bins_like(binned, sc.array(dims=binned.dims, values=det_numbers_matching_order_of_binned))\n", + " .bins.concat()\n", + " .value\n", + " ),\n", + " key=toa,\n", + " )\n", + " event_time_zero = sc.sort(binned.bins.coords[\"event_time_zero\"].bins.concat().value, key=toa)\n", + " event_time_offset = sc.sort(binned.bins.coords[\"event_time_offset\"].bins.concat().value, key=toa)\n", + "\n", + " event_index = sc.DataArray(\n", + " data=sc.ones_like(event_time_offset),\n", + " coords={\"event_time_zero\": event_time_zero},\n", + " ).group(\"event_time_zero\")\n", + "\n", + " event_index = sc.cumsum(event_index.bins.size())\n", + " event_index.values = np.concatenate([[0], event_index.values[:-1]])\n", + "\n", + " shutil.copyfile(template_nexus_file, outfile)\n", + "\n", + " with h5py.File(outfile, \"r+\") as f:\n", + "\n", + " # TODO: remove this when the files is fixed\n", + " # Fix error in Nexus file\n", + " f['/entry/instrument/source/transformations/location'][()] = -35.0512\n", + "\n", + " detector_rotation = f[f\"{detector_entry_path}/transformations/detector_rotation\"]\n", + " replace_dataset(\n", + " detector_rotation,\n", + " name=\"value\",\n", + " values=[\n", + " binned\n", + " .coords['detector_rotation']\n", + " .to(unit=detector_rotation['value'].attrs['units'])\n", + " .value\n", + " ]\n", + " )\n", + " replace_dataset(\n", + " detector_rotation,\n", + " name=\"time\",\n", + " values=[\n", + " event_time_zero\n", + " .min()\n", + " .to(unit=detector_rotation['time'].attrs['units'])\n", + " .value\n", + " .astype('uint64')\n", + " ]\n", + " )\n", + "\n", + " sample = f[\"entry/sample\"]\n", + " sample_rotation = sample.create_group('sample_rotation')\n", + " sample_rotation.attrs['NX_class'] = 'NXlog'\n", + " sample_rotation.create_dataset(\n", + " 'value',\n", + " data=[\n", + " binned\n", + " .coords['sample_rotation']\n", + " .to(unit='deg')\n", + " .value\n", + " .astype('float64')\n", + " ],\n", + " )\n", + " sample_rotation['value'].attrs['units'] = 'degrees'\n", + " sample_rotation.create_dataset(\n", + " 'time',\n", + " data=[\n", + " event_time_zero\n", + " .min()\n", + " .to(unit='ns')\n", + " .value\n", + " .astype('uint64')\n", + " ],\n", + " )\n", + " sample_rotation['time'].attrs['units'] = 'ns'\n", + "\n", + " f[\"entry/neutron_prod_info/pulse_charge/value\"][:] = 1.\n", + "\n", + " event_data = f[f\"{detector_entry_path}/event_data\"]\n", + " replace_dataset(event_data, name=\"event_id\", values=event_id.values.astype('uint32'))\n", + " replace_dataset(\n", + " event_data,\n", + " name=\"event_time_offset\",\n", + " values=event_time_offset.to(\n", + " unit=event_data[\"event_time_offset\"].attrs[\"units\"], copy=False\n", + " ).values.astype('uint32'),\n", + " )\n", + " replace_dataset(event_data, name=\"event_index\", values=event_index.values.astype('uint64'))\n", + " replace_dataset(\n", + " event_data,\n", + " name=\"event_time_zero\",\n", + " values=event_index.coords[\"event_time_zero\"]\n", + " .to(unit=event_data[\"event_time_zero\"].attrs[\"units\"], copy=False)\n", + " .values.astype('uint64'),\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "### Generate nexus files from a Nexus template and McStas events" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "from 
ess.estia.data import estia_mcstas_example\n", + "\n", + "\n", + "mcstas_to_nexus(\n", + " mcstas_data_file=estia_mcstas_example('reference'),\n", + " template_nexus_file=\"/home/johannes/Downloads/estia_999999_00008786.hdf\",\n", + " outfile='reference.nx',\n", + " nevents_per_weight=1,\n", + ")\n", + "\n", + "for i, example in enumerate(estia_mcstas_example('Ni/Ti-multilayer')):\n", + " mcstas_to_nexus(\n", + " mcstas_data_file=example,\n", + " template_nexus_file=\"/home/johannes/Downloads/estia_999999_00008786.hdf\",\n", + " outfile=f'niti_ml_{i}.nx',\n", + " nevents_per_weight=1,\n", + " )" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}
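The reworked `coordinate_transformation_graph` providers for Amor and Estia move run-level parameters (sample and detector rotation, chopper settings, beam and sample size) out of the event loaders and into the coordinate-transformation graph, where zero-argument entries inject them as coordinates during `transform_coords`. Below is a minimal sketch of that pattern; the `two_theta` input coordinate and all numerical values are invented for illustration and are not part of the real workflow.

```python
import scipp as sc

# Toy event data; 'two_theta' stands in for whatever per-event geometry
# the real graphs start from (an assumption for illustration only).
da = sc.DataArray(
    sc.ones(dims=['event'], shape=(3,)),
    coords={
        'two_theta': sc.array(dims=['event'], values=[0.8, 1.0, 1.2], unit='deg')
    },
)

# Run-level parameter that previously had to be attached as a coord at load time.
sample_rotation = sc.scalar(0.4, unit='deg')

graph = {
    # A zero-argument entry injects the run parameter as a coordinate ...
    'sample_rotation': lambda: sample_rotation.to(unit='rad'),
    # ... which downstream entries consume like any other coordinate.
    'theta': lambda two_theta, sample_rotation: (
        two_theta.to(unit='rad') - sample_rotation
    ),
}

out = da.transform_coords(
    ('theta', 'sample_rotation'),
    graph=graph,
    rename_dims=False,
    keep_intermediate=False,
    keep_aliases=False,
)
print(out.coords['theta'].values)
```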
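`parse_events_h5` now offers two ways of turning weighted McStas rays into scipp events: keep the weights and set the variance of each event to `weight**2` (McStas manual, section 2.2.1), or, when `events_to_sample_per_unit_weight` is given, draw unweighted events with probability proportional to the weights. The sketch below shows both modes on synthetic weights; the exponential draw is only a stand-in for the McStas `p` column.

```python
import numpy as np
import scipp as sc

rng = np.random.default_rng(seed=0)
weights = rng.exponential(size=1000)  # stand-in for the McStas 'p' column

# Mode 1: keep the weights; the variance of a weighted event is weight**2.
weighted = sc.DataArray(
    sc.array(dims=['events'], values=weights, variances=weights**2)
)

# Mode 2: resample to unweighted events, each with weight 1 and variance 1,
# drawn with probability proportional to the original weights.
events_to_sample_per_unit_weight = 2.0
total_weight = weights.sum()
nevents = round(events_to_sample_per_unit_weight * total_weight)
inds = rng.choice(np.arange(len(weights)), nevents, p=weights / total_weight)
unweighted = sc.DataArray(
    sc.ones(dims=['events'], shape=(nevents,), with_variances=True)
)
# Event coordinates (t, x, y, L, ...) would be gathered with the same `inds`.

# Both totals estimate the same intensity, up to the sampling factor.
print(weighted.sum().value,
      unweighted.sum().value / events_to_sample_per_unit_weight)
```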
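In `mcstas_to_nexus`, the `event_index` field of `NXevent_data` must point at the first event of each pulse, which is why every per-event array is sorted by arrival time before grouping. Here is a small sketch of the same construction on toy values (two pulses, invented times), under the same assumption that offsets are smaller than the pulse spacing.

```python
import numpy as np
import scipp as sc

# Toy pulse times and intra-pulse offsets (unsorted).
event_time_zero = sc.array(dims=['event'], values=[100, 0, 0, 100, 0], unit='ns')
event_time_offset = sc.array(dims=['event'], values=[5, 3, 9, 1, 4], unit='ns')
toa = event_time_zero + event_time_offset

# Sort every per-event array by arrival time so that event_index stays monotonic.
event_time_zero = sc.sort(event_time_zero, key=toa)
event_time_offset = sc.sort(event_time_offset, key=toa)

# Group events by pulse and take the cumulative bin sizes, shifted by one:
# event_index[i] is the index of the first event belonging to pulse i.
grouped = sc.DataArray(
    sc.ones_like(event_time_offset),
    coords={'event_time_zero': event_time_zero},
).group('event_time_zero')
event_index = sc.cumsum(grouped.bins.size())
event_index.values = np.concatenate([[0], event_index.values[:-1]])
print(event_index.values)  # -> [0 3]
```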