Skip to content
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -628,7 +628,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.14"
"version": "3.12.12"
}
},
"nbformat": 4,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
"id": "0",
"metadata": {},
"source": [
"# Create a time-of-flight lookup table for DREAM\n",
"# Create a wavelength lookup table for DREAM\n",
"\n",
"This notebook shows how to create a time-of-flight lookup table for the DREAM instrument."
"This notebook shows how to create a wavelength lookup table for the DREAM instrument."
]
},
{
Expand All @@ -18,7 +18,7 @@
"outputs": [],
"source": [
"import scipp as sc\n",
"from ess.reduce import time_of_flight\n",
"from ess.reduce import unwrap\n",
"from ess.reduce.nexus.types import AnyRun\n",
"from ess.dream.beamline import InstrumentConfiguration, choppers"
]
Expand All @@ -40,7 +40,7 @@
"metadata": {},
"outputs": [],
"source": [
"disk_choppers = choppers(InstrumentConfiguration.high_flux_BC215)"
"disk_choppers = choppers(InstrumentConfiguration.high_flux_BC240)"
]
},
{
Expand All @@ -60,17 +60,17 @@
"metadata": {},
"outputs": [],
"source": [
"wf = time_of_flight.TofLookupTableWorkflow()\n",
"wf = unwrap.LookupTableWorkflow()\n",
"\n",
"wf[time_of_flight.LtotalRange] = sc.scalar(5.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")\n",
"wf[time_of_flight.NumberOfSimulatedNeutrons] = 200_000 # Increase this number for more reliable results\n",
"wf[time_of_flight.SourcePosition] = sc.vector([0, 0, 0], unit='m')\n",
"wf[time_of_flight.DiskChoppers[AnyRun]] = disk_choppers\n",
"wf[time_of_flight.DistanceResolution] = sc.scalar(0.1, unit=\"m\")\n",
"wf[time_of_flight.TimeResolution] = sc.scalar(250.0, unit='us')\n",
"wf[time_of_flight.PulsePeriod] = 1.0 / sc.scalar(14.0, unit=\"Hz\")\n",
"wf[time_of_flight.PulseStride] = 1\n",
"wf[time_of_flight.PulseStrideOffset] = None"
"wf[unwrap.LtotalRange] = sc.scalar(5.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")\n",
"wf[unwrap.NumberOfSimulatedNeutrons] = 200_000 # Increase this number for more reliable results\n",
"wf[unwrap.SourcePosition] = sc.vector([0, 0, 0], unit='m')\n",
"wf[unwrap.DiskChoppers[AnyRun]] = disk_choppers\n",
"wf[unwrap.DistanceResolution] = sc.scalar(0.1, unit=\"m\")\n",
"wf[unwrap.TimeResolution] = sc.scalar(250.0, unit='us')\n",
"wf[unwrap.PulsePeriod] = 1.0 / sc.scalar(14.0, unit=\"Hz\")\n",
"wf[unwrap.PulseStride] = 1\n",
"wf[unwrap.PulseStrideOffset] = None"
]
},
{
Expand All @@ -88,7 +88,7 @@
"metadata": {},
"outputs": [],
"source": [
"table = wf.compute(time_of_flight.TimeOfFlightLookupTable)\n",
"table = wf.compute(unwrap.LookupTable)\n",
"table.array"
]
},
Expand Down Expand Up @@ -117,7 +117,7 @@
"metadata": {},
"outputs": [],
"source": [
"table.save_hdf5('DREAM-high-flux-tof-lut-5m-80m.h5')"
"table.save_hdf5('DREAM-high-flux-wavelength-lut-5m-80m-bc240.h5')"
]
}
],
Expand Down
36 changes: 17 additions & 19 deletions docs/user-guide/dream/dream-powder-reduction.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
"metadata": {},
"outputs": [],
"source": [
"workflow = dream.DreamGeant4Workflow(\n",
"wf = dream.DreamGeant4Workflow(\n",
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why? I find this less readable and normally avoid wf in documentation. (Except apparently in the lookup table notebook)

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I thought we had wf in most other places. I find it much easier to type, but maybe it is less readable?

" run_norm=powder.RunNormalization.monitor_histogram,\n",
")"
]
Expand All @@ -77,26 +77,26 @@
"metadata": {},
"outputs": [],
"source": [
"workflow[Filename[SampleRun]] = dream.data.simulated_diamond_sample()\n",
"workflow[Filename[VanadiumRun]] = dream.data.simulated_vanadium_sample()\n",
"workflow[Filename[EmptyCanRun]] = dream.data.simulated_empty_can()\n",
"workflow[CalibrationFilename] = None\n",
"wf[Filename[SampleRun]] = dream.data.simulated_diamond_sample()\n",
"wf[Filename[VanadiumRun]] = dream.data.simulated_vanadium_sample()\n",
"wf[Filename[EmptyCanRun]] = dream.data.simulated_empty_can()\n",
"wf[CalibrationFilename] = None\n",
"\n",
"workflow[MonitorFilename[SampleRun]] = dream.data.simulated_monitor_diamond_sample()\n",
"workflow[MonitorFilename[VanadiumRun]] = dream.data.simulated_monitor_vanadium_sample()\n",
"workflow[MonitorFilename[EmptyCanRun]] = dream.data.simulated_monitor_empty_can()\n",
"workflow[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit=\"mm\")\n",
"wf[MonitorFilename[SampleRun]] = dream.data.simulated_monitor_diamond_sample()\n",
"wf[MonitorFilename[VanadiumRun]] = dream.data.simulated_monitor_vanadium_sample()\n",
"wf[MonitorFilename[EmptyCanRun]] = dream.data.simulated_monitor_empty_can()\n",
"wf[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit=\"mm\")\n",
"\n",
"workflow[dream.InstrumentConfiguration] = dream.InstrumentConfiguration.high_flux_BC215\n",
"wf[dream.InstrumentConfiguration] = dream.InstrumentConfiguration.high_flux_BC215\n",
"# Select a detector bank:\n",
"workflow[NeXusDetectorName] = \"mantle\"\n",
"wf[NeXusDetectorName] = \"mantle\"\n",
"# We drop uncertainties where they would otherwise lead to correlations:\n",
"workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n",
"wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n",
"# Edges for binning in d-spacing:\n",
"workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.3, 2.3434, 201, unit=\"angstrom\")\n",
"wf[DspacingBins] = sc.linspace(\"dspacing\", 0.3, 2.3434, 201, unit=\"angstrom\")\n",
"\n",
"# Do not mask any pixels / voxels:\n",
"workflow = powder.with_pixel_mask_filenames(workflow, [])"
"wf = powder.with_pixel_mask_filenames(wf, [])"
]
},
{
Expand All @@ -121,7 +121,7 @@
"metadata": {},
"outputs": [],
"source": [
"results = workflow.compute([\n",
"results = wf.compute([\n",
" EmptyCanSubtractedIofDspacing,\n",
" ReducedEmptyCanSubtractedTofCIF\n",
"])\n",
Expand All @@ -145,9 +145,7 @@
"outputs": [],
"source": [
"histogram = intensity.hist()\n",
"fig = histogram.plot(title=intensity.coords['detector'].value.capitalize())\n",
"fig.ax.set_ylabel(f\"I(d) [{histogram.unit}]\")\n",
"fig"
"histogram.plot(title=intensity.coords['detector'].value.capitalize(), ylabel=f\"I(d) [{histogram.unit}]\")"
]
},
{
Expand Down Expand Up @@ -229,7 +227,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.10"
"version": "3.12.12"
}
},
"nbformat": 4,
Expand Down
8 changes: 6 additions & 2 deletions src/ess/beer/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import scipp as sc

from ess.reduce.nexus.types import Filename, RawDetector, RunType, SampleRun
from ess.reduce.time_of_flight.types import TofDetector


class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
Expand All @@ -24,7 +23,6 @@ class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
RawDetector = RawDetector
Filename = Filename
SampleRun = SampleRun
TofDetector = TofDetector


class DetectorBank(Enum):
Expand Down Expand Up @@ -58,3 +56,9 @@ class DetectorBank(Enum):

CIFPeaksMinIntensity = NewType("CIFPeaksMinIntensity", sc.Variable)
"""Minimum peak intensity for peaks from CIF file to be included in :py:`DHKLList`."""


class TofDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This type no longer exists in essreduce.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are you leaving the beer workflow for later or does it need to continue using tof?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We are leaving it for later. Sorry, I forgot to mention that.

"""
Detector with a time-of-flight coordinate
"""
36 changes: 36 additions & 0 deletions src/ess/dream/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,9 @@
# `shrink_nexus.py` script in the `tools` folder at the top level of the
# `essdiffraction` repository.
"TEST_DREAM_nexus_sorted-2023-12-07.nxs": "md5:599b426a93c46a7b4b09a874bf288c53", # noqa: E501
# Wavelength lookup tables
"DREAM-high-flux-wavelength-lut-5m-80m-bc215.h5": "md5:10c80c9de311cfa246f7b2c165eb0b49", # noqa: E501
"DREAM-high-flux-wavelength-lut-5m-80m-bc240.h5": "md5:9741176f8da9b34c2a15967a43e21462", # noqa: E501
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you remove the tof lookup tables from the registry and their associated functions?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I thought I would leave them in for backward compatibility, but then this is a very breaking change, so maybe we cannot be backward compatible?

I kept them in essimaging when I made my changes...

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think we can still use the old tables. The workflow has changed too much and we no longer have the right providers. So I'd just remove the tables.

},
)

Expand Down Expand Up @@ -293,3 +296,36 @@ def tof_lookup_table_high_flux(bc: Literal[215, 240] = 215) -> Path:
return get_path("DREAM-high-flux-tof-lut-5m-80m-bc240.h5")
case _:
raise ValueError(f"Unsupported band-control chopper (BC) value: {bc}")


def lookup_table_high_flux(bc: Literal[215, 240] = 215) -> Path:
    """Path to a HDF5 file containing a wavelength lookup table for high-flux mode.

    The table was created using the ``tof`` package and the chopper settings for the
    DREAM instrument in high-flux mode.
    Tables are available for two band-control chopper (BC) settings:

    - ``bc=215``: matches the chopper settings used in the tutorial data.
    - ``bc=240``: a setting with less time overlap between frames.

    The Geant4 simulation that produced the data used in the documentation
    notebooks ran with the band-control chopper (BCC) phased at 215 degrees.
    That phase was later found to be non-optimal (it causes time overlap between
    the two frames); 240 degrees is now the recommended value.

    This table was computed using `Create a wavelength lookup table for DREAM
    <../../user-guide/dream/dream-make-wavelength-lookup-table.rst>`_
    with ``NumberOfSimulatedNeutrons = 5_000_000``.

    Parameters
    ----------
    bc:
        Band-control chopper (BC) setting. The default is 215, which corresponds to
        the settings of the choppers in the tutorial data.

    Raises
    ------
    ValueError
        If ``bc`` is neither 215 nor 240.
    """
    if bc == 215:
        return get_path("DREAM-high-flux-wavelength-lut-5m-80m-bc215.h5")
    if bc == 240:
        return get_path("DREAM-high-flux-wavelength-lut-5m-80m-bc240.h5")
    raise ValueError(f"Unsupported band-control chopper (BC) value: {bc}")
22 changes: 11 additions & 11 deletions src/ess/dream/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,21 +18,21 @@
CaveMonitorPosition, # Should this be a DREAM-only parameter?
EmptyCanRun,
KeepEvents,
LookupTableFilename,
LookupTableRelativeErrorThreshold,
Measurement,
PixelMaskFilename,
Position,
ReducerSoftware,
SampleRun,
TimeOfFlightLookupTableFilename,
TofMask,
TwoThetaMask,
VanadiumRun,
WavelengthMask,
)
from ess.reduce.nexus.types import DetectorBankSizes, NeXusName
from ess.reduce.parameter import parameter_mappers
from ess.reduce.time_of_flight import GenericTofWorkflow
from ess.reduce.unwrap import GenericUnwrapWorkflow
from ess.reduce.workflow import register_workflow

from .beamline import InstrumentConfiguration
Expand Down Expand Up @@ -73,24 +73,24 @@

def _get_lookup_table_filename_from_configuration(
configuration: InstrumentConfiguration,
) -> TimeOfFlightLookupTableFilename:
from .data import tof_lookup_table_high_flux
) -> LookupTableFilename:
from .data import lookup_table_high_flux

match configuration:
case InstrumentConfiguration.high_flux_BC215:
out = tof_lookup_table_high_flux(bc=215)
out = lookup_table_high_flux(bc=215)
case InstrumentConfiguration.high_flux_BC240:
out = tof_lookup_table_high_flux(bc=240)
out = lookup_table_high_flux(bc=240)
case InstrumentConfiguration.high_resolution:
raise NotImplementedError("High resolution configuration not yet supported")

return TimeOfFlightLookupTableFilename(out)
return LookupTableFilename(out)


def _collect_reducer_software() -> ReducerSoftware:
return ReducerSoftware(
[
Software.from_package_metadata('essdiffraction'),
# Software.from_package_metadata('essdiffraction'),
Software.from_package_metadata('scippneutron'),
Software.from_package_metadata('scipp'),
]
Expand All @@ -100,7 +100,7 @@ def _collect_reducer_software() -> ReducerSoftware:
def DreamWorkflow(**kwargs) -> sciline.Pipeline:
"""
Dream generic workflow with default parameters.
The workflow is based on the GenericTofWorkflow.
The workflow is based on the GenericUnwrapWorkflow.
It can load data from a NeXus file recorded on the DREAM instrument, and can
compute time-of-flight for the neutron events.

Expand All @@ -111,9 +111,9 @@ def DreamWorkflow(**kwargs) -> sciline.Pipeline:
----------
kwargs:
Additional keyword arguments are forwarded to the base
:func:`GenericTofWorkflow`.
:func:`GenericUnwrapWorkflow`.
"""
wf = GenericTofWorkflow(
wf = GenericUnwrapWorkflow(
run_types=[SampleRun, VanadiumRun, EmptyCanRun],
monitor_types=[BunkerMonitor, CaveMonitor],
**kwargs,
Expand Down
Loading
Loading