Skip to content

Commit

Permalink
Merge pull request #31 from scipp/orso-corrections
Browse files Browse the repository at this point in the history
Track ORSO corrections
  • Loading branch information
jl-wynen authored Mar 4, 2024
2 parents b99fbd6 + c826b4d commit ac77b12
Show file tree
Hide file tree
Showing 10 changed files with 159 additions and 85 deletions.
18 changes: 17 additions & 1 deletion docs/examples/amor.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Unfortunately, some metadata could not be determined autoamtically.\n",
"Unfortunately, some metadata could not be determined automatically.\n",
"In particular, we need to specify the sample manually:"
]
},
Expand Down Expand Up @@ -230,6 +230,22 @@
"iofq_dataset.info.reduction.script = 'https://scipp.github.io/essreflectometry/examples/amor.html'"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"To support tracking provenance, we also list the corrections that were done by the workflow and store them in the dataset:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"iofq_dataset.info.reduction.corrections = orso.find_corrections(metadata_pipeline.get(orso.OrsoIofQDataset))"
]
},
{
"cell_type": "markdown",
"metadata": {},
Expand Down
34 changes: 13 additions & 21 deletions src/essreflectometry/amor/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,12 @@
import scippnexus as snx

from ..logging import get_logger
from ..types import Filename, RawData, RawEvents, Run
from ..types import ChopperCorrectedTofEvents, Filename, RawData, RawEvents, Run
from .data import get_path
from .types import BeamlineParams


def _tof_correction(data: sc.DataArray, dim: str = 'tof') -> sc.DataArray:
def chopper_tof_correction(data: RawEvents[Run]) -> ChopperCorrectedTofEvents[Run]:
"""
A correction for the presence of the chopper with respect to the "true" ToF.
Also fold the two pulses.
Expand All @@ -22,17 +22,13 @@ def _tof_correction(data: sc.DataArray, dim: str = 'tof') -> sc.DataArray:
----------
data:
Input data array to correct.
dim:
Name of the time of flight dimension.
Returns
-------
:
ToF corrected data array.
"""
# TODO
# if 'orso' in data.attrs:
# data.attrs['orso'].value.reduction.corrections += ['chopper ToF correction']
dim = 'tof'
tof_unit = data.bins.coords[dim].bins.unit
tau = sc.to_unit(
1 / (2 * data.coords['source_chopper_2'].value['frequency'].data),
Expand All @@ -48,7 +44,15 @@ def _tof_correction(data: sc.DataArray, dim: str = 'tof') -> sc.DataArray:
# Apply the offset on both bins
data.bins.coords[dim] += offset
# Rebin to exclude second (empty) pulse range
return data.bin({dim: sc.concat([0.0 * sc.units.us, tau], dim)})
data = data.bin({dim: sc.concat([0.0 * sc.units.us, tau], dim)})

# Ad-hoc correction described in
# https://scipp.github.io/ess/instruments/amor/amor_reduction.html
data.coords['position'].fields.y += data.coords['position'].fields.z * sc.tan(
2.0 * data.coords['sample_rotation'] - (0.955 * sc.units.deg)
)

return ChopperCorrectedTofEvents[Run](data)


def _assemble_event_data(dg: sc.DataGroup) -> sc.DataArray:
Expand Down Expand Up @@ -141,19 +145,7 @@ def extract_events(
for key, value in beamline.items():
data.coords[key] = value

# if orso is not None:
# populate_orso(orso=orso, data=full_data, filename=filename)
# data.attrs['orso'] = sc.scalar(orso)

# Perform tof correction and fold two pulses
data = _tof_correction(data)

# Ad-hoc correction described in
# https://scipp.github.io/ess/instruments/amor/amor_reduction.html
data.coords['position'].fields.y += data.coords['position'].fields.z * sc.tan(
2.0 * data.coords['sample_rotation'] - (0.955 * sc.units.deg)
)
return RawEvents[Run](data)


providers = (extract_events, load_raw_nexus)
providers = (extract_events, load_raw_nexus, chopper_tof_correction)
2 changes: 1 addition & 1 deletion src/essreflectometry/amor/orso.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
"""ORSO utilities for Amor."""
from typing import Optional

Expand Down
2 changes: 0 additions & 2 deletions src/essreflectometry/calibrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
import scipp as sc

from . import supermirror

# from ..reflectometry import orso
from .types import QBins


Expand Down
34 changes: 2 additions & 32 deletions src/essreflectometry/conversions.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,12 @@
from scippneutron._utils import elem_dtype, elem_unit
from scippneutron.conversion.graph import beamline, tof

# from . import orso
from .types import (
ChopperCorrectedTofEvents,
FootprintCorrectedData,
HistogrammedQData,
QBins,
QData,
RawEvents,
Run,
SpecularReflectionCoordTransformGraph,
ThetaData,
Expand Down Expand Up @@ -119,7 +118,7 @@ def specular_reflection() -> SpecularReflectionCoordTransformGraph:


def tof_to_wavelength(
data_array: RawEvents[Run],
data_array: ChopperCorrectedTofEvents[Run],
graph: SpecularReflectionCoordTransformGraph,
wavelength_edges: Optional[WavelengthEdges],
) -> WavelengthData[Run]:
Expand All @@ -144,23 +143,6 @@ def tof_to_wavelength(
data_array_wav = data_array.transform_coords(["wavelength"], graph=graph)
if wavelength_edges is not None:
data_array_wav = data_array_wav.bin({wavelength_edges.dim: wavelength_edges})
# TODO
# try:
# from orsopy import fileio

# unit = data_array_wav.coords['wavelength'].unit
# # This insures that when the unit is Å it is written as
# # angstrom in the ORSO object.
# if unit == 'angstrom':
# unit = 'angstrom'
# orso_measurement = data_array_wav.attrs['orso'].value.data_source.measurement
# orso_measurement.instrument_settings.wavelength = fileio.base.ValueRange(
# float(data_array_wav.coords['wavelength'].min().value),
# float(data_array_wav.coords['wavelength'].max().value),
# unit,
# )
# except ImportError:
# orso.not_found_warning()
return WavelengthData[Run](data_array_wav)


Expand All @@ -185,18 +167,6 @@ def wavelength_to_theta(
New data array with theta coordinate.
"""
data_array_theta = data_array.transform_coords(['theta'], graph=graph)
# TODO
# try:
# from orsopy import fileio

# orso_measurement = data_array_theta.attrs['orso'].value.data_source.measurement
# orso_measurement.instrument_settings.incident_angle = fileio.base.ValueRange(
# float(data_array_theta.coords['theta'].min().value),
# float(data_array_theta.coords['theta'].max().value),
# data_array_theta.bins.coords['theta'].min().unit,
# )
# import inspect

# # Determine if 'gravity' is in the graph and if to add the gravity correction
# if any(
# [
Expand Down
13 changes: 0 additions & 13 deletions src/essreflectometry/corrections.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@

from .supermirror import SupermirrorCalibrationFactor
from .tools import fwhm_to_std

# from . import orso
from .types import (
FootprintCorrectedData,
HistogrammedQData,
Expand Down Expand Up @@ -40,12 +38,6 @@ def footprint_correction(data_array: ThetaData[Run]) -> FootprintCorrectedData[R
fwhm_to_std(data_array.coords['sample_size'] / size_of_beam_on_sample)
)
data_array_fp_correction = data_array / footprint_scale.squeeze()
# try:
# data_array_fp_correction.attrs['orso'].value.reduction.corrections += [
# 'footprint correction'
# ]
# except KeyError:
# orso.not_found_warning()
return FootprintCorrectedData[Run](data_array_fp_correction)


Expand Down Expand Up @@ -98,11 +90,6 @@ def normalize_by_counts(
f'regime. The maximum counts found is {data_array.values[ind]} at '
f'index {ind}. The total number of counts is {ncounts.value}.'
)
# TODO
# try:
# norm.attrs['orso'].value.reduction.corrections += ['total counts']
# except KeyError:
# orso.not_found_warning()
return norm


Expand Down
12 changes: 0 additions & 12 deletions src/essreflectometry/normalize.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
import scipp as sc

# from ..reflectometry import orso
from .types import IofQ, NormalizedIofQ, Reference, Sample


Expand All @@ -28,17 +27,6 @@ def normalize_by_supermirror(
"""
normalized = sample / supermirror
normalized.masks['no_reference_neutrons'] = (supermirror == sc.scalar(0)).data
# TODO
# try:
# normalized.attrs['orso'] = sample.attrs['orso']
# normalized.attrs['orso'].value.reduction.corrections = list(
# set(
# sample.attrs['orso'].value.reduction.corrections
# + supermirror.attrs['orso'].value.reduction.corrections
# )
# )
# except KeyError:
# orso.not_found_warning()
return NormalizedIofQ(normalized)


Expand Down
63 changes: 60 additions & 3 deletions src/essreflectometry/orso.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,37 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
"""ORSO utilities for reflectometry.
The Sciline providers and types in this module largely ignore the metadata
of reference runs and only use the metadata of the sample run.
"""

import graphlib
import os
import platform
from datetime import datetime, timezone
from typing import NewType, Optional
from typing import Any, NewType, Optional

from dateutil.parser import parse as parse_datetime
from orsopy.fileio import base as orso_base
from orsopy.fileio import data_source, orso, reduction

from .types import Filename, RawData, Reference, Sample
from .supermirror import SupermirrorCalibrationFactor
from .types import (
ChopperCorrectedTofEvents,
Filename,
FootprintCorrectedData,
IofQ,
RawData,
Reference,
Sample,
)

try:
from sciline.task_graph import TaskGraph
except ModuleNotFoundError:
TaskGraph = Any


OrsoCreator = NewType('OrsoCreator', orso_base.Person)
"""ORSO creator, that is, the person who processed the data."""
Expand Down Expand Up @@ -142,6 +158,47 @@ def build_orso_data_source(
)


# Maps task-graph keys (Sciline domain types) to the human-readable correction
# names recorded in `OrsoReduction.corrections`.  Only keys listed here are
# reported by `find_corrections`; corrections applied inside a provider without
# a dedicated graph key cannot be detected.
_CORRECTIONS_BY_GRAPH_KEY = {
    ChopperCorrectedTofEvents[Sample]: 'chopper ToF correction',
    FootprintCorrectedData[Sample]: 'footprint correction',
    IofQ[Sample]: 'total counts',
    SupermirrorCalibrationFactor: 'supermirror calibration',
}


def find_corrections(task_graph: TaskGraph) -> list[str]:
    """Determine the list of corrections for ORSO from a task graph.

    Checks for known keys in the graph that correspond to corrections
    that should be tracked in an ORSO output dataset.

    Bear in mind that this exclusively checks the types used as keys in a task
    graph, it cannot detect other corrections that are performed within
    providers or outside the graph.

    Parameters
    ----------
    task_graph:
        The task graph used to produce output data.

    Returns
    -------
    :
        List of corrections in the order they are applied in.
    """
    # NOTE(review): this reads the private ``TaskGraph._graph`` attribute and
    # each provider's ``arg_spec`` — confirm against the pinned sciline version.
    # Topologically sort the keys so corrections are reported in the order the
    # workflow applies them, not in arbitrary dict order.
    toposort = graphlib.TopologicalSorter(
        {
            key: tuple(provider.arg_spec.keys())
            for key, provider in task_graph._graph.items()
        }
    )
    return [
        c
        for key in toposort.static_order()
        if (c := _CORRECTIONS_BY_GRAPH_KEY.get(key, None)) is not None
    ]


providers = (
build_orso_data_source,
build_orso_measurement,
Expand Down
4 changes: 4 additions & 0 deletions src/essreflectometry/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ class RawEvents(sciline.Scope[Run, sc.DataArray], sc.DataArray):
binned by `detector_number` (pixel of the detector frame)."""


class ChopperCorrectedTofEvents(sciline.Scope[Run, sc.DataArray], sc.DataArray):
    """Event data whose ``tof`` coordinate has been corrected for the chopper.

    Produced from ``RawEvents`` by the chopper ToF correction provider.
    """


class WavelengthData(sciline.Scope[Run, sc.DataArray], sc.DataArray):
"""Event data with wavelengths computed for every event,
binned by `detector_number` (pixel of the detector frame)"""
Expand Down
Loading

0 comments on commit ac77b12

Please sign in to comment.