Skip to content
12 changes: 8 additions & 4 deletions imap_processing/ancillary/ancillary_dataset_combiner.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,13 +431,17 @@ def convert_file_to_dataset(self, filepath: str | Path) -> xr.Dataset: # noqa:
lines = [line.strip() for line in f if not line.startswith("#")]
identifiers = [line.split(" ", 1)[0] for line in lines]
values = [float(line.split(" ", 1)[1]) for line in lines]
return xr.Dataset(
ds = xr.Dataset(
{
"start_time_utc": (["time_block"], identifiers),
"cps_per_r": (["time_block"], values),
}
"cps_per_r": (["start_time_utc"], values), # floats
},
coords={
"start_time_utc": np.array(identifiers, dtype="datetime64[s]")
}, # (e.g. '2025-07-01T00:00:00')
)

return ds.sortby("start_time_utc")

elif filename.endswith(".json"):
# Handle pipeline settings JSON file using the generic read_json method
return self.convert_json_to_dataset(filepath)
Expand Down
2 changes: 1 addition & 1 deletion imap_processing/glows/l2/glows_l2.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def glows_l2(
pipeline_settings_dataset.sel(epoch=day, method="nearest")
)

l2 = HistogramL2(input_dataset, pipeline_settings)
l2 = HistogramL2(input_dataset, pipeline_settings, calibration_dataset)
if l2.number_of_good_l1b_inputs == 0:
logger.warning("No good data found in L1B dataset. Returning empty list.")
return []
Expand Down
90 changes: 80 additions & 10 deletions imap_processing/glows/l2/glows_l2_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,12 @@
frame_transform_az_el,
get_instrument_mounting_az_el,
)
from imap_processing.spice.time import met_to_sclkticks, sct_to_et
from imap_processing.spice.time import (
et_to_datetime64,
met_to_sclkticks,
sct_to_et,
ttj2000ns_to_et,
)


@dataclass
Expand Down Expand Up @@ -46,6 +51,8 @@ class DailyLightcurve:
number of bins in lightcurve
l1b_data : xarray.Dataset
L1B data filtered by good times, good angles, and good bins.
calibration_factor : float | None
Rayleigh calibration factor used for flux calculations.
"""

# All variables should have n_bin elements
Expand All @@ -55,14 +62,19 @@ class DailyLightcurve:
exposure_times: np.ndarray = field(init=False)
flux_uncertainties: np.ndarray = field(init=False)
histogram_flag_array: np.ndarray = field(init=False)
# TODO: ecliptic coordinates
ecliptic_lon: np.ndarray = field(init=False)
ecliptic_lat: np.ndarray = field(init=False)
number_of_bins: int = field(init=False)
l1b_data: InitVar[xr.Dataset]
position_angle: InitVar[float]

def __post_init__(self, l1b_data: xr.Dataset, position_angle: float) -> None:
calibration_factor: InitVar[float | None]

def __post_init__(
self,
l1b_data: xr.Dataset,
position_angle: float,
calibration_factor: float | None,
) -> None:
"""
Compute all the daily lightcurve variables from L1B data.

Expand All @@ -74,6 +86,10 @@ def __post_init__(self, l1b_data: xr.Dataset, position_angle: float) -> None:
position_angle : float
The offset angle of the GLOWS instrument from the north spin point - this
is used in spin angle calculations.
calibration_factor : float
Calibration factor used for flux calculations, in units of counts per second
per Rayleigh. This is used to convert from raw histograms and exposure times
to physical photon flux units.
"""
# number_of_bins_per_histogram is the count of valid (non-FILLVAL) bins.
# Histogram arrays from L1B are always GlowsConstants.STANDARD_BIN_COUNT
Expand Down Expand Up @@ -107,12 +123,16 @@ def __post_init__(self, l1b_data: xr.Dataset, position_angle: float) -> None:
self.flux_uncertainties = np.zeros(self.number_of_bins)

if (
len(self.exposure_times) != 0
self.number_of_bins > 0
and self.exposure_times[0] > 0
and len(np.unique(self.exposure_times)) == 1
and calibration_factor
):
self.photon_flux = self.raw_histograms / self.exposure_times
self.flux_uncertainties = raw_uncertainties / self.exposure_times
self.photon_flux = (
self.raw_histograms / self.exposure_times
) / calibration_factor
self.flux_uncertainties = (
raw_uncertainties / self.exposure_times
) / calibration_factor

self.spin_angle = np.zeros(0)

Expand Down Expand Up @@ -244,6 +264,8 @@ class HistogramL2:
GLOWS histogram L1B dataset, as produced by glows_l1b.py.
pipeline_settings : PipelineSettings
Pipeline settings object read from ancillary file.
calibration_dataset : xr.Dataset
The cps-to-Rayleigh calibration dataset needed for flux calculations.

Attributes
----------
Expand Down Expand Up @@ -327,7 +349,12 @@ class HistogramL2:
spin_axis_orientation_average: np.ndarray[np.double]
bad_time_flag_occurrences: np.ndarray

def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings):
def __init__(
self,
l1b_dataset: xr.Dataset,
pipeline_settings: PipelineSettings,
calibration_dataset: xr.Dataset,
) -> None:
"""
Given an L1B dataset, process data into an output HistogramL2 object.

Expand All @@ -337,6 +364,9 @@ def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings)
GLOWS histogram L1B dataset, as produced by glows_l1b.py.
pipeline_settings : PipelineSettings
Pipeline settings object read from ancillary file.
calibration_dataset : xr.Dataset
cps-to-Rayleigh calibration dataset used for flux calculations.
coords: start_time_utc, data_vars: cps_per_r
"""
active_flags = np.array(pipeline_settings.active_bad_time_flags, dtype=float)

Expand Down Expand Up @@ -438,7 +468,17 @@ def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings)
.data[np.newaxis, :]
)

self.daily_lightcurve = DailyLightcurve(good_data, position_angle)
# Select calibration factor corresponding to the mid-epoch in the L1B data.
if len(good_data["epoch"].data) != 0:
calibration_factor = self.get_calibration_factor(
good_data["epoch"].data, calibration_dataset
)
else:
calibration_factor = None

self.daily_lightcurve = DailyLightcurve(
good_data, position_angle, calibration_factor
)

def filter_bad_bins(self, histograms: NDArray, bin_exclusions: NDArray) -> NDArray:
"""
Expand Down Expand Up @@ -517,3 +557,33 @@ def compute_position_angle(self) -> float:
# doesn't move from the SPICE determined mounting angle.
glows_mounting_azimuth, _ = get_instrument_mounting_az_el(SpiceFrame.IMAP_GLOWS)
return (360.0 - glows_mounting_azimuth) % 360.0

@staticmethod
def get_calibration_factor(
    epoch_values: np.ndarray, calibration_dataset: xr.Dataset
) -> float:
    """
    Select calibration factor for an observational day.

    The calibration factor is needed to compute flux in Rayleigh units.
    There is a strong assumption that the calibration is constant for
    a given observational day.

    Parameters
    ----------
    epoch_values : np.ndarray
        Array of epoch values from the L1B dataset, in TT J2000 nanoseconds.
    calibration_dataset : xr.Dataset
        Dataset containing calibration data.

    Returns
    -------
    float
        The calibration factor needed to compute flux in Rayleigh units.
    """
    # A single factor represents the whole day, so use the middle sample.
    midpoint_epoch = epoch_values[len(epoch_values) // 2].item()
    # Convert TT J2000 ns -> ephemeris time -> UTC datetime64 for the lookup.
    lookup_time = et_to_datetime64(ttj2000ns_to_et(midpoint_epoch))
    # "pad" picks the most recent calibration entry at or before lookup_time.
    selected = calibration_dataset["cps_per_r"].sel(
        start_time_utc=lookup_time, method="pad"
    )
    return selected.data.item()
Original file line number Diff line number Diff line change
Expand Up @@ -316,15 +316,20 @@ def test_glows_exclusions_by_instr_team_combiner(glows_ancillary_filepath):
def test_glows_l2_calibration_combiner(tmp_path):
    """Calibration .dat files parse into a time-sorted cps_per_r dataset."""
    cal_file = tmp_path / "imap_glows_l2-calibration_20251112_v001.dat"
    cal_file.write_text(
        "# header\n2025-11-13T18:12:48 1.020\n"
        "2025-11-14T09:58:04 0.849\n"
        "2025-11-14T20:58:04 0.649\n"
    )

    result = GlowsAncillaryCombiner([], "20251115").convert_file_to_dataset(cal_file)

    # start_time_utc must come back as a coordinate in non-decreasing order.
    assert "start_time_utc" in result.coords
    time_deltas = np.diff(result.start_time_utc.values.astype("datetime64"))
    assert (time_deltas >= np.timedelta64(0)).all()
    # One calibration value per non-comment input row.
    assert "cps_per_r" in result.data_vars
    assert len(result["cps_per_r"]) == 3
    assert result["cps_per_r"].values[0] == pytest.approx(1.020)


Expand Down
13 changes: 13 additions & 0 deletions imap_processing/tests/glows/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,19 @@ def mock_conversion_table_dict():
return mock_dict


@pytest.fixture
def mock_calibration_dataset():
    """Create a mock cps-to-Rayleigh calibration dataset for testing."""
    # Two calibration entries keyed by their UTC start times.
    start_times = np.array(
        ["2011-09-19T09:58:04", "2011-09-20T18:12:48"], dtype="datetime64[s]"
    )
    factors = xr.DataArray([0.849, 1.020], dims=["start_time_utc"])
    return xr.Dataset(
        data_vars={"cps_per_r": factors},
        coords={"start_time_utc": start_times},
    )


@pytest.fixture
def mock_pipeline_settings():
"""Create a mock PipelineSettings dataset for testing."""
Expand Down
79 changes: 42 additions & 37 deletions imap_processing/tests/glows/test_glows_l2.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,7 @@
HistogramL1B,
PipelineSettings,
)
from imap_processing.glows.l2.glows_l2 import (
glows_l2,
)
from imap_processing.glows.l2.glows_l2 import glows_l2
from imap_processing.glows.l2.glows_l2_data import DailyLightcurve, HistogramL2
from imap_processing.glows.utils.constants import GlowsConstants
from imap_processing.spice.time import et_to_datetime64, ttj2000ns_to_et
Expand Down Expand Up @@ -54,6 +52,7 @@ def test_glows_l2(
mock_pipeline_settings,
mock_conversion_table_dict,
mock_ecliptic_bin_centers,
mock_calibration_dataset,
caplog,
):
mock_spice_function.side_effect = mock_update_spice_parameters
Expand All @@ -69,7 +68,7 @@ def test_glows_l2(
)

# Test case 1: L1B dataset has good times
l2 = glows_l2(l1b_hist_dataset, mock_pipeline_settings, None)[0]
l2 = glows_l2(l1b_hist_dataset, mock_pipeline_settings, mock_calibration_dataset)[0]
assert l2.attrs["Logical_source"] == "imap_glows_l2_hist"
assert np.allclose(l2["filter_temperature_average"].values, [57.6], rtol=0.1)

Expand All @@ -79,7 +78,9 @@ def test_glows_l2(
l1b_hist_dataset_no_good_times.flags.shape
)
caplog.set_level("WARNING")
result = glows_l2(l1b_hist_dataset_no_good_times, mock_pipeline_settings, None)
result = glows_l2(
l1b_hist_dataset_no_good_times, mock_pipeline_settings, mock_calibration_dataset
)
assert result == []
assert any(record.levelname == "WARNING" for record in caplog.records)

Expand All @@ -88,7 +89,9 @@ def test_glows_l2(
l1b_hist_dataset_zero_values["spin_period_average"].data[:] = 0
l1b_hist_dataset_zero_values["number_of_spins_per_block"].data[:] = 0
caplog.set_level("WARNING")
result = glows_l2(l1b_hist_dataset_zero_values, mock_pipeline_settings, None)
result = glows_l2(
l1b_hist_dataset_zero_values, mock_pipeline_settings, mock_calibration_dataset
)
assert result == []
assert any(record.levelname == "WARNING" for record in caplog.records)

Expand All @@ -109,6 +112,7 @@ def test_generate_l2(
mock_pipeline_settings,
mock_conversion_table_dict,
mock_ecliptic_bin_centers,
mock_calibration_dataset,
):
mock_spice_function.side_effect = mock_update_spice_parameters

Expand All @@ -127,37 +131,38 @@ def test_generate_l2(
)

# Test case 1: L1B dataset has good times
l2 = HistogramL2(l1b_hist_dataset, pipeline_settings)

expected_values = {
"filter_temperature_average": [57.59],
"filter_temperature_std_dev": [0.21],
"hv_voltage_average": [1715.4],
"hv_voltage_std_dev": [0.0],
}

assert np.isclose(
l2.filter_temperature_average,
expected_values["filter_temperature_average"],
0.01,
)
assert np.isclose(
l2.filter_temperature_std_dev,
expected_values["filter_temperature_std_dev"],
0.01,
)
assert np.isclose(
l2.hv_voltage_average, expected_values["hv_voltage_average"], 0.01
)
assert np.isclose(
l2.hv_voltage_std_dev, expected_values["hv_voltage_std_dev"], 0.01
)

# Test case 2: L1B dataset has no good times (all flags 0)
l1b_hist_dataset["flags"].values = np.zeros(l1b_hist_dataset.flags.shape)
ds = HistogramL2(l1b_hist_dataset, pipeline_settings)
expected_number_of_good_l1b_inputs = 0
assert ds.number_of_good_l1b_inputs == expected_number_of_good_l1b_inputs
with patch.object(HistogramL2, "get_calibration_factor", return_value=1):
l2 = HistogramL2(l1b_hist_dataset, pipeline_settings, mock_calibration_dataset)

expected_values = {
"filter_temperature_average": [57.59],
"filter_temperature_std_dev": [0.21],
"hv_voltage_average": [1715.4],
"hv_voltage_std_dev": [0.0],
}

assert np.isclose(
l2.filter_temperature_average,
expected_values["filter_temperature_average"],
0.01,
)
assert np.isclose(
l2.filter_temperature_std_dev,
expected_values["filter_temperature_std_dev"],
0.01,
)
assert np.isclose(
l2.hv_voltage_average, expected_values["hv_voltage_average"], 0.01
)
assert np.isclose(
l2.hv_voltage_std_dev, expected_values["hv_voltage_std_dev"], 0.01
)

# Test case 2: L1B dataset has no good times (all flags 0)
l1b_hist_dataset["flags"].values = np.zeros(l1b_hist_dataset.flags.shape)
ds = HistogramL2(l1b_hist_dataset, pipeline_settings, mock_calibration_dataset)
expected_number_of_good_l1b_inputs = 0
assert ds.number_of_good_l1b_inputs == expected_number_of_good_l1b_inputs


def test_bin_exclusions(l1b_hists):
Expand Down
Loading
Loading