Merged
12 changes: 12 additions & 0 deletions imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml
@@ -185,6 +185,18 @@ acquisition_time_per_step:
VALIDMAX: 625.000000
VAR_TYPE: support_data

half_spin_per_esa_step:
CATDESC: Half spin number for each ESA energy step
DEPEND_1: esa_step
FIELDNAM: Half Spin Number
FILLVAL: 255
FORMAT: I3
LABLAXIS: Half Spin Number
SCALETYP: linear
UNITS: half spin number
VALIDMIN: 0
VALIDMAX: 255
VAR_TYPE: support_data

data_quality:
CATDESC: Indicates whether data quality is suspect (1).
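For context, the new attribute block is consumed through `cdf_attrs.get_variable_attributes("half_spin_per_esa_step", ...)` in the Python changes below. Here is a minimal sketch of how such a YAML entry could be read and attached to a 128-element support variable; the YAML is inlined and the stepping pattern is made up, so this is illustrative rather than the project's actual loader.

```python
import numpy as np
import xarray as xr
import yaml

# The YAML block above, inlined so the sketch is self-contained.
attrs_yaml = """
half_spin_per_esa_step:
  CATDESC: Half spin number for each ESA energy step
  DEPEND_1: esa_step
  FIELDNAM: Half Spin Number
  FILLVAL: 255
  FORMAT: I3
  LABLAXIS: Half Spin Number
  SCALETYP: linear
  UNITS: half spin number
  VALIDMIN: 0
  VALIDMAX: 255
  VAR_TYPE: support_data
"""
half_spin_attrs = yaml.safe_load(attrs_yaml)["half_spin_per_esa_step"]

# One half-spin number per ESA step; the 32x4 stepping pattern here is hypothetical.
half_spin = xr.DataArray(
    np.repeat(np.arange(32, dtype=np.uint8), 4),
    dims=("esa_step",),
    attrs=half_spin_attrs,
)
```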
7 changes: 7 additions & 0 deletions imap_processing/codice/codice_l1a_lo_angular.py
@@ -238,6 +238,13 @@ def l1a_lo_angular(unpacked_dataset: xr.Dataset, lut_file: Path) -> xr.Dataset:
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
),
"half_spin_per_esa_step": xr.DataArray(
sci_lut_data["lo_stepping_tab"]["row_number"].get("data"),
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes(
"half_spin_per_esa_step", check_schema=False
),
),
"esa_step_label": xr.DataArray(
np.arange(128).astype(str),
dims=("esa_step",),
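The same `half_spin_per_esa_step` variable is added to each of the L1A lo products below, always sourced from `sci_lut_data["lo_stepping_tab"]["row_number"]`. A sketch of the assumed structure behind that access follows; the nesting and the values are illustrative assumptions, since the real row numbers come from the lo stepping table in the LUT file.

```python
# Assumed shape of the LUT entry used above (a sketch, not the actual LUT file format):
# lo_stepping_tab -> row_number -> "data" holds one half-spin (row) number per ESA step.
sci_lut_data = {
    "lo_stepping_tab": {
        "row_number": {
            # 128 entries, one per ESA step; hypothetical values for illustration
            "data": [step // 4 for step in range(128)],
        }
    }
}

half_spin_per_esa_step = sci_lut_data["lo_stepping_tab"]["row_number"].get("data")
assert len(half_spin_per_esa_step) == 128
```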
7 changes: 7 additions & 0 deletions imap_processing/codice/codice_l1a_lo_counters_aggregated.py
@@ -154,6 +154,13 @@ def l1a_lo_counters_aggregated(
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
),
"half_spin_per_esa_step": xr.DataArray(
sci_lut_data["lo_stepping_tab"]["row_number"].get("data"),
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes(
"half_spin_per_esa_step", check_schema=False
),
),
"esa_step_label": xr.DataArray(
np.arange(esa_step, dtype=np.uint8).astype(str),
dims=("esa_step",),
7 changes: 7 additions & 0 deletions imap_processing/codice/codice_l1a_lo_counters_singles.py
@@ -151,6 +151,13 @@ def l1a_lo_counters_singles(unpacked_dataset: xr.Dataset, lut_file: Path) -> xr.
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
),
"half_spin_per_esa_step": xr.DataArray(
sci_lut_data["lo_stepping_tab"]["row_number"].get("data"),
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes(
"half_spin_per_esa_step", check_schema=False
),
),
"esa_step_label": xr.DataArray(
np.arange(esa_step, dtype=np.uint8).astype(str),
dims=("esa_step",),
7 changes: 7 additions & 0 deletions imap_processing/codice/codice_l1a_lo_priority.py
@@ -164,6 +164,13 @@ def l1a_lo_priority(unpacked_dataset: xr.Dataset, lut_file: Path) -> xr.Dataset:
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
),
"half_spin_per_esa_step": xr.DataArray(
sci_lut_data["lo_stepping_tab"]["row_number"].get("data"),
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes(
"half_spin_per_esa_step", check_schema=False
),
),
"esa_step_label": xr.DataArray(
np.arange(128).astype(str),
dims=("esa_step",),
7 changes: 7 additions & 0 deletions imap_processing/codice/codice_l1a_lo_species.py
@@ -169,6 +169,13 @@ def l1a_lo_species(unpacked_dataset: xr.Dataset, lut_file: Path) -> xr.Dataset:
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
),
"half_spin_per_esa_step": xr.DataArray(
sci_lut_data["lo_stepping_tab"]["row_number"].get("data"),
dims=("esa_step",),
attrs=cdf_attrs.get_variable_attributes(
"half_spin_per_esa_step", check_schema=False
),
),
"esa_step_label": xr.DataArray(
np.arange(128).astype(str),
dims=("esa_step",),
13 changes: 3 additions & 10 deletions imap_processing/codice/codice_l2.py
@@ -22,7 +22,6 @@
from imap_processing.cdf.utils import load_cdf
from imap_processing.codice.constants import (
GAIN_ID_TO_STR,
HALF_SPIN_LUT,
HI_L2_ELEVATION_ANGLE,
HI_OMNI_VARIABLE_NAMES,
HI_SECTORED_VARIABLE_NAMES,
@@ -307,22 +306,16 @@ def compute_geometric_factors(
geometric_factors : xarray.DataArray
A 3D array of geometric factors with shape (epoch, esa_steps, positions).
"""
# Convert the HALF_SPIN_LUT to a reverse mapping of esa_step to half_spin
esa_step_to_half_spin_map = {
val: key for key, vals in HALF_SPIN_LUT.items() for val in vals
}
# Get half spin values per esa step from the dataset
half_spin_per_esa_step = dataset.half_spin_per_esa_step.values

# Create a list of half_spin values corresponding to ESA steps (0 to 127)
half_spin_values = np.array(
[esa_step_to_half_spin_map[step] for step in range(128)]
)
# Expand dimensions to compare each rgfo_half_spin value against
# all half_spin_values
rgfo_half_spin = dataset.rgfo_half_spin.data[:, np.newaxis] # Shape: (epoch, 1)
# Perform the comparison and calculate modes
# Modes will be true (reduced mode) anywhere half_spin > rgfo_half_spin otherwise
# false (full mode)
modes = half_spin_values > rgfo_half_spin
modes = half_spin_per_esa_step > rgfo_half_spin

# Get the geometric factors based on the modes
gf = np.where(
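With the LUT inversion gone, `compute_geometric_factors` only has to broadcast the per-step half spins against `rgfo_half_spin` and pick reduced-mode factors wherever a step's half spin exceeds it. The sketch below uses the array shapes from the unit tests further down; the final `np.where` broadcast is an assumption consistent with those shapes, since the hunk is truncated above.

```python
import numpy as np

half_spin_per_esa_step = np.repeat(np.arange(32), 4)   # (128,) from the L1A variable
rgfo_half_spin = np.array([3, 31])                     # (epoch,) from the dataset
geometric_factor_lut = {                               # shapes as in the unit tests
    "full": np.zeros((128, 24)),
    "reduced": np.ones((128, 24)),
}

# True = reduced mode wherever the step's half spin exceeds rgfo_half_spin
modes = half_spin_per_esa_step > rgfo_half_spin[:, np.newaxis]   # (epoch, 128)

# Pick per-(epoch, esa_step, position) factors from the full/reduced tables
gf = np.where(
    modes[:, :, np.newaxis],
    geometric_factor_lut["reduced"],
    geometric_factor_lut["full"],
)                                                                # (epoch, 128, 24)
```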
39 changes: 0 additions & 39 deletions imap_processing/codice/constants.py
@@ -1046,45 +1046,6 @@
126: "B",
127: "B",
}
# TODO Add a variable in l1a (carrying through l2) that indicates mapping from
# half spin to esa step (shape 128)
# use this var when computing intensities for both angular and species intensity
# Lookup table for mapping half-spin (keys) to esa steps (values)
# This is used to determine geometry factors L2
HALF_SPIN_LUT = {
0: [0],
1: [1],
2: [2],
3: [3],
4: [4, 5],
5: [6, 7],
6: [8, 9],
7: [10, 11],
8: [12, 13, 14],
9: [15, 16, 17],
10: [18, 19, 20],
11: [21, 22, 23],
12: [24, 25, 26, 27],
13: [28, 29, 30, 31],
14: [32, 33, 34, 35],
15: [36, 37, 38, 39],
16: [40, 41, 42, 43, 44],
17: [45, 46, 47, 48, 49],
18: [50, 51, 52, 53, 54],
19: [55, 56, 57, 58, 59],
20: [60, 61, 62, 63, 64],
21: [65, 66, 67, 68, 69],
22: [70, 71, 72, 73, 74],
23: [75, 76, 77, 78, 79],
24: [80, 81, 82, 83, 84, 85],
25: [86, 87, 88, 89, 90, 91],
26: [92, 93, 94, 95, 96, 97],
27: [98, 99, 100, 101, 102, 103],
28: [104, 105, 106, 107, 108, 109],
29: [110, 111, 112, 113, 114, 115],
30: [116, 117, 118, 119, 120, 121],
31: [122, 123, 124, 125, 126, 127],
}

NSW_POSITIONS = [x for x in range(3, 22)]
SW_POSITIONS = [0, 1, 2, 22, 23]
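The deleted `HALF_SPIN_LUT` and the new per-step variable carry the same information: inverting a half-spin-to-steps table, as the removed code in `codice_l2.py` used to do, yields exactly the `half_spin_per_esa_step` array that L1A now provides. A small sketch with a truncated example table:

```python
# Invert a half_spin -> [esa_steps] mapping into one half-spin number per ESA step.
half_spin_lut = {0: [0], 1: [1], 2: [2], 3: [3], 4: [4, 5]}  # truncated example

esa_step_to_half_spin = {
    step: half_spin for half_spin, steps in half_spin_lut.items() for step in steps
}
half_spin_per_esa_step = [
    esa_step_to_half_spin[step] for step in sorted(esa_step_to_half_spin)
]
# -> [0, 1, 2, 3, 4, 4]
```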
40 changes: 24 additions & 16 deletions imap_processing/tests/codice/test_codice_l2.py
@@ -84,26 +84,24 @@ def mock_cdf_attrs():


@pytest.fixture
def mock_half_spin_lut(monkeypatch):
def mock_half_spin_per_esa_step():
"""
Mock HALF_SPIN_LUT for testing.
Mock half_spin_per_esa_step for testing.
Example:
ESA steps 0–63 belong to half_spin=1
ESA steps 64–127 belong to half_spin=2
"""
mock_lut = {
1: list(range(0, 64)),
2: list(range(64, 128)),
}
monkeypatch.setattr(
"imap_processing.codice.codice_l2.HALF_SPIN_LUT",
mock_lut,
)
return np.repeat([1, 2], 64)


def test_compute_geometric_factors_all_full_mode(mock_half_spin_lut):
def test_compute_geometric_factors_all_full_mode(mock_half_spin_per_esa_step):
# rgfo_half_spin = 3 means all half_spin values (1 or 2) are < rgfo_half_spin
dataset = xr.Dataset({"rgfo_half_spin": (("epoch",), np.array([3, 3]))})
dataset = xr.Dataset(
{
"rgfo_half_spin": (("epoch",), np.array([3, 3])),
"half_spin_per_esa_step": (("esa_step",), mock_half_spin_per_esa_step),
},
)
geometric_factor_lut = {
"full": np.zeros((128, 24)),
"reduced": np.ones((128, 24)),
@@ -115,9 +113,14 @@ def test_compute_geometric_factors_all_full_mode(mock_half_spin_lut):
np.testing.assert_array_equal(result, expected)


def test_compute_geometric_factors_all_reduced_mode(mock_half_spin_lut):
def test_compute_geometric_factors_all_reduced_mode(mock_half_spin_per_esa_step):
# rgfo_half_spin = 0 means all half_spin values (>=1) are >= rgfo_half_spin
dataset = xr.Dataset({"rgfo_half_spin": (("epoch",), np.array([0]))})
dataset = xr.Dataset(
{
"rgfo_half_spin": (("epoch",), np.array([0])),
"half_spin_per_esa_step": (("esa_step",), mock_half_spin_per_esa_step),
},
)
geometric_factor_lut = {
"full": np.zeros((128, 24)),
"reduced": np.ones((128, 24)),
@@ -129,9 +132,14 @@ def test_compute_geometric_factors_all_reduced_mode(mock_half_spin_lut):
np.testing.assert_array_equal(result, expected)


def test_compute_geometric_factors_mixed(mock_half_spin_lut):
def test_compute_geometric_factors_mixed(mock_half_spin_per_esa_step):
# rgfo_half_spin = 1
dataset = xr.Dataset({"rgfo_half_spin": (("epoch",), np.array([1]))})
dataset = xr.Dataset(
{
"rgfo_half_spin": (("epoch",), np.array([1])),
"half_spin_per_esa_step": (("esa_step",), mock_half_spin_per_esa_step),
},
)
geometric_factor_lut = {
"full": np.zeros((128, 24)),
"reduced": np.ones((128, 24)),
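The mixed-mode test body is truncated above; the arithmetic it presumably checks is sketched here, using the fixture's half-spin pattern (`np.repeat([1, 2], 64)`) and `rgfo_half_spin = 1`.

```python
import numpy as np

# With half spins [1]*64 + [2]*64 and rgfo_half_spin = 1, only steps 64-127
# satisfy half_spin > rgfo_half_spin and therefore use the reduced-mode factors.
half_spin_per_esa_step = np.repeat([1, 2], 64)
rgfo_half_spin = np.array([1])

modes = half_spin_per_esa_step > rgfo_half_spin[:, np.newaxis]   # (1, 128)
expected = np.where(modes[:, :, np.newaxis], np.ones((128, 24)), np.zeros((128, 24)))
# expected[:, :64, :] == 0 (full mode), expected[:, 64:, :] == 1 (reduced mode)
```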
1 change: 1 addition & 0 deletions imap_processing/tests/ialirt/unit/test_process_codice.py
Expand Up @@ -605,6 +605,7 @@ def test_l2_ialirt_cod_hi(cod_hi_l1b_test_data, l2_lut_path, cod_hi_l2_test_data
)


@pytest.mark.xfail(reason="Remove this xfail when the validation data version is v15.")
@pytest.mark.external_test_data
def test_l2_ialirt_cod_lo(
cod_lo_l1b_test_data, l1a_lut_path, cod_lo_l2_test_data, l2_processing_dependencies