
Commit 823e62d

apply to v1
1 parent c0ac520 commit 823e62d

2 files changed: 28 additions, 34 deletions

petab/v1/calculate.py
Lines changed: 10 additions & 15 deletions

@@ -140,19 +140,17 @@ def calculate_residuals_for_table(
         # apply scaling
         observable = observable_df.loc[row[OBSERVABLE_ID]]
         trafo = observable.get(OBSERVABLE_TRANSFORMATION, LIN)
-        simulation = petab.scale(simulation, trafo)
-        measurement = petab.scale(measurement, trafo)
+        scaled_simulation = petab.scale(simulation, trafo)
+        scaled_measurement = petab.scale(measurement, trafo)

         # non-normalized residual is just the difference
-        residual = simulation - measurement
+        residual = scaled_measurement - scaled_simulation

-        noise_value = 1
         if normalize:
-            # look up noise standard deviation
-            noise_value = evaluate_noise_formula(
+            # divide by standard deviation
+            residual /= evaluate_noise_formula(
                 row, noise_formulas, parameter_df, simulation
             )
-            residual /= noise_value

         # fill in value
         residual_df.loc[irow, RESIDUAL] = residual
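
The first hunk flips the residual sign convention: residuals are now the scaled measurement minus the scaled simulation, and the intermediate noise_value variable is folded into the normalization step. A minimal sketch of the resulting convention, using plain NumPy rather than the PEtab helpers (to_scale and the sigma argument below are hypothetical stand-ins for petab.scale and the evaluate_noise_formula call):

import numpy as np


def to_scale(x, trafo):
    # hypothetical stand-in for petab.scale: apply the observable transformation
    if trafo == "log":
        return np.log(x)
    if trafo == "log10":
        return np.log10(x)
    return x  # "lin"


def residual(measurement, simulation, trafo="lin", sigma=1.0, normalize=True):
    # residual = scaled measurement - scaled simulation (new sign convention)
    res = to_scale(measurement, trafo) - to_scale(simulation, trafo)
    if normalize:
        # divide by the noise standard deviation (sigma here replaces
        # the evaluate_noise_formula call in the real code)
        res /= sigma
    return res


# first data point of the simple test model: measurement 0, simulation 2, sigma 2
print(residual(0, 2, sigma=2))  # -1.0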
@@ -170,13 +168,10 @@ def get_symbolic_noise_formulas(observable_df) -> dict[str, sp.Expr]:
     """
     noise_formulas = {}
     # iterate over observables
-    for row in observable_df.itertuples():
-        observable_id = row.Index
-        if NOISE_FORMULA not in observable_df.columns:
-            noise_formula = None
-        else:
-            noise_formula = sympify_petab(row.noiseFormula)
-        noise_formulas[observable_id] = noise_formula
+    for observable_id, row in observable_df.iterrows():
+        noise_formulas[observable_id] = (
+            sympify_petab(row.noiseFormula) if NOISE_FORMULA in row else None
+        )
     return noise_formulas

@@ -376,7 +371,7 @@ def calculate_llh_for_table(

         # get noise standard deviation
         noise_value = evaluate_noise_formula(
-            row, noise_formulas, parameter_df, petab.scale(simulation, scale)
+            row, noise_formulas, parameter_df, simulation
         )

         # get noise distribution
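
The rewrite of get_symbolic_noise_formulas in the second hunk condenses the observable loop: iterrows yields the observable ID directly from the index, and the NOISE_FORMULA check now happens per row rather than against the DataFrame columns. A rough standalone equivalent of that pattern, using sympy.sympify in place of PEtab's sympify_petab and a made-up example table:

import pandas as pd
import sympy as sp

NOISE_FORMULA = "noiseFormula"


def symbolic_noise_formulas(observable_df: pd.DataFrame) -> dict:
    # map each observable ID to its sympified noise formula,
    # or None if the observable table has no noiseFormula column
    return {
        observable_id: (
            sp.sympify(row[NOISE_FORMULA]) if NOISE_FORMULA in row else None
        )
        for observable_id, row in observable_df.iterrows()
    }


observable_df = pd.DataFrame(
    {NOISE_FORMULA: ["noiseParameter1_obs_a", "2.0"]},
    index=pd.Index(["obs_a", "obs_b"], name="observableId"),
)
print(symbolic_noise_formulas(observable_df))
# {'obs_a': noiseParameter1_obs_a, 'obs_b': 2.00000000000000}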

tests/v1/test_calculate.py
Lines changed: 18 additions & 19 deletions

@@ -43,12 +43,12 @@ def model_simple():
     simulation_df[SIMULATION] = [2, 2, 19, 20]

     expected_residuals = {
-        (2 - 0) / 2,
-        (2 - 1) / 2,
-        (19 - 20) / 3,
-        (20 - 22) / 3,
+        (0 - 2) / 2,
+        (1 - 2) / 2,
+        (20 - 19) / 3,
+        (22 - 20) / 3,
     }
-    expected_residuals_nonorm = {2 - 0, 2 - 1, 19 - 20, 20 - 22}
+    expected_residuals_nonorm = {0 - 2, 1 - 2, 20 - 19, 22 - 20}
     expected_llh = (
         -0.5 * (np.array(list(expected_residuals)) ** 2).sum()
         - 0.5 * np.log(2 * np.pi * np.array([2, 2, 3, 3]) ** 2).sum()
@@ -93,8 +93,8 @@ def model_replicates():
     )
     simulation_df[SIMULATION] = [2, 2]

-    expected_residuals = {(2 - 0) / 2, (2 - 1) / 2}
-    expected_residuals_nonorm = {2 - 0, 2 - 1}
+    expected_residuals = {(0 - 2) / 2, (1 - 2) / 2}
+    expected_residuals_nonorm = {0 - 2, 1 - 2}
     expected_llh = (
         -0.5 * (np.array(list(expected_residuals)) ** 2).sum()
         - 0.5 * np.log(2 * np.pi * np.array([2, 2]) ** 2).sum()
@@ -141,12 +141,12 @@ def model_scalings():
     simulation_df[SIMULATION] = [2, 3]

     expected_residuals = {
-        (np.log(2) - np.log(0.5)) / 2,
-        (np.log(3) - np.log(1)) / 2,
+        (np.log(0.5) - np.log(2)) / 2,
+        (np.log(1) - np.log(3)) / 2,
     }
     expected_residuals_nonorm = {
-        np.log(2) - np.log(0.5),
-        np.log(3) - np.log(1),
+        np.log(0.5) - np.log(2),
+        np.log(1) - np.log(3),
     }
     expected_llh = (
         -0.5 * (np.array(list(expected_residuals)) ** 2).sum()
@@ -201,21 +201,20 @@ def model_non_numeric_overrides():
     simulation_df[SIMULATION] = [2, 3]

     expected_residuals = {
-        (np.log(2) - np.log(0.5)) / (2 * 7 + 8 + 4 + np.log(2)),
-        (np.log(3) - np.log(1)) / (2 * 2 + 3 + 4 + np.log(3)),
+        (np.log(0.5) - np.log(2)) / (2 * 7 + 8 + 4 + 2),
+        (np.log(1) - np.log(3)) / (2 * 2 + 3 + 4 + 3),
     }
     expected_residuals_nonorm = {
-        np.log(2) - np.log(0.5),
-        np.log(3) - np.log(1),
+        np.log(0.5) - np.log(2),
+        np.log(1) - np.log(3),
     }
     expected_llh = (
         -0.5 * (np.array(list(expected_residuals)) ** 2).sum()
         - 0.5
         * np.log(
             2
             * np.pi
-            * np.array([2 * 7 + 8 + 4 + np.log(2), 2 * 2 + 3 + 4 + np.log(3)])
-            ** 2
+            * np.array([2 * 7 + 8 + 4 + 2, 2 * 2 + 3 + 4 + 3]) ** 2
             * np.array([0.5, 1]) ** 2
         ).sum()
     )
@@ -261,8 +260,8 @@ def model_custom_likelihood():
     )
     simulation_df[SIMULATION] = [2, 3]

-    expected_residuals = {(np.log(2) - np.log(0.5)) / 2, (3 - 2) / 1.5}
-    expected_residuals_nonorm = {np.log(2) - np.log(0.5), 3 - 2}
+    expected_residuals = {(np.log(0.5) - np.log(2)) / 2, (2 - 3) / 1.5}
+    expected_residuals_nonorm = {np.log(0.5) - np.log(2), 2 - 3}
     expected_llh = (
         -np.abs(list(expected_residuals)).sum()
         - np.log(2 * np.array([2, 1.5]) * np.array([0.5, 1])).sum()

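The test fixtures mirror the flipped sign: measurements now come first in every difference, so the expected residuals change sign but not magnitude. As a quick illustration (not part of the test suite), the updated model_scalings expectations follow directly from measurements 0.5 and 1, simulations 2 and 3, log transformation, and a noise standard deviation of 2:

import numpy as np

# fixture values from model_scalings: log-scaled observables, noise std 2
measurements = np.array([0.5, 1.0])
simulations = np.array([2.0, 3.0])
sigma = 2.0

# scaled measurement minus scaled simulation, divided by the noise std
residuals = (np.log(measurements) - np.log(simulations)) / sigma
print(residuals)  # approximately [-0.693, -0.549]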