Commit b55ebad
Fix flatten_timepoint_specific_output_overrides not supporting observableParameter overrides as placeholders in noise formulae

Fixes #234

Also fixes the return type annotation, makes the test more readable, and uses more informative assertions.
dweindl committed Dec 14, 2023
1 parent 4e6a018 commit b55ebad
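
Illustration (not part of the commit): a minimal sketch of the problem being fixed, assuming the public petab API (petab.Problem, petab.flatten_timepoint_specific_output_overrides) and invented IDs. In a proportional error model, the observableParameter placeholders also appear in the noiseFormula; before this fix, flattening renamed them only in the observableFormula, leaving the noiseFormula pointing at placeholder IDs that no longer exist.

import pandas as pd
import petab
from petab.C import (
    MEASUREMENT, NOISE_FORMULA, NOISE_PARAMETERS, OBSERVABLE_FORMULA,
    OBSERVABLE_ID, OBSERVABLE_PARAMETERS, SIMULATION_CONDITION_ID, TIME,
)

# Proportional error model: the observableParameter placeholder appears
# in both the observable formula and the noise formula.
observable_df = pd.DataFrame({
    OBSERVABLE_ID: ["obs1"],
    OBSERVABLE_FORMULA: ["observableParameter1_obs1"],
    NOISE_FORMULA: ["observableParameter1_obs1 * noiseParameter1_obs1"],
}).set_index(OBSERVABLE_ID)

# Timepoint-specific overrides force this observable to be flattened.
measurement_df = pd.DataFrame({
    OBSERVABLE_ID: ["obs1", "obs1"],
    SIMULATION_CONDITION_ID: ["condition1", "condition1"],
    TIME: [1.0, 2.0],
    MEASUREMENT: [0.1, 0.2],
    OBSERVABLE_PARAMETERS: ["scale_t1", "scale_t2"],
    NOISE_PARAMETERS: ["sigma1", "sigma2"],
})

problem = petab.Problem(
    observable_df=observable_df, measurement_df=measurement_df
)
petab.flatten_timepoint_specific_output_overrides(problem)

# With this commit, the placeholders inside the noise formulae are renamed
# consistently with the flattened observable IDs:
print(problem.observable_df[NOISE_FORMULA].tolist())
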
Showing 2 changed files with 49 additions and 50 deletions.
35 changes: 17 additions & 18 deletions petab/core.py
@@ -71,7 +71,7 @@ def write_simulation_df(df: pd.DataFrame, filename: Union[str, Path]) -> None:
 
 
 def get_visualization_df(
-    visualization_file: Union[str, Path, pd.DataFrame, None]
+    visualization_file: Union[str, Path, pd.DataFrame, None]
 ) -> Union[pd.DataFrame, None]:
     """Read PEtab visualization table
@@ -254,7 +254,7 @@ def flatten_timepoint_specific_output_overrides(
     Arguments:
         petab_problem:
-            PEtab problem to work on
+            PEtab problem to work on. Modified in place.
     """
     new_measurement_dfs = []
     new_observable_dfs = []
@@ -277,22 +277,21 @@
             for field, hyperparameter_type, target in [
                 (NOISE_PARAMETERS, "noiseParameter", NOISE_FORMULA),
                 (OBSERVABLE_PARAMETERS, "observableParameter", OBSERVABLE_FORMULA),
+                (OBSERVABLE_PARAMETERS, "observableParameter", NOISE_FORMULA),
             ]:
-                if field in measurements:
-                    hyperparameter_replacement_id = (
-                        get_hyperparameter_replacement_id(
-                            hyperparameter_type=hyperparameter_type,
-                            observable_replacement_id=observable_replacement_id,
-                        )
-                    )
-                    hyperparameter_id = mappings[field][
-                        hyperparameter_replacement_id
-                    ]
-                    observable[target] = re.sub(
-                        hyperparameter_id,
-                        hyperparameter_replacement_id,
-                        observable[target],
-                    )
+                if field not in measurements:
+                    continue
+
+                hyperparameter_replacement_id = get_hyperparameter_replacement_id(
+                    hyperparameter_type=hyperparameter_type,
+                    observable_replacement_id=observable_replacement_id,
+                )
+                hyperparameter_id = mappings[field][hyperparameter_replacement_id]
+                observable[target] = re.sub(
+                    hyperparameter_id,
+                    hyperparameter_replacement_id,
+                    observable[target],
+                )
 
             measurements[OBSERVABLE_ID] = observable_replacement_id
             new_measurement_dfs.append(measurements)
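
Illustration (not part of the diff): what a single iteration of the rewritten loop does for the newly added (OBSERVABLE_PARAMETERS, "observableParameter", NOISE_FORMULA) entry. The IDs below are invented; in the real code the replacement ID comes from get_hyperparameter_replacement_id().

import re

# One observable row being flattened; only its noise formula is shown.
observable = {
    "noiseFormula": "observableParameter1_obs1 * noiseParameter1_obs1",
}
hyperparameter_id = "observableParameter1_obs1"  # placeholder to replace
hyperparameter_replacement_id = (  # its flattened, observable-specific name
    "observableParameter1_obs1__obsParOverride1__noiseParOverride1__condition1"
)
observable["noiseFormula"] = re.sub(
    hyperparameter_id, hyperparameter_replacement_id, observable["noiseFormula"]
)
# noiseFormula is now
# "observableParameter1_obs1__obsParOverride1__noiseParOverride1__condition1
#  * noiseParameter1_obs1"
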
@@ -306,7 +305,7 @@ def flatten_timepoint_specific_output_overrides(
 def unflatten_simulation_df(
     simulation_df: pd.DataFrame,
     petab_problem: "petab.problem.Problem",
-) -> None:
+) -> pd.DataFrame:
     """Unflatten simulations from a flattened PEtab problem.
 
     A flattened PEtab problem is the output of applying
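
Aside (not part of the diff): the corrected annotation matches what the function already returned. A hypothetical call site, assuming flat_problem is a flattened PEtab problem and flat_simulation_df holds simulations for it:

# Type checkers now know a DataFrame comes back:
unflat_simulation_df: pd.DataFrame = petab.core.unflatten_simulation_df(
    simulation_df=flat_simulation_df,
    petab_problem=flat_problem,
)
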
64 changes: 32 additions & 32 deletions tests/test_petab.py
@@ -353,36 +353,32 @@ def test_flatten_timepoint_specific_output_overrides():
             OBSERVABLE_FORMULA: [
                 "observableParameter1_obs1 + observableParameter2_obs1"
             ],
-            NOISE_FORMULA: ["noiseParameter1_obs1"],
+            NOISE_FORMULA: [
+                "(observableParameter1_obs1 + observableParameter2_obs1) * noiseParameter1_obs1"
+            ],
         }
     )
     observable_df.set_index(OBSERVABLE_ID, inplace=True)
 
+    # new observable IDs (obs${i_obs}_${i_obsParOverride}_${i_noiseParOverride}_${i_condition})
+    obs1_1_1_1 = "obs1__obsParOverride1_1_0__noiseParOverride1__condition1"
+    obs1_2_1_1 = "obs1__obsParOverride2_1_0__noiseParOverride1__condition1"
+    obs1_2_2_1 = "obs1__obsParOverride2_1_0__noiseParOverride2__condition1"
     observable_df_expected = pd.DataFrame(
         data={
-            OBSERVABLE_ID: [
-                "obs1__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-            ],
+            OBSERVABLE_ID: [obs1_1_1_1, obs1_2_1_1, obs1_2_2_1],
             OBSERVABLE_FORMULA: [
-                "observableParameter1_obs1__obsParOverride1_1_0__"
-                "noiseParOverride1__condition1 + observableParameter2_obs1"
-                "__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "observableParameter1_obs1__obsParOverride2_1_0__noiseParOverride1"
-                "__condition1 + observableParameter2_obs1__obsParOverride2_1_0"
-                "__noiseParOverride1__condition1",
-                "observableParameter1_obs1__obsParOverride2_1_0"
-                "__noiseParOverride2__condition1 + observableParameter2_obs1__"
-                "obsParOverride2_1_0__noiseParOverride2__condition1",
+                f"observableParameter1_{obs1_1_1_1} + observableParameter2_{obs1_1_1_1}",
+                f"observableParameter1_{obs1_2_1_1} + observableParameter2_{obs1_2_1_1}",
+                f"observableParameter1_{obs1_2_2_1} + observableParameter2_{obs1_2_2_1}",
             ],
             NOISE_FORMULA: [
-                "noiseParameter1_obs1__obsParOverride1_1_0__"
-                "noiseParOverride1__condition1",
-                "noiseParameter1_obs1__obsParOverride2_1_0__"
-                "noiseParOverride1__condition1",
-                "noiseParameter1_obs1__obsParOverride2_1_0__"
-                "noiseParOverride2__condition1",
+                f"(observableParameter1_{obs1_1_1_1} + observableParameter2_{obs1_1_1_1})"
+                f" * noiseParameter1_{obs1_1_1_1}",
+                f"(observableParameter1_{obs1_2_1_1} + observableParameter2_{obs1_2_1_1})"
+                f" * noiseParameter1_{obs1_2_1_1}",
+                f"(observableParameter1_{obs1_2_2_1} + observableParameter2_{obs1_2_2_1})"
+                f" * noiseParameter1_{obs1_2_2_1}",
             ],
         }
     )
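
Aside (not part of the diff): the flattened observable IDs above follow a fixed scheme; the original observable ID is joined with the observable-parameter override group, the noise-parameter override group, and the condition, using double underscores. A hand-rolled sketch of that scheme (the helper name is invented):

def make_flat_observable_id(
    observable_id: str,
    obs_par_override: str,
    noise_par_override: str,
    condition_id: str,
) -> str:
    # Mirrors the expected IDs in this test, e.g.
    # obs1__obsParOverride1_1_0__noiseParOverride1__condition1
    return "__".join(
        [observable_id, obs_par_override, noise_par_override, condition_id]
    )

assert (
    make_flat_observable_id(
        "obs1", "obsParOverride1_1_0", "noiseParOverride1", "condition1"
    )
    == "obs1__obsParOverride1_1_0__noiseParOverride1__condition1"
)
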
@@ -418,12 +414,7 @@ def test_flatten_timepoint_specific_output_overrides():
 
     measurement_df_expected = pd.DataFrame(
         data={
-            OBSERVABLE_ID: [
-                "obs1__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-            ],
+            OBSERVABLE_ID: [obs1_1_1_1, obs1_2_1_1, obs1_2_2_1, obs1_2_2_1],
             SIMULATION_CONDITION_ID: [
                 "condition1",
                 "condition1",
@@ -472,8 +463,12 @@ def test_flatten_timepoint_specific_output_overrides():
         is False
     )
 
-    assert problem.observable_df.equals(observable_df_expected) is True
-    assert problem.measurement_df.equals(measurement_df_expected) is True
+    pd.testing.assert_frame_equal(
+        problem.observable_df, observable_df_expected
+    )
+    pd.testing.assert_frame_equal(
+        problem.measurement_df, measurement_df_expected
+    )
 
     assert petab.lint_problem(problem) is False
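
Aside (not part of the diff): these are the "more informative assertions" from the commit message. DataFrame.equals() only yields True/False, so a failing assert carries no detail, whereas pd.testing.assert_frame_equal() raises with the offending column and values:

import pandas as pd

a = pd.DataFrame({"x": [1, 2]})
b = pd.DataFrame({"x": [1, 3]})

# `assert a.equals(b)` would fail without any detail.
# assert_frame_equal instead reports that column "x" differs:
pd.testing.assert_frame_equal(a, b)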

@@ -591,8 +586,12 @@ def test_flatten_timepoint_specific_output_overrides_special_cases():
         is False
     )
 
-    assert problem.observable_df.equals(observable_df_expected) is True
-    assert problem.measurement_df.equals(measurement_df_expected) is True
+    pd.testing.assert_frame_equal(
+        problem.observable_df, observable_df_expected
+    )
+    pd.testing.assert_frame_equal(
+        problem.measurement_df, measurement_df_expected
+    )
 
     assert petab.lint_problem(problem) is False

@@ -842,13 +841,14 @@ def test_get_required_parameters_for_parameter_table(petab_problem):
     # as part of the proportional error model.
     assert "observableParameter1_obs1" in noise_placeholders
 
-    required_parameters_for_parameter_table = \
+    required_parameters_for_parameter_table = (
         petab.parameters.get_required_parameters_for_parameter_table(
             model=petab_problem.model,
             condition_df=petab_problem.condition_df,
             observable_df=petab_problem.observable_df,
             measurement_df=petab_problem.measurement_df,
         )
+    )
     # The observable parameter is correctly recognized as a placeholder,
     # i.e. does not need to be in the parameter table.
     assert (
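
Aside (not part of the diff): noise_placeholders above is presumably computed earlier in the test from the observable's noise formula. Conceptually, placeholders follow the pattern {noise,observable}Parameter${n}_${observableId}; a hand-rolled sketch of extracting them (not the petab implementation):

import re

def extract_placeholders(formula: str, observable_id: str) -> set:
    pattern = rf"(?:noise|observable)Parameter\d+_{re.escape(observable_id)}"
    return set(re.findall(pattern, formula))

noise_placeholders = extract_placeholders(
    "observableParameter1_obs1 * noiseParameter1_obs1", "obs1"
)
assert "observableParameter1_obs1" in noise_placeholders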
