From d8ebcb8793f25d23271788db5c79006aba486b6f Mon Sep 17 00:00:00 2001 From: Dantong Wang Date: Mon, 8 Apr 2019 13:00:55 +0200 Subject: [PATCH 1/7] solve pandas 'chained' assignments problem in generate_experiment_id --- petab/generate_experiment_id.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/petab/generate_experiment_id.py b/petab/generate_experiment_id.py index db52acf3..2a1bf55e 100644 --- a/petab/generate_experiment_id.py +++ b/petab/generate_experiment_id.py @@ -77,7 +77,8 @@ def generate_experiment_id(measurement_data): (observable_transformation == observable_transformation[ind_no_exp_id[0]])) for ind in ind_exp_id[0]: - measurement_data.experimentId[ind] = 'experiment_' + str(count) + measurement_data.loc[ind, 'experimentId'] = 'experiment_' + str( + count) # extract measurements with no assigned experimentId ind_no_exp_id = np.where(measurement_data.experimentId == 0)[0] From 13b67ded8efa05956e2e71b10760d78a8a31d279 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Tue, 9 Apr 2019 15:10:06 +0200 Subject: [PATCH 2/7] Fix assert_noise_distributions_valid (#99) * Fix assert_noise_distributions_valid TypeError: ufunc 'isnan' not supported for the input types, and the inputs could not be safely coerced to any supported types according to the casting rule ''safe'' --- petab/lint.py | 12 +++++++----- tests/test_lint.py | 2 +- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/petab/lint.py b/petab/lint.py index 05fdde0f..bff9e011 100644 --- a/petab/lint.py +++ b/petab/lint.py @@ -256,12 +256,12 @@ def assert_noise_distributions_valid(measurement_df): # check for valid values for trafo in df['observableTransformation']: - if trafo not in ['lin', 'log', 'log10'] and trafo: + if trafo not in ['', 'lin', 'log', 'log10', np.nan]: raise ValueError( f"Unrecognized observable transformation in measurement " f"file: {trafo}.") for distr in df['noiseDistribution']: - if distr not in ['normal', 'laplace'] and distr: + if distr not in ['', 'normal', 'laplace', np.nan]: raise ValueError( f"Unrecognized noise distribution in measurement " f"file: {distr}.") @@ -337,9 +337,11 @@ def assert_overrides_match_parameter_count(measurement_df, observables, noise): if not len(replacements) == 1 \ or not isinstance(replacements[0], numbers.Number): raise AssertionError( - f'No placeholders specified in noise model for:\n{row}\n' - f'But parameter name or multiple overrides provided in ' - 'noiseParameters column.') + f'No placeholders have been specified in the noise model ' + f'SBML AssignmentRule for: ' + f'\n{row}\n' + f'But parameter name or multiple overrides were specified ' + 'in the noiseParameters column.') def lint_problem(problem: 'core.Problem'): diff --git a/tests/test_lint.py b/tests/test_lint.py index 862282c0..c94c9af1 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -194,7 +194,7 @@ def test_assert_noise_distributions_valid(): }) lint.assert_noise_distributions_valid(measurement_df) - measurement_df['observableParameters'] = ['lin', 'log'] + measurement_df['observableTransformation'] = ['lin', 'log'] measurement_df['noiseDistribution'] = ['normal', ''] lint.assert_noise_distributions_valid(measurement_df) From 25aea5698bb31dbf7a4eac4cf49b4c46bb1f403c Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Tue, 9 Apr 2019 22:57:20 +0200 Subject: [PATCH 3/7] Allow for condition-specific dynamic parameter overrides with different scales (Closes #100) --- petab/core.py | 40 +++++++-------- tests/test_petab.py | 88
+++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 95 insertions(+), 33 deletions(-) diff --git a/petab/core.py b/petab/core.py index d87b8460..f1249725 100644 --- a/petab/core.py +++ b/petab/core.py @@ -534,37 +534,10 @@ def _apply_dynamic_parameter_overrides(mapping, for condition_idx, overrider_id \ in enumerate(condition_df[overridee_id]): if isinstance(overridee_id, str): - _check_dynamic_parameter_override( - overridee_id, overrider_id, parameter_df) mapping[condition_idx][par_sim_id_to_ix[overridee_id]] = \ overrider_id -def _check_dynamic_parameter_override( - overridee_id, overrider_id, parameter_df: pd.DataFrame): - """Check for valid replacement of parameter overridee_id by overrider_id. - Matching scales, etc.""" - - if 'parameterScale' not in parameter_df: - return # Nothing to check - - # in case both parameters are in parameter table, their scale - # must match. - if overridee_id in parameter_df.index \ - and parameter_df.loc[overridee_id, 'parameterScale'] \ - != parameter_df.loc[overrider_id, 'parameterScale']: - raise ValueError(f'Cannot override {overridee_id} with ' - f'with {overrider_id} which have ' - 'different parameterScale.') - - # if not, the scale of the overrider must be lin - # (or needs to be unscaled) - if parameter_df.loc[overrider_id, 'parameterScale'] != 'lin': - raise ValueError(f'No scale given for parameter {overridee_id}, ' - f'assuming "lin" which does not match scale of ' - f'overriding parameter {overrider_id}') - - def fill_in_nominal_values(mapping, parameter_df: pd.DataFrame): """Replace non-estimated parameters by nominalValues. @@ -588,6 +561,19 @@ def fill_in_nominal_values(mapping, parameter_df: pd.DataFrame): if isinstance(val, str): try: mapping[i_condition][i_val] = overrides[val] + # rescale afterwards. if the parameter is not + # overridden, the previous line raises a KeyError and + # we skip the rescaling + + # all overrides will be scaled to 'lin' + if 'parameterScale' in parameter_df: + scale = parameter_df.loc[val, 'parameterScale'] + if scale == 'log': + mapping[i_condition][i_val] = \ + np.exp(mapping[i_condition][i_val]) + elif scale == 'log10': + mapping[i_condition][i_val] = \ + 10**mapping[i_condition][i_val] except KeyError: pass diff --git a/tests/test_petab.py b/tests/test_petab.py index b4fde35f..bd56aede 100644 --- a/tests/test_petab.py +++ b/tests/test_petab.py @@ -22,6 +22,16 @@ def condition_df_2_conditions(): return condition_df +@pytest.fixture +def minimal_sbml_model(): + document = libsbml.SBMLDocument(3, 1) + model = document.createModel() + model.setTimeUnits("second") + model.setExtentUnits("mole") + model.setSubstanceUnits('mole') + return document, model + + @pytest.fixture def petab_problem(): # create test model @@ -261,7 +271,7 @@ def test_partial_override(self, condition_df_2_conditions): assert actual == expected - def test_parameterized_condition_table(self): + def test_parameterized_condition_table(self, minimal_sbml_model): condition_df = pd.DataFrame(data={ 'conditionId': ['condition1', 'condition2', 'condition3'], 'conditionName': ['', 'Condition 2', ''], @@ -282,12 +292,9 @@ def test_parameterized_condition_table(self): 'parameterId': ['dynamicOverride1_1', 'dynamicOverride1_2'], 'parameterName': ['', '...'], # ...
}) + parameter_df.set_index('parameterId', inplace=True) - document = libsbml.SBMLDocument(3, 1) - model = document.createModel() - model.setTimeUnits("second") - model.setExtentUnits("mole") - model.setSubstanceUnits('mole') + document, model = minimal_sbml_model model.createParameter().setId('dynamicParameter1') assert petab.get_model_parameters(model) == ['dynamicParameter1'] @@ -305,6 +312,75 @@ def test_parameterized_condition_table(self): assert actual == expected + def test_parameterized_condition_table_changed_scale( + self, minimal_sbml_model): + """Test overriding a dynamic parameter `overridee` with + - a log10 parameter to be estimated (condition 1) + - lin parameter not estimated (condition2) + - log10 parameter not estimated (condition 3) + - constant override (condition 4)""" + + document, model = minimal_sbml_model + model.createParameter().setId('overridee') + assert petab.get_model_parameters(model) == ['overridee'] + + condition_df = pd.DataFrame(data={ + 'conditionId': + ['condition1', 'condition2', 'condition3', 'condition4'], + 'conditionName': '', + 'overridee': + ['dynamicOverrideLog10', 'fixedOverrideLin', + 'fixedOverrideLog10', 10.0] + }) + condition_df.set_index('conditionId', inplace=True) + + measurement_df = pd.DataFrame(data={ + 'simulationConditionId': + ['condition1', 'condition2', 'condition3', 'condition4'], + 'observableId': + ['obs1', 'obs2', 'obs1', 'obs2'], + 'observableParameters': '', + 'noiseParameters': '', + }) + + parameter_df = pd.DataFrame(data={ + 'parameterId': ['dynamicOverrideLog10', + 'fixedOverrideLin', + 'fixedOverrideLog10'], + 'parameterName': '', + 'estimate': [1, 0, 0], + 'nominalValue': [np.nan, 2, -2], + 'parameterScale': ['log10', 'lin', 'log10'] + }) + parameter_df.set_index('parameterId', inplace=True) + + actual_par_map = \ + petab.get_optimization_to_simulation_parameter_mapping( + measurement_df=measurement_df, + condition_df=condition_df, + parameter_df=parameter_df, + sbml_model=model + ) + + actual_scale_map = petab.get_optimization_to_simulation_scale_mapping( + parameter_df=parameter_df, + mapping_par_opt_to_par_sim=actual_par_map + ) + + expected_par_map = [['dynamicOverrideLog10'], + [2.0], + # rescaled: + [0.01], + [10.0]] + + expected_scale_map = [['log10'], + ['lin'], + ['lin'], + ['lin']] + + assert actual_par_map == expected_par_map + assert actual_scale_map == expected_scale_map + def test_get_observable_id(): assert petab.get_observable_id('observable_obs1') == 'obs1' From f017e16521d859db319e6eee752fa98819bb8350 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Tue, 9 Apr 2019 23:46:22 +0200 Subject: [PATCH 4/7] Fix parameter scale mapping for condition table parameters --- petab/core.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/petab/core.py b/petab/core.py index f1249725..65868440 100644 --- a/petab/core.py +++ b/petab/core.py @@ -695,8 +695,14 @@ def get_optimization_to_simulation_scale_mapping( scale = 'lin' else: # is par opt id, thus extract its scale - scale = par_opt_scales_from_df[par_opt_ids_from_df.index(val)] - + try: + scale = \ + par_opt_scales_from_df[par_opt_ids_from_df.index(val)] + except ValueError: + # This is a condition-table parameter which may not be + # present in the parameter table. 
+ # present in the parameter table. Those are assumed to be + # 'lin' + scale = 'lin' # append to scales for condition j scales_for_j_condition.append(scale) From 54503b21f8bf8eece18b0808d3ae176becbb645f Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Wed, 10 Apr 2019 13:41:19 +0200 Subject: [PATCH 5/7] Fix nan check Checking for nan with `in` does not work in all cases --- petab/lint.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/petab/lint.py b/petab/lint.py index bff9e011..77bc1c16 100644 --- a/petab/lint.py +++ b/petab/lint.py @@ -256,12 +256,16 @@ def assert_noise_distributions_valid(measurement_df): # check for valid values for trafo in df['observableTransformation']: - if trafo not in ['', 'lin', 'log', 'log10', np.nan]: + if trafo not in ['', 'lin', 'log', 'log10'] \ + and not (isinstance(trafo, numbers.Number) + and np.isnan(trafo)): raise ValueError( f"Unrecognized observable transformation in measurement " f"file: {trafo}.") for distr in df['noiseDistribution']: - if distr not in ['', 'normal', 'laplace', np.nan]: + if distr not in ['', 'normal', 'laplace'] \ + and not (isinstance(distr, numbers.Number) + and np.isnan(distr)): raise ValueError( f"Unrecognized noise distribution in measurement " f"file: {distr}.") From 79cde76832c712e5cdd5ee3074ca9b23c111b9df Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Sun, 28 Apr 2019 15:57:30 +0200 Subject: [PATCH 6/7] Perform mapping for simulation and preequilibration condition indepen… (#106) * Perform mapping for simulation and preequilibration condition independently (Fixes #105) * Refactor, cleanup, add typehints, document * Move parameter mapping functions to separate module * Change parameter mapping to dictionaries with parameter IDs * Apply suggestions from code review * Fix error type * Add function to merge preequilibration and simulation parameters and scales --- petab/__init__.py | 1 + petab/core.py | 456 +++++++++----------------- petab/parameter_mapping.py | 430 ++++++++++++++++++++++++++++++ petab/sbml.py | 16 +- tests/__init__.py | 0 tests/test_parameter_mapping.py | 311 ++++++++++++++++++++++ tests/test_petab.py | 263 +----------------- 7 files changed, 889 insertions(+), 588 deletions(-) create mode 100644 petab/parameter_mapping.py create mode 100644 tests/__init__.py create mode 100644 tests/test_parameter_mapping.py diff --git a/petab/__init__.py b/petab/__init__.py index 6e7fd39e..d618e5f2 100644 --- a/petab/__init__.py +++ b/petab/__init__.py @@ -1,5 +1,6 @@ from .core import * # noqa: F403, F401 from .lint import * # noqa: F403, F401 from .sbml import * # noqa: F403, F401 +from .parameter_mapping import * # noqa: F403, F401 from .generate_experiment_id import generate_experiment_id # noqa: F403, F401 from .version import __version__ # noqa: F401 diff --git a/petab/core.py b/petab/core.py index 65868440..f2954ab1 100644 --- a/petab/core.py +++ b/petab/core.py @@ -10,6 +10,9 @@ import logging from . import lint from . import sbml +from .
import parameter_mapping +from typing import Optional, List, Union, Iterable +import warnings logger = logging.getLogger(__name__) @@ -42,15 +45,16 @@ def __init__(self, measurement_df: pd.DataFrame = None, parameter_df: pd.DataFrame = None): - self.condition_df = condition_df - self.measurement_df = measurement_df - self.parameter_df = parameter_df + self.condition_df: Optional[pd.DataFrame] = condition_df + self.measurement_df: Optional[pd.DataFrame] = measurement_df + self.parameter_df: Optional[pd.DataFrame] = parameter_df - self.sbml_reader = sbml_reader - self.sbml_document = sbml_document - self.sbml_model = sbml_model + self.sbml_reader: Optional[libsbml.SBMLReader] = sbml_reader + self.sbml_document: Optional[libsbml.SBMLDocument] = sbml_document + self.sbml_model: Optional[libsbml.Model] = sbml_model def __getstate__(self): + """Return state for pickling""" state = self.__dict__.copy() # libsbml stuff cannot be serialized directly @@ -66,6 +70,7 @@ def __getstate__(self): return state def __setstate__(self, state): + """Set state after unpickling""" # load SBML model from pickled string sbml_string = state.pop('sbml_string', None) if sbml_string: @@ -80,7 +85,7 @@ def __setstate__(self, state): def from_files(sbml_file: str = None, condition_file: str = None, measurement_file: str = None, - parameter_file: str = None): + parameter_file: str = None) -> 'Problem': """ Factory method to load model and tables from files. @@ -113,7 +118,7 @@ def from_files(sbml_file: str = None, sbml_reader=sbml_reader) @staticmethod - def from_folder(folder: str, model_name: str = None): + def from_folder(folder: str, model_name: str = None) -> 'Problem': """ Factory method to use the standard folder structure and file names, i.e. @@ -149,12 +154,17 @@ def get_constant_parameters(self): to optimization, no sensitivities w.r.t. these parameters are required). """ + warnings.warn("This function will be removed in future releases. ", + DeprecationWarning) + columns_set = set(self.condition_df.columns.values) return list(columns_set - {'conditionId', 'conditionName'}) def get_optimization_parameters(self): """ - Return list of optimization parameter ids. + Return list of optimization parameter IDs. + + See get_optimization_parameters. """ return get_optimization_parameters(self.parameter_df) @@ -162,7 +172,7 @@ def get_dynamic_simulation_parameters(self): """See `get_model_parameters`""" return get_model_parameters(self.sbml_model) - def get_observables(self, remove=False): + def get_observables(self, remove: bool = False): """ Returns dictionary of observables definitions See `assignment_rules_to_dict` for details. @@ -170,7 +180,7 @@ def get_observables(self, remove=False): return get_observables(sbml_model=self.sbml_model, remove=remove) - def get_sigmas(self, remove=False): + def get_sigmas(self, remove: bool = False): """ Return dictionary of observableId => sigma as defined in the SBML model. 
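The `__getstate__`/`__setstate__` pair documented in the hunk above exists because libsbml objects cannot be pickled directly: the SBML document is dumped to an XML string on pickling and re-parsed on unpickling. A minimal sketch of the round-trip this enables, assuming a problem folder laid out as `Problem.from_folder` expects (the path below is hypothetical):

    import pickle

    import petab

    problem = petab.Problem.from_folder('path/to/problem_folder')
    dumped = pickle.dumps(problem)    # SBML document serialized to an XML string
    restored = pickle.loads(dumped)   # ...and parsed back via libsbml on load
    assert restored.sbml_model is not None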
@@ -188,74 +198,85 @@ def get_noise_distributions(self): measurement_df=self.measurement_df) @property - def x_ids(self): + def x_ids(self) -> List[str]: + """Parameter table parameter IDs""" return list(self.parameter_df.reset_index()['parameterId']) @property - def x_nominal(self): + def x_nominal(self) -> List: + """Parameter table nominal values""" return list(self.parameter_df['nominalValue']) @property - def lb(self): + def lb(self) -> List: + """Parameter table lower bounds""" return list(self.parameter_df['lowerBound']) @property - def ub(self): + def ub(self) -> List: + """Parameter table upper bounds""" return list(self.parameter_df['upperBound']) @property - def x_fixed_indices(self): + def x_fixed_indices(self) -> List[int]: + """Parameter table non-estimated parameter indices""" estimated = list(self.parameter_df['estimate']) return [j for j, val in enumerate(estimated) if val == 0] @property - def x_fixed_vals(self): + def x_fixed_vals(self) -> List: + """Nominal values for parameter table non-estimated parameters""" return [self.x_nominal[val] for val in self.x_fixed_indices] def get_simulation_conditions_from_measurement_df(self): + """See petab.get_simulation_conditions""" return get_simulation_conditions(self.measurement_df) def get_optimization_to_simulation_parameter_mapping( - self, warn_unmapped=True): + self, warn_unmapped: bool = True): """ See get_simulation_to_optimization_parameter_mapping. """ - return get_optimization_to_simulation_parameter_mapping( - self.condition_df, - self.measurement_df, - self.parameter_df, - self.sbml_model, - warn_unmapped=warn_unmapped) + return parameter_mapping\ + .get_optimization_to_simulation_parameter_mapping( + self.condition_df, + self.measurement_df, + self.parameter_df, + self.sbml_model, + warn_unmapped=warn_unmapped) def create_parameter_df(self, *args, **kwargs): - """Create a new PEtab parameter table""" + """Create a new PEtab parameter table + + See create_parameter_df + """ return create_parameter_df(self.sbml_model, self.condition_df, self.measurement_df, *args, **kwargs) -def get_default_condition_file_name(model_name, folder=''): +def get_default_condition_file_name(model_name: str, folder: str = ''): """Get file name according to proposed convention""" return os.path.join(folder, f"experimentalCondition_{model_name}.tsv") -def get_default_measurement_file_name(model_name, folder=''): +def get_default_measurement_file_name(model_name: str, folder: str = ''): """Get file name according to proposed convention""" return os.path.join(folder, f"measurementData_{model_name}.tsv") -def get_default_parameter_file_name(model_name, folder=''): +def get_default_parameter_file_name(model_name: str, folder: str = ''): """Get file name according to proposed convention""" return os.path.join(folder, f"parameters_{model_name}.tsv") -def get_default_sbml_file_name(model_name, folder=''): +def get_default_sbml_file_name(model_name: str, folder: str = ''): """Get file name according to proposed convention""" return os.path.join(folder, f"model_{model_name}.xml") -def get_condition_df(condition_file_name): +def get_condition_df(condition_file_name: str) -> pd.DataFrame: """Read the provided condition file into a `pandas.Dataframe` Conditions are rows, parameters are columns, conditionId is index. 
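To make the condition-table layout described above concrete, here is a minimal sketch of reading one, under the assumption that `get_condition_df` forwards its argument to `pandas.read_csv` and therefore also accepts a file-like object in place of a file name:

    import io

    import petab

    condition_tsv = (
        "conditionId\tconditionName\tk1\n"
        "condition1\t\t0.1\n"
        "condition2\tCondition 2\t0.2\n"
    )
    condition_df = petab.get_condition_df(io.StringIO(condition_tsv))
    # conditions are rows, overridden parameters are columns,
    # conditionId is the index:
    assert list(condition_df.index) == ['condition1', 'condition2']
    assert 'k1' in condition_df.columns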
@@ -274,7 +295,7 @@ def get_condition_df(condition_file_name): return condition_df -def get_parameter_df(parameter_file_name): +def get_parameter_df(parameter_file_name: str) -> pd.DataFrame: """ Read the provided parameter file into a `pandas.Dataframe`. """ @@ -292,7 +313,7 @@ def get_parameter_df(parameter_file_name): return parameter_df -def get_measurement_df(measurement_file_name): +def get_measurement_df(measurement_file_name: str) -> pd.DataFrame: """ Read the provided measurement file into a `pandas.Dataframe`. """ @@ -304,7 +325,7 @@ def get_measurement_df(measurement_file_name): return measurement_df -def sbml_parameter_is_observable(sbml_parameter): +def sbml_parameter_is_observable(sbml_parameter: libsbml.Parameter) -> bool: """ Returns whether the `libsbml.Parameter` `sbml_parameter` matches the defined observable format. @@ -312,7 +333,7 @@ def sbml_parameter_is_observable(sbml_parameter): return sbml_parameter.getId().startswith('observable_') -def sbml_parameter_is_sigma(sbml_parameter): +def sbml_parameter_is_sigma(sbml_parameter: libsbml.Parameter) -> bool: """ Returns whether the `libsbml.Parameter` `sbml_parameter` matches the defined sigma format. @@ -320,7 +341,7 @@ def sbml_parameter_is_sigma(sbml_parameter): return sbml_parameter.getId().startswith('sigma_') -def get_observables(sbml_model, remove=False): +def get_observables(sbml_model: libsbml.Model, remove: bool = False) -> dict: """ Returns dictionary of observable definitions. See `assignment_rules_to_dict` for details. @@ -333,10 +354,12 @@ def get_observables(sbml_model, remove=False): return observables -def get_sigmas(sbml_model, remove=False): +def get_sigmas(sbml_model: libsbml.Model, remove: bool = False) -> dict: """ Returns dictionary of sigma definitions. - See `assignment_rules_to_dict` for details. + + Keys are observable IDs, for values see `assignment_rules_to_dict` for + details. """ sigmas = sbml.assignment_rules_to_dict( sbml_model, @@ -349,21 +372,19 @@ def get_sigmas(sbml_model, remove=False): return sigmas -def get_noise_distributions(measurement_df): +def get_noise_distributions(measurement_df: pd.DataFrame) -> dict: """ - Returns dictiontary of cost definitions per observable, if specified. + Returns dictionary of cost definitions per observable, if specified. + Looks through all parameters satisfying `sbml_parameter_is_cost` and - return dictionary of key: observableId, value: cost definition. + return as dictionary. - Parameters - ---------- + Parameters: + measurement_df: PEtab measurement table - sbml_model: The sbml model to look in. - remove: bool, optional (default = False) - Whether to remove parameters identified as a cost from the - sbml model. 
+ Returns: + {observableId: cost definition} """ - # lint lint.assert_noise_distributions_valid(measurement_df) # read noise distributions from measurement file @@ -377,23 +398,23 @@ def get_noise_distributions(measurement_df): # extract observable transformation and noise distribution, # use lin+normal as default if none provided - obsTrafo = row.observableTransformation \ + obs_trafo = row.observableTransformation \ if 'observableTransformation' in row \ and row.observableTransformation \ else 'lin' - noiseDistr = row.noiseDistribution \ + noise_distr = row.noiseDistribution \ if 'noiseDistribution' in row \ and row.noiseDistribution \ else 'normal' # add to noise distributions noise_distrs[id_] = { - 'observableTransformation': obsTrafo, - 'noiseDistribution': noiseDistr} + 'observableTransformation': obs_trafo, + 'noiseDistribution': noise_distr} return noise_distrs -def parameter_is_scaling_parameter(parameter, formula): +def parameter_is_scaling_parameter(parameter: str, formula: str) -> bool: """ Returns true if parameter `parameter` is a scaling parameter in formula `formula`. @@ -405,7 +426,7 @@ def parameter_is_scaling_parameter(parameter, formula): return sym_parameter not in (sym_formula / sym_parameter).free_symbols -def parameter_is_offset_parameter(parameter, formula): +def parameter_is_offset_parameter(parameter: str, formula: str) -> bool: """ Returns true if parameter `parameter` is an offset parameter with positive sign in formula `formula`. @@ -417,172 +438,18 @@ def parameter_is_offset_parameter(parameter, formula): return sym_parameter not in (sym_formula - sym_parameter).free_symbols -def get_optimization_to_simulation_parameter_mapping( - condition_df, - measurement_df, - parameter_df=None, - sbml_model=None, - par_sim_ids=None, - simulation_conditions=None, - warn_unmapped=True): - """ - Create array of mappings from PEtab-problem to SBML parameters. - - The length of the returned array is n_conditions, each entry is an array - of length n_par_sim, listing the optimization parameters or constants to - be mapped to the simulation parameters. NaN is used where no mapping - exists. - - If no `par_sim_ids` is passed, parameter ordering will the one obtained - from `get_model_parameters()`. - - Parameters - ---------- - - condition_df, measurement_df, parameter_df: - The dataframes in the petab format. - - parameter_df is optional if par_sim_ids is provided - - sbml_model: - The sbml model with observables and noise specified according to the - petab format. Optional if par_sim_ids is provided. - - par_sim_ids: list of str, optional - Ids of the simulation parameters. If not passed, - these are generated from the files automatically. However, passing - them can ensure having the correct order. - - simulation_conditions: pd.DataFrame - Table of simulation conditions as created by - `petab.get_simulation_conditions`. 
- - warn_unmapped: - If True, log warning regarding unmapped parameters - """ - perform_mapping_checks(condition_df, measurement_df) - - if simulation_conditions is None: - simulation_conditions = get_simulation_conditions(measurement_df) - - if par_sim_ids is None: - par_sim_ids = get_model_parameters(sbml_model) - - n_conditions = simulation_conditions.shape[0] - - # initialize mapping matrix of shape n_par_dyn_sim_ids x n_conditions - # for the case of matching simulation and optimization parameter vector - mapping = [par_sim_ids[:] for _ in range(0, n_conditions)] - - par_sim_id_to_ix = { - name: idx for idx, name in enumerate(par_sim_ids) - } - - _apply_dynamic_parameter_overrides(mapping, condition_df, parameter_df, - par_sim_id_to_ix) - - # apply output parameter overrides - def _apply_overrides( - overrides, condition_ix, observable_id, override_type): - """ - Apply parameter-overrides for observables and noises to mapping - matrix. - """ - for i, override in enumerate(overrides): - par_sim_ix = par_sim_id_to_ix[ - f'{override_type}Parameter{i+1}_{observable_id}'] - mapping[condition_ix][par_sim_ix] = override - - for condition_ix, condition in simulation_conditions.iterrows(): - cur_measurement_df = get_rows_for_condition(measurement_df, condition) - for _, row in cur_measurement_df.iterrows(): - # we trust that the number of overrides matches (see above) - overrides = split_parameter_replacement_list( - row.observableParameters) - _apply_overrides( - overrides, condition_ix, - row.observableId, override_type='observable') - - overrides = split_parameter_replacement_list(row.noiseParameters) - _apply_overrides( - overrides, condition_ix, - row.observableId, override_type='noise') - - fill_in_nominal_values(mapping, parameter_df) - - handle_missing_overrides(mapping, warn=warn_unmapped) - return mapping - - -def _apply_dynamic_parameter_overrides(mapping, - condition_df: pd.DataFrame, - parameter_df: pd.DataFrame, - par_sim_id_to_ix): - """Apply dynamic parameter overrides from condition table (in-place). - - Arguments: - mapping, par_sim_id_to_ix: - see get_optimization_to_simulation_parameter_mapping - condition_df, parameter_df: PEtab condition and parameter table - """ - for overridee_id in condition_df.columns: - if overridee_id == 'conditionName': - continue - if condition_df[overridee_id].dtype != 'O': - continue - - for condition_idx, overrider_id \ - in enumerate(condition_df[overridee_id]): - if isinstance(overridee_id, str): - mapping[condition_idx][par_sim_id_to_ix[overridee_id]] = \ - overrider_id - - -def fill_in_nominal_values(mapping, parameter_df: pd.DataFrame): - """Replace non-estimated parameters by nominalValues. - - Arguments: - mapping: list of matrices as obtained from - get_optimization_to_simulation_parameter_mapping - parameter_df: - PEtab parameter table - """ - - if parameter_df is None: - return - if 'estimate' not in parameter_df: - return - - overrides = {row.name: row.nominalValue for _, row - in parameter_df.iterrows() if row.estimate != 1} - - for i_condition, mapping_for_condition in enumerate(mapping): - for i_val, val in enumerate(mapping_for_condition): - if isinstance(val, str): - try: - mapping[i_condition][i_val] = overrides[val] - # rescale afterwards. 
if there the parameter is not - # overridden, the previous line raises and we save the - # lookup - - # all overrides will be scaled to 'lin' - if 'parameterScale' in parameter_df: - scale = parameter_df.loc[val, 'parameterScale'] - if scale == 'log': - mapping[i_condition][i_val] = \ - np.exp(mapping[i_condition][i_val]) - elif scale == 'log10': - mapping[i_condition][i_val] = \ - 10**mapping[i_condition][i_val] - except KeyError: - pass - - -def get_simulation_conditions(measurement_df): +def get_simulation_conditions(measurement_df: pd.DataFrame) -> pd.DataFrame: """ Create a table of separate simulation conditions. A simulation condition is a specific combination of simulationConditionId and preequilibrationConditionId. + + Arguments: + measurement_df: PEtab measurement table + + Returns: + Dataframe with columns 'simulationConditionId' and + 'preequilibrationConditionId'. All-NULL columns will be omitted. """ # find columns to group by (i.e. if not all nans). # can be improved by checking for identical condition vectors @@ -598,124 +465,40 @@ def get_simulation_conditions(measurement_df): return simulation_conditions -def get_rows_for_condition(measurement_df, condition): +def get_rows_for_condition(measurement_df: pd.DataFrame, + condition: Union[pd.DataFrame, dict] + ) -> pd.DataFrame: """ Extract rows in `measurement_df` for `condition` according - to the grouping columns present in `condition`. + to 'preequilibrationConditionId' and 'simulationConditionId' in + `condition`. Returns ------- cur_measurement_df: pd.DataFrame The subselection of rows in `measurement_df` for the - condition `condition. + condition `condition`. """ # filter rows for condition row_filter = 1 # check for equality in all grouping cols if 'preequilibrationConditionId' in condition: row_filter = (measurement_df.preequilibrationConditionId == - condition.preequilibrationConditionId) & row_filter + condition['preequilibrationConditionId']) & row_filter if 'simulationConditionId' in condition: row_filter = (measurement_df.simulationConditionId == - condition.simulationConditionId) & row_filter - + condition['simulationConditionId']) & row_filter # apply filter cur_measurement_df = measurement_df.loc[row_filter, :] return cur_measurement_df -def handle_missing_overrides(mapping_par_opt_to_par_sim, warn=True): - """ - Find all observable parameters and noise parameters that were not mapped, - and set their mapping to np.nan. - - Assumes that parameters matching "(noise|observable)Parameter[0-9]+_" were - all supposed to be overwritten. - - Parameters: - ----------- - warn: - If True, log warning regarding unmapped parameters - """ - _missed_vals = [] - rex = re.compile("^(noise|observable)Parameter[0-9]+_") - for i_condition, mapping_for_condition in \ - enumerate(mapping_par_opt_to_par_sim): - for i_val, val in enumerate(mapping_for_condition): - try: - matches = rex.match(val) - except TypeError: - continue - - if matches: - mapping_for_condition[i_val] = np.nan - _missed_vals.append((i_condition, i_val, val)) - - if len(_missed_vals) and warn: - logger.warning(f"Could not map the following overrides " - f"(condition index, parameter index, parameter): " - f"{_missed_vals}. 
Usually, this is just due to missing " - f"data points.") - - -def perform_mapping_checks(condition_df, measurement_df): - if lint.measurement_table_has_timepoint_specific_mappings(measurement_df): - # we could allow that for floats, since they don't matter in this - # function and would be simply ignored - raise ValueError( - "Timepoint-specific parameter overrides currently unsupported.") - - -def get_optimization_to_simulation_scale_mapping( - parameter_df, - mapping_par_opt_to_par_sim): - - n_condition = len(mapping_par_opt_to_par_sim) - n_par_sim = len(mapping_par_opt_to_par_sim[0]) - - par_opt_ids_from_df = list(parameter_df.reset_index()['parameterId']) - par_opt_scales_from_df = list(parameter_df.reset_index()['parameterScale']) - - mapping_scale_opt_to_scale_sim = [] - - # iterate over conditions - for j_condition in range(0, n_condition): - # prepare vector of scales for j_condition - scales_for_j_condition = [] - - # iterate over simulation parameters - for j_par_sim in range(n_par_sim): - # extract entry in mapping table for j_par_sim - val = mapping_par_opt_to_par_sim[j_condition][j_par_sim] - - if isinstance(val, numbers.Number): - # fixed value assignment - scale = 'lin' - else: - # is par opt id, thus extract its scale - try: - scale = \ - par_opt_scales_from_df[par_opt_ids_from_df.index(val)] - except ValueError: - # This is a condition-table parameter which may not be - # present in the parameter table. Those are assumed to be - # 'lin' - scale = 'lin' - # append to scales for condition j - scales_for_j_condition.append(scale) - - # append to mapping - mapping_scale_opt_to_scale_sim.append(scales_for_j_condition) - - return mapping_scale_opt_to_scale_sim - - -def get_measurement_parameter_ids(measurement_df): +def get_measurement_parameter_ids(measurement_df: pd.DataFrame) -> list: """ Return list of ID of parameters which occur in measurement table as - observable or noise parameter. + observable or noise parameter overrides. """ def unique_preserve_order(seq): @@ -733,10 +516,16 @@ def get_unique_parameters(series): + get_unique_parameters(measurement_df.noiseParameters)) -def split_parameter_replacement_list(list_string): +def split_parameter_replacement_list(list_string: Union[str, numbers.Number], + delim: str = ';' + ) -> List: """ Split values in observableParameters and noiseParameters in measurement - table. + table. Convert numeric values to float. + + Arguments: + delim: delimiter + list_string: delim-separated stringified list """ def to_float_if_float(x): @@ -744,6 +533,7 @@ def to_float_if_float(x): return float(x) except ValueError: return x + if isinstance(list_string, numbers.Number): # Empty cells in pandas might be turned into nan # We might want to allow nan as replacement... @@ -751,13 +541,24 @@ def to_float_if_float(x): return [] return [list_string] - result = [x.strip() for x in list_string.split(';') if len(x.strip())] + result = [x.strip() for x in list_string.split(delim) if len(x.strip())] return [to_float_if_float(x) for x in result] -def get_placeholders(formula_string, observable_id, override_type): +def get_placeholders(formula_string: str, observable_id: str, + override_type: str) -> set: """ - Get placeholder variables in noise or observable definition. + Get placeholder variables in noise or observable definition for the + given observable ID. 
+ + Arguments: + formula_string: observable formula (typically from SBML model) + observable_id: ID of current observable + override_type: 'observable' or 'noise', depending on whether `formula` + is for observable or for noise model + + Returns: + (Un-ordered) set of placeholder parameter IDs """ pattern = re.compile( re.escape(override_type) + r'Parameter\d+_' + re.escape(observable_id)) @@ -769,23 +570,21 @@ def get_placeholders(formula_string, observable_id, override_type): return placeholders -def get_model_parameters(sbml_model: libsbml.Model): +def get_model_parameters(sbml_model: libsbml.Model) -> List[str]: """Return list of SBML model parameter IDs which are not AssignmentRule targets for observables or sigmas""" - return [p.getId() for p in sbml_model.getListOfParameters() if sbml_model.getAssignmentRuleByVariable(p.getId()) is None] -def get_optimization_parameters(parameter_df): +def get_optimization_parameters(parameter_df: pd.DataFrame) -> List[str]: """ - Get list of optimization parameter ids from parameter - dataframe. + Get list of optimization parameter ids from parameter dataframe. """ return list(parameter_df.reset_index()['parameterId']) -def get_notnull_columns(df, candidates): +def get_notnull_columns(df: pd.DataFrame, candidates: Iterable): """ Return list of df-columns in candidates which are not all null/nan. The output can e.g. be used as input for pandas.DataFrame.groupby. @@ -794,7 +593,8 @@ def get_notnull_columns(df, candidates): if col in df and not np.all(df[col].isnull())] -def create_condition_df(parameter_ids, condition_ids=None): +def create_condition_df(parameter_ids: Iterable[str], + condition_ids: Iterable[str] = None) -> pd.DataFrame: """Create empty condition dataframe Arguments: @@ -839,10 +639,12 @@ def create_measurement_df() -> pd.DataFrame: return df -def create_parameter_df(sbml_model, condition_df, measurement_df, - parameter_scale='log10', - lower_bound=None, - upper_bound=None): +def create_parameter_df(sbml_model: libsbml.Model, + condition_df: pd.DataFrame, + measurement_df: pd.DataFrame, + parameter_scale: str = 'log10', + lower_bound: Iterable = None, + upper_bound: Iterable = None) -> pd.DataFrame: """Create a new PEtab parameter table All table entries can be provided as string or list-like with length @@ -939,7 +741,7 @@ def append_overrides(overrides): return df -def get_observable_id(parameter_id): +def get_observable_id(parameter_id: str) -> str: """Get observable id from sigma or observable parameter_id e.g. for observable_obs1 -> obs1 sigma_obs1 -> obs1 @@ -954,7 +756,7 @@ def get_observable_id(parameter_id): raise ValueError('Cannot extract observable id from: ' + parameter_id) -def measurements_have_replicates(measurement_df: pd.DataFrame): +def measurements_have_replicates(measurement_df: pd.DataFrame) -> bool: """Tests whether the measurements come with replicates Arguments: diff --git a/petab/parameter_mapping.py b/petab/parameter_mapping.py new file mode 100644 index 00000000..f8ba0809 --- /dev/null +++ b/petab/parameter_mapping.py @@ -0,0 +1,430 @@ +"""Functions related to mapping parameter from model to parameter estimation +problem""" + +import pandas as pd +import numpy as np +import libsbml +import numbers +import re +from . import lint +from . 
import core +from typing import List, Tuple, Dict, Union, Any +import logging + +logger = logging.getLogger(__name__) + +# Parameter mapping for condition +ParMappingDict = Dict[str, Union[str, numbers.Number]] +# Parameter mapping for combination of preequilibration and simulation +# condition +ParMappingDictTuple = Tuple[ParMappingDict, ParMappingDict] +# Same for scale mapping +ScaleMappingDict = Dict[str, str] +ScaleMappingDictTuple = Tuple[ScaleMappingDict, ScaleMappingDict] + + +def get_optimization_to_simulation_parameter_mapping( + condition_df: pd.DataFrame, + measurement_df: pd.DataFrame, + parameter_df: pd.DataFrame = None, + sbml_model: libsbml.Model = None, + simulation_conditions=None, + warn_unmapped: bool = True) -> List[ParMappingDictTuple]: + """ + Create list of mapping dicts from PEtab-problem to SBML parameters. + + Parameters + ---------- + condition_df, measurement_df, parameter_df: + The dataframes in the PEtab format. + + parameter_df is optional; without it, non-estimated parameters + cannot be filled in with their nominal values. + + sbml_model: + The sbml model with observables and noise specified according to the + petab format. Required to obtain the simulation parameter IDs. + + simulation_conditions: pd.DataFrame + Table of simulation conditions as created by + `petab.get_simulation_conditions`. + + warn_unmapped: + If True, log warning regarding unmapped parameters + + Returns + ------- + The length of the returned list is n_conditions, each entry is a tuple of + two dicts of length n_par_sim, listing the optimization parameters or + constants to be mapped to the simulation parameters, first for + preequilibration (empty if no preequilibration condition is specified), + second for simulation. NaN is used where no mapping exists. + """ + # Ensure inputs are okay + perform_mapping_checks(measurement_df) + + if simulation_conditions is None: + simulation_conditions = core.get_simulation_conditions(measurement_df) + + mapping = [] + for condition_ix, condition in simulation_conditions.iterrows(): + cur_measurement_df = core.get_rows_for_condition( + measurement_df, condition) + + if 'preequilibrationConditionId' not in condition \ + or not isinstance(condition.preequilibrationConditionId, str) \ + or not condition.preequilibrationConditionId: + preeq_map = {} + else: + preeq_map = get_parameter_mapping_for_condition( + condition_id=condition.preequilibrationConditionId, + is_preeq=True, + cur_measurement_df=cur_measurement_df, + condition_df=condition_df, + parameter_df=parameter_df, sbml_model=sbml_model, + warn_unmapped=warn_unmapped + ) + + sim_map = get_parameter_mapping_for_condition( + condition_id=condition.simulationConditionId, + is_preeq=False, + cur_measurement_df=cur_measurement_df, + condition_df=condition_df, + parameter_df=parameter_df, sbml_model=sbml_model, + warn_unmapped=warn_unmapped + ) + mapping.append((preeq_map, sim_map),) + + return mapping + + +def get_parameter_mapping_for_condition( + condition_id: str, + is_preeq: bool, + cur_measurement_df: pd.DataFrame, + condition_df: pd.DataFrame, + parameter_df: pd.DataFrame = None, + sbml_model: libsbml.Model = None, + warn_unmapped: bool = True) -> ParMappingDict: + """ + Create dictionary of mappings from PEtab-problem to SBML parameters for the + given condition. + + Parameters + ---------- + condition_id: Condition ID for which to perform mapping + + is_preeq: If true, output parameters will not be mapped + + cur_measurement_df: Measurement sub-table for current condition + + condition_df, parameter_df: + The dataframes in the PEtab format. + + parameter_df is optional; without it, non-estimated parameters + cannot be filled in with their nominal values. + + sbml_model: + The sbml model with observables and noise specified according to the + petab format. Required to obtain the simulation parameter IDs. + + warn_unmapped: + If True, log warning regarding unmapped parameters + + Returns + ------- + Dictionary mapping simulation parameter IDs to the parameter IDs to be + estimated, or to filled-in values in case of non-estimated parameters. + NaN is used where no mapping exists. + """ + perform_mapping_checks(cur_measurement_df) + + par_sim_ids = core.get_model_parameters(sbml_model) + + # initialize mapping dict + # for the case of matching simulation and optimization parameter vector + mapping = {par: par for par in par_sim_ids} + + _apply_dynamic_parameter_overrides(mapping, condition_id, condition_df) + + if not is_preeq: + _apply_output_parameter_overrides(mapping, cur_measurement_df) + + fill_in_nominal_values(mapping, parameter_df) + + # TODO fill in fixed parameters (#103) + + handle_missing_overrides(mapping, warn=warn_unmapped) + return mapping + + +def _apply_output_parameter_overrides( + mapping: ParMappingDict, + cur_measurement_df: pd.DataFrame) -> None: + """ + Apply output parameter overrides to the parameter mapping dict for a given + condition as defined in the measurement table (observableParameters, + noiseParameters). + + Arguments: + mapping: parameter mapping dict + cur_measurement_df: + Subset of the measurement table for the current condition + """ + for _, row in cur_measurement_df.iterrows(): + # we trust that the number of overrides matches (see above) + overrides = core.split_parameter_replacement_list( + row.observableParameters) + _apply_overrides_for_observable(mapping, row.observableId, + 'observable', overrides) + + overrides = core.split_parameter_replacement_list(row.noiseParameters) + _apply_overrides_for_observable(mapping, row.observableId, 'noise', + overrides) + + +def _apply_overrides_for_observable( + mapping: ParMappingDict, + observable_id: str, + override_type: str, + overrides: list) -> None: + """ + Apply parameter-overrides for observables and noises to the mapping + dict. + + Arguments: + mapping: mapping dict to which to apply overrides + observable_id: observable ID + override_type: 'observable' or 'noise' + overrides: list of overrides for noise or observable parameters + """ + for i, override in enumerate(overrides): + overridee_id = f'{override_type}Parameter{i+1}_{observable_id}' + mapping[overridee_id] = override + + +def _apply_dynamic_parameter_overrides(mapping: ParMappingDict, + condition_id: str, + condition_df: pd.DataFrame) -> None: + """Apply dynamic parameter overrides from condition table (in-place). + + Arguments: + mapping: + see get_parameter_mapping_for_condition + condition_id: ID of the condition to apply overrides for + condition_df: PEtab condition table + """ + for overridee_id in condition_df.columns: + if overridee_id == 'conditionName': + continue + if condition_df[overridee_id].dtype != 'O': + continue + + overrider_id = condition_df.loc[condition_id, overridee_id] + mapping[overridee_id] = overrider_id + + +def fill_in_nominal_values(mapping: ParMappingDict, + parameter_df: pd.DataFrame) -> None: + """Replace non-estimated parameters in the mapping dict for a given + condition by nominalValues provided in parameter table.
+ + Arguments: + mapping: + mapping dict obtained from get_parameter_mapping_for_condition + parameter_df: + PEtab parameter table + """ + + if parameter_df is None: + return + if 'estimate' not in parameter_df: + return + + overrides = {row.name: row.nominalValue for _, row + in parameter_df.iterrows() if row.estimate != 1} + + for par, overridee in mapping.items(): + if not isinstance(overridee, str): + continue + + try: + mapping[par] = overrides[overridee] + # all overrides will be scaled to 'lin' + if 'parameterScale' in parameter_df: + scale = parameter_df.loc[overridee, 'parameterScale'] + if scale == 'log': + mapping[par] = np.exp(mapping[par]) + elif scale == 'log10': + mapping[par] = np.power(10, mapping[par]) + except KeyError: + # parameter is to be estimated + pass + + +def get_optimization_to_simulation_scale_mapping( + parameter_df: pd.DataFrame, + mapping_par_opt_to_par_sim: List[ParMappingDictTuple], + measurement_df: pd.DataFrame, + simulation_conditions: Union[dict, pd.DataFrame] = None +) -> List[ScaleMappingDictTuple]: + """Get parameter scale mapping for all conditions""" + mapping_scale_opt_to_scale_sim = [] + + if simulation_conditions is None: + simulation_conditions = core.get_simulation_conditions(measurement_df) + + # iterate over conditions + for condition_ix, condition in simulation_conditions.iterrows(): + if 'preequilibrationConditionId' not in condition \ + or not isinstance(condition.preequilibrationConditionId, str) \ + or not condition.preequilibrationConditionId: + preeq_map = {} + else: + preeq_map = get_scale_mapping_for_condition( + parameter_df=parameter_df, + mapping_par_opt_to_par_sim=mapping_par_opt_to_par_sim[ + condition_ix][0] + ) + + sim_map = get_scale_mapping_for_condition( + parameter_df=parameter_df, + mapping_par_opt_to_par_sim=mapping_par_opt_to_par_sim[ + condition_ix][1] + ) + + # append to mapping + mapping_scale_opt_to_scale_sim.append((preeq_map, sim_map),) + + return mapping_scale_opt_to_scale_sim + + +def get_scale_mapping_for_condition( + parameter_df: pd.DataFrame, + mapping_par_opt_to_par_sim: ParMappingDict) -> ScaleMappingDict: + """Get parameter scale mapping for the given condition. + + Arguments: + parameter_df: PEtab parameter table + mapping_par_opt_to_par_sim: + Mapping as obtained from get_parameter_mapping_for_condition + """ + def get_scale(par_id_or_val): + if isinstance(par_id_or_val, numbers.Number): + # fixed value assignment + return 'lin' + else: + # is par opt id, thus extract its scale + try: + return parameter_df.loc[par_id_or_val, 'parameterScale'] + except KeyError: + # This is a condition-table parameter which is not + # present in the parameter table. Those are assumed to be + # 'lin' + return 'lin' + + return {par: get_scale(val) + for par, val in mapping_par_opt_to_par_sim.items()} + + +def perform_mapping_checks(measurement_df: pd.DataFrame) -> None: + if lint.measurement_table_has_timepoint_specific_mappings(measurement_df): + # we could allow that for floats, since they don't matter in this + # function and would be simply ignored + raise ValueError( + "Timepoint-specific parameter overrides currently unsupported.") + + +def handle_missing_overrides(mapping_par_opt_to_par_sim: ParMappingDict, + warn: bool = True, + condition_id: str = None) -> None: + """ + Find all observable parameters and noise parameters that were not mapped + and set their mapping to np.nan. + + Assumes that parameters matching "(noise|observable)Parameter[0-9]+_" were + all supposed to be overwritten. 
+ + Parameters: + ----------- + mapping_par_opt_to_par_sim: + Output of get_parameter_mapping_for_condition + warn: + If True, log warning regarding unmapped parameters + condition_id: + Condition ID, only used for more informative warnings + """ + _missed_vals = [] + rex = re.compile("^(noise|observable)Parameter[0-9]+_") + for key, val in mapping_par_opt_to_par_sim.items(): + try: + matches = rex.match(val) + except TypeError: + continue + + if matches: + mapping_par_opt_to_par_sim[key] = np.nan + _missed_vals.append(key) + + if len(_missed_vals) and warn: + logger.warning(f"Could not map the following overrides for condition " + f"{condition_id}: " + f"{_missed_vals}. Usually, this is just due to missing " + f"data points.") + + +def merge_preeq_and_sim_pars_condition( + condition_map_preeq: ParMappingDict, + condition_map_sim: ParMappingDict, + condition_scale_map_preeq: ScaleMappingDict, + condition_scale_map_sim: ScaleMappingDict, + condition: Any) -> None: + """Merge preequilibration and simulation parameters and scales while + checking for compatibility. + + This function is meant for the case where we cannot have different + parameters (and scales) for preequilibration and simulation. Therefore, + merge both and ensure matching scales and parameters. + `condition_map_sim` and `condition_scale_map_sim` will be modified in + place. + + Arguments: + condition_map_preeq, condition_map_sim: + Parameter mapping as obtained from + `get_parameter_mapping_for_condition` + condition_scale_map_preeq, condition_scale_map_sim: + Parameter scale mapping as obtained from + `get_scale_mapping_for_condition` + condition: Condition identifier for more informative error messages + """ + if not condition_map_preeq: + # nothing to do + return + + def _is_na(val): + # nan-check that is also safe for parameter ID strings + return isinstance(val, numbers.Number) and np.isnan(val) + + for par_id, par_preeq in condition_map_preeq.items(): + par_sim = condition_map_sim[par_id] + if par_preeq != par_sim \ + and not (_is_na(par_sim) and _is_na(par_preeq)): + # both identical or both nan is okay + if _is_na(par_sim): + # unmapped for simulation + condition_map_sim[par_id] = par_preeq + elif _is_na(par_preeq): + # unmapped for preeq is okay + pass + else: + raise ValueError( + 'Cannot handle different values for dynamic ' + f'parameters: for condition {condition} ' + f'parameter {par_id} is {par_preeq} for preeq ' + f'and {par_sim} for simulation.') + scale_preeq = condition_scale_map_preeq[par_id] + scale_sim = condition_scale_map_sim[par_id] + if scale_preeq != scale_sim: + # both identical is okay + if _is_na(par_sim): + # unmapped for simulation + condition_scale_map_sim[par_id] = scale_preeq + elif _is_na(par_preeq): + # unmapped for preeq is okay + pass + else: + raise ValueError( + 'Cannot handle different parameter scales: ' + f'for condition {condition} scale for ' + f'parameter {par_id} is {scale_preeq} for preeq ' + f'and {scale_sim} for simulation.') diff --git a/petab/sbml.py b/petab/sbml.py index 94451924..38edc8be 100644 --- a/petab/sbml.py +++ b/petab/sbml.py @@ -1,13 +1,16 @@ """Functions for direct access of SBML models""" + import libsbml import math import logging +from typing import Dict, Any logger = logging.getLogger(__name__) def assignment_rules_to_dict( - sbml_model, filter_function=lambda *_: True, remove=False): + sbml_model: libsbml.Model, filter_function=lambda *_: True, + remove: bool = False) -> Dict[str, Dict[str, Any]]: """ Turn assignment rules into dictionary.
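To illustrate the observable/sigma convention that `assignment_rules_to_dict` (and `get_observables`/`get_sigmas` built on top of it) consumes, here is a hedged sketch using only helpers defined elsewhere in this series; the exact shape of the returned dictionary values is indicated only roughly, and `create_assigment_rule` is spelled as defined in petab/sbml.py:

    import libsbml

    import petab
    from petab.sbml import add_global_parameter, create_assigment_rule

    document = libsbml.SBMLDocument(3, 1)
    model = document.createModel()
    add_global_parameter(model, 'x1')
    # an 'observable_'-prefixed parameter plus an assignment rule
    # declare an observable
    add_global_parameter(model, 'observable_obs1')
    create_assigment_rule(model, assignee_id='observable_obs1',
                          formula='2 * x1')

    observables = petab.get_observables(model)
    # roughly: {'observable_obs1': {'name': ..., 'formula': '2 * x1'}}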
@@ -110,7 +113,8 @@ def constant_species_to_parameters(sbml_model: libsbml.Model) -> list: return transformables -def is_sbml_consistent(sbml_document: libsbml.SBMLDocument, check_units=False): +def is_sbml_consistent(sbml_document: libsbml.SBMLDocument, + check_units: bool = False): """Check for SBML validity / consistency Arguments: @@ -134,7 +138,7 @@ def is_sbml_consistent(sbml_document: libsbml.SBMLDocument, check_units=False): def log_sbml_errors(sbml_document: libsbml.SBMLDocument, - minimum_severity=libsbml.LIBSBML_SEV_WARNING): + minimum_severity=libsbml.LIBSBML_SEV_WARNING) -> None: """Log libsbml errors Arguments: @@ -157,7 +161,7 @@ def log_sbml_errors(sbml_document: libsbml.SBMLDocument, def globalize_parameters(sbml_model: libsbml.Model, - prepend_reaction_id: bool = False): + prepend_reaction_id: bool = False) -> None: """Turn all local parameters into global parameters with the same properties @@ -203,7 +207,7 @@ def add_global_parameter(sbml_model: libsbml.Model, parameter_name: str = None, constant: bool = False, units: str = 'dimensionless', - value: float = 0.0): + value: float = 0.0) -> libsbml.Parameter: """Add new global parameter to SBML model""" if parameter_name is None: @@ -222,7 +226,7 @@ def create_assigment_rule(sbml_model: libsbml.Model, assignee_id: str, formula: str, rule_id: str = None, - rule_name: str = None): + rule_name: str = None) -> libsbml.AssignmentRule: """Create SBML AssignmentRule Arguments: diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_parameter_mapping.py b/tests/test_parameter_mapping.py new file mode 100644 index 00000000..8eabb410 --- /dev/null +++ b/tests/test_parameter_mapping.py @@ -0,0 +1,311 @@ +import pandas as pd +import petab +import numpy as np + +from petab.sbml import add_global_parameter + +# import fixtures +pytest_plugins = [ + "tests.test_petab", +] + + +class TestGetSimulationToOptimizationParameterMapping(object): + + def test_no_condition_specific(self, condition_df_2_conditions, + minimal_sbml_model): + # Trivial case - no condition-specific parameters + + condition_df = condition_df_2_conditions + + measurement_df = pd.DataFrame(data={ + 'observableId': ['obs1', 'obs2'], + 'simulationConditionId': ['condition1', 'condition2'], + 'preequilibrationConditionId': ['', ''], + 'observableParameters': ['', ''], + 'noiseParameters': ['', ''] + }) + + sbml_doc, sbml_model = minimal_sbml_model + add_global_parameter(sbml_model, 'dynamicParameter1') + add_global_parameter(sbml_model, 'dynamicParameter2') + add_global_parameter(sbml_model, 'dynamicParameter3') + + expected = [({}, + {'dynamicParameter1': 'dynamicParameter1', + 'dynamicParameter2': 'dynamicParameter2', + 'dynamicParameter3': 'dynamicParameter3'}), + ({}, + {'dynamicParameter1': 'dynamicParameter1', + 'dynamicParameter2': 'dynamicParameter2', + 'dynamicParameter3': 'dynamicParameter3'})] + + actual = petab.get_optimization_to_simulation_parameter_mapping( + sbml_model=sbml_model, + measurement_df=measurement_df, + condition_df=condition_df, + ) + + assert actual == expected + + def test_all_override(self, condition_df_2_conditions, + minimal_sbml_model): + # Condition-specific parameters overriding original parameters + condition_df = condition_df_2_conditions + + sbml_doc, sbml_model = minimal_sbml_model + add_global_parameter(sbml_model, 'dynamicParameter1') + add_global_parameter(sbml_model, 'dynamicParameter2') + add_global_parameter(sbml_model, 'observableParameter1_obs1') + 
add_global_parameter(sbml_model, 'observableParameter2_obs1') + add_global_parameter(sbml_model, 'observableParameter1_obs2') + + measurement_df = pd.DataFrame(data={ + 'observableId': ['obs1', 'obs2', 'obs1', 'obs2'], + 'simulationConditionId': ['condition1', 'condition1', + 'condition2', 'condition2'], + 'preequilibrationConditionId': ['', '', '', ''], + 'observableParameters': ['obs1par1override;obs1par2cond1override', + 'obs2par1cond1override', + 'obs1par1override;obs1par2cond2override', + 'obs2par1cond2override'], + 'noiseParameters': ['', '', '', ''] + }) + + expected = [({}, + {'dynamicParameter1': 'dynamicParameter1', + 'dynamicParameter2': 'dynamicParameter2', + 'observableParameter1_obs1': 'obs1par1override', + 'observableParameter2_obs1': 'obs1par2cond1override', + 'observableParameter1_obs2': 'obs2par1cond1override', + }), + ({}, + {'dynamicParameter1': 'dynamicParameter1', + 'dynamicParameter2': 'dynamicParameter2', + 'observableParameter1_obs1': 'obs1par1override', + 'observableParameter2_obs1': 'obs1par2cond2override', + 'observableParameter1_obs2': 'obs2par1cond2override' + })] + + actual = petab.get_optimization_to_simulation_parameter_mapping( + measurement_df=measurement_df, + condition_df=condition_df, + sbml_model=sbml_model) + + assert actual == expected + + def test_partial_override(self, condition_df_2_conditions, + minimal_sbml_model): + # Condition-specific parameters, keeping original parameters + condition_df = condition_df_2_conditions + + sbml_doc, sbml_model = minimal_sbml_model + add_global_parameter(sbml_model, 'dynamicParameter1') + add_global_parameter(sbml_model, 'observableParameter1_obs1') + add_global_parameter(sbml_model, 'observableParameter2_obs1') + add_global_parameter(sbml_model, 'observableParameter1_obs2') + + measurement_df = pd.DataFrame(data={ + 'observableId': ['obs1', 'obs2', 'obs1', 'obs2'], + 'simulationConditionId': ['condition1', 'condition1', + 'condition2', 'condition2'], + 'preequilibrationConditionId': ['', '', '', ''], + 'observableParameters': ['obs1par1override;obs1par2cond1override', + '', + 'obs1par1override;obs1par2cond2override', + 'obs2par1cond2override'], + 'noiseParameters': ['', '', '', ''] + }) + + expected = [({}, + {'dynamicParameter1': 'dynamicParameter1', + 'observableParameter1_obs1': 'obs1par1override', + 'observableParameter2_obs1': 'obs1par2cond1override', + 'observableParameter1_obs2': np.nan, + }), + ({}, + {'dynamicParameter1': 'dynamicParameter1', + 'observableParameter1_obs1': 'obs1par1override', + 'observableParameter2_obs1': 'obs1par2cond2override', + 'observableParameter1_obs2': 'obs2par1cond2override' + })] + + actual = petab.get_optimization_to_simulation_parameter_mapping( + measurement_df=measurement_df, + condition_df=condition_df, + sbml_model=sbml_model + ) + + assert actual == expected + + def test_parameterized_condition_table(self, minimal_sbml_model): + condition_df = pd.DataFrame(data={ + 'conditionId': ['condition1', 'condition2', 'condition3'], + 'conditionName': ['', 'Condition 2', ''], + 'dynamicParameter1': ['dynamicOverride1_1', + 'dynamicOverride1_2', 0] + }) + condition_df.set_index('conditionId', inplace=True) + + measurement_df = pd.DataFrame(data={ + 'simulationConditionId': ['condition1', 'condition2', + 'condition3'], + 'observableId': ['obs1', 'obs2', 'obs1'], + 'observableParameters': '', + 'noiseParameters': '', + }) + + parameter_df = pd.DataFrame(data={ + 'parameterId': ['dynamicOverride1_1', 'dynamicOverride1_2'], + 'parameterName': ['', '...'], # ... 
+        })
+        parameter_df.set_index('parameterId', inplace=True)
+
+        document, model = minimal_sbml_model
+        model.createParameter().setId('dynamicParameter1')
+
+        assert petab.get_model_parameters(model) == ['dynamicParameter1']
+
+        actual = petab.get_optimization_to_simulation_parameter_mapping(
+            measurement_df=measurement_df,
+            condition_df=condition_df,
+            parameter_df=parameter_df,
+            sbml_model=model
+        )
+
+        expected = [({}, {'dynamicParameter1': 'dynamicOverride1_1'}),
+                    ({}, {'dynamicParameter1': 'dynamicOverride1_2'}),
+                    ({}, {'dynamicParameter1': 0})]
+
+        assert actual == expected
+
+    def test_parameterized_condition_table_changed_scale(
+            self, minimal_sbml_model):
+        """Test overriding a dynamic parameter `overridee` with
+        - a log10 parameter that is to be estimated (condition 1)
+        - a lin parameter that is not estimated (condition 2)
+        - a log10 parameter that is not estimated (condition 3)
+        - a constant override (condition 4)"""
+
+        document, model = minimal_sbml_model
+        model.createParameter().setId('overridee')
+        assert petab.get_model_parameters(model) == ['overridee']
+
+        condition_df = pd.DataFrame(data={
+            'conditionId':
+                ['condition1', 'condition2', 'condition3', 'condition4'],
+            'conditionName': '',
+            'overridee':
+                ['dynamicOverrideLog10', 'fixedOverrideLin',
+                 'fixedOverrideLog10', 10.0]
+        })
+        condition_df.set_index('conditionId', inplace=True)
+
+        measurement_df = pd.DataFrame(data={
+            'simulationConditionId':
+                ['condition1', 'condition2', 'condition3', 'condition4'],
+            'observableId':
+                ['obs1', 'obs2', 'obs1', 'obs2'],
+            'observableParameters': '',
+            'noiseParameters': '',
+        })
+
+        parameter_df = pd.DataFrame(data={
+            'parameterId': ['dynamicOverrideLog10',
+                            'fixedOverrideLin',
+                            'fixedOverrideLog10'],
+            'parameterName': '',
+            'estimate': [1, 0, 0],
+            'nominalValue': [np.nan, 2, -2],
+            'parameterScale': ['log10', 'lin', 'log10']
+        })
+        parameter_df.set_index('parameterId', inplace=True)
+
+        actual_par_map = \
+            petab.get_optimization_to_simulation_parameter_mapping(
+                measurement_df=measurement_df,
+                condition_df=condition_df,
+                parameter_df=parameter_df,
+                sbml_model=model
+            )
+
+        actual_scale_map = petab.get_optimization_to_simulation_scale_mapping(
+            parameter_df=parameter_df,
+            measurement_df=measurement_df,
+            mapping_par_opt_to_par_sim=actual_par_map
+        )
+
+        expected_par_map = [({}, {'overridee': 'dynamicOverrideLog10'}),
+                            ({}, {'overridee': 2.0}),
+                            # rescaled:
+                            ({}, {'overridee': 0.01}),
+                            ({}, {'overridee': 10.0})]
+
+        expected_scale_map = [({}, {'overridee': 'log10'}),
+                              ({}, {'overridee': 'lin'}),
+                              ({}, {'overridee': 'lin'}),
+                              ({}, {'overridee': 'lin'})]
+
+        assert actual_par_map == expected_par_map
+        assert actual_scale_map == expected_scale_map
+
+        # Add a preequilibration condition
+        measurement_df['preequilibrationConditionId'] = \
+            ['condition1', 'condition1', 'condition3', 'condition3']
+        actual_par_map = \
+            petab.get_optimization_to_simulation_parameter_mapping(
+                measurement_df=measurement_df,
+                condition_df=condition_df,
+                parameter_df=parameter_df,
+                sbml_model=model
+            )
+
+        actual_scale_map = petab.get_optimization_to_simulation_scale_mapping(
+            parameter_df=parameter_df,
+            measurement_df=measurement_df,
+            mapping_par_opt_to_par_sim=actual_par_map
+        )
+
+        expected_par_map = [({'overridee': 'dynamicOverrideLog10'},
+                             {'overridee': 'dynamicOverrideLog10'}),
+                            ({'overridee': 'dynamicOverrideLog10'},
+                             {'overridee': 2.0}),
+                            # rescaled:
+                            ({'overridee': 0.01}, {'overridee': 0.01}),
+                            ({'overridee': 0.01}, {'overridee': 10.0})]
+        expected_scale_map = [({'overridee': 'log10'},
+                               {'overridee': 'log10'}),
+                              ({'overridee': 'log10'}, {'overridee': 'lin'}),
+                              ({'overridee': 'lin'}, {'overridee': 'lin'}),
+                              ({'overridee': 'lin'}, {'overridee': 'lin'})]
+        assert actual_par_map == expected_par_map
+        assert actual_scale_map == expected_scale_map
+
+
+def test_fill_in_nominal_values():
+    parameter_df = pd.DataFrame(data={
+        'parameterId': ['estimated', 'not_estimated'],
+        'parameterName': ['', '...'],  # ...
+        'nominalValue': [0.0, 2.0],
+        'estimate': [1, 0]
+    })
+    parameter_df.set_index(['parameterId'], inplace=True)
+
+    mapping = {'estimated': 'estimated', 'not_estimated': 'not_estimated'}
+    actual = mapping.copy()
+    petab.fill_in_nominal_values(actual, parameter_df)
+    expected = {'estimated': 'estimated', 'not_estimated': 2.0}
+    assert expected == actual
+
+    del parameter_df['estimate']
+    # should not replace
+    mapping = {'estimated': 1.0, 'not_estimated': 1.0}
+    actual = mapping.copy()
+    petab.fill_in_nominal_values(actual, parameter_df)
+    expected = mapping.copy()
+    assert expected == actual
+
+    mapping = {'estimated': 'estimated', 'not_estimated': 'not_estimated'}
+    actual = mapping.copy()
+    petab.fill_in_nominal_values(actual, parameter_df)
+    expected = mapping.copy()
+    assert expected == actual
diff --git a/tests/test_petab.py b/tests/test_petab.py
index bd56aede..d269bed4 100644
--- a/tests/test_petab.py
+++ b/tests/test_petab.py
@@ -99,8 +99,8 @@ def test_split_parameter_replacement_list():
         == ['param1', 'param2']
     assert petab.split_parameter_replacement_list('1.0') == [1.0]
     assert petab.split_parameter_replacement_list('1.0;2.0') == [1.0, 2.0]
-    assert petab.split_parameter_replacement_list('param1;2.2') == \
-        ['param1', 2.2]
+    assert petab.split_parameter_replacement_list('param1;2.2') \
+        == ['param1', 2.2]
     assert petab.split_parameter_replacement_list(np.nan) == []
     assert petab.split_parameter_replacement_list(1.5) == [1.5]
 
@@ -153,233 +153,8 @@ def test_serialization(petab_problem):
 
     # Can't test for equality directly, testing for number of parameters
     # should do the job here
-    assert len(problem_recreated.sbml_model.getListOfParameters()) == \
-        len(petab_problem.sbml_model.getListOfParameters())
-
-
-class TestGetSimulationToOptimizationParameterMapping(object):
-
-    def test_no_condition_specific(self, condition_df_2_conditions):
-        # Trivial case - no condition-specific parameters
-
-        condition_df = condition_df_2_conditions
-
-        measurement_df = pd.DataFrame(data={
-            'observableId': ['obs1', 'obs2'],
-            'simulationConditionId': ['condition1', 'condition2'],
-            'preequilibrationConditionId': ['', ''],
-            'observableParameters': ['', ''],
-            'noiseParameters': ['', '']
-        })
-
-        expected = [['dynamicParameter1',
-                     'dynamicParameter2',
-                     'dynamicParameter3'],
-                    ['dynamicParameter1',
-                     'dynamicParameter2',
-                     'dynamicParameter3']]
-
-        actual = petab.get_optimization_to_simulation_parameter_mapping(
-            measurement_df=measurement_df,
-            condition_df=condition_df,
-            par_sim_ids=['dynamicParameter1',
-                         'dynamicParameter2',
-                         'dynamicParameter3']
-        )
-
-        assert actual == expected
-
-    def test_all_override(self, condition_df_2_conditions):
-        # Condition-specific parameters overriding original parameters
-        condition_df = condition_df_2_conditions
-
-        measurement_df = pd.DataFrame(data={
-            'observableId': ['obs1', 'obs2', 'obs1', 'obs2'],
-            'simulationConditionId': ['condition1', 'condition1',
-                                      'condition2', 'condition2'],
-            'preequilibrationConditionId': ['', '', '', ''],
-            'observableParameters': ['obs1par1override;obs1par2cond1override',
-                                     'obs2par1cond1override',
-                                     'obs1par1override;obs1par2cond2override',
-                                     'obs2par1cond2override'],
-            'noiseParameters': ['', '', '', '']
-        })
-
-        expected = [['dynamicParameter1',
-                     'dynamicParameter2',
-                     'obs1par1override',
-                     'obs1par2cond1override',
-                     'obs2par1cond1override',
-                     ],
-                    ['dynamicParameter1',
-                     'dynamicParameter2',
-                     'obs1par1override',
-                     'obs1par2cond2override',
-                     'obs2par1cond2override'
-                     ]]
-
-        actual = petab.get_optimization_to_simulation_parameter_mapping(
-            measurement_df=measurement_df,
-            condition_df=condition_df,
-            par_sim_ids=['dynamicParameter1',
-                         'dynamicParameter2',
-                         'observableParameter1_obs1',
-                         'observableParameter2_obs1',
-                         'observableParameter1_obs2']
-        )
-
-        assert actual == expected
-
-    def test_partial_override(self, condition_df_2_conditions):
-        # Condition-specific parameters, keeping original parameters
-        condition_df = condition_df_2_conditions
-
-        measurement_df = pd.DataFrame(data={
-            'observableId': ['obs1', 'obs2', 'obs1', 'obs2'],
-            'simulationConditionId': ['condition1', 'condition1',
-                                      'condition2', 'condition2'],
-            'preequilibrationConditionId': ['', '', '', ''],
-            'observableParameters': ['obs1par1override;obs1par2cond1override',
-                                     '',
-                                     'obs1par1override;obs1par2cond2override',
-                                     'obs2par1cond2override'],
-            'noiseParameters': ['', '', '', '']
-        })
-
-        expected = [['dynamicParameter1',
-                     'dynamicParameter2',
-                     'obs1par1override',
-                     'obs1par2cond1override',
-                     np.nan,
-                     ],
-                    ['dynamicParameter1',
-                     'dynamicParameter2',
-                     'obs1par1override',
-                     'obs1par2cond2override',
-                     'obs2par1cond2override'
-                     ]]
-
-        actual = petab.get_optimization_to_simulation_parameter_mapping(
-            measurement_df=measurement_df,
-            condition_df=condition_df,
-            par_sim_ids=['dynamicParameter1',
-                         'dynamicParameter2',
-                         'observableParameter1_obs1',
-                         'observableParameter2_obs1',
-                         'observableParameter1_obs2']
-        )
-
-        assert actual == expected
-
-    def test_parameterized_condition_table(self, minimal_sbml_model):
-        condition_df = pd.DataFrame(data={
-            'conditionId': ['condition1', 'condition2', 'condition3'],
-            'conditionName': ['', 'Condition 2', ''],
-            'dynamicParameter1': ['dynamicOverride1_1',
-                                  'dynamicOverride1_2', 0]
-        })
-        condition_df.set_index('conditionId', inplace=True)
-
-        measurement_df = pd.DataFrame(data={
-            'simulationConditionId': ['condition1', 'condition2',
-                                      'condition3'],
-            'observableId': ['obs1', 'obs2', 'obs1'],
-            'observableParameters': '',
-            'noiseParameters': '',
-        })
-
-        parameter_df = pd.DataFrame(data={
-            'parameterId': ['dynamicOverride1_1', 'dynamicOverride1_2'],
-            'parameterName': ['', '...'],  # ...
- }) - parameter_df.set_index('parameterId', inplace=True) - - document, model = minimal_sbml_model - model.createParameter().setId('dynamicParameter1') - - assert petab.get_model_parameters(model) == ['dynamicParameter1'] - - actual = petab.get_optimization_to_simulation_parameter_mapping( - measurement_df=measurement_df, - condition_df=condition_df, - parameter_df=parameter_df, - sbml_model=model - ) - - expected = [['dynamicOverride1_1'], - ['dynamicOverride1_2'], - [0]] - - assert actual == expected - - def test_parameterized_condition_table_changed_scale( - self, minimal_sbml_model): - """Test overriding a dynamic parameter `overridee` with - - a log10 parameter to be estimated (condition 1) - - lin parameter not estimated (condition2) - - log10 parameter not estimated (condition 3) - - constant override (condition 4)""" - - document, model = minimal_sbml_model - model.createParameter().setId('overridee') - assert petab.get_model_parameters(model) == ['overridee'] - - condition_df = pd.DataFrame(data={ - 'conditionId': - ['condition1', 'condition2', 'condition3', 'condition4'], - 'conditionName': '', - 'overridee': - ['dynamicOverrideLog10', 'fixedOverrideLin', - 'fixedOverrideLog10', 10.0] - }) - condition_df.set_index('conditionId', inplace=True) - - measurement_df = pd.DataFrame(data={ - 'simulationConditionId': - ['condition1', 'condition2', 'condition3', 'condition4'], - 'observableId': - ['obs1', 'obs2', 'obs1', 'obs2'], - 'observableParameters': '', - 'noiseParameters': '', - }) - - parameter_df = pd.DataFrame(data={ - 'parameterId': ['dynamicOverrideLog10', - 'fixedOverrideLin', - 'fixedOverrideLog10'], - 'parameterName': '', - 'estimate': [1, 0, 0], - 'nominalValue': [np.nan, 2, -2], - 'parameterScale': ['log10', 'lin', 'log10'] - }) - parameter_df.set_index('parameterId', inplace=True) - - actual_par_map = \ - petab.get_optimization_to_simulation_parameter_mapping( - measurement_df=measurement_df, - condition_df=condition_df, - parameter_df=parameter_df, - sbml_model=model - ) - - actual_scale_map = petab.get_optimization_to_simulation_scale_mapping( - parameter_df=parameter_df, - mapping_par_opt_to_par_sim=actual_par_map - ) - - expected_par_map = [['dynamicOverrideLog10'], - [2.0], - # rescaled: - [0.01], - [10.0]] - - expected_scale_map = [['log10'], - ['lin'], - ['lin'], - ['lin']] - - assert actual_par_map == expected_par_map - assert actual_scale_map == expected_scale_map + assert len(problem_recreated.sbml_model.getListOfParameters()) \ + == len(petab_problem.sbml_model.getListOfParameters()) def test_get_observable_id(): @@ -394,10 +169,11 @@ def test_get_placeholders(): 'observableParameter1_twoParams * ' 'observableParameter2_twoParams + otherParam', 'twoParams', 'observable') \ - == {'observableParameter1_twoParams', 'observableParameter2_twoParams'} + == {'observableParameter1_twoParams', + 'observableParameter2_twoParams'} - assert petab.get_placeholders( - '3.0 * noiseParameter1_oneParam', 'oneParam', 'noise') \ + assert petab.get_placeholders('3.0 * noiseParameter1_oneParam', + 'oneParam', 'noise') \ == {'noiseParameter1_oneParam'} @@ -464,26 +240,3 @@ def test_create_parameter_df(condition_df_2_conditions): actual = parameter_df.index.values.tolist() assert actual == expected assert parameter_df.loc['p0', 'nominalValue'] == 3.0 - - -def test_fill_in_nominal_values(): - parameter_df = pd.DataFrame(data={ - 'parameterId': ['estimated', 'not_estimated'], - 'parameterName': ['', '...'], # ... 
- 'nominalValue': [0.0, 2.0], - 'estimate': [1, 0] - }) - parameter_df.set_index(['parameterId'], inplace=True) - mapping = [[1.0, 1.0], ['estimated', 'not_estimated']] - - actual = mapping.copy() - petab.fill_in_nominal_values(actual, parameter_df) - expected = [[1.0, 1.0], ['estimated', 2.0]] - assert expected == actual - - del parameter_df['estimate'] - # should not replace - actual = mapping.copy() - petab.fill_in_nominal_values(actual, parameter_df) - expected = mapping.copy() - assert expected == actual From e4487c45d2cd6e5e9eb48c5bbd6653806aea3657 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 2 May 2019 13:36:56 +0200 Subject: [PATCH 7/7] Bump version number (0.0.0a14) --- petab/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petab/version.py b/petab/version.py index f9148e8d..d2f63aba 100644 --- a/petab/version.py +++ b/petab/version.py @@ -1 +1 @@ -__version__ = '0.0.0a13' +__version__ = '0.0.0a14'
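
Editorial usage sketch (not part of the patch series above): the tests in
tests/test_parameter_mapping.py show the reworked mapping API, which now
derives the simulation parameters from the SBML model rather than from an
explicit par_sim_ids list, and returns one (preequilibration mapping,
simulation mapping) tuple per measurement condition. The snippet below
condenses test_parameterized_condition_table into a standalone script.
The direct libsbml model construction and the reduced parameter table
(no parameterName column) are assumptions made to keep the sketch
self-contained; behavior follows the tests shown above.

    import libsbml
    import pandas as pd

    import petab

    # Minimal SBML model with one global parameter (assumed setup,
    # standing in for the tests' minimal_sbml_model fixture).
    document = libsbml.SBMLDocument(3, 1)
    model = document.createModel()
    model.createParameter().setId('dynamicParameter1')

    # Condition table: two parametric overrides and one numeric override
    # for the model parameter, as in test_parameterized_condition_table.
    condition_df = pd.DataFrame(data={
        'conditionId': ['condition1', 'condition2', 'condition3'],
        'conditionName': ['', 'Condition 2', ''],
        'dynamicParameter1': ['dynamicOverride1_1',
                              'dynamicOverride1_2', 0],
    }).set_index('conditionId')

    measurement_df = pd.DataFrame(data={
        'simulationConditionId': ['condition1', 'condition2', 'condition3'],
        'observableId': ['obs1', 'obs2', 'obs1'],
        'observableParameters': '',
        'noiseParameters': '',
    })

    # Without an 'estimate' column, fill_in_nominal_values leaves the
    # string overrides in place (see test_fill_in_nominal_values above).
    parameter_df = pd.DataFrame(data={
        'parameterId': ['dynamicOverride1_1', 'dynamicOverride1_2'],
    }).set_index('parameterId')

    mapping = petab.get_optimization_to_simulation_parameter_mapping(
        measurement_df=measurement_df,
        condition_df=condition_df,
        parameter_df=parameter_df,
        sbml_model=model,
    )
    # One (preequilibration mapping, simulation mapping) tuple per condition:
    # [({}, {'dynamicParameter1': 'dynamicOverride1_1'}),
    #  ({}, {'dynamicParameter1': 'dynamicOverride1_2'}),
    #  ({}, {'dynamicParameter1': 0})]
    print(mapping)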