Commit

Merge pull request #8 from tacaswell/elastic_peak_mnt
Elastic peak mnt
licode committed May 9, 2015
2 parents 0ca2acf + ab1f2fd commit 57bbcc9
Showing 1 changed file with 46 additions and 26 deletions.
72 changes: 46 additions & 26 deletions skxray/fitting/xrf_model.py
@@ -173,24 +173,53 @@ def _set_parameter_hint(param_name, input_dict, input_model):
     input_model : object
         model object used in lmfit
     """
+    value = input_dict['value']
     if input_dict['bound_type'] == 'none':
-        input_model.set_param_hint(name=param_name, value=input_dict['value'], vary=True)
+        input_model.set_param_hint(name=param_name, value=value, vary=True)
     elif input_dict['bound_type'] == 'fixed':
-        input_model.set_param_hint(name=param_name, value=input_dict['value'], vary=False)
+        input_model.set_param_hint(name=param_name, value=value, vary=False)
     elif input_dict['bound_type'] == 'lohi':
-        input_model.set_param_hint(name=param_name, value=input_dict['value'], vary=True,
-                                   min=input_dict['min'], max=input_dict['max'])
+        input_model.set_param_hint(name=param_name, value=value, vary=True,
+                                   min=input_dict['min'],
+                                   max=input_dict['max'])
     elif input_dict['bound_type'] == 'lo':
-        input_model.set_param_hint(name=param_name, value=input_dict['value'], vary=True,
+        input_model.set_param_hint(name=param_name, value=value,
+                                   vary=True,
                                    min=input_dict['min'])
     elif input_dict['bound_type'] == 'hi':
-        input_model.set_param_hint(name=param_name, value=input_dict['value'], vary=True,
+        input_model.set_param_hint(name=param_name, value=value, vary=True,
                                    max=input_dict['max'])
     else:
         raise ValueError("could not set values for {0}".format(param_name))
-    logger.debug(' {0} bound type: {1}, value: {2}, range: {3}'.
-                 format(param_name, input_dict['bound_type'], input_dict['value'],
-                        [input_dict['min'], input_dict['max']]))
+    logger.debug(' %s bound type: %s, value: %f, range: [%f, %f]',
+                 param_name, input_dict['bound_type'], value,
+                 input_dict['min'], input_dict['max'])
+
+
+def _copy_model_param_hints(target, source, params):
+    """
+    Copy parameters from one model to another
+
+    .. warning
+
+       This updates ``target`` in-place
+
+    Parameters
+    ----------
+    target : lmfit.Model
+        The model to be updated
+    source : lmfit.Model
+        The model to copy from
+    params : list
+        The names of the parameters to copy
+    """
+
+    for label in params:
+        target.set_param_hint(label,
+                              value=source[label].value,
+                              expr=label)
 
 
 def update_parameter_dict(param, fit_results):
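
For reference, a minimal sketch of the bound-type dictionaries that _set_parameter_hint consumes. This is not part of the commit; the parameter names and numbers are illustrative assumptions, and each entry carries 'min'/'max' because the debug logging above reads them regardless of bound_type.

from lmfit.models import GaussianModel
from skxray.fitting.xrf_model import _set_parameter_hint  # private helper, imported only for illustration

# hypothetical bound hints; 'min'/'max' appear for every bound_type because
# the logger.debug call reads them unconditionally
example_hints = {
    'center':    {'value': 10.0, 'bound_type': 'lohi', 'min': 9.5, 'max': 10.5},
    'sigma':     {'value': 0.1, 'bound_type': 'lo', 'min': 0.01, 'max': 1.0},
    'amplitude': {'value': 1e5, 'bound_type': 'none', 'min': 0.0, 'max': 1e7},
}

model = GaussianModel()
for name, hint in example_hints.items():
    _set_parameter_hint(name, hint, model)

params = model.make_params()
print(params['center'])  # value 10.0, bounded to [9.5, 10.5], varying
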
@@ -560,7 +589,8 @@ def setup_element_model(self, elemental_line, default_area=1e5):
         parameter = self.params
 
         element_mod = None
-
+        param_hints_to_copy = ['e_offset', 'e_linear', 'e_quadratic',
+                               'fwhm_offset', 'fwhm_fanoprime']
         if elemental_line in K_LINE:
             element = elemental_line.split('_')[0]
             e = Element(element)
@@ -579,22 +609,12 @@ def setup_element_model(self, elemental_line, default_area=1e5):
                     continue
 
                 gauss_mod = ElementModel(prefix=str(element)+'_'+str(line_name)+'_')
-                gauss_mod.set_param_hint('e_offset',
-                                         value=self.compton_param['e_offset'].value,
-                                         expr='e_offset')
-                gauss_mod.set_param_hint('e_linear',
-                                         value=self.compton_param['e_linear'].value,
-                                         expr='e_linear')
-                gauss_mod.set_param_hint('e_quadratic',
-                                         value=self.compton_param['e_quadratic'].value,
-                                         expr='e_quadratic')
-                gauss_mod.set_param_hint('fwhm_offset',
-                                         value=self.compton_param['fwhm_offset'].value,
-                                         expr='fwhm_offset')
-                gauss_mod.set_param_hint('fwhm_fanoprime',
-                                         value=self.compton_param['fwhm_fanoprime'].value,
-                                         expr='fwhm_fanoprime')
-                gauss_mod.set_param_hint('epsilon', value=self.epsilon, vary=False)
+                # copy the fixed parameters from the Compton model
+                _copy_model_param_hints(gauss_mod, self.compton_param,
+                                        param_hints_to_copy)
+
+                gauss_mod.set_param_hint('epsilon', value=self.epsilon,
+                                         vary=False)
 
                 area_name = str(element)+'_'+str(line_name)+'_area'
                 if area_name in parameter:

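The new _copy_model_param_hints helper replaces the block of repeated set_param_hint calls removed above. A minimal usage sketch, not part of the commit: the helper indexes source like a parameter mapping and reads .value from each entry, as the call site does with self.compton_param; the numeric values below are made up, and a stock lmfit GaussianModel stands in for skxray's ElementModel.

from lmfit import Parameters
from lmfit.models import GaussianModel
from skxray.fitting.xrf_model import _copy_model_param_hints  # private helper, imported only for illustration

# hypothetical stand-in for self.compton_param: a mapping whose entries
# expose .value, which is all _copy_model_param_hints reads from `source`
compton_param = Parameters()
compton_param.add('e_offset', value=0.0)
compton_param.add('e_linear', value=0.01)
compton_param.add('e_quadratic', value=0.0)
compton_param.add('fwhm_offset', value=0.12)
compton_param.add('fwhm_fanoprime', value=1.2e-4)

gauss_mod = GaussianModel(prefix='Fe_ka1_')   # stand-in for an ElementModel
_copy_model_param_hints(gauss_mod, compton_param,
                        ['e_offset', 'e_linear', 'e_quadratic',
                         'fwhm_offset', 'fwhm_fanoprime'])

# each copied hint now carries expr=<name>, so every element line is tied to
# the shared calibration/width parameters when the composite model is built
print(gauss_mod.param_hints['e_offset'])  # {'value': 0.0, 'expr': 'e_offset'}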