Some indentations. Refs #10945
AndreiSavici committed Jan 23, 2015
1 parent b4bac8f commit a419348
Showing 61 changed files with 1,821 additions and 1,821 deletions.
@@ -25,10 +25,10 @@ def PyInit(self):

        instruments = []
        for instr in sns.instruments():
            for tech in instr.techniques():
                if "Neutron Diffraction" == str(tech):
                    instruments.append(instr.shortName())
                    break
        self.declareProperty("Instrument", "PG3",
                             StringListValidator(instruments))
        validator = IntArrayBoundedValidator()
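Note: the nested loop above keeps only those SNS instruments that list "Neutron Diffraction" among their techniques. As a sketch, the same filter can be written as a single comprehension (assuming, as in the surrounding code, that sns is the facility object, e.g. sns = config.getFacility("SNS")):

# Sketch only: compact equivalent of the technique filter above.
# Assumes `sns` is the facility object used by the algorithm.
instruments = [instr.shortName()
               for instr in sns.instruments()
               if any(str(tech) == "Neutron Diffraction" for tech in instr.techniques())]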
@@ -232,8 +232,8 @@ def _cccalibrate(self, wksp, calib):
            y_s = wksp.readY(s)
            midBin = wksp.blocksize()/2
            if y_s[midBin] > ymax:
                refpixel = s
                ymax = y_s[midBin]
        self.log().information("Reference spectra=%s" % refpixel)
        # Remove old calibration files
        cmd = "rm "+calib
@@ -253,7 +253,7 @@ def _cccalibrate(self, wksp, calib):
                           XMin=-self._ccnumber, XMax=self._ccnumber,
                           MaxOffset=self._maxoffset, MaskWorkspace=str(wksp)+"mask")
        if AnalysisDataService.doesExist(str(wksp)+"cc"):
            AnalysisDataService.remove(str(wksp)+"cc")
        if self._peakpos2 > 0.0:
            wksp = Rebin(InputWorkspace=wksp, OutputWorkspace=wksp.name(),
                         Params=str(self._peakmin2)+","+str(abs(self._binning[1]))+","+str(self._peakmax2))
@@ -263,8 +263,8 @@ def _cccalibrate(self, wksp, calib):
            y_s = wksp.readY(s)
            midBin = wksp.blocksize()/2
            if y_s[midBin] > ymax:
                refpixel = s
                ymax = y_s[midBin]
        msg = "Reference spectra = %s, lastpixel_3 = %s" % (refpixel, self._lastpixel3)
        self.log().information(msg)
        self._lastpixel2 = wksp.getNumberHistograms()*self._lastpixel2/self._lastpixel3-1
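Note: the last assignment rescales _lastpixel2 from the _lastpixel3 pixel numbering to the current workspace's spectrum indices. A worked example with purely hypothetical numbers:

# Hypothetical values, for illustration only:
# getNumberHistograms() = 1000, _lastpixel2 = 600, _lastpixel3 = 1200
# new _lastpixel2 = 1000 * 600 / 1200 - 1 = 499
# i.e. the boundary keeps its relative position (600/1200) in the
# resized workspace, shifted to a zero-based index.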
@@ -294,8 +294,8 @@ def _cccalibrate(self, wksp, calib):
            y_s = wksp.readY(s)
            midBin = wksp.blocksize()/2
            if y_s[midBin] > ymax:
                refpixel = s
                ymax = y_s[midBin]
        self.log().information("Reference spectra=%s" % refpixel)
        CrossCorrelate(InputWorkspace=wksp, OutputWorkspace=str(wksp)+"cc3",
                       ReferenceSpectra=refpixel,
Code/Mantid/Framework/PythonInterface/plugins/algorithms/DSFinterp.py (176 changes: 88 additions & 88 deletions)
@@ -6,108 +6,108 @@

class DSFinterp(PythonAlgorithm):

    def category(self):
        return "Transforms\\Smoothing;Utility;PythonAlgorithms"

    def name(self):
        return 'DSFinterp'

    def summary(self):
        return 'Given a set of parameter values {Ti} and corresponding structure factors S(Q,E,Ti), this algorithm interpolates S(Q,E,T) for any value of parameter T within the range spanned by the {Ti} set.'

    def PyInit(self):
        arrvalidator = StringArrayMandatoryValidator()
        lrg='Input'
        self.declareProperty(StringArrayProperty('Workspaces', values=[], validator=arrvalidator, direction=Direction.Input), doc='list of input workspaces')
        self.declareProperty('LoadErrors', True, direction=Direction.Input, doc='Do we load error data contained in the workspaces?')
        self.declareProperty(FloatArrayProperty('ParameterValues', values=[], validator=FloatArrayMandatoryValidator(), direction=Direction.Input), doc='list of input parameter values')
        self.setPropertyGroup('Workspaces', lrg)
        self.setPropertyGroup('LoadErrors', lrg)
        self.setPropertyGroup('ParameterValues', lrg)

        self.declareProperty('LocalRegression', True, direction=Direction.Input, doc='Perform running local-regression?')
        condition = EnabledWhenProperty("LocalRegression", PropertyCriterion.IsDefault)
        self.declareProperty('RegressionWindow', 6, direction=Direction.Input, doc='window size for the running local-regression')
        self.setPropertySettings("RegressionWindow", condition)
        regtypes = ['linear', 'quadratic']
        self.declareProperty('RegressionType', 'quadratic', StringListValidator(regtypes), direction=Direction.Input, doc='type of local-regression; linear and quadratic are available')
        self.setPropertySettings("RegressionType", condition)
        lrg = 'Running Local Regression Options'
        self.setPropertyGroup('LocalRegression', lrg)
        self.setPropertyGroup('RegressionWindow', lrg)
        self.setPropertyGroup('RegressionType', lrg)

        lrg='Output'
        self.declareProperty(FloatArrayProperty('TargetParameters', values=[]), doc="Parameters to interpolate the structure factor")
        self.declareProperty(StringArrayProperty('OutputWorkspaces', values=[], validator=arrvalidator), doc='list of output workspaces to save the interpolated structure factors')
        self.setPropertyGroup('TargetParameters', lrg)
        self.setPropertyGroup('OutputWorkspaces', lrg)
        self.channelgroup = None

    def areWorkspacesCompatible(self, a, b):
        sizeA = a.blocksize() * a.getNumberHistograms()
        sizeB = b.blocksize() * b.getNumberHistograms()
        return sizeA == sizeB

    def PyExec(self):
        # Check congruence of workspaces
        workspaces = self.getProperty('Workspaces').value
        fvalues = self.getProperty('ParameterValues').value
        if len(workspaces) != len(fvalues):
            mesg = 'Number of Workspaces and ParameterValues should be the same'
            #logger.error(mesg)
            raise IndexError(mesg)
        for workspace in workspaces[1:]:
            if not self.areWorkspacesCompatible(mtd[workspaces[0]], mtd[workspace]):
                mesg = 'Workspace {0} incompatible with {1}'.format(workspace, workspaces[0])
                logger.error(mesg)
                raise ValueError(mesg)
        # Load the workspaces into a group of dynamic structure factors
        from dsfinterp.dsf import Dsf
        from dsfinterp.dsfgroup import DsfGroup
        from dsfinterp.channelgroup import ChannelGroup
        dsfgroup = DsfGroup()
        for idsf in range(len(workspaces)):
            dsf = Dsf()
            dsf.Load(mtd[workspaces[idsf]])
            if not self.getProperty('LoadErrors').value:
                dsf.errors = None  # do not incorporate error data
            dsf.SetFvalue(fvalues[idsf])
            dsfgroup.InsertDsf(dsf)
        # Create the interpolator if not instantiated before
        if not self.channelgroup:
            self.channelgroup = ChannelGroup()
            self.channelgroup.InitFromDsfGroup(dsfgroup)
        localregression = self.getProperty('LocalRegression').value
        if localregression:
            regressiontype = self.getProperty('RegressionType').value
            windowlength = self.getProperty('RegressionWindow').value
            self.channelgroup.InitializeInterpolator(running_regr_type=regressiontype, windowlength=windowlength)
        else:
            self.channelgroup.InitializeInterpolator(windowlength=0)
        # Invoke the interpolator and generate the output workspaces
        targetfvalues = self.getProperty('TargetParameters').value
        for targetfvalue in targetfvalues:
            if targetfvalue < min(fvalues) or targetfvalue > max(fvalues):
                mesg = 'Target parameters should lie in [{0}, {1}]'.format(min(fvalues), max(fvalues))
                logger.error(mesg)
                raise ValueError(mesg)
        outworkspaces = self.getProperty('OutputWorkspaces').value
        if len(targetfvalues) != len(outworkspaces):
            mesg = 'Number of OutputWorkspaces and TargetParameters should be the same'
            logger.error(mesg)
            raise IndexError(mesg)
        for i in range(len(targetfvalues)):
            outworkspace = outworkspaces[i]
            dsf = self.channelgroup(targetfvalues[i])
            outws = CloneWorkspace(mtd[workspaces[0]], OutputWorkspace=outworkspaces[i])
            dsf.Save(outws)  # overwrite dataY and dataE

#############################################################################################

try:
    import dsfinterp
    AlgorithmFactory.subscribe(DSFinterp)
except:
    logger.debug('Failed to subscribe algorithm DSFinterp; Python package dsfinterp may be missing (https://pypi.python.org/pypi/dsfinterp)')
    pass
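Note: DSFinterp is driven entirely through the properties declared in PyInit above. A minimal usage sketch, with hypothetical workspace names and parameter values (temperatures), assuming the dsfinterp package is installed so the algorithm is registered and callable from mantid.simpleapi:

# Hypothetical call; workspace names and parameter values are made up.
temperatures = [100.0, 150.0, 200.0, 250.0, 300.0, 350.0, 400.0]
inputs = ['sqe_%dK' % int(T) for T in temperatures]
DSFinterp(Workspaces=inputs, ParameterValues=temperatures,
          LocalRegression=True, RegressionWindow=6, RegressionType='quadratic',
          TargetParameters=[175.0, 325.0],
          OutputWorkspaces=['sqe_175K', 'sqe_325K'])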
@@ -59,7 +59,7 @@ def PyExec(self):

if inst == "POLDI":
if ws.getNumberHistograms() == 800:
ws.maskDetectors(SpectraList=range(0,800)[::2])
ws.maskDetectors(SpectraList=range(0,800)[::2])

config.appendDataSearchDir(config['groupingFiles.directory'])
grp_file = "POLDI_Grouping_800to400.xml"
@@ -34,7 +34,7 @@
class LoadVesuvio(PythonAlgorithm):

    def summary(self):
        return "Loads raw data produced by the Vesuvio instrument at ISIS."

    def PyInit(self):
        self.declareProperty(RUN_PROP, "", StringMandatoryValidator(),
@@ -417,9 +417,9 @@ def _sum_foil_periods(self):
        foil_out_periods, foil_thin_periods, foil_thick_periods = self._get_foil_periods()

        if self._nperiods == 6 and self._spectra_type == FORWARD:
            mon_out_periods = (5,6)
            mon_thin_periods = (3,4)
            mon_thick_periods = foil_thick_periods
        else:
            # None indicates same as standard foil
            mon_out_periods, mon_thin_periods, mon_thick_periods = (None,None,None)
@@ -824,7 +824,7 @@ def get_index(self, spectrum_no, foil_state_no):
           (spectrum_no >= 167 and spectrum_no <= 174) or \
           (spectrum_no >= 183 and spectrum_no <= 190):
            # foil_in = 1,3,5, foil out = 2,4,6
            foil_periods = self._odd_even
        else:
            # foil_in = 2,4,6 foil out = 1,3,5
            foil_periods = self._even_odd
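Note: the condition above tests whether the spectrum number falls in one of several fixed ranges that use the opposite foil phase. A compact sketch of the same membership test (only the two ranges visible in this hunk are listed; the helper and its name are hypothetical):

# Sketch of the range check shown above.
_ODD_EVEN_RANGES = [(167, 174), (183, 190)]
def uses_odd_even_foils(spectrum_no):
    return any(lo <= spectrum_no <= hi for lo, hi in _ODD_EVEN_RANGES)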
@@ -178,7 +178,7 @@ def _getEightPackHandle(self,banknum):
            try:
                return self.instrument.getComponentByName("panel"+"%02d" % banknum)[0]
            except:
                return None
        else:
            raise ValueError("Out of range index for "+str(self.instname)+" instrument bank numbers")
    else:
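Note: the component name is built with printf-style zero padding, so bank 3 resolves to the component "panel03". For illustration:

# "%02d" zero-pads the bank number to two digits:
name = "panel" + "%02d" % 3    # 'panel03'
name = "panel" + "%02d" % 12   # 'panel12'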
@@ -4,12 +4,12 @@
from mantid.simpleapi import *

class QueryFlag:
    def isMasked(self, detector, yValue):
        return detector.isMasked()

class QueryValue:
    def isMasked(self, detector, yValue):
        return yValue == 1
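Note: QueryFlag and QueryValue are interchangeable query objects; both expose isMasked(detector, yValue), so the caller can decide masking either from the detector's mask flag or from the workspace value. A sketch of how such a query object could be driven (the loop, mask_ws and use_workspace_values are illustrative assumptions, not code from this commit):

# Hypothetical driver loop for the query-object pattern above.
use_workspace_values = True
query = QueryValue() if use_workspace_values else QueryFlag()
masked_ids = []
for i in range(mask_ws.getNumberHistograms()):
    detector = mask_ws.getDetector(i)
    if query.isMasked(detector, mask_ws.readY(i)[0]):
        masked_ids.append(detector.getID())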

class MaskWorkspaceToCalFile(PythonAlgorithm):

