Reduce pylint warnings. Refs #10945
AndreiSavici committed Feb 18, 2015
1 parent ee87e15 commit d81deca
Showing 135 changed files with 4,105 additions and 4,108 deletions.
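Nearly every hunk below is mechanical cleanup of the kind pylint flags: trailing whitespace stripped, wrapped arguments re-aligned with the opening parenthesis, and one unused wildcard import dropped. The following is a hypothetical, self-contained sketch of that pattern (declare_property is a stand-in helper, not Mantid code); the warning codes in the comments are the usual pylint identifiers for these issues.

# Illustrative only: the kinds of warnings this commit silences.
# C0303 trailing-whitespace, C0330 bad-continuation, W0401 wildcard-import.

def declare_property(name, default, doc):
    """Stand-in for declareProperty, used only to show the call layout."""
    return {"name": name, "default": default, "doc": doc}

# Continuation lines aligned under the opening parenthesis, no trailing
# spaces, and no "from string import *" style imports left in the module.
MONITOR_NORM = declare_property("NoMonitorNorm", False,
                                "Stop monitor normalization")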
@@ -26,31 +26,31 @@ def PyInit(self):
self._short_inst = "BSS"
self._long_inst = "BASIS"
self._extension = "_event.nxs"

self.declareProperty("RunNumbers", "", "Sample run numbers")
self.declareProperty("DoIndividual", False, "Do each run individually")
self.declareProperty("NoMonitorNorm", False,
"Stop monitor normalization")
self.declareProperty("NormRunNumbers", "", "Normalization run numbers")
arrVal = FloatArrayLengthValidator(2)
self.declareProperty(FloatArrayProperty("NormWavelengthRange", DEFAULT_RANGE,
arrVal, direction=Direction.Input),
"Wavelength range for normalization. default:(6.24A, 6.30A)")
self.declareProperty(FloatArrayProperty("EnergyBins", DEFAULT_BINS,
direction=Direction.Input),
"Energy transfer binning scheme (in ueV)")
self.declareProperty(FloatArrayProperty("MomentumTransferBins",
DEFAULT_BINS,
direction=Direction.Input),
"Momentum transfer binning scheme")
self.declareProperty(FileProperty(name="MaskFile", defaultValue="",
action=FileAction.OptionalLoad, extensions=['.xml']),
"Directory location for standard masking and grouping files.")
grouping_type = ["None", "Low-Resolution", "By-Tube"]
self.declareProperty("GroupDetectors", "None",
StringListValidator(grouping_type),
"Switch for grouping detectors")

def PyExec(self):
config['default.facility'] = "SNS"
config['default.instrument'] = self._long_inst
@@ -71,65 +71,65 @@ def PyExec(self):
config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
self._maskFile = DEFAULT_MASK_FILE

api.LoadMask(Instrument='BASIS', OutputWorkspace='BASIS_MASK',
InputFile=self._maskFile)

# Work around length issue
_dMask = api.ExtractMask('BASIS_MASK')
self._dMask = _dMask[1]
api.DeleteWorkspace(_dMask[0])
# Do normalization if run numbers are present
norm_runs = self.getProperty("NormRunNumbers").value
self._doNorm = bool(norm_runs)
self.log().information("Do Norm: " + str(self._doNorm))
if self._doNorm:
if ";" in norm_runs:
raise SyntaxError("Normalization does not support run groups")
# Setup the integration (rebin) parameters
normRange = self.getProperty("NormWavelengthRange").value
self._normRange = [normRange[0], normRange[1]-normRange[0], normRange[1]]
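# The Rebin Params triplet is [x_min, step, x_max], so this defines a single
# wavelength bin spanning the requested normalization window.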

# Process normalization runs
self._norm_run_list = self._getRuns(norm_runs)
for norm_set in self._norm_run_list:

extra_extension = "_norm"
self._normWs = self._makeRunName(norm_set[0])
self._normWs += extra_extension
self._normMonWs = self._normWs + "_monitors"
self._sumRuns(norm_set, self._normWs, self._normMonWs, extra_extension)
self._calibData(self._normWs, self._normMonWs)
api.Rebin(InputWorkspace=self._normWs, OutputWorkspace=self._normWs,
Params=self._normRange)
api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
OutputWorkspace="BASIS_NORM_MASK")
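# Detectors flagged here from the normalization run are stored in
# BASIS_NORM_MASK and masked out of each sample workspace further below.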

self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
for run_set in self._run_list:
self._samWs = self._makeRunName(run_set[0])
self._samMonWs = self._samWs + "_monitors"
self._samWsRun = str(run_set[0])
self._sumRuns(run_set, self._samWs, self._samMonWs)

# After files are all added, run the reduction
self._calibData(self._samWs, self._samMonWs)
if self._doNorm:
api.MaskDetectors(Workspace=self._samWs,
MaskedWorkspace='BASIS_NORM_MASK')
api.Divide(LHSWorkspace=self._samWs, RHSWorkspace=self._normWs,
OutputWorkspace=self._samWs)

api.ConvertUnits(InputWorkspace=self._samWs,
OutputWorkspace=self._samWs,
Target='DeltaE', EMode='Indirect')
api.CorrectKiKf(InputWorkspace=self._samWs,
OutputWorkspace=self._samWs,
EMode='Indirect')
api.Rebin(InputWorkspace=self._samWs,
OutputWorkspace=self._samWs,
Params=self._etBins)
if self._groupDetOpt != "None":
if self._groupDetOpt == "Low-Resolution":
@@ -141,29 +141,29 @@ def PyExec(self):
if self._overrideMask:
config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

api.GroupDetectors(InputWorkspace=self._samWs,
OutputWorkspace=self._samWs,
MapFile=grp_file, Behaviour="Sum")

self._samSqwWs = self._samWs+'_sqw'
api.SofQW3(InputWorkspace=self._samWs,
OutputWorkspace=self._samSqwWs,
QAxisBinning=self._qBins, EMode='Indirect',
EFixed=DEFAULT_ENERGY)
dave_grp_filename = self._makeRunName(self._samWsRun,
False) + ".dat"
api.SaveDaveGrp(Filename=dave_grp_filename,
InputWorkspace=self._samSqwWs,
ToMicroEV=True)
processed_filename = self._makeRunName(self._samWsRun,
False) + "_sqw.nxs"
api.SaveNexus(Filename=processed_filename,
InputWorkspace=self._samSqwWs)

def _getRuns(self, rlist):
"""
Create sets of run numbers for analysis. A semicolon indicates a
separate group of runs to be processed together.
"""
run_list = []
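# Illustrative example: rlist = "2345,2346;2350" gives [[2345, 2346], [2350]]
# when DoIndividual is False and [[2345], [2346], [2350]] when it is True.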
@@ -172,7 +172,7 @@ def _getRuns(self, rlist):
iap = IntArrayProperty("", rlval)
if self._doIndiv:
run_list.extend([[x] for x in iap.value])
else:
run_list.append(iap.value)
return run_list

@@ -184,12 +184,12 @@ def _makeRunName(self, run, useShort=True):
return self._short_inst + "_" + str(run)
else:
return self._long_inst + "_" + str(run)

def _makeRunFile(self, run):
"""
Make name like BSS24234
"""
return self._short_inst + str(run)

def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
for run in run_set:
@@ -198,57 +198,57 @@ def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
ws_name += extra_ext
mon_ws_name = ws_name + "_monitors"
run_file = self._makeRunFile(run)

api.Load(Filename=run_file, OutputWorkspace=ws_name)

if not self._noMonNorm:
api.LoadNexusMonitors(Filename=run_file,
OutputWorkspace=mon_ws_name)
if sam_ws != ws_name:
api.Plus(LHSWorkspace=sam_ws, RHSWorkspace=ws_name,
OutputWorkspace=sam_ws)
api.DeleteWorkspace(ws_name)
if mon_ws != mon_ws_name and not self._noMonNorm:
api.Plus(LHSWorkspace=mon_ws,
RHSWorkspace=mon_ws_name,
OutputWorkspace=mon_ws)
api.DeleteWorkspace(mon_ws_name)

def _calibData(self, sam_ws, mon_ws):
api.LoadInstrument(Workspace=sam_ws,
Filename=os.path.join(DEFAULT_CONFIG_DIR, 'BASIS_Definition_311.xml'))
api.MaskDetectors(Workspace=sam_ws,
DetectorList=self._dMask)
#MaskedWorkspace='BASIS_MASK')
api.ModeratorTzeroLinear(InputWorkspace=sam_ws,
OutputWorkspace=sam_ws)
api.LoadParameterFile(Workspace=sam_ws,
Filename=os.path.join(DEFAULT_CONFIG_DIR, 'BASIS_silicon_311_Parameters.xml'))
api.ConvertUnits(InputWorkspace=sam_ws,
OutputWorkspace=sam_ws,
Target='Wavelength', EMode='Indirect')

if not self._noMonNorm:
api.ModeratorTzeroLinear(InputWorkspace=mon_ws,
OutputWorkspace=mon_ws)
api.Rebin(InputWorkspace=mon_ws,
OutputWorkspace=mon_ws, Params='10')
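# A single Rebin parameter is treated as the bin width over the full data
# range, so Params='10' simply coarsens the monitor spectrum before the
# unit conversion below.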
api.ConvertUnits(InputWorkspace=mon_ws,
OutputWorkspace=mon_ws,
Target='Wavelength')
api.OneMinusExponentialCor(InputWorkspace=mon_ws,
OutputWorkspace=mon_ws,
C='0.20749999999999999',
C1='0.001276')
api.Scale(InputWorkspace=mon_ws,
OutputWorkspace=mon_ws,
Factor='9.9999999999999995e-07')
api.RebinToWorkspace(WorkspaceToRebin=sam_ws,
WorkspaceToMatch=mon_ws,
OutputWorkspace=sam_ws)
api.Divide(LHSWorkspace=sam_ws,
RHSWorkspace=mon_ws,
OutputWorkspace=sam_ws)

# Register algorithm with Mantid.
AlgorithmFactory.subscribe(BASISReduction311)
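The subscribe call above is what exposes the reduction as a regular Mantid algorithm. For reference, a minimal sketch of the same PyInit/PyExec/subscribe pattern; the class name and property here are placeholders, not part of the commit.

from mantid.api import PythonAlgorithm, AlgorithmFactory

class HelloReduction(PythonAlgorithm):
    """Toy algorithm illustrating the registration pattern used above."""

    def PyInit(self):
        # Properties declared here show up in the GUI dialog and in simpleapi.
        self.declareProperty("RunNumbers", "", "Sample run numbers")

    def PyExec(self):
        runs = self.getProperty("RunNumbers").value
        self.log().information("Would reduce runs: " + runs)

# Registration makes the algorithm callable as HelloReduction(...) from scripts.
AlgorithmFactory.subscribe(HelloReduction)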
@@ -25,10 +25,10 @@ def PyInit(self):

instruments = []
for instr in sns.instruments():
for tech in instr.techniques():
if "Neutron Diffraction" == str(tech):
instruments.append(instr.shortName())
break
self.declareProperty("Instrument", "PG3",
StringListValidator(instruments))
validator = IntArrayBoundedValidator()
@@ -67,7 +67,7 @@ def PyInit(self):
self.declareProperty(ITableWorkspaceProperty("FitwindowTableWorkspace", "", Direction.Input, PropertyMode.Optional),
"Name of input table workspace containing the fit window information for each spectrum. ")
self.declareProperty("MinimumPeakHeight", 2., "Minimum value allowed for peak height")
self.declareProperty("MinimumPeakHeightObs", 0.,
"Minimum value of a peak's maximum observed Y value for this peak to be used to calculate offset.")

self.declareProperty(MatrixWorkspaceProperty("DetectorResolutionWorkspace", "", Direction.Input, PropertyMode.Optional),
@@ -232,8 +232,8 @@ def _cccalibrate(self, wksp, calib):
y_s = wksp.readY(s)
midBin = wksp.blocksize()/2
if y_s[midBin] > ymax:
refpixel = s
ymax = y_s[midBin]
self.log().information("Reference spectra=%s" % refpixel)
# Remove old calibration files
cmd = "rm "+calib
@@ -253,7 +253,7 @@ def _cccalibrate(self, wksp, calib):
XMin=-self._ccnumber, XMax=self._ccnumber,
MaxOffset=self._maxoffset, MaskWorkspace=str(wksp)+"mask")
if AnalysisDataService.doesExist(str(wksp)+"cc"):
AnalysisDataService.remove(str(wksp)+"cc")
if self._peakpos2 > 0.0:
wksp = Rebin(InputWorkspace=wksp, OutputWorkspace=wksp.name(),
Params=str(self._peakmin2)+","+str(abs(self._binning[1]))+","+str(self._peakmax2))
@@ -263,8 +263,8 @@ def _cccalibrate(self, wksp, calib):
y_s = wksp.readY(s)
midBin = wksp.blocksize()/2
if y_s[midBin] > ymax:
refpixel = s
ymax = y_s[midBin]
msg = "Reference spectra = %s, lastpixel_3 = %s" % (refpixel, self._lastpixel3)
self.log().information(msg)
self._lastpixel2 = wksp.getNumberHistograms()*self._lastpixel2/self._lastpixel3-1
@@ -294,8 +294,8 @@ def _cccalibrate(self, wksp, calib):
y_s = wksp.readY(s)
midBin = wksp.blocksize()/2
if y_s[midBin] > ymax:
refpixel = s
ymax = y_s[midBin]
self.log().information("Reference spectra=%s" % refpixel)
CrossCorrelate(InputWorkspace=wksp, OutputWorkspace=str(wksp)+"cc3",
ReferenceSpectra=refpixel,
@@ -1,7 +1,6 @@
from mantid.api import PythonAlgorithm, AlgorithmFactory, WorkspaceProperty
import mantid.simpleapi
from mantid.kernel import Direction, logger
from string import *

class CheckForSampleLogs(PythonAlgorithm):
""" Check if certain sample logs exists on a workspace
@@ -38,7 +37,7 @@ def PyExec(self):
for value in logNames.split(','):
value=value.strip()
if len(value)>0:
if not w.run().hasProperty(value):
resultString+='Property '+value+' not found\n'

#return the result
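For orientation, a hedged usage sketch of this algorithm from a Mantid script; the LogNames property and the string result are inferred from the snippet above rather than taken from the algorithm's documentation.

# Hypothetical usage of CheckForSampleLogs; names are assumptions, see note above.
from mantid.simpleapi import CreateSampleWorkspace, CheckForSampleLogs

ws = CreateSampleWorkspace()
result = CheckForSampleLogs(Workspace=ws, LogNames='run_title,duration')
if result:
    print(result)   # one "Property <name> not found" line per missing log
else:
    print("All requested sample logs are present")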