
Refs #11824. More fixes
AndreiSavici committed May 28, 2015
Parent: ec63365. Commit: 6904325.
Showing 11 changed files with 46 additions and 42 deletions.
@@ -8,7 +8,7 @@
from mantid.kernel import Direction

COMPRESS_TOL_TOF = .01

#pylint: disable=too-many-instance-attributes
class CalibrateRectangularDetectors(PythonAlgorithm):

_instrument = None
@@ -208,20 +208,20 @@ def _loadData(self, runnumber, extension, filterWall=None):
@param runnumber: run number (integer)
@param extension: file extension
"""
filter = {}
filterDict = {}
if filterWall is not None:
if filterWall[0] > 0.:
filter["FilterByTimeStart"] = filterWall[0]
filterDict["FilterByTimeStart"] = filterWall[0]
if filterWall[1] > 0.:
filter["FilterByTimeStop"] = filterWall[1]
filterDict["FilterByTimeStop"] = filterWall[1]

if runnumber is None or runnumber <= 0:
return None

if extension.endswith("_event.nxs") or extension.endswith(".nxs.h5"):
wksp = self._loadEventNeXusData(runnumber, extension, **filter)
wksp = self._loadEventNeXusData(runnumber, extension, **filterDict)
else:
wksp = self._loadPreNeXusData(runnumber, extension, **filter)
wksp = self._loadPreNeXusData(runnumber, extension, **filterDict)

if self._filterBadPulses and not self.getProperty("CompressOnRead").value:
wksp = FilterBadPulses(InputWorkspace=wksp, OutputWorkspace=wksp.name())
@@ -231,6 +231,7 @@ def _loadData(self, runnumber, extension, filterWall=None):
Tolerance=COMPRESS_TOL_TOF) # 100ns
return wksp

#pylint: disable=too-many-branches
def _cccalibrate(self, wksp, calib):
if wksp is None:
return None
@@ -366,6 +367,7 @@ def _cccalibrate(self, wksp, calib):

return wksp

#pylint: disable=too-many-branches
def _multicalibrate(self, wksp, calib):
if wksp is None:
return None
@@ -478,6 +480,7 @@ def _focus(self, wksp, dummy_calib):
wksp = Rebin(InputWorkspace=wksp, OutputWorkspace=wksp.name(), Params=self._binning)
return wksp

#pylint: disable=too-many-branches
def PyExec(self):
# get generic information
SUFFIX = self.getProperty("Extension").value
@@ -73,13 +73,11 @@ def PyInit(self):
self.declareProperty("IncidentMediumSelected", "", doc="Incident medium used for those runs")
self.declareProperty("GeometryCorrectionFlag", False, doc="Use or not the geometry correction")

#pylint: disable=too-many-locals
#pylint: disable=too-many-locals, too-many-branches
def PyExec(self):

print '-- > starting new Reflectometer Reduction ...'

import numpy
import math
from reduction.instruments.reflectometer import wks_utility

#remove all previous workspaces
@@ -9,6 +9,7 @@


#--------------------------------------------------------------------------------
#pylint: disable=too-many-instance-attributes
class RefinePowderDiffProfileSeq(PythonAlgorithm):
""" Refine powder diffractometer profile by Le Bail algorithm sequentially
"""
@@ -50,7 +51,8 @@ def PyInit(self):
self.declareProperty(MatrixWorkspaceProperty("InputWorkspace", "", Direction.Input, PropertyMode.Optional),\
"Name of data workspace containing the diffraction pattern in .prf file. ")

self.declareProperty("WorkspaceIndex", 0, "Spectrum (workspace index starting from 0) of the data to refine against in input workspace.")
self.declareProperty("WorkspaceIndex", 0,
"Spectrum (workspace index starting from 0) of the data to refine against in input workspace.")

self.declareProperty(ITableWorkspaceProperty("SeqControlInfoWorkspace", "", Direction.InOut, PropertyMode.Optional),\
"Name of table workspace containing sequential refinement information.")
@@ -203,8 +205,8 @@ def _processInputProperties(self):
#--------------------------------------------------------------------
#
#--------------------------------------------------------------------

class SeqRefineProfile:
#pylint: disable=too-many-instance-attributes
class SeqRefineProfile(object):
""" A class to do sequential refinement on peak profile
Use case:
@@ -264,7 +266,7 @@ def __init__(self, ID, glog):

return


#pylint: disable=too-many-arguments
def initSetup(self, dataws, wsindex, peaktype, profilews, braggpeakws, bkgdtype, bkgdparws, startx, endx):
""" Set up the properties for LeBailFit as the first time including
do a Le bail calculation based on the input parameters
@@ -359,6 +361,7 @@ def loadProject(self, projectfilename):

return

#pylint: disable=too-many-arguments
def refine(self, dataws, wsindex, parametersToFit, numcycles, startx, endx, laststepindex):
""" Refine parameters
"""
@@ -601,7 +604,7 @@ def _recordPostRefineInfo(self, refiner):

# Set the record table workspace
rectablews = mtd[self._recordwsname]
numrows = rectablews.rowCount()
#numrows = rectablews.rowCount()
# currstep = numrows-1

rectablews.setCell(self._currstep, 1, str(refiner.outprofilewsname))
@@ -673,7 +676,6 @@ def generateMCSetupTableProf9(wsname):
def generateMCSetupTableProf10(wsname):
""" Generate a Le Bail fit Monte Carlo random walk setup table
"""
import mantid.simpleapi as api

tablews = api.CreateEmptyTableWorkspace(OutputWorkspace=str(wsname))

@@ -744,8 +746,8 @@ def resetParametersGroups(tablews):
tablews.setCell(ir, 4, group)
return


class RefineProfileParameters:
#pylint: disable=too-many-instance-attributes
class RefineProfileParameters(object):
""" Class to refine profile parameters ONE step
"""

@@ -779,6 +781,7 @@ def __init__(self, glog):

return

#pylint: disable=too-many-arguments
def setInputs(self, datawsname, peaktype, profilewsname, braggpeakwsname, bkgdtype, bkgdparwsname):
"""
"""
@@ -35,6 +35,7 @@ class ElasticWindowMultiple(DataProcessorAlgorithm):
_range_2_start = None
_range_2_end = None
_mtd_plot = None
_sample_log_value = ''

def category(self):
return 'Workflow\\Inelastic;PythonAlgorithms;Inelastic'
@@ -324,8 +325,7 @@ def _get_temperature(self, ws_name):
# Look for temperature in logs in workspace
tmp = run[self._sample_log_name].value
value_action = {'last value': lambda x: x[len(x)-1],
'average': lambda x: x.mean()
}
'average': lambda x: x.mean()}
temp = value_action[self._sample_log_value](tmp)
logger.debug('Temperature %d K found for run: %s' % (temp, run_name))
return temp
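
Note: the reflowed value_action dict in _get_temperature dispatches on the sample-log option string. A minimal sketch of the same pattern using a plain Python list and made-up values:

# Hypothetical sketch: selecting a reduction of a log series via a dict of callables.
log_values = [295.0, 296.5, 298.0]

value_action = {'last value': lambda x: x[len(x) - 1],
                'average': lambda x: sum(x) / float(len(x))}

temp_last = value_action['last value'](log_values)     # 298.0
temp_mean = value_action['average'](log_values)        # 296.5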
@@ -1,4 +1,4 @@
#pylint: disable=invalid-name,attribute-defined-outside-init,too-many-instance-attributes,too-many-branches
#pylint: disable=invalid-name,attribute-defined-outside-init,too-many-instance-attributes,too-many-branches,no-init,deprecated-module
from mantid.kernel import *
from mantid.api import *
from mantid.simpleapi import *
@@ -732,7 +732,7 @@ def _rename_workspace(self, ws_name):
try:
short_inst_name = facility.instrument(inst_name).shortName()
break
except:
except RuntimeError:
pass
logger.information('Short name for instrument %s is %s' % (inst_name, short_inst_name))

@@ -86,7 +86,7 @@ def PyInit(self):
optional=PropertyMode.Optional),
doc='The corrections workspace for scattering and absorptions in sample.')


#pylint: disable=too-many-branches
def PyExec(self):
from IndirectCommon import getEfixed

@@ -88,8 +88,6 @@ def _validate_range(self, property_name):


def PyExec(self):
from mantid import logger
from IndirectCommon import getInstrRun

self._setup()

@@ -190,13 +188,11 @@ def _add_logs(self):
"""

# Add sample logs to output workspace
sample_logs = [
('calib_peak_min', self._peak_range[0]),
('calib_peak_max', self._peak_range[1]),
('calib_back_min', self._back_range[0]),
('calib_back_max', self._back_range[1]),
('calib_run_numbers', ','.join(self._run_numbers))
]
sample_logs = [('calib_peak_min', self._peak_range[0]),
('calib_peak_max', self._peak_range[1]),
('calib_back_min', self._back_range[0]),
('calib_back_max', self._back_range[1]),
('calib_run_numbers', ','.join(self._run_numbers))]

if self._intensity_scale is not None:
sample_logs.append(('calib_scale_factor', self._intensity_scale))
@@ -126,10 +126,8 @@ def _post_process(self):
Handles adding logs, saving and plotting.
"""

sample_logs = [
('res_back_start', self._background[0]),
('res_back_end', self._background[1])
]
sample_logs = [('res_back_start', self._background[0]),
('res_back_end', self._background[1])]

if self._scale_factor != 1.0:
sample_logs.append(('res_scale_factor', self._scale_factor))
@@ -90,14 +90,14 @@ def load_processed(self, output_dir):
for item in os.listdir(output_dir):
if item.endswith('.txt') and \
(len(filter_string)==0 or item.find(filter_string)>=0):
basename, ext = os.path.splitext(item)
basename, _ = os.path.splitext(item)
Load(Filename=os.path.join(output_dir, item), OutputWorkspace=basename)
(_name,_ts) = basename.split('_#')
CloneWorkspace(InputWorkspace=basename, OutputWorkspace=_name)


def stitch_data(self, input_file, output_dir, q_min, q_step):
from LargeScaleStructures.data_stitching import DataSet, Stitcher, RangeSelector
from LargeScaleStructures.data_stitching import DataSet, Stitcher#, RangeSelector
# Identify the data sets to stitch and order them
workspace_list = []
_list_name = []
@@ -120,7 +120,7 @@ def stitch_data(self, input_file, output_dir, q_min, q_step):
for item in workspace_list:
data = DataSet(item)
data.load(True, True)
x_min, x_max = data.get_range()
dummy_x_min, x_max = data.get_range()
if x_max > q_max:
q_max = x_max
s.append(data)
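
Note: the basename, _ and dummy_x_min renames silence pylint's unused-variable check without discarding the tuple unpacking; the accepted names are configurable via pylint's dummy-variables-rgx option. A minimal sketch with a made-up file name and values:

# Hypothetical sketch: name values you deliberately ignore '_' or give them a dummy_ prefix
# so the unused-variable check stays quiet for them only.
import os

basename, _ = os.path.splitext('REF_L_1234_#5.txt')     # extension intentionally ignored
dummy_x_min, x_max = (0.0012, 0.42)                      # only the upper edge of the range is used
q_max = max(0.0, x_max)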
@@ -37,7 +37,9 @@ def PyInit(self):

self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", "", Direction.Output), "Output workspace")

#pylint: disable= too-few-public-methods
class DataFile(object):
#pylint: disable= too-many-arguments
def __init__(self, workspace, monitor, empty, empty_monitor, is_scan=False, max_index=1):
self.workspace = workspace
self.monitor = monitor
@@ -155,8 +157,10 @@ def _process_data_file(self, file_info, index_offset):
self.iq_output[i_wl][point+index_offset] = i_q.dataY(0)[0]
self.iq_err_output[i_wl][point+index_offset] = i_q.dataE(0)[0]
except:
Logger("USANSReduction").error("Exception caught for %s on peak %s, point %s. Offset=%s" % (file_info.workspace, i_wl, point, index_offset))
Logger("USANSReduction").error("Array: %s x %s Data: %s" % (len(self.wl_list), self.total_points, file_info.max_index))
Logger("USANSReduction").error("Exception caught for "+\
"%s on peak %s, point %s. Offset=%s" % (file_info.workspace, i_wl, point, index_offset))
Logger("USANSReduction").error("Array: "+
"%s x %s Data: %s" % (len(self.wl_list), self.total_points, file_info.max_index))
Logger("USANSReduction").error(sys.exc_value)
return file_info.max_index

@@ -228,6 +232,7 @@ def compare(p1,p2):
OutputWorkspace=output_ws_name)
self.setProperty("OutputWorkspace", out_ws)

#pylint: disable=too-many-arguments
def _get_intensity(self, sample, empty, sample_monitor, empty_monitor, tof_min, tof_max):
# Number of pixels we are dealing with
nspecs = sample.getNumberHistograms()
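
Note: the Logger.error calls above are split with '+' and a line continuation purely to satisfy the line-length limit; the result is unchanged because '%' binds tighter than '+'. A minimal sketch with made-up values showing two equivalent ways to split such a message:

# Hypothetical sketch: both forms build the same string while staying under the line limit.
workspace, i_wl, point, offset = 'ws_1', 3, 7, 0

msg_concat = "Exception caught for " + \
             "%s on peak %s, point %s. Offset=%s" % (workspace, i_wl, point, offset)

msg_implicit = ("Exception caught for "
                "%s on peak %s, point %s. Offset=%s" % (workspace, i_wl, point, offset))

assert msg_concat == msg_implicit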
@@ -540,6 +540,7 @@ def sumWithError(self, peak, peak_error):
sum_peak_error = math.np.sqrt(sum_peak_error)
return [sum_peak, sum_peak_error]

#pylint: disable=unused-argument
def _removeBackground(self,
InputWorkspace=None,
from_peak= 0,
@@ -1157,7 +1158,7 @@ def getSlitsValueAndLambda(full_list_runs,
_lambda_value = getLambdaValue(tmpWks)
lambdaRequest[i] = _lambda_value

def isRunsSorted(list_runs, S1H, S2H):
def isRunsSorted(dummy_list_runs, S1H, S2H):
"""
Make sure the files have been sorted
"""
@@ -1353,7 +1354,7 @@ def calculate(string_runs=None,\

#array of index of first attenuator
_index_first_A = []
for j in range(len(np.unique(list_attenuator))):
for dummy_j in range(len(np.unique(list_attenuator))):
_index_first_A.append(-1)

index_numerator = -1