refs #6667 Presumably created hardMaskOnly mode in diagnose
Removed a couple of small test clauses.
abuts committed Jul 5, 2013
1 parent ee8b3ad commit 596aa80
Showing 4 changed files with 116 additions and 127 deletions.
10 changes: 1 addition & 9 deletions Code/Mantid/instrument/MAPS_Parameters.xml
@@ -278,6 +278,7 @@
<parameter name="use_sam_msk_on_monovan" type = "bool">
<value val="False"/>
</parameter>

<!-- if this value is provided (not None) it is a string representation of the number used instead of calculating the mono-vanadium based normalization factor;
one does not need to provide a mono-vanadium run if this value is provided, as it will be used instead
-->
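The parameter described above short-circuits the absolute-units normalization. A minimal sketch of how a reducer might honour it; the helper name calculate_monovan_factor and the attribute access pattern are assumptions for illustration, not the Mantid implementation:

# Illustrative sketch only -- not the Mantid implementation.
def absolute_norm_factor(reducer, monovan_run):
    # the factor is stored as a string representation of a number
    if reducer.mono_correction_factor is not None:
        return float(reducer.mono_correction_factor)
    # otherwise derive it from the mono-vanadium run (hypothetical helper)
    return reducer.calculate_monovan_factor(monovan_run)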
@@ -322,15 +323,6 @@
<value val="True"/>
</parameter>

<!-- # Try to keep and maintain the list of the integrals/operations which can be reused a number of times -->
<parameter name="cash_reusable_parameters" type="bool">
<value val="True"/>
</parameter>
<!-- # Save intermediate workspaces/integrals for debugging purposes -->
<parameter name="do_checkpoint_savings" type="bool">
<value val="True"/>
</parameter>


<!-- List of the words which can be used as command line arguments to define reducer keywords;
the form is reducer_keyword1=synonym1=synonym2=synonym3;reducer_keyword1=synonym1a, so,
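A minimal sketch of parsing the synonym string described in the comment above, assuming each synonym maps back to the reducer keyword that opens its entry (the mapping direction and the example values are assumptions):

# Illustrative sketch only: "kw1=syn1=syn2;kw2=syn2a" -> {syn1: kw1, syn2: kw1, syn2a: kw2}
def parse_synonyms(spec):
    lookup = {}
    for entry in spec.split(';'):
        names = [n.strip() for n in entry.split('=') if n.strip()]
        if not names:
            continue
        keyword = names[0]
        for alias in names[1:]:
            lookup[alias] = keyword
    return lookup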
31 changes: 13 additions & 18 deletions Code/Mantid/scripts/Inelastic/DirectEnergyConversion.py
@@ -183,9 +183,6 @@ def diagnose(self, white, **kwargs):
DeleteWorkspace(Workspace=kwargs['second_white'])
# Return a mask workspace
diag_mask, det_ids = ExtractMask(InputWorkspace=whiteintegrals,OutputWorkspace=var_name)
if self.do_checkpoint_savings :
file_name = self.make_ckpt_name('white_integrals_masked',white)
SaveNexus(whiteintegrals,file_name)

DeleteWorkspace(Workspace=whiteintegrals)
self.spectra_masks = diag_mask
@@ -226,11 +223,8 @@ def do_white(self, white_run, spectra_masks, map_file,mon_number=None):
white_ws = self.remap(white_ws, spectra_masks, map_file)

# White beam scale factor
white_ws *= self.wb_scale_factor
if self.do_checkpoint_savings :
result_fileName=self.make_ckpt_name('do_white',white_run, spectra_masks, map_file,mon_number)
SaveNexus(white_ws,result_fileName +'.nxs')

white_ws *= self.wb_scale_factor
self.workspaces_list['white_ws'] = white_ws
return white_ws

def mono_van(self, mono_van, ei_guess, white_run=None, map_file=None,
@@ -248,6 +242,7 @@ def mono_van(self, mono_van, ei_guess, white_run=None, map_file=None,
white_run, map_file, spectra_masks, Tzero)
# Normalize by vanadium sample weight
monovan /= float(self.van_mass)/float(self.__van_rmm)
self.workspaces_list['monovan_ws'] = monovan
return monovan

def mono_sample(self, mono_run, ei_guess, white_run=None, map_file=None,
@@ -261,8 +256,9 @@ def mono_sample(self, mono_run, ei_guess, white_run=None, map_file=None,
if result_name is None:
result_name = common.create_resultname(mono_run, prefix=self.instr_name)

return self._do_mono(sample_data, sample_data, result_name, ei_guess,
self.workspaces_list['sample_ws']=self._do_mono(sample_data, sample_data, result_name, ei_guess,
white_run, map_file, spectra_masks, Tzero)
return self.workspaces_list['sample_ws']


# -------------------------------------------------------------------------------------------
@@ -575,9 +571,7 @@ def remap(self, result_ws, spec_masks, map_file):
Mask and group detectors based on input parameters
"""
if not spec_masks is None:
MaskDetectors(Workspace=result_ws, MaskedWorkspace=spec_masks)
print " check workspace masks for ws: ",result_ws.name()
ar = raw_input("Enter something to continue: ")
MaskDetectors(Workspace=result_ws, MaskedWorkspace=spec_masks)
if not map_file is None:
result_ws = GroupDetectors(InputWorkspace=result_ws,OutputWorkspace=result_ws,
MapFile= map_file, KeepUngroupedSpectra=0, Behaviour='Average')
@@ -728,14 +722,14 @@ def save_results(self, workspace, save_path, formats = None):
# if ext is none, no need to write anything
if len(ext) == 1 and ext[0] == None :
return

self.psi = 1000000; # for test
save_path = os.path.splitext(save_path)[0]
for ext in formats:
if ext in self.__save_formats :
filename = save_path + ext
self.__save_formats[ext](workspace,filename)
else:
self.log('Unknown file format "{0} requested while saving results.'.format(ext))
self.log("Unknown file format {0} requested while saving results.".format(ext))


#-------------------------------------------------------------------------------
@@ -821,18 +815,19 @@ def init_idf_params(self, reload_instrument=False):

## Detector diagnosis
# Diag parameters -- keys used by diag method to pick from default parameters. Diag cuts these keys removing diag_ word
# and picks correspondent parameters
# and tries to get the rest from the corresponding dgreduce attributes
self.__diag_params = ['diag_tiny', 'diag_huge', 'diag_samp_zero', 'diag_samp_lo', 'diag_samp_hi','diag_samp_sig',\
'diag_van_out_lo', 'diag_van_out_hi', 'diag_van_lo', 'diag_van_hi', 'diag_van_sig', 'diag_variation',\
'diag_bleed_test','diag_bleed_pixels','diag_bleed_maxrate','diag_hard_mask','diag_bkgd_range']
'diag_bleed_test','diag_bleed_pixels','diag_bleed_maxrate','diag_hard_mask','diag_use_hard_mask_only','diag_bkgd_range']
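A minimal sketch of the diag_ key handling the comment above describes, stripping the prefix and looking the remainder up on the reducer; the function name and the attribute lookup are assumptions for illustration:

# Illustrative sketch only -- not the production diag code.
def collect_diag_defaults(reducer, diag_params):
    defaults = {}
    for key in diag_params:
        name = key[len('diag_'):]        # e.g. 'diag_van_lo' -> 'van_lo'
        if hasattr(reducer, name):
            defaults[name] = getattr(reducer, name)
    return defaults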

self.__normalization_methods=['none','monitor-1','current'] # 'monitor-2','uamph', peak -- disabled/unknown at the moment

# list of the parameters which should usually be changed by the user and, if not, the user should be warned about it.
self.__abs_units_par_to_change=['sample_mass','sample_rmm']

# the list of the reduction parameters which can be used a number of times
self.__reusable_parameters = {}
# the list of the workspaces used by the reducer
self.workspace_list = {}

# mandatory command line parameter
self.energy_bins = None

68 changes: 35 additions & 33 deletions Code/Mantid/scripts/Inelastic/dgreduce.py
@@ -235,48 +235,50 @@ def arb_units(wb_run,sample_run,ei_guess,rebin,map_file=None,monovan_run=None,**
#-------------------------------------------------------------------------------------------------------------------------------------------------------
# Here we give control to the Reducer
# --------------------------------------------------------------------------------------------------------
# diag the sample and detector vanadium
if Reducer.use_hard_mask_only: # if it is string, it is treated as bool
totalmask = Reducer.hard_mask

Reducer.log(' Using hardmask only from: '+totalmask)
#Return masking workspace
masking = LoadMask(Instrument=Reducer.instr_name,InputFile=Reducer.hard_mask)
mask_workspace(wb_run,masking)
mask_workspace(sample_run,masking)
else:
masking = Reducer.diagnose(wb_run,sample = mask_run,
# diag the sample and detector vanadium. It will deal with hard mask only if it is set that way
masking = Reducer.diagnose(wb_run,sample = mask_run,
second_white = None,variation=1.1,print_results=True)
if(mask_run!=sample_run) :
copy_masks(mask_run,samle_run)

# Calculate absolute units:
if monovan_run != None and Reducer.mono_correction_factor == None :

if Reducer.use_sam_msk_on_monovan == True or Reducer.use_hard_mask_only:
Reducer.log(' Applying sample run mask to mono van ')
mask_workspace(monovan_run,masking)
if wb_for_monovanadium != wb_run:
mask_workspace(monovan_run,masking)
else:
print '########### Run diagnose for monochromatic vanadium run ##############'

masking2 = Reducer.diagnose(wb_for_monovanadium,sample=monovan_run,
# Calculate absolute units:
if monovan_run != None :
if Reducer.mono_correction_factor == None :
if Reducer.use_sam_msk_on_monovan == True:
Reducer.log(' Applying sample run mask to mono van NOT IMPLEMENTED')
#TODO:
#monovan_ws=common.load_run(monovan_run)
#MaskDetectors(Workspace=monovan_ws, MaskedWorkspace=masking)
#if wb_for_monovanadium != wb_run:
# wb_for_monovan_ws=common.load_run(wb_for_monovanadium)
# MaskDetectors(Workspace=wb_for_monovan_ws, MaskedWorkspace=masking)
else:
print '########### Run diagnose for monochromatic vanadium run ##############'
masking2 = Reducer.diagnose(wb_for_monovanadium,sample=monovan_run,
second_white = None,variation=1.1,print_results=True)

masking=masking+masking2

if wb_for_monovanadium != wb_run:
pass
# combine monovan_run and sample_run masks
#TODO
#MaskDetectors(Workspace=monovan_run,MaskedWorkspace=masking)
# MaskDetectors(Workspace=sample_run,MaskedWorkspace=masking2)
else: # masks have already been combined through common wb_run
pass


# end monovan diagnosis
Reducer.spectra_masks=masking


else: # if Reducer.mono_correction_factor != None :
pass

# estimate and report the number of failing detectors
failed_sp_list,nSpectra = get_failed_spectra_list_from_masks(masking)
failed_sp_list,nSpectra = get_failed_spectra_list_from_ws(masking)
nMaskedSpectra = len(failed_sp_list)
print 'Diag processed workspace with {0:d} spectra and found {1:d} bad spectra'.format(nSpectra,nMaskedSpectra)




#Run the conversion first on the sample
#Run the conversion first on the sample
deltaE_wkspace_sample = Reducer.convert_to_energy(sample_run, ei_guess, wb_run)


# calculate absolute units integral and apply it to the workspace
if monovan_run != None or Reducer.mono_correction_factor != None :
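The reporting step above counts the bad spectra recorded in the diag mask workspace. A minimal sketch of how such a count could be obtained with the Mantid Python API; this is an illustration, not the dgreduce helper itself:

# Illustrative sketch only.
def count_failed_spectra(masking_ws):
    failed = []
    n_spectra = masking_ws.getNumberHistograms()
    for index in range(n_spectra):
        try:
            if masking_ws.getDetector(index).isMasked():
                failed.append(index)
        except RuntimeError:
            continue  # spectrum with no detector attached -- skip it
    return failed, n_spectra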
134 changes: 67 additions & 67 deletions Code/Mantid/scripts/Inelastic/diagnostics.py
@@ -70,82 +70,82 @@ def diagnose(white_int, **kwargs):
LoadMask(Instrument=kwargs.get('instrument_name',''),InputFile=parser.hard_mask,
OutputWorkspace='hard_mask_ws')
MaskDetectors(Workspace=white_int, MaskedWorkspace='hard_mask_ws')
print 'check masks for workspace: ',white_int.name()
var = raw_input("Enter something to continue: ")
# Find out how many detectors we hard masked
_dummy_ws,masked_list = ExtractMask(InputWorkspace='hard_mask_ws')
DeleteWorkspace('_dummy_ws')
test_results[0][0] = os.path.basename(parser.hard_mask)
test_results[0][1] = len(masked_list)

# White beam Test
__white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge,
parser.van_out_lo, parser.van_out_hi,
parser.van_lo, parser.van_hi,
parser.van_sig, start_index, end_index)
test_results[1] = [str(__white_masks), num_failed]
add_masking(white_int, __white_masks, start_index, end_index)
DeleteWorkspace(__white_masks)

# Second white beam test
if 'second_white' in kwargs:
__second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,
parser.van_out_lo, parser.van_out_hi,
parser.van_lo, parser.van_hi, parser.variation,
parser.van_sig, start_index, end_index)
test_results[2] = [str(__second_white_masks), num_failed]
add_masking(white_int, __second_white_masks, start_index, end_index)

#
# Zero total count check for sample counts
#
zero_count_failures = 0
if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False):
add_masking(parser.sample_counts, white_int)
maskZero, zero_count_failures = FindDetectorsOutsideLimits(InputWorkspace=parser.sample_counts,
StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,
if not kwargs.get('hard_mask_only', False):
# White beam Test
__white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge,
parser.van_out_lo, parser.van_out_hi,
parser.van_lo, parser.van_hi,
parser.van_sig, start_index, end_index)
test_results[1] = [str(__white_masks), num_failed]
add_masking(white_int, __white_masks, start_index, end_index)
DeleteWorkspace(__white_masks)

# Second white beam test
if 'second_white' in kwargs:
__second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,
parser.van_out_lo, parser.van_out_hi,
parser.van_lo, parser.van_hi, parser.variation,
parser.van_sig, start_index, end_index)
test_results[2] = [str(__second_white_masks), num_failed]
add_masking(white_int, __second_white_masks, start_index, end_index)

#
# Zero total count check for sample counts
#
zero_count_failures = 0
if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False):
add_masking(parser.sample_counts, white_int)
maskZero, zero_count_failures = FindDetectorsOutsideLimits(InputWorkspace=parser.sample_counts,
StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,
LowThreshold=1e-10, HighThreshold=1e100)
add_masking(white_int, maskZero, start_index, end_index)
DeleteWorkspace(maskZero)

#
# Background check
#
if hasattr(parser, 'background_int'):
add_masking(parser.background_int, white_int)
__bkgd_mask, failures = do_background_test(parser.background_int, parser.samp_lo,
parser.samp_hi, parser.samp_sig, parser.samp_zero, start_index, end_index)
test_results[3] = [str(__bkgd_mask), zero_count_failures + failures]
add_masking(white_int, __bkgd_mask, start_index, end_index)
DeleteWorkspace(__bkgd_mask)
add_masking(white_int, maskZero, start_index, end_index)
DeleteWorkspace(maskZero)

#
# Background check
#
if hasattr(parser, 'background_int'):
add_masking(parser.background_int, white_int)
__bkgd_mask, failures = do_background_test(parser.background_int, parser.samp_lo,
parser.samp_hi, parser.samp_sig, parser.samp_zero, start_index, end_index)
test_results[3] = [str(__bkgd_mask), zero_count_failures + failures]
add_masking(white_int, __bkgd_mask, start_index, end_index)
DeleteWorkspace(__bkgd_mask)

#
# Bleed test
#
if hasattr(parser, 'bleed_test') and parser.bleed_test:
if not hasattr(parser, 'sample_run'):
raise RuntimeError("Bleed test requested but the sample_run keyword has not been provided")
__bleed_masks, failures = do_bleed_test(parser.sample_run, parser.bleed_maxrate, parser.bleed_pixels)
test_results[4] = [str(__bleed_masks), failures]
add_masking(white_int, __bleed_masks)
DeleteWorkspace(__bleed_masks)
#
# Bleed test
#
if hasattr(parser, 'bleed_test') and parser.bleed_test:
if not hasattr(parser, 'sample_run'):
raise RuntimeError("Bleed test requested but the sample_run keyword has not been provided")
__bleed_masks, failures = do_bleed_test(parser.sample_run, parser.bleed_maxrate, parser.bleed_pixels)
test_results[4] = [str(__bleed_masks), failures]
add_masking(white_int, __bleed_masks)
DeleteWorkspace(__bleed_masks)

if hasattr(parser, 'print_results') and parser.print_results:
start_index_name = "from: start"
default=True
if 'start_index' in kwargs:
default = False
start_index_name = "from: "+str(kwargs['start_index'])
end_index_name=" to: end"
if 'end_index' in kwargs :
default = False
end_index_name = " to: "+str(kwargs['end_index'])

testName=start_index_name+end_index_name
if not default :
testName = " For bank: "+start_index_name+end_index_name

print_test_summary(test_results,testName)
if hasattr(parser, 'print_results') and parser.print_results:
start_index_name = "from: start"
default=True
if 'start_index' in kwargs:
default = False
start_index_name = "from: "+str(kwargs['start_index'])
end_index_name=" to: end"
if 'end_index' in kwargs :
default = False
end_index_name = " to: "+str(kwargs['end_index'])
# endif not hard_mask_only

testName=start_index_name+end_index_name
if not default :
testName = " For bank: "+start_index_name+end_index_name

print_test_summary(test_results,testName)

#-------------------------------------------------------------------------------

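The net effect of the restructuring above: the hard mask is always applied, and when hard_mask_only is requested the white-beam, background and bleed tests are skipped altogether. A minimal sketch of that control flow, with hypothetical helper names:

# Illustrative sketch only -- helper names are invented for clarity.
def diagnose_sketch(white_int, hard_mask_file=None, hard_mask_only=False, **kwargs):
    if hard_mask_file is not None:
        apply_hard_mask(white_int, hard_mask_file)        # hypothetical helper
    if hard_mask_only:
        return white_int                                  # skip all statistical tests
    run_white_beam_tests(white_int, **kwargs)             # hypothetical helpers
    run_background_and_bleed_tests(white_int, **kwargs)
    return white_int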
