-
Notifications
You must be signed in to change notification settings - Fork 122
/
DirectEnergyConversion.py
1450 lines (1214 loc) · 65.1 KB
/
DirectEnergyConversion.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
Conversion class defined for conversion to deltaE for
'direct' inelastic geometry instruments
The class defines various methods to allow users to convert their
files to DeltaE.
Example:
Assuming we have the following data files for MARI.
NOTE: This assumes that the data path for these runs is in the
Mantid preferences.
mono-sample: 11015
white: 11060
mono-van: 11001
with sample mass 10g and RMM 435.96
reducer = DirectEnergyConversion('MARI')
# Alter defaults if necessary
reducer.normalise_method = 'monitor-2'
reducer.background = False
reducer.fix_ei = True
reducer.save_format = ['.spe']
# Set parameters for these runs
reducer.map_file = 'mari_res.map'
reducer.energy_bins = '-10,0.1,80'
# Run the conversion
deltaE_wkspace = reducer.convert_to_energy(11015, 85, 11060, 11001)
"""
import CommonFunctions as common
import diagnostics
from mantid.simpleapi import *
from mantid.kernel import funcreturns
from mantid import api
import glob
import sys
import os.path
import math
def setup_reducer(inst_name):
    """
    Given an instrument name or prefix this sets up a converter
    object for the reduction.

    Raises RuntimeError (with the underlying cause appended) if the
    instrument is unknown or its IDF file cannot be used.
    """
    try:
        return DirectEnergyConversion(inst_name)
    except RuntimeError as exc:
        # 'as' form replaces the Python-2-only 'except X, e' syntax and is
        # valid on 2.6+; keep the original error text so the root cause of
        # the failure is not lost
        raise RuntimeError('Unknown instrument "%s" or wrong IDF file for this instrument, cannot continue: %s'
                           % (inst_name, str(exc)))
class DirectEnergyConversion(object):
"""
Performs a convert to energy assuming the provided instrument is an elastic instrument
"""
def diagnose(self, white, **kwargs):
    """
    Run diagnostics on the provided workspaces.

    This method does some additional processing before moving on to the diagnostics:
      1) Computes the white beam integrals, converting to energy
      2) Computes the background integral using the instrument defined range
      3) Computes a total count from the sample
    These inputs are passed to the diagnostics functions.

    Required inputs:
      white  - A workspace, run number or filepath of a white beam run. A workspace
               is assumed to have simply been loaded and nothing else.

    Optional inputs:
      sample - A workspace, run number or filepath of a sample run (default = None)
      second_white - If provided an additional set of tests is performed on this (default = None)
      hard_mask - A file specifying spectra that should be masked without testing (default = None)
      tiny - Minimum threshold for acceptance (default = 1e-10)
      huge - Maximum threshold for acceptance (default = 1e10)
      background_test_range - Two numbers indicating the background range (default = instrument defaults)
      van_out_lo - Lower bound defining outliers as fraction of median value (default = 0.01)
      van_out_hi - Upper bound defining outliers as fraction of median value (default = 100.)
      van_lo - Fraction of median to consider counting low for the white beam diag (default = 0.1)
      van_hi - Fraction of median to consider counting high for the white beam diag (default = 1.5)
      van_sig - Error criterion as a multiple of error bar, i.e. to fail the test the magnitude
                of the difference with respect to the median value must also exceed this number
                of error bars (default = 0.0)
      samp_zero - If true then zeroes in the vanadium data will count as failed (default = True)
      samp_lo - Fraction of median to consider counting low for the sample diag (default = 0)
      samp_hi - Fraction of median to consider counting high for the sample diag (default = 2.0)
      samp_sig - Error criterion as a multiple of error bar for the sample test (default = 3.3)
      variation - The number of medians the ratio of the first/second white beam can deviate
                  from the average by (default = 1.1)
      bleed_test - If true then the CreatePSDBleedMask algorithm is run
      bleed_maxrate - If the bleed test is on, the maximum framerate allowed in a tube
      bleed_pixels - If the bleed test is on, the number of pixels ignored within the
                     bleed test diagnostic
      print_results - If True then the results are printed to the screen

    Returns a mask workspace with the diagnosed bad detectors masked.
    """
    # Name the returned mask after the caller's assignment target when possible
    lhs_names = funcreturns.lhs_info('names')
    if len(lhs_names) > 0:
        var_name = lhs_names[0]
    else:
        var_name = "diag_mask"
    # Check for any keywords that have not been supplied and put in the values set on reducer
    for par in self.__diag_params:
        # BUG FIX: the original used par.lstrip('diag_'), but str.lstrip
        # strips a leading run of *characters* from the set {d,i,a,g,_},
        # which mangles any property whose name (after 'diag_') starts with
        # one of those characters.  Remove exactly the 'diag_' prefix.
        if par.startswith('diag_'):
            arg = par[len('diag_'):]
        else:
            arg = par
        if arg not in kwargs:
            kwargs[arg] = getattr(self, arg)
    # if input parameter is workspace rather then run, we want to keep it
    self._keep_wb_workspace = False
    if isinstance(white, str) and white in mtd:
        self._keep_wb_workspace = True
    if isinstance(white, api.Workspace):  # it is workspace itself
        self._keep_wb_workspace = True
    # If we have a hard_mask, check the instrument name is defined
    if 'hard_mask_file' in kwargs:
        if 'instrument_name' not in kwargs:
            kwargs['instrument_name'] = self.instr_name
    if kwargs['use_hard_mask_only']:
        if mtd.doesExist('hard_mask_ws'):
            diag_mask = mtd['hard_mask_ws']
        else:  # build hard mask
            # in this peculiar way we can obtain working mask which accounts for initial data grouping in the data file.
            # SNS or 1 to 1 maps may probably avoid this stuff and can load masks directly
            whitews_name = common.create_resultname(white, suffix='-white')
            if whitews_name in mtd:
                DeleteWorkspace(Workspace=whitews_name)
            # Load
            white_data = self.load_data(white, whitews_name, self._keep_wb_workspace)
            diag_mask = LoadMask(Instrument=self.instr_name, InputFile=kwargs['hard_mask_file'],
                                 OutputWorkspace='hard_mask_ws')
            MaskDetectors(Workspace=white_data, MaskedWorkspace=diag_mask)
            diag_mask, masked_list = ExtractMask(InputWorkspace=white_data)
            DeleteWorkspace(Workspace=whitews_name)
        return diag_mask
    # Get the white beam vanadium integrals
    whiteintegrals = self.do_white(white, None, None, None)  # No grouping yet
    if 'second_white' in kwargs:
        second_white = kwargs['second_white']
        if second_white is None:
            del kwargs['second_white']
        else:
            other_whiteintegrals = self.do_white(second_white, None, None, None)  # No grouping yet
            kwargs['second_white'] = other_whiteintegrals
    # Get the background/total counts from the sample if present
    if 'sample' in kwargs:
        sample = kwargs['sample']
        del kwargs['sample']
        # If the bleed test is requested then we need to pass in the sample_run as well
        if kwargs.get('bleed_test', False):
            kwargs['sample_run'] = sample
        # Set up the background integrals
        result_ws = common.load_runs(sample)
        result_ws = self.normalise(result_ws, result_ws.name(), self.normalise_method)
        if 'background_test_range' in kwargs:
            bkgd_range = kwargs['background_test_range']
            del kwargs['background_test_range']
        else:
            bkgd_range = self.background_test_range
        background_int = Integration(result_ws,
                                     RangeLower=bkgd_range[0], RangeUpper=bkgd_range[1],
                                     IncludePartialBins=True)
        total_counts = Integration(result_ws, IncludePartialBins=True)
        background_int = ConvertUnits(background_int, "Energy", AlignBins=0)
        self.log("Diagnose: finished convertUnits ")
        background_int *= self.scale_factor
        diagnostics.normalise_background(background_int, whiteintegrals, kwargs.get('second_white', None))
        kwargs['background_int'] = background_int
        kwargs['sample_counts'] = total_counts
    # Check how we should run diag
    if self.diag_spectra is None:
        # Do the whole lot at once
        diagnostics.diagnose(whiteintegrals, **kwargs)
    else:
        banks = self.diag_spectra.split(";")
        bank_spectra = []
        for b in banks:
            token = b.split(",")  # b = "(,)"
            if len(token) != 2:
                raise ValueError("Invalid bank spectra specification in diag %s" % self.diag_spectra)
            start = int(token[0].lstrip('('))
            end = int(token[1].rstrip(')'))
            bank_spectra.append((start, end))
        for index, bank in enumerate(bank_spectra):
            # diag expects zero-based workspace indices, the spec is one-based
            kwargs['start_index'] = bank[0] - 1
            kwargs['end_index'] = bank[1] - 1
            diagnostics.diagnose(whiteintegrals, **kwargs)
    # Clean up the intermediate workspaces created above
    if 'sample_counts' in kwargs:
        DeleteWorkspace(Workspace='background_int')
        DeleteWorkspace(Workspace='total_counts')
    if 'second_white' in kwargs:
        DeleteWorkspace(Workspace=kwargs['second_white'])
    # Return a mask workspace
    diag_mask, det_ids = ExtractMask(InputWorkspace=whiteintegrals, OutputWorkspace=var_name)
    DeleteWorkspace(Workspace=whiteintegrals)
    self.spectra_masks = diag_mask
    return diag_mask
def do_white(self, white_run, spectra_masks, map_file, mon_number=None):
    """
    Create a workspace in which each spectrum contains the corresponding
    white-beam integral (a single value).

    These integrals are used as an estimate for detector efficiency over a wide
    range of energies (rather than the detector electronics' efficiency, as the
    Geiger counters differ greatly in efficiency) and are used to remove the
    influence of this efficiency on the different detectors.

    Parameters:
      white_run     - run number, filepath or workspace of the white beam run
      spectra_masks - mask workspace applied via remap (may be None)
      map_file      - grouping map file applied via remap (may be None)
      mon_number    - optional monitor spectrum number for normalisation
    """
    whitews_name = common.create_resultname(white_run, suffix='-white')
    if whitews_name in mtd:
        DeleteWorkspace(Workspace=whitews_name)
    # Load
    white_data = self.load_data(white_run, whitews_name, self._keep_wb_workspace)
    # Normalise
    white_ws = self.normalise(white_data, whitews_name, self.normalise_method, 0.0, mon_number)
    # Units conversion
    white_ws = ConvertUnits(InputWorkspace=white_ws, OutputWorkspace=whitews_name, Target="Energy", AlignBins=0)
    self.log("do_white: finished convertUnits ")
    # The Rebin below both integrates the workspace into one bin per spectrum
    # and sets up common bin boundaries for all spectra
    low = self.wb_integr_range[0]
    upp = self.wb_integr_range[1]
    if low > upp:
        # use %g: the integration limits are energies and may be non-integral;
        # the original '%d' silently truncated them in the error message
        raise ValueError("White beam integration range is inconsistent. low=%g, upp=%g" % (low, upp))
    delta = 2.0 * (upp - low)
    white_ws = Rebin(InputWorkspace=white_ws, OutputWorkspace=whitews_name, Params=[low, delta, upp])
    # Masking and grouping
    white_ws = self.remap(white_ws, spectra_masks, map_file)
    # White beam scale factor
    white_ws *= self.wb_scale_factor
    return white_ws
def mono_van(self, mono_van, ei_guess, white_run=None, map_file=None,
             spectra_masks=None, result_name=None, Tzero=None):
    """Convert a mono vanadium run to DeltaE.

    If multiple run files are passed to this function, they are summed
    into a single run before being processed.
    """
    # Load (and sum, if several files were given) the vanadium data
    van_data = self.load_data(mono_van)
    # Derive a result name when the caller has not supplied one
    if result_name is None:
        result_name = common.create_resultname(mono_van)
    converted = self._do_mono(van_data, van_data, result_name, ei_guess,
                              white_run, map_file, spectra_masks, Tzero)
    # Normalise by the vanadium sample weight (mass over relative molar mass)
    converted /= float(self.van_mass) / float(self.van_rmm)
    return converted
def mono_sample(self, mono_run, ei_guess, white_run=None, map_file=None,
                spectra_masks=None, result_name=None, Tzero=None):
    """Convert a mono-chromatic sample run to DeltaE.

    If multiple run files are passed to this function, they are summed
    into a single run before being processed.
    """
    # Load (and sum, if several files were given) the sample data
    run_data = self.load_data(mono_run)
    # Derive a result name when the caller has not supplied one
    if result_name is None:
        result_name = common.create_resultname(mono_run, prefix=self.instr_name)
    return self._do_mono(run_data, run_data, result_name, ei_guess,
                         white_run, map_file, spectra_masks, Tzero)
# -------------------------------------------------------------------------------------------
# This actually does the conversion for the mono-sample and mono-vanadium runs
#
# -------------------------------------------------------------------------------------------
def _do_mono(self, data_ws, monitor_ws, result_name, ei_guess,
             white_run=None, map_file=None, spectra_masks=None, Tzero=None):
    """
    Convert units of a given workspace to deltaE, including possible
    normalisation to a white-beam vanadium run.

    Parameters:
      data_ws    - the loaded mono run workspace
      monitor_ws - workspace containing the monitor spectra (may be replaced
                   internally for SNS event data)
      result_name - name of the output workspace
      ei_guess   - estimate of the incident energy
      white_run  - optional white-beam run used for detector-efficiency
                   normalisation
      map_file   - optional grouping map file
      spectra_masks - optional mask workspace
      Tzero      - optional time-zero offset override

    Returns the converted workspace (also registered under result_name).
    """
    # Special load monitor stuff.
    # Instrument-specific determination of Ei and the T0 shift:
    if (self.instr_name == "CNCS" or self.instr_name == "HYSPEC"):
        # these instruments always fix Ei to the guess
        self.fix_ei = True
        ei_value = ei_guess
        if (self.instr_name == "HYSPEC"):
            # empirical T0 formula for HYSPEC
            Tzero=4.0 + 107.0 / (1+math.pow((ei_value/31.0),3.0))
            self.log("Determined T0 of %s for HYSPEC" % str(Tzero))
        if (Tzero is None):
            # empirical fallback T0 (microseconds) when none supplied
            tzero = (0.1982*(1+ei_value)**(-0.84098))*1000.0
        else:
            tzero = Tzero
        # apply T0 shift
        ChangeBinOffset(InputWorkspace=data_ws,OutputWorkspace=result_name,Offset= -tzero)
        mon1_peak = 0.0
    elif (self.instr_name == "ARCS" or self.instr_name == "SEQUOIA"):
        # Event-mode SNS instruments: monitors must be re-loaded from the
        # original file, whose name is recovered from the 'Filename' run log
        if 'Filename' in data_ws.getRun(): mono_run = data_ws.getRun()['Filename'].value
        else: raise RuntimeError('Cannot load monitors for event reduction. Unable to determine Filename from mono workspace, it should have been added as a run log.')
        self.log("mono_run = %s (%s)" % (mono_run,type(mono_run)),'debug')
        if mono_run.endswith("_event.nxs"):
            monitor_ws=LoadNexusMonitors(Filename=mono_run)
        elif mono_run.endswith("_event.dat"):
            InfoFilename = mono_run.replace("_neutron_event.dat", "_runinfo.xml")
            monitor_ws=LoadPreNexusMonitors(RunInfoFilename=InfoFilename)
        try:
            ei_calc,firstmon_peak,firstmon_index,TzeroCalculated = \
                GetEi(InputWorkspace=monitor_ws, Monitor1Spec=int(self.ei_mon_spectra[0]), Monitor2Spec=int(self.ei_mon_spectra[1]),
                      EnergyEstimate=ei_guess,FixEi=self.fix_ei)
        # NOTE(review): bare 'except' also traps KeyboardInterrupt/SystemExit;
        # consider narrowing to Exception
        except:
            self.log("Error in GetEi. Using entered values.")
            #monitor_ws.getRun()['Ei'] = ei_value
            ei_value = ei_guess
            AddSampleLog(Workspace=monitor_ws,LogName= 'Ei',LogText= ei_value,LogType= "Number")
            ei_calc = None
            TzeroCalculated = Tzero
        # Set the tzero to be the calculated value
        if (TzeroCalculated is None):
            tzero = 0.0
        else:
            tzero = TzeroCalculated
        # If we are fixing, then use the guess if given
        if (self.fix_ei):
            ei_value = ei_guess
            # If a Tzero has been entered, use it, if we are fixing.
            if (Tzero is not None):
                tzero = Tzero
        else:
            if (ei_calc is not None):
                ei_value = ei_calc
            else:
                ei_value = ei_guess
        mon1_peak = 0.0
        # apply T0 shift
        ChangeBinOffset(InputWorkspace=data_ws,OutputWorkspace= result_name,Offset=-tzero)
        self.incident_energy = ei_value
    else:
        # Do ISIS stuff for Ei
        # Both are these should be run properties really
        ei_value, mon1_peak = self.get_ei(monitor_ws, result_name, ei_guess)
        self.incident_energy = ei_value
    # As we've shifted the TOF so that mon1 is at t=0.0 we need to account for this in CalculateFlatBackground and normalisation
    bin_offset = -mon1_peak
    # For event mode, we are going to histogram in energy first, then go back to TOF
    if (self.__facility == "SNS"):
        if self.check_background== True:
            # Extract the time range for the background determination before we throw it away
            background_bins = "%s,%s,%s" % (self.bkgd_range[0] + bin_offset, (self.bkgd_range[1]-self.bkgd_range[0]), self.bkgd_range[1] + bin_offset)
            Rebin(InputWorkspace=result_name,OutputWorkspace= "background_origin_ws",Params=background_bins)
        # Convert to Et
        ConvertUnits(InputWorkspace=result_name,OutputWorkspace= "_tmp_energy_ws", Target="DeltaE",EMode="Direct", EFixed=ei_value)
        RenameWorkspace(InputWorkspace="_tmp_energy_ws",OutputWorkspace= result_name)
        # Histogram
        Rebin(InputWorkspace=result_name,OutputWorkspace= "_tmp_rebin_ws",Params= self.energy_bins, PreserveEvents=False)
        RenameWorkspace(InputWorkspace="_tmp_rebin_ws",OutputWorkspace= result_name)
        # Convert back to TOF
        ConvertUnits(InputWorkspace=result_name,OutputWorkspace=result_name, Target="TOF",EMode="Direct", EFixed=ei_value)
    else:
        # TODO: This algorithm needs to be separated so that it doesn't actually
        # do the correction as well so that it can be moved next to LoadRaw where
        # it belongs
        if self.det_cal_file == None:
            run = data_ws.getRun()
            if 'Filename' in run:
                filename = run['Filename'].value
            else:
                raise RuntimeError('Cannot run LoadDetectorInfo: "Filename" property not found on input mono workspace')
            if self.relocate_dets:
                self.log('_do_mono: Moving detectors to positions specified in RAW file.'+filename)
            LoadDetectorInfo(Workspace=result_name,DataFilename=filename,RelocateDets=self.relocate_dets)
        else:
            self.log('_do_mono: Raw file detector header is superceeded')
            if self.relocate_dets:
                self.log('_do_mono: Moving detectors to positions specified in cal file ','debug')
                # the cal "file" may actually be a workspace (by name or object)
                if str(self.det_cal_file) in mtd: # it is already workspace
                    self.__det_cal_file_ws = mtd[str(self.det_cal_file)]
                if isinstance(self.det_cal_file,api.Workspace): # it is already workspace
                    self.__det_cal_file_ws = self.det_cal_file
                if self.__det_cal_file_ws == None :
                    self.log('_do_mono: Loading detector info from file : ' +str(self.det_cal_file),'debug')
                    file = FileFinder.getFullPath(str(self.det_cal_file))
                    if len(file) == 0: # try to find run
                        file = common.find_file(self.det_cal_file)
                    LoadDetectorInfo(Workspace=result_name,DataFilename=file,RelocateDets= self.relocate_dets)
                    self.log('_do_mono: Loading detector info completed ','debug')
                else:
                    self.log('_do_mono: Copying detectors positions from det_cal_file workspace : '+self.__det_cal_file_ws.name())
                    CopyInstrumentParameters(InputWorkspace=self.__det_cal_file_ws,OutputWorkspace=result_name)
                    self.log('_do_mono: Copying detectors positions complete','debug')
    if self.check_background == True:
        # Remove the count rate seen in the regions of the histograms defined as the background regions, if the user defined such region
        ConvertToDistribution(Workspace=result_name)
        if (self.__facility == "SNS"):
            # SNS: background comes from the TOF region saved above before the
            # energy conversion destroyed it
            CalculateFlatBackground(InputWorkspace="background_origin_ws",OutputWorkspace= "background_ws",
                                    StartX= self.bkgd_range[0] + bin_offset,EndX= self.bkgd_range[1] + bin_offset,
                                    WorkspaceIndexList= '',Mode= 'Mean',OutputMode= 'Return Background')
            # Delete the raw data background region workspace
            DeleteWorkspace("background_origin_ws")
            # Convert to distribution to make it compatible with the data workspace (result_name).
            ConvertToDistribution(Workspace="background_ws")
            # Subtract the background
            Minus(LHSWorkspace=result_name,RHSWorkspace= "background_ws",OutputWorkspace=result_name)
            # Delete the determined background
            DeleteWorkspace("background_ws")
        else:
            CalculateFlatBackground(InputWorkspace=result_name,OutputWorkspace=result_name,
                                    StartX= self.bkgd_range[0] + bin_offset,EndX= self.bkgd_range[1] + bin_offset,
                                    WorkspaceIndexList= '',Mode= 'Mean')
        ConvertFromDistribution(Workspace=result_name)
    # Normalise using the chosen method
    # TODO: This really should be done as soon as possible after loading
    self.normalise(mtd[result_name], result_name, self.normalise_method, range_offset=bin_offset)
    # This next line will fail the SystemTests
    #ConvertUnits(result_ws, result_ws, Target="DeltaE",EMode='Direct', EFixed=ei_value)
    # But this one passes...
    ConvertUnits(InputWorkspace=result_name,OutputWorkspace=result_name, Target="DeltaE",EMode='Direct')
    self.log("_do_mono: finished ConvertUnits for : "+result_name)
    if not self.energy_bins is None:
        Rebin(InputWorkspace=result_name,OutputWorkspace=result_name,Params= self.energy_bins,PreserveEvents=False)
    if self.apply_detector_eff:
        if (self.__facility == "SNS"):
            # Need to be in lambda for detector efficiency correction
            ConvertUnits(InputWorkspace=result_name,OutputWorkspace= result_name, Target="Wavelength", EMode="Direct", EFixed=ei_value)
            He3TubeEfficiency(InputWorkspace=result_name,OutputWorkspace=result_name)
            ConvertUnits(InputWorkspace=result_name,OutputWorkspace= result_name, Target="DeltaE",EMode='Direct', EFixed=ei_value)
        else:
            DetectorEfficiencyCor(InputWorkspace=result_name,OutputWorkspace=result_name)
            self.log("_do_mono: finished DetectorEfficiencyCor for : "+result_name)
    # Ki/Kf Scaling...
    if self.apply_kikf_correction:
        self.log('Start Applying ki/kf corrections to the workpsace : '+result_name)
        CorrectKiKf(InputWorkspace=result_name,OutputWorkspace= result_name, EMode='Direct')
        self.log('finished applying ki/kf corrections for : '+result_name)
    # Make sure that our binning is consistent
    if not self.energy_bins is None:
        Rebin(InputWorkspace=result_name,OutputWorkspace= result_name,Params= self.energy_bins)
    # Masking and grouping
    result_ws = mtd[result_name]
    result_ws = self.remap(result_ws, spectra_masks, map_file)
    ConvertToDistribution(Workspace=result_ws)
    # White beam correction
    if white_run is not None:
        white_ws = self.do_white(white_run, spectra_masks, map_file,None)
        result_ws /= white_ws
    # Overall scale factor
    result_ws *= self.scale_factor
    return result_ws
#-------------------------------------------------------------------------------
def convert_to_energy(self, mono_run, ei, white_run=None, mono_van=None,\
                      abs_ei=None, abs_white_run=None, save_path=None, Tzero=None, \
                      motor=None, offset=None):
    """
    One-shot function to convert the given runs to energy.

    Parameters:
      mono_run  - the sample run (number, filepath or workspace)
      ei        - incident energy guess
      white_run - optional white-beam run for detector efficiency normalisation
      mono_van  - optional mono-vanadium run; triggers (absolute) normalisation
      abs_ei    - incident energy for the vanadium run (defaults to ei)
      abs_white_run - white-beam run for the absolute-units branch
      save_path - directory or file path passed to save_results
      Tzero     - optional time-zero override passed down to _do_mono
      motor     - optional sample-environment log name used to compute psi
      offset    - optional motor offset added to the motor value for psi

    Returns the reduced sample workspace.
    """
    self.incident_energy = ei;
    # Check if we need to perform the absolute normalisation first
    if not mono_van is None:
        if abs_ei is None:
            abs_ei = ei
        mapping_file = self.abs_map_file
        spectrum_masks = self.spectra_masks
        monovan_wkspace = self.mono_van(mono_van, abs_ei, abs_white_run, mapping_file, spectrum_masks)
        # TODO: Need a better check than this...
        if (abs_white_run is None):
            self.log("Performing Normalisation to Mono Vanadium.")
            norm_factor = self.calc_average(monovan_wkspace,ei)
        else:
            self.log("Performing Absolute Units Normalisation.")
            # Perform Abs Units...
            norm_factor = self.monovan_abs(monovan_wkspace)
        DeleteWorkspace(monovan_wkspace.name())
    else:
        norm_factor = None
    # Figure out what to call the workspace
    # NOTE(review): result_name is first set to mono_run and then, whenever
    # mono_run is not None, immediately replaced by create_resultname - the
    # intermediate assignment only matters for a None mono_run
    result_name = mono_run
    if not result_name is None:
        result_name = common.create_resultname(mono_run)
    # Main run file conversion
    sample_wkspace = self.mono_sample(mono_run, ei, white_run, self.map_file,
                                      self.spectra_masks, result_name, Tzero)
    if not norm_factor is None:
        sample_wkspace /= norm_factor
    #calculate psi from sample environment motor and offset
    if self.__facility == 'ISIS' :
        # NaN marks "no offset available" unless the motor log exists
        default_offset=float('NaN')
        if not (motor is None) and sample_wkspace.getRun().hasProperty(motor):
            default_offset = 0
    else:
        default_offset=0
    if (offset is None):
        self.motor_offset = default_offset
    else:
        self.motor_offset = float(offset)
    self.motor=0
    if not (motor is None):
        # Check if motor name exists
        if sample_wkspace.getRun().hasProperty(motor):
            self.motor=sample_wkspace.getRun()[motor].value[0]
            self.log("Motor value is %s" % self.motor)
        else:
            self.log("Could not find such sample environment log. Will use psi=offset")
    self.psi = self.motor+self.motor_offset
    # Save then finish
    self.save_results(sample_wkspace, save_path)
    # Clear loaded raw data to free up memory
    common.clear_loaded_data()
    return sample_wkspace
#----------------------------------------------------------------------------------
# Reduction steps
#----------------------------------------------------------------------------------
def get_ei(self, input_ws, resultws_name, ei_guess):
    """
    Calculate the incident energy of the neutrons from the monitor peaks.

    The input workspace TOF axis is shifted so the first monitor peak sits at
    t=0 and the source is moved to that monitor's position; the Ei found is
    stored on self.incident_energy.

    Returns (ei, mon1_peak).
    """
    # Validate self.fix_ei; note the validated local is not passed to GetEi
    # below (self.fix_ei is used directly), so this acts purely as a check
    fix_ei = str(self.fix_ei).lower()
    if fix_ei == 'true':
        fix_ei = True
    elif fix_ei == 'false':
        fix_ei = False
    elif fix_ei == 'fixei':
        fix_ei = True
    else:
        raise TypeError('Unknown option passed to get_ei "%s"' % fix_ei)
    self.incident_energy = ei_guess
    monitor_ws = input_ws
    monitors_from_separate_ws = False
    if type(monitor_ws) is str:
        monitor_ws = mtd[monitor_ws]
    try:
        # probe: does this workspace contain the monitor spectrum itself?
        nsp = monitor_ws.getSpectrum(int(self.ei_mon_spectra[0]))
    except Exception:
        # BUG FIX: narrowed from a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit. Fall back to the companion
        # '<name>_monitors' workspace.
        monitors_from_separate_ws = True
        mon_ws = monitor_ws.getName() + '_monitors'
        monitor_ws = mtd[mon_ws]
    # Calculate the incident energy
    ei, mon1_peak, mon1_index, tzero = \
        GetEi(InputWorkspace=monitor_ws, Monitor1Spec=int(self.ei_mon_spectra[0]), Monitor2Spec=int(self.ei_mon_spectra[1]),
              EnergyEstimate=ei_guess, FixEi=self.fix_ei)
    self.incident_energy = ei
    if monitors_from_separate_ws:
        # the data workspace never saw GetEi, so record Ei on it explicitly
        AddSampleLog(Workspace=input_ws, LogName='Ei', LogText=str(ei), LogType='Number')
    # Adjust the TOF such that the first monitor peak is at t=0
    ChangeBinOffset(InputWorkspace=input_ws, OutputWorkspace=resultws_name, Offset=-float(str(mon1_peak)))
    # Move the source to the first monitor position so distances stay consistent
    mon1_det = monitor_ws.getDetector(mon1_index)
    mon1_pos = mon1_det.getPos()
    src_name = input_ws.getInstrument().getSource().getName()
    MoveInstrumentComponent(Workspace=resultws_name, ComponentName=src_name, X=mon1_pos.getX(), Y=mon1_pos.getY(), Z=mon1_pos.getZ(), RelativePosition=False)
    return ei, mon1_peak
def remap(self, result_ws, spec_masks, map_file):
    """
    Apply an optional detector mask and an optional grouping map to a workspace.
    """
    # Masking first, so grouped spectra exclude bad detectors
    if spec_masks is not None:
        MaskDetectors(Workspace=result_ws, MaskedWorkspace=spec_masks)
    # Then group according to the supplied map file, averaging within groups
    if map_file is not None:
        result_ws = GroupDetectors(InputWorkspace=result_ws, OutputWorkspace=result_ws,
                                   MapFile= map_file, KeepUngroupedSpectra=0,
                                   Behaviour='Average')
    return result_ws
def normalise(self, data_ws, result_name, method, range_offset=0.0, mon_number=None):
    """
    Apply normalisation using the specified source.

    Parameters:
      data_ws      - the input workspace (object)
      result_name  - name for the normalised output workspace
      method       - one of the methods in self.__normalization_methods
                     ('monitor-1', 'current', 'none'/None)
      range_offset - offset added to the monitor integration range (TOF shift)
      mon_number   - monitor spectrum number overriding self.mon1_norm_spec

    A sample log is added to the result so the same workspace is never
    normalised twice.  Returns the output workspace.
    """
    if method is None:
        method = "undefined"
    if method not in self.__normalization_methods:
        raise KeyError("Normalisation method: "+method+" is not among known normalization methods")
    # Make sure we don't call this twice
    method = method.lower()
    done_log = "DirectInelasticReductionNormalisedBy"
    if done_log in data_ws.getRun() or method == 'none':
        # already normalised (or no normalisation requested): just clone
        if data_ws.name() != result_name:
            CloneWorkspace(InputWorkspace=data_ws, OutputWorkspace=result_name)
        output = mtd[result_name]
    elif method == 'monitor-1':
        range_min = self.norm_mon_integration_range[0] + range_offset
        range_max = self.norm_mon_integration_range[1] + range_offset
        if mon_number is None:
            mon_spectr_num = int(self.mon1_norm_spec)
        else:
            mon_spectr_num = mon_number
        NormaliseToMonitor(InputWorkspace=data_ws, OutputWorkspace=result_name, MonitorSpectrum=mon_spectr_num,
                           IntegrationRangeMin=float(str(range_min)), IntegrationRangeMax=float(str(range_max)), IncludePartialBins=True)
        output = mtd[result_name]
    elif method == 'current':
        NormaliseByCurrent(InputWorkspace=data_ws, OutputWorkspace=result_name)
        output = mtd[result_name]
    else:
        # BUG FIX: the original referenced an undefined name 'reference' here,
        # turning the intended error report into a NameError
        raise RuntimeError('Normalisation scheme ' + method + ' not found. It must be one of monitor-1, current, or none')
    # Add a log to the workspace to say that the normalisation has been done
    AddSampleLog(Workspace=output, LogName=done_log, LogText=method)
    return output
def calc_average(self, data_ws, energy_incident):
    """
    Compute the weighted average Y value of a workspace.

    The average is computed by collapsing the workspace to a single bin per
    spectrum, then masking out detectors flagged by the
    FindDetectorsOutsideLimits and MedianDetectorTest algorithms (via
    diagnostics.diagnose). The average is then computed over the remaining
    spectra, factoring in their errors as weights, i.e.

        average = sum(Y[i]*weight[i]) / sum(weights)

    where only unmasked detectors are used and weight[i] = 1/error[i].
    """
    if self.monovan_integr_range is None:
        # default integration range as fixed fractions of the incident energy
        self.monovan_integr_range = [self.monovan_lo_frac*energy_incident, self.monovan_hi_frac*energy_incident]
    e_low = self.monovan_integr_range[0]
    e_upp = self.monovan_integr_range[1]
    if e_low > e_upp:
        raise ValueError("Inconsistent mono-vanadium integration range defined!")
    # collapse each spectrum into a single bin spanning [e_low, e_upp]
    data_ws = Rebin(InputWorkspace=data_ws, OutputWorkspace=data_ws, Params=[e_low, 2.*(e_upp-e_low), e_upp])
    data_ws = ConvertToMatrixWorkspace(InputWorkspace=data_ws, OutputWorkspace=data_ws)
    # run diagnostics with the vanadium thresholds configured on the reducer
    args = {}
    args['tiny'] = self.tiny
    args['huge'] = self.huge
    args['van_out_lo'] = self.monovan_lo_bound
    args['van_out_hi'] = self.monovan_hi_bound
    args['van_lo'] = self.monovan_lo_frac
    args['van_hi'] = self.monovan_hi_frac
    args['van_sig'] = self.samp_sig
    args['use_hard_mask_only'] = self.use_hard_mask_only
    args['print_results'] = False
    diagnostics.diagnose(data_ws, **args)
    monovan_masks, det_ids = ExtractMask(InputWorkspace=data_ws, OutputWorkspace='monovan_masks')
    MaskDetectors(Workspace=data_ws, MaskedWorkspace=monovan_masks)
    DeleteWorkspace(Workspace=monovan_masks)
    ConvertFromDistribution(Workspace=data_ws)
    nhist = data_ws.getNumberHistograms()
    average_value = 0.0
    weight_sum = 0.0
    ic = 0
    for i in range(nhist):
        try:
            det = data_ws.getDetector(i)
        except Exception:
            # spectrum without an attached detector - skip it
            continue
        if det.isMasked():
            continue
        y_value = data_ws.readY(i)[0]
        if math.isnan(y_value):
            continue
        err = data_ws.readE(i)[0]
        # guard: a zero error value previously raised ZeroDivisionError;
        # such a spectrum carries no usable statistical weight, so skip it
        if err == 0.0:
            continue
        ic += 1
        weight = 1.0/err
        average_value += y_value * weight
        weight_sum += weight
    if weight_sum == 0:
        raise RuntimeError("Vanadium weight calculated from {0} spectra out of {1} histograms "
                           "is equal to 0 and sum: {2}. Check vanadium integration ranges "
                           "or diagnostic settings".format(ic, nhist, average_value))
    return average_value / weight_sum
def monovan_abs(self, ei_workspace):
    """Calculate the scaling factor required for normalisation to absolute units.

    The workspace must carry an 'Ei' run property (set by GetEi).
    Raises RuntimeError when that property is missing.
    """
    sample_run = ei_workspace.getRun()
    try:
        ei_prop = sample_run['Ei']
    except KeyError:
        raise RuntimeError('The given workspace "%s" does not contain an Ei value. Run GetEi first.' % str(ei_workspace))
    incident_energy = ei_prop.value
    # vanadium cross-section is energy dependent up to 200 meV, constant above
    if incident_energy >= 200.0:
        xsection = 421.0
    else:
        xsection = 400.0 + incident_energy / 10.0
    scale = self.calc_average(ei_workspace, incident_energy) / xsection
    mass_ratio = float(self.sample_mass) / float(self.sample_rmm)
    return scale * mass_ratio
def save_results(self, workspace, save_file=None, formats=None):
    """
    Save the result workspace to the specified filename using the list of
    formats given in *formats*. If formats is None the reducer default
    (self.save_format) is used; if that is also None nothing is written.

    :param workspace: workspace to save
    :param save_file: target file name or directory; None -> workspace name
    :param formats: extension string or list of extension strings
    :raises ValueError: when an empty filename is supplied
    """
    if save_file is None:
        save_file = workspace.getName()
    elif os.path.isdir(save_file):
        # a directory was given: save under the workspace name inside it
        save_file = os.path.join(save_file, workspace.getName())
    elif save_file == '':
        raise ValueError('Empty filename is not allowed for saving')
    if formats is None:
        formats = self.save_format
    if isinstance(formats, str):  # was: type(formats) == str
        formats = [formats]
    # if formats is still None, there is nothing to write
    if formats is None:
        return
    # strip any extension: each registered saver appends its own
    save_file = os.path.splitext(save_file)[0]
    for ext in formats:
        if ext in self.__save_formats:
            filename = save_file + ext
            self.__save_formats[ext](workspace, filename)
        else:
            self.log("Unknown file format {0} requested while saving results.".format(ext))
#-------------------------------------------------------------------------------
def load_data(self, runs, new_ws_name=None, keep_previous_ws=False):
    """
    Load a run or list of runs. If a list of runs is given then
    they are summed into one workspace.

    :param runs: run number, file name or list thereof
    :param new_ws_name: if given, the loaded workspace is renamed (or, when
        keep_previous_ws is True, cloned) to this name
    :param keep_previous_ws: keep the originally loaded workspace alive by
        cloning instead of renaming
    :return: the loaded (and possibly renamed/cloned) workspace
    """
    # NOTE(review): the original computed an 'event_mode' flag from
    # self.nexus_in_event_mode but never used it; dead code removed.
    sum_runs = True  # multiple runs are always summed ('sum' shadowed the builtin)
    result_ws = common.load_runs(runs, sum_runs)
    if new_ws_name is not None:
        if keep_previous_ws:
            result_ws = CloneWorkspace(InputWorkspace=result_ws, OutputWorkspace=new_ws_name)
        else:
            result_ws = RenameWorkspace(InputWorkspace=result_ws, OutputWorkspace=new_ws_name)
    self.setup_mtd_instrument(result_ws)
    return result_ws
#---------------------------------------------------------------------------
# Behind the scenes stuff
#---------------------------------------------------------------------------
def __init__(self, instr_name=None):
    """
    Construct the reducer, optionally initialising it for *instr_name*.
    """
    self._to_stdout = True
    self._log_to_mantid = False
    self._idf_values_read = False
    # when the reducer input is a white-beam workspace rather than a run number
    # we may want to keep that workspace; usually we do not
    self._keep_wb_workspace = False
    self.spectra_masks = None
    # skip initialisation on a first-time/empty run
    empty_run = instr_name is None or len(instr_name) == 0 or instr_name == '__empty_'
    if not empty_run:
        self.initialise(instr_name)
#----------------------------------------------------------------------------------
# Complex setters/getters
#----------------------------------------------------------------------------------
@property
def instr_name(self):
    """Short name of the current instrument (None when not set)."""
    return self._instr_name

@instr_name.setter
def instr_name(self, new_name):
    """Set the instrument, resolving a prefix or full name via the Mantid
    facility information, loading an empty instrument definition and
    re-reading the IDF parameters.

    :raises KeyError: when the name is unknown in every supported facility
    :raises RuntimeError: when the instrument definition cannot be loaded
    """
    # original checked hasattr(self,'instr_name'), which only worked because
    # the property getter raises when _instr_name is unset; check the backing
    # attribute directly instead
    if not hasattr(self, '_instr_name'):
        self._instr_name = None
    if new_name is None:
        return
    # Instrument name might be a prefix, query Mantid for the full name
    short_name = ''
    full_name = ''
    try:
        instrument = config.getFacility().instrument(new_name)
        short_name = instrument.shortName()
        full_name = instrument.name()
    except Exception:
        # it is possible to have the wrong facility selected: search them all
        facilities = config.getFacilities()
        old_facility = str(config.getFacility())
        for facility in facilities:
            config.setString('default.facility', facility.name())
            try:
                instrument = facility.instrument(new_name)
                short_name = instrument.shortName()
                full_name = instrument.name()
                if len(short_name) > 0:
                    break
            except Exception:
                pass
        if len(short_name) == 0:
            # restore the previous facility before giving up
            config.setString('default.facility', old_facility)
            raise KeyError(" Can not find/set-up the instrument: " + new_name + ' in any supported facility')
    new_name = short_name
    self.__facility = str(config.getFacility())
    if new_name == self.instr_name:
        return
    self._instr_name = new_name
    config['default.instrument'] = full_name
    # Load an empty instrument if the right one isn't already there.
    # (fixes original typos: hasattr(self,'instument') and the undefined
    # name 'instr_name' where 'new_name' was intended)
    if not hasattr(self, 'instrument') or self.instrument.getName() != new_name:
        try:
            idf_file = api.ExperimentInfo.getInstrumentFilename(new_name)
            tmp_ws_name = '__empty_' + new_name
            if not mtd.doesExist(tmp_ws_name):
                LoadEmptyInstrument(Filename=idf_file, OutputWorkspace=tmp_ws_name)
            self.instrument = mtd[tmp_ws_name].getInstrument()
        except Exception:
            self.instrument = None
            self._instr_name = None
            raise RuntimeError('Cannot load instrument for prefix "%s"' % new_name)
    # Initialise other IDF parameters
    self.init_idf_params(True)
# Vanadium rmm
@property
def van_rmm(self):
    """The rmm of Vanadium is a constant, should not be instrument parameter. Atom not exposed to python :( """
    return 50.9415

@van_rmm.deleter
def van_rmm(self):
    # deletion is a deliberate no-op: the vanadium relative molecular mass is a physical constant
    pass
@property
def facility(self):
    """Name of the facility the current instrument belongs to (read-only;
    set as a side effect of assigning instr_name)."""
    return self.__facility
# integration range for monochromatic vanadium,
@property
def monovan_integr_range(self):
    """Energy integration range [e_min, e_max] for the mono-vanadium run.

    When no explicit range has been set, defaults to the configured
    fractions of the incident energy.
    """
    if not hasattr(self, '_monovan_integr_range') or self._monovan_integr_range is None:
        ei = self.incident_energy
        # renamed: the original local 'range' shadowed the builtin
        default_range = [self.monovan_lo_frac * ei, self.monovan_hi_frac * ei]
        return default_range
    else:
        return self._monovan_integr_range

@monovan_integr_range.setter
def monovan_integr_range(self, value):
    # None resets the range to the energy-fraction default (see getter)
    self._monovan_integr_range = value
# format to save data
@property
def save_format(self):
    """List of file extensions the results will be saved in (None = no saving)."""
    return self._save_format

@save_format.setter
def save_format(self, value):
    """Add a save format or clear the format list.

    None, an empty string or an empty list clears all formats set up
    earlier; a non-empty list is applied element by element; an unknown
    extension string is logged (and still recorded, but save_results
    will skip it).
    """
    # clear format by using None
    if value is None:
        self._save_format = None
        return
    if isinstance(value, str):
        if value not in self.__save_formats:
            self.log("Trying to set saving in unknown format: \"" + str(value) + "\" No saving will occur for this format")
        # an empty string means the user wants to clear the formats
        if len(value) == 0:
            self._save_format = None
            return
    elif isinstance(value, list):
        if len(value) > 0:
            # apply each requested format recursively
            for single_format in value:
                self.save_format = single_format
        else:
            # an empty list clears all previous formats
            self._save_format = None
        return
    # record the format, creating the list on first use
    if self._save_format is None:
        self._save_format = []
    self._save_format.append(value)
# bin ranges
@property
def energy_bins(self):
    """Rebin parameters for the energy axis: [lo, step, hi, ...] triplets."""
    return self._energy_bins

@energy_bins.setter
def energy_bins(self, value):
    """Accept rebin parameters as a comma-separated string or a numeric sequence.

    The value must contain a multiple of three numbers ([lo, step, hi]
    blocks). The original implementation shadowed the builtin 'list',
    used py2 xrange, and kept only the LAST 3-number block of a
    multi-block string; all blocks are now retained, and a malformed
    string raises KeyError instead of IndexError.
    """
    if value is not None:
        if isinstance(value, str):
            # parse every number of every block
            value = [float(item) for item in value.split(',')]
        if len(value) % 3 != 0:
            raise KeyError("Energy_bin value has to be either list of n-blocks of 3 number each or string representation of this list with numbers separated by commas")
    #TODO: implement single value settings according to rebin
    self._energy_bins = value
# map file name
@property
def map_file(self):
    """Name of the detector grouping (.map) file."""
    return self._map_file

@map_file.setter
def map_file(self, value):
    # append the default .map extension when none was supplied
    if value is not None:
        root, extension = os.path.splitext(value)
        if not extension:
            value = value + '.map'
    self._map_file = value
# monovanadium map file name
@property
def monovan_mapfile(self):
    """Name of the grouping (.map) file used for the mono-vanadium run."""
    return self._monovan_mapfile

@monovan_mapfile.setter
def monovan_mapfile(self, value):
    # append the default .map extension when none was supplied
    if value is not None:
        base, extension = os.path.splitext(value)
        if not extension:
            value += '.map'
    self._monovan_mapfile = value
@property
def relocate_dets(self) :
if self.det_cal_file != None or self.__det_cal_file_ws != None: