# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX-License-Identifier: GPL-3.0+
# pylint: disable=too-many-lines, too-many-branches, invalid-name, super-on-old-class, protected-access,
# pylint: disable=too-few-public-methods, too-many-arguments, too-many-instance-attributes
"""
This file defines what happens in each step of the data reduction; it is
the guts of the reduction. See ISISReducer for the order in which the steps are run
and the names used to identify them.
Most of this code was copied from SANSReduction.py and reorganised to be used with
ReductionStep objects. The guts need refactoring.
"""
import os
import re
import math
from collections import namedtuple
from mantid.kernel import Logger
from mantid.api import WorkspaceGroup, Workspace, IEventWorkspace
from mantid.simpleapi import *
from SANSUtility import (GetInstrumentDetails, MaskByBinRange,
isEventWorkspace, getFilePathFromWorkspace,
getWorkspaceReference, slice2histogram, getFileAndName,
mask_detectors_with_masking_ws, check_child_ws_for_name_and_type_for_added_eventdata,
extract_spectra,
extract_child_ws_for_added_eventdata, load_monitors_for_multiperiod_event_data,
MaskWithCylinder, get_masked_det_ids, get_masked_det_ids_from_mask_file, INCIDENT_MONITOR_TAG,
can_load_as_event_workspace, is_convertible_to_float, correct_q_resolution_for_can,
is_valid_user_file_extension, ADD_TAG)
import DarkRunCorrection as DarkCorr
import SANSUserFileParser as UserFileParser
from reducer_singleton import ReductionStep
sanslog = Logger("SANS")
DEBUG = False
# A global name for the Q Resolution workspace which lives longer than a reducer core
QRESOLUTION_WORKSPACE_NAME = "Q_Resolution_ISIS_SANS"
QRESOLUTION_MODERATOR_WORKSPACE_NAME = "Q_Resolution_MODERATOR_ISIS_SANS"
def _issueWarning(msg):
"""
Prints a message to the log marked as warning
@param msg: message to be issued
"""
print(msg)
sanslog.warning(msg)
def _issueInfo(msg):
"""
Prints a message to the log
@param msg: message to be issued
"""
print(msg)
sanslog.notice(msg)
def is_prompt_peak_instrument(reducer):
if reducer.instrument.name() == 'LOQ' or reducer.instrument.name() == 'LARMOR':
return True
else:
return False
def get_wavelength_min_and_max(reducer):
return reducer.to_wavelen.wav_low, reducer.to_wavelen.wav_high
class LoadRun(object):
UNSET_PERIOD = -1
def __init__(self, run_spec=None, trans=False, reload=True, entry=UNSET_PERIOD):
"""
Load a data file, move its detector to the right position according
to the beam center and normalize the data.
@param run_spec: the run number followed by dot and the extension
@param trans: set to true if the file is from a transmission run (default: False)
@param reload: whether to reload the workspace if it is already present
@param entry: the entry number of the run, useful for multi-period files (default: load the entire file)
"""
super(LoadRun, self).__init__()
self._data_file = run_spec
self._is_trans = trans
self._reload = reload
# entry number of the run inside the run file that will be analysed, as requested by the caller
self._period = int(entry)
self._index_of_group = 0
# set to the total number of periods in the file
self.periods_in_file = None
self.ext = ''
self.shortrun_no = -1
# the name of the loaded workspace in Mantid
self._wksp_name = ''
def curr_period(self):
if self._period != self.UNSET_PERIOD:
return self._period
return self._index_of_group + 1
def move2ws(self, index):
if self.periods_in_file > 1:
if index < self.periods_in_file:
self._index_of_group = index
return True
else:
return False
def get_wksp_name(self):
ref_ws = mtd[str(self._wksp_name)]
if isinstance(ref_ws, WorkspaceGroup):
return ref_ws[self._index_of_group].name()
else:
return self._wksp_name
wksp_name = property(get_wksp_name, None, None, None)
def _load_transmission(self, inst=None, is_can=False, extra_options=None):
if extra_options is None:
extra_options = dict()
if '.raw' in self._data_file or '.RAW' in self._data_file:
self._load(inst, is_can, extra_options)
return
# The intention of the commented-out code below is sound, which is why it has been
# left in but commented out. As of this writing LoadNexusMonitors throws an error
# if the monitor data is histogram data, i.e. the algorithm only works for event
# files at present. The error gets presented in red to the user and causes
# confusion. Once LoadNexusMonitors can load histogram data as well (or an
# equivalent change is made), the uncommented code below can be deleted and the
# commented-out code can be uncommented and modified as necessary.
self._load(inst, is_can, extra_options)
workspace = self._get_workspace_name()
if workspace in mtd:
outWs = mtd[workspace]
if isinstance(outWs, IEventWorkspace):
if workspace + "_monitors" in mtd:
RenameWorkspace(InputWorkspace=workspace + "_monitors", OutputWorkspace=workspace)
self.periods_in_file = 1
self._wksp_name = workspace
# For SANS transmission runs we care only about the monitors. Hence,
# by trying to load only the monitors we speed up the reduction process and
# avoid loading events, which are of no use for transmission.
# This may fail if the input file is not a Nexus file, in which case
# the job is passed on to the default _load method.
# try:
# outWs = LoadNexusMonitors(self._data_file, OutputWorkspace=workspace)
# self.periods_in_file = 1
# self._wksp_name = workspace
# except:
# self._load(inst, is_can, extra_options)
def _load(self, inst=None, is_can=False, extra_options=None):
"""
Load a workspace and read the logs into the passed instrument reference
@param inst: a reference to the current instrument
@param is_can: set this to True for can runs
@param extra_options: arguments to pass on to the Load Algorithm.
@return: number of periods in the workspace
"""
if extra_options is None:
extra_options = dict()
if self._period != self.UNSET_PERIOD:
workspace = self._get_workspace_name(self._period)
if not can_load_as_event_workspace(self._data_file):
extra_options['EntryNumber'] = self._period
else:
workspace = self._get_workspace_name()
extra_options['OutputWorkspace'] = workspace
outWs = Load(self._data_file, **extra_options)
appendix = "_monitors"
# We need to check if we are dealing with a group workspace which is made up of added event data. Note that
# we can also have a group workspace which is associated with period data, which we don't want to deal with here.
added_event_data_flag = False
if isinstance(outWs, WorkspaceGroup) and check_child_ws_for_name_and_type_for_added_eventdata(outWs):
extract_child_ws_for_added_eventdata(outWs, appendix)
added_event_data_flag = True
# Reload the outWs, it has changed from a group workspace to an event workspace
outWs = mtd[workspace]
monitor_ws_name = workspace + appendix
# Handle simple EventWorkspace data
if not added_event_data_flag:
if isinstance(outWs, IEventWorkspace):
try:
LoadNexusMonitors(self._data_file, OutputWorkspace=monitor_ws_name)
except ValueError as details:
sanslog.warning('The file does not contain monitors. \n'
+ 'The normalization might behave differently than you expect.\n'
' Further details: ' + str(details) + '\n')
else:
if monitor_ws_name in mtd:
DeleteWorkspace(monitor_ws_name)
# Handle Multi-period Event data
if not added_event_data_flag:
if isinstance(outWs, WorkspaceGroup) and len(outWs) > 0 and check_child_ws_for_name_and_type_for_added_eventdata(outWs):
pass
elif isinstance(outWs, WorkspaceGroup) and len(outWs) > 0 and isinstance(outWs[0], IEventWorkspace):
load_monitors_for_multiperiod_event_data(workspace=outWs, data_file=self._data_file,
monitor_appendix=appendix)
loader_name = ''
if isinstance(outWs, WorkspaceGroup):
historyWs = outWs[0]
else:
historyWs = outWs
try:
last_algorithm = historyWs.getHistory().lastAlgorithm()
loader_name = last_algorithm.getProperty('LoaderName').value
except RuntimeError as details:
sanslog.warning(
'Tried to get a loader name. But it seems that there is no loader name. Further info: ' + str(details))
if loader_name == 'LoadRaw':
self._loadSampleDetails(workspace)
if self._period != self.UNSET_PERIOD and isinstance(outWs, WorkspaceGroup):
outWs = mtd[self._leaveSinglePeriod(outWs.name(), self._period, appendix)]
self.periods_in_file = self._find_workspace_num_periods(workspace)
self._wksp_name = workspace
def _get_workspace_name(self, entry_num=None):
"""
Creates a name for the workspace that will contain the raw
data. If the entry number == 1 it is omitted, unless
optional_entry_no = False
@param entry_num: if this argument is set to an integer it will be added to the workspace name after a 'p'
"""
run = str(self.shortrun_no)
if entry_num:
if entry_num == self.UNSET_PERIOD:
entry_num = 1
run += 'p' + str(int(entry_num))
if self._is_trans:
return run + '_trans_' + self.ext.lower()
else:
return run + '_sans_' + self.ext.lower()
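# Illustrative example (hypothetical run number): with the naming scheme above, a
# sample run with shortrun_no=1234, ext='nxs' and no period requested is named
# '1234_sans_nxs', while period 3 of a transmission run becomes '1234p3_trans_nxs'.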
def _loadSampleDetails(self, ws_name):
ws_pointer = mtd[str(ws_name)]
if isinstance(ws_pointer, WorkspaceGroup):
workspaces = [ws for ws in ws_pointer]
else:
workspaces = [ws_pointer]
for ws in workspaces:
LoadSampleDetailsFromRaw(ws, self._data_file)
def _loadFromWorkspace(self, reducer):
""" It substitute the work of _assignHelper for workspaces, or, at least,
prepare the internal attributes, to be processed by the _assignHelper.
It is executed when the input for the constructor (run_spec) is given a workspace
If reload is False, it will try to get all information necessary to use the given
workspace as the one for the post-processing.
If reload is True, it will try to get all the information necessary to reload this
workspace from the data file.
"""
assert isinstance(self._data_file, Workspace)
ws_pointer = self._data_file
try:
_file_path = getFilePathFromWorkspace(ws_pointer)
except:
raise RuntimeError("Failed to retrieve information to reload this workspace " + str(self._data_file))
self._data_file = _file_path
self.ext = _file_path[-3:]
if isinstance(ws_pointer, WorkspaceGroup):
self.shortrun_no = ws_pointer[0].getRunNumber()
else:
self.shortrun_no = ws_pointer.getRunNumber()
if self._reload:
# give to _assignHelper the responsibility of loading this data.
return False
# test if the sample details are already loaded, necessary only for raw files:
if '.nxs' not in self._data_file[-4:]:
self._loadSampleDetails(ws_pointer)
# so it will try not to reload the workspace.
self._wksp_name = ws_pointer.name()
self.periods_in_file = self._find_workspace_num_periods(self._wksp_name)
# check that the current workspace has never been moved
hist_str = self._getHistory(ws_pointer)
if 'Algorithm: Move' in hist_str or 'Algorithm: Rotate' in hist_str:
raise RuntimeError('Moving components needs to be made compatible with not reloading the sample')
return True
# Helper function
def _assignHelper(self, reducer):
if isinstance(self._data_file, Workspace):
loaded_flag = self._loadFromWorkspace(reducer)
if loaded_flag:
return
if self._data_file == '' or self._data_file.startswith('.'):
raise RuntimeError('Sample needs to be assigned as run_number.file_type')
try:
if reducer.instrument.name() == "":
raise AttributeError
except AttributeError:
raise AttributeError('No instrument has been assigned, run SANS2D or LOQ first')
self._data_file = self._extract_run_details(self._data_file)
if not self._reload:
raise NotImplementedError('Raw workspaces must be reloaded, run with reload=True')
spectrum_limits = dict()
if self._is_trans:
if reducer.instrument.name() == 'SANS2D' and int(self.shortrun_no) < 568:
dimension = GetInstrumentDetails(reducer.instrument)[0]
spec_min = dimension * dimension * 2
spectrum_limits = {'SpectrumMin': spec_min, 'SpectrumMax': spec_min + 4}
try:
if self._is_trans and reducer.instrument.name() != 'LOQ':
# Unfortunately, LOQ transmission runs acquire 3 monitors, and the 3rd monitor is
# usually the first detector spectrum. This causes the following method to fail
# when it tries to load only monitors. Hence, we are forced to skip this method
# for LOQ. See ticket #8559.
self._load_transmission(reducer.instrument, extra_options=spectrum_limits)
else:
# spectrum_limits differs from its default only for transmission data
self._load(reducer.instrument, extra_options=spectrum_limits)
except RuntimeError as details:
sanslog.warning(str(details))
self._wksp_name = ''
return
return
def _leaveSinglePeriod(self, workspace, period, appendix):
groupW = mtd[workspace]
if not isinstance(groupW, WorkspaceGroup):
logger.warning("Invalid request for getting single period in a non group workspace")
return workspace
if len(groupW) < period:
raise ValueError('Period number ' + str(period) + ' doesn\'t exist in workspace ' + groupW.name())
ws_name = groupW[period - 1].name()
# If we are dealing with event data, then we also want to extract and rename the according monitor data set
monitor_name = ""
if isEventWorkspace(groupW[period - 1]):
# Check if the monitor ws exists and extract it
expected_mon_name = ws_name + appendix
expected_mon_group_name = groupW.name() + appendix
if mtd.doesExist(expected_mon_name):
monitor_name = expected_mon_name
if mtd.doesExist(expected_mon_group_name):
group_mon_ws = mtd[expected_mon_group_name]
group_mon_ws.remove(expected_mon_name)
DeleteWorkspace(expected_mon_group_name)
# remove this workspace from the group
groupW.remove(ws_name)
# remove the entire group
DeleteWorkspace(groupW)
new_name = self._get_workspace_name(period)
new_mon_name = new_name + appendix
if new_name != ws_name:
RenameWorkspace(ws_name, OutputWorkspace=new_name)
if monitor_name != "" and new_mon_name != monitor_name:
RenameWorkspace(monitor_name, OutputWorkspace=new_mon_name)
return new_name
def _extract_run_details(self, run_string):
"""
Takes a run number and file type and generates the filename, workspace name and log name
@param run_string: either the name of a run file or a run number followed by a dot and then the file type, i.e. file extension
"""
listOfFiles = FileFinder.findRuns(run_string)
firstFile = listOfFiles[0]
self.ext = firstFile[-3:]
self.shortrun_no = int(re.findall(r'\d+', run_string)[-1])
return firstFile
def _find_workspace_num_periods(self, workspace):
"""
@param workspace: the name of the workspace
"""
numPeriods = -1
pWorksp = mtd[workspace]
if isinstance(pWorksp, WorkspaceGroup):
# get the number of periods in a group using the fact that each period has a different name
numPeriods = len(pWorksp)
else:
numPeriods = 1
return numPeriods
def _getHistory(self, wk_name):
getWorkspaceReference(wk_name)
if isinstance(wk_name, Workspace):
ws_h = wk_name.getHistory()
else:
if wk_name not in mtd:
return ""
ws_h = mtd[wk_name].getHistory()
hist_str = str(ws_h)
return hist_str
def getCorrospondingPeriod(self, sample_period, reducer):
"""
Gets the period number that corresponds to the passed sample period number:
if the workspace has the same number of periods as the sample it returns the requested
period, if it contains only one period it returns 1, and anything else is an error
@param sample_period: the period in the sample that is of interest
@return: depends on the number of entries in the workspace, could be the same number as passed or 1
@raise RuntimeError: if there is ambiguity
"""
if self.periods_in_file == 1:
# this is a single entry file, don't consider entries
return 1
elif self._period != self.UNSET_PERIOD:
# the user specified a definite period, use it
return self._period
elif self.periods_in_file == reducer.get_sample().loader.periods_in_file:
# use corresponding periods, the same entry as the sample in each case
return sample_period
else:
raise RuntimeError(
'There is a mismatch in the number of periods (entries) in the file between the sample and another run')
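# Illustrative usage sketch (hypothetical run number; 'reducer' is assumed to be a
# configured ISISReducer instance). LoadRun is normally driven via _assignHelper:
#   loader = LoadRun('992.raw', trans=False, reload=True)
#   loader._assignHelper(reducer)          # loads the file into Mantid
#   print(loader.wksp_name, loader.periods_in_file)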
class LoadTransmissions(object):
"""
Loads the file used to apply the transmission correction to the
sample or can
"""
_direct_name = None
_trans_name = None
def __init__(self, is_can=False, reload=True):
"""
Two settings can be set at initialization: whether this is for
the can, and whether the workspaces should be reloaded if they already
exist.
@param is_can: if this is to correct the can (default false i.e. it's for the sample)
@param reload: setting this to false will mean the workspaces aren't reloaded if they already exist (default True i.e. reload)
"""
self.trans = None
self.direct = None
self._reload = reload
self._period_t = -1
self._period_d = -1
self.can = is_can
def set_trans(self, trans, period=-1):
self._trans_name = trans
self._period_t = period
def set_direc(self, direct, period=-1):
self._direct_name = direct
self._period_d = period
def execute(self, reducer, workspace):
if self._trans_name not in [None, '']:
self.trans = LoadRun(self._trans_name, trans=True, reload=self._reload, entry=self._period_t)
self.trans._assignHelper(reducer)
if isinstance(self._trans_name, Workspace):
self._trans_name = self._trans_name.name()
if not self.trans.wksp_name:
# do nothing if no workspace was specified
return '', ''
if self._direct_name not in [None, '']:
self.direct = LoadRun(self._direct_name, trans=True, reload=self._reload, entry=self._period_d)
self.direct._assignHelper(reducer)
if isinstance(self._direct_name, Workspace):
self._direct_name = self._direct_name.name()
if not self.direct.wksp_name:
raise RuntimeError('Transmission run set without direct run error')
# transmission workspaces sometimes have monitor locations, depending on the instrument, load these locations
reducer.instrument.load_transmission_inst(self.trans.wksp_name, self.direct.wksp_name,
reducer.get_beam_center())
return self.trans.wksp_name, self.direct.wksp_name
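# Illustrative usage sketch (hypothetical run numbers; 'reducer' is assumed to be an
# ISISReducer instance):
#   trans_loader = LoadTransmissions(is_can=False, reload=True)
#   trans_loader.set_trans('988.raw')
#   trans_loader.set_direc('987.raw')
#   trans_ws, direct_ws = trans_loader.execute(reducer, None)
# execute returns the names of the loaded transmission and direct workspaces, or
# ('', '') if no transmission workspace could be loaded.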
class CanSubtraction(ReductionStep):
"""
Apply the same corrections to the can that were applied to the sample and
then subtracts this can from the sample.
"""
def __init__(self):
super(CanSubtraction, self).__init__()
def execute(self, reducer, workspace):
"""
Apply same corrections as for sample workspace then subtract from data
"""
if reducer.get_can() is None:
return
# rename the sample workspace, its name will be restored to the original once the subtraction has been done
tmp_smp = workspace + "_sam_tmp"
RenameWorkspace(InputWorkspace=workspace, OutputWorkspace=tmp_smp)
tmp_can = workspace + "_can_tmp"
# do same corrections as were done to the sample
reducer.reduce_can(tmp_can)
# we now have the can workspace, use it
Minus(LHSWorkspace=tmp_smp, RHSWorkspace=tmp_can, OutputWorkspace=workspace)
# Correct the Q resolution entries in the output workspace
correct_q_resolution_for_can(mtd[tmp_smp], mtd[tmp_can], mtd[workspace])
# clean up the workspaces ready for users to see them if required
if reducer.to_Q.output_type == '1D':
StripEndNans()
self._keep_partial_results(tmp_smp, tmp_can)
def get_wksp_name(self):
return self.workspace.wksp_name
wksp_name = property(get_wksp_name, None, None, None)
def get_periods_in_file(self):
return self.workspace.periods_in_file
def _keep_partial_results(self, sample_name, can_name):
# the user asked to keep these results (ticket 8970)
gp_name = 'sample_can_reductions'
if mtd.doesExist(gp_name):
gpr = mtd[gp_name]
for wsname in [sample_name, can_name]:
if not gpr.contains(wsname):
gpr.add(wsname)
else:
GroupWorkspaces([sample_name, can_name], OutputWorkspace=gp_name)
periods_in_file = property(get_periods_in_file, None, None, None)
def _pass_dx_values_to_can_subtracted_if_required(self, original_ws, subtracted_ws):
'''
We pass the DX values from the original workspace to the subtracted workspace.
This means we currently do nothing with potential DX values in the can workspace.
We also assume that if there are DX values, they are present in all spectra.
'''
if not original_ws.hasDx(0):
return
for index in range(0, original_ws.getNumHistograms()):
subtracted_ws.setDx(index, original_ws.dataDX(index))
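# Illustrative sketch (hypothetical workspace names): if mtd['original'] carries DX
# (Q-resolution) values, a call like
#   self._pass_dx_values_to_can_subtracted_if_required(mtd['original'], mtd['subtracted'])
# copies the DX array of every spectrum onto the can-subtracted workspace; if the
# original has no DX values the call is a no-op.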
class Mask_ISIS(ReductionStep):
"""
Marks some spectra so that they are not included in the analysis
Provides ISIS specific mask functionality (e.g. parsing
MASK commands from user files), inherits from Mask
"""
def __init__(self, timemask='', timemask_r='', timemask_f='',
specmask='', specmask_r='', specmask_f=''):
self._xml = []
# these spectra will be masked by the algorithm MaskDetectors
self.detect_list = []
# List of pixels to mask
self.masked_pixels = []
self.time_mask = timemask
self.time_mask_r = timemask_r
self.time_mask_f = timemask_f
self.spec_mask_r = specmask_r
self.spec_mask_f = specmask_f
# as far as I can tell this is used to possibly stop phi masking
# being applied even though _lim_phi_xml has been set
self.mask_phi = True
self.phi_mirror = True
self._lim_phi_xml = ''
self.phi_min = -90.0
self.phi_max = 90.0
# read only phi (only used in ...)
# this option seems totally bizarre to me since it allows
# set_phi_limit to be called without setting the _lim_phi_xml
# string...
self._readonly_phi = False
# used to assess whether set_phi_limit has been called just once,
# in which case exactly one phi range has been masked
# (see also get_phi_limits)
self._numberOfTimesSetPhiLimitBeenCalled = 0
self.spec_list = []
# is set when there is an arm to mask, it's the width in millimetres
self.arm_width = None
# when there is an arm to mask this is its angle in degrees
self.arm_angle = None
# RMD Mod 24/7/13
self.arm_x = None
self.arm_y = None
########################## Masking ################################################
# Mask the corners and beam stop if radius parameters are given
self.min_radius = None
self.max_radius = None
def add_xml_shape(self, complete_xml_element):
"""
Add an arbitrary shape to the region to be masked
@param complete_xml_element: description of the shape to add
"""
if not complete_xml_element.startswith('<'):
raise ValueError('Expected xml string but found: ' + str(complete_xml_element))
self._xml.append(complete_xml_element)
def _infinite_plane(self, id, plane_pt, normal_pt, complement=False):
"""
Generates xml code for an infinite plane
@param id: a string to refer to the shape by
@param plane_pt: a point in the plane
@param normal_pt: the direction of a normal to the plane
@param complement: mask in the direction of the normal or away
@return the xml string
"""
return '<infinite-plane id="' + str(id) + '">' + \
'<point-in-plane x="' + str(plane_pt[0]) + '" y="' + str(plane_pt[1]) + '" z="' + \
str(plane_pt[2]) + '" />' + \
'<normal-to-plane x="' + str(normal_pt[0]) + '" y="' + str(normal_pt[1]) + '" z="' +\
str(normal_pt[2]) + '" />' + \
'</infinite-plane>\n'
def _infinite_cylinder(self, centre, radius, axis, id='shape'):
"""
Generates xml code for an infinitely long cylinder
@param centre: a tuple for a point on the axis
@param radius: cylinder radius
@param axis: cylinder orientation
@param id: a string to refer to the shape by
@return the xml string
"""
return '<infinite-cylinder id="' + str(id) + '">' + \
'<centre x="' + str(centre[0]) + '" y="' + str(centre[1]) + '" z="' + str(centre[2]) + '" />' + \
'<axis x="' + str(axis[0]) + '" y="' + str(axis[1]) + '" z="' + str(axis[2]) + '" />' + \
'<radius val="' + str(radius) + '" /></infinite-cylinder>\n'
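# Illustrative example (hypothetical values): _infinite_cylinder([0.0, 0.0, 0.0], 0.05,
# [0, 0, 1], id='beam_stop') returns
# '<infinite-cylinder id="beam_stop"><centre x="0.0" y="0.0" z="0.0" />'
# '<axis x="0" y="0" z="1" /><radius val="0.05" /></infinite-cylinder>\n'
# which the add_cylinder / add_outside_cylinder helpers below combine with an
# <algebra> tag and pass to add_xml_shape.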
def _finite_cylinder(self, centre, radius, height, axis, id='shape'):
"""
Generates xml code for a finite cylinder
@param centre: a tuple for a point on the axis
@param radius: cylinder radius
@param height: cylinder height
@param axis: cylinder orientation
@param id: a string to refer to the shape by
@return the xml string
"""
return '<cylinder id="' + str(id) + '">' + \
'<centre-of-bottom-base x="' + str(centre[0]) + '" y="' + str(centre[1]) + '" z="' + str(centre[2]) + \
'" />' + \
'<axis x="' + str(axis[0]) + '" y="' + str(axis[1]) + '" z="' + str(axis[2]) + '" />' + \
'<radius val="' + str(radius) + '" /><height val="' + str(height) + '" /></cylinder>\n'
def add_cylinder(self, radius, xcentre, ycentre, ID='shape'):
'''Mask the inside of an infinite cylinder on the input workspace.'''
self.add_xml_shape(
self._infinite_cylinder([xcentre, ycentre, 0.0], radius, [0, 0, 1], id=ID) + '<algebra val="' + str(
ID) + '"/>')
def add_outside_cylinder(self, radius, xcentre=0.0, ycentre=0.0, ID='shape'):
'''Mask out the outside of a cylinder of the specified radius'''
self.add_xml_shape(
self._infinite_cylinder([xcentre, ycentre, 0.0], radius, [0, 0, 1], id=ID) + '<algebra val="#' + str(
ID) + '"/>')
def set_radi(self, _min, _max):
self.min_radius = float(_min) / 1000.
self.max_radius = float(_max) / 1000.
def _whichBank(self, instName, specNo):
"""
Return either 'rear' or 'front' depending on which bank the spectrum number belongs to
@param instName Instrument name. Used by the MASK Ssp command to tell which bank it refers to
@param specNo Spectrum number
"""
bank = 'rear'
if instName.upper() == 'LOQ':
if 16387 <= specNo <= 17784:
bank = 'front'
if instName.upper() == 'SANS2D':
if 36873 <= specNo <= 73736:
bank = 'front'
return bank
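# Illustrative example (hypothetical spectrum numbers): with the ranges above,
# _whichBank('LOQ', 17000) returns 'front', while _whichBank('LOQ', 5000) and
# _whichBank('SANS2D', 20000) both return 'rear'.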
def parse_instruction(self, instName, details): # noqa: C901
"""
Parse an instruction line from an ISIS mask file
@param instName Instrument name. Used by the MASK Ssp command to tell which bank it refers to
@param details Line to parse
"""
details = details.lstrip()
details = details.upper()
if not details.startswith('MASK') and not details.startswith('L/PHI'):
_issueWarning('Ignoring malformed mask line ' + details)
return
if 'L/PHI' in details:
phiParts = details.split()
if len(phiParts) == 3:
mirror = phiParts[0] != 'L/PHI/NOMIRROR'
phiMin = phiParts[1]
phiMax = phiParts[2]
self.set_phi_limit(float(phiMin), float(phiMax), mirror)
return
else:
_issueWarning('Unrecognized L/PHI masking line command "' + details + '"')
return
parts = details.split('/')
# A spectrum mask or mask spectra range with H and V commands
if len(parts) == 1: # Command is to type MASK something
argToMask = details[4:].lstrip().upper()
bank = 'rear'
# special case for MASK Ssp where we try to infer the bank the spectrum number belongs to
if 'S' in argToMask:
if '>' in argToMask:
pieces = argToMask.split('>')
low = int(pieces[0].lstrip('S'))
upp = int(pieces[1].lstrip('S'))
bankLow = self._whichBank(instName, low)
bankUpp = self._whichBank(instName, upp)
if bankLow != bankUpp:
_issueWarning('The spectra in Mask command: ' + details
+ ' belong to two different banks. Default to use bank ' + bankLow)
bank = bankLow
else:
bank = self._whichBank(instName, int(argToMask.lstrip('S')))
# Default to the rear detector if not MASK Ssp command
self.add_mask_string(argToMask, detect=bank)
elif len(parts) == 2: # Command is to type MASK/ something
_type = parts[1] # this is the part of the command following /
typeSplit = _type.split() # used for command such as MASK/REAR Hn and MASK/Line w a
if _type == 'CLEAR': # Command is specifically MASK/CLEAR
self.spec_mask_r = ''
self.spec_mask_f = ''
elif _type.startswith('T'):
if _type.startswith('TIME'):
bin_range = _type[4:].lstrip()
else:
bin_range = _type[1:].lstrip()
self.time_mask += ';' + bin_range
elif len(typeSplit) == 2:
# Commands such as MASK/REAR Hn, where typeSplit[0] then equals 'REAR'
if 'S' in typeSplit[1].upper():
_issueWarning('MASK command of type ' + details
+ ' deprecated. Please use instead MASK Ssp1[>Ssp2]')
if typeSplit[0].upper() != 'REAR' and instName == 'LOQ':
_issueWarning('MASK command of type ' + details
+ ' can, until otherwise requested, only be used for the REAR (default)'
' Main detector of LOQ. '
+ 'Default to the Main detector of LOQ for this mask command')
self.add_mask_string(mask_string=typeSplit[1], detect='rear')
else:
self.add_mask_string(mask_string=typeSplit[1], detect=typeSplit[0])
elif _type.startswith('LINE'):
# RMD mod 24/7/13
if len(typeSplit) == 5:
self.arm_width = float(typeSplit[1])
self.arm_angle = float(typeSplit[2])
self.arm_x = float(typeSplit[3])
self.arm_y = float(typeSplit[4])
elif len(typeSplit) == 3:
self.arm_width = float(typeSplit[1])
self.arm_angle = float(typeSplit[2])
self.arm_x = 0.0
self.arm_y = 0.0
else:
_issueWarning('Unrecognized line masking command "' + details
+ '" syntax is MASK/LINE width angle or MASK/LINE width angle x y')
else:
_issueWarning('Unrecognized masking option "' + details + '"')
elif len(parts) == 3:
_type = parts[1]
if _type == 'CLEAR':
self.time_mask = ''
self.time_mask_r = ''
self.time_mask_f = ''
elif _type == 'TIME' or _type == 'T':
parts = parts[2].split()
if len(parts) == 3:
detname = parts[0].rstrip()
bin_range = parts[1].rstrip() + ' ' + parts[2].lstrip()
if detname.upper() == 'FRONT':
self.time_mask_f += ';' + bin_range
elif detname.upper() == 'REAR':
self.time_mask_r += ';' + bin_range
else:
_issueWarning('Detector \'' + detname
+ '\' not found in currently selected instrument '
+ self.instrument.name() + '. Skipping line.')
else:
_issueWarning('Unrecognized masking line "' + details + '"')
else:
_issueWarning('Unrecognized masking line "' + details + '"')
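# Illustrative examples (hypothetical numbers) of mask-file lines and how the parser
# above treats them:
#   'MASK/CLEAR'             clears the rear and front spectrum mask strings
#   'MASK S16388'            infers the bank with _whichBank and appends to that bank's spectrum mask
#   'MASK/TIME 10000 20000'  appends ';10000 20000' to the shared time mask
#   'MASK/T/REAR 8000 9000'  appends the bin range to the rear-detector time mask
#   'L/PHI 30 60'            calls set_phi_limit(30.0, 60.0) with mirroring enabled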
def add_mask_string(self, mask_string, detect):
if detect.upper() == 'FRONT' or detect.upper() == 'HAB':
self.spec_mask_f += ',' + mask_string
elif detect.upper() == 'REAR':
self.spec_mask_r += ',' + mask_string
else:
_issueWarning('Detector \'' + detect
+ '\' not found in currently selected instrument '
+ self.instrument.name() + '. Skipping line.')
def _ConvertToSpecList(self, maskstring, detector):
'''
Convert a mask string to a spectra list
6/8/9 RKH attempt to add a box mask e.g. h12+v34 (= one pixel at intersection), h10>h12+v101>v123 (=block 3 wide, 23 tall)
@param maskstring Is a comma separated list of mask commands for masking spectra using e.g. the h, s and v commands
'''
# Compile spectra ID list
if maskstring == '':
return ''
masklist = maskstring.split(',')
speclist = ''
for x in masklist:
x = x.lower()
if '+' in x:
bigPieces = x.split('+')
if '>' in bigPieces[0]:
pieces = bigPieces[0].split('>')
low = int(pieces[0].lstrip('hv'))
upp = int(pieces[1].lstrip('hv'))
else:
low = int(bigPieces[0].lstrip('hv'))
upp = low
if '>' in bigPieces[1]:
pieces = bigPieces[1].split('>')
low2 = int(pieces[0].lstrip('hv'))
upp2 = int(pieces[1].lstrip('hv'))
else:
low2 = int(bigPieces[1].lstrip('hv'))
upp2 = low2
if 'h' in bigPieces[0] and 'v' in bigPieces[1]:
ydim = abs(upp - low) + 1
xdim = abs(upp2 - low2) + 1
speclist += detector.spectrum_block(low, low2, ydim, xdim) + ','
elif 'v' in bigPieces[0] and 'h' in bigPieces[1]:
xdim = abs(upp - low) + 1
ydim = abs(upp2 - low2) + 1
speclist += detector.spectrum_block(low2, low, ydim, xdim) + ','
else:
print("error in mask, ignored: " + x)
elif '>' in x: # Commands: MASK Ssp1>Ssp2, MASK Hn1>Hn2 and MASK Vn1>Vn2
pieces = x.split('>')
low = int(pieces[0].lstrip('hvs'))
upp = int(pieces[1].lstrip('hvs'))
if 'h' in pieces[0]:
nstrips = abs(upp - low) + 1
speclist += detector.spectrum_block(low, 0, nstrips, 'all') + ','
elif 'v' in pieces[0]:
nstrips = abs(upp - low) + 1
speclist += detector.spectrum_block(0, low, 'all', nstrips) + ','
else:
for i in range(low, upp + 1):
speclist += str(i) + ','
elif 'h' in x:
speclist += detector.spectrum_block(int(x.lstrip('h')), 0, 1, 'all') + ','
elif 'v' in x:
speclist += detector.spectrum_block(0, int(x.lstrip('v')), 'all', 1) + ','
elif 's' in x: # Command MASK Ssp. Although note commands of type MASK Ssp1>Ssp2 handled above
speclist += x.lstrip('s') + ','
elif x == '':
# empty entries are allowed
pass
elif len(x.split()) == 4:
_issueWarning('Box mask entry "%s" ignored. Box masking is not supported by Mantid' % ('mask ' + x))
else:
raise SyntaxError('Problem reading a mask entry: "%s"' % x)
# remove any trailing comma
if speclist.endswith(','):
speclist = speclist[0:len(speclist) - 1]
return speclist
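# Illustrative examples (hypothetical spectrum/strip numbers) of how mask strings are
# converted above:
#   's100'               adds spectrum '100' directly to the list
#   's200>s204'          expands to '200,201,202,203,204'
#   'h5'                 delegates to detector.spectrum_block(5, 0, 1, 'all')
#   'h10>h12+v101>v123'  delegates to detector.spectrum_block(10, 101, 3, 23)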
def _mask_phi(self, id, centre, phimin, phimax, use_mirror=True):
'''
Mask the detector bank such that only the region specified in the
phi range is left unmasked
The purpose of this method is to populate self._lim_phi_xml
'''
# convert all angles to be between 0 and 360
while phimax > 360:
phimax -= 360
while phimax < 0:
phimax += 360
while phimin > 360:
phimin -= 360
while phimin < 0:
phimin += 360
while phimax < phimin:
phimax += 360
# Convert to radians
phimin = math.pi * phimin / 180.0
phimax = math.pi * phimax / 180.0
id = str(id)
self._lim_phi_xml = \
self._infinite_plane(id + '_plane1', centre,
[math.cos(-phimin + math.pi / 2.0), math.sin(-phimin + math.pi / 2.0), 0]) \
+ self._infinite_plane(id + '_plane2', centre,
[-math.cos(-phimax + math.pi / 2.0), -math.sin(-phimax + math.pi / 2.0), 0])
if use_mirror:
self._lim_phi_xml += self._infinite_plane(id + '_plane3', centre,
[math.cos(-phimax + math.pi / 2.0),
math.sin(-phimax + math.pi / 2.0), 0]) \
+ self._infinite_plane(id + '_plane4', centre, [-math.cos(-phimin + math.pi / 2.0),
-math.sin(-phimin + math.pi / 2.0), 0]) \
+ '<algebra val="#((' + id + '_plane1 ' + id + '_plane2):(' + id + '_plane3 ' + id + '_plane4))" />'
else:
# the formula is different for acute versus obtuse angles
if phimax - phimin > math.pi:
# to get an obtuse angle, a wedge that's more than half the area, we need to add the semi-infinite volumes
self._lim_phi_xml += '<algebra val="#(' + id + '_plane1:' + id + '_plane2)" />'
else:
# for an acute angle, where the wedge is less than half the area, we need to use the intersection of those semi-infinite volumes
self._lim_phi_xml += '<algebra val="#(' + id + '_plane1 ' + id + '_plane2)" />'
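# Worked example of the angle normalisation above (hypothetical limits): a call such as
# _mask_phi('unique phi', [0, 0, 0], -30.0, 30.0) first maps phimin from -30 to 330, then
# bumps phimax from 30 to 390 so that phimax >= phimin, converts both angles to radians
# and builds the bounding infinite planes (four planes when use_mirror is True).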
def _mask_line(self, startPoint, length, width, angle):
'''
Creates the xml to mask a line of the given width and length at the given angle.
The masking object which is used to mask a line of, say,
a detector arm is a finite cylinder
@param startPoint: startPoint of line
@param length: length of line
@param width: width of line in mm
@param angle: angle of line in xy-plane in units of degrees
@return: return xml shape string
'''
return self._finite_cylinder(startPoint, width / 2000.0, length,
[math.cos(angle * math.pi / 180.0), math.sin(angle * math.pi / 180.0), 0.0], "arm")
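# Illustrative example (hypothetical values): _mask_line([0, 0, 0], 1.0, 10.0, 45.0)
# returns the XML for a finite cylinder of radius 10/2000 = 0.005 m and length 1.0 m,
# oriented at 45 degrees in the xy-plane, with the id "arm"; this is the shape used to
# mask a detector arm of the given width.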
def get_phi_limits_tag(self):
"""
Get the values of the lowest and highest boundaries
Used to append to output workspace name
@return 'Phi<low>_<high>' if it has been set, otherwise an empty string
"""
if self.mask_phi and self._lim_phi_xml != '' and (abs(self.phi_max - self.phi_min) != 180.0):
return 'Phi' + str(self.phi_min) + '_' + str(self.phi_max)
else:
return ''
def set_phi_limit(self, phimin, phimax, phimirror, override=True):
'''
... (thanks to Richard for changes to this function
for ticket #)
@param phimin:
@param phimax:
@param phimirror:
@param override: This one I don't understand. It seems
dangerous to be allowed to set this one to false.
Also this option cannot be set from the command interface
@return: return xml shape string
'''
if phimirror: