-
Notifications
You must be signed in to change notification settings - Fork 122
/
IndirectReductionCommon.py
756 lines (593 loc) · 28.5 KB
/
IndirectReductionCommon.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
#pylint: disable=invalid-name,too-many-branches,too-many-arguments,deprecated-module,no-name-in-module
from mantid.api import WorkspaceGroup, AlgorithmManager
from mantid import mtd, logger, config
import os
import numpy as np
#-------------------------------------------------------------------------------
def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files, load_opts=None):
    """
    Loads a set of files and extracts just the spectra we care about (i.e. detector range and monitor).

    Each file is loaded into a workspace named after the file, the instrument
    parameter file is applied, the data is chopped into frames if the IPF
    requests it, the monitor spectrum is extracted to a '_mon' workspace and
    the detectors are cropped to [spec_min, spec_max].

    @param data_files List of data file names
    @param ipf_filename File path/name for the instrument parameter file to load
    @param spec_min Minimum spectra ID to load
    @param spec_max Maximum spectra ID to load
    @param sum_files Sum loaded files into a single workspace
    @param load_opts Additional options to be passed to load algorithm
    @return List of loaded workspace names and flag indicating chopped data
    """
    from mantid.simpleapi import (Load, LoadVesuvio, LoadParameterFile,
                                  ChopData, ExtractSingleSpectrum,
                                  CropWorkspace)
    if load_opts is None:
        load_opts = {}
    workspace_names = []
    for filename in data_files:
        # The filename without path and extension will be the workspace name
        ws_name = os.path.splitext(os.path.basename(filename))[0]
        logger.debug('Loading file %s as workspace %s' % (filename, ws_name))
        if 'VESUVIO' in ipf_filename:
            # VESUVIO data needs its own loader; strip the EVS prefix from the
            # filename and load the fixed spectrum range
            evs_filename = os.path.basename(filename).replace('EVS', '')
            LoadVesuvio(Filename=evs_filename,
                        OutputWorkspace=ws_name,
                        SpectrumList='1-198',
                        **load_opts)
        else:
            Load(Filename=filename,
                 OutputWorkspace=ws_name,
                 **load_opts)
        # Load the instrument parameters
        LoadParameterFile(Workspace=ws_name,
                          Filename=ipf_filename)
        # Add the workspace to the list of workspaces
        workspace_names.append(ws_name)
        # Get the spectrum number for the monitor
        instrument = mtd[ws_name].getInstrument()
        monitor_index = int(instrument.getNumberParameter('Workflow.Monitor1-SpectrumNumber')[0])
        logger.debug('Workspace %s monitor 1 spectrum number :%d' % (ws_name, monitor_index))
        # Chop data if required: the IPF may define a maximum X value above
        # which the data must be split into frames (IndexError means the
        # parameter is not defined, so no chopping)
        try:
            chop_threshold = mtd[ws_name].getInstrument().getNumberParameter('Workflow.ChopDataIfGreaterThan')[0]
            x_max = mtd[ws_name].readX(0)[-1]
            chopped_data = x_max > chop_threshold
        except IndexError:
            chopped_data = False
        logger.information('Workspace %s need data chop: %s' % (ws_name, str(chopped_data)))
        workspaces = [ws_name]
        if chopped_data:
            ChopData(InputWorkspace=ws_name,
                     OutputWorkspace=ws_name,
                     MonitorWorkspaceIndex=monitor_index,
                     IntegrationRangeLower=5000.0,
                     IntegrationRangeUpper=10000.0,
                     NChops=5)
            # After chopping the name refers to a workspace group; process
            # every chop member below
            workspaces = mtd[ws_name].getNames()
        for chop_ws_name in workspaces:
            # Get the monitor spectrum
            monitor_ws_name = chop_ws_name + '_mon'
            ExtractSingleSpectrum(InputWorkspace=chop_ws_name,
                                  OutputWorkspace=monitor_ws_name,
                                  WorkspaceIndex=monitor_index)
            # Crop to the detectors required
            CropWorkspace(InputWorkspace=chop_ws_name, OutputWorkspace=chop_ws_name,
                          StartWorkspaceIndex=int(spec_min) - 1,
                          EndWorkspaceIndex=int(spec_max) - 1)
    logger.information('Loaded workspace names: %s' % (str(workspace_names)))
    logger.information('Chopped data: %s' % (str(chopped_data)))
    # Sum files if needed
    if sum_files:
        if chopped_data:
            workspace_names = sum_chopped_runs(workspace_names)
        else:
            workspace_names = sum_regular_runs(workspace_names)
        logger.information('Summed workspace names: %s' % (str(workspace_names)))
    return workspace_names, chopped_data
#-------------------------------------------------------------------------------
def sum_regular_runs(workspace_names):
    """
    Sum runs with single workspace data.

    The detector and '_mon' monitor workspaces of every run are merged into
    the first run's workspaces, scaled by 1/N to give an average, and the
    remaining input workspaces are deleted. The run numbers of all inputs
    are recorded in the 'multi_run_numbers' sample log of the result.

    @param workspace_names List of names of input workspaces
    @return List containing only the first workspace name, which now holds the sum
    """
    from mantid.simpleapi import (MergeRuns, Scale, AddSampleLog,
                                  DeleteWorkspace)
    # Use the first workspace name as the result of summation
    summed_detector_ws_name = workspace_names[0]
    summed_monitor_ws_name = workspace_names[0] + '_mon'
    # Get a list of the run numbers for the original data
    run_numbers = ','.join([str(mtd[ws_name].getRunNumber()) for ws_name in workspace_names])
    # Generate lists of the detector and monitor workspaces
    detector_workspaces = ','.join(workspace_names)
    monitor_workspaces = ','.join([ws_name + '_mon' for ws_name in workspace_names])
    # Merge the raw workspaces
    MergeRuns(InputWorkspaces=detector_workspaces,
              OutputWorkspace=summed_detector_ws_name)
    MergeRuns(InputWorkspaces=monitor_workspaces,
              OutputWorkspace=summed_monitor_ws_name)
    # Delete old workspaces (all but the first, which now holds the merge)
    for idx in range(1, len(workspace_names)):
        DeleteWorkspace(workspace_names[idx])
        DeleteWorkspace(workspace_names[idx] + '_mon')
    # Derive the scale factor based on number of merged workspaces
    scale_factor = 1.0 / len(workspace_names)
    logger.information('Scale factor for summed workspaces: %f' % scale_factor)
    # Scale the new detector and monitor workspaces
    Scale(InputWorkspace=summed_detector_ws_name,
          OutputWorkspace=summed_detector_ws_name,
          Factor=scale_factor)
    Scale(InputWorkspace=summed_monitor_ws_name,
          OutputWorkspace=summed_monitor_ws_name,
          Factor=scale_factor)
    # Add the list of run numbers to the result workspace as a sample log
    AddSampleLog(Workspace=summed_detector_ws_name, LogName='multi_run_numbers',
                 LogType='String', LogText=run_numbers)
    # Only have the one workspace now
    return [summed_detector_ws_name]
#-------------------------------------------------------------------------------
def sum_chopped_runs(workspace_names):
    """
    Sum runs with chopped data.

    Each entry of workspace_names is expected to be a workspace group
    produced by chopping, where every group member has a matching '_mon'
    monitor workspace. The corresponding chop segments of all runs are
    merged into the first run's workspaces, scaled by 1/N to give an
    average, and the superseded workspaces are deleted.

    @param workspace_names List of names of chopped (grouped) workspaces
    @return List containing only the first workspace name, which now holds the sum
    @raise RuntimeError If any input workspace is not a chopped group
    """
    from mantid.simpleapi import (MergeRuns, Scale, DeleteWorkspace)
    try:
        num_merges = len(mtd[workspace_names[0]].getNames())
    except Exception as exc:
        # A missing name and a non-group (unchopped) workspace raise different
        # error types from mtd, so Exception is the narrowest safe filter here.
        # The original bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt and discarded the cause; chain it instead.
        raise RuntimeError('Not all runs have been chopped, cannot sum.') from exc
    merges = list()
    # Generate a list of workspaces to be merged: one entry per chop segment,
    # each collecting the detector and monitor workspaces across all runs
    for idx in range(0, num_merges):
        merges.append({'detector': list(), 'monitor': list()})
        for ws_name in workspace_names:
            detector_ws_name = mtd[ws_name].getNames()[idx]
            monitor_ws_name = detector_ws_name + '_mon'
            merges[idx]['detector'].append(detector_ws_name)
            merges[idx]['monitor'].append(monitor_ws_name)
    for merge in merges:
        # Merge the chopped run segments
        MergeRuns(InputWorkspaces=','.join(merge['detector']),
                  OutputWorkspace=merge['detector'][0])
        MergeRuns(InputWorkspaces=','.join(merge['monitor']),
                  OutputWorkspace=merge['monitor'][0])
        # Scale the merged runs so the result is an average over the runs
        merge_size = len(merge['detector'])
        factor = 1.0 / merge_size
        Scale(InputWorkspace=merge['detector'][0],
              OutputWorkspace=merge['detector'][0],
              Factor=factor,
              Operation='Multiply')
        Scale(InputWorkspace=merge['monitor'][0],
              OutputWorkspace=merge['monitor'][0],
              Factor=factor,
              Operation='Multiply')
        # Remove the old workspaces that were merged into the first one
        for idx in range(1, merge_size):
            DeleteWorkspace(merge['detector'][idx])
            DeleteWorkspace(merge['monitor'][idx])
    # Only have the one workspace now
    return [workspace_names[0]]
#-------------------------------------------------------------------------------
def identify_bad_detectors(workspace_name):
    """
    Identify detectors which should be masked

    Reads the Workflow.Masking instrument parameter to select the masking
    method; only 'IdentifyNoisyDetectors' triggers any masking here.

    @param workspace_name Name of workspace to use to get masking detectors
    @return List of masked spectra (workspace indices)
    """
    from mantid.simpleapi import (IdentifyNoisyDetectors, DeleteWorkspace)
    instrument = mtd[workspace_name].getInstrument()
    # Get the masking method from the IPF, defaulting to no masking when the
    # parameter is not defined
    try:
        masking_type = instrument.getStringParameter('Workflow.Masking')[0]
    except IndexError:
        masking_type = 'None'
    logger.information('Masking type: %s' % (masking_type))
    masked_spec = list()
    if masking_type == 'IdentifyNoisyDetectors':
        ws_mask = '__workspace_mask'
        IdentifyNoisyDetectors(InputWorkspace=workspace_name,
                               OutputWorkspace=ws_mask)
        # Convert workspace to a list of spectra: a Y value of zero in the
        # mask workspace marks a spectrum to be masked
        num_spec = mtd[ws_mask].getNumberHistograms()
        masked_spec = [spec for spec in range(0, num_spec) if mtd[ws_mask].readY(spec)[0] == 0.0]
        # Remove the temporary masking workspace
        DeleteWorkspace(ws_mask)
    logger.debug('Masked specta for workspace %s: %s' % (workspace_name, str(masked_spec)))
    return masked_spec
#-------------------------------------------------------------------------------
def unwrap_monitor(workspace_name):
    """
    Unwrap monitor if required based on value of Workflow.UnwrapMonitor parameter

    @param workspace_name Name of workspace (the monitor is expected in
                          workspace_name + '_mon')
    @return True if the monitor was unwrapped
    @raise ValueError If smoothing fails due to uneven bin widths
    """
    from mantid.simpleapi import (UnwrapMonitor, RemoveBins, FFTSmooth)
    monitor_workspace_name = workspace_name + '_mon'
    instrument = mtd[monitor_workspace_name].getInstrument()
    # Determine if the monitor should be unwrapped
    try:
        unwrap = instrument.getStringParameter('Workflow.UnwrapMonitor')[0]
        if unwrap == 'Always':
            should_unwrap = True
        elif unwrap == 'BaseOnTimeRegime':
            # Unwrap only when the monitor and detectors start at the same time
            mon_time = mtd[monitor_workspace_name].readX(0)[0]
            det_time = mtd[workspace_name].readX(0)[0]
            logger.notice(str(mon_time) + " " + str(det_time))
            should_unwrap = mon_time == det_time
        else:
            should_unwrap = False
    except IndexError:
        # Parameter not defined in the IPF: do not unwrap
        should_unwrap = False
    logger.debug('Need to unwrap monitor for %s: %s' % (workspace_name, str(should_unwrap)))
    if should_unwrap:
        sample = instrument.getSample()
        sample_to_source = sample.getPos() - instrument.getSource().getPos()
        radius = mtd[workspace_name].getDetector(0).getDistance(sample)
        z_dist = sample_to_source.getZ()
        l_ref = z_dist + radius
        # Use %f: these are floating point distances; the original %d silently
        # truncated them in the log output
        logger.debug('For workspace %s: radius=%f, z_dist=%f, l_ref=%f' %
                     (workspace_name, radius, z_dist, l_ref))
        _, join = UnwrapMonitor(InputWorkspace=monitor_workspace_name,
                                OutputWorkspace=monitor_workspace_name,
                                LRef=l_ref)
        # Remove a small region around the join point left by the unwrap
        RemoveBins(InputWorkspace=monitor_workspace_name,
                   OutputWorkspace=monitor_workspace_name,
                   XMin=join - 0.001, XMax=join + 0.001,
                   Interpolation='Linear')
        try:
            FFTSmooth(InputWorkspace=monitor_workspace_name,
                      OutputWorkspace=monitor_workspace_name,
                      WorkspaceIndex=0)
        except ValueError as exc:
            # Chain the cause so the original FFTSmooth failure is not lost
            raise ValueError('Uneven bin widths are not supported.') from exc
    return should_unwrap
#-------------------------------------------------------------------------------
def process_monitor_efficiency(workspace_name):
    """
    Process monitor efficiency for a given workspace.

    Applies a one-minus-exponential correction to the '_mon' workspace using
    the Workflow.Monitor1-Area/Thickness/Attenuation instrument parameters.
    The correction is skipped when any of the parameters is set to -1.

    @param workspace_name Name of workspace to process monitor for
    @raise ValueError If the monitor parameters are missing from the parameter file
    """
    from mantid.simpleapi import OneMinusExponentialCor
    monitor_workspace_name = workspace_name + '_mon'
    instrument = mtd[workspace_name].getInstrument()
    try:
        area = instrument.getNumberParameter('Workflow.Monitor1-Area')[0]
        thickness = instrument.getNumberParameter('Workflow.Monitor1-Thickness')[0]
        attenuation = instrument.getNumberParameter('Workflow.Monitor1-Attenuation')[0]
    except IndexError as exc:
        # Original message read 'form parameter file'; typo fixed and the
        # cause chained for easier debugging
        raise ValueError('Cannot get monitor details from parameter file') from exc
    if area == -1 or thickness == -1 or attenuation == -1:
        # A value of -1 is the IPF convention for "no correction"
        logger.information('For workspace %s, skipping monitor efficiency' % (workspace_name))
        return
    OneMinusExponentialCor(InputWorkspace=monitor_workspace_name,
                           OutputWorkspace=monitor_workspace_name,
                           C=attenuation * thickness,
                           C1=area)
#-------------------------------------------------------------------------------
def scale_monitor(workspace_name):
    """
    Scale monitor intensity by a factor given as the Workflow.MonitorScalingFactor parameter.
    @param workspace_name Name of workspace to process monitor for
    """
    from mantid.simpleapi import Scale
    monitor_ws = workspace_name + '_mon'
    params = mtd[workspace_name].getInstrument()
    # Read the scaling factor from the IPF; bail out if it is not defined
    try:
        factor = params.getNumberParameter('Workflow.Monitor1-ScalingFactor')[0]
    except IndexError:
        logger.information('No monitor scaling factor found for workspace %s' % workspace_name)
        return
    # A factor of exactly 1 would be a no-op, so skip the algorithm call
    if factor == 1.0:
        return
    Scale(InputWorkspace=monitor_ws,
          OutputWorkspace=monitor_ws,
          Factor=1.0 / factor,
          Operation='Multiply')
#-------------------------------------------------------------------------------
def scale_detectors(workspace_name, e_mode='Indirect'):
    """
    Scales detectors by monitor intensity.
    @param workspace_name Name of detector workspace
    @param e_mode Energy mode (Indirect for spectroscopy, Elastic for diffraction)
    """
    from mantid.simpleapi import (ConvertUnits, RebinToWorkspace, Divide)
    monitor_ws = workspace_name + '_mon'
    # Put the detectors onto a wavelength axis so they are comparable with
    # the monitor
    ConvertUnits(InputWorkspace=workspace_name,
                 OutputWorkspace=workspace_name,
                 Target='Wavelength',
                 EMode=e_mode)
    # Match the detector binning to the monitor binning before dividing
    RebinToWorkspace(WorkspaceToRebin=workspace_name,
                     WorkspaceToMatch=monitor_ws,
                     OutputWorkspace=workspace_name)
    # Normalise the detector counts by the monitor intensity
    Divide(LHSWorkspace=workspace_name,
           RHSWorkspace=monitor_ws,
           OutputWorkspace=workspace_name)
#-------------------------------------------------------------------------------
def group_spectra(workspace_name, masked_detectors, method, group_file=None, group_ws=None):
    """
    Groups spectra in a given workspace according to the Workflow.GroupingMethod and
    Workflow.GroupingFile parameters and GroupingPolicy property.

    @param workspace_name Name of workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    @raise RuntimeError If the grouping file cannot be found or the method is invalid
    """
    from mantid.simpleapi import (MaskDetectors, GroupDetectors)
    instrument = mtd[workspace_name].getInstrument()
    # If grouping as per the IPF is desired
    if method == 'IPF':
        # Get the grouping method from the parameter file
        try:
            grouping_method = instrument.getStringParameter('Workflow.GroupingMethod')[0]
        except IndexError:
            grouping_method = 'Individual'
    else:
        # Otherwise use the value of GroupingPolicy
        grouping_method = method
    logger.information('Grouping method for workspace %s is %s' % (workspace_name, grouping_method))
    if grouping_method == 'Individual':
        # Nothing to do here
        return
    elif grouping_method == 'All':
        # Get a list of all spectra minus those which are masked
        num_spec = mtd[workspace_name].getNumberHistograms()
        spectra_list = [spec for spec in range(0, num_spec) if spec not in masked_detectors]
        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       WorkspaceIndexList=spectra_list)
    elif grouping_method == 'File':
        # Get the filename for the grouping file: the property takes
        # precedence over the IPF parameter
        if group_file is not None:
            grouping_file = group_file
        else:
            try:
                grouping_file = instrument.getStringParameter('Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError('Cannot get grouping file from properties or IPF.')
        # If the file is not found assume it is in the grouping files directory
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(config.getString('groupingFiles.directory'), grouping_file)
        # If it is still not found just give up
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' % (grouping_file))
        # Mask detectors if required
        if len(masked_detectors) > 0:
            MaskDetectors(Workspace=workspace_name,
                          WorkspaceIndexList=masked_detectors)
        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       MapFile=grouping_file)
    elif grouping_method == 'Workspace':
        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       CopyGroupingFromWorkspace=group_ws)
    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' % (grouping_method, workspace_name))
#-------------------------------------------------------------------------------
def fold_chopped(workspace_name):
    """
    Folds multiple frames of a data set into one workspace.

    The group members are merged onto a common X axis, then divided by a
    scaling workspace holding, for each bin, the number of frames whose X
    range covers that bin - so overlapping regions become an average.

    @param workspace_name Name of the group to fold
    """
    from mantid.simpleapi import (MergeRuns, DeleteWorkspace, CreateWorkspace,
                                  Divide)
    workspaces = mtd[workspace_name].getNames()
    merged_ws = workspace_name + '_merged'
    MergeRuns(InputWorkspaces=','.join(workspaces), OutputWorkspace=merged_ws)
    scaling_ws = '__scaling_ws'
    unit = mtd[workspace_name].getItem(0).getAxis(0).getUnit().unitID()
    # Record the X extent of each frame, then delete the now-merged member
    ranges = []
    for ws in mtd[workspace_name].getNames():
        x_min = mtd[ws].dataX(0)[0]
        x_max = mtd[ws].dataX(0)[-1]
        ranges.append((x_min, x_max))
        DeleteWorkspace(Workspace=ws)
    data_x = mtd[merged_ws].readX(0)
    data_y = []
    data_e = []
    # Count how many frames contribute at each X point of the merged data
    for i in range(0, mtd[merged_ws].blocksize()):
        y_val = 0.0
        for rng in ranges:
            if data_x[i] >= rng[0] and data_x[i] <= rng[1]:
                y_val += 1.0
        data_y.append(y_val)
        data_e.append(0.0)
    CreateWorkspace(OutputWorkspace=scaling_ws,
                    DataX=data_x,
                    DataY=data_y,
                    DataE=data_e,
                    UnitX=unit)
    # Average the overlap regions by dividing by the contribution counts
    Divide(LHSWorkspace=merged_ws,
           RHSWorkspace=scaling_ws,
           OutputWorkspace=workspace_name)
    DeleteWorkspace(Workspace=merged_ws)
    DeleteWorkspace(Workspace=scaling_ws)
#-------------------------------------------------------------------------------
def rename_reduction(workspace_name, multiple_files):
    """
    Renames a workspace according to the naming policy in the Workflow.NamingConvention parameter.

    @param workspace_name Name of workspace
    @param multiple_files Insert the multiple file marker
    @return New name of workspace
    @raise RuntimeError If the naming convention is not recognised
    """
    from mantid.simpleapi import RenameWorkspace
    import string
    is_multi_frame = isinstance(mtd[workspace_name], WorkspaceGroup)
    # Get the instrument (from the first group member for multi frame data)
    if is_multi_frame:
        instrument = mtd[workspace_name].getItem(0).getInstrument()
    else:
        instrument = mtd[workspace_name].getInstrument()
    # Get the naming convention parameter from the parameter file
    try:
        convention = instrument.getStringParameter('Workflow.NamingConvention')[0]
    except IndexError:
        # Default to run title if naming convention parameter not set
        convention = 'RunTitle'
    logger.information('Naming convention for workspace %s is %s' % (workspace_name, convention))
    # Get run number
    if is_multi_frame:
        run_number = mtd[workspace_name].getItem(0).getRun()['run_number'].value
    else:
        run_number = mtd[workspace_name].getRun()['run_number'].value
    logger.information('Run number for workspace %s is %s' % (workspace_name, run_number))
    inst_name = instrument.getName()
    # Find the short name of the instrument from the facility definitions.
    # The original 'except _:' raised NameError ('_' is undefined) whenever a
    # facility did not know the instrument, and left short_inst_name unbound
    # if no facility matched; fall back to the full name in that case.
    short_inst_name = inst_name
    for facility in config.getFacilities():
        try:
            short_inst_name = facility.instrument(inst_name).shortName()
            break
        except Exception:
            # This facility does not have an instrument of that name
            pass
    logger.information('Short name for instrument %s is %s' % (inst_name, short_inst_name))
    # Get run title
    if is_multi_frame:
        run_title = mtd[workspace_name].getItem(0).getRun()['run_title'].value.strip()
    else:
        run_title = mtd[workspace_name].getRun()['run_title'].value.strip()
    logger.information('Run title for workspace %s is %s' % (workspace_name, run_title))
    if multiple_files:
        multi_run_marker = '_multi'
    else:
        multi_run_marker = ''
    if convention == 'None':
        new_name = workspace_name
    elif convention == 'RunTitle':
        # Strip any characters from the title that are unsafe in a workspace name
        valid = "-_.() %s%s" % (string.ascii_letters, string.digits)
        formatted_title = ''.join([c for c in run_title if c in valid])
        new_name = '%s%s%s-%s' % (short_inst_name.lower(), run_number, multi_run_marker, formatted_title)
    elif convention == 'AnalyserReflection':
        analyser = instrument.getStringParameter('analyser')[0]
        reflection = instrument.getStringParameter('reflection')[0]
        new_name = '%s%s%s_%s%s_red' % (short_inst_name.upper(), run_number, multi_run_marker,
                                        analyser, reflection)
    else:
        raise RuntimeError('No valid naming convention for workspace %s' % workspace_name)
    logger.information('New name for %s workspace: %s' % (workspace_name, new_name))
    RenameWorkspace(InputWorkspace=workspace_name,
                    OutputWorkspace=new_name)
    return new_name
#-------------------------------------------------------------------------------
def plot_reduction(workspace_name, plot_type):
    """
    Plot a given workspace based on the Plot property.

    @param workspace_name Name of workspace to plot
    @param plot_type Type of plot to create (Spectra, Contour or Both)
    """
    # mantidplot is only importable inside MantidPlot, hence the local imports
    if plot_type == 'Spectra' or plot_type == 'Both':
        from mantidplot import plotSpectrum
        num_spectra = mtd[workspace_name].getNumberHistograms()
        try:
            plotSpectrum(workspace_name, range(0, num_spectra))
        except RuntimeError:
            logger.notice('Spectrum plotting canceled by user')
    # A contour plot needs more than one spectrum to be meaningful
    can_plot_contour = mtd[workspace_name].getNumberHistograms() > 1
    if (plot_type == 'Contour' or plot_type == 'Both') and can_plot_contour:
        from mantidplot import importMatrixWorkspace
        plot_workspace = importMatrixWorkspace(workspace_name)
        plot_workspace.plotGraph2D()
#-------------------------------------------------------------------------------
def save_reduction(worksspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param worksspace_names List of workspace names to save
                            (NOTE: parameter name typo is kept for backwards compatibility)
    @param formats List of formats to save in (spe, nxs, nxspe, ascii, aclimax, davegrp)
    @param x_units Output X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)
    for workspace_name in worksspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')
        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')
        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')
        if 'ascii' in formats:
            # Version 1 of SaveAscii produces output that works better with excel/origin
            # For some reason this has to be done with an algorithm object, using the function
            # wrapper with Version did not change the version that was run
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 1)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()
        if 'aclimax' in formats:
            # Rebin to the parameters aClimax expects before saving
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000' #cm-1
            else:
                bins = '3, -0.005, 500' #meV
            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')
        if 'davegrp' in formats:
            # DAVE group files require an elastic Q spectrum axis
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name + '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
#-------------------------------------------------------------------------------
def get_multi_frame_rebin(workspace_name, rebin_string):
    """
    Creates a rebin string for rebinning multiple frames data.

    @param workspace_name Name of multiple frame workspace group
    @param rebin_string Original rebin string
    @return New rebin string
    @return Maximum number of bins in input workspaces
    """
    workspace = mtd[workspace_name]
    # Only applicable when a rebin was requested and the data is multi frame
    if rebin_string is None or not isinstance(workspace, WorkspaceGroup):
        return None, None
    # With five or more components the first two apply to the first frame
    # only; the remainder form the rebin string for the other frames
    components = rebin_string.split(',')
    if len(components) >= 5:
        secondary_rebin = ','.join(components[2:])
    else:
        secondary_rebin = rebin_string
    # Largest bin count over all group members
    max_bins = np.amax([mtd[ws].blocksize() for ws in workspace.getNames()])
    return secondary_rebin, max_bins
#-------------------------------------------------------------------------------
def rebin_reduction(workspace_name, rebin_string, multi_frame_rebin_string, num_bins):
    """
    Rebin a reduced workspace, honouring multiple frame parameters when given.

    @param workspace_name Name of workspace to rebin
    @param rebin_string Rebin parameters
    @param multi_frame_rebin_string Rebin string for multiple frame rebinning
    @param num_bins Max number of bins in input frames
    """
    from mantid.simpleapi import (Rebin, RebinToWorkspace)
    if rebin_string is None:
        # No rebin requested: just ensure uniform binning across spectra
        try:
            RebinToWorkspace(WorkspaceToRebin=workspace_name,
                             WorkspaceToMatch=workspace_name,
                             OutputWorkspace=workspace_name)
        except RuntimeError:
            logger.warning('Rebinning failed, will try to continue anyway.')
        return
    # Select which rebin parameters apply to this workspace
    is_multi_frame = multi_frame_rebin_string is not None and num_bins is not None
    if is_multi_frame and mtd[workspace_name].blocksize() != num_bins:
        # A frame whose bin count differs from the maximum uses the
        # multiple frame parameters
        params = multi_frame_rebin_string
    else:
        # Regular data, or the frame whose bin count matches the maximum
        params = rebin_string
    Rebin(InputWorkspace=workspace_name,
          OutputWorkspace=workspace_name,
          Params=params)
#-------------------------------------------------------------------------------