/
app.py
1345 lines (1198 loc) · 59.7 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import os
import re
import matplotlib
import warnings
warnings.simplefilter('ignore')
matplotlib.use('Qt4Agg')
matplotlib.rcParams['backend.qt4'] = 'PySide'
os.environ['ETS_TOOLKIT'] = 'qt4'
from traits.etsconfig.api import ETSConfig
ETSConfig.toolkit = 'qt4'
from enable.api import ComponentEditor
from traits.api import List, Str, Float, HasTraits, Instance, Button, Enum, Bool, \
DelegatesTo, Range, HTML
from traitsui.api import Item, UItem, HGroup, VGroup, View, spring, Label, HSplit, Group, VGrid, \
CheckListEditor, Tabbed, DefaultOverride, EnumEditor, HTMLEditor, InstanceEditor
from pyface.api import DirectoryDialog, OK, ImageResource, ProgressDialog
from chaco.api import OverlayPlotContainer
import csv
from xye import XYEDataset
from chaco_output import PlotOutput
from raw_data_plot import RawDataPlot
from dataset_editor import DatasetEditor, DatasetUI
from wavelength_editor import WavelengthEditor, WavelengthUI
from ui_helpers import get_save_as_filename, get_save_as_csv_filename, \
open_file_dir_with_default_handler, open_file_with_default_handler, \
get_file_list_from_dialog, get_file_from_dialog, get_save_as_xyz_filename, get_transformed_filename, \
get_txt_filename, get_save_processed_dir
import processing
from processing import DatasetProcessor
from plot_generator import PlotGenerator
from peak_fit_window import PeakFitWindow
from processing_background_removal import subtract_background_from_all_datasets, \
get_subtracted_datasets, CurveFitter
from define_background import empty_xye_dataset, min_max_x
from xyzoutput import write_to_file, XYZGenerator
from transform_data import apply_transform, find_datasets_with_descriptor, dataset_already_transformed
from peak_fitting import autosearch_peaks, fit_peaks_background, createPeakRows
from peak_editor import PeakFittingEditor
from traitsui.message import message
from traitsui.editors.button_editor import ButtonEditor
import numpy
import version
# Main window title, including the running package version.
title = "PDViPeR - " + version.__version__
# Initial (width, height) of the main window, in pixels.
size = (1200, 700)
def create_datasetui(dataset):
    """Create the two UI wrapper objects for *dataset* and register them in
    its metadata under 'ui' (dataset editor) and 'ui_w' (wavelength editor).
    Returns the (DatasetUI, WavelengthUI) pair."""
    dataset_ui = DatasetUI(name=dataset.name, dataset=dataset, color=None)
    wavelength_ui = WavelengthUI(name=dataset.name, dataset=dataset)
    dataset.metadata['ui'] = dataset_ui
    dataset.metadata['ui_w'] = wavelength_ui
    return (dataset_ui, wavelength_ui)
class Global(HasTraits):
    """
    This is just a container class for the file list so that the normalisation reference
    selector can use a drop-down list selector normally reserved by traitsui for Enum
    traits, but where we want to use a list instead and have the dropdown updated upon
    updates to the list. See discussion here describing this:
    http://enthought-dev.117412.n3.nabble.com/How-to-force-an-update-to-an-enum-list-to-propagate-td3489135.html
    """
    file_list = List([])
    dataset_names = List([])

    def populate_list(self, filepaths):
        """Fill file_list with the sorted position tokens (p1..p4) found in
        the basenames of *filepaths*, followed by all the basenames."""
        positions = set()
        for f in filepaths:
            # The original used a bare "except:" here, which silently
            # swallowed every error; the only expected failure is an absent
            # position token, so test the match explicitly instead.
            m = re.search('_(p[1-4])_', os.path.basename(f))
            if m is not None:
                positions.add(m.group(1))
        self.file_list = sorted(positions) + [os.path.basename(f) for f in filepaths]

    def populate_dataset_name_list(self, datasets):
        """Fill dataset_names with the sorted unique names of *datasets*."""
        self.dataset_names = sorted(set(d.name for d in datasets))

g = Global()
class ControlPanel(HasTraits):
    """
    Controller for the left-hand panel of the PDViPeR main window.

    Holds the traits backing every button/field in the View, Process,
    Background, Transforms and Peak Fitting tabs, the traitsui layout
    groups, and the *_changed handlers that react to user actions.
    """
    file_paths = List(Str)

    # Top-level action buttons (always-visible column).
    open_files = Button("Open files...")
    edit_datasets = Button("Edit datasets...")
    generate_plot = Button("Generate plot...")
    help_button = Button("Help...")
    close_files = Button("Reset data and plots")
    export_xyz = Button("Save as xyz file...")

    # View tab traits.
    scale = Enum('linear', 'log', 'sqrt')
    options = List
    # NOTE: calling the trait factory result (e.g. Enum(...)('Overlay'))
    # sets the trait's default value; this pattern recurs below.
    legend = Enum('Overlay', 'Off', 'Window')('Overlay')
    reset_button = Button("Reset view")
    copy_to_clipboard = Button("Copy to clipboard")
    save_as_image = Button("Save as image...")
    save_parabs_as_csv = Button("Save parabs as csv...")
    save_path = Str
    _options = [ 'Show gridlines', 'Show crosslines' ]

    # Process tab traits.
    merge_positions = Enum('all', 'p1+p2', 'p3+p4', 'p12+p34')('p1+p2')
    load_partners = Button
    splice = Bool(True)
    merge = Bool(False)
    merge_regrid = Bool(False)
    normalise = Bool(True)
    # See comment in class Global() for an explanation of the following traits
    g = Instance(Global, ())
    file_list = DelegatesTo('g')
    dataset_names = DelegatesTo('g')
    normalisation_source_filenames = Enum(values='file_list')
    selection_dataset_names = Enum(values='dataset_names')
    def _g_default(self):
        # Share the single module-level Global instance.
        return g
    correction = Float(0.0)
    align_positions = Bool(False)
    bt_start_peak_select = Button
    bt_end_peak_select = Button
    peak_selecting = Bool(False)
    what_to_plot = Enum('Plot new', 'Plot old and new')('Plot old and new')
    bt_process = Button("Apply")
    bt_undo_processing = Button("Undo")
    bt_save = Button("Save...")
    most_recent_path = Str('')

    # Background removal tab traits.
    man_define_bg_button_label = Str("Define")
    bt_manually_define_background = Button('man_define_bg_button_label')
    bt_subtract_manual_background = Button("Subtract")
    bt_clear_manual_background = Button("Clear")
    background_selected = Bool(False)
    curve_order = Range(1, 1000)(3)
    curve_type = Enum('Linear Interpolation', 'Chebyschev Polynomial', 'Cosine Fourier Series')('Chebyschev Polynomial')
    bt_fit = Button("Curve fit")
    bt_save_curve = Button("Save fit params")
    bt_clear_fit = Button("Clear fit")
    bt_load_background = Button("Load...")
    bt_subtract_background = Button("Subtract background")
    bt_save_background = Button("Save...")
    # backgrounded_files = List
    background_file = None  # XYEDataset loaded via "Load...", if any
    backgrounds_fitted = Bool(False)
    background_fits = List (XYEDataset)
    background_manual = None  # manually defined background dataset, if any
    all_fit_params = List
    # NOTE(review): mutable class-level attribute, but __init__ reassigns it
    # per instance, so instances do not actually share state.
    background_datasets = set()
    manual_bg_fitted = Bool(False)
    selected_dataset = Str

    # theta/d/Q (Transforms) tab traits.
    filename_field = Str("d")
    bt_convertscale_abscissa = Button("Convert/scale abscissa...")
    raw_data_plot = Instance(RawDataPlot)
    fitter = None  # background CurveFitter instance, if a fit exists
    # rescaling
    x_offset = Float(0.0)
    y_offset = Float(0.0)
    x_multiplier = Float(1.0)
    y_multiplier = Float(1.0)
    bt_apply_transform = Button("Apply")
    bt_save_transformed = Button("Save...")
    transform_selected_dataset = Enum(values='dataset_names')
    select_range_min = Float(0.0)
    select_range_max = Float(-1.0)  # -1.0 means "up to the maximum x value"
    bt_crop_range = Button("Select range")

    # Peak fitting tab traits.
    bt_autosearch_peaks = Button("Auto search peaks")
    select_peaks_button_label = Str("Select peaks")
    bt_select_peaks = Button('select_peaks_button_label')
    bt_edit_peaks = Button("Edit peaks")
    bt_plot_peak_fit = Button("Plot fitted peaks")
    bt_clear_peaks = Button("Clear peaks")
    peak_editor = None  # PeakFittingEditor once peaks have been selected
    peak_labels = []
    peak_select_dataset = Enum(values='dataset_names')
    bt_refine_peaks = Button("LSQ peak refinement")
    toggle_peaks_label_value = Str("Hide peak Labels")
    bt_toggle_peak_labels = Button('toggle_peaks_label_value')
    show_peak_labels = True
    peak_list = []
view_group = VGroup(
Group(
#Label('Scale:'),
UItem('scale',resizable=True, enabled_when='object._has_data()'),
show_border=True, label='Scale:'),
Group(
# Label('Legend:'),
UItem('legend', resizable=True, name='_legend', editor=EnumEditor(values={
'Overlay': '1:Overlay',
'Off': '2:Off',
'Window': '3:Separate Window', },
cols=3),
style='custom', enabled_when='object._has_data()'),
show_border=True, label="Legend",
),
Group(
UItem('options', editor=CheckListEditor(name='_options'), style='custom',
enabled_when='object._has_data()'),
),
UItem('reset_button',resizable=True, style="custom", enabled_when='object._has_data()'),
spring,
'_',
spring,
UItem('copy_to_clipboard', resizable=True,enabled_when='object._has_data()'),
UItem('save_as_image', resizable=True,enabled_when='object._has_data()'),
UItem('save_parabs_as_csv', resizable=True,enabled_when='object._has_data()'),
springy=False, label="View", style="custom", dock="tab",
)
process_group = VGroup(
VGroup(
UItem(name='merge_positions',
style='custom',
editor=EnumEditor(values={
'p1+p2' : '1: p1+p2',
'p3+p4' : '2: p3+p4',
'p12+p34' : '3: p12+p34',
'all' : '4: all',
}, cols=2),
resizable=True,
enabled_when='object._has_data()'
),
UItem('load_partners', resizable=True, enabled_when='object._has_data() and (object.merge_positions != "all")'),
label='Positions to process:',show_border=True,springy=False,
),
VGroup(
Item('align_positions', enabled_when='object._has_data() and (object.merge_positions != "all")'),
HGroup(
UItem('bt_start_peak_select', label='Select peak',
enabled_when='object.align_positions and not object.peak_selecting and (object.merge_positions != "all")'),
UItem('bt_end_peak_select', label='Align',
enabled_when='object.peak_selecting and (object.merge_positions != "all")'),
springy=True,
),
Item('correction', label='Zero correction:', enabled_when='object._has_data()'),
show_border=True,springy=False,
),
VGroup(
HGroup(
Item('splice'),
Item('merge', enabled_when='object.merge_positions != "p12+p34"'),
enabled_when='object._has_data() and (object.merge_positions != "all")'
),
HGroup(
Item('normalise', label='Normalise', enabled_when='object._has_data() and (object.merge_positions != "p12+p34")'),
Item('merge_regrid', label='Grid', enabled_when='object._has_data()'),
),
VGroup(
Label('Normalise to:'),
UItem('normalisation_source_filenames', style='simple',resizable=True,
enabled_when='object.normalise and object._has_data()'),
),
show_border=True,springy=False,
),
UItem('what_to_plot',resizable=True, editor=DefaultOverride(cols=2), style='custom',
enabled_when='object._has_data()'),
spring,
spring,
UItem('bt_process', resizable=True,enabled_when='object._has_data()'),
UItem('bt_undo_processing', resizable=True,enabled_when='object.undo_state is not None'),
UItem('bt_save', resizable=True,enabled_when='object._has_data()'),
label='Process',
springy=False,
)
background_removal_group = VGroup(
VGrid(
UItem('bt_manually_define_background', editor=ButtonEditor(label_value='object.man_define_bg_button_label'),
enabled_when='object._has_data()'),
UItem('bt_clear_manual_background', enabled_when='object._has_data() and object.manual_bg_fitted'),
show_border=True, label='Define a background curve by selecting points',
),
Group(
UItem('bt_load_background',resizable=True, enabled_when='object._has_data()'),
show_border=True, label='Load from file:',
),
VGroup(
UItem('selection_dataset_names', resizable=True, enabled_when='object._has_data()'),
UItem('curve_type', resizable=True, enabled_when='object._has_data()'),
Label('Number of Fit Parameters:'),
UItem('curve_order', resizable=True, enabled_when='object._has_data()'),
HGroup(
UItem('bt_fit', enabled_when='object._has_data()'),
UItem('bt_save_curve', enabled_when='object._has_data()'),
UItem('bt_clear_fit', enabled_when='object._has_data()'),
),
show_border=True,springy=False,label='Fit a background curve:',
),
spring,
spring,
UItem('bt_subtract_background', resizable=True, enabled_when='object._has_data() and (object.background_file!=None or object.manual_bg_fitted or object.backgrounds_fitted)'),
UItem('bt_save_background',resizable=True, enabled_when='object._has_data() and object.processed_datasets!=[] and (object.background_file!=None or object.manual_bg_fitted or object.backgrounds_fitted)'),
label='Background',
springy=False,
)
convert_xscale_group = VGroup(
VGroup(
UItem('filename_field',
enabled_when='object._has_data()',resizable=True),
UItem('bt_convertscale_abscissa',
label='Convert/scale abscissa...',
enabled_when='object._has_data()',resizable=True
),
show_border=True, label='Filename label (prefix_<label>_nnnn.xye):',
),
VGroup(
# Label('Dataset:'),
Group(
UItem('transform_selected_dataset',resizable=True, enabled_when='object._has_data()'),
show_border=True,label='Dataset:'),
HGroup(
Item('select_range_min',resizable=True, label='min', enabled_when='object._has_data()'),
Item('select_range_max',resizable=True, label='max', enabled_when='object._has_data()', tooltip='-1.0 selects to the maximum x value'),
show_border=True, label='Select Range (x) axis',
),
UItem('bt_crop_range',resizable=True, enabled_when='object._has_data()'),
),
spring,
VGroup(
HGroup(
# Label('Offset'),
Item('x_offset',resizable=True, label='x', enabled_when='object._has_data()'),
Item('y_offset', resizable=True,label='y', enabled_when='object._has_data()'),
label='Offset', show_border = True,
),
HGroup(
# Label('Multiplier'),
Item('x_multiplier',resizable=True, label='x', enabled_when='object._has_data()'),
Item('y_multiplier', resizable=True,label='y', enabled_when='object._has_data()'),
label='Multiplier', show_border=True,
),
UItem('bt_apply_transform',resizable=True, enabled_when='object._has_data()'),
label='X and Y offsets and multipliers to rescale data', show_border=True,
),
spring,
UItem('bt_save_transformed', resizable=True,enabled_when='object._has_data()'),
label='Transforms',
)
peak_fitting_group = VGroup(
VGroup(
UItem('peak_select_dataset', resizable=True, label='Dataset', enabled_when='object._has_data()'),
show_border=True, label='Dataset',
),
VGroup(
UItem('bt_autosearch_peaks', resizable=True, enabled_when='object._has_data() and object.select_peaks_button_label=="Select peaks"'),
UItem('bt_select_peaks',resizable=True, editor=ButtonEditor(label_value='object.select_peaks_button_label'), enabled_when='object._has_data()'),
UItem('bt_edit_peaks',resizable=True, enabled_when='object.peak_editor is not None and object.peak_editor.raw_dataset.name==object.peak_select_dataset'),
UItem('bt_plot_peak_fit', resizable=True,enabled_when='object.peak_editor is not None and object.peak_editor.raw_dataset.name==object.peak_select_dataset'),
UItem('bt_clear_peaks', resizable=True,enabled_when='object.peak_editor is not None and object.peak_editor.raw_dataset.name==object.peak_select_dataset'),
UItem('bt_toggle_peak_labels',resizable=True, editor=ButtonEditor(label_value='object.toggle_peaks_label_value'), enabled_when='object.peak_editor!=None and object.peak_editor.raw_dataset.name==object.peak_select_dataset')
),
spring,
spring,
label='Peak Fitting',
)
view = View(VGroup(
UItem('open_files', resizable=True, padding=0),
UItem('edit_datasets', resizable=True, enabled_when='object._has_data()'),
UItem('generate_plot', resizable=True, enabled_when='object._has_data()'),
UItem('help_button', resizable=True),
UItem('close_files', resizable=True, enabled_when='object._has_data()'),
UItem('export_xyz', resizable=True, enabled_when='object._has_data()'),
spring,
spring,
Group(
view_group,
process_group,
background_removal_group,
convert_xscale_group,
peak_fitting_group,
springy=False,
show_border=False,
layout="tabbed",
),
),
)
def __init__(self, container, raw_data_plot, plot, *args, **kws):
    """
    container: the OverlayPlotContainer hosting the plot (redrawn on change)
    raw_data_plot: the RawDataPlot wrapper that renders datasets
    plot: the underlying plot component
    """
    super(ControlPanel, self).__init__(*args, **kws)
    self.container = container
    self.raw_data_plot = raw_data_plot
    self.plot = plot
    self.datasets = []
    self.dataset_pairs = set()
    self.undo_state = None
    self.peak_list=[]
    self.peak_labels=[]
    self.file_paths=[]
    # The list of all options.
    #self._options = [ 'Show legend', 'Show gridlines', 'Show crosslines' ]
    self._options = [ 'Show gridlines', 'Show crosslines' ]
    self._legend = 'Overlay'
    self.options = self._options
    # Self-assignment deliberately fires the _legend_changed handler so the
    # plot reflects the default legend mode at startup.
    self.legend = self.legend
    self.processed_datasets = []
    self.background_datasets = set()
    self.bg_removed_datasets = set() # keep track of datasets where we have removed the background so we don't do it twice
def _has_data(self):
    """Return True when at least one dataset is loaded (used by enabled_when)."""
    return bool(self.datasets)
def _rename_dats(self, file_list):
    """Return the file list with every '.dat' suffix rewritten to '.xye'.

    Bug fix: the previous implementation removed and appended entries while
    iterating over the very same list, which skips every second element and
    leaves some '.dat' names unrenamed. Rebuild the list instead.
    """
    return [re.sub(r'(.*?)(\.dat)$', r"\1.xye", f) for f in file_list]
def _open_files_changed(self):
    """Open-files button: prompt for files; on a non-empty selection reset
    all state and load the chosen files (".dat" names become ".xye")."""
    selected = get_file_list_from_dialog()
    if not selected:
        return
    self._reset_all()
    self.most_recent_path = os.path.dirname(selected[0])
    self.file_paths = self._rename_dats(selected)
def _reset_all(self):
    """Clear all loaded data, processing state, fits and plot annotations."""
    self.datasets = []
    self.dataset_pairs = set()
    self.undo_state = None
    self.file_paths = []
    self.processed_datasets = []
    self.background_file = None
    self.background_manual = None
    self.background_fit = None
    self.backgrounds_fitted = False
    self.bg_removed_datasets = set()
    self.background_datasets = set()
    self.peak_list = []
    self.fitter = None
    self.raw_data_plot.reset_tools()
    # Bug fix: the old guard was "self.peak_labels is not []", an identity
    # comparison against a fresh list literal that is always True. Test for
    # a non-empty list instead.
    if self.peak_labels:
        self.raw_data_plot.remove_peak_labels(self.peak_labels)
    self.peak_labels = []
    self.peak_editor = None
def _file_paths_changed(self, new):
    """
    When the file dialog box is closed with a selection of filenames,
    just generate a list of all the filenames
    """
    self.datasets = []
    numfiles=len(self.file_paths[:])
    # Only bother showing a progress dialog for larger selections.
    if numfiles>20:
        progress = ProgressDialog(title="progress", message="loading %d files."%numfiles, max=numfiles )
        progress.open()
        # self.file_paths is modified by _add_dataset_pair() so iterate over a copy of it.
        for i,filename in enumerate(self.file_paths[:]):
            self._add_xye_dataset(filename)
            # update() returns (continue, skip) flags from the dialog.
            (cont,skip)=progress.update(i)
            if not cont or skip:
                break
        progress.update(numfiles)
    else:
        for filename in self.file_paths[:]:
            self._add_xye_dataset(filename)
    # NOTE(review): datasets are plotted before being sorted; the sort only
    # affects subsequently refreshed name lists.
    self._plot_datasets(self.datasets)
    self.datasets.sort(key=lambda d: d.name)
    self._refresh_normalise_to_list()
    self._refresh_dataset_name_list()
def _add_xye_dataset(self, file_path, container=True):
    """Load an XYEDataset from *file_path*, attach its UI wrappers, and
    (when *container* is True) append it to self.datasets.
    Returns the dataset, or None when the file cannot be read."""
    try:
        loaded = XYEDataset.from_file(file_path)
    except IOError:
        return None
    if container:
        self.datasets.append(loaded)
    create_datasetui(loaded)
    return loaded
def _load_partners_changed(self):
    """Load-partners button: for each loaded file, locate and load the file
    from the partner detector position, then replot and refresh names."""
    # Iterate a copy: _add_dataset_pair() appends to self.file_paths.
    for path in list(self.file_paths):
        self._add_dataset_pair(path)
    self._plot_datasets(self.datasets)
    self.datasets.sort(key=lambda ds: ds.name)
    self._refresh_dataset_name_list()
def _plot_datasets(self, datasets, reset_view=True):
    """Plot *datasets* (plus the loaded background file, if any), re-apply
    the view options, and request a redraw of the container."""
    to_plot = list(datasets)
    if self.background_file is not None:
        to_plot.append(self.background_file)
    self.raw_data_plot.plot_datasets(to_plot, scale=self.scale,
                                     reset_view=reset_view)
    self._options_changed(self.options)
    self.container.request_redraw()
def _edit_datasets_changed(self):
    """Edit-datasets button: open the dataset editor, then replot (keeping
    the current view) and refresh the peak labels."""
    editor = DatasetEditor(datasets=self.datasets)
    editor.edit_traits()
    # _plot_datasets() has no return value, so the previous
    # "if self._plot_datasets(...) is None" test was always true;
    # perform both steps unconditionally instead.
    self._plot_datasets(self.datasets, reset_view=False)
    self.raw_data_plot.remove_peak_labels(self.peak_labels)
def _options_changed(self, opts):
    """Push the checkbox states through to the plot and redraw.

    *opts* contains only the ticked option names; expand it to a full
    True/False map over every known option before applying.
    """
    enabled = dict((name, name in opts) for name in self._options)
    self.raw_data_plot.show_grids(enabled['Show gridlines'])
    self.raw_data_plot.show_crosslines(enabled['Show crosslines'])
    self.container.request_redraw()
def _legend_changed(self, new):
    """Forward the selected legend mode to the plot and redraw."""
    self.raw_data_plot.show_legend(new)
    self.container.request_redraw()
def _bt_start_peak_select_changed(self):
    """Select-peak button: enter range-selection mode so the user can mark
    the peak used for position alignment."""
    self.raw_data_plot.start_range_select()
    self.peak_selecting = True
def _bt_end_peak_select_changed(self):
    """Align button: leave range-selection mode, fit the marked peak in
    every dataset pair, and open the peak-fit results window."""
    self.peak_selecting = False
    selection = self.raw_data_plot.end_range_select()
    if not selection:
        return
    low, high = selection
    # Fit the peak in all loaded dataset pairs.
    self._get_partners()
    for pair in self._get_dataset_pairs():
        processing.fit_peaks_for_a_dataset_pair(low, high, pair,
                                                self.normalise)
    results_window = PeakFitWindow(dataset_pairs=self._get_dataset_pairs(),
                                   range=selection)
    results_window.edit_traits()
def _get_dataset_pairs(self):
    """Resolve the name pairs in self.dataset_pairs to dataset objects."""
    by_name = {ds.name: ds for ds in self.datasets}
    return [(by_name[first], by_name[second])
            for first, second in self.dataset_pairs]
def _label_as_processed(self, dataset):
    """Tag a freshly processed dataset in the UI: rename it with a
    ' (processed)' suffix and clear its assigned colour."""
    dataset.metadata['ui'].name = dataset.name + ' (processed)'
    dataset.metadata['ui'].color = None

def _bt_process_changed(self):
    '''
    Button click event handler for processing.

    Runs the splice/merge/normalise/regrid pipeline selected by the
    "Positions to process:" radio buttons, labels each result in the UI,
    and replots. The previous version repeated the UI-labeling code in
    all three branches; it is factored into _label_as_processed().
    '''
    processed_datasets = []
    processor = DatasetProcessor(self.normalise, self.correction,
                                 self.align_positions,
                                 self.splice, self.merge, self.merge_regrid,
                                 self.normalisation_source_filenames,
                                 self.datasets)
    # Processing depends on the "Positions to process:" radiobutton:
    # If Splice==True, get all pairs and splice them
    # If Merge==True, get all pairs and merge them
    # If Normalise==True, always normalise
    # If Grid==True, output gridded and ungridded
    if self.merge_positions == 'p12+p34':
        self._get_partners()  # pair up datasets per the radiobutton selection
        for dataset_pair in self._get_dataset_pairs():
            for dataset in processor.splice_overlapping_datasets(dataset_pair):
                self._label_as_processed(dataset)
                processed_datasets.append(dataset)
    elif self.merge_positions == 'all':
        # Handle "all" selection: normalise and/or regrid every dataset.
        for d in self.datasets:
            normalised = processor.normalise_me(d)
            if normalised is not None:
                processed_datasets.append(normalised)
                self._label_as_processed(normalised)
                # Regrid the normalised data rather than the raw data.
                d = normalised
            regridded = processor.regrid_me(d)
            if regridded is not None:
                processed_datasets.append(regridded)
                self._label_as_processed(regridded)
    else:
        self._get_partners()  # pair up datasets per the radiobutton selection
        for dataset_pair in self._get_dataset_pairs():
            for dataset in processor.process_dataset_pair(dataset_pair):
                self._label_as_processed(dataset)
                processed_datasets.append(dataset)
    self.processed_datasets = processed_datasets
    self._refresh_dataset_name_list()
    self._plot_processed_datasets()
def _plot_processed_datasets(self):
    """Snapshot undo state, then plot the processed datasets — together
    with the raw ones when 'Plot old and new' is selected."""
    self._save_state()
    include_raw = 'old' in self.what_to_plot
    if include_raw:
        to_plot = self.datasets + self.processed_datasets
    else:
        to_plot = self.processed_datasets
    self._plot_datasets(to_plot)
def _save_state(self):
    """Snapshot datasets and pairs so the last processing step can be undone."""
    self.undo_state = (list(self.datasets), self.dataset_pairs.copy())

def _restore_state(self):
    """Revert to the snapshot taken by _save_state(), if one exists."""
    if self.undo_state is None:
        return
    self.datasets, self.dataset_pairs = self.undo_state
    self.undo_state = None
def _bt_undo_processing_changed(self):
    """Undo button: restore the pre-processing snapshot and replot."""
    self._restore_state()
    self._refresh_dataset_name_list()
    self._plot_datasets(self.datasets)
def _bt_save_changed(self):
    """Save button: write every processed dataset into a user-chosen
    directory as GSAS (.fxye) or plain .xye files, then open that
    directory in the platform's file browser.

    The leftover debug print of path/extension has been removed and the
    two near-identical save loops unified.
    """
    wildcard = 'GSAS file (.fxye)|*.fxye|XYE file (.xye)|*.xye'
    result, path, extension = get_save_processed_dir(self.most_recent_path, wildcard)
    if not result:
        return
    for dataset in self.processed_datasets:
        if extension == 'fxye':
            # GSAS output keeps the base name but swaps the extension.
            tmpname = dataset.name.split('.')[0] + ".fxye"
            dataset.save_fxye(os.path.join(path, tmpname))
        else:
            dataset.save(os.path.join(path, dataset.name))
    open_file_with_default_handler(path)
def _save_as_image_changed(self):
    """Prompt for a filename and export the current plot as an image,
    then reveal the written file in the platform's file browser."""
    if not self.datasets:
        return
    target = get_save_as_filename()
    if target:
        PlotOutput.save_as_image(self.container, target)
        open_file_dir_with_default_handler(target)
def _save_parabs_as_csv_changed(self):
    """Export one CSV row of metadata per dataset to a user-chosen file."""
    if not self.datasets:
        return
    target = get_save_as_csv_filename()
    if not target:
        return
    # Header columns come from the first dataset's metadata; 'ui'/'ui_w'
    # hold UI helper objects rather than data, so they are excluded.
    data_keys = [k for k in self.datasets[0].metadata.keys()
                 if k not in ('ui', 'ui_w')]
    header_columns = sorted(data_keys, key=str.lower)
    with open(target, 'wb') as csvfile:  # 'wb': csv convention on Python 2
        writer = csv.writer(csvfile, dialect='excel')
        writer.writerow(['filename'] + header_columns)
        for ds in self.datasets:
            # Missing metadata entries become empty cells.
            row = [ds.source] + [ds.metadata.get(k, '') for k in header_columns]
            writer.writerow(row)
def _copy_to_clipboard_changed(self):
    """Copy the current plot image to the system clipboard (no-op when
    nothing is loaded)."""
    if self.datasets:
        PlotOutput.copy_to_clipboard(self.container)
def _scale_changed(self):
    # Replot so the new axis scale (linear/log/sqrt) takes effect.
    self._plot_datasets(self.datasets)
def _get_partner(self, position_index):
    """Return the partner detector position index:
    2=>1, 1=>2, 3=>4, 4=>3, 12=>34, 34=>12.

    Raises ValueError for any other index. (The original raised a bare
    string, which has been illegal since Python 2.6 and actually produced
    a TypeError.)
    """
    if position_index in (1, 2, 3, 4):
        # XOR the zero-based index with 1 to flip within the (1,2)/(3,4) pairs.
        return ((position_index - 1) ^ 1) + 1
    if position_index == 12:
        return 34
    if position_index == 34:
        return 12
    raise ValueError('unparsable position')
def _get_position(self, filename):
    """Extract the detector position number from a filename containing an
    '_pN_' (or '_PN_') token; return None when absent or non-numeric."""
    match = re.search('_[pP]([0-9]+)_', filename)
    if match is None:
        return None
    try:
        return int(match.group(1))
    except ValueError:
        return None
def _repl(self, m):
    """re.sub callback: rewrite an '_pN_' match so N becomes its partner
    position index."""
    partner_index = self._get_partner(int(m.group(2)))
    return '%s%s%s' % (m.group(1), partner_index, m.group(3))
def _add_dataset_pair(self, filename):
    """Locate the file for *filename*'s partner detector position, load it
    if it exists and is not already loaded, and register its path.

    Appends to self.file_paths, so callers iterating that list must
    iterate a copy.
    """
    current_directory, filebase = os.path.split(filename)
    position_index = self._get_position(filename)
    if position_index is None:
        return
    # Bug fix: substitute the partner index into the *basename*, not the
    # full path. The old code passed `filename` to re.sub and only worked
    # because os.path.join discards its first argument when the second is
    # absolute; with relative paths the directory was duplicated.
    other_filebase = re.sub(r"(_[pP])(\d+)(_)", self._repl, filebase)
    other_filename = os.path.join(current_directory, other_filebase)
    if not os.path.exists(other_filename):
        return
    # We have the name and the path; now add the actual data and references.
    if other_filename not in self.file_paths:
        self._add_xye_dataset(other_filename)
        # Keep file_paths in sync with the loaded files so the
        # normalisation reference dropdown can offer the partner too.
        self.file_paths.append(other_filename)
        self._refresh_normalise_to_list()
def _bt_load_background_changed(self):
    """Load button: read a background dataset from a user-chosen file,
    label it in the UI, overlay it on the plot and track it."""
    chosen = get_file_from_dialog()
    if chosen is None:
        return
    self.background_fit = None
    self.background_file = self._add_xye_dataset(chosen, container=False)
    background_ui = self.background_file.metadata['ui']
    background_ui.name = self.background_file.name + ' (background)'
    background_ui.color = None
    self._plot_datasets(self.datasets)
    self.background_datasets.add(self.background_file)
def _bt_fit_changed(self):
    """
    Fits a curve of the selected type to the selected dataset. It does this by first finding the peaks in the whole dataset and fitting their
    positions with the selected order of the curve to get an expression for the background. Background is then evaluated at the same x points
    as the dataset and a new xyedataset created for the background. Background dataset is then added to the datasets (so that it is plotted)
    and also added as an attribute to the original dataset for later use subtracting the background
    """
    # varyList=[r'Back'] #we only want to fit the background parameters here
    varyList = [] # we only want to fit the background parameters here but that is included directly in the fitting routine
    fit_params = {'U':1, 'V':-1, 'W':0.3, 'X':0, 'Y':0, 'backType':self.curve_type, 'Back:0':1.0, 'Zero':0} # these are currently needed in the version of the routine we've modified from gsas
    # need to further think on how to get rid of them, they are just the parameters for the gaussian and lorentzian that would be used
    # to define an overall broadening of the curves due to the instrument. Qinfen says they don't want to have the overall broadening
    # One 'Back:i' coefficient per requested fit parameter (curve_order).
    for i in range(1, self.curve_order):
        fit_params.update({'Back:' + str(i):0.0})
    dataset_to_fit = self._find_dataset_by_name(self.selection_dataset_names, self.datasets + self.processed_datasets)
    if dataset_to_fit is not None:
        dataset_to_fit.fit_params = fit_params
        dataset_to_fit.fit_params.update({'datasetName':dataset_to_fit.name})
        # Fit over the dataset's full x extent.
        limits = (dataset_to_fit.data[0, 0], dataset_to_fit.data[-1, 0])
        peak_list = autosearch_peaks(dataset_to_fit, limits, dataset_to_fit.fit_params)
        if peak_list is None:
            # Peak search failed: tell the user and reset the parameters.
            message(message='Unable to fit background, try manually defining a background', title='Background Fit Error', buttons=[ 'OK' ], parent=None)
            peak_list = []
            self._set_basic_fit_params(dataset_to_fit, 'fit_params') # reset the peaks parameters
            return
        background, peak_profile, new_fit_params = fit_peaks_background(peak_list, varyList, dataset_to_fit, self.background_fit, dataset_to_fit.fit_params)
        dataset_to_fit.fit_params.update(new_fit_params)
        # Reuse the dataset's existing background container if one exists,
        # otherwise clone the dataset to hold the evaluated background curve.
        if hasattr(dataset_to_fit, 'background'):
            background_fit = dataset_to_fit.background
        else:
            background_fit = dataset_to_fit.copy()
        background_fit.metadata['ui'].name = dataset_to_fit.name + ' fit (background)'
        background_fit.metadata['ui'].color = None
        background_fit.data[:, 1] = background
        dataset_to_fit.background = background_fit
        # Replace any previously plotted fit for this dataset.
        existing_fit = self._find_dataset_by_uiname(dataset_to_fit.name + ' fit (background)', self.datasets)
        if existing_fit is not None:
            self.datasets.remove(existing_fit)
        self.datasets.append(background_fit)
        self.background_fits.append(background_fit)
        self.background_datasets.add(background_fit)
        self.backgrounds_fitted = True
        self._plot_processed_datasets()
def _bt_clear_fit_changed(self):
    """
    Removes the fitted background from the selected dataset and from the plot window.
    """
    selected = self._find_dataset_by_name(self.selection_dataset_names,
                                          self.datasets + self.processed_datasets)
    # Only act when the selected dataset carries an automatically fitted
    # background (its UI name ends in "fit (background)").
    fit_suffix = r'fit \(background\)$'
    if hasattr(selected, 'background') and \
            re.search(fit_suffix, selected.background.metadata['ui'].name) is not None:
        bg_fit = selected.background
        self.datasets.remove(bg_fit)
        self.background_datasets.remove(bg_fit)
        self.background_fits.remove(bg_fit)
        delattr(selected, 'background')
        self._plot_processed_datasets()
def _bt_save_curve_changed(self):
    """
    Exports the fitted background parameters for the currently selected dataset
    to a text file named "<dataset>_background_params.txt".

    The file lists the dataset name, the background fit type, and every
    'Back:<n>' polynomial coefficient in index order.
    """
    dataset = self._find_dataset_by_name(self.selection_dataset_names,
                                         self.datasets + self.processed_datasets)
    name = dataset.name.split(".")[0] + "_background_params.txt"
    filename = str(get_txt_filename(os.path.join(self.most_recent_path, name)))
    # Use the builtin open() instead of the Python-2-only file() builtin,
    # which was removed in Python 3; open() behaves identically here.
    with open(filename, 'w') as outfile:
        outfile.write("Background Parameters\n")
        outfile.write("Dataset name: " + dataset.fit_params['datasetName'] + "\n")
        outfile.write("Fit type: " + dataset.fit_params['backType'] + "\n")
        # Coefficients are stored under consecutive 'Back:0', 'Back:1', ...
        # keys; write them until the first missing index.
        nBak = 0
        while ('Back:' + str(nBak)) in dataset.fit_params:
            key = 'Back:' + str(nBak)
            outfile.write(key + ": " + str(dataset.fit_params[key]) + "\n")
            nBak += 1
def _bt_subtract_background_changed(self):
    """
    Triggers the background subtraction. If there are processed datasets then we
    subtract the background from those, otherwise we subtract the background from
    the raw datasets. All the kinds of background that could be defined are passed
    along; the subtract_background... routine works out which one applies.
    """
    # Datasets that are processed but have not yet had a background removed.
    pending = set(self.processed_datasets) - self.bg_removed_datasets
    sources = pending if pending else self.datasets
    new_datasets = subtract_background_from_all_datasets(
        sources, self.background_file, self.background_manual, self.fitter)
    if self.raw_data_plot.line_tool is not None:
        self.raw_data_plot.remove_line_tool()
    # Remember which datasets have been background-subtracted already.
    self.bg_removed_datasets.update(new_datasets)
    self.processed_datasets.extend(new_datasets)
    self._plot_processed_datasets()
    self._refresh_dataset_name_list()
def _bt_save_background_changed(self):
    """
    Saves every processed dataset into a user-chosen directory, then opens that
    directory with the platform's default file handler.

    NOTE(review): despite the method name, this saves the processed datasets
    themselves, not fitted background parameters (those are handled by
    _bt_save_curve_changed).
    """
    # The previous version defined an unused `wildcard` local; removed.
    dlg = DirectoryDialog(title='Save results', default_path=self.most_recent_path)
    if dlg.open() == OK:
        self.most_recent_path = dlg.path
        for dataset in self.processed_datasets:
            dataset.save(os.path.join(dlg.path, dataset.name))
        open_file_with_default_handler(dlg.path)
def _fit_manual_background_callback(self):
    """
    Called when the manually drawn background points are finalised: restores the
    Define button, detaches the line-drawing tool, and replots.
    """
    self.man_define_bg_button_label = "Define"
    self.manual_bg_fitted = True
    self._plot_processed_datasets()
    plot = self.raw_data_plot
    plot.remove_tooltips('line_drawer_tool')
    plot.plot.overlays.remove(plot.line_tool)
    # Hand the left mouse button back to the zoom tool.
    plot.zoom_tool.drag_button = "left"
    self.container.request_redraw()
def _bt_manually_define_background_changed(self):
    """
    When the define button is clicked, this function attaches a line-drawing tool
    to the plot to select points to define a background. The
    _fit_manual_background_callback routine is passed as a parameter so that it
    can be called when the points are finalised, which is done in the
    MyLineDrawer class extending the line drawing tool.
    """
    label = self.man_define_bg_button_label
    if label == "Define":
        # Discard any previously fitted manual background before starting over.
        previous = self._manual_background_fitted()
        if previous:
            self.datasets.remove(previous)
            self._plot_processed_datasets()
        self.background_manual = empty_xye_dataset(size=min_max_x(self.datasets))
        create_datasetui(self.background_manual)
        self.fitter = CurveFitter(curve_type='Spline', deg=self.curve_order)
        self.raw_data_plot.add_line_drawer(self.datasets, self.fitter,
                                           self._fit_manual_background_callback,
                                           self.background_manual)
        self.man_define_bg_button_label = "Cancel"
        self.container.request_redraw()
        self.background_datasets.add(self.background_manual)
    elif label == "Cancel":
        # Abort the definition: drop the tool and the half-built background.
        self.raw_data_plot.remove_line_tool()
        self.fitter = None
        self.background_manual = None
        self.man_define_bg_button_label = "Define"
        self.container.request_redraw()
def _manual_background_fitted(self):
    """Return the manually fitted background dataset, or None if none exists."""
    ui_name = 'fit (manual background)'
    return self._find_dataset_by_uiname(ui_name, self.datasets)
def _bt_clear_manual_background_changed(self):
    """
    Removes the background dataset created from the manually defined, spline
    fitted background, along with any datasets derived by subtracting it.
    """
    manual_bg = self.background_manual
    if manual_bg:
        self.datasets.remove(manual_bg)
        self.background_datasets.remove(manual_bg)
        self.fitter = None
        self.background_manual = None
        self.manual_bg_fitted = False
        # Drop every processed dataset produced by subtracting this background.
        for derived in get_subtracted_datasets(self.processed_datasets):
            self.processed_datasets.remove(derived)
        self.raw_data_plot.remove_line_tool()
        self._plot_processed_datasets()
        self._refresh_dataset_name_list()
def _get_partners(self):
    """
    Populates the self.dataset_pairs set with all dataset partners in
    self.file_paths corresponding to the merge_positions radiobutton selection.

    Returns the set of (first, second) basename pairs, or None if a position
    could not be parsed from a filename.
    """
    position_patterns = {
        'all':     '',
        'p1+p2':   '_[pP][12]_',
        'p3+p4':   '_[pP][34]_',
        'p12+p34': '_[pP](?:12|34)_',
    }
    pattern = position_patterns[self.merge_positions]
    basenames = [os.path.basename(path) for path in self.file_paths]
    candidates = [b for b in basenames if re.search(pattern, b) is not None]
    self.dataset_pairs = set()
    for filebase in candidates:
        position = self._get_position(filebase)
        if position is None:
            return
        # Derive the partner filename by swapping the position marker.
        partner = re.sub(r"(_[pP])(\d+)(_)", self._repl, filebase)
        if filebase in basenames and partner in basenames:
            # Order pairs first-position-first; 12 is special-cased because it
            # is even yet leads its pair (p12 before p34).
            if position != 12 and position % 2 == 0:
                self.dataset_pairs.add((partner, filebase))
            else:
                self.dataset_pairs.add((filebase, partner))
    return self.dataset_pairs
def _refresh_dataset_name_list(self):
    """Repopulate the global dataset-name list, excluding background datasets."""
    # Raw plus processed datasets, minus anything tracked as a background.
    visible = set(self.datasets + self.processed_datasets) - self.background_datasets
    g.populate_dataset_name_list(list(visible))
def _refresh_normalise_to_list(self):
    """Repopulate the normalise-to list from the currently loaded file paths."""
    g.populate_list(self.file_paths)
def _file_paths_changed(self, new):
    """
    When the file dialog box is closed with a selection of filenames, load each
    file as a dataset, showing a progress dialog for larger selections.
    """
    self.datasets = []
    # self.file_paths is modified by _add_dataset_pair(), so iterate a copy.
    paths = self.file_paths[:]
    numfiles = len(paths)
    if numfiles > 20:
        progress = ProgressDialog(title="progress",
                                  message="loading %d files." % numfiles,
                                  max=numfiles)
        progress.open()
        for index, path in enumerate(paths):
            self._add_xye_dataset(path)
            cont, skip = progress.update(index)
            if skip or not cont:
                break
        progress.update(numfiles)
    else:
        for path in paths:
            self._add_xye_dataset(path)
    self._plot_datasets(self.datasets)
    self.datasets.sort(key=lambda d: d.name)
    self._refresh_normalise_to_list()
    self._refresh_dataset_name_list()
def _load_partners_changed(self):
    """Load the partner dataset for every currently loaded file path."""
    # file_paths is mutated by _add_dataset_pair(), so iterate over a copy.
    for path in list(self.file_paths):
        self._add_dataset_pair(path)
    self._plot_datasets(self.datasets)
    self.datasets.sort(key=lambda d: d.name)
    self._refresh_dataset_name_list()
def _plot_datasets(self, datasets, reset_view=True):
    """Plot *datasets* (plus the loaded background file, if any) and redraw."""
    to_plot = list(datasets)
    if self.background_file is not None:
        to_plot.append(self.background_file)
    self.raw_data_plot.plot_datasets(to_plot, scale=self.scale,
                                     reset_view=reset_view)
    # Re-apply the current display options to the fresh plot.
    self._options_changed(self.options)
    self.container.request_redraw()
def _edit_datasets_changed(self):
    """
    Open the dataset editor dialog, then replot and clear stale peak labels.
    """
    editor = DatasetEditor(datasets=self.datasets)
    editor.edit_traits()
    # _plot_datasets() contains no return statement and so always returns None;
    # the previous `if self._plot_datasets(...) is None:` guard was vacuous.
    # Call it and unconditionally remove the peak labels.
    self._plot_datasets(self.datasets, reset_view=False)
    self.raw_data_plot.remove_peak_labels(self.peak_labels)