/
collate.py
66 lines (48 loc) · 2 KB
/
collate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/usr/bin/env python
"""
Check drift correction results.
Hazen 01/18
"""
import glob
import numpy
import tifffile
import storm_analysis.sa_utilities.hdf5_to_image as hdf5ToImage
import storm_analysis.diagnostics.drift_correction.settings as settings
def collate():
    """Check drift correction results against the ground-truth drift files.

    We assume that there are two test directories, test_01/ with xy
    correction only and test_02/ with xyz correction.  The image
    rendering sections below are disabled by default; flip their
    'if False' toggles to regenerate the rendered images.

    Prints the RMS error (in nanometers) of the measured drift for
    each axis of each test directory.
    """
    # Make 2D images of the drift corrected data.
    if False:
        for adir in ["test_01/", "test_02/"]:
            im = hdf5ToImage.render2DImage(adir + "test.hdf5", sigma = 0.5)
            tifffile.imsave(adir + "test_im_2d.tif", im.astype(numpy.float32))

    # Make 3D images of the drift corrected data.
    if False:
        z_edges = numpy.arange(-0.5, 0.55, 0.1)
        for adir in ["test_01/", "test_02/"]:
            images = hdf5ToImage.render3DImage(adir + "test.hdf5", z_edges, sigma = 0.5)
            with tifffile.TiffWriter(adir + "test_im_3d.tif") as tf:
                for image in images:
                    tf.save(image.astype(numpy.float32))

    # Measure error in drift measurements.
    if True:
        print("Drift correction RMS error (nanometers):")
        for i, drift_file in enumerate(["drift_xy.txt", "drift_xyz.txt"]):
            print("  ", drift_file)
            ref = numpy.loadtxt(drift_file)
            exp = numpy.loadtxt("test_{0:02}/test_drift.txt".format(i+1))

            # The measured drift may cover fewer frames than the reference,
            # so truncate the reference to the measured length.
            max_len = exp.shape[0]
            for j, axis in enumerate(["X", "Y", "Z"]):
                refv = ref[:max_len,j]
                # Column 0 of the measured drift file is the frame number,
                # so the axis data starts at column 1.
                expv = exp[:,j+1]

                # Correct for DC offset.
                expv += numpy.mean(refv - expv)

                # Calculate error (in nanometers).  Z is in microns so scale
                # by 1000; X/Y are in pixels so scale by the pixel size (nm).
                if (axis == "Z"):
                    print("    ", axis, "{0:.3f}".format(numpy.std(refv - expv) * 1.0e+3))
                else:
                    print("    ", axis, "{0:.3f}".format(numpy.std(refv - expv) * settings.pixel_size))
        print()
# Allow the collation to be run directly as a script.
if __name__ == "__main__":
    collate()