/
coreg.py
248 lines (217 loc) · 8.31 KB
/
coreg.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
"""Coordinate Point Extractor for KIT system."""
# Author: Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD-3-Clause
# Copyright the MNE-Python contributors.
import pickle
import re
from collections import OrderedDict
from os import SEEK_CUR, PathLike
from pathlib import Path
import numpy as np
from ..._fiff._digitization import _make_dig_points
from ...channels.montage import (
_check_dig_shape,
read_custom_montage,
read_dig_polhemus_isotrak,
read_polhemus_fastscan,
)
from ...transforms import (
Transform,
als_ras_trans,
apply_trans,
get_ras_to_neuromag_trans,
)
from ...utils import _check_fname, _check_option, warn
from .constants import FIFF, KIT
INT32 = "<i4"
FLOAT64 = "<f8"
def read_mrk(fname):
    r"""Marker Point Extraction in MEG space directly from sqd.

    Parameters
    ----------
    fname : path-like
        Absolute path to Marker file.
        File formats allowed: \*.sqd, \*.mrk, \*.txt.

    Returns
    -------
    mrk_points : ndarray, shape (n_points, 3)
        Marker points in MEG space [m].

    Raises
    ------
    ValueError
        If the resulting array is not of shape (5, 3), or if a pickled file
        does not contain a ``"mrk"`` entry.
    """
    # imported here to avoid a circular import with .kit
    from .kit import _read_dirs

    fname = Path(_check_fname(fname, "read", must_exist=True, name="mrk file"))
    if fname.suffix != ".pickled":
        _check_option("file extension", fname.suffix, (".sqd", ".mrk", ".txt"))
    if fname.suffix in (".sqd", ".mrk"):
        # Binary KIT file: locate the coregistration directory entry and
        # parse the marker records in place.
        with open(fname, "rb", buffering=0) as fid:
            dirs = _read_dirs(fid)
            fid.seek(dirs[KIT.DIR_INDEX_COREG]["offset"])
            # skips match_done (one int32) plus the meg_to_mri and
            # mri_to_meg 4x4 double matrices (2 * 16 doubles)
            fid.seek(KIT.INT + (2 * KIT.DOUBLE * 16), SEEK_CUR)
            mrk_count = np.fromfile(fid, INT32, 1)[0]
            pts = []
            for _ in range(mrk_count):
                # mri_type, meg_type, mri_done, meg_done
                _, _, _, meg_done = np.fromfile(fid, INT32, 4)
                # each record holds an MRI point and a MEG point (3 doubles
                # apiece); only the MEG point is kept, and only when the
                # meg_done flag is set
                _, meg_pts = np.fromfile(fid, FLOAT64, 6).reshape(2, 3)
                if meg_done:
                    pts.append(meg_pts)
            mrk_points = np.array(pts)
    elif fname.suffix == ".txt":
        mrk_points = _read_dig_kit(fname, unit="m")
    elif fname.suffix == ".pickled":
        # Legacy pickled marker files: unpickling arbitrary files is unsafe,
        # so warn users to migrate to a standard format.
        warn(
            "Reading pickled files is unsafe and not future compatible, save "
            "to a standard format (text or FIF) instead, e.g. with:\n"
            r"np.savetxt(fid, pts, delimiter=\"\\t\", newline=\"\\n\")",
            FutureWarning,
        )
        with open(fname, "rb") as fid:
            food = pickle.load(fid)  # nosec B301
        try:
            mrk_points = food["mrk"]
        except Exception:
            raise ValueError(f"{fname} does not contain marker points.") from None

    # check output: a valid marker set is exactly 5 points in 3D
    mrk_points = np.asarray(mrk_points)
    if mrk_points.shape != (5, 3):
        err = f"{repr(fname)} is no marker file, shape is {mrk_points.shape}"
        raise ValueError(err)
    return mrk_points
def read_sns(fname):
    """Extract sensor coil coordinates in MEG space.

    Parameters
    ----------
    fname : path-like
        Absolute path to sensor definition file.

    Returns
    -------
    locs : numpy.array, shape = (n_points, 5)
        One row per sensor: the five numeric fields of each definition line.
    """
    # Each sensor line looks like "<digit>,<name>,<f1>,<f2>,<f3>,<f4>,<f5>";
    # capture the five numeric fields of every such line.
    number = r"([\.\-0-9]+)"
    pattern = re.compile(r"\d,[A-Za-z]*," + ",".join([number] * 5))
    with open(fname) as sns_file:
        matches = pattern.findall(sns_file.read())
    return np.array(matches, dtype=float)
def _set_dig_kit(mrk, elp, hsp, eeg, *, bad_coils=()):
    """Add landmark points and head shape data to the KIT instance.

    Digitizer data (elp and hsp) are represented in [mm] in the Polhemus
    ALS coordinate system. This is converted to [m].

    Parameters
    ----------
    mrk : path-like | array_like, shape (5, 3) | None
        Marker points representing the location of the marker coils with
        respect to the MEG sensors, or path to a marker file.
    elp : path-like | array_like, shape (8, 3) | None
        Digitizer points representing the location of the fiducials and the
        marker coils with respect to the digitized head shape, or path to a
        file containing these points.
    hsp : path-like | array, shape (n_points, 3) | None
        Digitizer head shape points, or path to head shape file. If more
        than 10,000 points are in the head shape, they are automatically
        decimated.
    eeg : dict
        Ordered dict of EEG dig points.
    bad_coils : list
        Indices of bad marker coils (up to two). Bad coils will be excluded
        when computing the device-head transformation.

    Returns
    -------
    dig_points : list
        List of digitizer points for info['dig'].
    dev_head_t : Transform
        A dictionary describing the device-head transformation.
    hpi_results : list
        The hpi results.
    """
    # imported here to avoid a circular import with ...coreg
    from ...coreg import _decimate_points, fit_matched_points

    # Head shape: load from file if a path was given, then decimate when the
    # point count exceeds the recommended maximum.
    if isinstance(hsp, (str, Path, PathLike)):
        hsp = _read_dig_kit(hsp)
    n_pts = len(hsp)
    if n_pts > KIT.DIG_POINTS:
        hsp = _decimate_points(hsp, res=0.005)
        n_new = len(hsp)
        warn(
            f"The selected head shape contained {n_pts} points, which is more than "
            f"recommended ({KIT.DIG_POINTS}), and was automatically downsampled to "
            f"{n_new} points. The preferred way to downsample is using FastScan."
        )

    # ELP: 3 fiducials followed by up to 5 marker-coil positions.
    if isinstance(elp, (str, Path, PathLike)):
        elp_points = _read_dig_kit(elp)
        if len(elp_points) != 8:
            raise ValueError(
                f"File {repr(elp)} should contain 8 points; got shape "
                f"{elp_points.shape}."
            )
        elp = elp_points
        if len(bad_coils) > 0:
            # +3 skips the fiducial rows so indices refer to the coils
            elp = np.delete(elp, np.array(bad_coils) + 3, 0)
    # check we have at least 3 marker coils (whether read from file or
    # passed in directly)
    if len(elp) not in (6, 7, 8):
        raise ValueError(f"ELP should contain 6 ~ 8 points; got shape {elp.shape}.")

    # Marker points in device (MEG) space.
    if isinstance(mrk, (str, Path, PathLike)):
        mrk = read_mrk(mrk)
        if len(bad_coils) > 0:
            mrk = np.delete(mrk, bad_coils, 0)
    if len(mrk) not in (3, 4, 5):
        raise ValueError(f"MRK should contain 3 ~ 5 points; got shape {mrk.shape}.")
    # markers are stored in ALS; convert to RAS
    mrk = apply_trans(als_ras_trans, mrk)

    # Transform digitizer data into the Neuromag head frame defined by the
    # three fiducials (nasion, LPA, RPA).
    nasion, lpa, rpa = elp[:3]
    nmtrans = get_ras_to_neuromag_trans(nasion, lpa, rpa)
    elp = apply_trans(nmtrans, elp)
    hsp = apply_trans(nmtrans, hsp)
    eeg = OrderedDict((k, apply_trans(nmtrans, p)) for k, p in eeg.items())

    # device head transform: fit the head-frame coil positions (elp[3:]) to
    # the device-frame marker positions
    trans = fit_matched_points(tgt_pts=elp[3:], src_pts=mrk, out="trans")

    nasion, lpa, rpa = elp[:3]
    elp = elp[3:]

    dig_points = _make_dig_points(nasion, lpa, rpa, elp, hsp, dig_ch_pos=eeg)
    dev_head_t = Transform("meg", "head", trans)

    hpi_results = [
        dict(
            dig_points=[
                dict(
                    ident=ci,
                    r=r,
                    kind=FIFF.FIFFV_POINT_HPI,
                    coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
                )
                for ci, r in enumerate(mrk)
            ],
            coord_trans=dev_head_t,
        )
    ]
    return dig_points, dev_head_t, hpi_results
def _read_dig_kit(fname, unit="auto"):
    """Read digitizer points from a file and return them as an ndarray.

    ``.txt`` files are parsed as Polhemus FastSCAN exports, ``.hsp`` and
    ``.elp`` as Polhemus Isotrak files, and ``.mat`` via the custom-montage
    reader. With ``unit="auto"`` the unit defaults to mm for ``.txt`` and
    m for ``.hsp``/``.elp``.
    """
    fname = _check_fname(fname, "read", must_exist=True, name="hsp or elp file")
    assert unit in ("auto", "m", "mm")
    suffix = fname.suffix
    _check_option("file extension", suffix, (".hsp", ".elp", ".mat", ".txt"))
    if suffix == ".txt":
        if unit == "auto":
            unit = "mm"
        pts = read_polhemus_fastscan(fname, unit=unit, on_header_missing="ignore")
    elif suffix == ".mat":
        pts = np.array([d["r"] for d in read_custom_montage(fname).dig])
    else:  # .hsp or .elp (Isotrak)
        if unit == "auto":
            unit = "m"
        montage = read_dig_polhemus_isotrak(fname, unit=unit)
        if suffix == ".hsp":
            # Head-shape files: drop the cardinal (fiducial) points.
            coords = [
                d["r"]
                for d in montage.dig
                if d["kind"] != FIFF.FIFFV_POINT_CARDINAL
            ]
        else:
            coords = [d["r"] for d in montage.dig]
            first = montage.dig[0] if coords else None
            if (
                first is not None
                and first["kind"] == FIFF.FIFFV_POINT_CARDINAL
                and first["ident"] == FIFF.FIFFV_POINT_LPA
            ):
                # Reorder fiducials: LPA, Nasion, RPA -> Nasion, LPA, RPA.
                coords[:3] = [coords[1], coords[0], coords[2]]
        pts = np.array(coords, float)
    _check_dig_shape(pts)
    return pts