-
Notifications
You must be signed in to change notification settings - Fork 1
/
COBRA.py
121 lines (89 loc) · 3.83 KB
/
COBRA.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
import math
import sys
sys.path.append("..")
from utils.gp_utils import *
import numpy as np
from numpy.typing import *
from sklearn.cluster import KMeans
def transform_2D_to_3D(K, exrtinsics, P2D, P3D):
    """Back-project a homogeneous 2D image point onto its viewing ray and
    return the 3D point on that ray, expressed in the object frame.

    The depth along the ray is chosen by least squares so that the ray
    point is as close as possible to the camera-frame position of ``P3D``.

    K          : 3x3 camera intrinsics.
    exrtinsics : 3x4 ``[R | t]`` extrinsics (parameter name kept as-is so
                 existing keyword callers keep working).
    P2D        : homogeneous 2D point (3-vector, last entry 1).
    P3D        : reference 3D point (object frame) used to pick the depth.
    """
    # Viewing-ray direction in the camera frame.
    ray = np.linalg.inv(K) @ P2D
    rot = exrtinsics[:, :3]
    trans = exrtinsics[:, 3]
    # Scalar depth: orthogonal projection of the camera-frame reference
    # point onto the ray direction.
    depth = (ray.T @ (rot @ P3D + trans)) / np.linalg.norm(ray, ord=2) ** 2
    # Map the recovered camera-frame point back into the object frame.
    return depth * rot.T @ ray - rot.T @ trans
def transform_3D_to_2D(K, extrinsics, P3D):
    """Project a homogeneous 3D point into the image.

    K          : 3x3 camera intrinsics.
    extrinsics : camera pose; only the top three rows (3x4 ``[R | t]``)
                 are used, so a 4x4 matrix is also accepted.
    P3D        : homogeneous 3D point (4-vector).

    Returns the dehomogenized 2D pixel coordinates.
    """
    projection = K @ extrinsics[:3, :]  # 3x4 projection matrix
    homogeneous = projection @ P3D
    # Divide out the homogeneous scale to land in pixel coordinates.
    return homogeneous[:-1] / homogeneous[-1]
def compute_likelihood(point_3D_ref, p3d_observed, sigma=1.0, weight=1):
    """Return the Euclidean distance between two 3D points and a weighted,
    unnormalized Gaussian likelihood of that distance.

    point_3D_ref : reference 3D point (numpy array).
    p3d_observed : observed 3D point (numpy array).
    sigma        : standard deviation of the Gaussian error model.
    weight       : multiplicative weight applied to the likelihood.

    Returns
    -------
    (eu_distance, likelihood) : float pair.
    """
    eu_distance = np.linalg.norm(point_3D_ref - p3d_observed)
    # Explicit `math.exp` instead of the star-imported `mt` alias: the call
    # no longer depends on what utils.gp_utils happens to re-export.
    # NOTE: deliberately unnormalized — the 1/(sqrt(2*pi)*sigma) factor is
    # intentionally omitted so likelihoods peak at `weight`.
    likelihood = weight * math.exp(-0.5 * (eu_distance**2) / (sigma**2))
    return eu_distance, likelihood
class COBRA:
    """Scores an estimated 6-DoF pose against clustered Gaussian-process
    shape templates loaded from disk (see ``utils.gp_utils``)."""

    def __init__(self, gps: ClusteredGPs, path) -> None:
        # Load the pre-trained GP ensemble from `path`, then keep the
        # cluster centers handy for classifying back-projected points.
        gps.__load__(path)
        self.gps = gps
        self.centers = gps.centers

    def calc_conf_lower_bound(self, delta, std_template, weights):
        # norm_factor = (1/ (delta**2)) * std_template
        # wsum = std_template * (1 - mt.exp(-((delta**2)/(2*std_template))))
        # return norm_factor * in_sum
        # Lower bound on the confidence score for inlier threshold `delta`,
        # given the template noise std and the per-point weights: sums the
        # weighted Gaussian tail mass and rescales by 1/delta^2.
        # NOTE(review): `mt` is star-imported from utils.gp_utils —
        # presumably the `math` module; confirm.
        norm_factor = 1 / delta**2
        wsum = np.sum(
            weights
            * std_template**2
            * (1 - mt.exp(-((delta**2) / (2 * std_template**2)))),
            axis=0,
        )
        return norm_factor * wsum

    def score_pose(
        self,
        points2D: NDArray,
        points3D: NDArray,
        RT: NDArray,
        K: NDArray,
        sigma_hat: float,
        weights: NDArray = None,
        delta: float = 5,
    ):
        """Score how well pose ``RT`` explains the 2D-3D correspondences.

        points2D  : (N, 2) image points.
        points3D  : (N, 3) corresponding model points (object frame).
        RT        : extrinsics used for back-projection.
        K         : 3x3 camera intrinsics.
        sigma_hat : estimated noise std of the GP template.
        weights   : optional per-point weights; defaults to all ones.
        delta     : inlier threshold for the confidence lower bound.

        Returns (mean likelihood score, per-point distances,
        confidence lower bound divided by N, GP-predicted points xyz).
        """
        back_proj_3D = []
        # back-project 2D points using the estimate pose
        # compute the 3D point across the ray with least squares
        for p2d, p3d in zip(points2D, points3D):
            back_proj_3D.append(transform_2D_to_3D(K, RT, np.append(p2d, 1.0), p3d))
        back_proj_3D = np.asarray(back_proj_3D)
        # classify backprojected points to reference points
        _, class_idxs = classify_points_to_center(back_proj_3D, self.gps.centers)
        # phi,theta,d parameterization
        distances = distance_from_centers(back_proj_3D, self.gps.centers, class_idxs)
        _, phi_thetas, ds_observed, sorted_indices = direction_distance_given_class(
            back_proj_3D, distances, self.gps.centers, class_idxs
        )
        # predict d given phis,thetas and compute the 3D point location
        # in the coordinate frame of the object
        _, xyz, _ = self.gps.predict_(phi_thetas, self.centers, class_idxs)
        likelihoods = []
        distances = []
        # if weights are not provided default to 1/N
        # (the all-ones array combined with the division by len(weights) in
        # the return expression supplies the 1/N averaging)
        if weights is None:
            weights = np.ones((len(xyz)))
        # compute the likelihood for each point
        # NOTE(review): xyz and back_proj_3D[sorted_indices] follow the
        # sorted order, while weights[idx] is indexed by enumeration
        # position — confirm caller-supplied weights are order-independent.
        for idx, (xyz_, xyz_ob) in enumerate(zip(xyz, back_proj_3D[sorted_indices])):
            eu_distance, likelihood = compute_likelihood(
                xyz_, xyz_ob, sigma=sigma_hat, weight=weights[idx]
            )
            likelihoods.append(likelihood)
            distances.append(eu_distance)
        # compute lower bound for confidence score
        conf_lower_bound = self.calc_conf_lower_bound(
            delta=delta, std_template=sigma_hat, weights=weights
        )
        return (
            np.sum(np.array(likelihoods)) / len(weights),
            np.array(distances),
            np.array(conf_lower_bound) / len(weights),
            xyz
        )