Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Add Euler angles to headpose tracker result #2175

Merged
merged 8 commits into from Aug 24, 2021
32 changes: 31 additions & 1 deletion pupil_src/shared_modules/head_pose_tracker/function/utils.py
Expand Up @@ -59,6 +59,35 @@ def convert_matrix_to_extrinsic(extrinsic_matrix):
return merge_extrinsics(rotation, translation)


def rod_to_euler(rotation_pose):
    """
    Convert a camera rotation from Rodrigues form to Euler angles in degrees.

    :param rotation_pose: compact Rodrigues rotation vector, representing the
        rotation axis with its length encoding the angle in radians to rotate
    :return: numpy array of Euler angles in degrees, ordered (pitch, yaw, roll),
        i.e. the rotations about the x, y and z axes according to the
        'right hand' convention (matches ORIENTATION_HEADER)
    """
    # Convert the Rodrigues rotation vector to a 3x3 rotation matrix.
    rot = cv2.Rodrigues(rotation_pose)[0]

    # Rotate 180 degrees about the y- and z-axes (corresponds to yaw and roll)
    # to align with the right hand rule, relative to the coordinate system of
    # the origin marker.
    flip_yz = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]])
    rot = np.matmul(rot, flip_yz)

    # Convert the matrix to Euler angles. When sin_y is (near) zero the
    # decomposition is degenerate (gimbal lock), so fall back to a formula
    # that fixes z = 0 and recovers x from a different matrix row.
    sin_y = np.hypot(rot[0, 0], rot[1, 0])
    if sin_y < 1e-6:
        x = np.arctan2(-rot[1, 2], rot[1, 1])
        y = -np.arctan2(-rot[2, 0], sin_y)
        z = 0
    else:
        x = np.arctan2(rot[2, 1], rot[2, 2])
        y = -np.arctan2(-rot[2, 0], sin_y)
        z = -np.arctan2(rot[1, 0], rot[0, 0])

    return np.rad2deg([x, y, z])


def get_camera_pose(camera_extrinsics):
papr marked this conversation as resolved.
Show resolved Hide resolved
if camera_extrinsics is None:
return get_none_camera_extrinsics()
Expand All @@ -67,7 +96,8 @@ def get_camera_pose(camera_extrinsics):
rotation_pose = -rotation_ext
translation_pose = np.matmul(-cv2.Rodrigues(rotation_ext)[0].T, translation_ext)
camera_pose = merge_extrinsics(rotation_pose, translation_pose)
return camera_pose
camera_orientation = rod_to_euler(rotation_pose)
return camera_pose, camera_orientation


def convert_marker_extrinsics_to_points_3d(marker_extrinsics):
Expand Down
Expand Up @@ -17,12 +17,15 @@

# Column headers for the exported CSV files.
ROTATION_HEADER = tuple("rotation_" + dim for dim in "xyz")
TRANSLATION_HEADER = tuple("translation_" + dim for dim in "xyz")
# Euler-angle orientation columns, in degrees (pitch = x, yaw = y, roll = z).
ORIENTATION_HEADER = ("pitch", "yaw", "roll")
VERTICES_HEADER = tuple(
    "vert_{}_{}".format(idx, dim) for idx in range(4) for dim in "xyz"
)

MODEL_HEADER = ("marker_id",) + VERTICES_HEADER
POSES_HEADER = (
    ("timestamp",) + ROTATION_HEADER + TRANSLATION_HEADER + ORIENTATION_HEADER
)


def export_routine(rec_dir, model, poses):
Expand All @@ -32,14 +35,16 @@ def export_routine(rec_dir, model, poses):

def _export_model(rec_dir, model):
    """Write the head pose tracker's marker model to head_pose_tracker_model.csv.

    :param rec_dir: recording directory the CSV is written into
    :param model: iterable of rows matching MODEL_HEADER
    """
    logger.info("Exporting head pose model to {}".format(rec_dir))
    # NOTE: filename deliberately spelled "tracker" — an earlier revision
    # shipped with the "tacker" typo.
    model_path = os.path.join(rec_dir, "head_pose_tracker_model.csv")
    _write_csv(model_path, MODEL_HEADER, model)


def _export_poses(rec_dir, poses):
    """Write one CSV row per head pose to head_pose_tracker_poses.csv.

    Each row is (timestamp, *camera_poses, *camera_orientation), matching
    POSES_HEADER.

    :param rec_dir: recording directory the CSV is written into
    :param poses: iterable of dicts with "timestamp", "camera_poses" and
        "camera_orientation" keys
    """
    logger.info("Exporting {} head poses to {}".format(len(poses), rec_dir))
    poses_path = os.path.join(rec_dir, "head_pose_tracker_poses.csv")
    # Flatten each pose dict into one row of scalars for the CSV writer.
    poses_flat = [
        (p["timestamp"], *p["camera_poses"], *p["camera_orientation"]) for p in poses
    ]
    _write_csv(poses_path, POSES_HEADER, poses_flat)


Expand Down
Expand Up @@ -18,13 +18,14 @@

def get_pose_data(extrinsics, timestamp):
if extrinsics is not None:
camera_poses = utils.get_camera_pose(extrinsics)
camera_poses, camera_orientation = utils.get_camera_pose(extrinsics)
camera_pose_matrix = utils.convert_extrinsic_to_matrix(camera_poses)
return {
"camera_extrinsics": extrinsics.tolist(),
"camera_poses": camera_poses.tolist(),
"camera_trace": camera_poses[3:6].tolist(),
"camera_pose_matrix": camera_pose_matrix.tolist(),
"camera_orientation": camera_orientation.tolist(),
"timestamp": timestamp,
}
else:
Expand All @@ -33,6 +34,7 @@ def get_pose_data(extrinsics, timestamp):
"camera_poses": [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
"camera_trace": [np.nan, np.nan, np.nan],
"camera_pose_matrix": None,
"camera_orientation": [np.nan, np.nan, np.nan],
"timestamp": timestamp,
}

Expand Down