forked from microsoft/HoloLensForCV
-
Notifications
You must be signed in to change notification settings - Fork 4
/
DepthPvMapper.cpp
142 lines (128 loc) · 5.89 KB
/
DepthPvMapper.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
#include "pch.h"
#include "DepthPvMapper.h"
namespace ComputeOnDevice
{
// Builds a per-pixel lookup table mapping each depth-image pixel (x, y) to
// its point on the camera unit plane, via the sensor's streaming intrinsics.
// Returns a CV_32FC2 Mat the size of the depth frame; pixels the intrinsics
// cannot map are left at (0, 0).
//
// Bug fix: the inner loop previously ran y over PixelWidth instead of
// PixelHeight, so for non-square depth frames rows were either never filled
// in or written out of bounds.
cv::Mat DepthPvMapper::createImageToCamMapping(HoloLensForCV::SensorFrame^ depthFrame) {
    const int width = depthFrame->SoftwareBitmap->PixelWidth;
    const int height = depthFrame->SoftwareBitmap->PixelHeight;
    cv::Mat imageToCameraMapping(height, width, CV_32FC2, cv::Scalar::all(0));
    for (int x = 0; x < width; ++x) {
        for (int y = 0; y < height; ++y) {
            Windows::Foundation::Point uv = { float(x), float(y) };
            Windows::Foundation::Point xy(0, 0);
            // MapImagePointToCameraUnitPlane returns false for pixels with no
            // valid mapping; those stay (0, 0) in the table.
            if (depthFrame->SensorStreamingCameraIntrinsics->MapImagePointToCameraUnitPlane(uv, &xy)) {
                imageToCameraMapping.at<cv::Vec2f>(y, x) = cv::Vec2f(xy.X, xy.Y);
            }
        }
    }
    return imageToCameraMapping;
}
// Convenience constructor: immediately builds and caches the
// image-to-camera-unit-plane mapping for the given depth frame (see Init()).
DepthPvMapper::DepthPvMapper(HoloLensForCV::SensorFrame^ depthFrame)
{
Init(depthFrame);
}
// Default constructor: leaves the mapper uninitialized. Init() must be
// called before MapDepthToPV()/get4DPointCloudFromDepth() are used.
DepthPvMapper::DepthPvMapper()
{
}
// Destructor: nothing to release explicitly — the cached cv::Mat member
// cleans up after itself.
DepthPvMapper::~DepthPvMapper()
{
}
// (Re)initializes the mapper by caching the per-pixel image -> camera unit
// plane mapping derived from this depth frame's intrinsics.
void DepthPvMapper::Init(HoloLensForCV::SensorFrame^ depthFrame) {
_imageToCameraMapping = createImageToCamMapping(depthFrame);
}
// Copies a Windows float4x4 into a 4x4 CV_32F cv::Mat, element for element
// (m_rc -> result(r-1, c-1)).
static cv::Mat floatMToCvMat(Windows::Foundation::Numerics::float4x4 in) {
    cv::Mat converted = (cv::Mat_<float>(4, 4) <<
        in.m11, in.m12, in.m13, in.m14,
        in.m21, in.m22, in.m23, in.m24,
        in.m31, in.m32, in.m33, in.m34,
        in.m41, in.m42, in.m43, in.m44);
    return converted;
}
// Multiplies `vec`, treated as a row vector, by the 4x4 matrix `m`
// (row-vector convention: result = vec * m).
static cv::Vec4f vecDotM(cv::Vec4f vec, Windows::Foundation::Numerics::float4x4 m) {
    // Lay the matrix out row-major so the product can be written as a loop.
    const float rows[4][4] = {
        { m.m11, m.m12, m.m13, m.m14 },
        { m.m21, m.m22, m.m23, m.m24 },
        { m.m31, m.m32, m.m33, m.m34 },
        { m.m41, m.m42, m.m43, m.m44 },
    };
    cv::Vec4f product(0.f, 0.f, 0.f, 0.f);
    for (int col = 0; col < 4; ++col) {
        // Accumulate rows 0..3 in order so float summation order matches the
        // original unrolled form exactly.
        for (int row = 0; row < 4; ++row) {
            product.val[col] += vec.val[row] * rows[row][col];
        }
    }
    return product;
}
// Projects depth sensor data to PV frame and returns Mat with measured distances in mm in PV frame coordinates.
// Result is CV_16UC1 at the PV frame's resolution; pixels that received no
// depth sample are 0. depthRangeFrom/depthRangeTo clip the usable depth
// range (passed through to get4DPointCloudFromDepth); patchRadius splats
// each projected sample over a (2*patchRadius+1)^2 pixel patch to densify
// the sparse projection.
//
// NOTE(review): samples overwrite each other in iteration order — there is
// no z-buffer, so an occluded point can win over a nearer one; and nothing
// rejects points that project from behind the PV camera. Confirm this is
// acceptable for the intended use.
cv::Mat DepthPvMapper::MapDepthToPV(HoloLensForCV::SensorFrame^ pvFrame, HoloLensForCV::SensorFrame^ depthFrame,
int depthRangeFrom, int depthRangeTo, int patchRadius) {
int pvWidth = pvFrame->SoftwareBitmap->PixelWidth;
int pvHeight = pvFrame->SoftwareBitmap->PixelHeight;
cv::Mat res(pvHeight, pvWidth, CV_16UC1, cv::Scalar::all(0));
// Depth pixels converted to homogeneous 4D points in depth-camera view space.
cv::Mat pointCloud = get4DPointCloudFromDepth(depthFrame, depthRangeFrom, depthRangeTo);
cv::Mat depthImage;
rmcv::WrapHoloLensSensorFrameWithCvMat(depthFrame, depthImage);
auto depthFrameToOrigin = depthFrame->FrameToOrigin;
auto depthCamViewTransform = depthFrame->CameraViewTransform;
auto pvFrameToOrigin = pvFrame->FrameToOrigin;
auto pvCamViewTransform = pvFrame->CameraViewTransform;
auto pvCamProjTransform = pvFrame->CameraProjectionTransform;
Windows::Foundation::Numerics::float4x4 depthCamViewTransformInv;
Windows::Foundation::Numerics::float4x4 pvFrameToOriginInv;
// Either inverse can fail (singular matrix, e.g. lost tracking); bail out
// with an all-zero result rather than projecting garbage.
if (!Windows::Foundation::Numerics::invert(depthCamViewTransform, &depthCamViewTransformInv) ||
!Windows::Foundation::Numerics::invert(pvFrameToOrigin, &pvFrameToOriginInv))
{
dbg::trace(L"Can't map depth to pv, invalid transform matrices");
return res;
}
// build point cloud -> pv view transform matrix
// (row-vector convention: composed left-to-right as the point travels
// depth view space -> world -> PV frame -> PV view -> clip space)
auto depthPointToWorld = depthCamViewTransformInv * depthFrameToOrigin;
auto depthPointToPvFrame = depthPointToWorld * pvFrameToOriginInv;
auto depthPointToCamView = depthPointToPvFrame * pvCamViewTransform;
auto depthPointToImage = depthPointToCamView * pvCamProjTransform;
// loop through point cloud and estimate coordinates
for (int x = 0; x < pointCloud.cols; ++x) {
for (int y = 0; y < pointCloud.rows; ++y) {
cv::Vec4f point = pointCloud.at<cv::Vec4f>(y, x);
// All-zero entries mark depth pixels that were out of range — skip them.
if (point.val[0] == 0 && point.val[1] == 0 && point.val[2] == 0)
continue;
// project point
cv::Vec4f projPoint = vecDotM(point, depthPointToImage);
// Perspective divide by w to get normalized device coordinates.
cv::Vec3f normProjPoint = cv::Vec3f(projPoint.val[0] / projPoint.val[3], projPoint.val[1] / projPoint.val[3], projPoint.val[2] / projPoint.val[3]);
// convert point with central origin and y axis up to pv image coordinates
if (normProjPoint.val[0] > -1 && normProjPoint.val[0] < 1 && normProjPoint.val[1] > -1 && normProjPoint.val[1] < 1)
{
int imgX = (int)(pvWidth * (normProjPoint.val[0] + 1) / 2.0);
int imgY = (int)(pvHeight * (1 - (normProjPoint.val[1] + 1) / 2.0));
// Splat the raw depth value (mm) over the surrounding patch, clipped
// to the image bounds.
for (int i = imgX - patchRadius; i <= imgX + patchRadius; i++)
for (int j = imgY - patchRadius; j <= imgY + patchRadius; j++)
if (i >= 0 && j >= 0 && i < res.cols && j < res.rows)
res.at<ushort>(j, i) = (ushort)depthImage.at<ushort>(y, x);
}
}
}
return res;
}
// Converts a depth frame into a CV_32FC4 point cloud in the depth camera's
// view space. Each pixel whose raw depth (mm) lies in
// [depthRangeFrom, depthRangeTo] becomes a homogeneous point (x, y, z, 1);
// all other pixels stay all-zero. Requires Init()/the converting ctor to
// have populated _imageToCameraMapping for this sensor's intrinsics.
//
// Cleanup vs. the previous version: removed the unused `uv` local and the
// redundant `xy` copy of the mapping entry, and the raw depth is now read
// from the Mat once per pixel instead of three times. Behavior is unchanged.
cv::Mat DepthPvMapper::get4DPointCloudFromDepth(HoloLensForCV::SensorFrame^ depthFrame, int depthRangeFrom, int depthRangeTo) {
    cv::Mat depthImage;
    rmcv::WrapHoloLensSensorFrameWithCvMat(depthFrame, depthImage);
    cv::Mat pointCloud(depthImage.rows, depthImage.cols, CV_32FC4, cv::Scalar::all(0));
    for (int x = 0; x < depthImage.cols; ++x) {
        for (int y = 0; y < depthImage.rows; ++y) {
            // Raw sensor reading in millimeters.
            const unsigned short rawDepth = depthImage.at<unsigned short>(y, x);
            if (rawDepth < depthRangeFrom || rawDepth > depthRangeTo) {
                continue;
            }
            // Direction through this pixel on the camera unit plane (z = 1),
            // precomputed from the intrinsics by createImageToCamMapping.
            const cv::Vec2f camPoint = _imageToCameraMapping.at<cv::Vec2f>(y, x);
            cv::Point3f d(camPoint.val[0], camPoint.val[1], 1);
            // Normalize the ray and scale it to the measured range in meters.
            // The negation matches the original code; presumably the depth
            // camera's view space looks down -z — confirm against the caller.
            d *= -(rawDepth / 1000.0) * (1 / sqrt(d.x * d.x + d.y * d.y + 1));
            pointCloud.at<cv::Vec4f>(y, x) = cv::Vec4f(d.x, d.y, d.z, 1);
        }
    }
    return pointCloud;
}
}