Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
tree: 61f4b14c8f
Fetching contributors…

Cannot retrieve contributors at this time

372 lines (301 sloc) 11.347 kb
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <stdio.h>
#include <cmath>
#include <libfreenect/libfreenect.h>
#include <libfreenect/libfreenect_sync.h>
// #include "nestk/ntk/ntk.h"
// #include "nestk/ntk/camera/calibration.h"
// I don't think we need this
// #include "libfreenect_cv.h"
// there is some apparently
// well known BS
// about how the .cpp files need
// to be included for this to work
// on mac os x's C compiler.
#include "maxflow/graph.h"
#include "maxflow/graph.cpp"
#include "maxflow/maxflow.cpp"
// Calibration matrices for the Kinect's RGB camera, as read from the
// calibration YAML file by loadCalibrationDataFromFile().
struct distortionMatrices {
// Distortion coefficients (1x5) for the rgb channel.
cv::Mat1d rgb_distortion;
// Intrinsics (3x3) for the rgb channel.
cv::Mat1d rgb_intrinsics;
};
// Reads one named matrix out of an already-opened calibration YAML file.
//
// @param input_file  open cv::FileStorage holding the calibration data
// @param name        key of the matrix node inside the YAML file
// @param matrix      output; receives a deep copy of the stored matrix
//
// FIX: the previous body, `matrix = &(CvMat*)node.readObj();`, did not
// compile (it assigned the address of a cast temporary through a reference),
// and the `CvMat&` parameter did not match the cv::Mat1d arguments both
// call sites pass. The parameter is now cv::Mat1d&, and the legacy CvMat*
// returned by readObj() is wrapped and deep-copied so the data survives
// after the FileStorage is closed.
void readMatrix(cv::FileStorage& input_file, const std::string& name, cv::Mat1d& matrix)
{
    cv::FileNode node = input_file[name];
    CvMat* m = (CvMat*)node.readObj();
    if (!m) {
        // Mirrors the ntk_throw_exception_if message that was commented out.
        fprintf(stderr, "Could not read field %s from yml file.\n", name.c_str());
        return;
    }
    matrix = cv::Mat(m).clone();
}
// Loads the RGB camera's intrinsics and distortion matrices from a
// calibration YAML file.
//
// @param filename  path to the calibration .yml file
// @return          distortionMatrices holding rgb_distortion (1x5) and
//                  rgb_intrinsics (3x3); the matrices stay empty if the
//                  file cannot be opened or the fields are missing.
distortionMatrices loadCalibrationDataFromFile(const char* filename)
{
    /*! Intrinsics 3x3 matrix for the rgb channel */
    cv::Mat1d rgb_intrinsics;
    /*! Distortion 1x5 matrix for rgb channel. */
    cv::Mat1d rgb_distortion;
    // FIX: CV_STORAGE_READ is a global C-API constant, not a member of the
    // cv namespace, so `cv::CV_STORAGE_READ` does not compile. Use the C++
    // FileStorage::READ flag instead.
    cv::FileStorage calibration_file(filename, cv::FileStorage::READ);
    if (!calibration_file.isOpened()) {
        fprintf(stderr, "Could not open calibration file %s\n", filename);
    }
    readMatrix(calibration_file, std::string("rgb_distortion"), rgb_distortion);
    readMatrix(calibration_file, std::string("rgb_intrinsics"), rgb_intrinsics);
    distortionMatrices distMatrices;
    distMatrices.rgb_distortion = rgb_distortion;
    distMatrices.rgb_intrinsics = rgb_intrinsics;
    return distMatrices;
}
// Maps an image pixel coordinate to its node index in the flow graph.
// Nodes are laid out row-major.
//
// @param r      pixel row (y)
// @param c      pixel column (x)
// @param width  pixels per row; defaults to 640, the Kinect VGA frame
//               width, so existing two-argument callers are unchanged
// @return       row-major linear index r * width + c
int graph_idx_from_coor(int r, int c, int width = 640) {
    return (r * width) + c;
}
// Extracts the 3-byte RGB triple of pixel (r, c) from a packed RGB24
// frame buffer with rows of 640 pixels.
//
// @param data  frame buffer, 3 bytes per pixel, row-major, 640 px per row
// @param r     pixel row
// @param c     pixel column
// @return      heap-allocated char[3] {r, g, b}; the caller owns it and
//              must delete[] it
//
// BUG FIX: the offset was previously (r * 640) + (c * 3), i.e. a row
// stride of 640 *bytes*. Each RGB24 row actually spans 640 * 3 bytes, so
// every row after the first read the wrong pixel. The correct offset is
// ((r * 640) + c) * 3.
char* get_rgb_array(char *data, int r, int c) {
    int data_pos = ((r * 640) + c) * 3;
    char* ret = new char[3];
    ret[0] = data[data_pos];     // r
    ret[1] = data[data_pos + 1]; // g
    ret[2] = data[data_pos + 2]; // b
    return ret;
}
// Computes the n-link edge weight between two neighboring pixels from
// their RGB triples: k * (1 - contrast), where contrast is the absolute
// difference of the two mean intensities normalized to [0, 1]. Similar
// pixels get weight near k (expensive to cut apart); very different
// pixels get weight near 0.
//
// @param x  rgb bytes of the first pixel (values are 0..255 intensities)
// @param y  rgb bytes of the second pixel
// @return   weight in [0, k]
//
// BUG FIX: the old body did everything in integer arithmetic —
// (x[0]+x[1]+x[2])/3 truncates, and abs(...)/255 is integer division, so
// `contrast` was 0 for almost any input and the function nearly always
// returned k. Also, plain char is signed on most ABIs, so intensities
// above 127 went negative. Bytes are now widened through unsigned char
// and all math is done in float.
float weight_calculator(char x[], char y[]) {
    const float k = 3.0f;
    float mean_x = ((unsigned char)x[0] + (unsigned char)x[1] + (unsigned char)x[2]) / 3.0f;
    float mean_y = ((unsigned char)y[0] + (unsigned char)y[1] + (unsigned char)y[2]) / 3.0f;
    float contrast = std::fabs(mean_x - mean_y) / 255.0f;
    return k * (1.0f - contrast);
}
int main(int argc, char **argv) {
distortionMatrices distMatrices = loadCalibrationDataFromFile("kinect_calibration.yml");
typedef Graph<double,double,double> GraphType;
char *rgb;
rgb = (char *)malloc(640 * 480 * 3);
unsigned int timestamp;
uint16_t *depth;
depth = (uint16_t *)malloc(640 * 480 * 2);
printf("Segmentation Loading ... \nPress ESC on the Segmentation window to quit.\nUsing CTRL+C might blow up your RAM. In your face.\n");
while (cvWaitKey(10) < 0) {
freenect_sync_get_video((void **)&rgb, &timestamp, 0, FREENECT_VIDEO_RGB);
if (!rgb) {
printf("Error: (rgb) Kinect not connected?\n");
return -1;
}
freenect_sync_get_depth((void **)&depth, &timestamp, 0, FREENECT_DEPTH_11BIT);
// okay somewhere here we calibrate.
// DEBUG TO SHOW DEPTH
//IplImage *depthImage = cvCreateImageHeader(cvSize(640, 480), 16, 1);
//cvSetData(depthImage, depth, 640 * 2);
//cvShowImage("Depth", GlViewColor(depthImage));
if (!depth) {
printf("Error: (depth) Kinect not here bro.\n");
return -1;
}
GraphType *g = new GraphType( (640 * 480), (640 * 480) );
// DEBUG TO SHOW RGB
//IplImage *image = cvCreateImageHeader(cvSize(640, 480), 8, 3);
//cvSetData(image, rgb, 640 * 3);
//cvCvtColor(image, image, CV_RGB2BGR);
//cvShowImage("RGB", image);
g -> add_node((640 * 480));
int r, c = 0;
for (r = 0; r < 480; r++) {
for (c = 0; c < 640; c++) {
int main_node = graph_idx_from_coor(r, c);
double weight = 0.0;
uint16_t depthval = (uint16_t)depth[(r * 640) + (c)];
double thres = 600;
g -> add_tweights(
main_node,
depthval,
2 * thres - depthval
);
// we only need to do this
// for every third row (top and bottom edge weights are set)
if ((r + 1) % 3 == 0) {
// top
if (r != 0) { // make sure we ain't at the
// top already
weight = weight_calculator(
get_rgb_array(rgb, r, c),
get_rgb_array(rgb, r - 1, c)
);
g -> add_edge(
main_node,
graph_idx_from_coor(r - 1, c),
weight,
weight
);
}
// bottom
if (r != 479) { // were not at maxbottom
weight = weight_calculator(
get_rgb_array(rgb, r, c),
get_rgb_array(rgb, r + 1, c)
);
g -> add_edge(
main_node,
graph_idx_from_coor(r + 1, c),
weight,
weight
);
}
} // if r+1 % 3
// we only need to do this
// every other column. (as right edges are set)
if ((c + 1) % 2) {
// right
if (c != 639) { // were arent at maxright
weight = weight_calculator(
get_rgb_array(rgb, r, c),
get_rgb_array(rgb, r, c + 1)
);
g -> add_edge(
main_node,
graph_idx_from_coor(r, c + 1),
weight,
weight
);
}
}
// left
/*if (c != 0) { // we arent at maxleft
weight = weight_calculator(
get_rgb_array(rgb, r, c),
get_rgb_array(rgb, r, c - 1)
);
g -> add_edge(
main_node,
graph_idx_from_coor(r, c - 1),
weight,
weight
);
}*/
}
}
int flow = g -> maxflow();
// DEBUG: printf("FLOW = %d\n", flow);
IplImage *segmentedImage = cvCreateImage(cvSize(640, 480), 8, 3);
char *segmentedImageData = segmentedImage->imageData;
int i = 0;
for (i = 0; i < 640 * 480; i++) {
int k = i * 3;
segmentedImageData[k] = 20;
segmentedImageData[k + 1] = 20;
segmentedImageData[k + 2] = 20;
if (g -> what_segment(i) == GraphType::SOURCE) {
segmentedImageData[k] = rgb[k];
segmentedImageData[k + 1] = rgb[k + 1];
segmentedImageData[k + 2] = rgb[k + 2];
}
}
cvCvtColor(segmentedImage, segmentedImage, CV_RGB2BGR);
cvShowImage("Segmented", segmentedImage);
}
free(rgb);
free(depth);
}
// I'm commenting this out because I don't think we particularly need it.
/*IplImage *GlViewColor(IplImage *depth)
{
static IplImage *image = 0;
if (!image) image = cvCreateImage(cvSize(640,480), 8, 3);
char *depth_mid = image->imageData;
int i;
for (i = 0; i < 640*480; i++) {
int lb = ((short *)depth->imageData)[i] % 256;
int ub = ((short *)depth->imageData)[i] / 256;
switch (ub) {
case 0:
depth_mid[3*i+2] = 255;
depth_mid[3*i+1] = 255-lb;
depth_mid[3*i+0] = 255-lb;
break;
case 1:
depth_mid[3*i+2] = 255;
depth_mid[3*i+1] = lb;
depth_mid[3*i+0] = 0;
break;
case 2:
depth_mid[3*i+2] = 255-lb;
depth_mid[3*i+1] = 255;
depth_mid[3*i+0] = 0;
break;
case 3:
depth_mid[3*i+2] = 0;
depth_mid[3*i+1] = 255;
depth_mid[3*i+0] = lb;
break;
case 4:
depth_mid[3*i+2] = 0;
depth_mid[3*i+1] = 255-lb;
depth_mid[3*i+0] = 255;
break;
case 5:
depth_mid[3*i+2] = 0;
depth_mid[3*i+1] = 0;
depth_mid[3*i+0] = 255-lb;
break;
default:
depth_mid[3*i+2] = 0;
depth_mid[3*i+1] = 0;
depth_mid[3*i+0] = 0;
break;
}
}
return image;
}
*/
/*void undistortImages()
{
cv::Mat3b tmp3b;
cv::Mat1f tmp1f;
cv::Mat3b& rgb_im = m_image->rgbRef();
const cv::Mat3b& raw_rgb_im = m_image->rawRgb();
if (m_image->calibration()->rgbSize() != m_image->calibration()->rawRgbSize())
{
// First cut color image to the undistorted image size (not used for kinect).
cv::Size rgb_size = m_image->calibration()->rgbSize();
cv::Mat roi = raw_rgb_im(cv::Rect((raw_rgb_im.cols-rgb_size.width)/2.0,
(raw_rgb_im.rows-rgb_size.height)/2.0,
rgb_size.width,
rgb_size.height));
roi.copyTo(rgb_im);
}
else
{
raw_rgb_im.copyTo(rgb_im);
}
if (!m_image->calibration()->zero_rgb_distortion)
{
remap(rgb_im, tmp3b,
m_image->calibration()->rgb_undistort_map1,
m_image->calibration()->rgb_undistort_map2,
CV_INTER_LINEAR);
tmp3b.copyTo(rgb_im);
}
if (!m_image->calibration()->zero_depth_distortion)
{
remap(m_image->rawDepthRef(), m_image->depthRef(),
m_image->calibration()->depth_undistort_map1,
m_image->calibration()->depth_undistort_map2,
CV_INTER_LINEAR);
}
else
{
m_image->rawDepthRef().copyTo(m_image->depthRef());
}
if (m_image->calibration()->zero_depth_distortion ||
(m_flags & NoAmplitudeIntensityUndistort))
{
m_image->rawAmplitudeRef().copyTo(m_image->amplitudeRef());
m_image->rawIntensityRef().copyTo(m_image->intensityRef());
}
else
{
remap(m_image->rawAmplitudeRef(), m_image->amplitudeRef(),
m_image->calibration()->depth_undistort_map1,
m_image->calibration()->depth_undistort_map2,
CV_INTER_LINEAR);
remap(m_image->rawIntensityRef(), m_image->intensityRef(),
m_image->calibration()->depth_undistort_map1,
m_image->calibration()->depth_undistort_map2,
CV_INTER_LINEAR);
}
}*/
Jump to Line
Something went wrong with that request. Please try again.