Skip to content
Browse files

new gesture engine, still figuring stuff out, initial push

  • Loading branch information...
1 parent 53c2e45 commit 8ba62c92f2d54f8e7bbc8e4ce5dd42a459a0b880 Roy Shilkrot committed Mar 6, 2011
View
104 new_cv/NewCV/FreenectDevice.h
@@ -0,0 +1,104 @@
+/*
+ * FreenectDevice.h
+ * NewCV
+ *
+ * Created by Roy Shilkrot on 3/3/11.
+ * Copyright 2011 MIT. All rights reserved.
+ *
+ */
+#include "libfreenect.hpp"
+#include <pthread.h>
+#include <cv.hpp>
+#include <highgui.h>
+#include <ml.h>
+
+using namespace cv;
+
+#include <iostream>
+using namespace std;
+
/**
 * Thin RAII wrapper around a POSIX mutex.
 *
 * Fixes over the original:
 *  - the destructor now releases the OS mutex with pthread_mutex_destroy()
 *    (the original never destroyed it, leaking the mutex resources);
 *  - copying is disallowed via declared-but-undefined copy operations
 *    (pre-C++11 idiom, matching this file's vintage) -- copying a
 *    pthread_mutex_t handle is undefined behavior.
 *
 * lock()/unlock() intentionally ignore pthread return codes, exactly as
 * the original did.
 */
class Mutex {
public:
	Mutex() {
		pthread_mutex_init( &m_mutex, NULL );
	}
	~Mutex() {
		// Release the underlying OS mutex; missing in the original (leak).
		pthread_mutex_destroy( &m_mutex );
	}
	void lock() {
		pthread_mutex_lock( &m_mutex );
	}
	void unlock() {
		pthread_mutex_unlock( &m_mutex );
	}
private:
	// Non-copyable: declared, never defined.
	Mutex( const Mutex& );
	Mutex& operator=( const Mutex& );

	pthread_mutex_t m_mutex;
};
+
+// Kinect device wrapper: receives RGB and depth frames on the libfreenect
+// thread and hands them to the application thread as OpenCV Mats.
+// Thread-safety: VideoCallback/DepthCallback run on the freenect event
+// thread while getVideo/getDepth run on the caller's thread; each stream
+// is guarded by its own Mutex, and the m_new_*_frame flags signal that a
+// fresh frame is available.
+class MyFreenectDevice : public Freenect::FreenectDevice {
+public:
+ MyFreenectDevice(freenect_context *_ctx, int _index)
+ : Freenect::FreenectDevice(_ctx, _index), m_buffer_depth(FREENECT_DEPTH_11BIT_SIZE),m_buffer_rgb(FREENECT_VIDEO_RGB_SIZE), m_gamma(2048), m_new_rgb_frame(false), m_new_depth_frame(false),
+ depthMat(Size(640,480),CV_16UC1), rgbMat(Size(640,480),CV_8UC3,Scalar(0)), ownMat(Size(640,480),CV_8UC3,Scalar(0))
+ {
+ // Precompute a gamma curve over the 11-bit (0..2047) depth range.
+ // NOTE(review): m_gamma is filled here but never read in this class --
+ // presumably kept for depth visualization elsewhere; confirm before removing.
+ for( unsigned int i = 0 ; i < 2048 ; i++) {
+ float v = i/2048.0;
+ v = std::pow(v, 3)* 6;
+ m_gamma[i] = v*6*256;
+ }
+ }
+ // Do not call directly even in child
+ void VideoCallback(void* _rgb, uint32_t timestamp) {
+// std::cout << "RGB callback" << std::endl;
+ m_rgb_mutex.lock();
+ uint8_t* rgb = static_cast<uint8_t*>(_rgb);
+ // Shallow re-point: rgbMat wraps libfreenect's internal buffer without
+ // copying. NOTE(review): the Mat dangles if the library recycles the
+ // buffer before getVideo() copies it out -- verify buffer lifetime.
+ rgbMat.data = rgb;
+ m_new_rgb_frame = true;
+ m_rgb_mutex.unlock();
+ };
+ // Do not call directly even in child
+ void DepthCallback(void* _depth, uint32_t timestamp) {
+// std::cout << "Depth callback" << std::endl;
+ m_depth_mutex.lock();
+ uint16_t* depth = static_cast<uint16_t*>(_depth);
+ // Same shallow-wrap caveat as VideoCallback, for the 16-bit depth buffer.
+ depthMat.data = (uchar*) depth;
+ m_new_depth_frame = true;
+ m_depth_mutex.unlock();
+ }
+
+ // Copies the latest RGB frame into `output` (converted RGB->BGR for
+ // OpenCV display). Returns false if no new frame arrived since the
+ // last call; `output` is untouched in that case.
+ bool getVideo(Mat& output) {
+ m_rgb_mutex.lock();
+ if(m_new_rgb_frame) {
+ cv::cvtColor(rgbMat, output, CV_RGB2BGR);
+ m_new_rgb_frame = false;
+ m_rgb_mutex.unlock();
+ return true;
+ } else {
+ m_rgb_mutex.unlock();
+ return false;
+ }
+ }
+
+ // Deep-copies the latest 16-bit depth frame into `output`.
+ // Returns false (leaving `output` untouched) if no new frame is pending.
+ bool getDepth(Mat& output) {
+ m_depth_mutex.lock();
+ if(m_new_depth_frame) {
+ depthMat.copyTo(output);
+ m_new_depth_frame = false;
+ m_depth_mutex.unlock();
+ return true;
+ } else {
+ m_depth_mutex.unlock();
+ return false;
+ }
+ }
+
+private:
+ // NOTE(review): m_buffer_depth / m_buffer_rgb are allocated but unused;
+ // frames are wrapped in the Mats above instead.
+ std::vector<uint8_t> m_buffer_depth;
+ std::vector<uint8_t> m_buffer_rgb;
+ std::vector<uint16_t> m_gamma;
+ Mat depthMat;
+ Mat rgbMat;
+ Mat ownMat;
+ Mutex m_rgb_mutex;
+ Mutex m_depth_mutex;
+ bool m_new_rgb_frame;
+ bool m_new_depth_frame;
+};
+
View
419 new_cv/NewCV/main.cpp
@@ -0,0 +1,419 @@
+#include "FreenectDevice.h"
+
+// Segments `mask` into blobs and picks the most plausible "hand" blob.
+//
+// Pipeline: blur + threshold the mask, find external contours, then score
+// each contour by area weighted by proximity to the previously-tracked
+// blob center (`previous`), so the tracker prefers the blob nearest to
+// where the hand was last seen.
+//
+// Outputs:
+//  dst            - binary image with the winning blob filled (allocated
+//                   from img.size() on first call).
+//  contour        - points of the winning (weighted-largest) contour.
+//  second_contour - points of the largest remaining contour, if any.
+//  previous       - updated in place to the winner's centroid.
+// Returns a Scalar packed as [centroid_x, centroid_y, largest_area,
+// second_area], or Scalar(-1,-1) when no contour was found.
+Scalar refineSegments(const Mat& img,
+ Mat& mask,
+ Mat& dst,
+ vector<Point>& contour,
+ vector<Point>& second_contour,
+ Point2i& previous)
+{
+ // int niters = 3;
+
+ vector<vector<Point> > contours;
+ vector<Vec4i> hierarchy;
+
+ Mat temp;
+
+ // Smooth then threshold: keeps only solid regions of the mask.
+ blur(mask, temp, Size(11,11));
+ temp = temp > 85.0;
+
+ findContours( temp, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
+
+ if(dst.data==NULL)
+ dst = Mat::zeros(img.size(), CV_8UC1);
+ else
+ dst.setTo(Scalar(0));
+
+ if( contours.size() == 0 )
+ return Scalar(-1,-1);
+
+ // iterate through all the top-level contours,
+ // draw each connected component with its own random color
+ int idx = 0, largestComp = -1, secondlargest = -1;
+ double maxWArea = 0, maxJArea = 0;
+ vector<double> justarea(contours.size());
+ vector<double> weightedarea(contours.size());
+
+ // for( ; idx >= 0; idx = hierarchy[idx][0] )
+ for (; idx<contours.size(); idx++)
+ {
+ const vector<Point>& c = contours[idx];
+ Scalar _mean = mean(Mat(contours[idx]));
+ justarea[idx] = fabs(contourArea(Mat(c)));
+ // NOTE: "previous.x >- 1" parses as "previous.x > -1", i.e. only
+ // apply the distance penalty once a previous blob center exists.
+ weightedarea[idx] = fabs(contourArea(Mat(c))) /
+ ((previous.x >- 1) ? (1.0 + norm(Point(_mean[0],_mean[1])-previous)) : 1.0); //consider distance from last blob
+ }
+ // Winner: largest distance-weighted area.
+ for (idx = 0; idx<contours.size(); idx++) {
+ if( weightedarea[idx] > maxWArea )
+ {
+ maxWArea = weightedarea[idx];
+ largestComp = idx;
+ }
+ }
+ // Runner-up: largest raw area excluding the winner.
+ for (idx = 0; idx < contours.size(); idx++) {
+ if ( justarea[idx] > maxJArea && idx != largestComp ) {
+ maxJArea = justarea[idx];
+ secondlargest = idx;
+ }
+ }
+
+ Scalar color( 255 );
+ // cout << "largest cc " << largestComp << endl;
+ // drawContours( dst, contours, largestComp, color, CV_FILLED); //, 8, hierarchy );
+ // for (idx=0; idx<contours[largestComp].size()-1; idx++) {
+ // line(dst, contours[largestComp][idx], contours[largestComp][idx+1], color, 2);
+ //
+ if(largestComp >= 0) {
+ int num = contours[largestComp].size();
+ Point* pts = &(contours[largestComp][0]);
+ fillPoly(dst, (const Point**)(&pts), &num, 1, color);
+
+ // Pack result: [0],[1] = centroid; [2] = winner's raw area.
+ Scalar b = mean(Mat(contours[largestComp]));
+ b[2] = justarea[largestComp];
+
+ contour.clear();
+ contour = contours[largestComp];
+
+ second_contour.clear();
+ if(secondlargest >= 0) {
+ second_contour = contours[secondlargest];
+ b[3] = maxJArea;
+ }
+
+ // Persist the centroid for the next frame's distance weighting.
+ previous.x = b[0]; previous.y = b[1];
+ return b;
+ } else
+ return Scalar(-1,-1);
+
+}
+
+#define LABEL_GARBAGE 0
+#define LABEL_OPEN 1
+#define LABEL_FIST 2
+#define LABEL_THUMB 3
+
+
+// Interactive gesture-training loop.
+//
+// Grabs RGB+depth from the Kinect, normalizes/inpaints the depth image,
+// segments the nearest blob (assumed to be the hand), builds a log-polar
+// grid-count descriptor around the blob center, and optionally classifies
+// it with a PCA + k-NN pipeline.
+//
+// Key bindings (handled at the bottom of the loop):
+//  ESC: quit   BACKSPACE: snapshot rgb    g/o/f/h: label current
+//  descriptor as garbage/open/fist/thumb   t: train   s: save   l: load
+int main(int argc, char **argv) {
+ bool die(false);
+ string filename("snapshot");
+ string suffix(".png");
+ int i_snap(0),iter(0);
+
+ Mat depthMat(Size(640,480),CV_16UC1);
+ Mat depthf (Size(640,480),CV_8UC1);
+ Mat rgbMat(Size(640,480),CV_8UC3,Scalar(0));
+ Mat ownMat(Size(640,480),CV_8UC3,Scalar(0));
+
+ Freenect::Freenect<MyFreenectDevice> freenect;
+ MyFreenectDevice& device = freenect.createDevice(0);
+
+ bool registered = false;
+ Mat blobMaskOutput = Mat::zeros(Size(640,480),CV_8UC1),outC;
+ Point midBlob;
+
+ // Descriptor grid: num_x_reps x num_y_reps cells over a log-polar strip
+ // starting at column startX, sizeX wide.
+ int startX = 250, sizeX = 150, num_x_reps = 10, num_y_reps = 10;
+ double height_over_num_y_reps = 480/num_y_reps,
+ width_over_num_x_reps = sizeX/num_x_reps;
+
+
+ vector<double> _d(num_x_reps * num_y_reps); //the descriptor
+ Mat descriptorMat(_d);
+
+// CvNormalBayesClassifier classifier; //doesnt work
+ CvKNearest classifier;
+// CvSVM classifier; //doesnt work
+// CvBoost classifier; //only good for 2 classes
+// CvDTree classifier;
+
+
+ vector<vector<double> > training_data;
+ vector<int> label_data;
+ PCA pca;
+ Mat labelMat, dataMat;
+ // Exponentially-smoothed per-label vote counts (see the `trained` branch).
+ vector<float> label_counts(4);
+
+ bool trained = false, loaded = false;
+
+ device.startVideo();
+ device.startDepth();
+ while (!die) {
+ device.getVideo(rgbMat);
+ device.getDepth(depthMat);
+// cv::imshow("rgb", rgbMat);
+// depthMat.convertTo(depthf, CV_8UC1, 255.0/2048.0);
+// cv::imshow("depth",depthf);
+
+ //interpolation & inpainting
+ {
+ Mat _tmp,_tmp1; // = (depthMat - 400.0); //minimum observed value is ~440. so shift a bit
+ Mat(depthMat - 400.0).convertTo(_tmp1,CV_64FC1);
+ // NOTE(review): _tmp has no allocated data here, so this setTo looks
+ // like a no-op; the far-plane cutoff may never be applied -- verify.
+ _tmp.setTo(Scalar(2048), depthMat > 750.0); //cut off at 600 to create a "box" where the user interacts
+// _tmp.convertTo(depthf, CV_8UC1, 255.0/1648.0); //values are 0-2048 (11bit), account for -400 = 1648
+
+ //quadratic interpolation
+// cv::pow(_tmp,2.0,_tmp1);
+// _tmp1 = _tmp1 * 4.0;
+
+// try {
+// cv:log(_tmp,_tmp1);
+// }
+// catch (cv::Exception e) {
+// cerr << e.what() << endl;
+// exit(0);
+// }
+
+ // Normalize depth to 8-bit, then inpaint invalid (=255) pixels on a
+ // downscaled copy for speed and upsample the result back.
+ Point minLoc; double minval,maxval;
+ minMaxLoc(_tmp1, &minval, &maxval, NULL, NULL);
+ _tmp1.convertTo(depthf, CV_8UC1, 255.0/maxval);
+
+ Mat small_depthf; resize(depthf,small_depthf,Size(),0.2,0.2);
+ cv::inpaint(small_depthf,(small_depthf == 255),_tmp1,5.0,INPAINT_TELEA);
+
+ resize(_tmp1, _tmp, depthf.size());
+ _tmp.copyTo(depthf, (depthf == 255));
+ }
+
+ cvtColor(depthf, outC, CV_GRAY2BGR);
+
+ Mat blobMaskInput = depthf < 120; //anything not white is "real" depth, TODO: inpainting invalid data
+ vector<Point> ctr,ctr2;
+
+ //closest point to the camera
+ Point minLoc; double minval,maxval;
+ minMaxLoc(depthf, &minval, &maxval, &minLoc, NULL, blobMaskInput);
+ circle(outC, minLoc, 5, Scalar(0,255,0), 3);
+
+ // Keep only pixels within a thin depth slice behind the closest point.
+ blobMaskInput = depthf < (minval + 18);
+
+ Scalar blb = refineSegments(Mat(),blobMaskInput,blobMaskOutput,ctr,ctr2,midBlob); //find contours in the foreground, choose biggest
+// if (blobMaskOutput.data != NULL) {
+// imshow("first", blobMaskOutput);
+// }
+ /////// blb :
+ //blb[0] = x, blb[1] = y, blb[2] = 1st blob size, blb[3] = 2nd blob size.
+
+
+
+ if(blb[0]>=0 && blb[2] > 500) { //1st blob detected, and is big enough
+ //cvtColor(depthf, outC, CV_GRAY2BGR);
+
+ Scalar mn,stdv;
+ meanStdDev(depthf,mn,stdv,blobMaskInput);
+
+ //cout << "min: " << minval << ", max: " << maxval << ", mean: " << mn[0] << endl;
+
+ //now refining blob by looking at the mean depth value it has...
+ blobMaskInput = depthf < (mn[0] + stdv[0]);
+
+ blb = refineSegments(Mat(),blobMaskInput,blobMaskOutput,ctr,ctr2,midBlob);
+
+ imshow("blob", blobMaskOutput);
+
+ if(blb[0] >= 0 && blb[2] > 300) {
+ //draw contour
+ Scalar color(0,0,255);
+ for (int idx=0; idx<ctr.size()-1; idx++)
+ line(outC, ctr[idx], ctr[idx+1], color, 1);
+ line(outC, ctr[ctr.size()-1], ctr[0], color, 1);
+
+ if(ctr2.size() > 0) { //second blob detected
+ Scalar color2(255,0,255);
+ for (int idx=0; idx<ctr2.size()-1; idx++)
+ line(outC, ctr2[idx], ctr2[idx+1], color2, 2);
+ line(outC, ctr2[ctr2.size()-1], ctr2[0], color2, 2);
+ }
+
+ //blob center
+ circle(outC, Point(blb[0],blb[1]), 50, Scalar(255,0,0), 3);
+
+ // Edge map of the blob's depth, then log-polar unwarp around the
+ // blob center -> rotation/scale tolerant representation.
+ Mat blobDepth,blobEdge;
+ depthf.copyTo(blobDepth,blobMaskOutput);
+ Laplacian(blobDepth, blobEdge, 8);
+// equalizeHist(blobEdge, blobEdge);//just for visualization
+
+ Mat logPolar(depthf.size(),CV_8UC1);
+ cvLogPolar(&((IplImage)blobEdge), &((IplImage)logPolar), Point2f(blb[0],blb[1]), 80.0);
+
+// for (int i=0; i<num_x_reps+1; i++) {
+// //verical lines
+// line(logPolar, Point(startX+i*width_over_num_x_reps, 0), Point(startX+i*width_over_num_x_reps,479), Scalar(255), 2);
+// }
+// for(int i=0; i<num_y_reps+1; i++) {
+// //horizontal
+// line(logPolar, Point(startX, i*height_over_num_y_reps), Point(startX+sizeX,i*height_over_num_y_reps), Scalar(255), 2);
+// }
+
+ double total = 0.0;
+
+ //histogram
+ for (int i=0; i<num_x_reps; i++) {
+ for(int j=0; j<num_y_reps; j++) {
+ Mat part = logPolar(
+ Range(j*height_over_num_y_reps,(j+1)*height_over_num_y_reps),
+ Range(startX+i*width_over_num_x_reps,startX+(i+1)*width_over_num_x_reps)
+ );
+
+ int count = countNonZero(part); //TODO: use calcHist
+// part.setTo(Scalar(count/10.0)); //for debug: show the value in the image
+
+ // NOTE(review): index uses num_x_reps for both dimensions; only
+ // correct while num_x_reps == num_y_reps (both 10 here).
+ _d[i*num_x_reps + j] = count;
+ total += count;
+ }
+ }
+
+ // Normalize so the descriptor sums to 1 (descriptorMat shares _d's data).
+ descriptorMat = descriptorMat / total;
+
+ /*
+ Mat images[1] = {logPolar(Range(0,30),Range(0,30))};
+ int nimages = 1;
+ int channels[1] = {0};
+ int dims = 1;
+ float range_0[]={0,256};
+ float* ranges[] = { range_0 };
+ int histSize[1] = { 5 };
+
+ calcHist(, <#int nimages#>, <#const int *channels#>, <#const Mat mask#>, <#MatND hist#>, <#int dims#>, <#const int *histSize#>, <#const float **ranges#>, <#bool uniform#>, <#bool accumulate#>)
+ */
+
+// Mat _tmp(logPolar.size(),CV_8UC1);
+// cvLogPolar(&((IplImage)logPolar), &((IplImage)_tmp),Point2f(blb[0],blb[1]), 80.0, CV_WARP_INVERSE_MAP);
+// imshow("descriptor", _tmp);
+// imshow("logpolar", logPolar);
+ }
+ }
+
+ if(trained) {
+ // Project the live descriptor through the trained PCA and classify
+ // with 1-NN; votes are smoothed over frames via label_counts.
+ Mat results(1,1,CV_32FC1);
+ Mat samples; Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
+
+ Mat samplesAfterPCA = pca.project(samples);
+
+ classifier.find_nearest(&((CvMat)samplesAfterPCA), 1, &((CvMat)results));
+// ((float*)results.data)[0] = classifier.predict(&((CvMat)samples))->value;
+
+ // Decay all votes, bump the predicted label, report the max vote.
+ Mat lc(label_counts); lc *= 0.9;
+
+// label_counts[(int)((float*)results.data)[0]] *= 0.9;
+ label_counts[(int)((float*)results.data)[0]] += 0.1;
+ Point maxLoc;
+ minMaxLoc(lc, NULL, NULL, NULL, &maxLoc);
+ int res = maxLoc.y;
+
+ stringstream ss; ss << "prediction: ";
+ if (res == LABEL_OPEN) {
+ ss << "Open hand";
+ }
+ if (res == LABEL_FIST) {
+ ss << "Fist";
+ }
+ if (res == LABEL_THUMB) {
+ ss << "Thumb";
+ }
+ if (res == LABEL_GARBAGE) {
+ ss << "Garbage";
+ }
+ putText(outC, ss.str(), Point(20,50), CV_FONT_HERSHEY_PLAIN, 3.0, Scalar(0,0,255), 2);
+ }
+
+ imshow("blobs", outC);
+
+ // --- keyboard handling: labeling, training, persistence ---
+ char k = cvWaitKey(5);
+ if( k == 27 ){
+ break;
+ }
+ if( k == 8 ) {
+ std::ostringstream file;
+ file << filename << i_snap << suffix;
+ cv::imwrite(file.str(),rgbMat);
+ i_snap++;
+ }
+ if (k == 'g') {
+ //put into training as 'garbage'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_GARBAGE);
+ cout << "learn grabage" << endl;
+ }
+ if(k == 'o') {
+ //put into training as 'open'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_OPEN);
+ cout << "learn open" << endl;
+ }
+ if(k == 'f') {
+ //put into training as 'fist'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_FIST);
+ cout << "learn fist" << endl;
+ }
+ if(k == 'h') {
+ //put into training as 'thumb'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_THUMB);
+ cout << "learn thumb" << endl;
+ }
+ if (k=='t') {
+ //train model
+ cout << "train model" << endl;
+ // If data wasn't loaded from disk, pack the in-memory samples into
+ // a row-per-descriptor matrix first.
+ if(loaded != true) {
+ dataMat = Mat(training_data.size(),_d.size(),CV_32FC1); //descriptors as matrix rows
+ for (uint i=0; i<training_data.size(); i++) {
+ Mat v = dataMat(Range(i,i+1),Range::all());
+ Mat(Mat(training_data[i]).t()).convertTo(v,CV_32FC1,1.0);
+ }
+ Mat(label_data).convertTo(labelMat,CV_32FC1);
+ }
+
+ // Reduce to 15 principal components, then train k-NN on the projection.
+ pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,15);
+ Mat dataAfterPCA;
+ pca.project(dataMat,dataAfterPCA);
+
+ classifier.train(&((CvMat)dataAfterPCA), &((CvMat)labelMat));
+
+ trained = true;
+ }
+// if(k=='p' && trained) {
+// //predict
+// Mat results(1,1,CV_32FC1);
+// Mat samples(1,64,CV_32FC1); Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
+// classifier.find_nearest(&((CvMat)samples), 1, &((CvMat)results));
+// cout << "prediction: " << ((float*)results.data)[0] << endl;
+// }
+ if(k=='s') {
+ cout << "save training data" << endl;
+// classifier.save("knn-classifier-open-fist-thumb.yaml"); //not implemented
+ dataMat = Mat(training_data.size(),_d.size(),CV_32FC1); //descriptors as matrix rows
+ for (uint i=0; i<training_data.size(); i++) {
+ Mat v = dataMat(Range(i,i+1),Range::all());
+ Mat(Mat(training_data[i]).t()).convertTo(v,CV_32FC1,1.0);
+ }
+ Mat(label_data).convertTo(labelMat,CV_32FC1);
+
+ FileStorage fs;
+ fs.open("data-samples-labels.yaml", CV_STORAGE_WRITE);
+ if (fs.isOpened()) {
+ fs << "samples" << dataMat;
+ fs << "labels" << labelMat;
+ loaded = true;
+ fs.release();
+ } else {
+ cerr << "can't open saved data" << endl;
+ }
+ }
+ if(k=='l') {
+ FileStorage fs;
+ fs.open("data-samples-labels.yaml", CV_STORAGE_READ);
+ if (fs.isOpened()) {
+ fs["samples"] >> dataMat;
+ fs["labels"] >> labelMat;
+ loaded = true;
+ fs.release();
+ } else {
+ cerr << "can't open saved data" << endl;
+ }
+ }
+ }
+
+ device.stopVideo();
+ device.stopDepth();
+ return 0;
+}
View
104 webkit-plugin-mac/FreenectDevice.h
@@ -0,0 +1,104 @@
+/*
+ * FreenectDevice.h
+ * webkit-plugin-mac (copied from NewCV/FreenectDevice.h)
+ *
+ * Created by Roy Shilkrot on 3/3/11.
+ * Copyright 2011 MIT. All rights reserved.
+ *
+ */
+#include "libfreenect.hpp"
+#include <pthread.h>
+#include <cv.hpp>
+#include <highgui.h>
+#include <ml.h>
+
+using namespace cv;
+
+#include <iostream>
+using namespace std;
+
/**
 * Thin RAII wrapper around a POSIX mutex.
 *
 * Fixes over the original:
 *  - the destructor now releases the OS mutex with pthread_mutex_destroy()
 *    (the original never destroyed it, leaking the mutex resources);
 *  - copying is disallowed via declared-but-undefined copy operations
 *    (pre-C++11 idiom, matching this file's vintage) -- copying a
 *    pthread_mutex_t handle is undefined behavior.
 *
 * lock()/unlock() intentionally ignore pthread return codes, exactly as
 * the original did.
 */
class Mutex {
public:
	Mutex() {
		pthread_mutex_init( &m_mutex, NULL );
	}
	~Mutex() {
		// Release the underlying OS mutex; missing in the original (leak).
		pthread_mutex_destroy( &m_mutex );
	}
	void lock() {
		pthread_mutex_lock( &m_mutex );
	}
	void unlock() {
		pthread_mutex_unlock( &m_mutex );
	}
private:
	// Non-copyable: declared, never defined.
	Mutex( const Mutex& );
	Mutex& operator=( const Mutex& );

	pthread_mutex_t m_mutex;
};
+
+// Kinect device wrapper: receives RGB and depth frames on the libfreenect
+// thread and hands them to the application thread as OpenCV Mats.
+// Thread-safety: VideoCallback/DepthCallback run on the freenect event
+// thread while getVideo/getDepth run on the caller's thread; each stream
+// is guarded by its own Mutex, and the m_new_*_frame flags signal that a
+// fresh frame is available.
+class MyFreenectDevice : public Freenect::FreenectDevice {
+public:
+ MyFreenectDevice(freenect_context *_ctx, int _index)
+ : Freenect::FreenectDevice(_ctx, _index), m_buffer_depth(FREENECT_DEPTH_11BIT_SIZE),m_buffer_rgb(FREENECT_VIDEO_RGB_SIZE), m_gamma(2048), m_new_rgb_frame(false), m_new_depth_frame(false),
+ depthMat(Size(640,480),CV_16UC1), rgbMat(Size(640,480),CV_8UC3,Scalar(0)), ownMat(Size(640,480),CV_8UC3,Scalar(0))
+ {
+ // Precompute a gamma curve over the 11-bit (0..2047) depth range.
+ // NOTE(review): m_gamma is filled here but never read in this class --
+ // presumably kept for depth visualization elsewhere; confirm before removing.
+ for( unsigned int i = 0 ; i < 2048 ; i++) {
+ float v = i/2048.0;
+ v = std::pow(v, 3)* 6;
+ m_gamma[i] = v*6*256;
+ }
+ }
+ // Do not call directly even in child
+ void VideoCallback(void* _rgb, uint32_t timestamp) {
+// std::cout << "RGB callback" << std::endl;
+ m_rgb_mutex.lock();
+ uint8_t* rgb = static_cast<uint8_t*>(_rgb);
+ // Shallow re-point: rgbMat wraps libfreenect's internal buffer without
+ // copying. NOTE(review): the Mat dangles if the library recycles the
+ // buffer before getVideo() copies it out -- verify buffer lifetime.
+ rgbMat.data = rgb;
+ m_new_rgb_frame = true;
+ m_rgb_mutex.unlock();
+ };
+ // Do not call directly even in child
+ void DepthCallback(void* _depth, uint32_t timestamp) {
+// std::cout << "Depth callback" << std::endl;
+ m_depth_mutex.lock();
+ uint16_t* depth = static_cast<uint16_t*>(_depth);
+ // Same shallow-wrap caveat as VideoCallback, for the 16-bit depth buffer.
+ depthMat.data = (uchar*) depth;
+ m_new_depth_frame = true;
+ m_depth_mutex.unlock();
+ }
+
+ // Copies the latest RGB frame into `output` (converted RGB->BGR for
+ // OpenCV display). Returns false if no new frame arrived since the
+ // last call; `output` is untouched in that case.
+ bool getVideo(Mat& output) {
+ m_rgb_mutex.lock();
+ if(m_new_rgb_frame) {
+ cv::cvtColor(rgbMat, output, CV_RGB2BGR);
+ m_new_rgb_frame = false;
+ m_rgb_mutex.unlock();
+ return true;
+ } else {
+ m_rgb_mutex.unlock();
+ return false;
+ }
+ }
+
+ // Deep-copies the latest 16-bit depth frame into `output`.
+ // Returns false (leaving `output` untouched) if no new frame is pending.
+ bool getDepth(Mat& output) {
+ m_depth_mutex.lock();
+ if(m_new_depth_frame) {
+ depthMat.copyTo(output);
+ m_new_depth_frame = false;
+ m_depth_mutex.unlock();
+ return true;
+ } else {
+ m_depth_mutex.unlock();
+ return false;
+ }
+ }
+
+private:
+ // NOTE(review): m_buffer_depth / m_buffer_rgb are allocated but unused;
+ // frames are wrapped in the Mats above instead.
+ std::vector<uint8_t> m_buffer_depth;
+ std::vector<uint8_t> m_buffer_rgb;
+ std::vector<uint16_t> m_gamma;
+ Mat depthMat;
+ Mat rgbMat;
+ Mat ownMat;
+ Mutex m_rgb_mutex;
+ Mutex m_depth_mutex;
+ bool m_new_rgb_frame;
+ bool m_new_depth_frame;
+};
+
View
377 webkit-plugin-mac/gesture_engine.cpp
@@ -0,0 +1,377 @@
+#include "FreenectDevice.h"
+
+// Segments `mask` into blobs and picks the most plausible "hand" blob.
+// (Duplicate of refineSegments in new_cv/NewCV/main.cpp, renamed with a
+// leading underscore to avoid a link-time clash.)
+//
+// Pipeline: blur + threshold the mask, find external contours, then score
+// each contour by area weighted by proximity to the previously-tracked
+// blob center (`previous`).
+//
+// Outputs:
+//  dst            - binary image with the winning blob filled.
+//  contour        - points of the winning (weighted-largest) contour.
+//  second_contour - points of the largest remaining contour, if any.
+//  previous       - updated in place to the winner's centroid.
+// Returns a Scalar packed as [centroid_x, centroid_y, largest_area,
+// second_area], or Scalar(-1,-1) when no contour was found.
+Scalar _refineSegments(const Mat& img,
+ Mat& mask,
+ Mat& dst,
+ vector<Point>& contour,
+ vector<Point>& second_contour,
+ Point2i& previous)
+{
+ // int niters = 3;
+
+ vector<vector<Point> > contours;
+ vector<Vec4i> hierarchy;
+
+ Mat temp;
+
+ // Smooth then threshold: keeps only solid regions of the mask.
+ blur(mask, temp, Size(11,11));
+ temp = temp > 85.0;
+
+ findContours( temp, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
+
+ if(dst.data==NULL)
+ dst = Mat::zeros(img.size(), CV_8UC1);
+ else
+ dst.setTo(Scalar(0));
+
+ if( contours.size() == 0 )
+ return Scalar(-1,-1);
+
+ // iterate through all the top-level contours,
+ // draw each connected component with its own random color
+ int idx = 0, largestComp = -1, secondlargest = -1;
+ double maxWArea = 0, maxJArea = 0;
+ vector<double> justarea(contours.size());
+ vector<double> weightedarea(contours.size());
+
+ // for( ; idx >= 0; idx = hierarchy[idx][0] )
+ for (; idx<contours.size(); idx++)
+ {
+ const vector<Point>& c = contours[idx];
+ Scalar _mean = mean(Mat(contours[idx]));
+ justarea[idx] = fabs(contourArea(Mat(c)));
+ // NOTE: "previous.x >- 1" parses as "previous.x > -1", i.e. only
+ // apply the distance penalty once a previous blob center exists.
+ weightedarea[idx] = fabs(contourArea(Mat(c))) /
+ ((previous.x >- 1) ? (1.0 + norm(Point(_mean[0],_mean[1])-previous)) : 1.0); //consider distance from last blob
+ }
+ // Winner: largest distance-weighted area.
+ for (idx = 0; idx<contours.size(); idx++) {
+ if( weightedarea[idx] > maxWArea )
+ {
+ maxWArea = weightedarea[idx];
+ largestComp = idx;
+ }
+ }
+ // Runner-up: largest raw area excluding the winner.
+ for (idx = 0; idx < contours.size(); idx++) {
+ if ( justarea[idx] > maxJArea && idx != largestComp ) {
+ maxJArea = justarea[idx];
+ secondlargest = idx;
+ }
+ }
+
+ Scalar color( 255 );
+ // cout << "largest cc " << largestComp << endl;
+ // drawContours( dst, contours, largestComp, color, CV_FILLED); //, 8, hierarchy );
+ // for (idx=0; idx<contours[largestComp].size()-1; idx++) {
+ // line(dst, contours[largestComp][idx], contours[largestComp][idx+1], color, 2);
+ //
+ if(largestComp >= 0) {
+ int num = contours[largestComp].size();
+ Point* pts = &(contours[largestComp][0]);
+ fillPoly(dst, (const Point**)(&pts), &num, 1, color);
+
+ // Pack result: [0],[1] = centroid; [2] = winner's raw area.
+ Scalar b = mean(Mat(contours[largestComp]));
+ b[2] = justarea[largestComp];
+
+ contour.clear();
+ contour = contours[largestComp];
+
+ second_contour.clear();
+ if(secondlargest >= 0) {
+ second_contour = contours[secondlargest];
+ b[3] = maxJArea;
+ }
+
+ // Persist the centroid for the next frame's distance weighting.
+ previous.x = b[0]; previous.y = b[1];
+ return b;
+ } else
+ return Scalar(-1,-1);
+
+}
+
+#define LABEL_GARBAGE 0
+#define LABEL_OPEN 1
+#define LABEL_FIST 2
+#define LABEL_THUMB 3
+
+
+// Gesture-recognition engine as a pthread-style entry point
+// (void* (*)(void*)), near-duplicate of main() in new_cv/NewCV/main.cpp
+// minus the snapshot key. NOTE(review): gesture_engine.hpp declares this
+// as `int gesture_engine(void*)` -- the declaration must be fixed to
+// match this definition.
+//
+// Loop: grab RGB+depth, normalize/inpaint depth, segment the nearest blob
+// (the hand), build a log-polar grid-count descriptor, and classify it
+// via PCA + k-NN once trained.
+// Keys: ESC quit; g/o/f/h label garbage/open/fist/thumb; t train;
+// s save; l load.
+void* gesture_engine(void* _arg) {
+ bool die(false);
+
+ Mat depthMat(Size(640,480),CV_16UC1);
+ Mat depthf (Size(640,480),CV_8UC1);
+ Mat rgbMat(Size(640,480),CV_8UC3,Scalar(0));
+ Mat ownMat(Size(640,480),CV_8UC3,Scalar(0));
+
+ Freenect::Freenect<MyFreenectDevice> freenect;
+ MyFreenectDevice& device = freenect.createDevice(0);
+
+ bool registered = false;
+ Mat blobMaskOutput = Mat::zeros(Size(640,480),CV_8UC1),outC;
+ Point midBlob;
+
+ //descriptor parameters
+ int startX = 250, sizeX = 150, num_x_reps = 10, num_y_reps = 10;
+ double height_over_num_y_reps = 480/num_y_reps,
+ width_over_num_x_reps = sizeX/num_x_reps;
+
+
+ vector<double> _d(num_x_reps * num_y_reps); //the descriptor
+ Mat descriptorMat(_d);
+
+ CvKNearest classifier;
+
+ vector<vector<double> > training_data;
+ vector<int> label_data;
+ PCA pca;
+ Mat labelMat, dataMat;
+ // Exponentially-smoothed per-label vote counts (see the `trained` branch).
+ vector<float> label_counts(4);
+
+ bool trained = false, loaded = false;
+
+ device.startVideo();
+ device.startDepth();
+ while (!die) {
+ device.getVideo(rgbMat);
+ device.getDepth(depthMat);
+
+ //interpolation & inpainting
+ {
+ Mat _tmp,_tmp1; // = (depthMat - 400.0); //minimum observed value is ~440. so shift a bit
+ Mat(depthMat - 400.0).convertTo(_tmp1,CV_64FC1);
+ // NOTE(review): _tmp has no allocated data here, so this setTo looks
+ // like a no-op; the far-plane cutoff may never be applied -- verify.
+ _tmp.setTo(Scalar(2048), depthMat > 750.0); //cut off at 600 to create a "box" where the user interacts
+
+ // Normalize depth to 8-bit, then inpaint invalid (=255) pixels on a
+ // downscaled copy for speed and upsample the result back.
+ Point minLoc; double minval,maxval;
+ minMaxLoc(_tmp1, &minval, &maxval, NULL, NULL);
+ _tmp1.convertTo(depthf, CV_8UC1, 255.0/maxval);
+
+ Mat small_depthf; resize(depthf,small_depthf,Size(),0.2,0.2);
+ cv::inpaint(small_depthf,(small_depthf == 255),_tmp1,5.0,INPAINT_TELEA);
+
+ resize(_tmp1, _tmp, depthf.size());
+ _tmp.copyTo(depthf, (depthf == 255));
+ }
+
+ cvtColor(depthf, outC, CV_GRAY2BGR);
+
+ Mat blobMaskInput = depthf < 120; //anything not white is "real" depth, TODO: inpainting invalid data
+ vector<Point> ctr,ctr2;
+
+ //closest point to the camera
+ Point minLoc; double minval,maxval;
+ minMaxLoc(depthf, &minval, &maxval, &minLoc, NULL, blobMaskInput);
+ circle(outC, minLoc, 5, Scalar(0,255,0), 3);
+
+ // Keep only pixels within a thin depth slice behind the closest point.
+ blobMaskInput = depthf < (minval + 18);
+
+ Scalar blb = _refineSegments(Mat(),blobMaskInput,blobMaskOutput,ctr,ctr2,midBlob); //find contours in the foreground, choose biggest
+ /////// blb :
+ //blb[0] = x, blb[1] = y, blb[2] = 1st blob size, blb[3] = 2nd blob size.
+ if(blb[0]>=0 && blb[2] > 500) { //1st blob detected, and is big enough
+ //cvtColor(depthf, outC, CV_GRAY2BGR);
+
+ Scalar mn,stdv;
+ meanStdDev(depthf,mn,stdv,blobMaskInput);
+
+ //cout << "min: " << minval << ", max: " << maxval << ", mean: " << mn[0] << endl;
+
+ //now refining blob by looking at the mean depth value it has...
+ // NOTE(review): uses mean + 0.5*stddev here, while main() in
+ // new_cv/NewCV/main.cpp uses mean + 1.0*stddev -- confirm which
+ // threshold is intended.
+ blobMaskInput = depthf < (mn[0] + stdv[0]*.5);
+
+ blb = _refineSegments(Mat(),blobMaskInput,blobMaskOutput,ctr,ctr2,midBlob);
+
+ imshow("blob", blobMaskOutput);
+
+ if(blb[0] >= 0 && blb[2] > 300) {
+ //draw contour
+ Scalar color(0,0,255);
+ for (int idx=0; idx<ctr.size()-1; idx++)
+ line(outC, ctr[idx], ctr[idx+1], color, 1);
+ line(outC, ctr[ctr.size()-1], ctr[0], color, 1);
+
+ if(ctr2.size() > 0) { //second blob detected
+ Scalar color2(255,0,255);
+ for (int idx=0; idx<ctr2.size()-1; idx++)
+ line(outC, ctr2[idx], ctr2[idx+1], color2, 2);
+ line(outC, ctr2[ctr2.size()-1], ctr2[0], color2, 2);
+ }
+
+ //blob center
+ circle(outC, Point(blb[0],blb[1]), 50, Scalar(255,0,0), 3);
+
+ // Edge map of the blob's depth, then log-polar unwarp around the
+ // blob center -> rotation/scale tolerant representation.
+ Mat blobDepth,blobEdge;
+ depthf.copyTo(blobDepth,blobMaskOutput);
+ Laplacian(blobDepth, blobEdge, 8);
+// equalizeHist(blobEdge, blobEdge);//just for visualization
+
+ Mat logPolar(depthf.size(),CV_8UC1);
+ cvLogPolar(&((IplImage)blobEdge), &((IplImage)logPolar), Point2f(blb[0],blb[1]), 80.0);
+
+// for (int i=0; i<num_x_reps+1; i++) {
+// //verical lines
+// line(logPolar, Point(startX+i*width_over_num_x_reps, 0), Point(startX+i*width_over_num_x_reps,479), Scalar(255), 2);
+// }
+// for(int i=0; i<num_y_reps+1; i++) {
+// //horizontal
+// line(logPolar, Point(startX, i*height_over_num_y_reps), Point(startX+sizeX,i*height_over_num_y_reps), Scalar(255), 2);
+// }
+
+ double total = 0.0;
+
+ //histogram
+ for (int i=0; i<num_x_reps; i++) {
+ for(int j=0; j<num_y_reps; j++) {
+ Mat part = logPolar(
+ Range(j*height_over_num_y_reps,(j+1)*height_over_num_y_reps),
+ Range(startX+i*width_over_num_x_reps,startX+(i+1)*width_over_num_x_reps)
+ );
+
+ int count = countNonZero(part); //TODO: use calcHist
+// part.setTo(Scalar(count/10.0)); //for debug: show the value in the image
+
+ // NOTE(review): index uses num_x_reps for both dimensions; only
+ // correct while num_x_reps == num_y_reps (both 10 here).
+ _d[i*num_x_reps + j] = count;
+ total += count;
+ }
+ }
+
+ // Normalize so the descriptor sums to 1 (descriptorMat shares _d's data).
+ descriptorMat = descriptorMat / total;
+
+ /*
+ Mat images[1] = {logPolar(Range(0,30),Range(0,30))};
+ int nimages = 1;
+ int channels[1] = {0};
+ int dims = 1;
+ float range_0[]={0,256};
+ float* ranges[] = { range_0 };
+ int histSize[1] = { 5 };
+
+ calcHist(, <#int nimages#>, <#const int *channels#>, <#const Mat mask#>, <#MatND hist#>, <#int dims#>, <#const int *histSize#>, <#const float **ranges#>, <#bool uniform#>, <#bool accumulate#>)
+ */
+
+// Mat _tmp(logPolar.size(),CV_8UC1);
+// cvLogPolar(&((IplImage)logPolar), &((IplImage)_tmp),Point2f(blb[0],blb[1]), 80.0, CV_WARP_INVERSE_MAP);
+// imshow("descriptor", _tmp);
+// imshow("logpolar", logPolar);
+ }
+ }
+
+ if(trained) {
+ // Project the live descriptor through the trained PCA and classify
+ // with 1-NN; votes are smoothed over frames via label_counts.
+ Mat results(1,1,CV_32FC1);
+ Mat samples; Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
+
+ Mat samplesAfterPCA = pca.project(samples);
+
+ classifier.find_nearest(&((CvMat)samplesAfterPCA), 1, &((CvMat)results));
+// ((float*)results.data)[0] = classifier.predict(&((CvMat)samples))->value;
+
+ // Decay all votes, bump the predicted label, report the max vote.
+ Mat lc(label_counts); lc *= 0.9;
+
+// label_counts[(int)((float*)results.data)[0]] *= 0.9;
+ label_counts[(int)((float*)results.data)[0]] += 0.1;
+ Point maxLoc;
+ minMaxLoc(lc, NULL, NULL, NULL, &maxLoc);
+ int res = maxLoc.y;
+
+ stringstream ss; ss << "prediction: ";
+ if (res == LABEL_OPEN) {
+ ss << "Open hand";
+ }
+ if (res == LABEL_FIST) {
+ ss << "Fist";
+ }
+ if (res == LABEL_THUMB) {
+ ss << "Thumb";
+ }
+ if (res == LABEL_GARBAGE) {
+ ss << "Garbage";
+ }
+ putText(outC, ss.str(), Point(20,50), CV_FONT_HERSHEY_PLAIN, 3.0, Scalar(0,0,255), 2);
+ }
+
+ imshow("blobs", outC);
+
+ // --- keyboard handling: labeling, training, persistence ---
+ char k = cvWaitKey(5);
+ if( k == 27 ){
+ break;
+ }
+ if (k == 'g') {
+ //put into training as 'garbage'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_GARBAGE);
+ cout << "learn grabage" << endl;
+ }
+ if(k == 'o') {
+ //put into training as 'open'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_OPEN);
+ cout << "learn open" << endl;
+ }
+ if(k == 'f') {
+ //put into training as 'fist'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_FIST);
+ cout << "learn fist" << endl;
+ }
+ if(k == 'h') {
+ //put into training as 'thumb'
+ training_data.push_back(_d);
+ label_data.push_back(LABEL_THUMB);
+ cout << "learn thumb" << endl;
+ }
+ if (k=='t') {
+ //train model
+ cout << "train model" << endl;
+ // If data wasn't loaded from disk, pack the in-memory samples into
+ // a row-per-descriptor matrix first.
+ if(loaded != true) {
+ dataMat = Mat(training_data.size(),_d.size(),CV_32FC1); //descriptors as matrix rows
+ for (uint i=0; i<training_data.size(); i++) {
+ Mat v = dataMat(Range(i,i+1),Range::all());
+ Mat(Mat(training_data[i]).t()).convertTo(v,CV_32FC1,1.0);
+ }
+ Mat(label_data).convertTo(labelMat,CV_32FC1);
+ }
+
+ // Reduce to 15 principal components, then train k-NN on the projection.
+ pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,15);
+ Mat dataAfterPCA;
+ pca.project(dataMat,dataAfterPCA);
+
+ classifier.train(&((CvMat)dataAfterPCA), &((CvMat)labelMat));
+
+ trained = true;
+ }
+ if(k=='s') {
+ cout << "save training data" << endl;
+// classifier.save("knn-classifier-open-fist-thumb.yaml"); //not implemented
+ dataMat = Mat(training_data.size(),_d.size(),CV_32FC1); //descriptors as matrix rows
+ for (uint i=0; i<training_data.size(); i++) {
+ Mat v = dataMat(Range(i,i+1),Range::all());
+ Mat(Mat(training_data[i]).t()).convertTo(v,CV_32FC1,1.0);
+ }
+ Mat(label_data).convertTo(labelMat,CV_32FC1);
+
+ FileStorage fs;
+ fs.open("data-samples-labels.yaml", CV_STORAGE_WRITE);
+ if (fs.isOpened()) {
+ fs << "samples" << dataMat;
+ fs << "labels" << labelMat;
+ loaded = true;
+ fs.release();
+ } else {
+ cerr << "can't open saved data" << endl;
+ }
+ }
+ if(k=='l') {
+ FileStorage fs;
+ fs.open("data-samples-labels.yaml", CV_STORAGE_READ);
+ if (fs.isOpened()) {
+ fs["samples"] >> dataMat;
+ fs["labels"] >> labelMat;
+ loaded = true;
+ fs.release();
+ } else {
+ cerr << "can't open saved data" << endl;
+ }
+ }
+ }
+
+ device.stopVideo();
+ device.stopDepth();
+ return 0;
+}
View
17 webkit-plugin-mac/gesture_engine.hpp
@@ -0,0 +1,17 @@
+/*
+ * gesture_engine.hpp
+ * webkit-plugin-mac
+ *
+ * Created by Roy Shilkrot on 3/6/11.
+ * Copyright 2011 MIT. All rights reserved.
+ *
+ */
+
#ifndef _GESTURE_ENGINE_HPP
#define _GESTURE_ENGINE_HPP

// Gesture-recognition engine entry point, defined in gesture_engine.cpp.
//
// Fix: the definition is `void* gesture_engine(void* _arg)` (a
// pthread-style thread routine); the original declaration here said
// `int gesture_engine(void*)`, which mismatches the definition's return
// type and would break any caller compiled against this header.
void* gesture_engine(void* _arg);

#endif
View
158 webkit-plugin-mac/include/libfreenect.hpp
@@ -0,0 +1,158 @@
+/*
+ * This file is part of the OpenKinect Project. http://www.openkinect.org
+ *
+ * Copyright (c) 2010 individual OpenKinect contributors. See the CONTRIB file
+ * for details.
+ *
+ * This code is licensed to you under the terms of the Apache License, version
+ * 2.0, or, at your option, the terms of the GNU General Public License,
+ * version 2.0. See the APACHE20 and GPL2 files for the text of the licenses,
+ * or the following URLs:
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.gnu.org/licenses/gpl-2.0.txt
+ *
+ * If you redistribute this file in source form, modified or unmodified, you
+ * may:
+ * 1) Leave this header intact and distribute it under the same terms,
+ * accompanying it with the APACHE20 and GPL20 files, or
+ * 2) Delete the Apache 2.0 clause and accompany it with the GPL2 file, or
+ * 3) Delete the GPL v2 clause and accompany it with the APACHE20 file
+ * In all cases you must keep the copyright notice intact and include a copy
+ * of the CONTRIB file.
+ *
+ * Binary distributions must follow the binary distribution requirements of
+ * either License.
+ */
+
+#pragma once
+
+#include <libfreenect.h>
+#include <stdexcept>
+#include <map>
+#include <pthread.h>
+
+namespace Freenect {
+ // Base class that suppresses copying (pre-C++11 idiom): the copy
+ // constructor and copy assignment are declared private and never
+ // defined, so derived classes cannot be copied. Intended for use as a
+ // (private) base only; it has no virtual destructor and is never
+ // deleted polymorphically in this header.
+ class Noncopyable {
+ public:
+ Noncopyable() {}
+ ~Noncopyable() {}
+ private:
+ Noncopyable( const Noncopyable& );
+ const Noncopyable& operator=( const Noncopyable& );
+ };
+
+ // Snapshot of the Kinect motor/accelerometer state. Constructible only
+ // by FreenectDevice (private ctor + friend); wraps a raw tilt-state
+ // pointer owned by libfreenect, so instances must not outlive the
+ // device that produced them.
+ class FreenectTiltState {
+ friend class FreenectDevice;
+ FreenectTiltState(freenect_raw_tilt_state *_state):
+ m_state(_state), m_code(_state->tilt_status)
+ {}
+ public:
+ // Accelerometer reading converted to MKS units (m/s^2).
+ void getAccelerometers(double* x, double* y, double* z) {
+ freenect_get_mks_accel(m_state, x, y, z);
+ }
+ // Current motor tilt angle in degrees.
+ double getTiltDegs() {
+ return freenect_get_tilt_degs(m_state);
+ }
+ public:
+ freenect_tilt_status_code m_code;
+ private:
+ freenect_raw_tilt_state *m_state;
+ };
+
+ // RAII wrapper around one freenect_device (vendored OpenKinect code).
+ // Opens the device and registers the static trampolines that forward
+ // libfreenect's C callbacks to the virtual VideoCallback/DepthCallback
+ // overridden by subclasses (freenect_set_user stores `this`).
+ class FreenectDevice : Noncopyable {
+ public:
+ FreenectDevice(freenect_context *_ctx, int _index) {
+ if(freenect_open_device(_ctx, &m_dev, _index) < 0) throw std::runtime_error("Cannot open Kinect");
+ freenect_set_user(m_dev, this);
+ freenect_set_video_format(m_dev, FREENECT_VIDEO_RGB);
+ freenect_set_depth_format(m_dev, FREENECT_DEPTH_11BIT);
+ freenect_set_depth_callback(m_dev, freenect_depth_callback);
+ freenect_set_video_callback(m_dev, freenect_video_callback);
+ }
+ // NOTE(review): throwing from a destructor is dangerous -- if this
+ // runs during stack unwinding the program terminates. Vendored
+ // upstream behavior, kept as-is.
+ ~FreenectDevice() {
+ if(freenect_close_device(m_dev) < 0) throw std::runtime_error("Cannot shutdown Kinect");
+ }
+ void startVideo() {
+ if(freenect_start_video(m_dev) < 0) throw std::runtime_error("Cannot start RGB callback");
+ }
+ void stopVideo() {
+ if(freenect_stop_video(m_dev) < 0) throw std::runtime_error("Cannot stop RGB callback");
+ }
+ void startDepth() {
+ if(freenect_start_depth(m_dev) < 0) throw std::runtime_error("Cannot start depth callback");
+ }
+ void stopDepth() {
+ if(freenect_stop_depth(m_dev) < 0) throw std::runtime_error("Cannot stop depth callback");
+ }
+ void setTiltDegrees(double _angle) {
+ if(freenect_set_tilt_degs(m_dev, _angle) < 0) throw std::runtime_error("Cannot set angle in degrees");
+ }
+ void setLed(freenect_led_options _option) {
+ if(freenect_set_led(m_dev, _option) < 0) throw std::runtime_error("Cannot set led");
+ }
+ void updateState() {
+ if (freenect_update_tilt_state(m_dev) < 0) throw std::runtime_error("Cannot update device state");
+ }
+ FreenectTiltState getState() const {
+ return FreenectTiltState(freenect_get_tilt_state(m_dev));
+ }
+ // Do not call directly even in child
+ virtual void VideoCallback(void *video, uint32_t timestamp) = 0;
+ // Do not call directly even in child
+ virtual void DepthCallback(void *depth, uint32_t timestamp) = 0;
+ private:
+ freenect_device *m_dev;
+ // C-to-C++ trampolines: recover `this` from freenect_get_user and
+ // dispatch to the virtual callbacks. Run on the freenect event thread.
+ static void freenect_depth_callback(freenect_device *dev, void *depth, uint32_t timestamp) {
+ FreenectDevice* device = static_cast<FreenectDevice*>(freenect_get_user(dev));
+ device->DepthCallback(depth, timestamp);
+ }
+ static void freenect_video_callback(freenect_device *dev, void *video, uint32_t timestamp) {
+ FreenectDevice* device = static_cast<FreenectDevice*>(freenect_get_user(dev));
+ device->VideoCallback(video, timestamp);
+ }
+ };
+
+ // Owns the freenect context, the event-pump thread, and all created
+ // devices (vendored OpenKinect code). T must derive from FreenectDevice.
+ template <class T>class Freenect : Noncopyable {
+ public:
+ Freenect() : m_stop(false) {
+ if(freenect_init(&m_ctx, NULL) < 0) throw std::runtime_error("Cannot initialize freenect library");
+ // Spawn the event-pump thread immediately; it runs operator().
+ if(pthread_create(&m_thread, NULL, pthread_callback, (void*)this) != 0) throw std::runtime_error("Cannot initialize freenect thread");
+ }
+ ~Freenect() {
+ // NOTE(review): devices are deleted *before* m_stop is set and the
+ // thread joined, so freenect_process_events may still dispatch
+ // callbacks into destroyed devices during shutdown -- looks like a
+ // teardown race; confirm against upstream.
+ for(typename std::map<int, T*>::iterator it = m_devices.begin() ; it != m_devices.end() ; ++it) {
+ delete it->second;
+ }
+ m_stop = true;
+ pthread_join(m_thread, NULL);
+ if(freenect_shutdown(m_ctx) < 0) throw std::runtime_error("Cannot cleanup freenect library");
+ }
+ // Creates (or re-creates) device _index; ownership stays with this
+ // object, callers get a reference.
+ // NOTE(review): explicit template args on std::make_pair are rejected
+ // by C++11 and later compilers; fine under C++03 only.
+ T& createDevice(int _index) {
+ m_devices.insert(std::make_pair<int, T*>(_index, new T(m_ctx, _index)));
+ return *(m_devices[_index]);
+ }
+ // NOTE(review): erases the map entry but never `delete`s the T* --
+ // leaks the device object; verify against upstream.
+ void deleteDevice(int _index) {
+ m_devices.erase(_index);
+ }
+ int deviceCount() {
+ return freenect_num_devices(m_ctx);
+ }
+ // Do not call directly, thread runs here
+ void operator()() {
+ while(!m_stop) {
+ if(freenect_process_events(m_ctx) < 0) throw std::runtime_error("Cannot process freenect events");
+ }
+ }
+ // pthread entry: unpack `this` and enter the event loop.
+ static void *pthread_callback(void *user_data) {
+ Freenect<T>* freenect = static_cast<Freenect<T>*>(user_data);
+ (*freenect)();
+ return NULL;
+ }
+ private:
+ freenect_context *m_ctx;
+ volatile bool m_stop;
+ pthread_t m_thread;
+ std::map<int, T*> m_devices;
+ };
+
+}
+
View
8 webkit-plugin-mac/webkit-plugin-mac.xcodeproj/project.pbxproj
@@ -20,6 +20,7 @@
938DB702130F43F0008C37B1 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 938DB701130F43F0008C37B1 /* Accelerate.framework */; };
93F690C71321789600F53A8A /* DLog.mm in Sources */ = {isa = PBXBuildFile; fileRef = 93F690C61321789600F53A8A /* DLog.mm */; };
93F691821321A45600F53A8A /* JavaScriptCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 93F691811321A45600F53A8A /* JavaScriptCore.framework */; };
+ D713093013235B55001594A0 /* gesture_engine.cpp in Sources */ = {isa = PBXBuildFile; fileRef = D713092E13235B55001594A0 /* gesture_engine.cpp */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -70,6 +71,9 @@
93F690C51321789600F53A8A /* DLog.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DLog.h; sourceTree = "<group>"; };
93F690C61321789600F53A8A /* DLog.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = DLog.mm; sourceTree = "<group>"; };
93F691811321A45600F53A8A /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = System/Library/Frameworks/JavaScriptCore.framework; sourceTree = SDKROOT; };
+ D713092E13235B55001594A0 /* gesture_engine.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = gesture_engine.cpp; sourceTree = "<group>"; };
+ D713092F13235B55001594A0 /* FreenectDevice.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FreenectDevice.h; sourceTree = "<group>"; };
+ D713093213235BCB001594A0 /* gesture_engine.hpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = gesture_engine.hpp; sourceTree = "<group>"; };
DD92D38A0106425D02CA0E72 /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = /System/Library/Frameworks/Cocoa.framework; sourceTree = "<absolute>"; };
/* End PBXFileReference section */
@@ -176,6 +180,9 @@
32DBCF9F0370C38200C91783 /* Other Sources */ = {
isa = PBXGroup;
children = (
+ D713093213235BCB001594A0 /* gesture_engine.hpp */,
+ D713092E13235B55001594A0 /* gesture_engine.cpp */,
+ D713092F13235B55001594A0 /* FreenectDevice.h */,
938564A1130E6EC0000F4333 /* ocv_freenect.mm */,
938564A2130E6EC0000F4333 /* ocv_freenect.hpp */,
938564A6130E6EE0000F4333 /* bg_fg_blobs.cpp */,
@@ -269,6 +276,7 @@
938564A3130E6EC0000F4333 /* ocv_freenect.mm in Sources */,
938564A8130E6EE0000F4333 /* bg_fg_blobs.cpp in Sources */,
93F690C71321789600F53A8A /* DLog.mm in Sources */,
+ D713093013235B55001594A0 /* gesture_engine.cpp in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

0 comments on commit 8ba62c9

Please sign in to comment.
Something went wrong with that request. Please try again.