Skip to content
This repository

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse code

gesture engine integrated, somehow registering is not working

  • Loading branch information...
commit 0d10884ba65b5856a99fc7f34c81953cc796902d 1 parent 6a58520
Roy Shilkrot authored and committed
22 new_cv/NewCV/main.cpp
@@ -140,8 +140,8 @@ int main(int argc, char **argv) {
140 140 device.getVideo(rgbMat);
141 141 device.getDepth(depthMat);
142 142 // cv::imshow("rgb", rgbMat);
143   -// depthMat.convertTo(depthf, CV_8UC1, 255.0/2048.0);
144   -// cv::imshow("depth",depthf);
  143 + depthMat.convertTo(depthf, CV_8UC1, 255.0/2048.0);
  144 + cv::imshow("depth",depthf);
145 145
146 146 //interpolation & inpainting
147 147 {
@@ -254,7 +254,7 @@ int main(int argc, char **argv) {
254 254 );
255 255
256 256 int count = countNonZero(part); //TODO: use calcHist
257   -// part.setTo(Scalar(count/10.0)); //for debug: show the value in the image
  257 + //part.setTo(Scalar(count/10.0)); //for debug: show the value in the image
258 258
259 259 _d[i*num_x_reps + j] = count;
260 260 total += count;
@@ -275,10 +275,10 @@ int main(int argc, char **argv) {
275 275 calcHist(, <#int nimages#>, <#const int *channels#>, <#const Mat mask#>, <#MatND hist#>, <#int dims#>, <#const int *histSize#>, <#const float **ranges#>, <#bool uniform#>, <#bool accumulate#>)
276 276 */
277 277
278   -// Mat _tmp(logPolar.size(),CV_8UC1);
279   -// cvLogPolar(&((IplImage)logPolar), &((IplImage)_tmp),Point2f(blb[0],blb[1]), 80.0, CV_WARP_INVERSE_MAP);
280   -// imshow("descriptor", _tmp);
281   -// imshow("logpolar", logPolar);
  278 + Mat _tmp(logPolar.size(),CV_8UC1);
  279 + cvLogPolar(&((IplImage)logPolar), &((IplImage)_tmp),Point2f(blb[0],blb[1]), 80.0, CV_WARP_INVERSE_MAP);
  280 + imshow("descriptor", _tmp);
  281 + imshow("logpolar", logPolar);
282 282 }
283 283 }
284 284
@@ -286,7 +286,7 @@ int main(int argc, char **argv) {
286 286 Mat results(1,1,CV_32FC1);
287 287 Mat samples; Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
288 288
289   - Mat samplesAfterPCA = pca.project(samples);
  289 + Mat samplesAfterPCA = samples; //pca.project(samples);
290 290
291 291 classifier.find_nearest(&((CvMat)samplesAfterPCA), 1, &((CvMat)results));
292 292 // ((float*)results.data)[0] = classifier.predict(&((CvMat)samples))->value;
@@ -363,9 +363,9 @@ int main(int argc, char **argv) {
363 363 Mat(label_data).convertTo(labelMat,CV_32FC1);
364 364 }
365 365
366   - pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,15);
367   - Mat dataAfterPCA;
368   - pca.project(dataMat,dataAfterPCA);
  366 +// pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,15);
  367 + Mat dataAfterPCA = dataMat;
  368 +// pca.project(dataMat,dataAfterPCA);
369 369
370 370 classifier.train(&((CvMat)dataAfterPCA), &((CvMat)labelMat));
371 371
298 webkit-plugin-mac/gesture_engine.cpp
@@ -22,10 +22,11 @@
22 22 #define LABEL_FIST 2
23 23 #define LABEL_THUMB 3
24 24
  25 +extern void send_event(const string& etype, const string& edata);
  26 +
25 27 class GestureEngine {
26 28 private:
27   -
28   - bool die;
  29 + bool running;
29 30
30 31 Mat depthMat;
31 32 Mat depthf;
@@ -33,7 +34,7 @@ class GestureEngine {
33 34 Mat ownMat;
34 35
35 36 Freenect::Freenect<MyFreenectDevice> freenect;
36   - MyFreenectDevice& device;
  37 + MyFreenectDevice* device;
37 38
38 39 bool registered;
39 40 Mat blobMaskOutput;
@@ -58,6 +59,20 @@ class GestureEngine {
58 59
59 60 bool trained;
60 61 bool loaded;
  62 +
  63 + int mode;
  64 +
  65 + int register_ctr,register_secondbloc_ctr;
  66 +
  67 + Point2i appear; double appearTS;
  68 +
  69 + Point2i lastMove;
  70 +
  71 + int hcr_ctr;
  72 + vector<int> hc_stack;
  73 + int hc_stack_ptr;
  74 +
  75 + int pca_number_of_features;
61 76
62 77 Scalar _refineSegments(const Mat& img,
63 78 Mat& mask,
@@ -65,36 +80,59 @@ class GestureEngine {
65 80 vector<Point>& contour,
66 81 vector<Point>& second_contour,
67 82 Point2i& previous);
68   - void TrainModel();
  83 + int TrainModel();
69 84 void SaveModelData();
70   - void LoadModelData();
  85 + int LoadModelData(const char* filename);
71 86 void InterpolateAndInpaint();
72 87 void ComputeDescriptor(Scalar);
  88 + string GetStringForGestureCode(int);
  89 + void CheckRegistered(Scalar,int);
  90 + int GetMostLikelyGesture();
73 91
74 92 public:
75   - GestureEngine(): die(false),
76   - depthMat(Mat(Size(640,480),CV_16UC1)),
77   - depthf(Mat(Size(640,480),CV_8UC1)),
78   - rgbMat(Mat(Size(640,480),CV_8UC3,Scalar(0))),
79   - ownMat(Mat(Size(640,480),CV_8UC3,Scalar(0))),
80   - device(freenect.createDevice(0)),
  93 + bool die;
  94 +
  95 + GestureEngine(): running(false),
81 96 registered(false),
82   - blobMaskOutput(Mat::zeros(Size(640,480),CV_8UC1)),
83 97 startX(250),
84 98 sizeX(150),
85 99 num_x_reps(10),
86 100 num_y_reps(10),
87 101 height_over_num_y_reps(480/num_y_reps),
88 102 width_over_num_x_reps(sizeX/num_x_reps),
89   - _d(vector<double>(num_x_reps * num_y_reps)),
90   - descriptorMat(Mat(_d)),
91 103 label_counts(vector<float>(4)),
92 104 trained(false),
93   - loaded(false)
  105 + loaded(false),
  106 + die(false),
  107 + mode(LABEL_GARBAGE),
  108 + pca_number_of_features(15)
94 109 {
  110 + depthMat = Mat(Size(640,480),CV_16UC1);
  111 + depthf = Mat(Size(640,480),CV_8UC1);
  112 + rgbMat = Mat(Size(640,480),CV_8UC3,Scalar(0));
  113 + ownMat = Mat(Size(640,480),CV_8UC3,Scalar(0));
  114 + blobMaskOutput = Mat(Size(640,480),CV_8UC1,Scalar(0));
  115 +
  116 + _d = vector<double>(num_x_reps*num_y_reps);
  117 + descriptorMat = Mat(_d);
  118 +
  119 + register_ctr = register_secondbloc_ctr = 0;
  120 + registered = false;
  121 +
  122 + appear = Point2i(-1,-1);
  123 + appearTS = -1;
  124 +
  125 + midBlob = Point2i(-1,-1);
  126 + lastMove = Point2i(-1,-1);
  127 +
  128 + hcr_ctr = -1;
  129 + hc_stack = vector<int>(20);
  130 + hc_stack_ptr = 0;
95 131 };
96 132
97 133 void RunEngine();
  134 + bool getRunning() { return running; }
  135 + int InitializeFreenect(const char* );
98 136 };
99 137
100 138 Scalar GestureEngine::_refineSegments(const Mat& img,
@@ -184,7 +222,7 @@ Scalar GestureEngine::_refineSegments(const Mat& img,
184 222
185 223 }
186 224
187   -void GestureEngine::TrainModel() {
  225 +int GestureEngine::TrainModel() {
188 226 cout << "train model" << endl;
189 227 if(loaded != true) {
190 228 dataMat = Mat(training_data.size(),_d.size(),CV_32FC1); //descriptors as matrix rows
@@ -195,13 +233,20 @@ void GestureEngine::TrainModel() {
195 233 Mat(label_data).convertTo(labelMat,CV_32FC1);
196 234 }
197 235
198   - pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,15);
199   - Mat dataAfterPCA;
200   - pca.project(dataMat,dataAfterPCA);
201   -
202   - classifier.train(&((CvMat)dataAfterPCA), &((CvMat)labelMat));
  236 + try {
  237 + pca = pca(dataMat,Mat(),CV_PCA_DATA_AS_ROW,pca_number_of_features);
  238 + Mat dataAfterPCA;
  239 + pca.project(dataMat,dataAfterPCA);
  240 +
  241 + classifier.train(&((CvMat)dataAfterPCA), &((CvMat)labelMat));
  242 +
  243 + trained = true;
  244 + } catch (cv::Exception e) {
  245 + cerr << "Can't train model: " << e.what();
  246 + return 0;
  247 + }
203 248
204   - trained = true;
  249 + return 1;
205 250 }
206 251
207 252 void GestureEngine::SaveModelData() {
@@ -226,9 +271,9 @@ void GestureEngine::SaveModelData() {
226 271 }
227 272 }
228 273
229   -void GestureEngine::LoadModelData() {
  274 +int GestureEngine::LoadModelData(const char* filename) {
230 275 FileStorage fs;
231   - fs.open("data-samples-labels.yaml", CV_STORAGE_READ);
  276 + fs.open(filename, CV_STORAGE_READ);
232 277 if (fs.isOpened()) {
233 278 fs["samples"] >> dataMat;
234 279 fs["labels"] >> labelMat;
@@ -236,14 +281,16 @@ void GestureEngine::LoadModelData() {
236 281 fs.release();
237 282 } else {
238 283 cerr << "can't open saved data" << endl;
  284 + return 0;
239 285 }
  286 + return 1;
240 287 }
241 288
242 289 void GestureEngine::InterpolateAndInpaint() {
243 290 //interpolation & inpainting
244 291 Mat _tmp,_tmp1; // = (depthMat - 400.0); //minimum observed value is ~440. so shift a bit
245 292 Mat(depthMat - 400.0).convertTo(_tmp1,CV_64FC1);
246   - _tmp.setTo(Scalar(2048), depthMat > 750.0); //cut off at 600 to create a "box" where the user interacts
  293 +// _tmp1.setTo(Scalar(2048-400.0), depthMat > 750.0); //cut off at 600 to create a "box" where the user interacts
247 294
248 295 Point minLoc; double minval,maxval;
249 296 minMaxLoc(_tmp1, &minval, &maxval, NULL, NULL);
@@ -313,13 +360,141 @@ void GestureEngine::ComputeDescriptor(Scalar blb) {
313 360
314 361 }
315 362
  363 +string GestureEngine::GetStringForGestureCode(int res) {
  364 + if (res == LABEL_OPEN) {
  365 + return "Open hand";
  366 + }
  367 + if (res == LABEL_FIST) {
  368 + return "Fist";
  369 + }
  370 + if (res == LABEL_THUMB) {
  371 + return "Thumb";
  372 + }
  373 + if (res == LABEL_GARBAGE) {
  374 + return "Garbage";
  375 + }
  376 + return "none";
  377 +}
  378 +
  379 +void GestureEngine::CheckRegistered(Scalar blb, int recognized_gesture) {
  380 + register_ctr = MIN((register_ctr + 1),60);
  381 +
  382 + if(blb[3] > 5000)
  383 + register_secondbloc_ctr = MIN((register_secondbloc_ctr + 1),60);
  384 +
  385 + if (register_ctr > 30 && !registered) {
  386 + registered = true;
  387 + appear.x = -1;
  388 + lastMove.x = blb[0]; lastMove.y = blb[1];
  389 +
  390 + cout << "blob size " << blb[2] << endl;
  391 +
  392 + if(register_secondbloc_ctr < 30) {
  393 + cout << "register pointer" << endl;
  394 + stringstream ss; ss << "\"mode\":\""<< GetStringForGestureCode(recognized_gesture) <<"\"";
  395 + send_event("Register", ss.str());
  396 + } else {
  397 + cout << "register tab swithcer" << endl;
  398 + send_event("Register", "\"mode\":\"twohands\"");
  399 + }
  400 + }
  401 +
  402 + if(registered) {
  403 + stringstream ss;
  404 + ss << "\"x\":" << (int)floor(blb[0]*100.0/640.0)
  405 + << ",\"y\":" << (int)floor(blb[1]*100.0/480.0)
  406 + << ",\"z\":" << 100; //(int)(mn[0] * 2.0);
  407 + //cout << "move: " << ss.str() << endl;
  408 + send_event("Move", ss.str());
  409 +
  410 + hc_stack.at(hc_stack_ptr) = hcr_ctr;
  411 + hc_stack_ptr = (hc_stack_ptr + 1) % hc_stack.size();
  412 +
  413 + //if thumb recognized - send "hand click"
  414 + if (recognized_gesture == LABEL_THUMB) {
  415 + cout << "Hand click!" << endl;
  416 + send_event("HandClick", "");
  417 + }
  418 + } else {
  419 + //not registered, look for gestures
  420 + if(appear.x<0) {
  421 + //first appearence of blob
  422 + appear = midBlob;
  423 + // update_bg_model = false;
  424 + appearTS = getTickCount();
  425 + cout << "appear ("<<appearTS<<") " << appear.x << "," << appear.y << endl;
  426 + } else {
  427 + //blob was seen before, how much time passed
  428 + double timediff = ((double)getTickCount()-appearTS)/getTickFrequency();
  429 + if (timediff > .2 && timediff < 1.0) {
  430 + //enough time passed from appearence
  431 + line(outC, appear, cv::Point(blb[0],blb[1]), Scalar(0,0,255), 3);
  432 + if (appear.x - blb[0] > 100) {
  433 + cout << "right"<<endl; appear.x = -1;
  434 + send_event("SwipeRight", "");
  435 + register_ctr = 0;
  436 + } else if (appear.x - blb[0] < -100) {
  437 + cout << "left" <<endl; appear.x = -1;
  438 + send_event("SwipeLeft", "");
  439 + register_ctr = 0;
  440 + } else if (appear.y - blb[1] > 100) {
  441 + cout << "up" << endl; appear.x = -1;
  442 + send_event("SwipeUp", "");
  443 + register_ctr = 0;
  444 + } else if (appear.y - blb[1] < -100) {
  445 + cout << "down" << endl; appear.x = -1;
  446 + send_event("SwipeDown", "");
  447 + register_ctr = 0;
  448 + }
  449 + }
  450 + if(timediff >= 1.0) {
  451 + cout << "a ghost..."<<endl;
  452 + //a second passed from appearence - reset 1st appear
  453 + appear.x = -1;
  454 + appearTS = -1;
  455 + midBlob.x = midBlob.y = -1;
  456 + }
  457 + }
  458 + }
  459 +// send_image(outC);
  460 +}
  461 +
  462 +int GestureEngine::InitializeFreenect(const char* data) {
  463 + try {
  464 + device = &freenect.createDevice(0);
  465 + device->startVideo();
  466 + device->startDepth();
  467 + }
  468 + catch (std::runtime_error e) {
  469 + return 0;
  470 + }
  471 + if(!LoadModelData(data)) return 0;
  472 + if(!TrainModel()) return 0;
  473 +
  474 + return 1;
  475 +}
  476 +
  477 +int GestureEngine::GetMostLikelyGesture() {
  478 + Mat results(1,1,CV_32FC1);
  479 + Mat samples; Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
  480 + Mat samplesAfterPCA = pca.project(samples);
  481 +
  482 + classifier.find_nearest(&((CvMat)samplesAfterPCA), 1, &((CvMat)results));
  483 +
  484 + Mat lc(label_counts); lc *= 0.9;
  485 + label_counts[(int)((float*)results.data)[0]] += 0.1;
  486 + Point maxLoc;
  487 + minMaxLoc(lc, NULL, NULL, NULL, &maxLoc);
  488 + return maxLoc.y;
  489 +}
  490 +
316 491 void GestureEngine::RunEngine() {
317   - device.startVideo();
318   - device.startDepth();
  492 +
  493 + running = true;
319 494
320 495 while (!die) {
321   - device.getVideo(rgbMat);
322   - device.getDepth(depthMat);
  496 + device->getVideo(rgbMat);
  497 + device->getDepth(depthMat);
323 498
324 499 InterpolateAndInpaint();
325 500
@@ -370,49 +545,27 @@ void GestureEngine::RunEngine() {
370 545 //blob center
371 546 circle(outC, Point(blb[0],blb[1]), 50, Scalar(255,0,0), 3);
372 547
373   - ComputeDescriptor(blb);
374   - }
375   - }
376   -
377   - if(trained) {
378   - Mat results(1,1,CV_32FC1);
379   - Mat samples; Mat(Mat(_d).t()).convertTo(samples,CV_32FC1);
380   -
381   - Mat samplesAfterPCA = pca.project(samples);
382   -
383   - classifier.find_nearest(&((CvMat)samplesAfterPCA), 1, &((CvMat)results));
384   -// ((float*)results.data)[0] = classifier.predict(&((CvMat)samples))->value;
385   -
386   - Mat lc(label_counts); lc *= 0.9;
387   -
388   -// label_counts[(int)((float*)results.data)[0]] *= 0.9;
389   - label_counts[(int)((float*)results.data)[0]] += 0.1;
390   - Point maxLoc;
391   - minMaxLoc(lc, NULL, NULL, NULL, &maxLoc);
392   - int res = maxLoc.y;
393   -
394   - stringstream ss; ss << "prediction: ";
395   - if (res == LABEL_OPEN) {
396   - ss << "Open hand";
397   - }
398   - if (res == LABEL_FIST) {
399   - ss << "Fist";
400   - }
401   - if (res == LABEL_THUMB) {
402   - ss << "Thumb";
403   - }
404   - if (res == LABEL_GARBAGE) {
405   - ss << "Garbage";
  548 + if(trained) {
  549 + ComputeDescriptor(blb);
  550 + int gesture_code = GetMostLikelyGesture();
  551 +
  552 + { //debug
  553 + stringstream ss; ss << "prediction: " << GetStringForGestureCode(gesture_code);
  554 + putText(outC, ss.str(), Point(20,50), CV_FONT_HERSHEY_PLAIN, 3.0, Scalar(0,0,255), 2);
  555 + }
  556 +
  557 + CheckRegistered(blb, gesture_code);
  558 + }
406 559 }
407   - putText(outC, ss.str(), Point(20,50), CV_FONT_HERSHEY_PLAIN, 3.0, Scalar(0,0,255), 2);
408 560 }
409   -
  561 +
410 562 imshow("blobs", outC);
411 563
412 564 char k = cvWaitKey(5);
413 565 if( k == 27 ){
414 566 break;
415 567 }
  568 + /*
416 569 if (k == 'g') {
417 570 //put into training as 'garbage'
418 571 training_data.push_back(_d);
@@ -446,16 +599,27 @@ void GestureEngine::RunEngine() {
446 599 if(k=='l') {
447 600 LoadModelData();
448 601 }
  602 + */
449 603 }
450 604
451   - device.stopVideo();
452   - device.stopDepth();
  605 + device->stopVideo();
  606 + device->stopDepth();
  607 +
  608 + running = false;
453 609 }
454 610
  611 +GestureEngine ge;
455 612
456 613 void* gesture_engine(void* _arg) {
457 614
458   - GestureEngine ge;
459 615 ge.RunEngine();
460 616
461 617 }
  618 +
  619 +void kill_gesture_engine() {
  620 + ge.die = true;
  621 +}
  622 +
  623 +bool is_gesture_engine_dead() { return !ge.getRunning(); }
  624 +
  625 +int init_gesture_engine(const char* data) { return ge.InitializeFreenect(data); }
4 webkit-plugin-mac/gesture_engine.hpp
@@ -27,6 +27,8 @@
27 27
28 28
29 29 int gesture_engine(void* _arg);
30   -
  30 +void kill_gesture_engine();
  31 +bool is_gesture_engine_dead();
  32 +int init_gesture_engine(const char* data);
31 33
32 34 #endif
4 webkit-plugin-mac/webkit-plugin-mac.xcodeproj/project.pbxproj
@@ -21,6 +21,7 @@
21 21 93F690C71321789600F53A8A /* DLog.mm in Sources */ = {isa = PBXBuildFile; fileRef = 93F690C61321789600F53A8A /* DLog.mm */; };
22 22 93F691821321A45600F53A8A /* JavaScriptCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 93F691811321A45600F53A8A /* JavaScriptCore.framework */; };
23 23 D713093013235B55001594A0 /* gesture_engine.cpp in Sources */ = {isa = PBXBuildFile; fileRef = D713092E13235B55001594A0 /* gesture_engine.cpp */; };
  24 + D7CC1ABA1325B7EC001FB6D2 /* data-samples-labels.yaml in Resources */ = {isa = PBXBuildFile; fileRef = D7CC1AB91325B7EC001FB6D2 /* data-samples-labels.yaml */; };
24 25 /* End PBXBuildFile section */
25 26
26 27 /* Begin PBXFileReference section */
@@ -74,6 +75,7 @@
74 75 D713092E13235B55001594A0 /* gesture_engine.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = gesture_engine.cpp; sourceTree = "<group>"; };
75 76 D713092F13235B55001594A0 /* FreenectDevice.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FreenectDevice.h; sourceTree = "<group>"; };
76 77 D713093213235BCB001594A0 /* gesture_engine.hpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = gesture_engine.hpp; sourceTree = "<group>"; };
  78 + D7CC1AB91325B7EC001FB6D2 /* data-samples-labels.yaml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; name = "data-samples-labels.yaml"; path = "../new_cv/NewCV/build/Debug/data-samples-labels.yaml"; sourceTree = SOURCE_ROOT; };
77 79 DD92D38A0106425D02CA0E72 /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = /System/Library/Frameworks/Cocoa.framework; sourceTree = "<absolute>"; };
78 80 /* End PBXFileReference section */
79 81
@@ -113,6 +115,7 @@
113 115 0259C582FE90428111CA0C5A /* Resources */ = {
114 116 isa = PBXGroup;
115 117 children = (
  118 + D7CC1AB91325B7EC001FB6D2 /* data-samples-labels.yaml */,
116 119 8D1AC9730486D14A00FE50C9 /* Info.plist */,
117 120 8D1AC97F0486D23B00FE50C9 /* InfoPlist.strings */,
118 121 );
@@ -246,6 +249,7 @@
246 249 files = (
247 250 8D1AC9800486D23B00FE50C9 /* InfoPlist.strings in Resources */,
248 251 938563E3130E61AF000F4333 /* Info.plist in Resources */,
  252 + D7CC1ABA1325B7EC001FB6D2 /* data-samples-labels.yaml in Resources */,
249 253 );
250 254 runOnlyForDeploymentPostprocessing = 0;
251 255 };
16 webkit-plugin-mac/webkit_plugin_macView.mm
@@ -10,7 +10,8 @@
10 10 #import <stdlib.h>
11 11 #import <string.h>
12 12 #import <JavaScriptCore/JavaScriptCore.h>
13   -#include "ocv_freenect.hpp"
  13 +//#include "ocv_freenect.hpp"
  14 +#include "gesture_engine.hpp"
14 15
15 16 // PRIVATE METHODS ---------------------------------------------------------------------------------
16 17
@@ -147,7 +148,8 @@ - (id)_initWithArguments:(NSDictionary *)newArguments {
147 148 - (void) ocvMainLoop {
148 149 ocvThread = [NSThread currentThread];
149 150 [ocvThread setName:@"ocvMainLoop"];
150   - ocvFreenectThread(NULL);
  151 +// ocvFreenectThread(NULL);
  152 + gesture_engine(NULL);
151 153 }
152 154
153 155 @end
@@ -209,8 +211,9 @@ - (void) InitDepthJS {
209 211 if (!haveInitDevice) {
210 212 DLog(@"[DepthJS] Device not yet init; initing");
211 213 hostPlugin = self;
212   - int failed = initFreenect();
213   - haveInitDevice = !failed;
  214 + NSString* pathToData = [[NSBundle bundleWithIdentifier:@"edu.mit.media.depthjs"] pathForResource:@"data-samples-labels" ofType:@"yaml"];
  215 + int success = init_gesture_engine([pathToData cStringUsingEncoding:NSASCIIStringEncoding]);
  216 + haveInitDevice = success;
214 217 if (haveInitDevice) {
215 218 DLog(@"[DepthJS] Successfully inited Kinect; Starting ocv thread");
216 219 [NSThread detachNewThreadSelector:@selector(ocvMainLoop) toTarget:self withObject:nil];
@@ -228,10 +231,11 @@ - (void) ShutdownDepthJS {
228 231 haveInitDevice = false;
229 232 if (hostPlugin == self) {
230 233 hostPlugin = NULL;
231   - killOcvFreenect();
  234 +// killOcvFreenect();
  235 + kill_gesture_engine();
232 236 if (ocvThread != nil) [ocvThread cancel];
233 237 ocvThread = nil;
234   - while (!isDead()) {
  238 + while (!is_gesture_engine_dead()) {
235 239 [NSThread sleepForTimeInterval:0.01];
236 240 }
237 241 }

0 comments on commit 0d10884

Please sign in to comment.
Something went wrong with that request. Please try again.