Skip to content

Commit

Permalink
Merge pull request #181 from luxonis/feature_tracker
Browse files Browse the repository at this point in the history
Feature tracking support
  • Loading branch information
SzabolcsGergely committed Aug 10, 2021
2 parents 986034e + 1a17cbb commit 43c2d15
Show file tree
Hide file tree
Showing 17 changed files with 680 additions and 4 deletions.
3 changes: 3 additions & 0 deletions CMakeLists.txt
Expand Up @@ -160,6 +160,7 @@ add_library(${TARGET_CORE_NAME}
src/pipeline/node/IMU.cpp
src/pipeline/node/EdgeDetector.cpp
src/pipeline/node/SPIIn.cpp
src/pipeline/node/FeatureTracker.cpp
src/pipeline/datatype/Buffer.cpp
src/pipeline/datatype/ImgFrame.cpp
src/pipeline/datatype/ImageManipConfig.cpp
Expand All @@ -175,6 +176,8 @@ add_library(${TARGET_CORE_NAME}
src/pipeline/datatype/IMUData.cpp
src/pipeline/datatype/StereoDepthConfig.cpp
src/pipeline/datatype/EdgeDetectorConfig.cpp
src/pipeline/datatype/TrackedFeatures.cpp
src/pipeline/datatype/FeatureTrackerConfig.cpp
src/utility/Initialization.cpp
src/utility/Resources.cpp
src/xlink/XLinkConnection.cpp
Expand Down
2 changes: 1 addition & 1 deletion cmake/Depthai/DepthaiDeviceSideConfig.cmake
Expand Up @@ -2,7 +2,7 @@
set(DEPTHAI_DEVICE_SIDE_MATURITY "snapshot")

# "full commit hash of device side binary"
set(DEPTHAI_DEVICE_SIDE_COMMIT "5a4001ab6618e113eb14ef5020a4773dce48e981")
set(DEPTHAI_DEVICE_SIDE_COMMIT "a858d35dbe1574fc02e590ae04524ade7b5bf0ba")

# "version if applicable"
set(DEPTHAI_DEVICE_SIDE_VERSION "")
4 changes: 4 additions & 0 deletions examples/CMakeLists.txt
Expand Up @@ -271,6 +271,10 @@ dai_add_example(bootloader_version src/bootloader_version.cpp ON)
#dai_add_example(flash_bootloader src/flash_bootloader.cpp OFF)
dai_add_example(edge_detector src/edge_detector.cpp ON)

dai_add_example(feature_tracker src/feature_tracker.cpp ON)

dai_add_example(corner_detector src/corner_detector.cpp ON)

# Calibration Read and write samples
dai_add_example(calibration_flash src/calibration_flash.cpp OFF)
dai_add_example(calibration_flash_version5 src/calibration_flash_v5.cpp OFF)
Expand Down
118 changes: 118 additions & 0 deletions examples/src/corner_detector.cpp
@@ -0,0 +1,118 @@
#include <iostream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
#include "deque"
#include "unordered_map"
#include "unordered_set"

static void drawFeatures(cv::Mat& frame, std::vector<dai::TrackedFeature>& features) {
static const auto pointColor = cv::Scalar(0, 0, 255);
static const int circleRadius = 2;
for(auto& feature : features) {
cv::circle(frame, cv::Point(feature.position.x, feature.position.y), circleRadius, pointColor, -1, cv::LINE_AA, 0);
}
}

// Corner-detector example: runs a FeatureTracker on both mono cameras with
// motion estimation disabled, so only detected corners are reported. Pressing
// 's' toggles between the Harris and Shi-Tomasi corner detectors at runtime.
int main() {
    using namespace std;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs: two mono cameras, each feeding a FeatureTracker
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto featureTrackerLeft = pipeline.create<dai::node::FeatureTracker>();
    auto featureTrackerRight = pipeline.create<dai::node::FeatureTracker>();

    auto xoutPassthroughFrameLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutTrackedFeaturesLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutPassthroughFrameRight = pipeline.create<dai::node::XLinkOut>();
    auto xoutTrackedFeaturesRight = pipeline.create<dai::node::XLinkOut>();
    auto xinTrackedFeaturesConfig = pipeline.create<dai::node::XLinkIn>();

    xoutPassthroughFrameLeft->setStreamName("passthroughFrameLeft");
    xoutTrackedFeaturesLeft->setStreamName("trackedFeaturesLeft");
    xoutPassthroughFrameRight->setStreamName("passthroughFrameRight");
    xoutTrackedFeaturesRight->setStreamName("trackedFeaturesRight");
    xinTrackedFeaturesConfig->setStreamName("trackedFeaturesConfig");

    // Properties
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    // Disable optical flow; this example only visualizes the detected corners
    featureTrackerLeft->initialConfig.setMotionEstimator(false);
    featureTrackerRight->initialConfig.setMotionEstimator(false);

    // Linking
    monoLeft->out.link(featureTrackerLeft->inputImage);
    featureTrackerLeft->passthroughInputImage.link(xoutPassthroughFrameLeft->input);
    featureTrackerLeft->outputFeatures.link(xoutTrackedFeaturesLeft->input);
    xinTrackedFeaturesConfig->out.link(featureTrackerLeft->inputConfig);

    monoRight->out.link(featureTrackerRight->inputImage);
    featureTrackerRight->passthroughInputImage.link(xoutPassthroughFrameRight->input);
    featureTrackerRight->outputFeatures.link(xoutTrackedFeaturesRight->input);
    xinTrackedFeaturesConfig->out.link(featureTrackerRight->inputConfig);

    // Local copy of the config, toggled at runtime and re-sent to the device
    auto featureTrackerConfig = featureTrackerRight->initialConfig.get();

    printf("Press 's' to switch between Harris and Shi-Tomasi corner detector! \n");

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Output queues used to receive the results
    auto passthroughImageLeftQueue = device.getOutputQueue("passthroughFrameLeft", 8, false);
    auto outputFeaturesLeftQueue = device.getOutputQueue("trackedFeaturesLeft", 8, false);
    auto passthroughImageRightQueue = device.getOutputQueue("passthroughFrameRight", 8, false);
    auto outputFeaturesRightQueue = device.getOutputQueue("trackedFeaturesRight", 8, false);

    auto inputFeatureTrackerConfigQueue = device.getInputQueue("trackedFeaturesConfig");

    const auto leftWindowName = "left";
    const auto rightWindowName = "right";

    while(true) {
        auto inPassthroughFrameLeft = passthroughImageLeftQueue->get<dai::ImgFrame>();
        cv::Mat passthroughFrameLeft = inPassthroughFrameLeft->getFrame();
        cv::Mat leftFrame;
        // Mono frames are grayscale; convert to BGR so features can be drawn in color
        cv::cvtColor(passthroughFrameLeft, leftFrame, cv::COLOR_GRAY2BGR);

        auto inPassthroughFrameRight = passthroughImageRightQueue->get<dai::ImgFrame>();
        cv::Mat passthroughFrameRight = inPassthroughFrameRight->getFrame();
        cv::Mat rightFrame;
        cv::cvtColor(passthroughFrameRight, rightFrame, cv::COLOR_GRAY2BGR);

        auto trackedFeaturesLeft = outputFeaturesLeftQueue->get<dai::TrackedFeatures>()->trackedFeatures;
        drawFeatures(leftFrame, trackedFeaturesLeft);

        auto trackedFeaturesRight = outputFeaturesRightQueue->get<dai::TrackedFeatures>()->trackedFeatures;
        drawFeatures(rightFrame, trackedFeaturesRight);

        // Show the frame
        cv::imshow(leftWindowName, leftFrame);
        cv::imshow(rightWindowName, rightFrame);

        int key = cv::waitKey(1);
        if(key == 'q') {
            break;
        } else if(key == 's') {
            // Toggle detector type and push the new config; both trackers share
            // the same XLinkIn config stream, so one send updates both.
            // Note: SHI_THOMASI is the spelling used by the depthai API enum.
            if(featureTrackerConfig.cornerDetector.type == dai::FeatureTrackerConfig::CornerDetector::Type::HARRIS) {
                featureTrackerConfig.cornerDetector.type = dai::FeatureTrackerConfig::CornerDetector::Type::SHI_THOMASI;
                printf("Switching to Shi-Tomasi \n");
            } else {
                featureTrackerConfig.cornerDetector.type = dai::FeatureTrackerConfig::CornerDetector::Type::HARRIS;
                printf("Switching to Harris \n");
            }
            auto cfg = dai::FeatureTrackerConfig();
            cfg.set(featureTrackerConfig);
            inputFeatureTrackerConfigQueue->send(cfg);
        }
    }
    return 0;
}
194 changes: 194 additions & 0 deletions examples/src/feature_tracker.cpp
@@ -0,0 +1,194 @@
#include <iostream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
#include "deque"
#include "unordered_map"
#include "unordered_set"

static const auto lineColor = cv::Scalar(200, 0, 200);  // BGR: magenta, used for feature path segments
static const auto pointColor = cv::Scalar(0, 0, 255);   // BGR: red, used for the current feature position

class FeatureTrackerDrawer {
private:
static const int circleRadius = 2;
static const int maxTrackedFeaturesPathLength = 30;
// for how many frames the feature is tracked
static int trackedFeaturesPathLength;

using featureIdType = decltype(dai::Point2f::x);

std::unordered_set<featureIdType> trackedIDs;
std::unordered_map<featureIdType, std::deque<dai::Point2f>> trackedFeaturesPath;

std::string trackbarName;
std::string windowName;

public:
void trackFeaturePath(std::vector<dai::TrackedFeature>& features) {
std::unordered_set<featureIdType> newTrackedIDs;
for(auto& currentFeature : features) {
auto currentID = currentFeature.id;
newTrackedIDs.insert(currentID);

if(!trackedFeaturesPath.count(currentID)) {
trackedFeaturesPath.insert({currentID, std::deque<dai::Point2f>()});
}
std::deque<dai::Point2f>& path = trackedFeaturesPath.at(currentID);

path.push_back(currentFeature.position);
while(path.size() > std::max(1, trackedFeaturesPathLength)) {
path.pop_front();
}
}

std::unordered_set<featureIdType> featuresToRemove;
for(auto& oldId : trackedIDs) {
if(!newTrackedIDs.count(oldId)) {
featuresToRemove.insert(oldId);
}
}

for(auto& id : featuresToRemove) {
trackedFeaturesPath.erase(id);
}

trackedIDs = newTrackedIDs;
}

void drawFeatures(cv::Mat& img) {
cv::setTrackbarPos(trackbarName.c_str(), windowName.c_str(), trackedFeaturesPathLength);

for(auto& featurePath : trackedFeaturesPath) {
std::deque<dai::Point2f>& path = featurePath.second;
int j = 0;
for(j = 0; j < path.size() - 1; j++) {
auto src = cv::Point(path[j].x, path[j].y);
auto dst = cv::Point(path[j + 1].x, path[j + 1].y);
cv::line(img, src, dst, lineColor, 1, cv::LINE_AA, 0);
}

cv::circle(img, cv::Point(path[j].x, path[j].y), circleRadius, pointColor, -1, cv::LINE_AA, 0);
}
}

FeatureTrackerDrawer(std::string trackbarName, std::string windowName) {
this->trackbarName = trackbarName;
this->windowName = windowName;
cv::namedWindow(windowName.c_str());
cv::createTrackbar(trackbarName.c_str(), windowName.c_str(), &trackedFeaturesPathLength, maxTrackedFeaturesPathLength, nullptr);
}
};

int FeatureTrackerDrawer::trackedFeaturesPathLength = 10;

// Feature-tracker example: tracks features on both mono cameras and draws the
// recent path of every feature. Pressing 's' toggles between Lucas-Kanade
// optical flow and the hardware-accelerated motion estimator at runtime.
int main() {
    using namespace std;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs: two mono cameras, each feeding a FeatureTracker
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto featureTrackerLeft = pipeline.create<dai::node::FeatureTracker>();
    auto featureTrackerRight = pipeline.create<dai::node::FeatureTracker>();

    auto xoutPassthroughFrameLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutTrackedFeaturesLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutPassthroughFrameRight = pipeline.create<dai::node::XLinkOut>();
    auto xoutTrackedFeaturesRight = pipeline.create<dai::node::XLinkOut>();
    auto xinTrackedFeaturesConfig = pipeline.create<dai::node::XLinkIn>();

    xoutPassthroughFrameLeft->setStreamName("passthroughFrameLeft");
    xoutTrackedFeaturesLeft->setStreamName("trackedFeaturesLeft");
    xoutPassthroughFrameRight->setStreamName("passthroughFrameRight");
    xoutTrackedFeaturesRight->setStreamName("trackedFeaturesRight");
    xinTrackedFeaturesConfig->setStreamName("trackedFeaturesConfig");

    // Properties
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    // Linking
    monoLeft->out.link(featureTrackerLeft->inputImage);
    featureTrackerLeft->passthroughInputImage.link(xoutPassthroughFrameLeft->input);
    featureTrackerLeft->outputFeatures.link(xoutTrackedFeaturesLeft->input);
    xinTrackedFeaturesConfig->out.link(featureTrackerLeft->inputConfig);

    monoRight->out.link(featureTrackerRight->inputImage);
    featureTrackerRight->passthroughInputImage.link(xoutPassthroughFrameRight->input);
    featureTrackerRight->outputFeatures.link(xoutTrackedFeaturesRight->input);
    xinTrackedFeaturesConfig->out.link(featureTrackerRight->inputConfig);

    // By default the least amount of resources are allocated;
    // increasing it improves performance when optical flow is enabled
    auto numShaves = 2;
    auto numMemorySlices = 2;
    featureTrackerLeft->setHardwareResources(numShaves, numMemorySlices);
    featureTrackerRight->setHardwareResources(numShaves, numMemorySlices);

    // Local copy of the config, toggled at runtime and re-sent to the device
    auto featureTrackerConfig = featureTrackerRight->initialConfig.get();

    printf("Press 's' to switch between Lucas-Kanade optical flow and hardware accelerated motion estimation! \n");

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Output queues used to receive the results (queue size 8, non-blocking)
    auto passthroughImageLeftQueue = device.getOutputQueue("passthroughFrameLeft", 8, false);
    auto outputFeaturesLeftQueue = device.getOutputQueue("trackedFeaturesLeft", 8, false);
    auto passthroughImageRightQueue = device.getOutputQueue("passthroughFrameRight", 8, false);
    auto outputFeaturesRightQueue = device.getOutputQueue("trackedFeaturesRight", 8, false);

    auto inputFeatureTrackerConfigQueue = device.getInputQueue("trackedFeaturesConfig");

    // One drawer per camera; both trackbars control the same shared path length
    const auto leftWindowName = "left";
    auto leftFeatureDrawer = FeatureTrackerDrawer("Feature tracking duration (frames)", leftWindowName);

    const auto rightWindowName = "right";
    auto rightFeatureDrawer = FeatureTrackerDrawer("Feature tracking duration (frames)", rightWindowName);

    while(true) {
        auto inPassthroughFrameLeft = passthroughImageLeftQueue->get<dai::ImgFrame>();
        cv::Mat passthroughFrameLeft = inPassthroughFrameLeft->getFrame();
        cv::Mat leftFrame;
        // Mono frames are grayscale; convert to BGR so paths can be drawn in color
        cv::cvtColor(passthroughFrameLeft, leftFrame, cv::COLOR_GRAY2BGR);

        auto inPassthroughFrameRight = passthroughImageRightQueue->get<dai::ImgFrame>();
        cv::Mat passthroughFrameRight = inPassthroughFrameRight->getFrame();
        cv::Mat rightFrame;
        cv::cvtColor(passthroughFrameRight, rightFrame, cv::COLOR_GRAY2BGR);

        // Update path history with the newest features, then render
        auto trackedFeaturesLeft = outputFeaturesLeftQueue->get<dai::TrackedFeatures>()->trackedFeatures;
        leftFeatureDrawer.trackFeaturePath(trackedFeaturesLeft);
        leftFeatureDrawer.drawFeatures(leftFrame);

        auto trackedFeaturesRight = outputFeaturesRightQueue->get<dai::TrackedFeatures>()->trackedFeatures;
        rightFeatureDrawer.trackFeaturePath(trackedFeaturesRight);
        rightFeatureDrawer.drawFeatures(rightFrame);

        // Show the frame
        cv::imshow(leftWindowName, leftFrame);
        cv::imshow(rightWindowName, rightFrame);

        int key = cv::waitKey(1);
        if(key == 'q') {
            break;
        } else if(key == 's') {
            // Toggle the motion estimator type and push the new config; both
            // trackers share the same XLinkIn config stream, so one send updates both
            if(featureTrackerConfig.motionEstimator.type == dai::FeatureTrackerConfig::MotionEstimator::Type::LUCAS_KANADE_OPTICAL_FLOW) {
                featureTrackerConfig.motionEstimator.type = dai::FeatureTrackerConfig::MotionEstimator::Type::HW_MOTION_ESTIMATION;
                printf("Switching to hardware accelerated motion estimation \n");
            } else {
                featureTrackerConfig.motionEstimator.type = dai::FeatureTrackerConfig::MotionEstimator::Type::LUCAS_KANADE_OPTICAL_FLOW;
                printf("Switching to Lucas-Kanade optical flow \n");
            }
            auto cfg = dai::FeatureTrackerConfig();
            cfg.set(featureTrackerConfig);
            inputFeatureTrackerConfigQueue->send(cfg);
        }
    }
    return 0;
}

0 comments on commit 43c2d15

Please sign in to comment.