diff --git a/README.md b/README.md
index 20a494eb8..c5b4c53aa 100644
--- a/README.md
+++ b/README.md
@@ -43,27 +43,28 @@ Make sure submodules are updated
 ```
 git submodule update --init --recursive
 ```
+ℹ️ For the `--parallel` argument of the commands below, specify a value of `[num CPU cores]` or less to reduce memory consumption during the build, e.g. `--parallel 8`

 **Static library**
 ```
 mkdir build && cd build
 cmake ..
-cmake --build . --parallel 8
+cmake --build . --parallel
 ```

 **Dynamic library**
 ```
 mkdir build && cd build
 cmake .. -D BUILD_SHARED_LIBS=ON
-cmake --build . --parallel 8
+cmake --build . --parallel
 ```

 ## Installing

 To install specify optional prefix and build target install
 ```
 cmake .. -D CMAKE_INSTALL_PREFIX=[path/to/install/dir]
-cmake --build . --parallel 8
-cmake --build . --target install --parallel 8
+cmake --build . --parallel
+cmake --build . --target install --parallel
 ```

 ## Running tests

@@ -72,7 +73,7 @@ To run the tests build the library with the following options
 ```
 mkdir build_tests && cd build_tests
 cmake .. -D DEPTHAI_TEST_EXAMPLES=ON -D DEPTHAI_BUILD_TESTS=ON -D DEPTHAI_BUILD_EXAMPLES=ON
-cmake --build . --parallel 8
+cmake --build . --parallel
 ctest
 ```
diff --git a/cmake/Depthai/DepthaiDeviceSideConfig.cmake b/cmake/Depthai/DepthaiDeviceSideConfig.cmake
index 11ef95434..0020832ed 100644
--- a/cmake/Depthai/DepthaiDeviceSideConfig.cmake
+++ b/cmake/Depthai/DepthaiDeviceSideConfig.cmake
@@ -2,7 +2,7 @@ set(DEPTHAI_DEVICE_SIDE_MATURITY "snapshot")

 # "full commit hash of device side binary"
-set(DEPTHAI_DEVICE_SIDE_COMMIT "b71584129b101b30e4632678c19d33b020c04c36")
+set(DEPTHAI_DEVICE_SIDE_COMMIT "3b51648b0d96c9391d878eede0fd5fd9b44084ca")

 # "version if applicable"
 set(DEPTHAI_DEVICE_SIDE_VERSION "")
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 794832f56..256ce9afa 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -130,6 +130,9 @@ dai_add_example(device_queue_event src/device_queue_event_example.cpp)
 # OpenCV support example
 dai_add_example(opencv_support src/opencv_support_example.cpp)

+# RGB-depth alignment example
+dai_add_example(rgb_depth_aligned src/rgb_depth_aligned_example.cpp)
+
 # Device side decoding example for mobilenet-ssd
 dai_add_example(mobilenet_device_side_decoding src/mobilenet_device_side_decoding_example.cpp)
 target_compile_definitions(mobilenet_device_side_decoding PRIVATE BLOB_PATH="${mobilenet_blob}")
diff --git a/examples/src/rgb_depth_aligned_example.cpp b/examples/src/rgb_depth_aligned_example.cpp
new file mode 100644
index 000000000..76cfb496c
--- /dev/null
+++ b/examples/src/rgb_depth_aligned_example.cpp
@@ -0,0 +1,110 @@
+#include <cstdio>
+#include <iostream>
+
+#include "utility.hpp"
+
+// Includes common necessary includes for development using depthai library
+#include "depthai/depthai.hpp"
+
+// Optional. If set (true), the ColorCamera is downscaled from 1080p to 720p.
+// Otherwise (false), the aligned depth is automatically upscaled to 1080p
+static constexpr bool downscaleColor = true;
+
+int main() {
+    using namespace std;
+
+    dai::Pipeline p;
+    std::vector<std::string> queueNames;
+
+    auto cam = p.create<dai::node::ColorCamera>();
+    cam->setBoardSocket(dai::CameraBoardSocket::RGB);
+    cam->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
+    if(downscaleColor) cam->setIspScale(2, 3);
+    // For now, RGB needs fixed focus to properly align with depth.
+    // This value was used during calibration
+    cam->initialControl.setManualFocus(130);
+
+    auto rgbOut = p.create<dai::node::XLinkOut>();
+    rgbOut->setStreamName("rgb");
+    queueNames.push_back("rgb");
+    cam->isp.link(rgbOut->input);
+
+    auto left = p.create<dai::node::MonoCamera>();
+    left->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
+    left->setBoardSocket(dai::CameraBoardSocket::LEFT);
+
+    auto right = p.create<dai::node::MonoCamera>();
+    right->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
+    right->setBoardSocket(dai::CameraBoardSocket::RIGHT);
+
+    auto stereo = p.create<dai::node::StereoDepth>();
+    stereo->setConfidenceThreshold(200);
+    // LR-check is required for depth alignment
+    stereo->setLeftRightCheck(true);
+    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
+    left->out.link(stereo->left);
+    right->out.link(stereo->right);
+
+    auto depthOut = p.create<dai::node::XLinkOut>();
+    depthOut->setStreamName("depth");
+    queueNames.push_back("depth");
+    // Currently we use the 'disparity' output. TODO 'depth'
+    stereo->disparity.link(depthOut->input);
+
+    // Connect to device
+    dai::Device d(p);
+    d.startPipeline();
+
+    // Set queue sizes and behavior
+    for(const auto& name : queueNames) {
+        d.getOutputQueue(name, 4, false);
+    }
+
+    std::unordered_map<std::string, cv::Mat> frame;
+
+    while(1) {
+        std::unordered_map<std::string, std::shared_ptr<dai::ImgFrame>> latestPacket;
+
+        auto queueEvents = d.getQueueEvents(queueNames);
+        for(const auto& name : queueEvents) {
+            auto packets = d.getOutputQueue(name)->tryGetAll<dai::ImgFrame>();
+            auto count = packets.size();
+            if(count > 0) {
+                latestPacket[name] = packets[count - 1];
+            }
+        }
+
+        for(const auto& name : queueNames) {
+            if(latestPacket.find(name) != latestPacket.end()) {
+                if(name == "depth") {
+                    frame[name] = latestPacket[name]->getFrame();
+                    // Optional, extend range 0..95 -> 0..255, for a better visualisation
+                    if(1) frame[name].convertTo(frame[name], CV_8UC1, 255. / 95);
+                    // Optional, apply false colorization
+                    if(1) cv::applyColorMap(frame[name], frame[name], cv::COLORMAP_HOT);
+                } else {
+                    frame[name] = latestPacket[name]->getCvFrame();
+                }
+
+                cv::imshow(name, frame[name]);
+            }
+        }
+
+        // Blend when both received
+        if(frame.find("rgb") != frame.end() && frame.find("depth") != frame.end()) {
+            // Need to have both frames in BGR format before blending
+            if(frame["depth"].channels() < 3) {
+                cv::cvtColor(frame["depth"], frame["depth"], cv::COLOR_GRAY2BGR);
+            }
+            cv::Mat blended;
+            cv::addWeighted(frame["rgb"], 0.6, frame["depth"], 0.4, 0, blended);
+            cv::imshow("rgb-depth", blended);
+            frame.clear();
+        }
+
+        int key = cv::waitKey(1);
+        if(key == 'q') {
+            return 0;
+        }
+    }
+}
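Note on the visualisation in the new example: the `255. / 95` factor stretches the default 0..95 disparity range to the full 8-bit range before color-mapping, as the inline comment says. A minimal standalone sketch of that step — the `colorizeDisparity` helper is hypothetical, and the hardcoded `maxDisparity` of 95 is an assumption tied to this configuration (it would need adjusting if extended-disparity or subpixel modes change the output range):

```cpp
#include <opencv2/opencv.hpp>

// Hypothetical helper mirroring the example's display path: stretch raw disparity
// to 8 bits and apply a false-color map. maxDisparity = 95 assumes the default
// disparity search range used in the example above.
cv::Mat colorizeDisparity(const cv::Mat& disparity, float maxDisparity = 95.0f) {
    cv::Mat vis;
    disparity.convertTo(vis, CV_8UC1, 255.0f / maxDisparity);  // extend 0..maxDisparity -> 0..255
    cv::applyColorMap(vis, vis, cv::COLORMAP_HOT);             // optional false colorization
    return vis;
}
```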
diff --git a/examples/src/spatial_location_calculator_example.cpp b/examples/src/spatial_location_calculator_example.cpp
index d0762ed75..294c33d93 100644
--- a/examples/src/spatial_location_calculator_example.cpp
+++ b/examples/src/spatial_location_calculator_example.cpp
@@ -35,14 +35,10 @@ int main() {
     monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
     monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

-    bool outputDepth = true;
-    bool outputRectified = false;
     bool lrcheck = false;
     bool subpixel = false;

     // StereoDepth
-    stereo->setOutputDepth(outputDepth);
-    stereo->setOutputRectified(outputRectified);
     stereo->setConfidenceThreshold(255);
     // stereo->setMedianFilter(dai::StereoDepthProperties::MedianFilter::MEDIAN_OFF);
diff --git a/examples/src/spatial_mobilenet_example.cpp b/examples/src/spatial_mobilenet_example.cpp
index 338c5bc1e..0a33d5155 100644
--- a/examples/src/spatial_mobilenet_example.cpp
+++ b/examples/src/spatial_mobilenet_example.cpp
@@ -45,7 +45,6 @@ dai::Pipeline createNNPipeline(std::string nnPath) {
     monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

     /// setting node configs
-    stereo->setOutputDepth(true);
     stereo->setConfidenceThreshold(255);

     spatialDetectionNetwork->setBlobPath(nnPath);
@@ -189,4 +188,4 @@ int main(int argc, char** argv) {
     }

     return 0;
-}
\ No newline at end of file
+}
diff --git a/examples/src/spatial_mobilenet_mono_example.cpp b/examples/src/spatial_mobilenet_mono_example.cpp
index 9cbd58025..3a7a56758 100644
--- a/examples/src/spatial_mobilenet_mono_example.cpp
+++ b/examples/src/spatial_mobilenet_mono_example.cpp
@@ -43,8 +43,6 @@ dai::Pipeline createNNPipeline(std::string nnPath) {
     monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

     // StereoDepth
-    stereo->setOutputDepth(true);
-    stereo->setOutputRectified(true);
     stereo->setConfidenceThreshold(255);

     // Link plugins CAM -> STEREO -> XLINK
@@ -199,4 +197,4 @@ int main(int argc, char** argv) {
     }

     return 0;
-}
\ No newline at end of file
+}
diff --git a/examples/src/spatial_object_tracker_example.cpp b/examples/src/spatial_object_tracker_example.cpp
index 2692e2809..317330973 100644
--- a/examples/src/spatial_object_tracker_example.cpp
+++ b/examples/src/spatial_object_tracker_example.cpp
@@ -42,7 +42,6 @@ dai::Pipeline createNNPipeline(std::string nnPath) {
     monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

     /// setting node configs
-    stereo->setOutputDepth(true);
     stereo->setConfidenceThreshold(255);

     spatialDetectionNetwork->setBlobPath(nnPath);
@@ -168,4 +167,4 @@ int main(int argc, char** argv) {
     }

     return 0;
-}
\ No newline at end of file
+}
diff --git a/examples/src/spatial_tiny_yolo_example.cpp b/examples/src/spatial_tiny_yolo_example.cpp
index 19a67a602..315647316 100644
--- a/examples/src/spatial_tiny_yolo_example.cpp
+++ b/examples/src/spatial_tiny_yolo_example.cpp
@@ -52,7 +52,6 @@ dai::Pipeline createNNPipeline(std::string nnPath) {
     monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

     /// setting node configs
-    stereo->setOutputDepth(true);
     stereo->setConfidenceThreshold(255);

     spatialDetectionNetwork->setBlobPath(nnPath);
@@ -203,4 +202,4 @@ int main(int argc, char** argv) {
     }

     return 0;
-}
\ No newline at end of file
+}
diff --git a/examples/src/stereo_example.cpp b/examples/src/stereo_example.cpp
index 732d5d05b..262cef977 100644
--- a/examples/src/stereo_example.cpp
+++ b/examples/src/stereo_example.cpp
@@ -56,8 +56,6 @@ int main() {

     if(withDepth) {
         // StereoDepth
-        stereo->setOutputDepth(outputDepth);
-        stereo->setOutputRectified(outputRectified);
         stereo->setConfidenceThreshold(200);
         stereo->setRectifyEdgeFillColor(0);  // black, to better see the cutout
         // stereo->loadCalibrationFile("../../../../depthai/resources/depthai.calib");
@@ -79,7 +77,9 @@ int main() {
             stereo->rectifiedRight.link(xoutRectifR->input);
         }
         stereo->disparity.link(xoutDisp->input);
-        stereo->depth.link(xoutDepth->input);
+        if(outputDepth) {
+            stereo->depth.link(xoutDepth->input);
+        }
     } else {
         // Link plugins CAM -> XLINK
@@ -99,15 +99,10 @@ int main() {
     auto rectifRightQueue = withDepth ? d.getOutputQueue("rectified_right", 8, false) : nullptr;

     while(1) {
-        auto t1 = steady_clock::now();
         auto left = leftQueue->get<dai::ImgFrame>();
-        auto t2 = steady_clock::now();
         cv::imshow("left", cv::Mat(left->getHeight(), left->getWidth(), CV_8UC1, left->getData().data()));
-        auto t3 = steady_clock::now();
         auto right = rightQueue->get<dai::ImgFrame>();
-        auto t4 = steady_clock::now();
         cv::imshow("right", cv::Mat(right->getHeight(), right->getWidth(), CV_8UC1, right->getData().data()));
-        auto t5 = steady_clock::now();

         if(withDepth) {
             // Note: in some configurations (if depth is enabled), disparity may output garbage data
@@ -127,24 +122,17 @@ int main() {
             if(outputRectified) {
                 auto rectifL = rectifLeftQueue->get<dai::ImgFrame>();
                 cv::Mat rectifiedLeftFrame = rectifL->getFrame();
-                cv::flip(rectifiedLeftFrame, rectifiedLeftFrame, 1);
+                //cv::flip(rectifiedLeftFrame, rectifiedLeftFrame, 1);
                 cv::imshow("rectified_left", rectifiedLeftFrame);

                 auto rectifR = rectifRightQueue->get<dai::ImgFrame>();
                 cv::Mat rectifiedRightFrame = rectifR->getFrame();
-                cv::flip(rectifiedRightFrame, rectifiedRightFrame, 1);
+                //cv::flip(rectifiedRightFrame, rectifiedRightFrame, 1);
                 cv::imshow("rectified_right", rectifiedRightFrame);
             }
         }

-        int ms1 = std::chrono::duration_cast<milliseconds>(t2 - t1).count();
-        int ms2 = std::chrono::duration_cast<milliseconds>(t3 - t2).count();
-        int ms3 = std::chrono::duration_cast<milliseconds>(t4 - t3).count();
-        int ms4 = std::chrono::duration_cast<milliseconds>(t5 - t4).count();
-        int loop = std::chrono::duration_cast<milliseconds>(t5 - t1).count();
-
-        std::cout << ms1 << " " << ms2 << " " << ms3 << " " << ms4 << " loop: " << loop << std::endl;

         int key = cv::waitKey(1);
         if(key == 'q') {
             return 0;
diff --git a/include/depthai/pipeline/datatype/CameraControl.hpp b/include/depthai/pipeline/datatype/CameraControl.hpp
index 344b9f119..c1ab698fa 100644
--- a/include/depthai/pipeline/datatype/CameraControl.hpp
+++ b/include/depthai/pipeline/datatype/CameraControl.hpp
@@ -105,10 +105,10 @@ class CameraControl : public Buffer {
     void setAutoExposureRegion(uint16_t startX, uint16_t startY, uint16_t width, uint16_t height);

     /**
-     * Set a command to specify auto exposure compenstaion
-     * @param compensation Compensation value between -128..127
+     * Set a command to specify auto exposure compensation
+     * @param compensation Compensation value between -9..9
      */
-    void setAutoExposureCompensation(int8_t compensation);
+    void setAutoExposureCompensation(int compensation);

     /**
      * Set a command to specify auto banding mode
@@ -119,7 +119,7 @@ class CameraControl : public Buffer {
     /**
      * Set a command to manually specify exposure
      * @param exposureTimeUs Exposure time in microseconds
-     * @param sensitivityIso Sensitivity as ISO value
+     * @param sensitivityIso Sensitivity as ISO value, usual range 100..1600
      */
     void setManualExposure(uint32_t exposureTimeUs, uint32_t sensitivityIso);

@@ -138,46 +138,40 @@ class CameraControl : public Buffer {
     // Other image controls

     /**
-     * Set a command to specify auto white balance lock
-     * @param lock Auto white balance lock mode enabled or disabled
-     */
-    void setBrightness(uint16_t value);  // TODO move to AE?
-
-    /**
-     * Set a command to specify auto white balance lock
-     * @param lock Auto white balance lock mode enabled or disabled
+     * Set a command to adjust image brightness
+     * @param value Brightness, range -10..10
      */
-    void setContrast(uint16_t value);
+    void setBrightness(int value);

     /**
-     * Set a command to specify saturation value
-     * @param value Saturation
+     * Set a command to adjust image contrast
+     * @param value Contrast, range -10..10
      */
-    void setSaturation(uint16_t value);
+    void setContrast(int value);

     /**
-     * Set a command to specify sharpness value
-     * @param value Sharpness
+     * Set a command to adjust image saturation
+     * @param value Saturation, range -10..10
      */
-    void setSharpness(uint16_t value);
+    void setSaturation(int value);

     /**
-     * Set a command to specify noise reduction strength
-     * @param value Noise reduction strength
+     * Set a command to adjust image sharpness
+     * @param value Sharpness, range 0..4
      */
-    void setNoiseReductionStrength(uint16_t value);
+    void setSharpness(int value);

     /**
-     * Set a command to specify luma denoise value
-     * @param value Luma denoise
+     * Set a command to adjust luma denoise amount
+     * @param value Luma denoise amount, range 0..4
      */
-    void setLumaDenoise(uint16_t value);
+    void setLumaDenoise(int value);

     /**
-     * Set a command to specify chroma denoise value
-     * @param value Chroma denoise
+     * Set a command to adjust chroma denoise amount
+     * @param value Chroma denoise amount, range 0..4
      */
-    void setChromaDenoise(uint16_t value);
+    void setChromaDenoise(int value);

     /**
      * Set a command to specify scene mode
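For quick reference, the retuned control ranges above can be exercised through `ColorCamera::initialControl`, the same path the new RGB-depth example uses for `setManualFocus`. A minimal sketch — the specific values are arbitrary picks inside the documented ranges, not recommendations:

```cpp
#include "depthai/depthai.hpp"

// Sketch: apply the retuned image controls at pipeline start via initialControl.
// Values are illustrative only; each comment shows the documented range.
int main() {
    dai::Pipeline p;
    auto cam = p.create<dai::node::ColorCamera>();
    cam->initialControl.setAutoExposureCompensation(3);  // -9..9
    cam->initialControl.setBrightness(1);                // -10..10
    cam->initialControl.setContrast(0);                  // -10..10
    cam->initialControl.setSaturation(2);                // -10..10
    cam->initialControl.setSharpness(1);                 // 0..4
    cam->initialControl.setLumaDenoise(1);               // 0..4
    cam->initialControl.setChromaDenoise(1);             // 0..4
    return 0;
}
```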
diff --git a/include/depthai/pipeline/node/ColorCamera.hpp b/include/depthai/pipeline/node/ColorCamera.hpp
index 5adc19db9..b59ec0322 100644
--- a/include/depthai/pipeline/node/ColorCamera.hpp
+++ b/include/depthai/pipeline/node/ColorCamera.hpp
@@ -28,6 +28,8 @@ class ColorCamera : public Node {

     std::shared_ptr<RawCameraControl> rawControl;

+    int getScaledSize(int input, int num, int denom) const;
+
   public:
     /**
      * Constructs ColorCamera node.
@@ -74,6 +76,20 @@ class ColorCamera : public Node {
      */
     Output still{*this, "still", Output::Type::MSender, {{DatatypeEnum::ImgFrame, false}}};

+    /**
+     * Outputs ImgFrame message that carries YUV420 planar (I420/IYUV) frame data.
+     *
+     * Generated by the ISP engine, and the source for the 'video', 'preview' and 'still' outputs
+     */
+    Output isp{*this, "isp", Output::Type::MSender, {{DatatypeEnum::ImgFrame, false}}};
+
+    /**
+     * Outputs ImgFrame message that carries RAW10-packed (MIPI CSI-2 format) frame data.
+     *
+     * Captured directly from the camera sensor, and the source for the 'isp' output.
+     */
+    Output raw{*this, "raw", Output::Type::MSender, {{DatatypeEnum::ImgFrame, false}}};
+
     /**
      * Specify which board socket to use
      * @param boardSocket Board socket to use
@@ -119,18 +135,48 @@ class ColorCamera : public Node {
     /// Set preview output size
     void setPreviewSize(int width, int height);

+    /// Set preview output size, as a tuple
+    void setPreviewSize(std::tuple<int, int> size);
+
     /// Set video output size
     void setVideoSize(int width, int height);

+    /// Set video output size, as a tuple
+    void setVideoSize(std::tuple<int, int> size);
+
     /// Set still output size
     void setStillSize(int width, int height);

+    /// Set still output size, as a tuple
+    void setStillSize(std::tuple<int, int> size);
+
     /// Set sensor resolution
     void setResolution(Properties::SensorResolution resolution);

     /// Get sensor resolution
     Properties::SensorResolution getResolution() const;

+    /**
+     * Set 'isp' output scaling (numerator/denominator), preserving the aspect ratio.
+     * The fraction numerator/denominator is simplified first to an irreducible form,
+     * then a set of hardware scaler constraints applies:
+     * max numerator = 16, max denominator = 63
+     */
+    void setIspScale(int numerator, int denominator);
+
+    /// Set 'isp' output scaling, as a tuple
+    void setIspScale(std::tuple<int, int> scale);
+
+    /**
+     * Set 'isp' output scaling for each direction. If the horizontal scaling factor
+     * (horizNum/horizDen) is different from the vertical scaling factor
+     * (vertNum/vertDen), a distorted (stretched or squished) image is generated
+     */
+    void setIspScale(int horizNum, int horizDenom, int vertNum, int vertDenom);
+
+    /// Set 'isp' output scaling for each direction, as tuples
+    void setIspScale(std::tuple<int, int> horizScale, std::tuple<int, int> vertScale);
+
     /**
      * Set rate at which camera should produce frames
      * @param fps Rate in frames per second
@@ -171,6 +217,13 @@ class ColorCamera : public Node {
     /// Get sensor resolution height
     int getResolutionHeight() const;

+    /// Get 'isp' output resolution as size, after scaling
+    std::tuple<int, int> getIspSize() const;
+    /// Get 'isp' output width
+    int getIspWidth() const;
+    /// Get 'isp' output height
+    int getIspHeight() const;
+
     /**
      * Specify sensor center crop.
      * Resolution size / video size
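A small usage sketch for the new 'isp' output and scaling API, mirroring what the rgb_depth_aligned example does: scaling 1080p by 2/3 yields 1280x720 frames, and the getIsp*() getters report the post-scale size. Node and stream names below are illustrative only:

```cpp
#include <iostream>
#include <tuple>

#include "depthai/depthai.hpp"

// Sketch: downscale the 1080p ISP output by 2/3 and expose it over XLink.
int main() {
    dai::Pipeline p;

    auto cam = p.create<dai::node::ColorCamera>();
    cam->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    cam->setIspScale(2, 3);  // 1920x1080 * 2/3 -> 1280x720, aspect ratio preserved

    auto xout = p.create<dai::node::XLinkOut>();
    xout->setStreamName("isp");
    cam->isp.link(xout->input);

    // getIspSize() reflects the scaled 'isp' output resolution
    auto size = cam->getIspSize();
    std::cout << "isp output: " << std::get<0>(size) << "x" << std::get<1>(size) << std::endl;
    return 0;
}
```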
diff --git a/include/depthai/pipeline/node/StereoDepth.hpp b/include/depthai/pipeline/node/StereoDepth.hpp
index 4b6e0842d..885ff5f84 100644
--- a/include/depthai/pipeline/node/StereoDepth.hpp
+++ b/include/depthai/pipeline/node/StereoDepth.hpp
@@ -101,6 +101,17 @@ class StereoDepth : public Node {
      */
     void setMedianFilter(Properties::MedianFilter median);

+    /**
+     * @param align Set the disparity/depth alignment: centered (between the 'left' and 'right' inputs),
+     * or from the perspective of a rectified output stream
+     */
+    void setDepthAlign(Properties::DepthAlign align);
+
+    /**
+     * @param camera Set the camera from whose perspective the disparity/depth will be aligned
+     */
+    void setDepthAlign(CameraBoardSocket camera);
+
     /**
      * Confidence threshold for disparity calculation
      * @param confThr Confidence threshold value 0..255
@@ -141,15 +152,17 @@ class StereoDepth : public Node {
     void setRectifyMirrorFrame(bool enable);

     /**
-     * Enable outputting rectified frames. Optimizes computation on device side when disabled
+     * Enable outputting rectified frames. Optimizes computation on device side when disabled.
+     * DEPRECATED. The outputs are auto-enabled if used
      */
-    void setOutputRectified(bool enable);
+    [[deprecated("Function call should be removed")]] void setOutputRectified(bool enable);

     /**
      * Enable outputting 'depth' stream (converted from disparity).
-     * In certain configurations, this will disable 'disparity' stream
+     * In certain configurations, this will disable 'disparity' stream.
+     * DEPRECATED. The output is auto-enabled if used
      */
-    void setOutputDepth(bool enable);
+    [[deprecated("Function call should be removed")]] void setOutputDepth(bool enable);
 };

 }  // namespace node
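To illustrate the deprecation path above: the old toggles are now no-ops that only log a warning, output streams are enabled simply by linking them, and alignment is requested explicitly. A minimal sketch, not a complete pipeline (mono cameras and the RGB output are omitted):

```cpp
#include "depthai/depthai.hpp"

int main() {
    dai::Pipeline p;
    auto stereo = p.create<dai::node::StereoDepth>();

    // Old style: now deprecated no-ops that just log a warning when called
    // stereo->setOutputDepth(true);
    // stereo->setOutputRectified(true);

    // New style: outputs are auto-enabled once linked; alignment is requested explicitly.
    // LR-check is required for RGB alignment, as noted in the rgb_depth_aligned example.
    stereo->setLeftRightCheck(true);
    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);

    auto xout = p.create<dai::node::XLinkOut>();
    xout->setStreamName("disparity");
    stereo->disparity.link(xout->input);  // linking alone enables the 'disparity' output

    return 0;
}
```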
diff --git a/shared/depthai-shared b/shared/depthai-shared
index 965d8bff2..7f55bf769 160000
--- a/shared/depthai-shared
+++ b/shared/depthai-shared
@@ -1 +1 @@
-Subproject commit 965d8bff23783d08ab332b03ec44fc96f3a56ffb
+Subproject commit 7f55bf7692a327215b501abf7458a1f439ff6ab8
diff --git a/src/opencv/ImgFrame.cpp b/src/opencv/ImgFrame.cpp
index 17d6d0fe0..658f8aa30 100755
--- a/src/opencv/ImgFrame.cpp
+++ b/src/opencv/ImgFrame.cpp
@@ -118,7 +118,7 @@ cv::Mat ImgFrame::getCvFrame() {
             }
             break;

         case Type::YUV420p:
-            cv::cvtColor(frame, output, cv::ColorConversionCodes::COLOR_YUV420p2BGR);
+            cv::cvtColor(frame, output, cv::ColorConversionCodes::COLOR_YUV2BGR_IYUV);
             break;

         case Type::NV12:
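Context for the conversion change: the new 'isp' output produces I420 (IYUV) planar frames, and OpenCV's generic `COLOR_YUV420p2BGR` appears to resolve to the YV12 variant with swapped chroma planes, hence the explicit IYUV code. A standalone sketch of the same conversion `getCvFrame()` now performs, assuming a contiguous I420 buffer of width*height*3/2 bytes (the helper name is hypothetical):

```cpp
#include <cstdint>
#include <vector>

#include <opencv2/opencv.hpp>

// Sketch: convert a contiguous I420 (IYUV) buffer to BGR, the same cv::cvtColor call
// ImgFrame::getCvFrame() now uses for Type::YUV420p frames.
cv::Mat i420ToBgr(std::vector<std::uint8_t>& data, int width, int height) {
    cv::Mat yuv(height * 3 / 2, width, CV_8UC1, data.data());  // Y plane + half-height U and V planes
    cv::Mat bgr;
    cv::cvtColor(yuv, bgr, cv::COLOR_YUV2BGR_IYUV);
    return bgr;
}
```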
diff --git a/src/pipeline/datatype/CameraControl.cpp b/src/pipeline/datatype/CameraControl.cpp
index d58c52ba5..a95ffed17 100644
--- a/src/pipeline/datatype/CameraControl.cpp
+++ b/src/pipeline/datatype/CameraControl.cpp
@@ -61,7 +61,7 @@ void CameraControl::setAutoExposureRegion(uint16_t startX, uint16_t startY, uint16_t width, uint16_t height) {
     cfg.aeRegion.height = height;
     cfg.aeRegion.priority = 1;  // TODO
 }
-void CameraControl::setAutoExposureCompensation(int8_t compensation) {
+void CameraControl::setAutoExposureCompensation(int compensation) {
     cfg.setCommand(RawCameraControl::Command::EXPOSURE_COMPENSATION);
     cfg.expCompensation = compensation;
 }
@@ -87,31 +87,27 @@ void CameraControl::setAutoWhiteBalanceLock(bool lock) {
 }

 // Other image controls
-void CameraControl::setBrightness(uint16_t value) {
+void CameraControl::setBrightness(int value) {
     cfg.setCommand(RawCameraControl::Command::BRIGHTNESS);
     cfg.brightness = value;
 }
-void CameraControl::setContrast(uint16_t value) {
+void CameraControl::setContrast(int value) {
     cfg.setCommand(RawCameraControl::Command::CONTRAST);
     cfg.contrast = value;
 }
-void CameraControl::setSaturation(uint16_t value) {
+void CameraControl::setSaturation(int value) {
     cfg.setCommand(RawCameraControl::Command::SATURATION);
     cfg.saturation = value;
 }
-void CameraControl::setSharpness(uint16_t value) {
+void CameraControl::setSharpness(int value) {
     cfg.setCommand(RawCameraControl::Command::SHARPNESS);
     cfg.sharpness = value;
 }
-void CameraControl::setNoiseReductionStrength(uint16_t value) {
-    cfg.setCommand(RawCameraControl::Command::NOISE_REDUCTION_STRENGTH);
-    cfg.noiseReductionStrength = value;
-}
-void CameraControl::setLumaDenoise(uint16_t value) {
+void CameraControl::setLumaDenoise(int value) {
     cfg.setCommand(RawCameraControl::Command::LUMA_DENOISE);
     cfg.lumaDenoise = value;
 }
-void CameraControl::setChromaDenoise(uint16_t value) {
+void CameraControl::setChromaDenoise(int value) {
     cfg.setCommand(RawCameraControl::Command::CHROMA_DENOISE);
     cfg.chromaDenoise = value;
 }
diff --git a/src/pipeline/node/ColorCamera.cpp b/src/pipeline/node/ColorCamera.cpp
old mode 100755
new mode 100644
index a8451bdbb..a356cb355
--- a/src/pipeline/node/ColorCamera.cpp
+++ b/src/pipeline/node/ColorCamera.cpp
@@ -25,7 +25,7 @@ std::string ColorCamera::getName() const {
 }

 std::vector<Node::Output> ColorCamera::getOutputs() {
-    return {video, preview, still};
+    return {raw, isp, video, preview, still};
 }

 std::vector<Node::Input> ColorCamera::getInputs() {
@@ -124,18 +124,49 @@ void ColorCamera::setPreviewSize(int width, int height) {
     properties.previewHeight = height;
 }

+void ColorCamera::setPreviewSize(std::tuple<int, int> size) {
+    setPreviewSize(std::get<0>(size), std::get<1>(size));
+}
+
 // set video output size
 void ColorCamera::setVideoSize(int width, int height) {
     properties.videoWidth = width;
     properties.videoHeight = height;
 }

+void ColorCamera::setVideoSize(std::tuple<int, int> size) {
+    setVideoSize(std::get<0>(size), std::get<1>(size));
+}
+
 // set still output size
 void ColorCamera::setStillSize(int width, int height) {
     properties.stillWidth = width;
     properties.stillHeight = height;
 }

+void ColorCamera::setStillSize(std::tuple<int, int> size) {
+    setStillSize(std::get<0>(size), std::get<1>(size));
+}
+
+void ColorCamera::setIspScale(int horizNum, int horizDenom, int vertNum, int vertDenom) {
+    properties.ispScale.horizNumerator = horizNum;
+    properties.ispScale.horizDenominator = horizDenom;
+    properties.ispScale.vertNumerator = vertNum;
+    properties.ispScale.vertDenominator = vertDenom;
+}
+
+void ColorCamera::setIspScale(int numerator, int denominator) {
+    setIspScale(numerator, denominator, numerator, denominator);
+}
+
+void ColorCamera::setIspScale(std::tuple<int, int> scale) {
+    setIspScale(std::get<0>(scale), std::get<1>(scale));
+}
+
+void ColorCamera::setIspScale(std::tuple<int, int> horizScale, std::tuple<int, int> vertScale) {
+    setIspScale(std::get<0>(horizScale), std::get<1>(horizScale), std::get<0>(vertScale), std::get<1>(vertScale));
+}
+
 void ColorCamera::setResolution(ColorCameraProperties::SensorResolution resolution) {
     properties.resolution = resolution;
 }
@@ -183,6 +214,19 @@ std::tuple<int, int> ColorCamera::getVideoSize() const {
         maxVideoHeight = 2160;
     }

+    // Take into account the ISP scaling
+    int numW = properties.ispScale.horizNumerator;
+    int denW = properties.ispScale.horizDenominator;
+    if(numW > 0 && denW > 0) {
+        maxVideoWidth = getScaledSize(maxVideoWidth, numW, denW);
+    }
+
+    int numH = properties.ispScale.vertNumerator;
+    int denH = properties.ispScale.vertDenominator;
+    if(numH > 0 && denH > 0) {
+        maxVideoHeight = getScaledSize(maxVideoHeight, numH, denH);
+    }
+
     return {maxVideoWidth, maxVideoHeight};
 }

@@ -211,6 +255,20 @@ std::tuple<int, int> ColorCamera::getStillSize() const {
         maxStillWidth = 4032;  // Note not 4056 as full sensor resolution
         maxStillHeight = 3040;
     }
+
+    // Take into account the ISP scaling
+    int numW = properties.ispScale.horizNumerator;
+    int denW = properties.ispScale.horizDenominator;
+    if(numW > 0 && denW > 0) {
+        maxStillWidth = getScaledSize(maxStillWidth, numW, denW);
+    }
+
+    int numH = properties.ispScale.vertNumerator;
+    int denH = properties.ispScale.vertDenominator;
+    if(numH > 0 && denH > 0) {
+        maxStillHeight = getScaledSize(maxStillHeight, numH, denH);
+    }
+
     return {maxStillWidth, maxStillHeight};
 }

@@ -253,6 +311,34 @@ int ColorCamera::getResolutionHeight() const {
     return std::get<1>(getResolutionSize());
 }

+int ColorCamera::getScaledSize(int input, int num, int denom) const {
+    return (input * num - 1) / denom + 1;
+}
+
+int ColorCamera::getIspWidth() const {
+    int inW = getResolutionWidth();
+    int num = properties.ispScale.horizNumerator;
+    int den = properties.ispScale.horizDenominator;
+    if(num > 0 && den > 0) {
+        return getScaledSize(inW, num, den);
+    }
+    return inW;
+}
+
+int ColorCamera::getIspHeight() const {
+    int inH = getResolutionHeight();
+    int num = properties.ispScale.vertNumerator;
+    int den = properties.ispScale.vertDenominator;
+    if(num > 0 && den > 0) {
+        return getScaledSize(inH, num, den);
+    }
+    return inH;
+}
+
+std::tuple<int, int> ColorCamera::getIspSize() const {
+    return {getIspWidth(), getIspHeight()};
+}
+
 void ColorCamera::sensorCenterCrop() {
     properties.sensorCropX = ColorCameraProperties::AUTO;
     properties.sensorCropY = ColorCameraProperties::AUTO;
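On the rounding above: `(input * num - 1) / denom + 1` is integer ceiling division, i.e. ceil(input*num/denom), so 1920 and 1080 scaled by 2/3 give exactly 1280 and 720, while fractions that do not divide evenly round up rather than truncate. A small standalone sanity check of that formula:

```cpp
#include <cassert>

// Standalone copy of the rounding used by ColorCamera::getScaledSize(): ceil(input * num / denom).
static int scaledSize(int input, int num, int denom) {
    return (input * num - 1) / denom + 1;
}

int main() {
    assert(scaledSize(1920, 2, 3) == 1280);  // 1080p width  * 2/3
    assert(scaledSize(1080, 2, 3) == 720);   // 1080p height * 2/3
    assert(scaledSize(1920, 1, 7) == 275);   // non-exact fraction rounds up (1920/7 = 274.28...)
    return 0;
}
```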
diff --git a/src/pipeline/node/StereoDepth.cpp b/src/pipeline/node/StereoDepth.cpp
index 9d36d260f..38aa97101 100644
--- a/src/pipeline/node/StereoDepth.cpp
+++ b/src/pipeline/node/StereoDepth.cpp
@@ -3,6 +3,8 @@
 // standard
 #include <fstream>

+#include "spdlog/spdlog.h"
+
 namespace dai {
 namespace node {

@@ -63,9 +65,17 @@ void StereoDepth::setInputResolution(int width, int height) {
     properties.width = width;
     properties.height = height;
 }
-void StereoDepth::setMedianFilter(StereoDepthProperties::MedianFilter median) {
+void StereoDepth::setMedianFilter(Properties::MedianFilter median) {
     properties.median = median;
 }
+void StereoDepth::setDepthAlign(Properties::DepthAlign align) {
+    properties.depthAlign = align;
+    // Unset 'depthAlignCamera', that would take precedence otherwise
+    properties.depthAlignCamera = CameraBoardSocket::AUTO;
+}
+void StereoDepth::setDepthAlign(CameraBoardSocket camera) {
+    properties.depthAlignCamera = camera;
+}
 void StereoDepth::setConfidenceThreshold(int confThr) {
     properties.confidenceThreshold = confThr;
 }
@@ -85,10 +95,12 @@ void StereoDepth::setRectifyMirrorFrame(bool enable) {
     properties.rectifyMirrorFrame = enable;
 }
 void StereoDepth::setOutputRectified(bool enable) {
-    properties.enableOutputRectified = enable;
+    (void)enable;
+    spdlog::warn("{} is deprecated. The output is auto-enabled if used", __func__);
 }
 void StereoDepth::setOutputDepth(bool enable) {
-    properties.enableOutputDepth = enable;
+    (void)enable;
+    spdlog::warn("{} is deprecated. The output is auto-enabled if used", __func__);
 }

 }  // namespace node