diff --git a/cells/RescaleRegisteredDepth.cpp b/cells/RescaleRegisteredDepth.cpp
index b8da704..5d7b94f 100644
--- a/cells/RescaleRegisteredDepth.cpp
+++ b/cells/RescaleRegisteredDepth.cpp
@@ -14,13 +14,16 @@ struct RescaledRegisteredDepth {
     in.declare(&RescaledRegisteredDepth::depth_in_, "depth", "The depth image to rescale.").required(true);
     in.declare(&RescaledRegisteredDepth::mask_in_, "mask", "A mask of the size of depth, that will be resized too.");
     in.declare(&RescaledRegisteredDepth::image_in_, "image", "The rgb image.").required(true);
+    in.declare(&RescaledRegisteredDepth::K_in_, "K", "The camera intrinsics matrix of the depth camera to rescale.");
     out.declare(&RescaledRegisteredDepth::depth_out_, "depth", "The rescaled depth image.");
     out.declare(&RescaledRegisteredDepth::mask_out_, "mask", "The rescaled depth image.");
+    out.declare(&RescaledRegisteredDepth::K_out_, "K", "The rescaled camera intrinsics matrix of the depth camera.");
   }
 
   int process(const tendrils& in, const tendrils& out)
   {
     cv::Size dsize = depth_in_->size(), isize = image_in_->size();
+    K_in_->copyTo(*K_out_);
     if (dsize == isize)
     {
       rescaleDepth(*depth_in_, CV_32F, *depth_out_);
@@ -43,6 +46,12 @@ struct RescaledRegisteredDepth {
                   std::numeric_limits<float>::quiet_NaN()));
     *depth_out_ = output;
+    //rescale the calibration matrix
+    K_out_->at<float>(0, 0) = K_out_->at<float>(0, 0) * factor;
+    K_out_->at<float>(1, 1) = K_out_->at<float>(1, 1) * static_cast<float>(isize.height) / static_cast<float>(dsize.height);
+    K_out_->at<float>(0, 2) = static_cast<float>(isize.width-1) / 2.0f;
+    K_out_->at<float>(1, 2) = static_cast<float>(isize.height-1) / 2.0f;
+
     if (!mask_in_->empty())
     {
       assert(mask_in_->size() == depth_in_->size());
       cv::Mat mask(isize, CV_8U);
@@ -55,7 +64,7 @@ struct RescaledRegisteredDepth {
     return ecto::OK;
   }
 
-  ecto::spore<cv::Mat> image_in_, depth_in_, depth_out_, mask_in_, mask_out_;
+  ecto::spore<cv::Mat> image_in_, depth_in_, depth_out_, mask_in_, mask_out_, K_in_, K_out_;
 };
 
 ECTO_CELL(base, RescaledRegisteredDepth, "RescaledRegisteredDepth",
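Note on the K rescaling added above: fx is multiplied by factor (computed earlier in process(), outside this hunk; assumed below to be the width ratio isize.width / dsize.width), fy is multiplied by the height ratio, and the principal point is recentred on the resized image rather than scaled. A minimal NumPy sketch of the same arithmetic, using a hypothetical helper name; it is not part of the patch:

import numpy as np

def rescale_depth_K(K, dsize, isize):
    # Hypothetical helper mirroring the C++ hunk above.
    # dsize = (width, height) of the raw depth image,
    # isize = (width, height) of the RGB image the depth is resized to.
    # Assumes the C++ 'factor' is the width ratio; its definition is
    # outside this diff.
    dw, dh = dsize
    iw, ih = isize
    K = np.array(K, dtype=np.float32)
    K[0, 0] *= float(iw) / dw        # fx scales with the width ratio
    K[1, 1] *= float(ih) / dh        # fy scales with the height ratio
    K[0, 2] = (iw - 1) / 2.0         # principal point recentred on the
    K[1, 2] = (ih - 1) / 2.0         # resized image, as in the C++ code
    return K

# e.g. a VGA depth camera registered to an SXGA RGB image
K_depth = [[570.3, 0.0, 319.5],
           [0.0, 570.3, 239.5],
           [0.0, 0.0, 1.0]]
print(rescale_depth_K(K_depth, (640, 480), (1280, 1024)))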
diff --git a/python/ecto_image_pipeline/io/source/ros.py b/python/ecto_image_pipeline/io/source/ros.py
index 31039e2..bd8fd27 100644
--- a/python/ecto_image_pipeline/io/source/ros.py
+++ b/python/ecto_image_pipeline/io/source/ros.py
@@ -42,7 +42,8 @@ def res_translate(res):
 def declare_cells(_p):
     res = {'camera_info_image': CellInfo(ecto_ros.CameraInfo2Cv),
            'camera_info_depth': CellInfo(ecto_ros.CameraInfo2Cv),
-           'crop_box': CellInfo(CropBox)}
+           'crop_box': CellInfo(CropBox),
+           'depth_map': CellInfo(RescaledRegisteredDepth)}
     if HAS_PCL:
         res['cloud'] = CellInfo(MatToPointCloudXYZOrganized)
     return res
@@ -57,14 +58,13 @@ def declare_forwards(_p):
     i = {}
     o = {'camera_info_image': [Forward('K', 'K_image', 'The camera intrinsics matrix of the image camera.')],
-         'camera_info_depth': [Forward('K', 'K_depth', 'The camera intrinsics matrix of the depth camera.')],
+         'depth_map': [Forward('K', 'K_depth', 'The camera intrinsics matrix of the depth camera.')],
          'crop_box': [Forward('rgb', 'image', 'The RGB image from a OpenNI device.'),
                       Forward('depth', new_doc='The depth map from a OpenNI device. This is a CV_32FC1, with values in meters.'),
                       Forward('points3d'),
                       Forward('mask')]
         }
     if HAS_PCL:
         o['cloud'] = [Forward('point_cloud')]
-
     return (p,i,o)
 
 def configure(self, _p, _i, _o):
@@ -73,7 +73,6 @@ def configure(self, _p, _i, _o):
     self._rgb_image = ecto_ros.Image2Mat(swap_rgb=True)
 
     #these transform the depth into something usable
-    self._depth_map = RescaledRegisteredDepth()
     self._points3d = DepthTo3d()
 
     #self._depth_mask = BaseSource._depth_mask()
@@ -86,19 +85,20 @@ def connections(self, p):
                  ]
 
        #rescaling ...
-        graph += [self._depth_converter['image'] >> self._depth_map['depth'],
-                  self._rgb_image['image'] >> self._depth_map['image'],
+        graph += [self._depth_converter['image'] >> self.depth_map['depth'],
+                  self._rgb_image['image'] >> self.depth_map['image'],
+                  self.camera_info_depth['K'] >> self.depth_map['K'],
                   ]
 
        #depth ~> 3d calculations
        graph += [
-                  self._depth_map['depth'] >> self._points3d['depth'],
-                  self.camera_info_depth['K'] >> self._points3d['K'],
+                  self.depth_map['depth'] >> self._points3d['depth'],
+                  self.depth_map['K'] >> self._points3d['K'],
                   #self._depth_map['depth'] >> self._depth_mask['depth'],
                   #self._rgb_image['image'] >> self._cloud['image'],
                   self._points3d['points3d'] >> self.crop_box['points3d'],
                   self._rgb_image['image'] >> self.crop_box['rgb'],
-                  self._depth_map['depth'] >> self.crop_box['depth']
+                  self.depth_map['depth'] >> self.crop_box['depth']
                 ]
        if HAS_PCL:
            graph += [ self.crop_box['points3d'] >> self.cloud['points'] ]
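Note on the wiring change: the depth camera intrinsics now flow through the depth_map cell, so DepthTo3d and the forwarded 'K_depth' output see the K that matches the rescaled depth instead of the raw camera_info matrix. A minimal ecto wiring sketch of that data flow; the import paths are assumptions and not taken from this patch, only the cell and key names come from the diff:

# Wiring sketch only; import paths below are assumed and may differ
# in this tree.
import ecto
from ecto_image_pipeline.base import RescaledRegisteredDepth
from ecto_opencv.calib import DepthTo3d

depth_map = RescaledRegisteredDepth()
points3d = DepthTo3d()

plasm = ecto.Plasm()
# K reaches DepthTo3d via the depth_map cell, already rescaled to
# match the resized depth image.
plasm.connect(depth_map['depth'] >> points3d['depth'],
              depth_map['K'] >> points3d['K'])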