Skip to content

Commit

Permalink
Replace '.points[' with just '[' (#4219)
Browse files Browse the repository at this point in the history
* Reuse code, add exception in cuda
  • Loading branch information
kunaltyagi committed Jun 29, 2020
1 parent d88a08e commit 0dde8ff
Show file tree
Hide file tree
Showing 208 changed files with 1,773 additions and 1,811 deletions.
4 changes: 2 additions & 2 deletions CHANGES.md
Expand Up @@ -3062,7 +3062,7 @@ pcl::OrganizedDataIndex -> pcl::search::OrganizedNeighbor
* added new templated methods for `nearestKSearch` and `radiusSearch` for situations when PointT is different than the one the KdTree object was created with (e.g., KdTree<PointT1> vs nearestKSearch (PointT2 &p...)
* added two new methods for `getApproximateIndices` where given a reference cloud of point type T1 we're trying to find the corresponding indices in a different cloud of point type T2
* refactored a lot of code in search and octree to make it look more consistent with the rest of the API
* fixed a bug in octree_search which was semantically doing something bad: for each `radiusSearch`/`nearestKSearch`/`approxNearestSearch` call with a PointCloudConstPtr, the octree was getting recreated. Changed the API to be consistent with the rest of PCL (including pcl_search and pcl_kdtree) where we pass in a PointCloud instead of a PointCloudConstPtr which simply calls searchMethod (cloud.points[i], ...)
* fixed a bug in octree_search which was semantically doing something bad: for each `radiusSearch`/`nearestKSearch`/`approxNearestSearch` call with a PointCloudConstPtr, the octree was getting recreated. Changed the API to be consistent with the rest of PCL (including pcl_search and pcl_kdtree) where we pass in a PointCloud instead of a PointCloudConstPtr which simply calls searchMethod (cloud[i], ...)
* minor code optimizations
* renamed organized_neighbor.h header in pcl_search (unreleased, therefore API changes OK!) to organized.h
* disabled the auto-search tuning as it wasn't really working. must clean this up further
Expand Down Expand Up @@ -3117,7 +3117,7 @@ pcl::OrganizedDataIndex -> pcl::search::OrganizedNeighbor
* added unit test for `getIntersectedVoxelCentersRecursive`
* added method `getIntersectedVoxelIndices` for getting indices of intersected voxels and updated unit test
* refactored a lot of code in search and octree to make it look more consistent with the rest of the API
* fixed a bug in octree_search which was semantically doing something bad: for each `radiusSearch`/`nearestKSearch`/`approxNearestSearch` call with a PointCloudConstPtr, the octree was getting recreated. Changed the API to be consistent with the rest of PCL (including pcl_search and pcl_kdtree) where we pass in a PointCloud instead of a PointCloudConstPtr which simply calls searchMethod (cloud.points[i], ...)
* fixed a bug in octree_search which was semantically doing something bad: for each `radiusSearch`/`nearestKSearch`/`approxNearestSearch` call with a PointCloudConstPtr, the octree was getting recreated. Changed the API to be consistent with the rest of PCL (including pcl_search and pcl_kdtree) where we pass in a PointCloud instead of a PointCloudConstPtr which simply calls searchMethod (cloud[i], ...)
* minor code optimizations
* renamed organized_neighbor.h header in pcl_search (unreleased, therefore API changes OK!) to organized.h
* disabled the auto-search tuning as it wasn't really working. must clean this up further
Expand Down
Expand Up @@ -136,7 +136,7 @@ class CVFHEstimation : public GlobalEstimator<PointInT, FeatureT> {
vfh_signature.points.resize(1);
vfh_signature.width = vfh_signature.height = 1;
for (int d = 0; d < 308; ++d)
vfh_signature.points[0].histogram[d] = cvfh_signatures.points[i].histogram[d];
vfh_signature[0].histogram[d] = cvfh_signatures[i].histogram[d];

signatures.push_back(vfh_signature);
}
Expand Down
Expand Up @@ -165,7 +165,7 @@ class OURCVFHEstimator : public GlobalEstimator<PointInT, FeatureT> {
vfh_signature.points.resize(1);
vfh_signature.width = vfh_signature.height = 1;
for (int d = 0; d < 308; ++d)
vfh_signature.points[0].histogram[d] = point.histogram[d];
vfh_signature[0].histogram[d] = point.histogram[d];

signatures.push_back(vfh_signature);
}
Expand Down
Expand Up @@ -97,8 +97,8 @@ pcl::rec_3d_framework::GlobalNNPipeline<Distance, PointInT, FeatureT>::classify(

if (!signatures.empty()) {
for (std::size_t idx = 0; idx < signatures.size(); idx++) {
float* hist = signatures[idx].points[0].histogram;
int size_feat = sizeof(signatures[idx].points[0].histogram) / sizeof(float);
float* hist = signatures[idx][0].histogram;
int size_feat = sizeof(signatures[idx][0].histogram) / sizeof(float);
std::vector<float> std_hist(hist, hist + size_feat);
ModelT empty;

Expand Down
Expand Up @@ -197,8 +197,8 @@ pcl::rec_3d_framework::GlobalNNCRHRecognizer<Distance, PointInT, FeatureT>::reco
pcl::ScopeTime t_matching("Matching and roll...");
for (std::size_t idx = 0; idx < signatures.size(); idx++) {

float* hist = signatures[idx].points[0].histogram;
int size_feat = sizeof(signatures[idx].points[0].histogram) / sizeof(float);
float* hist = signatures[idx][0].histogram;
int size_feat = sizeof(signatures[idx][0].histogram) / sizeof(float);
std::vector<float> std_hist(hist, hist + size_feat);
ModelT empty;

Expand Down
Expand Up @@ -253,8 +253,8 @@ pcl::rec_3d_framework::GlobalNNCVFHRecognizer<Distance, PointInT, FeatureT>::rec
for (std::size_t c = 0; c < categories_to_be_searched_.size(); c++) {
std::cout << "Using category:" << categories_to_be_searched_[c] << std::endl;
for (std::size_t idx = 0; idx < signatures.size(); idx++) {
float* hist = signatures[idx].points[0].histogram;
int size_feat = sizeof(signatures[idx].points[0].histogram) / sizeof(float);
float* hist = signatures[idx][0].histogram;
int size_feat = sizeof(signatures[idx][0].histogram) / sizeof(float);
std::vector<float> std_hist(hist, hist + size_feat);

flann_model histogram;
Expand Down Expand Up @@ -293,8 +293,8 @@ pcl::rec_3d_framework::GlobalNNCVFHRecognizer<Distance, PointInT, FeatureT>::rec
else {
for (std::size_t idx = 0; idx < signatures.size(); idx++) {

float* hist = signatures[idx].points[0].histogram;
int size_feat = sizeof(signatures[idx].points[0].histogram) / sizeof(float);
float* hist = signatures[idx][0].histogram;
int size_feat = sizeof(signatures[idx][0].histogram) / sizeof(float);
std::vector<float> std_hist(hist, hist + size_feat);

flann_model histogram;
Expand Down
Expand Up @@ -114,9 +114,9 @@ uniform_sampling(const vtkSmartPointer<vtkPolyData>& polydata,
for (i = 0; i < n_samples; i++) {
Eigen::Vector4f p(0.f, 0.f, 0.f, 0.f);
randPSurface(polydata, &cumulativeAreas, totalArea, p);
cloud_out.points[i].x = static_cast<float>(p[0]);
cloud_out.points[i].y = static_cast<float>(p[1]);
cloud_out.points[i].z = static_cast<float>(p[2]);
cloud_out[i].x = static_cast<float>(p[0]);
cloud_out[i].y = static_cast<float>(p[1]);
cloud_out[i].z = static_cast<float>(p[2]);
}
}

Expand Down Expand Up @@ -167,9 +167,9 @@ getVerticesAsPointCloud(const vtkSmartPointer<vtkPolyData>& polydata,
for (vtkIdType i = 0; i < points->GetNumberOfPoints(); i++) {
double p[3];
points->GetPoint(i, p);
cloud_out.points[i].x = static_cast<float>(p[0]);
cloud_out.points[i].y = static_cast<float>(p[1]);
cloud_out.points[i].z = static_cast<float>(p[2]);
cloud_out[i].x = static_cast<float>(p[0]);
cloud_out[i].y = static_cast<float>(p[1]);
cloud_out[i].z = static_cast<float>(p[2]);
}
}

Expand Down
6 changes: 3 additions & 3 deletions apps/modeler/src/normal_estimation_worker.cpp
Expand Up @@ -131,8 +131,8 @@ pcl::modeler::NormalEstimationWorker::processImpl(CloudMeshItem* cloud_mesh_item

for (std::size_t i = 0, i_end = indices->size(); i < i_end; ++i) {
std::size_t dest = (*indices)[i];
cloud->points[dest].normal_x = normals.points[i].normal_x;
cloud->points[dest].normal_y = normals.points[i].normal_y;
cloud->points[dest].normal_z = normals.points[i].normal_z;
cloud->points[dest].normal_x = normals[i].normal_x;
cloud->points[dest].normal_y = normals[i].normal_y;
cloud->points[dest].normal_z = normals[i].normal_z;
}
}
16 changes: 8 additions & 8 deletions apps/point_cloud_editor/src/cloud.cpp
Expand Up @@ -227,7 +227,7 @@ Cloud::drawWithRGB () const
{
glEnableClientState(GL_COLOR_ARRAY);
glColorPointer(3, GL_UNSIGNED_BYTE, sizeof(Point3D),
&(cloud_.points[0].b));
&(cloud_[0].b));
draw();
}

Expand Down Expand Up @@ -267,7 +267,7 @@ Cloud::draw (bool disable_highlight) const
glTranslatef(-center_xyz_[0], -center_xyz_[1], -center_xyz_[2]);

glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(Point3D), &(cloud_.points[0].x));
glVertexPointer(3, GL_FLOAT, sizeof(Point3D), &(cloud_[0].x));

if (disable_highlight || (!selection_ptr) || selection_ptr->empty())
{
Expand Down Expand Up @@ -328,7 +328,7 @@ Cloud::remove(const Selection& selection)
{
unsigned int pos = cloud_.size();
for (auto rit = selection.rbegin(); rit != selection.rend(); ++rit)
std::swap(cloud_.points[--pos], cloud_.points[*rit]);
std::swap(cloud_[--pos], cloud_[*rit]);
resize(cloud_.size()-selection.size());
}

Expand Down Expand Up @@ -439,7 +439,7 @@ Cloud::restore (const CopyBuffer& copy_buffer, const Selection& selection)
append(copied_cloud);
unsigned int pos = cloud_.size();
for (auto rit = selection.rbegin(); rit != selection.rend(); ++rit)
std::swap(cloud_.points[--pos], cloud_.points[*rit]);
std::swap(cloud_[--pos], cloud_[*rit]);
}

std::string
Expand All @@ -459,15 +459,15 @@ Cloud::updateCloudMembers ()

std::fill_n(min_xyz_, XYZ_SIZE, 0.0f);
std::fill_n(max_xyz_, XYZ_SIZE, 0.0f);
float *pt = &(cloud_.points[0].data[X]);
float *pt = &(cloud_[0].data[X]);
std::copy(pt, pt+XYZ_SIZE, max_xyz_);
std::copy(max_xyz_, max_xyz_+XYZ_SIZE, min_xyz_);
for (std::size_t i = 1; i < cloud_.size(); ++i)
{
for (unsigned int j = 0; j < XYZ_SIZE; ++j)
{
min_xyz_[j] = std::min(min_xyz_[j], cloud_.points[i].data[j]);
max_xyz_[j] = std::max(max_xyz_[j], cloud_.points[i].data[j]);
min_xyz_[j] = std::min(min_xyz_[j], cloud_[i].data[j]);
max_xyz_[j] = std::max(max_xyz_[j], cloud_[i].data[j]);
}
}
float range = 0.0f;
Expand All @@ -493,7 +493,7 @@ Cloud::enableTexture () const
glEnable(GL_TEXTURE_1D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(1, GL_FLOAT, sizeof(Point3D),
&(cloud_.points[0].data[color_ramp_axis_]));
&(cloud_[0].data[color_ramp_axis_]));
glMatrixMode(GL_TEXTURE);
glPushMatrix();
glLoadIdentity();
Expand Down
6 changes: 3 additions & 3 deletions apps/src/ni_agast.cpp
Expand Up @@ -235,9 +235,9 @@ class AGASTDemo {
if (!std::isfinite(pt.x) || !std::isfinite(pt.y) || !std::isfinite(pt.z))
continue;

keypoints3d.points[j].x = pt.x;
keypoints3d.points[j].y = pt.y;
keypoints3d.points[j].z = pt.z;
keypoints3d[j].x = pt.x;
keypoints3d[j].y = pt.y;
keypoints3d[j].z = pt.z;
++j;
}

Expand Down
6 changes: 3 additions & 3 deletions apps/src/ni_brisk.cpp
Expand Up @@ -185,9 +185,9 @@ class BRISKDemo {
PointT pt =
bilinearInterpolation(cloud, keypoints->points[i].x, keypoints->points[i].y);

keypoints3d.points[j].x = pt.x;
keypoints3d.points[j].y = pt.y;
keypoints3d.points[j].z = pt.z;
keypoints3d[j].x = pt.x;
keypoints3d[j].y = pt.y;
keypoints3d[j].z = pt.z;
++j;
}

Expand Down
26 changes: 13 additions & 13 deletions apps/src/organized_segmentation_demo.cpp
Expand Up @@ -81,15 +81,15 @@ displayCurvature(pcl::PointCloud<PointT>& cloud,
{
pcl::PointCloud<pcl::PointXYZRGBA> curvature_cloud = cloud;
for (std::size_t i = 0; i < cloud.points.size(); i++) {
if (normals.points[i].curvature < 0.04) {
curvature_cloud.points[i].r = 0;
curvature_cloud.points[i].g = 255;
curvature_cloud.points[i].b = 0;
if (normals[i].curvature < 0.04) {
curvature_cloud[i].r = 0;
curvature_cloud[i].g = 255;
curvature_cloud[i].b = 0;
}
else {
curvature_cloud.points[i].r = 255;
curvature_cloud.points[i].g = 0;
curvature_cloud.points[i].b = 0;
curvature_cloud[i].r = 255;
curvature_cloud[i].g = 0;
curvature_cloud[i].b = 0;
}
}

Expand All @@ -105,14 +105,14 @@ displayDistanceMap(pcl::PointCloud<PointT>& cloud,
pcl::PointCloud<pcl::PointXYZRGBA> distance_map_cloud = cloud;
for (std::size_t i = 0; i < cloud.points.size(); i++) {
if (distance_map[i] < 5.0) {
distance_map_cloud.points[i].r = 255;
distance_map_cloud.points[i].g = 0;
distance_map_cloud.points[i].b = 0;
distance_map_cloud[i].r = 255;
distance_map_cloud[i].g = 0;
distance_map_cloud[i].b = 0;
}
else {
distance_map_cloud.points[i].r = 0;
distance_map_cloud.points[i].g = 255;
distance_map_cloud.points[i].b = 0;
distance_map_cloud[i].r = 0;
distance_map_cloud[i].g = 255;
distance_map_cloud[i].b = 0;
}
}

Expand Down
16 changes: 8 additions & 8 deletions common/include/pcl/common/distances.h
Expand Up @@ -112,8 +112,8 @@ namespace pcl
for (std::size_t j = i; j < cloud.points.size (); ++j)
{
// Compute the distance
double dist = (cloud.points[i].getVector4fMap () -
cloud.points[j].getVector4fMap ()).squaredNorm ();
double dist = (cloud[i].getVector4fMap () -
cloud[j].getVector4fMap ()).squaredNorm ();
if (dist <= max_dist)
continue;

Expand All @@ -126,8 +126,8 @@ namespace pcl
if (i_min == token || i_max == token)
return (max_dist = std::numeric_limits<double>::min ());

pmin = cloud.points[i_min];
pmax = cloud.points[i_max];
pmin = cloud[i_min];
pmax = cloud[i_max];
return (std::sqrt (max_dist));
}

Expand All @@ -152,8 +152,8 @@ namespace pcl
for (std::size_t j = i; j < indices.size (); ++j)
{
// Compute the distance
double dist = (cloud.points[indices[i]].getVector4fMap () -
cloud.points[indices[j]].getVector4fMap ()).squaredNorm ();
double dist = (cloud[indices[i]].getVector4fMap () -
cloud[indices[j]].getVector4fMap ()).squaredNorm ();
if (dist <= max_dist)
continue;

Expand All @@ -166,8 +166,8 @@ namespace pcl
if (i_min == token || i_max == token)
return (max_dist = std::numeric_limits<double>::min ());

pmin = cloud.points[indices[i_min]];
pmax = cloud.points[indices[i_max]];
pmin = cloud[indices[i_min]];
pmax = cloud[indices[i_max]];
return (std::sqrt (max_dist));
}

Expand Down
4 changes: 2 additions & 2 deletions common/include/pcl/common/impl/centroid.hpp
Expand Up @@ -759,7 +759,7 @@ demeanPointCloud (const pcl::PointCloud<PointT> &cloud_in,
cloud_out (1, i) = cloud_in[i].y - centroid[1];
cloud_out (2, i) = cloud_in[i].z - centroid[2];
// One column at a time
//cloud_out.block<4, 1> (0, i) = cloud_in.points[i].getVector4fMap () - centroid;
//cloud_out.block<4, 1> (0, i) = cloud_in[i].getVector4fMap () - centroid;
}

// Make sure we zero the 4th dimension out (1 row, N columns)
Expand All @@ -783,7 +783,7 @@ demeanPointCloud (const pcl::PointCloud<PointT> &cloud_in,
cloud_out (1, i) = cloud_in[indices[i]].y - centroid[1];
cloud_out (2, i) = cloud_in[indices[i]].z - centroid[2];
// One column at a time
//cloud_out.block<4, 1> (0, i) = cloud_in.points[indices[i]].getVector4fMap () - centroid;
//cloud_out.block<4, 1> (0, i) = cloud_in[indices[i]].getVector4fMap () - centroid;
}

// Make sure we zero the 4th dimension out (1 row, N columns)
Expand Down

0 comments on commit 0dde8ff

Please sign in to comment.