Generalized ICP algorithm in PCL


I am trying to solve this exercise (slide 40).

I downloaded the kitchen-small dataset and performed the following two steps.

First step: Convert Depth-Images from the dataset to Pointclouds.

#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/ximgproc.hpp>
#include <stdio.h>
#include <iostream>
#include <string>
#include <fstream>
#include <cstdint>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/gicp.h>
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/features/normal_3d.h>
#include <pcl/kdtree/kdtree.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/features/normal_3d_omp.h>
#include <Eigen/Dense>

using namespace std;
using namespace cv;
Eigen::Matrix3f camera_matrix;

pcl::PointCloud<pcl::PointXYZRGB>::Ptr RGBDtoPCL(Mat depth_image, Mat rgb_image, Eigen::Matrix3f& _intrinsics)
{
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>);

    float fx = 512;//_intrinsics(0, 0);
    float fy = 512;//_intrinsics(1, 1);
    float cx = 320;//_intrinsics(0, 2);
    float cy = 240;//_intrinsics(1, 2);

    float factor = 1;

    depth_image.convertTo(depth_image, CV_32F); // convert the image data to float type 

    if (!depth_image.data) {
        std::cerr << "No depth data!!!" << std::endl;
        exit(EXIT_FAILURE);
    }

    // The cloud is filled with push_back below, so it must not be pre-resized;
    // its dimensions are set once all points have been added.
    pointcloud->points.reserve((depth_image.rows / 4) * (depth_image.cols / 4));

    // Note: the loop is kept sequential because push_back is not thread-safe.
    for (int v = 0; v < depth_image.rows; v += 4)
    {
        for (int u = 0; u < depth_image.cols; u += 4)
        {
            float Z = depth_image.at<float>(v, u) / factor;
            if (Z <= 0) continue; // skip pixels with missing depth

            // Back-project pixel (u, v) with depth Z through the pinhole model.
            pcl::PointXYZRGB p;
            p.z = Z;
            p.x = (u - cx) * Z / fx;
            p.y = (v - cy) * Z / fy;

            // OpenCV stores color as BGR, so channel 2 is red.
            p.r = rgb_image.at<Vec3b>(v, u)[2];
            p.g = rgb_image.at<Vec3b>(v, u)[1];
            p.b = rgb_image.at<Vec3b>(v, u)[0];

            pointcloud->points.push_back(p);
        }
    }

    // The subsampled cloud is unorganized, so store it as a single row.
    pointcloud->width = static_cast<uint32_t>(pointcloud->points.size());
    pointcloud->height = 1;
    pointcloud->is_dense = true;

    return pointcloud;

}


int main(int argc, char const *argv[])
{   
    pcl::VoxelGrid<pcl::PointXYZRGB> voxel_grid;
    pcl::NormalEstimationOMP<pcl::PointXYZRGB, pcl::Normal> ne;
    // Intrinsics of the RGB-D camera (fx, fy focal lengths in pixels; cx, cy principal point).
    float fx = 512.0f, fy = 512.0f, cx = 320.0f, cy = 240.0f;
    camera_matrix << fx, 0.0f, cx, 0.0f, fy, cy, 0.0f, 0.0f, 1.0f;

    for(int i=1; i<= 180; i++)
    {
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>);
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr subsamp_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
        pcl::PointCloud<pcl::PointXYZRGBNormal> dst;

    /*Convert depth image to Pointcloud*/
        Mat input_rgdb_image, input_depth_img, scaled_depth_img;
        input_rgdb_image = imread("/home/aleio/Downloads/rgbd-scenes/kitchen_small/kitchen_small_1/kitchen_small_1_"+to_string(i)+".png");
        input_depth_img = imread("/home/aleio/Downloads/rgbd-scenes/kitchen_small/kitchen_small_1/kitchen_small_1_"+to_string(i)+"_depth.png", IMREAD_ANYDEPTH);
        input_depth_img.convertTo(scaled_depth_img, CV_32F, 0.001);

        pointcloud=RGBDtoPCL(scaled_depth_img,input_rgdb_image,camera_matrix);

/*Voxelization*/
        voxel_grid.setInputCloud (pointcloud);
        voxel_grid.setLeafSize (0.005, 0.005, 0.005); 
        voxel_grid.filter ( *subsamp_cloud ) ;

/*Estimate normals*/     
        ne.setInputCloud (subsamp_cloud);
        pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB> ());
        ne.setSearchMethod (tree);
        pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
        ne.setRadiusSearch (0.03);
        ne.compute (*cloud_normals);

/*Create <PointXYZRGBNormal> cloud*/
        // Initialization part
        dst.width = subsamp_cloud->width;
        dst.height = subsamp_cloud->height;
        dst.is_dense = true;
        dst.points.resize(dst.width * dst.height);
        
        // Assignment part: merge the XYZRGB points and their normals point by point
        for (size_t j = 0; j < cloud_normals->points.size(); j++)
        {
            dst.points[j].x = subsamp_cloud->points[j].x;
            dst.points[j].y = subsamp_cloud->points[j].y;
            dst.points[j].z = subsamp_cloud->points[j].z;

            dst.points[j].r = subsamp_cloud->points[j].r;
            dst.points[j].g = subsamp_cloud->points[j].g;
            dst.points[j].b = subsamp_cloud->points[j].b;

            dst.points[j].curvature = cloud_normals->points[j].curvature;

            dst.points[j].normal_x = cloud_normals->points[j].normal_x;
            dst.points[j].normal_y = cloud_normals->points[j].normal_y;
            dst.points[j].normal_z = cloud_normals->points[j].normal_z;
        }
/*Save Pointcloud with normals*/
        pcl::io::savePCDFileASCII ("/home/aleio/Downloads/rgbd-scenes/kitchen_small/Pointcloud/Pointcloud_"+to_string(i)+".pcd", dst);
    }

    return 0;
}

Second step: Run the GICP algorithm.

#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/ximgproc.hpp>
#include <stdio.h>
#include <iostream>
#include <string>
#include <fstream>
#include <cstdint>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/gicp.h>
#include <pcl/common/transforms.h> // for pcl::transformPointCloud
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/features/normal_3d.h>
#include <pcl/kdtree/kdtree.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/features/normal_3d_omp.h>
#include <Eigen/Dense>


using namespace std;
using namespace cv;
Eigen::Matrix3f camera_matrix;


int main(int argc, char const *argv[])
{   
    pcl::GeneralizedIterativeClosestPoint<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> gicp; //generalized icp
    
    Eigen::Matrix4f globalTransform;
    globalTransform.setIdentity();

    /*Set GICP parameters; do I need to adjust them?*/
    gicp.setMaxCorrespondenceDistance(0.05);
    gicp.setMaximumIterations(5);
    gicp.setTransformationEpsilon(1e-8);
    
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_in(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_out(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    pcl::PointCloud<pcl::PointXYZRGBNormal> Final; //What is that?
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr t_cloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>); // cloud_in transformed according to globalTransform

    for(int i=1; i<= 180; i+=1)
    {
        /*Declare some Pointclouds*/
        pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGBNormal>); //loaded cloud
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr xyz_final (new pcl::PointCloud<pcl::PointXYZRGB>);
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr xyz_transformed (new pcl::PointCloud<pcl::PointXYZRGB>);
        pcl::PointCloud<pcl::Normal>::Ptr normal(new pcl::PointCloud<pcl::Normal>);
        

        if (pcl::io::loadPCDFile<pcl::PointXYZRGBNormal> ("/home/aleio/Downloads/rgbd-scenes/kitchen_small/Pointcloud/Pointcloud_"+to_string(i)+".pcd", *cloud) == -1) //* load the file
        {
            PCL_ERROR ("Couldn't read Pointcloud_%d.pcd \n", i);
            return (-1);
        }
        if(i==1)
        {
            pcl::copyPointCloud(*cloud,*cloud_out);  // at the first iteration, just keep the first cloud as the target
            continue;
        }

        pcl::copyPointCloud(*cloud,*cloud_in); // the current cloud becomes the source

        gicp.setInputSource(cloud_in); //New cloud
        gicp.setInputTarget(cloud_out); //Old cloud

        gicp.align(Final);
        cout << "has converged:" << gicp.hasConverged() << " score: " << gicp.getFitnessScore() << endl;
        cout << gicp.getFinalTransformation() << endl;

        globalTransform = globalTransform * gicp.getFinalTransformation(); // chain the new pairwise estimate onto the accumulated pose of the previous frame
        pcl::transformPointCloud (*cloud_in, *t_cloud, globalTransform);

/*Now I convert XYZRGBNormal clouds to XYZRGB clouds*/

/*------------------------FINAL_CLOUD-----------------------------------------*/
        // Initialization part
        xyz_final->width = Final.width;
        xyz_final->height = Final.height;
        xyz_final->is_dense = true;
        xyz_final->points.resize(xyz_final->width * xyz_final->height);
        // Assignment part (loop over Final itself, since that is the cloud being copied)
        for (size_t j = 0; j < Final.points.size(); j++)
        {
            xyz_final->points[j].x = Final.points[j].x;
            xyz_final->points[j].y = Final.points[j].y;
            xyz_final->points[j].z = Final.points[j].z;
            xyz_final->points[j].r = Final.points[j].r;
            xyz_final->points[j].g = Final.points[j].g;
            xyz_final->points[j].b = Final.points[j].b;
        }
/*------------------------TRANSFORMED_CLOUD-----------------------------------------*/
        // Initialization part
        xyz_transformed->width = t_cloud->width;
        xyz_transformed->height = t_cloud->height;
        xyz_transformed->is_dense = true;
        xyz_transformed->points.resize(xyz_transformed->width * xyz_transformed->height);
        // Assignment part
        for (size_t j = 0; j < t_cloud->points.size(); j++)
        {
            xyz_transformed->points[j].x = t_cloud->points[j].x;
            xyz_transformed->points[j].y = t_cloud->points[j].y;
            xyz_transformed->points[j].z = t_cloud->points[j].z;
            xyz_transformed->points[j].r = t_cloud->points[j].r;
            xyz_transformed->points[j].g = t_cloud->points[j].g;
            xyz_transformed->points[j].b = t_cloud->points[j].b;
        }

        pcl::copyPointCloud(*cloud,*cloud_out);  // the current cloud becomes the target for the next iteration
        
    }


    return 0;
}

Now I would expect that, since GICP converges, visualizing all the Final clouds (or all the transformed clouds) would give me a complete reconstruction of the scene. But this does not happen, and I obtain a jumbled scene instead.

What am I missing?

I also don't understand the meaning of the Final cloud produced by the gicp.align() method. Should it be the input cloud aligned to the target cloud, i.e. the input cloud transformed by the matrix returned by gicp.getFinalTransformation()?
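
In other words, my assumption (which I have not verified) is that the following check should roughly hold right after the call to gicp.align(Final):

// Sketch of my assumption: Final should coincide with cloud_in transformed by
// the pairwise estimate alone (getFinalTransformation), not by globalTransform.
pcl::PointCloud<pcl::PointXYZRGBNormal> check;
pcl::transformPointCloud(*cloud_in, check, gicp.getFinalTransformation());
// If the assumption is right, check.points[k] and Final.points[k] should be
// (approximately) the same for every index k.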


1 Answer

Answered by Mark Loyman:

ICP is meant for fine-tuning clouds that are already roughly aligned. You can obtain such a rough alignment with a feature-based method, such as SampleConsensusInitialAlignment or SampleConsensusPrerejective.

These tutorials can guide you with this:
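
For illustration, here is a rough sketch (not a drop-in implementation) of what the SampleConsensusPrerejective step could look like for your XYZRGBNormal clouds, using FPFH descriptors computed from the normals you already estimated. All radii, iteration counts and thresholds below are placeholder values that would need tuning for the kitchen_small data:

#include <pcl/features/fpfh_omp.h>
#include <pcl/registration/sample_consensus_prerejective.h>

using PointT = pcl::PointXYZRGBNormal;
using FeatureT = pcl::FPFHSignature33;

// FPFH descriptors, reusing the normals already stored in the cloud.
pcl::PointCloud<FeatureT>::Ptr computeFPFH(const pcl::PointCloud<PointT>::Ptr &cloud)
{
    pcl::FPFHEstimationOMP<PointT, PointT, FeatureT> fest;
    fest.setRadiusSearch(0.05);              // placeholder: must be larger than the normal radius
    fest.setInputCloud(cloud);
    fest.setInputNormals(cloud);
    pcl::PointCloud<FeatureT>::Ptr features(new pcl::PointCloud<FeatureT>);
    fest.compute(*features);
    return features;
}

// Rough, feature-based alignment of source onto target; returns the estimated pose.
Eigen::Matrix4f roughAlign(const pcl::PointCloud<PointT>::Ptr &source,
                           const pcl::PointCloud<PointT>::Ptr &target)
{
    pcl::SampleConsensusPrerejective<PointT, PointT, FeatureT> align;
    align.setInputSource(source);
    align.setSourceFeatures(computeFPFH(source));
    align.setInputTarget(target);
    align.setTargetFeatures(computeFPFH(target));
    align.setMaximumIterations(50000);        // RANSAC iterations (placeholder)
    align.setNumberOfSamples(3);              // points sampled per pose hypothesis
    align.setCorrespondenceRandomness(5);     // nearest features to choose from
    align.setSimilarityThreshold(0.9f);       // polygonal edge-length pre-rejection
    align.setMaxCorrespondenceDistance(0.025f); // inlier distance threshold (placeholder)
    align.setInlierFraction(0.25f);           // required inlier ratio (placeholder)

    pcl::PointCloud<PointT> rough;            // source transformed by the rough pose
    align.align(rough);
    // In real code, check align.hasConverged() before trusting the result.
    return align.getFinalTransformation();
}

The returned matrix can then be passed to GICP as the initial guess, e.g. gicp.align(Final, roughAlign(cloud_in, cloud_out)), so that GICP only has to refine an already coarse alignment.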