Merge pull request #5 from ozantonkal/implementing_addPointCloud

Implementing add point cloud
pull/1453/head
Anatoly Baksheev 12 years ago
commit 01e99db675
  1. modules/viz/include/opencv2/viz/types.hpp (3 changes)
  2. modules/viz/include/opencv2/viz/viz3d.hpp (2 changes)
  3. modules/viz/src/q/viz3d_impl.hpp (2 changes)
  4. modules/viz/src/viz3d.cpp (2 changes)
  5. modules/viz/src/viz3d_impl.cpp (98 changes)

@@ -16,14 +16,17 @@ namespace temp_viz
typedef cv::Vec4d Vec4d;
typedef cv::Vec2d Vec2d;
typedef cv::Vec2i Vec2i;
typedef cv::Vec3b Vec3b;
typedef cv::Matx33d Matx33d;
typedef cv::Affine3f Affine3f;
typedef cv::Affine3d Affine3d;
typedef cv::Point3f Point3f;
typedef cv::Point3d Point3d;
typedef cv::Matx44d Matx44d;
typedef cv::Matx44f Matx44f;
typedef cv::Size Size;
typedef cv::Point Point;
typedef cv::InputArray InputArray;

@@ -27,7 +27,7 @@ namespace temp_viz
void addCoordinateSystem(double scale, const Affine3f& t, const String &id = "coordinate");
void showPointCloud(const std::string& id, cv::InputArray cloud, cv::InputArray colors, const cv::Affine3f& pose = cv::Affine3f::Identity());
void showPointCloud(const String& id, InputArray cloud, InputArray colors, const Affine3f& pose = Affine3f::Identity());
bool addPointCloudNormals (const Mat &cloud, const Mat& normals, int level = 100, float scale = 0.02f, const String &id = "cloud");

@@ -103,7 +103,7 @@ public:
* \param[in] colors color input in the same order of the points or single uniform color
* \param[in] pose transform to be applied on the point cloud
*/
void showPointCloud(const std::string& id, cv::InputArray cloud, cv::InputArray colors, const cv::Affine3f& pose = cv::Affine3f::Identity());
void showPointCloud(const String& id, InputArray cloud, InputArray colors, const Affine3f& pose = Affine3f::Identity());
bool addPolygonMesh (const Mesh3d& mesh, const cv::Mat& mask, const std::string &id = "polygon");
bool updatePolygonMesh (const Mesh3d& mesh, const cv::Mat& mask, const std::string &id = "polygon");
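
For reference, a minimal call site for the widened signature above. Everything outside the showPointCloud call (window construction, any rendering loop) is an assumption for illustration and is not part of this commit:

    #include <opencv2/viz/viz3d.hpp>
    #include <opencv2/core/core.hpp>

    int main()
    {
        temp_viz::Viz3d viz;  // hypothetical default construction, not shown in this diff

        // 100x1 cloud of 3-D points; CV_64FC3 is the depth this pull request adds support for.
        cv::Mat cloud(100, 1, CV_64FC3), colors(100, 1, CV_8UC3);
        cv::randu(cloud, cv::Scalar::all(-1.0), cv::Scalar::all(1.0));
        cv::randu(colors, cv::Scalar::all(0), cv::Scalar::all(255));

        // Colors follow the point order; the pose is applied to every point before display.
        viz.showPointCloud("cloud", cloud, colors, cv::Affine3f::Identity());
        return 0;
    }
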

@@ -23,7 +23,7 @@ void temp_viz::Viz3d::addCoordinateSystem(double scale, const Affine3f& t, const
impl_->addCoordinateSystem(scale, t, id);
}
void temp_viz::Viz3d::showPointCloud(const std::string& id, cv::InputArray cloud, cv::InputArray colors, const cv::Affine3f& pose)
void temp_viz::Viz3d::showPointCloud(const String& id, InputArray cloud, InputArray colors, const Affine3f& pose)
{
impl_->showPointCloud(id, cloud, colors, pose);
}

@@ -28,11 +28,11 @@ void temp_viz::Viz3d::VizImpl::setPosition (int x, int y) { window_->SetPosition
void temp_viz::Viz3d::VizImpl::setSize (int xw, int yw) { window_->SetSize (xw, yw); }
void temp_viz::Viz3d::VizImpl::showPointCloud(const std::string& id, cv::InputArray cloud, cv::InputArray colors, const cv::Affine3f& pose)
void temp_viz::Viz3d::VizImpl::showPointCloud(const String& id, InputArray cloud, InputArray colors, const Affine3f& pose)
{
cv::Mat cloudMat = cloud.getMat();
cv::Mat colorsMat = colors.getMat();
CV_Assert(cloudMat.type() == CV_32FC3 && colorsMat.type() == CV_8UC3 && cloudMat.size() == colorsMat.size());
Mat cloudMat = cloud.getMat();
Mat colorsMat = colors.getMat();
CV_Assert((cloudMat.type() == CV_32FC3 || cloudMat.type() == CV_64FC3) && colorsMat.type() == CV_8UC3 && cloudMat.size() == colorsMat.size());
vtkSmartPointer<vtkPolyData> polydata;
vtkSmartPointer<vtkCellArray> vertices;
@@ -57,7 +57,10 @@ void temp_viz::Viz3d::VizImpl::showPointCloud(const std::string& id, cv::InputAr
if (!points)
{
points = vtkSmartPointer<vtkPoints>::New ();
points->SetDataTypeToFloat ();
if (cloudMat.type() == CV_32FC3)
points->SetDataTypeToFloat ();
else if (cloudMat.type() == CV_64FC3)
points->SetDataTypeToDouble ();
polydata->SetPoints (points);
}
points->SetNumberOfPoints (nr_points);
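
The data-type switch above keeps the vtkPoints backing store in sync with the input depth, so the raw-pointer copy later in this function can cast GetData() to the matching VTK array type. A standalone sketch of that rule (the helper name is illustrative, not from the diff):

    #include <vtkSmartPointer.h>
    #include <vtkPoints.h>
    #include <opencv2/core/core.hpp>

    // Allocate a vtkPoints container whose scalar type matches the cloud depth,
    // so GetData() can later be cast to vtkFloatArray or vtkDoubleArray safely.
    vtkSmartPointer<vtkPoints> createPointsFor(const cv::Mat& cloudMat)
    {
        vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
        if (cloudMat.type() == CV_32FC3)
            points->SetDataTypeToFloat();    // backing store: vtkFloatArray
        else if (cloudMat.type() == CV_64FC3)
            points->SetDataTypeToDouble();   // backing store: vtkDoubleArray
        points->SetNumberOfPoints(cloudMat.size().area());
        return points;
    }
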
@@ -69,30 +72,54 @@ void temp_viz::Viz3d::VizImpl::showPointCloud(const std::string& id, cv::InputAr
polydata = reinterpret_cast<vtkPolyDataMapper*>(am_it->second.actor->GetMapper ())->GetInput ();
vertices = polydata->GetVerts ();
points = polydata->GetPoints ();
// Update the point data type based on the cloud
if (cloudMat.type() == CV_32FC3)
points->SetDataTypeToFloat ();
else if (cloudMat.type() == CV_64FC3)
points->SetDataTypeToDouble ();
// Copy the new point array in
nr_points = cloudMat.size().area();
points->SetNumberOfPoints (nr_points);
}
// Get a pointer to the beginning of the data array
float *data = (static_cast<vtkFloatArray*> (points->GetData ()))->GetPointer (0);
// Scan through the data and apply mask where point is NAN
int j = 0;
// If a point is NaN, ignore it
for(int y = 0; y < cloudMat.rows; ++y)
if (cloudMat.type() == CV_32FC3)
{
// Get a pointer to the beginning of the data array
float *data = (static_cast<vtkFloatArray*> (points->GetData ()))->GetPointer (0);
// Scan through the data and apply mask where point is NAN
for(int y = 0; y < cloudMat.rows; ++y)
{
const Point3f* crow = cloudMat.ptr<Point3f>(y);
for(int x = 0; x < cloudMat.cols; ++x)
if (cvIsNaN(crow[x].x) != 1 && cvIsNaN(crow[x].y) != 1 && cvIsNaN(crow[x].z) != 1)
{
// Points are transformed based on pose parameter
Point3f transformed_point = pose * crow[x];
memcpy (&data[j++ * 3], &transformed_point, sizeof(Point3f));
}
}
}
else if (cloudMat.type() == CV_64FC3)
{
const cv::Point3f* crow = cloudMat.ptr<cv::Point3f>(y);
for(int x = 0; x < cloudMat.cols; ++x)
if (cvIsNaN(crow[x].x) != 1 && cvIsNaN(crow[x].y) != 1 && cvIsNaN(crow[x].z) != 1)
{
// Points are transformed based on pose parameter
cv::Point3f transformed_point = pose * crow[x];
memcpy (&data[j++ * 3], &transformed_point, sizeof(cv::Point3f));
}
// Get a pointer to the beginning of the data array
double *data = (static_cast<vtkDoubleArray*> (points->GetData ()))->GetPointer (0);
// If a point is NaN, ignore it
for(int y = 0; y < cloudMat.rows; ++y)
{
const Point3d* crow = cloudMat.ptr<Point3d>(y);
for(int x = 0; x < cloudMat.cols; ++x)
if (cvIsNaN(crow[x].x) != 1 && cvIsNaN(crow[x].y) != 1 && cvIsNaN(crow[x].z) != 1)
{
// Points are transformed based on pose parameter
Point3d transformed_point = pose * crow[x];
memcpy (&data[j++ * 3], &transformed_point, sizeof(Point3d));
}
}
}
nr_points = j;
points->SetNumberOfPoints (nr_points);
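
The copy loops above drop NaN points and bake the pose into the stored coordinates; j ends up as the number of valid points and the VTK array is shrunk to that count. The double branch, condensed into a plain-buffer sketch (the free-function form and helper name are illustrative only):

    #include <opencv2/core/core.hpp>
    #include <opencv2/core/affine.hpp>
    #include <cstring>

    // Copy the finite points of a CV_64FC3 cloud into a packed xyz double buffer,
    // transforming each point by 'pose'. Returns how many points were kept.
    static int packFinitePoints(const cv::Mat& cloudMat, const cv::Affine3f& pose, double* data)
    {
        int j = 0;
        for (int y = 0; y < cloudMat.rows; ++y)
        {
            const cv::Point3d* crow = cloudMat.ptr<cv::Point3d>(y);
            for (int x = 0; x < cloudMat.cols; ++x)
            {
                if (cvIsNaN(crow[x].x) || cvIsNaN(crow[x].y) || cvIsNaN(crow[x].z))
                    continue;  // skip invalid points entirely
                cv::Point3d transformed_point = pose * crow[x];
                std::memcpy(&data[j++ * 3], &transformed_point, sizeof(cv::Point3d));
            }
        }
        return j;  // the caller shrinks the VTK point array to this count
    }
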
@@ -116,13 +143,27 @@ void temp_viz::Viz3d::VizImpl::showPointCloud(const std::string& id, cv::InputAr
unsigned char* colors_data = new unsigned char[nr_points * 3];
j = 0;
for(int y = 0; y < colorsMat.rows; ++y)
if (cloudMat.type() == CV_32FC3)
{
const cv::Vec3b* crow = colorsMat.ptr<cv::Vec3b>(y);
const cv::Point3f* cloud_row = cloudMat.ptr<cv::Point3f>(y);
for(int x = 0; x < colorsMat.cols; ++x)
if (cvIsNaN(cloud_row[x].x) != 1 && cvIsNaN(cloud_row[x].y) != 1 && cvIsNaN(cloud_row[x].z) != 1)
memcpy (&colors_data[j++ * 3], &crow[x], sizeof(cv::Vec3b));
for(int y = 0; y < colorsMat.rows; ++y)
{
const Vec3b* crow = colorsMat.ptr<Vec3b>(y);
const Point3f* cloud_row = cloudMat.ptr<Point3f>(y);
for(int x = 0; x < colorsMat.cols; ++x)
if (cvIsNaN(cloud_row[x].x) != 1 && cvIsNaN(cloud_row[x].y) != 1 && cvIsNaN(cloud_row[x].z) != 1)
memcpy (&colors_data[j++ * 3], &crow[x], sizeof(Vec3b));
}
}
else if (cloudMat.type() == CV_64FC3)
{
for(int y = 0; y < colorsMat.rows; ++y)
{
const Vec3b* crow = colorsMat.ptr<Vec3b>(y);
const Point3d* cloud_row = cloudMat.ptr<Point3d>(y);
for(int x = 0; x < colorsMat.cols; ++x)
if (cvIsNaN(cloud_row[x].x) != 1 && cvIsNaN(cloud_row[x].y) != 1 && cvIsNaN(cloud_row[x].z) != 1)
memcpy (&colors_data[j++ * 3], &crow[x], sizeof(Vec3b));
}
}
reinterpret_cast<vtkUnsignedCharArray*>(&(*scalars))->SetArray (colors_data, 3 * nr_points, 0);
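
The color pass repeats the same NaN test on the cloud so that the j-th surviving point and the j-th RGB triple stay aligned in the buffer handed to SetArray. A condensed sketch for the double branch (again an illustrative free function, not the diff's code):

    #include <opencv2/core/core.hpp>
    #include <cstring>

    // Pack the Vec3b color of every finite cloud point into a contiguous byte buffer.
    // 'colors_data' must hold at least 3 * colorsMat.size().area() bytes.
    static int packColorsOfFinitePoints(const cv::Mat& cloudMat, const cv::Mat& colorsMat,
                                        unsigned char* colors_data)
    {
        int j = 0;
        for (int y = 0; y < colorsMat.rows; ++y)
        {
            const cv::Vec3b* crow = colorsMat.ptr<cv::Vec3b>(y);
            const cv::Point3d* cloud_row = cloudMat.ptr<cv::Point3d>(y);
            for (int x = 0; x < colorsMat.cols; ++x)
                if (!cvIsNaN(cloud_row[x].x) && !cvIsNaN(cloud_row[x].y) && !cvIsNaN(cloud_row[x].z))
                    std::memcpy(&colors_data[j++ * 3], &crow[x], sizeof(cv::Vec3b));
        }
        return j;  // matches the point count produced by the coordinate pass
    }
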
@@ -152,9 +193,6 @@ void temp_viz::Viz3d::VizImpl::showPointCloud(const std::string& id, cv::InputAr
// Save the viewpoint transformation matrix to the global actor map
vtkSmartPointer<vtkMatrix4x4> transformation = vtkSmartPointer<vtkMatrix4x4>::New();
convertToVtkMatrix (sensor_origin, sensor_orientation, transformation);
// convertToVtkMatrix (pose.matrix, transformation);
std::cout << transformation->GetElement(0,3) << endl;
(*cloud_actor_map_)[id].viewpoint_transformation_ = transformation;
}
