Merge pull request #2485 from Nerei:viz_correct_viewer_pose

Roman Donchenko authored 11 years ago; committed by OpenCV Buildbot
commit 882971151d

Changed files:
    modules/viz/src/vizimpl.cpp (6 changed lines)
    modules/viz/test/test_tutorial3.cpp (22 changed lines)

@@ -427,12 +427,12 @@ void cv::viz::Viz3d::VizImpl::setViewerPose(const Affine3d &pose)
     // Rotate the view vector
     cv::Matx33d rotation = pose.rotation();
-    cv::Vec3d y_axis(0.0, 1.0, 0.0);
+    cv::Vec3d y_axis(0.0, -1.0, 0.0); // In Computer Vision Camera Y-axis is oriented down
     cv::Vec3d up_vec(rotation * y_axis);

     // Compute the new focal point
     cv::Vec3d z_axis(0.0, 0.0, 1.0);
-    cv::Vec3d focal_vec = pos_vec + rotation * z_axis;
+    cv::Vec3d focal_vec = pose * z_axis;

     camera.SetPosition(pos_vec.val);
     camera.SetFocalPoint(focal_vec.val);
@@ -450,7 +450,7 @@ cv::Affine3d cv::viz::Viz3d::VizImpl::getViewerPose()
     Vec3d view_up(camera.GetViewUp());
     Vec3d focal(camera.GetFocalPoint());

-    Vec3d y_axis = normalized(view_up);
+    Vec3d y_axis = normalized(-view_up); // In Computer Vision Camera Y-axis is oriented down
     Vec3d z_axis = normalized(focal - pos);
     Vec3d x_axis = normalized(y_axis.cross(z_axis));

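The change above encodes the usual computer-vision camera convention (Z looks forward, Y points down): VTK's view-up vector must be the rotated negative Y-axis, and the focal point lies one unit along the camera Z-axis expressed in world coordinates. The snippet below is a minimal standalone sketch of that mapping, not part of the patch; it only uses opencv2/core and a made-up example pose.

// Minimal sketch (not part of the patch): how a CV-style camera pose maps to
// VTK-style position / view-up / focal-point, following the corrected code.
#include <opencv2/core.hpp>
#include <opencv2/core/affine.hpp>
#include <iostream>

int main()
{
    // Hypothetical example pose: camera at (1, 2, 3) with identity rotation.
    cv::Affine3d pose(cv::Matx33d::eye(), cv::Vec3d(1.0, 2.0, 3.0));

    cv::Vec3d pos_vec = pose.translation();
    cv::Matx33d rotation = pose.rotation();

    // In the CV convention the camera Y-axis points down, so the viewer's
    // "up" direction is the rotated -Y axis.
    cv::Vec3d up_vec = rotation * cv::Vec3d(0.0, -1.0, 0.0);

    // The focal point is one unit along the camera Z-axis, in world
    // coordinates; pose * z_axis equals pos_vec + rotation * z_axis.
    cv::Vec3d focal_vec = pose * cv::Vec3d(0.0, 0.0, 1.0);

    std::cout << "position    " << pos_vec   << std::endl
              << "view up     " << up_vec    << std::endl
              << "focal point " << focal_vec << std::endl;
    return 0;
}
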
@@ -15,50 +15,46 @@ void tutorial3(bool camera_pov)
     myWindow.showWidget("Coordinate Widget", viz::WCoordinateSystem());

     /// Let's assume camera has the following properties
-    Point3d cam_pos(3.0, 3.0, 3.0), cam_focal_point(3.0, 3.0, 2.0), cam_y_dir(-1.0, 0.0, 0.0);
+    Point3d cam_origin(3.0, 3.0, 3.0), cam_focal_point(3.0, 3.0, 2.0), cam_y_dir(-1.0, 0.0, 0.0);

     /// We can get the pose of the cam using makeCameraPose
-    Affine3d cam_pose = viz::makeCameraPose(cam_pos, cam_focal_point, cam_y_dir);
+    Affine3d camera_pose = viz::makeCameraPose(cam_origin, cam_focal_point, cam_y_dir);

     /// We can get the transformation matrix from camera coordinate system to global using
     /// - makeTransformToGlobal. We need the axes of the camera
-    Affine3d transform = viz::makeTransformToGlobal(Vec3d(0.0, -1.0, 0.0), Vec3d(-1.0, 0.0, 0.0), Vec3d(0.0, 0.0, -1.0), cam_pos);
+    Affine3d transform = viz::makeTransformToGlobal(Vec3d(0.0, -1.0, 0.0), Vec3d(-1.0, 0.0, 0.0), Vec3d(0.0, 0.0, -1.0), cam_origin);

     /// Create a cloud widget.
     Mat dragon_cloud = viz::readCloud(get_dragon_ply_file_path());
     viz::WCloud cloud_widget(dragon_cloud, viz::Color::green());

     /// Pose of the widget in camera frame
-    Affine3d cloud_pose = Affine3d().translate(Vec3d(0.0, 0.0, 3.0));
+    Affine3d cloud_pose = Affine3d().rotate(Vec3d(0.0, CV_PI/2, 0.0)).rotate(Vec3d(0.0, 0.0, CV_PI)).translate(Vec3d(0.0, 0.0, 3.0));
     /// Pose of the widget in global frame
     Affine3d cloud_pose_global = transform * cloud_pose;

     /// Visualize camera frame
+    myWindow.showWidget("CPW_FRUSTUM", viz::WCameraPosition(Vec2f(0.889484f, 0.523599f)), camera_pose);
     if (!camera_pov)
-    {
-        viz::WCameraPosition cpw(0.5); // Coordinate axes
-        viz::WCameraPosition cpw_frustum(Vec2f(0.889484f, 0.523599f)); // Camera frustum
-        myWindow.showWidget("CPW", cpw, cam_pose);
-        myWindow.showWidget("CPW_FRUSTUM", cpw_frustum, cam_pose);
-    }
+        myWindow.showWidget("CPW", viz::WCameraPosition(0.5), camera_pose);

     /// Visualize widget
     myWindow.showWidget("bunny", cloud_widget, cloud_pose_global);

     /// Set the viewer pose to that of camera
     if (camera_pov)
-        myWindow.setViewerPose(cam_pose);
+        myWindow.setViewerPose(camera_pose);

     /// Start event loop.
     myWindow.spin();
 }

-TEST(Viz, DISABLED_tutorial3_global_view)
+TEST(Viz, tutorial3_global_view)
 {
     tutorial3(false);
 }

-TEST(Viz, DISABLED_tutorial3_camera_view)
+TEST(Viz, tutorial3_camera_view)
 {
     tutorial3(true);
 }
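As a quick sanity check of the behaviour these re-enabled tests rely on, the following standalone sketch sets the viewer pose built by makeCameraPose and reads it back, which should now round-trip consistently. It assumes an OpenCV build with the viz module and a working display; the window name is made up.

// Minimal sketch (not part of the patch): setViewerPose/getViewerPose round
// trip with the corrected Y-axis handling. Assumes OpenCV built with viz.
#include <opencv2/viz.hpp>
#include <iostream>

int main()
{
    cv::viz::Viz3d window("viewer-pose-roundtrip"); // hypothetical window name

    // Same camera parameters as in the tutorial test.
    cv::Point3d cam_origin(3.0, 3.0, 3.0), cam_focal_point(3.0, 3.0, 2.0), cam_y_dir(-1.0, 0.0, 0.0);
    cv::Affine3d camera_pose = cv::viz::makeCameraPose(cam_origin, cam_focal_point, cam_y_dir);

    window.setViewerPose(camera_pose);
    cv::Affine3d recovered = window.getViewerPose();

    // With the fix, the recovered pose should match camera_pose up to
    // numerical precision.
    std::cout << "set pose:\n"       << cv::Mat(camera_pose.matrix) << std::endl;
    std::cout << "recovered pose:\n" << cv::Mat(recovered.matrix)   << std::endl;
    return 0;
}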
