Hi,
Please find attached a patch for the roboearth packages: they were not
compatible with the latest version of the Kinect drivers (wrong topics,
frames, etc.).
There's also a fix for the points3d field being left empty when an
object is detected.
I hope this helps.
Cheers,
Ugo
--
Ugo Cupcic | Shadow Robot Company |
ugo@shadowrobot.com
Software Engineer | 251 Liverpool Road |
need a Hand? | London N1 1LX | +44 20 7700 2487
http://www.shadowrobot.com/hand/ @shadowrobot
Index: object_scanning/ar_bounding_box/include/ar_kinect/ar_kinect.h
===================================================================
--- object_scanning/ar_bounding_box/include/ar_kinect/ar_kinect.h (revision 1390)
+++ object_scanning/ar_bounding_box/include/ar_kinect/ar_kinect.h (working copy)
@@ -63,15 +63,15 @@
#define DEBUG_AR_KINECT
/// Kinect camera origin tf frame
-#define ORIGIN "/openni_rgb_optical_frame"
+#define ORIGIN "/camera_rgb_optical_frame"
/// object center tf frame
#define TARGET "/PATTERN_BASE"
#define DBG(str) //str
-const std::string cameraImageTopic_ = "/camera/rgb/image_color";
-const std::string cameraInfoTopic_ = "/camera/rgb/camera_info";
-const std::string cloudTopic_ = "/camera/rgb/points";
+const std::string cameraImageTopic_ = "/camera/depth_registered/image";
+const std::string cameraInfoTopic_ = "/camera/depth_registered/camera_info";
+const std::string cloudTopic_ = "/camera/depth_registered/points";
class CoordinateFrame;
@@ -104,7 +104,7 @@
tf::TransformBroadcaster broadcaster_;
ros::Subscriber sub_;
image_transport::Subscriber cam_sub_;
ros::Subscriber cloud_sub_;
ros::Publisher arMarkerPub_;
#ifdef DEBUG_AR_KINECT
ros::Publisher kinect_pclPub_;
Index: object_scanning/ar_bounding_box/src/ar_kinect.cpp
===================================================================
--- object_scanning/ar_bounding_box/src/ar_kinect.cpp (revision 1390)
+++ object_scanning/ar_bounding_box/src/ar_kinect.cpp (working copy)
@@ -684,24 +684,24 @@
}
float distthreshold = .002;
double d01 = Utils3D::distPoints(&ps[0],&ps[1]);
if ( fabs(d01 - distances(cv::Point(idxs[0],idxs[1])) ) > distthreshold) {
destroyresults = true;
- cerr << "distance 0/1 too large: " << d01 <<endl;
+ cerr << "distance 0/1 too large: " << fabs(d01 - distances(cv::Point(idxs[0],idxs[1])) ) <<endl;
}
double d12 = Utils3D::distPoints(&ps[1],&ps[2]);
if ( fabs(d12 - distances(cv::Point(idxs[1],idxs[2])) ) > distthreshold) {
destroyresults = true;
- cerr << "distance 1/2 too large: " << d12 <<endl;
+ cerr << "distance 1/2 too large: " << fabs(d12 - distances(cv::Point(idxs[1],idxs[2])) ) <<endl;
}
double d02 = Utils3D::distPoints(&ps[0],&ps[2]);
if ( fabs(d02 - distances(cv::Point(idxs[0],idxs[2])) ) > distthreshold) {
destroyresults = true;
- cerr << "distance 2/0 too large: " << d02 <<endl;
+ cerr << "distance 2/0 too large: " << fabs(d02 - distances(cv::Point(idxs[0],idxs[2])) ) <<endl;
}
// check marker plausibility
Index: object_scanning/re_object_recorder/src/comthread.cpp
===================================================================
--- object_scanning/re_object_recorder/src/comthread.cpp (revision 1390)
+++ object_scanning/re_object_recorder/src/comthread.cpp (working copy)
@@ -113,7 +113,7 @@
origin.header.stamp = pcl_msg->header.stamp;
origin.pose.orientation.w = 1.f;
try {
- tf.transformPose("/openni_rgb_frame", origin, markerToRGBCamera);
+ tf.transformPose("/camera_rgb_optical_frame", origin, markerToRGBCamera);
cloud->sensor_origin_ = Eigen::Vector4f(markerToRGBCamera.pose.position.x,
markerToRGBCamera.pose.position.y,
Index: object_scanning/re_kinect_object_detector/src/slam_main.cpp
===================================================================
--- object_scanning/re_kinect_object_detector/src/slam_main.cpp (revision 1390)
+++ object_scanning/re_kinect_object_detector/src/slam_main.cpp (working copy)
@@ -47,6 +47,8 @@
#include <std_msgs/String.h>
#include <tf/transform_broadcaster.h>
+#include <geometry_msgs/Point.h>
+
#include <cv_bridge/cv_bridge.h>
#include "re_kinect_object_detector/DetectionResult.h"
@@ -73,7 +75,7 @@
public:
ROSCom() {
ros::NodeHandle nh;
- kinect_sub = nh.subscribe<sensor_msgs::PointCloud2>("camera/rgb/points",1,
+ kinect_sub = nh.subscribe<sensor_msgs::PointCloud2>("camera/depth_registered/points",1,
boost::bind(&ROSCom::kinect_cb,this,_1));
features_pub = nh.advertise<sensor_msgs::PointCloud2>("re_kinect/feature_pcl",10);
@@ -166,6 +168,13 @@
px.x = x;
px.y = y;
detectedObjMsg.points2d.push_back(px);
+
+ geometry_msgs::Point threeD_pt;
+ threeD_pt.x = pt.x;
+ threeD_pt.y = pt.y;
+ threeD_pt.z = pt.z;
+
+ detectedObjMsg.points3d.push_back(threeD_pt);
}
}
tf::Transform object_tf = tfFromEigen(t);