Where is the interface for transferring the object position?

asked 2014-04-17 03:38:09 -0500 by Yantian_Zha

Hi all, I'm confused: I could not find the interface in pick_and_place_action_server.cpp (in the turtlebot_arm package) for receiving the object position that block_detection_action_server.cpp generates. I did find a subscribe call, pick_and_place_sub_ = nh_.subscribe(goal_->topic, 1, &PickAndPlaceServer::sendGoalFromTopic, this), but I don't understand what goal_ is used for there. Please give me a hand, thanks a lot!
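To check my understanding, here is a minimal sketch (my own placeholder code, not from the package) of the pattern I think is being used: the action goal carries a topic name, the node subscribes to that topic, and the object positions then arrive as a geometry_msgs/PoseArray in the callback. The class name PoseReceiver and the callback posesCb are made up by me:

#include <ros/ros.h>
#include <geometry_msgs/PoseArray.h>

class PoseReceiver
{
public:
  // In the real server the topic string would come from the action goal
  // (goal_->topic); here it is just a constructor argument.
  PoseReceiver(ros::NodeHandle& nh, const std::string& topic)
  {
    sub_ = nh.subscribe(topic, 1, &PoseReceiver::posesCb, this);
  }

private:
  void posesCb(const geometry_msgs::PoseArrayConstPtr& msg)
  {
    // This is where the object positions actually arrive.
    if (msg->poses.empty())
      return;
    ROS_INFO("First block at x=%.3f y=%.3f z=%.3f",
             msg->poses[0].position.x,
             msg->poses[0].position.y,
             msg->poses[0].position.z);
  }

  ros::Subscriber sub_;
};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "pose_receiver_sketch");
  ros::NodeHandle nh;
  // "/turtlebot_blocks" is the PoseArray topic advertised in the code below.
  PoseReceiver receiver(nh, "/turtlebot_blocks");
  ros::spin();
  return 0;
}

If that is roughly what sendGoalFromTopic does, then my remaining question is how goal_->topic ends up pointing at the detection output topic.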

Here is the code of block_detection_action_server.cpp:

#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <actionlib/server/simple_action_server.h>
#include <turtlebot_block_manipulation/BlockDetectionAction.h>

#include <tf/transform_listener.h>

#include <pcl/ros/conversions.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/filters/passthrough.h>
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/segmentation/extract_clusters.h>

#include <pcl_ros/point_cloud.h>
#include <pcl_ros/transforms.h>

#include <cmath>
#include <algorithm>

namespace turtlebot_block_manipulation {

class BlockDetectionServer
{
private:

ros::NodeHandle nh_;
actionlib::SimpleActionServer<turtlebot_block_manipulation::BlockDetectionAction> as_;
std::string action_name_;
turtlebot_block_manipulation::BlockDetectionFeedback feedback_;
turtlebot_block_manipulation::BlockDetectionResult result_;
turtlebot_block_manipulation::BlockDetectionGoalConstPtr goal_;
ros::Subscriber sub_;
ros::Publisher pub_;

tf::TransformListener tf_listener_;

// Parameters from goal
std::string arm_link;
double block_size;
double table_height;

ros::Publisher block_pub_;

// Parameters from node

public:
BlockDetectionServer(const std::string name) :
  nh_("~"), as_(name, false), action_name_(name)
{
// Load parameters from the server.

// Register the goal and feedback callbacks.
as_.registerGoalCallback(boost::bind(&BlockDetectionServer::goalCB, this));
as_.registerPreemptCallback(boost::bind(&BlockDetectionServer::preemptCB, this));

as_.start();

// Subscribe to point cloud
sub_ = nh_.subscribe("/camera/depth_registered/points", 1, &BlockDetectionServer::cloudCb, this);
pub_ = nh_.advertise< pcl::PointCloud<pcl::PointXYZRGB> >("block_output", 1);

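// Latched publisher for the detected block poses (geometry_msgs/PoseArray)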
block_pub_ = nh_.advertise< geometry_msgs::PoseArray >("/turtlebot_blocks", 1, true);

}

void goalCB()
{
ROS_INFO("[block detection] Received goal!");
// accept the new goal
result_.blocks.poses.clear();

goal_ = as_.acceptNewGoal();

block_size = goal_->block_size;
table_height = goal_->table_height;
arm_link = goal_->frame;

result_.blocks.header.frame_id = arm_link;

}

void preemptCB()
{
ROS_INFO("%s: Preempted", action_name_.c_str());
// set the action state to preempted
as_.setPreempted();
}

void cloudCb(const sensor_msgs::PointCloud2ConstPtr& msg)
{
// Only do this if we're actually actively working on a goal.
if (!as_.isActive()) return;

result_.blocks.header.stamp = msg->header.stamp;

// convert to PCL
pcl::PointCloud<pcl::PointXYZRGB> cloud;
pcl::fromROSMsg (*msg, cloud);

// transform to whatever frame we're working in, probably the arm frame.
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_transformed (new pcl::PointCloud<pcl::PointXYZRGB>);

tf_listener_.waitForTransform(std::string(arm_link), cloud.header.frame_id, cloud.header.stamp, ros::Duration(1.0));
if (!pcl_ros::transformPointCloud (std::string(arm_link), cloud, *cloud_transformed, tf_listener_))
{
  ROS_ERROR ("Error converting to desired frame");
  return;
}

// Create the segmentation object for the planar model and set all the parameters
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZRGB>);
pcl::SACSegmentation<pcl::PointXYZRGB> seg;
pcl::PointIndices::Ptr inliers (new pcl::PointIndices);
pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients);
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_plane (new pcl::PointCloud<pcl::PointXYZRGB> ());
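// Fit the dominant plane (the table) with RANSAC: up to 200 iterations, 5 mm inlier distance threshold.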
seg.setOptimizeCoefficients (true);
seg.setModelType (pcl::SACMODEL_PLANE);
seg.setMethodType (pcl::SAC_RANSAC);
seg.setMaxIterations (200);
seg.setDistanceThreshold (0.005);

// Limit to things we think ...
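The paste is cut off here, but based on the members declared above I would guess the rest of cloudCb ends by filling result_.blocks.poses with one pose per detected block and then handing them out roughly like this (my guess, not the verbatim source):

// publish the detected poses on the latched topic and finish the action goal
block_pub_.publish(result_.blocks);
as_.setSucceeded(result_);

Is that PoseArray (or the action result) what the pick-and-place server is supposed to consume?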