ROS turtlebot_follower: Making the Robot Follow Us
Notes on ROS turtlebot_follower
First, download the source code into the catkin_ws/src directory from https://github.com/turtlebot/turtlebot_apps.git
The code is explained through the comments below (there are a few places I do not fully understand either).
follower.cpp
#include <ros/ros.h>
#include <pluginlib/class_list_macros.h>
#include <nodelet/nodelet.h>
#include <geometry_msgs/Twist.h>
#include <sensor_msgs/Image.h>
#include <visualization_msgs/Marker.h>
#include <turtlebot_msgs/SetFollowState.h>
#include "dynamic_reconfigure/server.h"
#include "turtlebot_follower/FollowerConfig.h"
#include <depth_image_proc/depth_traits.h>
namespace turtlebot_follower
{
//* The turtlebot follower nodelet.
/**
* The turtlebot follower nodelet. Subscribes to point clouds
* from the 3dsensor, processes them, and publishes command vel
* messages.
*/
class TurtlebotFollower : public nodelet::Nodelet
{
public:
/*!
* @brief The constructor for the follower.
* Constructor for the follower.
*/
TurtlebotFollower() : min_y_(0.1), max_y_(0.5),
min_x_(-0.2), max_x_(0.2),
max_z_(0.8), goal_z_(0.6),
z_scale_(1.0), x_scale_(5.0)
{
}
~TurtlebotFollower()
{
delete config_srv_;
}
private:
double min_y_; /**< The minimum y position of the points in the box. */
double max_y_; /**< The maximum y position of the points in the box. */
double min_x_; /**< The minimum x position of the points in the box. */
double max_x_; /**< The maximum x position of the points in the box. */
double max_z_; /**< The maximum z position of the points in the box. Together with the four limits above, this defines the size of the detection box. */
double goal_z_; /**< The distance from the robot at which to hold the centroid. */
double z_scale_; /**< The scaling factor for translational robot speed. */
double x_scale_; /**< The scaling factor for rotational robot speed. */
bool enabled_; /**< Enable/disable following; this only suppresses motor commands. When false the robot does not move and nothing is published on the /mobile_base/mobile_base_controller/cmd_vel topic. */
// Service for start/stop following
ros::ServiceServer switch_srv_;
// Dynamic reconfigure server
dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>* config_srv_;
/*!
* @brief OnInit method from node handle.
* OnInit method from node handle. Sets up the parameters
* and topics.
* Initializes the node handles, parameters, and topics.
*/
virtual void onInit()
{
ros::NodeHandle& nh = getNodeHandle();
ros::NodeHandle& private_nh = getPrivateNodeHandle();
//Read the parameters from the parameter server (values are set in the launch file)
private_nh.getParam("min_y", min_y_);
private_nh.getParam("max_y", max_y_);
private_nh.getParam("min_x", min_x_);
private_nh.getParam("max_x", max_x_);
private_nh.getParam("max_z", max_z_);
private_nh.getParam("goal_z", goal_z_);
private_nh.getParam("z_scale", z_scale_);
private_nh.getParam("x_scale", x_scale_);
private_nh.getParam("enabled", enabled_);
//Advertise the topic that drives the robot: /mobile_base/mobile_base_controller/cmd_vel (replace it with your own robot's velocity topic)
cmdpub_ = private_nh.advertise<geometry_msgs::Twist> ("/mobile_base/mobile_base_controller/cmd_vel", 1);
markerpub_ = private_nh.advertise<visualization_msgs::Marker>("marker",1);
bboxpub_ = private_nh.advertise<visualization_msgs::Marker>("bbox",1);
sub_= nh.subscribe<sensor_msgs::Image>("depth/image_rect", 1, &TurtlebotFollower::imagecb, this);
switch_srv_ = private_nh.advertiseService("change_state", &TurtlebotFollower::changeModeSrvCb, this);
config_srv_ = new dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>(private_nh);
dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>::CallbackType f =
boost::bind(&TurtlebotFollower::reconfigure, this, _1, _2);
config_srv_->setCallback(f);
}
//Apply the reconfigured values; the generated defaults live in catkin_ws/devel/include/turtlebot_follower/FollowerConfig.h
void reconfigure(turtlebot_follower::FollowerConfig &config, uint32_t level)
{
min_y_ = config.min_y;
max_y_ = config.max_y;
min_x_ = config.min_x;
max_x_ = config.max_x;
max_z_ = config.max_z;
goal_z_ = config.goal_z;
z_scale_ = config.z_scale;
x_scale_ = config.x_scale;
}
/*!
* @brief Callback for depth images.
* Callback for depth images. It finds the centroid
* of the points in a box in the center of the image.
* Publishes cmd_vel messages with the goal derived from the image.
* @param depth_msg The depth image message.
*/
void imagecb(const sensor_msgs::ImageConstPtr& depth_msg)
{
// Precompute the sine for each row and column
uint32_t image_width = depth_msg->width;
ROS_INFO_THROTTLE(1, "image_width=%d", image_width);
float x_radians_per_pixel = 60.0/57.0/image_width;//radians per pixel (~60 deg horizontal field of view converted to radians)
float sin_pixel_x[image_width];
for (int x = 0; x < image_width; ++x) {
//sine of each column's angular offset from the image center
sin_pixel_x[x] = sin((x - image_width/ 2.0) * x_radians_per_pixel);
}
uint32_t image_height = depth_msg->height;
float y_radians_per_pixel = 45.0/57.0/image_width;
float sin_pixel_y[image_height];
for (int y = 0; y < image_height; ++y) {
// Sign opposite x for y up values
sin_pixel_y[y] = sin((image_height/ 2.0 - y) * y_radians_per_pixel);
}
//X,Y,Z of the centroid
float x = 0.0;
float y = 0.0;
float z = 1e6;
//Number of points observed
unsigned int n = 0;
//Iterate through all the points in the region and find the average of the position
const float* depth_row = reinterpret_cast<const float*>(&depth_msg->data[0]);
int row_step = depth_msg->step / sizeof(float);
for (int v = 0; v < (int)depth_msg->height; ++v, depth_row += row_step)
{
for (int u = 0; u < (int)depth_msg->width; ++u)
{
float depth = depth_image_proc::DepthTraits<float>::toMeters(depth_row[u]);
if (!depth_image_proc::DepthTraits<float>::valid(depth) || depth > max_z_) continue;//skip invalid depth values and points farther away than max_z_
float y_val = sin_pixel_y[v] * depth;
float x_val = sin_pixel_x[u] * depth;
if ( y_val > min_y_ && y_val < max_y_ &&
x_val > min_x_ && x_val < max_x_)
{
x += x_val;
y += y_val;
z = std::min(z, depth); //approximate depth as forward.
n++;
}
}
}
//If there are points, find the centroid and calculate the command goal.
//If there are no points, simply publish a stop goal.
ROS_INFO_THROTTLE(1, " n ==%d",n);
if (n>4000) // only act when enough valid points fall inside the box
{
x /= n;
y /= n;
if(z > max_z_){
ROS_INFO_THROTTLE(1, "Centroid too far away %f, stopping the robot\n", z);
if (enabled_)
{
cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
}
return;
}
ROS_INFO_THROTTLE(1, "Centroid at %f %f %f with %d points", x, y, z, n);
publishMarker(x, y, z);
if (enabled_)
{
geometry_msgs::TwistPtr cmd(new geometry_msgs::Twist());
cmd->linear.x = (z - goal_z_) * z_scale_;
cmd->angular.z = -x * x_scale_;
cmdpub_.publish(cmd);
}
}
else
{
ROS_INFO_THROTTLE(1, "Not enough points(%d) detected, stopping the robot", n);
publishMarker(x, y, z);
if (enabled_)
{
cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
}
}
publishBbox();
}
bool changeModeSrvCb(turtlebot_msgs::SetFollowState::Request& request,
turtlebot_msgs::SetFollowState::Response& response)
{
if ((enabled_ == true) && (request.state == request.STOPPED))
{
ROS_INFO("Change mode service request: following stopped");
cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
enabled_ = false;
}
else if ((enabled_ == false) && (request.state == request.FOLLOW))
{
ROS_INFO("Change mode service request: following (re)started");
enabled_ = true;
}
response.result = response.OK;
return true;
}
//Draw a sphere marker representing the centroid
void publishMarker(double x,double y,double z)
{
visualization_msgs::Marker marker;
marker.header.frame_id = "/camera_rgb_optical_frame";
marker.header.stamp = ros::Time();
marker.ns = "my_namespace";
marker.id = 0;
marker.type = visualization_msgs::Marker::SPHERE;
marker.action = visualization_msgs::Marker::ADD;
marker.pose.position.x = x;
marker.pose.position.y = y;
marker.pose.position.z = z;
marker.pose.orientation.x = 0.0;
marker.pose.orientation.y = 0.0;
marker.pose.orientation.z = 0.0;
marker.pose.orientation.w = 1.0;
marker.scale.x = 0.1;
marker.scale.y = 0.1;
marker.scale.z = 0.1;
marker.color.a = 1.0;
marker.color.r = 1.0;
marker.color.g = 0.0;
marker.color.b = 0.0;
//only if using a MESH_RESOURCE marker type:
markerpub_.publish( marker );
}
//Draw a cube marker representing the region of interest (ROI)
void publishBbox()
{
double x = (min_x_ + max_x_)/2;
double y = (min_y_ + max_y_)/2;
double z = (0 + max_z_)/2;
double scale_x = (max_x_ - x)*2;
double scale_y = (max_y_ - y)*2;
double scale_z = (max_z_ - z)*2;
visualization_msgs::Marker marker;
marker.header.frame_id = "/camera_rgb_optical_frame";
marker.header.stamp = ros::Time();
marker.ns = "my_namespace";
marker.id = 1;
marker.type = visualization_msgs::Marker::CUBE;
marker.action = visualization_msgs::Marker::ADD;
//Set the marker pose
marker.pose.position.x = x;
marker.pose.position.y = -y;
marker.pose.position.z = z;
marker.pose.orientation.x = 0.0;
marker.pose.orientation.y = 0.0;
marker.pose.orientation.z = 0.0;
marker.pose.orientation.w = 1.0;
//Set the marker dimensions
marker.scale.x = scale_x;
marker.scale.y = scale_y;
marker.scale.z = scale_z;
marker.color.a = 0.5;
marker.color.r = 0.0;
marker.color.g = 1.0;
marker.color.b = 0.0;
//only if using a MESH_RESOURCE marker type:
bboxpub_.publish( marker );
}
ros::Subscriber sub_;
ros::Publisher cmdpub_;
ros::Publisher markerpub_;
ros::Publisher bboxpub_;
};
PLUGINLIB_DECLARE_CLASS(turtlebot_follower, TurtlebotFollower, turtlebot_follower::TurtlebotFollower, nodelet::Nodelet);
}
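The change_state service advertised in onInit() lets other nodes pause and resume following without restarting the nodelet (the switch.py node in the launch file below does the same thing from a joystick button). Below is a minimal sketch of a standalone client, assuming the nodelet is loaded under the name turtlebot_follower as in the launch file, so the private service resolves to /turtlebot_follower/change_state.
// Minimal sketch of a client for the follower's change_state service.
// Assumes the nodelet name is "turtlebot_follower", so the private service
// resolves to /turtlebot_follower/change_state.
#include <ros/ros.h>
#include <turtlebot_msgs/SetFollowState.h>
int main(int argc, char** argv)
{
  ros::init(argc, argv, "follower_switch_client");
  ros::NodeHandle nh;
  ros::ServiceClient client =
      nh.serviceClient<turtlebot_msgs::SetFollowState>("/turtlebot_follower/change_state");
  turtlebot_msgs::SetFollowState srv;
  // Request::FOLLOW resumes following, Request::STOPPED pauses it.
  srv.request.state = turtlebot_msgs::SetFollowState::Request::STOPPED;
  if (client.call(srv) &&
      srv.response.result == turtlebot_msgs::SetFollowState::Response::OK)
    ROS_INFO("Follower state changed");
  else
    ROS_WARN("Failed to change follower state");
  return 0;
}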
Next, look at the launch file, follower.launch.
Before modifying it, comment out the original lines rather than deleting them.
<!--
The turtlebot people (or whatever) follower nodelet.
-->
<launch>
<arg name="simulation" default="false"/>
<group unless="$(arg simulation)"> <!-- Real robot -->
<include file="$(find turtlebot_follower)/launch/includes/velocity_smoother.launch.xml">
<arg name="nodelet_manager" value="/mobile_base_nodelet_manager"/>
<arg name="navigation_topic" value="/cmd_vel_mux/input/navi"/>
</include>
<!--modified 2016.11.07: bring up my robot and camera; replace these with your own robot and camera launch files-->
<include file="$(find handsfree_hw)/launch/handsfree_hw.launch">
</include>
<include file="$(find handsfree_bringup)/launch/xtion_fake_laser_openni2.launch">
</include>
<!-- Original 3dsensor include, commented out:
<include file="$(find turtlebot_bringup)/launch/3dsensor.launch">
<arg name="rgb_processing" value="true"/>
<arg name="depth_processing" value="true"/>
<arg name="depth_registered_processing" value="false"/>
<arg name="depth_registration" value="false"/>
<arg name="disparity_processing" value="false"/>
<arg name="disparity_registered_processing" value="false"/>
<arg name="scan_processing" value="false"/>
</include>-->
<!--modify end -->
</group>
<group if="$(arg simulation)">
<!-- Load nodelet manager for compatibility -->
<node pkg="nodelet" type="nodelet" ns="camera" name="camera_nodelet_manager" args="manager"/>
<include file="$(find turtlebot_follower)/launch/includes/velocity_smoother.launch.xml">
<arg name="nodelet_manager" value="camera/camera_nodelet_manager"/>
<arg name="navigation_topic" value="cmd_vel_mux/input/navi"/>
</include>
</group>
<param name="camera/rgb/image_color/compressed/jpeg_quality" value="22"/>
<!-- Make a slower camera feed available; only required if we use android client -->
<node pkg="topic_tools" type="throttle" name="camera_throttle"
args="messages camera/rgb/image_color/compressed 5"/>
<include file="$(find turtlebot_follower)/launch/includes/safety_controller.launch.xml"/>
<!-- Real robot: Load turtlebot follower into the 3d sensors nodelet manager to avoid pointcloud serializing -->
<!-- Simulation: Load turtlebot follower into nodelet manager for compatibility -->
<node pkg="nodelet" type="nodelet" name="turtlebot_follower"
args="load turtlebot_follower/TurtlebotFollower camera/camera_nodelet_manager">
<!--remap to your robot's velocity topic; mine is /mobile_base/mobile_base_controller/cmd_vel-->
<remap from="turtlebot_follower/cmd_vel" to="/mobile_base/mobile_base_controller/cmd_vel"/>
<remap from="depth/points" to="camera/depth/points"/>
<param name="enabled" value="true" />
<!--<param name="x_scale" value="http://www.mamicode.com/7.0" />-->
<!--<param name="z_scale" value="http://www.mamicode.com/2.0" />
<param name="min_x" value="http://www.mamicode.com/-0.35" />
<param name="max_x" value="http://www.mamicode.com/0.35" />
<param name="min_y" value="http://www.mamicode.com/0.1" />
<param name="max_y" value="http://www.mamicode.com/0.6" />
<param name="max_z" value="http://www.mamicode.com/1.2" />
<param name="goal_z" value="http://www.mamicode.com/0.6" />-->
<!-- test values; the parameters can be tuned here -->
<param name="x_scale" value="1.5"/>
<param name="z_scale" value="1.0" />
<param name="min_x" value="-0.35" />
<param name="max_x" value="0.35" />
<param name="min_y" value="0.1" />
<param name="max_y" value="0.5" />
<param name="max_z" value="1.5" />
<param name="goal_z" value="0.6" />
</node>
<!-- Launch the script which will toggle turtlebot following on and off based on a joystick button. default: on -->
<node name="switch" pkg="turtlebot_follower" type="switch.py"/>
<!--modified 2016.11.07: create a follow.rviz file under turtlebot_follower and load it in rviz; at this point the rviz config is still empty-->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find turtlebot_follower)/follow.rviz"/>
<!--modify end -->
</launch>
Build the workspace and run follower.launch; it brings up the robot, the camera, and rviz, so this single launch file is all you need.
In rviz, add two Marker displays, a PointCloud, and a Camera display, as shown in the figure:
The topic and frame names must match those in the code: the markers are published on /turtlebot_follower/marker and /turtlebot_follower/bbox, and the fixed frame is /camera_rgb_optical_frame.
After adding them, rviz shows the following:
The red dot represents the centroid, and the green box represents the region of interest.
When the red dot lands on us, the robot follows our movement. Note: walk slowly, and tune the robot's movement speed down as well.
When there is no red dot in the region of interest, the robot stops following until one appears again.
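To slow the robot down, reduce z_scale and x_scale in the launch file. The follower is a simple proportional controller: it publishes linear.x = (z - goal_z) * z_scale and angular.z = -x * x_scale, where (x, z) is the centroid position in the camera frame. As a worked example with the values set above (z_scale = 1.0, x_scale = 1.5, goal_z = 0.6): a centroid 1.2 m ahead and 0.1 m to the side gives linear.x = (1.2 - 0.6) * 1.0 = 0.6 m/s and angular.z = -0.1 * 1.5 = -0.15 rad/s, so the robot drives forward while turning toward the centroid; once the centroid sits at z = goal_z = 0.6 m, the linear command drops to zero and the robot holds its distance.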