Baxter Study Notes 6 - Controlling the Baxter Arm with Mouse Clicks (Hands-On)

The goal of this experiment is to capture mouse coordinates in the Kinect image: a left or right mouse click publishes the clicked point as a target, and Baxter then tracks that target point.

terminal 1 – start the joint trajectory action server:

in the folder containing baxter.sh
# check your local IP and update your_ip inside baxter.sh accordingly
./baxter.sh    # or run 'baxter' in the terminal -- see the alias in ~/.bashrc
rosrun baxter_tools enable_robot -e    # enable the robot
rosrun baxter_tools enable_robot -s    # query the robot state
rosrun baxter_interface joint_trajectory_action_server.py    # joint trajectory action server
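
For reference, the enable/status steps can also be done programmatically through the baxter_interface API; a minimal sketch (the node name is arbitrary):

import rospy
import baxter_interface

rospy.init_node('enable_baxter')        # arbitrary node name
rs = baxter_interface.RobotEnable()
rs.enable()                             # same effect as `enable_robot -e`
print(rs.state())                       # same effect as `enable_robot -s`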

terminal 2 – start the Kinect video stream and capture mouse clicks:

./baxter.sh 
rosrun kinect2_bridge kinect2_bridge
rosrun kinect2_viewer click_PublishPosition
Here, click_PublishPosition.cpp is built on kinect2_viewer's Viewer.cpp with the following changes:
1. Add an external function onMouse() that samples the mouse coordinates:
// record the latest mouse position and button event
// (mouseX, mouseY, mouseBtnType are shared with imageViewer())
void onMouse(int event, int x, int y, int flags, void* ustc){
//  std::cout << "onMouse: " << x << " " << y << std::endl;
    mouseX  = x;
    mouseY  = y;
    mouseBtnType = event;
}
2. Register it as the mouse callback in imageViewer(), so the stored mouse position is updated as the mouse moves:
cv::setMouseCallback(window_name, onMouse, nullptr);
3. The sampled mouse coordinates need to be published on topics so that the robot-control script can subscribe to them, so create the Publishers:
  // add the two click-point Publishers
  ros::Publisher leftBtnPointPub = nh.advertise<geometry_msgs::PointStamped>
                                      ("/kinect2/click_point/left", 1);
  ros::Publisher rightBtnPointPub = nh.advertise<geometry_msgs::PointStamped>
                                      ("/kinect2/click_point/right", 1);
4. Modify imageViewer(), the main color-image display loop, so that it publishes the 3-D point under the mouse:
void imageViewer()
  {
    cv::Mat color, depth, depthDisp, combined;
    std::chrono::time_point<std::chrono::high_resolution_clock> start, now;
    double fps = 0;
    size_t frameCount = 0;
    std::ostringstream oss;
    std::ostringstream ossXYZ;
    const cv::Point pos(5, 15);
    const cv::Scalar colorText = CV_RGB(255, 0, 0); // text color is red
    const double sizeText = 0.5;
    const int lineText = 1;
    const int font = cv::FONT_HERSHEY_SIMPLEX;
    const std::string window_name = "Image viewer";
    cv::namedWindow(window_name);
    cv::setMouseCallback(window_name, onMouse, nullptr);

    oss << "starting publish position...";

    // init the message
    // The Point is in the kinect frame
    geometry_msgs::PointStamped ptMsg;
    ptMsg.header.frame_id = "kinect_link";

    start = std::chrono::high_resolution_clock::now();
    for(; running && ros::ok();)
    {
      if(updateImage)
      {
        //read current image (lock current Kinect video)
        lock.lock();
        color = this->color;
        depth = this->depth;
        updateImage = false;
        lock.unlock();

        createCloud(depth, color, cloud);
        // the organized cloud keeps the image layout, so pixel (x, y)
        // maps to index y * depth.cols + x
        pcl::PointXYZRGBA target_pt;
        // publish the left/right button positions on the topics
        // /kinect2/click_point/left and /kinect2/click_point/right
        switch (mouseBtnType) {
        case cv::EVENT_LBUTTONUP:
            // left button released: publish the clicked point
            // read the current mouse position
            img_x = mouseX;
            img_y = mouseY;

            target_pt = cloud->points[img_y * depth.cols + img_x];

            std::cout << "The Target Point Position :  " << target_pt << std::endl;

            ptMsg.point.x = target_pt.x;
            ptMsg.point.y = target_pt.y;
            ptMsg.point.z = target_pt.z;
            ptMsg.header.stamp = ros::Time::now();
            leftBtnPointPub.publish(ptMsg);
            ros::spinOnce();
            break;
        case cv::EVENT_RBUTTONUP:
            // right button released: publish the clicked point
            // read the current mouse position
            img_x = mouseX;
            img_y = mouseY;

            target_pt = cloud->points[img_y * depth.cols + img_x];

            std::cout << "The Target Point Position :  " << target_pt << std::endl;

            ptMsg.point.x = target_pt.x;
            ptMsg.point.y = target_pt.y;
            ptMsg.point.z = target_pt.z;
            ptMsg.header.stamp = ros::Time::now();
            rightBtnPointPub.publish(ptMsg);
            ros::spinOnce();
            break;
        default:
            // no click: publish the basket center point
            // (basketPointPub and basket_center_w/h are assumed to be
            //  declared elsewhere, alongside the two publishers above)
            img_x = basket_center_w;
            img_y = basket_center_h;

            target_pt = cloud->points[img_y * depth.cols + img_x];

            std::cout << "The Target Point Position : " << target_pt << std::endl;

            ptMsg.point.x = target_pt.x;
            ptMsg.point.y = target_pt.y;
            ptMsg.point.z = target_pt.z;
            ptMsg.header.stamp = ros::Time::now();
            basketPointPub.publish(ptMsg);
            ros::spinOnce();
            break;
        }
        mouseBtnType = cv::EVENT_MOUSEMOVE;  // reset so each click is handled only once

        // calculate fps
        ++frameCount;
        now = std::chrono::high_resolution_clock::now();
        double elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(now - start).count() / 1000.0;
        if(elapsed >= 1.0)
        {
          fps = frameCount / elapsed;
          oss.str("");
          oss << "fps: " << fps << " ( " << elapsed / frameCount * 1000.0 << " ms)";
          start = now;
          frameCount = 0;
        }

        dispDepth(depth, depthDisp, 12000.0f);
        combine(color, depthDisp, combined);
        //combined = color;

        //show the fps
        cv::putText(color, oss.str(), pos, font, sizeText, colorText, lineText, CV_AA);

        //show the 3-D position next to the mouse
        ossXYZ.str("");
        ossXYZ << "( " << ptMsg.point.x << ", " << ptMsg.point.y << ", " << ptMsg.point.z << " )";
        cv::putText(color, ossXYZ.str(), cv::Point(img_x, img_y), font, 1, colorText, 3, CV_AA);
        cv::imshow(window_name, color);
      }

      int key = cv::waitKey(1);
      // press ESC or 'q' to exit
      switch(key & 0xFF)
      {
      case 27:
      case 'q':
        running = false;
        break;
      }
    }
    cv::destroyAllWindows();
    cv::waitKey(100);
  }
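
The key line above is the cloud lookup cloud->points[img_y * depth.cols + img_x]: createCloud() back-projects every registered depth pixel into a 3-D point in the Kinect frame, and the organized cloud keeps the image's row-major layout. A minimal sketch of that back-projection under the pinhole model (the intrinsics fx, fy, cx, cy below are placeholders; the real values come from the kinect2_bridge camera_info topics):

# pinhole back-projection: pixel (u, v) + depth -> 3-D point in the camera frame
# fx, fy, cx, cy are PLACEHOLDER intrinsics; read the real ones from camera_info
fx, fy = 1060.0, 1060.0
cx, cy = 960.0, 540.0

def pixel_to_point(u, v, z):
    """z is the depth in meters at pixel (u, v)."""
    x = (u - cx) * z / fx
    y = (v - cy) * z / fy
    return (x, y, z)

# the same point the C++ loop reads as cloud->points[v * depth.cols + u]
print(pixel_to_point(640, 360, 1.2))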

terminal 3 – start the MoveIt services:

in the folder containing baxter.sh
./baxter.sh
roslaunch baxter_moveit_config demo_baxter.launch    # with the MoveIt RViz GUI
# or, for motion planning only:
roslaunch baxter_moveit_config move_group.launch
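
With move_group.launch alone, motion requests have to come from code. What the control script in terminal 4 ultimately asks MoveIt to do can be sketched with moveit_commander; the 'left_arm' group name follows baxter_moveit_config, and the target pose values here are arbitrary:

import sys
import rospy
import moveit_commander
from geometry_msgs.msg import Pose

moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('moveit_sketch')

group = moveit_commander.MoveGroupCommander('left_arm')  # planning group name

target = Pose()
target.position.x, target.position.y, target.position.z = 0.6, 0.4, 0.2  # arbitrary target
target.orientation.w = 1.0

group.set_pose_target(target)
group.go(wait=True)          # plan and execute
group.clear_pose_targets()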

terminal 4 – start the control script:

./baxter.sh 
run the script:
rosrun baxter_moveit_config Track_mouse.py joint_states:=/robot/joint_states

Here, arms_moveit.py implements the Baxter control. Its main job is to subscribe to the topics published by click_PublishPosition.cpp (you can sanity-check them first with rostopic echo):

/kinect2/click_point/left
/kinect2/click_point/right

The callback functions are as follows:

left_arm_controller  = ArmsMoveIt(side = 'left_arm' )
right_arm_controller = ArmsMoveIt(side = 'right_arm')

def kinect_point_right_callback(point_stamp):
    print "\n-----------INTO: kinect_point_right_callback-----------"
    transform_matrix = right_arm_controller.transform_matrix
    kinect_point = np.array([point_stamp.point.x, point_stamp.point.y, point_stamp.point.z, 1])
    print "position in kinect frame : ", kinect_point[:3]
    # map the point into the robot frame with the homogeneous transform matrix
    robot_point = np.dot(transform_matrix, kinect_point)
    print "position in robot frame : ", robot_point[:3]
    target_position = right_arm_controller.get_target_arm_ee_pose(
                                                position_x = robot_point[0],
                                                position_y = robot_point[1],
                                                position_z = robot_point[2])
    right_arm_controller.move_arm_ee_to_target_pose(target_position)

def kinect_point_left_callback(point_stamp):
    print "\n-----------INTO: kinect_point_left_callback-----------"
    transform_matrix = left_arm_controller.transform_matrix
    kinect_point = np.array([point_stamp.point.x, point_stamp.point.y, point_stamp.point.z, 1])
    print "position in kinect frame : ", kinect_point[:3]
    # map the point into the robot frame with the homogeneous transform matrix
    robot_point = np.dot(transform_matrix, kinect_point)
    print "position in robot frame : ", robot_point[:3]
    target_position = left_arm_controller.get_target_arm_ee_pose(
                                                position_x = robot_point[0],
                                                position_y = robot_point[1],
                                                position_z = robot_point[2])
    left_arm_controller.move_arm_ee_to_target_pose(target_position)

def left_hand_camera_callback(img_msg):
    print "\n------------------------INTO: left_hand_camera_callback----------------------------"
    try:
        #detect()
        cv_image = CvBridge().imgmsg_to_cv2(img_msg, "bgr8")
    except CvBridgeError as e:
        print(e)
        return  # skip this frame if conversion failed
    cv2.imshow("left_hand_Image window", cv_image)
    cv2.waitKey(3)
if __name__ == '__main__':
    # node initialization is assumed to happen inside ArmsMoveIt's constructor;
    # otherwise call rospy.init_node() here before creating the subscribers
    kinect_point_sub = {}
    kinect_point_sub['right_arm'] = rospy.Subscriber( '/kinect2/click_point/right',
                                                      geometry_msgs.msg.PointStamped,
                                                      kinect_point_right_callback )
    kinect_point_sub['left_arm'] = rospy.Subscriber( '/kinect2/click_point/left',
                                                      geometry_msgs.msg.PointStamped,
                                                      kinect_point_left_callback )
    left_hand_image_sub = rospy.Subscriber( "image_raw", Image, left_hand_camera_callback )
    rospy.spin()

In the main function, rospy.spin() is all that is needed: the node simply blocks and waits for the mouse-click messages to trigger the callbacks.
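
The transform_matrix used in the callbacks is a 4x4 homogeneous transform from the Kinect frame to the robot base frame, obtained from a separate hand-eye calibration (not covered in this note). A minimal sketch of how such a matrix is assembled and applied, with made-up rotation and translation values:

import numpy as np

# PLACEHOLDER calibration result: rotation R and translation t of the
# Kinect frame expressed in the robot base frame
R = np.eye(3)                      # 3x3 rotation
t = np.array([1.0, 0.0, 0.5])      # translation in meters

T = np.eye(4)                      # assemble the 4x4 homogeneous transform
T[:3, :3] = R
T[:3, 3]  = t

# a clicked point in the Kinect frame, in homogeneous coordinates
kinect_point = np.array([0.1, -0.2, 1.3, 1.0])
robot_point = np.dot(T, kinect_point)   # same operation as in the callbacks
print(robot_point[:3])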
