This is implemented with Qt5, FFmpeg 6.0, OpenCV and ROS 2. Qt is not strictly necessary; I just use it out of habit.
The steps are:
1. Read the RTSP stream and convert each decoded frame into a cv::Mat image.
2. Publish the Mat image as a ROS 2 topic.
The project structure is as follows:
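A rough package layout, pieced together from the CMakeLists.txt sources and the config path used in readconfig() (the exact placement of conf/config.ini is an assumption):

video_topic/
  CMakeLists.txt
  package.xml
  conf/config.ini          (RTSP address, read by readconfig())
  src/main.cpp
  src/widget.h, src/widget.cpp, src/widget.ui
  src/videoplayer.h, src/videoplayer.cpp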
videoplayer.h
#ifndef VIDEOPLAYER_H
#define VIDEOPLAYER_H
#include <QThread>
#include <QImage>
class VlcInstance;
class VlcMedia;
class VlcMediaPlayer;
class VideoPlayer : public QThread
{
Q_OBJECT
public:
explicit VideoPlayer();
~VideoPlayer();
void startPlay();
void stopPlay();
signals:
void sig_GetOneFrame(QImage); //emitted each time a frame is decoded
void sig_GetRFrame(QImage);
//void SigFinished(void);
protected:
void run();
private:
QString mFileName;
VlcInstance *_instance; //VLC members are declared but not used in this FFmpeg-based implementation
VlcMedia *_media;
VlcMediaPlayer *_player;
std::string rtspaddr;
bool mStopFlag; //flag that tells the decode thread to exit
public slots:
void setrtsp(std::string addr);
};
#endif // VIDEOPLAYER_H
videoplayer.cpp
#include "videoplayer.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}
#include <cstdio>
#include <string>
using namespace std;
VideoPlayer::VideoPlayer()
{
rtspaddr="rtsp://admin:[email protected]:554/stream1";
}
VideoPlayer::~VideoPlayer()
{
}
void VideoPlayer::setrtsp(std::string addr){
rtspaddr=addr;
}
void VideoPlayer::startPlay()
{
///Calling QThread::start() runs the run() function below in a new thread
this->start();
}
void VideoPlayer::stopPlay(){
mStopFlag= true;
}
void VideoPlayer::run()
{
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
const AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
AVPacket *packet;
uint8_t *out_buffer;
static struct SwsContext *img_convert_ctx;
int videoStream, i, numBytes;
int ret, got_picture;
avformat_network_init();
//Allocate an AVFormatContext.
pFormatCtx = avformat_alloc_context();
AVDictionary *avdic=NULL;
/*
char option_key[]="rtsp_transport";
char option_value[]="tcp";
av_dict_set(&avdic,option_key,option_value,0);
char option_key2[]="max_delay";
char option_value2[]="100";
av_dict_set(&avdic,option_key2,option_value2,0);*/
av_dict_set(&avdic, "buffer_size", "1024000", 0); //设置最大缓存,1080可调到最大
av_dict_set(&avdic, "rtsp_transport", "udp", 0); //以tcp的方式传送
av_dict_set(&avdic, "stimeout", "5000000", 0); //设置超时断开链接时间,单位us
av_dict_set(&avdic, "max_delay", "500000", 0); //设置最大时延
av_dict_set(&avdic, "framerate", "5", 0);
//av_dict_set(&avdic, "video_size","640x40",0);
/*
AVDictionary* options = NULL;
av_dict_set(&options, "buffer_size", "1024000", 0); //receive buffer size
av_dict_set(&options, "rtsp_transport", "udp", 0); //transport RTSP over UDP
av_dict_set(&options, "stimeout", "5000000", 0); //socket timeout in microseconds
av_dict_set(&options, "max_delay", "500000", 0); //maximum demuxer delay
av_dict_set(&options, "framerate", "20", 0);*/
///RTSP address; adjust to your setup
/// rtsp://127.0.0.1:8554/stream
/// rtsp://admin:[email protected]:554/stream1
//char * tmp=(char*)rtspaddr.data();
//char url[50];
//strcpy(url, tmp);
//char url[] ="rtsp://admin:[email protected]:554/stream1";
char url[100];
snprintf(url, sizeof(url), "%s", rtspaddr.c_str());
///open the RTSP input and read the stream information
if (avformat_open_input(&pFormatCtx, url, NULL, &avdic) != 0) { printf("can't open the rtsp stream.\n"); return; }
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { printf("Couldn't find stream information.\n"); return; }
///loop over the streams until the video stream is found
videoStream = -1;
for (i = 0; i < (int)pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStream = i;
break;
}
}
///videoStream == -1 means no video stream was found
if (videoStream == -1) {
printf("Didn't find a video stream.\n");
return;
}
printf("nb_stream:%d videoStream:%d\n",pFormatCtx->nb_streams,videoStream);
pCodec = avcodec_find_decoder(pFormatCtx->streams[videoStream]->codecpar->codec_id);
if (pCodec == NULL) {
printf("Codec not found.\n");
return;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
//printf("pCodecCtx->frame_number:%d\n", pCodecCtx->frame_number);
//printf("pCodecCtx->time_base.num:%d\n", pCodecCtx->time_base.num);
//printf("pCodecCtx->time_base.den:%d\n", pCodecCtx->time_base.den);
//printf("pCodecCtx->bit_rate:%d\n", pCodecCtx->bit_rate);
//printf("pCodecCtx->framerate:%d\n", pCodecCtx->framerate);
// pCodecCtx->bit_rate =0; //initialise to 0
// pCodecCtx->time_base.num=1; //the next two lines set the frame rate
// pCodecCtx->time_base.den=10;
// pCodecCtx->frame_number=1; //one video frame per packet
///open the decoder
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
printf("Could not open codec.\n");
return;
}
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
///convert the decoded YUV data to RGBA here
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
AV_PIX_FMT_RGBA, SWS_BICUBIC, NULL, NULL, NULL);
numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width,pCodecCtx->height,1);
out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(
pFrameRGB->data,
pFrameRGB->linesize,
out_buffer,
AV_PIX_FMT_RGBA,
pCodecCtx->width,
pCodecCtx->height,
1
);
packet = av_packet_alloc(); //allocate the packet that av_read_frame will fill
mStopFlag = false;
while (!mStopFlag)
{
if (av_read_frame(pFormatCtx, packet) < 0)
{
continue; //read error; for a live stream just keep trying
}
if (packet->stream_index == videoStream) {
ret = avcodec_send_packet(pCodecCtx,packet);
if( 0 != ret){
continue;
}
while (avcodec_receive_frame(pCodecCtx,pFrame) == 0){
sws_scale(img_convert_ctx,
(uint8_t const * const *) pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,
pFrameRGB->linesize);
//wrap the RGBA buffer in a QImage
QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGBA8888);
//QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB888);
QImage image = tmpImg.copy(); //deep-copy the frame before handing it to the UI
emit sig_GetOneFrame(image); //emit the frame to the widget
/*
printf("pCodecCtx->width:%d\n", pCodecCtx->width);
printf("pCodecCtx->height:%d\n", pCodecCtx->height);
printf("pCodecCtx->frame_number:%d\n", pCodecCtx->frame_number);
printf("pCodecCtx->time_base.num:%d\n", pCodecCtx->time_base.num);
printf("pCodecCtx->time_base.den:%d\n", pCodecCtx->time_base.den);
printf("pCodecCtx->bit_rate:%d\n", pCodecCtx->bit_rate);
printf("pCodecCtx->framerate:%d\n", pCodecCtx->framerate);
printf("pCodecCtx->frame_size:%d\n", pCodecCtx->frame_size);*/
}
}
av_packet_unref(packet); //release the packet data, otherwise memory keeps growing
msleep(20); //slow down a little, otherwise playback runs too fast
}
av_free(out_buffer);
av_frame_free(&pFrame);
av_frame_free(&pFrameRGB);
av_packet_free(&packet);
sws_freeContext(img_convert_ctx);
avcodec_free_context(&pCodecCtx);
avformat_close_input(&pFormatCtx);
//emit SigFinished();
}
widget.h
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
#include <QImage>
#include <QTimer>
#include <QPixmap>
#include <QPushButton>
#include <QSettings>
#include <QDebug>
#include "videoplayer.h"
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "rclcpp/rclcpp.hpp"
#include "std_msgs/msg/string.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "sensor_msgs/msg/compressed_image.hpp"
#include <cv_bridge/cv_bridge.h>
#include <chrono>
#include <iostream>
using namespace std::chrono_literals;
QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE
class Widget : public QWidget
{
Q_OBJECT
public:
Widget(QWidget *parent = nullptr);
~Widget();
signals:
void sig_fame(QImage img);
private:
Ui::Widget *ui;
private:
void readconfig();
QString rtspaddr;
void initWidget();
void initconnect();
private slots:
void slot_open_or_close();
protected:
//void paintEvent(QPaintEvent *event);
private:
VideoPlayer *mPlayer; //decoding thread
QImage mImage; //latest frame shown in the UI
QString url;
//QImage initimage;
cv::Mat QImage2Mat(QImage image);
QImage Mat2QImage(const cv::Mat &mat);
//void readvideo();
//std::vector colors(32);
//cv::VideoWriter outputVideo;
//int encode_type ;//= VideoWriter::fourcc('M', 'J', 'P', 'G');
//std::vector colors;
//cv::VideoWriter outputVideo;
private slots:
void slotGetOneFrame(QImage img);
private:
cv::Mat tempmat;
// -------------------------------------
// ros
// -------------------------------------
// node
rclcpp::Node::SharedPtr node_;
// pub
rclcpp::Publisher<sensor_msgs::msg::CompressedImage>::SharedPtr publisher_;
// sub
//rclcpp::Subscription<std_msgs::msg::String>::SharedPtr subscriber_;
// spin
rclcpp::TimerBase::SharedPtr timer_;
void initSpin(void);
QTimer spin_timer_;
void timer_callback();
};
#endif // WIDGET_H
widget.cpp
#include "widget.h"
#include "ui_widget.h"
Widget::Widget(QWidget *parent)
: QWidget(parent)
, ui(new Ui::Widget)
{
mPlayer = new VideoPlayer;
ui->setupUi(this);
readconfig();
initWidget();
initconnect();
// -------------------------------------
/*
// create topic pub
this->publisher_ = this->node_->create_publisher("pub_topic", 10);
// create topic sub
this->subscriber_ = node_->create_subscription(
"sub_topic", 10,
[&](const std_msgs::msg::String::SharedPtr msg)
{
// 處理訂閱到的消息
QString receivedMsg = QString::fromStdString(msg->data);
std::cout << msg->data << std::endl;
//ui->textBrowser->append(receivedMsg);
});
this->initSpin();*/
rclcpp::init(0, nullptr);
// create node
this->node_ = rclcpp::Node::make_shared("video");
this->publisher_ = this->node_->create_publisher("pubImageTopic", 10);
this->timer_ = this->node_->create_wall_timer(33ms, std::bind(&Widget::timer_callback, this));
this->initSpin();
}
Widget::~Widget()
{
delete ui;
// -------------------------------------
// ROS shutdown
// -------------------------------------
this->spin_timer_.stop();
rclcpp::shutdown();
// -------------------------------------
}
void Widget::initSpin(void)
{
this->spin_timer_.setInterval(1); // 1 ms
QObject::connect(&this->spin_timer_, &QTimer::timeout, [&]()
{ rclcpp::spin_some(node_); });
this->spin_timer_.start();
}
void Widget::timer_callback(){
try
{
//ros_img_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", tempmat).toImageMsg();
//sensor_msgs::msg::Image::SharedPtr ros_img_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", tempmat).toImageMsg();
if(!tempmat.empty())
{
//rclcpp::Publisher<sensor_msgs::msg::CompressedImage>::SharedPtr video_compressed_publisher_;
cv::Mat des1080;
cv::resize(tempmat, des1080, cv::Size(1080, 720), 0, 0, cv::INTER_NEAREST);
sensor_msgs::msg::CompressedImage::SharedPtr ros_img_compressed_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", des1080).toCompressedImageMsg();
//video_compressed_publisher_->publish(*ros_img_compressed_);
this->publisher_ ->publish(*ros_img_compressed_);
qDebug()<<"publisher";
}
else{
qDebug()<<"empty image";
}
//RCLCPP_WARN(this->get_logger(), "empty image");
// video_publisher_->publish(*ros_img_);
}
catch (cv_bridge::Exception &e)
{
//RCLCPP_ERROR(this->get_logger(),ros_img_->encoding.c_str());
qDebug()<<"Exception";
}
}
void Widget::readconfig(){
QSettings settingsread("./src/video_topic/conf/config.ini",QSettings::IniFormat);
rtspaddr = settingsread.value("SetUpOption/camerartsp").toString();
mPlayer->setrtsp(rtspaddr.toStdString());
}
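readconfig() loads the camera address from ./src/video_topic/conf/config.ini, i.e. the path is relative to the directory the program is started from (typically the workspace root). A minimal config.ini sketch; user, password and camera-ip are placeholders you must replace with your own values:

[SetUpOption]
camerartsp=rtsp://user:password@camera-ip:554/stream1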
void Widget::initWidget(){
qDebug() << rtspaddr;
ui->le_rtstspaddr->setText(rtspaddr);
}
void Widget::slot_open_or_close(){
if(ui->btn_openorclose->text()=="open"){
ui->btn_openorclose->setText("close");
mPlayer->startPlay();
// ROS initialisation
// -------------------------------------
/*
rclcpp::init(0, nullptr);
// create node
this->node_ = rclcpp::Node::make_shared("video");
this->publisher_ = this->node_->create_publisher<sensor_msgs::msg::CompressedImage>("pubImageTopic", 10);
this->timer_ = this->node_->create_wall_timer(500ms, std::bind(&Widget::timer_callback, this));
//pub_img = this->create_publisher("res_img", 10);
rclcpp::spin(this->node_);*/
//readvideo();
}else{
ui->btn_openorclose->setText("open");
mPlayer->stopPlay();
}
}
void Widget::initconnect(){
connect(ui->btn_openorclose,&QPushButton::clicked,this,&Widget::slot_open_or_close);
connect(mPlayer,SIGNAL(sig_GetOneFrame(QImage)),this,SLOT(slotGetOneFrame(QImage)));
connect(this,&Widget::sig_fame,this,&Widget::slotGetOneFrame);
//connect(mPlayer,&VideoPlayer::SigFinished, mPlayer,&VideoPlayer::deleteLater);//automatically delete the player when finished
}
void Widget::slotGetOneFrame(QImage img)
{
//cv::Mat tempmat = QImage2Mat(img);
tempmat = QImage2Mat(img);
if (tempmat.empty()) {
printf("null img\n");
}else {
QImage outimg = Mat2QImage(tempmat);
//printf("get img\n");
mImage = outimg;
QImage imageScale = mImage.scaled(QSize(ui->label->width(), ui->label->height()));
QPixmap pixmap = QPixmap::fromImage(imageScale);
ui->label->setPixmap(pixmap);
}
}
cv::Mat Widget::QImage2Mat(QImage image)
{
cv::Mat mat;
switch(image.format()) //dispatch on the QImage pixel format
{
case QImage::Format_Grayscale8: //grayscale
mat = cv::Mat(image.height(), image.width(),
CV_8UC1,(void*)image.constBits(),image.bytesPerLine()).clone(); //clone so the Mat owns its data
break;
case QImage::Format_RGB888: //3-channel colour
mat = cv::Mat(image.height(), image.width(),
CV_8UC3,(void*)image.constBits(),image.bytesPerLine());
cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR); //OpenCV expects BGR; cvtColor also copies the data
break;
case QImage::Format_ARGB32: //4-channel colour (BGRA byte order on little-endian)
mat = cv::Mat(image.height(), image.width(),
CV_8UC4,(void*)image.constBits(),image.bytesPerLine());
cv::cvtColor(mat, mat, cv::COLOR_BGRA2BGR);
break;
case QImage::Format_RGBA8888: //4-channel colour (RGBA byte order)
mat = cv::Mat(image.height(), image.width(),
CV_8UC4,(void*)image.constBits(),image.bytesPerLine());
cv::cvtColor(mat, mat, cv::COLOR_RGBA2BGR); //drop alpha and convert to BGR
break;
default:
break; //unsupported format: return an empty Mat
}
return mat;
}
QImage Widget::Mat2QImage(const cv::Mat &mat)
{
if(mat.type()==CV_8UC1 || mat.type()==CV_8U)
{
QImage image((const uchar *)mat.data, mat.cols, mat.rows, mat.step, QImage::Format_Grayscale8);
return image;
}
else if(mat.type()==CV_8UC3)
{
QImage image((const uchar *)mat.data, mat.cols, mat.rows, mat.step, QImage::Format_RGB888);
return image.rgbSwapped(); //swap R and B (BGR -> RGB)
}
return QImage(); //unsupported type: return a null image
}
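For reference, a minimal standalone subscriber (not part of this package; the node name video_sub is made up, the topic name matches the create_publisher call above) that decodes the published CompressedImage back into a cv::Mat:

#include <iostream>
#include <opencv2/opencv.hpp>
#include "rclcpp/rclcpp.hpp"
#include "sensor_msgs/msg/compressed_image.hpp"
int main(int argc, char *argv[])
{
    rclcpp::init(argc, argv);
    auto node = rclcpp::Node::make_shared("video_sub");
    auto sub = node->create_subscription<sensor_msgs::msg::CompressedImage>(
        "pubImageTopic", 10,
        [](const sensor_msgs::msg::CompressedImage::SharedPtr msg)
        {
            // msg->data holds the encoded byte stream produced by toCompressedImageMsg()
            cv::Mat frame = cv::imdecode(msg->data, cv::IMREAD_COLOR);
            if (!frame.empty())
                std::cout << "got frame " << frame.cols << "x" << frame.rows << std::endl;
        });
    rclcpp::spin(node);
    rclcpp::shutdown();
    return 0;
}

cv::imdecode works here because toCompressedImageMsg() stores a JPEG-encoded payload in msg->data by default.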
widget.ui
(The Designer form XML is not reproduced here. The 645x461 widget contains a QLabel named label that displays the video frames, a QLineEdit named le_rtstspaddr showing the RTSP address, and a QPushButton named btn_openorclose whose initial text is "open".)
main.cpp
#include "widget.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
Widget w;
w.show();
//w.showMaximized();
return a.exec();
}
package.xml
<?xml version="1.0"?>
<package format="3">
<name>video_topic</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="">cl</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<depend>cv_bridge</depend>
<depend>rclcpp</depend>
<depend>std_msgs</depend>
<depend>sensor_msgs</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>
CMakeLists.txt
cmake_minimum_required(VERSION 3.5)
project(video_topic)
# Default to C++14
if(NOT CMAKE_CXX_STANDARD)
set(CMAKE_CXX_STANDARD 14)
endif()
if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
add_compile_options(-Wall -Wextra -Wpedantic)
endif()
# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rclcpp REQUIRED)
find_package(std_msgs REQUIRED)
# qt
find_package(Qt5 REQUIRED COMPONENTS Core Gui Widgets)
find_package(cv_bridge REQUIRED)
find_package(image_transport)
find_package(sensor_msgs REQUIRED)
add_executable(video
src/main.cpp
src/videoplayer.h
src/videoplayer.cpp
src/widget.h
src/widget.cpp
src/widget.ui
)
#set(FFMPEG_LIBS_DIR /usr/local/ffmpeg/lib)
#set(FFMPEG_HEADERS_DIR /usr/local/ffmpeg/include)
set(FFMPEG_LIBS_DIR /usr/lib/aarch64-linux-gnu)
set(FFMPEG_HEADERS_DIR /usr/include/aarch64-linux-gnu)
include_directories(${FFMPEG_HEADERS_DIR})
#link_directories(${FFMPEG_LIBS_DIR})
#set(FFMPEG_LIBS libavutil.so libavcodec.so libavdevice.so libavformat.so libavfilter.so libswresample.so libswscale.so libavutil.so)
set(FFMPEG_LIBS libavcodec.so libavformat.so libswscale.so libavutil.so)
find_package(OpenCV REQUIRED)
include_directories( ${OpenCV_INCLUDE_DIRS})
target_link_libraries(video ${OpenCV_LIBS})
# ros
target_link_libraries(video
${rclcpp_LIBRARIES}
)
# qt
target_link_libraries(video
Qt5::Core
Qt5::Gui
Qt5::Widgets
)
#ffmpeg
target_link_libraries(video ${FFMPEG_LIBS})
ament_target_dependencies(video
rclcpp
std_msgs
sensor_msgs
cv_bridge
OpenCV
image_transport
)
# ------------------------------------------
# enable AUTOMOC, AUTOUIC and AUTORCC (needed for Qt)
# ------------------------------------------
set_target_properties(video PROPERTIES AUTOMOC ON)
set_target_properties(video PROPERTIES AUTOUIC ON)
set_target_properties(video PROPERTIES AUTORCC ON)
# ------------------------------------------
# install the executable
install(TARGETS video
DESTINATION lib/${PROJECT_NAME}
)
ament_package()
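To try the package (assuming a standard colcon workspace): build with colcon build --packages-select video_topic, source install/setup.bash, start the GUI with ros2 run video_topic video and click open; the stream can then be checked with ros2 topic hz /pubImageTopic or any viewer that handles sensor_msgs/msg/CompressedImage.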