一个使用Qt和FFmpeg播放视频的示例代码,注释中有详细说明:
#include <QApplication>
#include <QMainWindow>
#include <QWidget>
#include <QLabel>
#include <QTimer>
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QImage>
#include <QPixmap>
#include <iostream>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
// FFmpeg-related state (file-scope globals shared by init, the decode
// timer callback, and cleanup).
AVFormatContext *pFormatCtx = NULL;   // demuxer context for the opened input file
AVCodecContext *pCodecCtx = NULL;     // decoder context for the video stream
AVCodec *pCodec = NULL;               // decoder found for the video stream's codec_id
AVFrame *pFrame = NULL;               // reusable frame receiving decoded video
AVPacket packet;                      // reusable packet filled by av_read_frame
int videoStream = -1;                 // index of the first video stream, -1 if none
int audioStream = -1;                 // index of the first audio stream, -1 if none (found but otherwise unused here)
struct SwsContext *convertCtx = NULL; // sws context converting decoder pix_fmt -> RGB24
// Video properties captured from the video stream during init.
double videoDuration;                 // stream duration in seconds
int videoWidth;
int videoHeight;
// Qt-related state.
QMainWindow *window = NULL;           // top-level player window
QLabel *videoLabel = NULL;            // label that displays each decoded frame
QTimer *timer = NULL;                 // drives decoding: one packet per tick
// 初始化FFmpeg
bool initFFmpeg(const char *filename) {
av_register_all();
avformat_network_init();
// 打开视频文件
if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) {
std::cerr << "Could not open file: " << filename << std::endl;
return false;
}
// 获取流信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
std::cerr << "Could not find stream information." << std::endl;
return false;
}
// 查找视频流和音频流
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0) {
videoStream = i;
videoDuration = pFormatCtx->streams[i]->duration * av_q2d(pFormatCtx->streams[i]->time_base);
videoWidth = pFormatCtx->streams[i]->codecpar->width;
videoHeight = pFormatCtx->streams[i]->codecpar->height;
} else if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audioStream < 0) {
audioStream = i;
}
}
// 解码视频流
if (videoStream >= 0) {
pCodecCtx = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
std::cerr << "Failed to find codec." << std::endl;
return false;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
std::cerr << "Failed to open codec." << std::endl;
return false;
}
pFrame = av_frame_alloc();
if (pFrame == NULL) {
std::cerr << "Failed to allocate video frame." << std::endl;
return false;
}
convertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24,
SWS_BICUBIC, NULL, NULL, NULL);
if (convertCtx == NULL) {
std::cerr << "Failed to create scaling context." << std::endl;
return false;
}
} else {
std::cerr << "Could not find video stream." << std::endl;
return false;
}
return true;
}
// Release all FFmpeg resources in reverse order of acquisition.
// Safe to call more than once: every pointer is NULL-checked and ends up
// reset to NULL, so a second call is a no-op. (av_frame_free,
// avcodec_free_context and avformat_close_input take the pointer's address
// and reset it themselves, so the explicit `= NULL` assignments the
// original carried after them were redundant and are dropped; the
// deprecated avcodec_close() before avcodec_free_context() is likewise
// unnecessary, as freeing the context closes the codec.)
void closeFFmpeg() {
    if (convertCtx != NULL) {
        sws_freeContext(convertCtx);
        convertCtx = NULL;  // sws_freeContext takes the pointer by value, so reset manually
    }
    if (pFrame != NULL) {
        av_frame_free(&pFrame);
    }
    if (pCodecCtx != NULL) {
        avcodec_free_context(&pCodecCtx);
    }
    if (pFormatCtx != NULL) {
        avformat_close_input(&pFormatCtx);
    }
}
// Build the playback UI (main window with a centered, fixed-size video
// label) and a 40 ms timer whose tick demuxes one packet, decodes any
// resulting video frames, converts them to RGB24 and shows them in the
// label. initFFmpeg() must have succeeded before this is called.
void createVideoWindow() {
    // Main window.
    window = new QMainWindow();
    window->setWindowTitle("FFmpeg Video Player");
    // Label receiving the decoded frames, pinned to the video's size.
    videoLabel = new QLabel();
    videoLabel->setMinimumSize(videoWidth, videoHeight);
    videoLabel->setMaximumSize(videoWidth, videoHeight);
    videoLabel->setAlignment(Qt::AlignCenter);
    // Layouts.
    QHBoxLayout *hLayout = new QHBoxLayout();
    hLayout->addWidget(videoLabel);
    QVBoxLayout *vLayout = new QVBoxLayout();
    vLayout->addLayout(hLayout);
    QWidget *widget = new QWidget();
    widget->setLayout(vLayout);
    window->setCentralWidget(widget);
    // Playback timer: one demuxed packet per tick.
    timer = new QTimer();
    timer->setInterval(40);  // ~25 fps; NOTE(review): should ideally come from the stream's avg_frame_rate
    QObject::connect(timer, &QTimer::timeout, []() {
        int ret = av_read_frame(pFormatCtx, &packet);
        if (ret >= 0) {
            if (packet.stream_index == videoStream) {
                // Fix: check avcodec_send_packet so a decode error doesn't
                // silently leave the decoder in a bad state every tick.
                if (avcodec_send_packet(pCodecCtx, &packet) == 0) {
                    // One packet may yield zero or more frames.
                    while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                        AVFrame *rgbFrame = av_frame_alloc();
                        if (rgbFrame == NULL) {
                            break;
                        }
                        int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
                        uint8_t *buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
                        if (buffer == NULL) {
                            av_frame_free(&rgbFrame);
                            break;
                        }
                        av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
                        sws_scale(convertCtx, (const uint8_t * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, rgbFrame->data, rgbFrame->linesize);
                        // Fix: pass the actual stride instead of letting
                        // QImage assume width*3 — stays correct if the fill
                        // alignment above ever changes.
                        QImage image(rgbFrame->data[0], pCodecCtx->width, pCodecCtx->height, rgbFrame->linesize[0], QImage::Format_RGB888);
                        // QPixmap::fromImage deep-copies, so the RGB buffer
                        // can be released immediately afterwards.
                        videoLabel->setPixmap(QPixmap::fromImage(image, Qt::AutoColor));
                        av_freep(&rgbFrame->data[0]);
                        av_frame_free(&rgbFrame);
                    }
                }
            }
            av_packet_unref(&packet);
        } else {
            // End of stream or an unrecoverable read error: stop playback
            // and release FFmpeg state. Fix: the original only stopped on
            // AVERROR_EOF, leaving the timer firing and failing forever on
            // any other read error.
            timer->stop();
            closeFFmpeg();
        }
    });
}
// Entry point: parse the command line, initialize FFmpeg for the given
// file, build the playback window, and run the Qt event loop.
// Returns 0 on normal exit, 1 on usage/initialization errors.
int main(int argc, char *argv[]) {
    QApplication app(argc, argv);
    if (argc < 2) {
        // Fix: the original usage string had lost its placeholder text.
        std::cerr << "Usage: " << argv[0] << " <video file>" << std::endl;
        return 1;
    }
    if (!initFFmpeg(argv[1])) {
        // initFFmpeg releases anything it acquired before failing.
        return 1;
    }
    if (videoStream >= 0) {
        createVideoWindow();
        window->show();
        timer->start();
        int rc = app.exec();
        // Fix: release decoder/demuxer state when the event loop exits.
        // closeFFmpeg() is idempotent, so this is safe even if the timer
        // callback already cleaned up at end-of-stream.
        closeFFmpeg();
        return rc;
    } else {
        std::cerr << "Could not find video stream." << std::endl;
        closeFFmpeg();
        return 1;
    }
}
这个示例程序用FFmpeg来解码视频流,并使用Qt来显示视频帧。代码较长,注释中有详细的说明。
本文福利,免费领取Qt开发学习资料包、技术视频,内容包括(C++语言基础,C++设计模式,Qt编程入门,QT信号与槽机制,QT界面开发-图像绘制,QT网络,QT数据库编程,QT项目实战,QSS,OpenCV,Quick模块,面试题等等)↓↓↓↓↓↓见下面↓↓文章底部点击免费领取↓↓