Reference: Building FFmpeg from source (with the MSVC compiler)
Reference: Qt5 + FFmpeg
In Qt you only need to add a QWidget in the .ui file and promote it to FFmpegWidget. Call FFmpegWidget's setUrl() to set the stream URL, then use open() and close() to start and stop the video.
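For reference, the widget can also be created and driven purely from code. A minimal sketch, where the parent window and the RTSP address are placeholder assumptions:

FFmpegWidget *video = new FFmpegWidget(this); //"this" is some parent QWidget
video->setUrl("rtsp://192.168.1.100:554/stream"); //placeholder address
video->open();  //starts the decode thread and begins painting frames
//... later, e.g. when the window closes:
video->close(); //stops the thread and clears the last image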
#ifndef FFMPEGINCLUDE_H
#define FFMPEGINCLUDE_H
//Required for compatibility with the C/C99 standard (otherwise compilation fails)
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif
//Pull in the FFmpeg headers
extern "C" {
#include "libavutil/opt.h"
#include "libavutil/time.h"
#include "libavutil/frame.h"
#include "libavutil/display.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/ffversion.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavdevice/avdevice.h"
#if (LIBAVCODEC_VERSION_MAJOR > 56)
#include "libavutil/hwcontext.h"
#endif
}
#include <QDebug>
#include <QTime>
#include <QString>
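//Make MSVC store narrow string literals as UTF-8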
#ifdef Q_CC_MSVC
#pragma execution_character_set("utf-8")
#endif
#ifndef TIMEMS
#define TIMEMS qPrintable(QTime::currentTime().toString("HH:mm:ss") + "[" + __FUNCTION__ + ":" + QString::number(__LINE__ ) + "]")
#endif
//Map the libavcodec major version to the corresponding FFmpeg major version
#if (LIBAVCODEC_VERSION_MAJOR == 56)
#define FFMPEG_VERSION_MAJOR 2
#elif (LIBAVCODEC_VERSION_MAJOR == 57)
#define FFMPEG_VERSION_MAJOR 3
#elif (LIBAVCODEC_VERSION_MAJOR == 58)
#define FFMPEG_VERSION_MAJOR 4
#elif (LIBAVCODEC_VERSION_MAJOR == 59)
#define FFMPEG_VERSION_MAJOR 5
#elif (LIBAVCODEC_VERSION_MAJOR == 60)
#define FFMPEG_VERSION_MAJOR 6
#endif
#if (FFMPEG_VERSION_MAJOR > 4)
#define AVCodecx const AVCodec
#define AVInputFormatx const AVInputFormat
#define AVOutputFormatx const AVOutputFormat
#else
#define AVCodecx AVCodec
#define AVInputFormatx AVInputFormat
#define AVOutputFormatx AVOutputFormat
#endif
#if (FFMPEG_VERSION_MAJOR < 3)
enum AVHWDeviceType {
AV_HWDEVICE_TYPE_VDPAU,
AV_HWDEVICE_TYPE_CUDA,
AV_HWDEVICE_TYPE_VAAPI,
AV_HWDEVICE_TYPE_DXVA2,
AV_HWDEVICE_TYPE_QSV,
AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
AV_HWDEVICE_TYPE_NONE,
AV_HWDEVICE_TYPE_D3D11VA,
AV_HWDEVICE_TYPE_DRM,
};
#endif
#endif // FFMPEGINCLUDE_H
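The AVCodecx/AVInputFormatx/AVOutputFormatx aliases above exist because FFmpeg 5 made these types const in most API signatures. As a sketch of the intended use (assuming an already-opened avFormatContext), the alias lets the same line compile on both sides of the change without the const_cast seen in the listing below:

AVCodecx *decoder = NULL;
//&decoder is const AVCodec ** on FFmpeg 5+ and AVCodec ** on older builds
int index = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);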
#ifndef FFMPEG_H
#define FFMPEG_H
#include <QtCore>
#include <QtWidgets>
#include "ffmpeginclude.h"
class FFmpegThread : public QThread
{
Q_OBJECT
public:
explicit FFmpegThread(QObject *parent = 0);
/// One-time library initialization
static void initlib();
protected:
void run();
private:
/// Thread stop flag
volatile bool stopped = false;
/// Play/init request flag
volatile bool isPlay = false;
/// Result of the last read/decode call
int frameFinish = 0;
/// Video width
int videoWidth = 0;
/// Video height
int videoHeight = 0;
/// Previous video width
int oldWidth = 0;
/// Previous video height
int oldHeight = 0;
/// Video stream index
int videoStreamIndex = -1;
/// Audio stream index
int audioStreamIndex = -1;
/// Stream URL
QString url = "";
/// Buffer holding the decoded RGB image
uint8_t *buffer;
/// Packet
AVPacket *avPacket;
/// Frame
AVFrame *avFrame;
/// Frame (decoded video)
AVFrame *avFrame2;
/// Frame (RGB output)
AVFrame *avFrame3;
/// Format context
AVFormatContext *avFormatContext;
/// Video codec context
AVCodecContext *videoCodec;
/// Audio codec context
AVCodecContext *audioCodec;
/// Image conversion context
SwsContext *swsContext;
/// Options dictionary
AVDictionary *options;
/// Video decoder
AVCodec *videoDecoder;
/// Audio decoder
AVCodec *audioDecoder;
/// Timestamp of the previous packet (microseconds)
qint64 lastTime = 0;
/// Timestamp of the current packet (microseconds)
qint64 currentTime = 0;
signals:
/// Signal: decoded image ready
void signal_receiveImage(const QImage &image);
/// Signal: initialization failed
void signal_initFailed();
/// Signal: thread finished
void signal_threadOver();
public slots:
/// Set the stream URL
void setUrl(const QString &url);
/// Initialize the decoder objects
bool init();
/// Release all objects
void free();
/// Start playback
void play();
/// Pause playback
void pause();
/// Resume playback
void next();
/// Stop the capture thread
void stop();
};
//Widget class that displays the live video
class FFmpegWidget : public QWidget
{
Q_OBJECT
public:
explicit FFmpegWidget(QWidget *parent = 0);
~FFmpegWidget();
protected:
void paintEvent(QPaintEvent *);
private:
FFmpegThread *thread;
QImage image;
signals:
/// Signal: failed to open the video
void signal_openVideoFailed();
private slots:
/// Receive an image and schedule a repaint
void updateImage(const QImage &image);
public slots:
/// Set the stream URL
void setUrl(const QString &url);
/// Open the device
void open();
/// Pause
void pause();
/// Resume
void next();
/// Close the device
void close();
/// Clear the display
void clear();
};
#endif // FFMPEG_H
#include "ffmpeg.h"
FFmpegThread::FFmpegThread(QObject *parent) : QThread(parent) {
setObjectName("FFmpegThread");
buffer = NULL;
avPacket = NULL;
avFrame = NULL;
avFrame2 = NULL;
avFrame3 = NULL;
avFormatContext = NULL;
videoCodec = NULL;
audioCodec = NULL;
swsContext = NULL;
options = NULL;
videoDecoder = NULL;
audioDecoder = NULL;
//One-time initialization (registration happens only once)
FFmpegThread::initlib();
}
//Initialize the library (needs to run only once)
void FFmpegThread::initlib() {
static QMutex mutex;
QMutexLocker locker(&mutex);
static bool isInit = false;
if (!isInit) {
//Register all devices, mainly to support local camera capture
#ifdef ffmpegdevice
avdevice_register_all();
#endif
//Initialize network support; must run before any network stream is opened
avformat_network_init();
isInit = true;
qDebug() << TIMEMS << "init ffmpeg lib ok" << " version:" << FFMPEG_VERSION;
}
}
bool FFmpegThread::init() {
//Set various parameters such as probe time/timeout/max delay before opening the stream
//Set the buffer size; for 1080p the value can be increased
av_dict_set(&options, "buffer_size", "8192000", 0);
//Open over TCP; to use UDP instead, replace "tcp" with "udp"
av_dict_set(&options, "rtsp_transport", "tcp", 0);
//Connection timeout in microseconds; 3000000 means 3 seconds
av_dict_set(&options, "stimeout", "3000000", 0);
//Maximum demuxer delay in microseconds; 1000000 means 1 second
av_dict_set(&options, "max_delay", "1000000", 0);
//Pick the decoder thread count automatically
av_dict_set(&options, "threads", "auto", 0);
//zerolatency: reduce latency at the cost of some video quality
av_dict_set(&options, "tune", "zerolatency", 0);
//Open the input stream
avFormatContext = avformat_alloc_context();
int result = avformat_open_input(&avFormatContext, url.toStdString().data(), NULL, &options);
if (result < 0) {
qDebug() << TIMEMS << "open input error" << url;
return false;
}
qDebug() << TIMEMS << "open input:" << url << " " << result;
//Free the option dictionary now that the input is open
if (options != NULL) {
av_dict_free(&options);
qDebug() << TIMEMS << "free option";
}
//Read the stream information
result = avformat_find_stream_info(avFormatContext, NULL);
if (result < 0) {
qDebug() << TIMEMS << "find stream info error";
return false;
}
//---------- video stream section begins (marker to ease code folding) ----------
if (1) {
videoStreamIndex = av_find_best_stream(avFormatContext,
AVMEDIA_TYPE_VIDEO,
-1,
-1,
const_cast<const AVCodec **>(&videoDecoder),
0);
if (videoStreamIndex < 0) {
qDebug() << TIMEMS << "find video stream index error";
return false;
}
//Get the video stream
AVStream *videoStream = avFormatContext->streams[videoStreamIndex];
//Get the video stream's decoder (or specify one explicitly)
videoCodec = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(videoCodec, videoStream->codecpar);
videoDecoder = const_cast<AVCodec *>(avcodec_find_decoder(videoCodec->codec_id));
//videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
if (videoDecoder == NULL) {
qDebug() << TIMEMS << "video decoder not found";
return false;
}
//Enable accelerated decoding
videoCodec->lowres = videoDecoder->max_lowres;
videoCodec->flags2 |= AV_CODEC_FLAG2_FAST;
//Open the video decoder
result = avcodec_open2(videoCodec, videoDecoder, NULL);
if (result < 0) {
qDebug() << TIMEMS << "open video codec error";
return false;
}
//Get the resolution
videoWidth = videoStream->codecpar->width;
videoHeight = videoStream->codecpar->height;
//Bail out if the width/height could not be determined
if (videoWidth == 0 || videoHeight == 0) {
qDebug() << TIMEMS << "find width height error";
return false;
}
QString videoInfo = QString("Video stream info -> index: %1 decoder: %2 format: %3 duration: %4 s resolution: %5*%6")
.arg(videoStreamIndex)
.arg(videoDecoder->name, avFormatContext->iformat->name)
.arg(avFormatContext->duration / 1000000)
.arg(videoWidth)
.arg(videoHeight);
qDebug() << TIMEMS << videoInfo;
}
//---------- video stream section ends ----------
//---------- audio stream section begins (marker to ease code folding) ----------
if (1) {
//Loop over the streams to find the audio stream index
audioStreamIndex = -1;
for (uint i = 0; i < avFormatContext->nb_streams; i++) {
if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
audioStreamIndex = i;
break;
}
}
//Some sources have no audio stream, so do not return here
if (audioStreamIndex == -1) {
qDebug() << TIMEMS << "find audio stream index error";
} else {
//Get the audio stream
AVStream *audioStream = avFormatContext->streams[audioStreamIndex];
audioCodec = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(audioCodec, audioStream->codecpar);
//Get the audio stream's decoder (or specify one explicitly)
audioDecoder = const_cast<AVCodec *>(avcodec_find_decoder(audioCodec->codec_id));
//audioDecoder = avcodec_find_decoder_by_name("aac");
if (audioDecoder == NULL) {
qDebug() << TIMEMS << "audio codec not found";
return false;
}
//Open the audio decoder
result = avcodec_open2(audioCodec, audioDecoder, NULL);
if (result < 0) {
qDebug() << TIMEMS << "open audio codec error";
return false;
}
QString audioInfo = QString("Audio stream info -> index: %1 decoder: %2 bitrate: %3 channels: %4 sample rate: %5")
.arg(audioStreamIndex)
.arg(audioDecoder->name)
.arg(avFormatContext->bit_rate)
.arg(audioCodec->channels)
.arg(audioCodec->sample_rate);
qDebug() << TIMEMS << audioInfo;
}
}
//---------- audio stream section ends ----------
//Pre-allocate the packet and frames
avPacket = av_packet_alloc();
avFrame = av_frame_alloc();
avFrame2 = av_frame_alloc();
avFrame3 = av_frame_alloc();
//Compare with the previous width/height and reallocate the buffer when they change
if (oldWidth != videoWidth || oldHeight != videoHeight) {
int byte = av_image_get_buffer_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight, 1);
av_free(buffer); //release the previous buffer first (av_free accepts NULL)
buffer = (uint8_t *) av_malloc(byte * sizeof(uint8_t));
oldWidth = videoWidth;
oldHeight = videoHeight;
}
//Define the pixel formats
AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;
//Take the real source format from the decoder
srcFormat = videoCodec->pix_fmt;
//SWS_FAST_BILINEAR is the fastest option but may lose some image detail; swap in another flag if quality matters
int flags = SWS_FAST_BILINEAR;
//Bind the buffer that stores one converted frame
//Either call below works; avpicture_fill is deprecated
//avpicture_fill((AVPicture *)avFrame3, buffer, dstFormat, videoWidth, videoHeight);
av_image_fill_arrays(avFrame3->data, avFrame3->linesize, buffer,
dstFormat, videoWidth, videoHeight, 1);
//Create the image conversion context
swsContext = sws_getContext(videoWidth, videoHeight, srcFormat,
videoWidth, videoHeight, dstFormat,
flags, NULL, NULL, NULL);
//Dump the stream information
//av_dump_format(avFormatContext, 0, url.toStdString().data(), 0);
qDebug() << TIMEMS << "init ffmpeg finish";
return true;
}
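The error paths in init() only report that a call failed, not why. FFmpeg can translate the negative result codes into text via av_strerror; a minimal sketch using the same result variable:

char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(result, errbuf, sizeof(errbuf)); //fills errbuf with a readable message
qDebug() << TIMEMS << "open input error" << url << errbuf;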
void FFmpegThread::run() {
// qint64 startTime = av_gettime();
while (!stopped) {
//Run the initialization when the play flag is set
if (isPlay) {
bool ret = this->init();
if (!ret) {
emit signal_initFailed();
stopped = true;
break;
}
isPlay = false;
continue;
}
frameFinish = av_read_frame(avFormatContext, avPacket);
if (frameFinish >= 0) {
//Check whether the current packet is video or audio
int index = avPacket->stream_index;
if (index == videoStreamIndex) {
frameFinish = avcodec_send_packet(videoCodec, avPacket);
if (frameFinish < 0) {
continue;
}
frameFinish = avcodec_receive_frame(videoCodec, avFrame2);
if (frameFinish < 0) {
continue;
}
if (frameFinish >= 0) {
//Convert the frame into an image
sws_scale(swsContext, (const uint8_t *const *) avFrame2->data, avFrame2->linesize, 0,
videoHeight, avFrame3->data, avFrame3->linesize);
//Either of the following works
//QImage image(avFrame3->data[0], videoWidth, videoHeight, QImage::Format_RGB32);
QImage image((uchar *) buffer, videoWidth, videoHeight, QImage::Format_RGB32);
if (!image.isNull()) {
//copy() detaches from buffer, which this thread keeps overwriting
emit signal_receiveImage(image.copy());
}
usleep(1);
}
#if 0
//Throttle playback (otherwise a file would play back all at once)
AVRational timeBase = {1, AV_TIME_BASE};
int64_t ptsTime = av_rescale_q(avPacket->dts, avFormatContext->streams[videoStreamIndex]->time_base, timeBase);
//in microseconds
int64_t nowTime = av_gettime() - startTime;
qDebug() << "ptstime:" << ptsTime << "nowTime:" << nowTime;
if (ptsTime > nowTime) {
av_usleep(ptsTime - nowTime);
}
#else
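//Pace playback by the DTS delta between consecutive packets: the dts is
//converted to microseconds (e.g. with time_base 1/90000, dts 90000 maps to
//1000000 us) and the thread sleeps for the delta, falling back to a fixed
//10 ms whenever the delta is negative (seek/restart) or exceeds one second.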
AVRational timeBase = {1, AV_TIME_BASE};
int64_t ptsTime = av_rescale_q(avPacket->dts,
avFormatContext->streams[videoStreamIndex]->time_base,
timeBase);
currentTime = ptsTime;
if (lastTime == 0) {
av_usleep(10 * 1000);
}
if ((currentTime - lastTime) > 1 * 1000 * 1000 || (currentTime - lastTime) < 0) {
av_usleep(10 * 1000);
} else {
av_usleep(currentTime - lastTime);
}
lastTime = currentTime;
#endif
} else if (index == audioStreamIndex) {
//Decode the audio stream here as needed
}
}
//Release the packet payload; the AVPacket itself is freed in free()
av_packet_unref(avPacket);
usleep(1);
}
//Release resources when the thread finishes
free();
stopped = false;
isPlay = false;
qDebug() << TIMEMS << "stop ffmpeg thread";
emit signal_threadOver();
}
void FFmpegThread::setUrl(const QString &url) {
this->url = url;
}
void FFmpegThread::free() {
if (swsContext != NULL) {
sws_freeContext(swsContext);
swsContext = NULL;
}
if (avPacket != NULL) {
//av_packet_free unrefs and releases the packet allocated by av_packet_alloc
av_packet_free(&avPacket);
}
if (avFrame != NULL) {
av_frame_free(&avFrame);
avFrame = NULL;
}
if (avFrame2 != NULL) {
av_frame_free(&avFrame2);
avFrame2 = NULL;
}
if (avFrame3 != NULL) {
av_frame_free(&avFrame3);
avFrame3 = NULL;
}
if (videoCodec != NULL) {
//avcodec_free_context also closes the codec and frees the context from avcodec_alloc_context3
avcodec_free_context(&videoCodec);
}
if (audioCodec != NULL) {
avcodec_free_context(&audioCodec);
}
if (avFormatContext != NULL) {
avformat_close_input(&avFormatContext);
avFormatContext = NULL;
}
av_dict_free(&options);
qDebug() << TIMEMS << "close ffmpeg ok";
}
void FFmpegThread::play() {
//Set the flag so the thread runs the initialization
isPlay = true;
}
void FFmpegThread::pause() {
}
void FFmpegThread::next() {
}
void FFmpegThread::stop() {
//Set the flag so the thread stops
stopped = true;
}
//Widget class that displays the live video
FFmpegWidget::FFmpegWidget(QWidget *parent) : QWidget(parent) {
thread = new FFmpegThread(this);
connect(thread, SIGNAL(signal_receiveImage(QImage)), this, SLOT(updateImage(QImage)));
image = QImage();
connect(thread, &FFmpegThread::signal_initFailed, this, &FFmpegWidget::signal_openVideoFailed);
}
FFmpegWidget::~FFmpegWidget() {
close();
}
void FFmpegWidget::paintEvent(QPaintEvent *) {
if (image.isNull()) {
return;
}
QPainter painter(this);
#if 0
//image = image.scaled(this->size(), Qt::KeepAspectRatio);
//Draw the image centered at its original size
int pixX = rect().center().x() - image.width() / 2;
int pixY = rect().center().y() - image.height() / 2;
QPoint point(pixX, pixY);
painter.drawImage(point, image);
#else
painter.drawImage(this->rect(), image);
#endif
}
void FFmpegWidget::updateImage(const QImage &image) {
this->image = image;
this->update();
}
void FFmpegWidget::setUrl(const QString &url) {
thread->setUrl(url);
}
void FFmpegWidget::open() {
qDebug() << TIMEMS << "open video" << objectName();
clear();
thread->play();
thread->start();
}
void FFmpegWidget::pause() {
thread->pause();
}
void FFmpegWidget::next() {
thread->next();
}
void FFmpegWidget::close() {
qDebug() << TIMEMS << "close video" << objectName();
if (thread->isRunning()) {
thread->stop();
thread->quit();
thread->wait(500);
}
QTimer::singleShot(1, this, SLOT(clear()));
}
void FFmpegWidget::clear() {
image = QImage();
update();
}