Header file:
#ifndef WIDGET_H
#define WIDGET_H
#include <QOpenGLWidget>
#include <QPainter>
#include <QImage>
#include <QPixmap>
#include <QTimer>
#include <QTime>
#include <QList>
#include <QAudioOutput>
#include <QAudioFormat>
#include <QIODevice>
#include <QCoreApplication>
#include <QDebug>
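// FFmpeg is a plain C library, so its headers are wrapped in extern "C" to get C linkage from C++.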
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libavutil/samplefmt.h>
#include <libavutil/channel_layout.h>
}
#define MAX_AUDIO_FRAME_SIZE 192000
namespace Ui {
class Widget;
}
class Widget : public QOpenGLWidget
{
Q_OBJECT
public:
explicit Widget(QWidget *parent = nullptr);
~Widget();
public slots:
void timeCallback(void);
void on_play_clicked();
void resizeEvent(QResizeEvent* );
private:
Ui::Widget *ui;
void paintGL();
void loadAudioInfo(double rate, int channel);
int playVedio(void);
QTimer *timer; // drives playback, interval derived from the frame rate
int vedioW,vedioH; // display width/height
QList<QPixmap> vedioBuff; // decoded frame buffer
QPixmap m_pix;
QString myUrl; // path of the video file
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
int ret, got_picture,got_audio; // decode result flags
int videoindex; // video stream index
// audio
int audioindex; // audio stream index
AVCodecParameters *aCodecParameters;
AVCodec *aCodec;
AVCodecContext *aCodecCtx;
QByteArray byteBuf; // audio sample buffer
QAudioOutput *audioOutput;
QIODevice *streamOut;
};
#endif // WIDGET_H
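For reference, the project file has to pull in the Qt multimedia module and link the FFmpeg libraries used above. A minimal sketch, assuming FFmpeg is installed under /usr/local/ffmpeg (adjust the paths for your own setup):
# widget.pro (sketch; the FFmpeg paths are an assumption)
QT       += core gui widgets multimedia
CONFIG   += c++11
HEADERS  += widget.h
SOURCES  += main.cpp widget.cpp
FORMS    += widget.ui
INCLUDEPATH += /usr/local/ffmpeg/include
LIBS        += -L/usr/local/ffmpeg/lib -lavformat -lavcodec -lswresample -lswscale -lavutil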
cpp file:
#include "widget.h"
#include "ui_widget.h"
Widget::Widget(QWidget *parent) :
QOpenGLWidget(parent),
ui(new Ui::Widget)
{
ui->setupUi(this);
timer = new QTimer(this);
timer->setTimerType(Qt::PreciseTimer); // 精准定时设置
connect(timer,SIGNAL(timeout()),this,SLOT(timeCallback()));
myUrl = QString("test1.mp4");
audioOutput = nullptr; // created later in loadAudioInfo()
streamOut = nullptr;
}
Widget::~Widget()
{
delete ui;
}
void Widget::timeCallback(void)
{
// play back buffered video frames
if(!vedioBuff.isEmpty())
{
ui->label->setPixmap(vedioBuff.at(0));
vedioBuff.removeAt(0);
// m_pix = vedioBuff.at(0);
// update();
}
else {
timer->stop();
}
// play back buffered audio
if(audioOutput && audioOutput->state() != QAudio::StoppedState && audioOutput->state() != QAudio::SuspendedState)
{
int writeBytes = qMin(byteBuf.length(), audioOutput->bytesFree());
streamOut->write(byteBuf.data(), writeBytes);
byteBuf = byteBuf.right(byteBuf.length() - writeBytes);
}
}
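// Helper: waits roughly msec milliseconds while still pumping the Qt event loop,
// so the UI keeps repainting during the wait.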
void Delay_MSec(unsigned int msec)
{
QTime _Timer = QTime::currentTime().addMSecs(msec);
while( QTime::currentTime() < _Timer )
QCoreApplication::processEvents(QEventLoop::AllEvents, 100);
}
void Widget::paintGL()
{
QPainter p(this);
p.drawPixmap(rect(),m_pix);
}
int Widget::playVedio(void)
{
QByteArray pathBytes = myUrl.toUtf8(); // keep the byte array alive while filepath is in use
char *filepath = pathBytes.data();
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
// open the media file and fill the pFormatCtx structure
if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){
qDebug("Failed to open the media file.\n");
return -1;
}
// read the stream information
if(avformat_find_stream_info(pFormatCtx,NULL)<0){
qDebug("Failed to read stream information.\n");
return -1;
}
videoindex = -1;
audioindex = -1;
// nb_streams is the number of streams; stop at the first video stream found.
for(unsigned int i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){
videoindex=i;
break;
}
if(videoindex==-1){
qDebug("No video stream found.\n");
return -1;
}
// likewise, stop at the first audio stream found.
for(unsigned int i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){
audioindex=i;
break;
}
if(audioindex==-1){
qDebug("No audio stream found.\n");
return -1;
}
// get the video codec context
pCodecCtx=pFormatCtx->streams[videoindex]->codec;
double frameNum = av_q2d(pCodecCtx->framerate); // frames per second
if(!(frameNum > 0)) frameNum = 25; // fall back to 25 fps if the rate is missing
int frameRate = 1000/frameNum; // playback interval per frame, in milliseconds
qDebug("fps = %f, frame interval = %d ms\n",frameNum,frameRate);
// find the video decoder
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
{
qDebug("Video decoder not found.\n");
return -1;
}
// open the decoder on pCodecCtx
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
{
qDebug("Failed to open the video decoder.\n");
return -1;
}
// get the audio codec parameters -----------------------------------------------------------
aCodecParameters = pFormatCtx->streams[audioindex]->codecpar;
aCodec = avcodec_find_decoder(aCodecParameters->codec_id);
if (aCodec == 0) {
qDebug("Audio decoder not found.\n");
return -1;
}
aCodecCtx = avcodec_alloc_context3(aCodec);
avcodec_parameters_to_context(aCodecCtx, aCodecParameters);
// open the decoder on aCodecCtx
if (avcodec_open2(aCodecCtx, aCodec, 0) < 0) {
qDebug("Failed to open the audio decoder.\n");
return -1;
}
int rate = aCodecCtx->sample_rate;
int channel = aCodecCtx->channels;
loadAudioInfo(rate,channel);
// clear the playback buffers
byteBuf.clear();
vedioBuff.clear();
// allocate the frame structures; the actual image buffer is allocated separately with av_malloc
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
// audio output parameters for resampling
uint64_t out_channel_layout = aCodecCtx->channel_layout;
if (out_channel_layout == 0) out_channel_layout = av_get_default_channel_layout(aCodecCtx->channels); // some files carry no layout
AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;
int out_sample_rate = aCodecCtx->sample_rate;
int out_channels = av_get_channel_layout_nb_channels(out_channel_layout);
uint8_t *audio_out_buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE*2);
SwrContext *swr_ctx = swr_alloc_set_opts(NULL, out_channel_layout, out_sample_fmt, out_sample_rate, out_channel_layout, aCodecCtx->sample_fmt, aCodecCtx->sample_rate, 0, 0);
swr_init(swr_ctx);
// allocate the buffer that will hold the converted RGB image data
unsigned char *out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1));
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1);
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
// initialise the img_convert_ctx used to convert decoded frames to RGB32
struct SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
timer->start(frameRate); // start the playback timer at the frame interval
double audio_clock = 0; // last decoded audio pts, used as the master clock
double fps = av_q2d(pFormatCtx->streams[videoindex]->avg_frame_rate); // average frame rate, used in the rough sync check below
while (av_read_frame(pFormatCtx, packet) >= 0){
if (packet->stream_index == audioindex){
int ret = avcodec_decode_audio4(aCodecCtx, pFrame, &got_audio, packet);
if (ret < 0)
{
qDebug("Audio decode failed.\n");
return -1;
}
if (got_audio)
{
audio_clock = pFrame->pts; // take the audio clock from the decoded frame's pts
int len = swr_convert(swr_ctx, &audio_out_buffer, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)pFrame->data, pFrame->nb_samples);
if (len <= 0)
{
qDebug("解码失败1.\n");
continue;
}
int dst_bufsize = av_samples_get_buffer_size(0, out_channels, len, out_sample_fmt, 1);
QByteArray atemp = QByteArray((const char *)audio_out_buffer, dst_bufsize);
byteBuf.append(atemp);
}
}
// video packet
else if (packet->stream_index == videoindex){
// decode one video frame
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0){
qDebug("Video decode failed.\n");
return -1;
}
if (got_picture){
double video_time = pFrame->pts;
sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
QImage img((uchar*)pFrameRGB->data[0],pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
img = img.scaled(vedioW, vedioH);
QPixmap temp = QPixmap::fromImage(img);
// vedioBuff.append(temp);
m_pix = temp;
double diff_time = video_time - audio_clock;
if (diff_time > frameRate && diff_time/fps < 500)
{
// qDebug() << diff_time;
ui->label->setPixmap(temp);
}
}
}
av_free_packet(packet); // release the packet after either branch
}
sws_freeContext(img_convert_ctx);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
avcodec_free_context(&aCodecCtx);
swr_free(&swr_ctx);
av_free(audio_out_buffer);
av_free(out_buffer);
av_free(packet);
return 0;
}
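/* Note: avcodec_decode_video2/avcodec_decode_audio4 used above are deprecated in newer FFmpeg
   releases. A minimal sketch of the replacement send/receive pattern (assuming FFmpeg >= 3.1),
   shown here only as an illustration and not called by the code above: */
static int decodeOnePacket(AVCodecContext *ctx, AVPacket *pkt, AVFrame *frame)
{
if (avcodec_send_packet(ctx, pkt) < 0) // feed the compressed packet to the decoder
return -1;
int ret = avcodec_receive_frame(ctx, frame); // try to pull one decoded frame
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return 0; // decoder needs more input or is flushed: no frame this time
return (ret < 0) ? -1 : 1; // 1 = a frame was written into `frame`
}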
void Widget::resizeEvent(QResizeEvent* event)
{
QOpenGLWidget::resizeEvent(event); // let the base class handle the GL resize first
vedioW = ui->label->width();
vedioH = ui->label->height();
}
void Widget::on_play_clicked()
{
vedioW = ui->label->width();
vedioH = ui->label->height();
if(timer->isActive())
timer->stop();
playVedio();
}
void Widget::loadAudioInfo(double rate,int channel)
{
QAudioFormat fmt;
fmt.setSampleRate(rate);
fmt.setSampleSize(16);
fmt.setChannelCount(channel);
fmt.setCodec("audio/pcm");
fmt.setByteOrder(QAudioFormat::LittleEndian);
fmt.setSampleType(QAudioFormat::SignedInt);
if(audioOutput){ // release the previous output before starting a new one
audioOutput->stop();
delete audioOutput;
}
audioOutput = new QAudioOutput(fmt);
streamOut = audioOutput->start();
}
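For completeness, a minimal main.cpp that drives this widget might look like the sketch below (assuming the usual Qt Creator project layout, where ui_widget.h provides the label and play button used above):
#include <QApplication>
#include "widget.h"
int main(int argc, char *argv[])
{
    QApplication a(argc, argv); // create the application and its event loop
    Widget w;                   // the player widget defined above
    w.show();                   // clicking the play button invokes playVedio()
    return a.exec();            // enter the Qt event loop
}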