The bitstream received over the network has been stored in a shared linked list of PacketNode_t nodes. It must be decoded into YUV or RGB frames before it can be displayed, so this section covers decoding with FFmpeg and playing the video with SDL.
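The decoding thread below pops packets from a shared list named packetList and guards it with a CCriticalSection named cs; neither is declared in this excerpt. A minimal sketch of the assumed definitions (the field names are taken from the code that follows, everything else is an assumption about the receiving code):

#include <list>
#include <afxmt.h>   // CCriticalSection

// Assumed layout of one received bitstream packet.
typedef struct PacketNode_t
{
    uint8_t *buf;      // encoded bitstream data
    int      length;   // number of valid bytes in buf
    int      flags;    // e.g. AV_PKT_FLAG_KEY for key frames
} PacketNode_t;

std::list<PacketNode_t> packetList;   // FIFO shared with the receive thread
CCriticalSection        cs;           // protects packetList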
Decoding the bitstream is CPU-intensive, so it runs in a dedicated decoding thread. Before that thread is started, the decoder has to be initialized.
AVFrame *m_picture;                      // decoded frame, in the decoder's native pixel format
AVFrame *m_pFrameYUV;                    // frame converted to YUV420P for display
AVCodec *m_codec;                        // video decoder
AVCodecContext *m_pCodecCtx;             // decoder context
struct SwsContext *m_pImgCtx;            // swscale context for pixel-format conversion
AVCodecParserContext *m_pCodecParserCtx; // bitstream parser context
AVFormatContext *m_pFmtCtx;              // format context
int m_PicBytes;                          // size in bytes of one converted frame
uint8_t* m_PicBuf;                       // buffer backing m_pFrameYUV
BOOL CVideoDlg::InitDecoder()
{
    int Ret = 0;
    av_register_all();   // register all formats and codecs
    m_pFmtCtx = avformat_alloc_context();
    if (NULL == m_pFmtCtx)
    {
        Printf("avformat_alloc_context failed!\n");
        return FALSE;
    }
    // Find the decoder; the codec ID must match the encoder used on the sending side
    m_codec = avcodec_find_decoder(AV_CODEC_ID_H265);
    if (!m_codec)
    {
        Printf(TRUE, FALSE, "Codec not found !\n");
        return FALSE;
    }
    // Allocate the decoder context and the bitstream parser
    m_pCodecCtx = avcodec_alloc_context3(m_codec);
    if (!m_pCodecCtx)
    {
        Printf(TRUE, FALSE, "Could not allocate video codec context\n");
        return FALSE;
    }
    m_pCodecParserCtx = av_parser_init(AV_CODEC_ID_H265);
    if (!m_pCodecParserCtx)
    {
        Printf(TRUE, FALSE, "Could not allocate video parser context\n");
        return FALSE;
    }
    // Open the decoder
    Ret = avcodec_open2(m_pCodecCtx, m_codec, NULL);
    if (Ret < 0)
    {
        Printf(TRUE, FALSE, "Could not open codec !\n");
        return FALSE;
    }
    // Allocate the AVFrames
    m_picture = av_frame_alloc();
    m_pFrameYUV = av_frame_alloc();
    m_PicBytes = 0;
    m_PicBuf = NULL;
    m_pImgCtx = NULL;
    // Initialize SDL for video output
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
    {
        AfxMessageBox(_T("Could not initialize SDL\n"));
        return FALSE;
    }
    return TRUE;
}
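InitDecoder() allocates FFmpeg and SDL resources that should be released when the dialog is destroyed. The original code does not show that step; a minimal counterpart (the name UninitDecoder is an assumption) could look like this:

void CVideoDlg::UninitDecoder()
{
    if (m_pImgCtx)
        sws_freeContext(m_pImgCtx);
    if (m_pCodecParserCtx)
        av_parser_close(m_pCodecParserCtx);
    if (m_pCodecCtx)
    {
        avcodec_close(m_pCodecCtx);
        av_free(m_pCodecCtx);
    }
    av_frame_free(&m_picture);
    av_frame_free(&m_pFrameYUV);
    if (m_pFmtCtx)
        avformat_free_context(m_pFmtCtx);
    SDL_Quit();
}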
During initialization a timer is armed with SetTimer() (a sketch of the call site follows the handler below); when it fires, the OnTimer() handler starts the decoding thread:
void CVideoDlg::OnTimer(UINT_PTR nIDEvent)
{
    if (0 == thread_exit)
    {
        return;
    }
    if (123 == nIDEvent)
    {
        pThreadPlay = AfxBeginThread(Thread_Play, this);   // start the decoding thread
        KillTimer(123);                                    // one-shot: stop the timer
    }
}
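The call that arms this timer is not shown in the excerpt. A plausible placement is the dialog's OnInitDialog(), right after the decoder has been initialized (the 100 ms interval and the CDialogEx base class are assumptions; the timer ID 123 comes from OnTimer() above):

BOOL CVideoDlg::OnInitDialog()
{
    CDialogEx::OnInitDialog();
    if (InitDecoder())
    {
        // OnTimer() starts Thread_Play on the first tick and then kills the timer
        SetTimer(123, 100, NULL);
    }
    return TRUE;
}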
Thread_Play is the thread entry function passed to AfxBeginThread; its implementation simply forwards to the dialog class:
static UINT Thread_Play(VOID *pVoid)
{
    CVideoDlg *pDlg = (CVideoDlg *)pVoid;   // the dialog object passed to AfxBeginThread
    pDlg->FFmpegPlayer();                   // decode with FFmpeg and play with SDL
    return 0;
}
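FFmpegPlayer() below uses several variables that are never declared in this excerpt: the SDL objects, the target picture size, the stop flag checked in OnTimer(), and the custom quit event. Their assumed declarations are roughly as follows (initial values are assumptions):

// Assumed globals; the real definitions live elsewhere in the project.
static int            thread_exit  = 1;     // OnTimer() starts the thread only when nonzero
static CWinThread    *pThreadPlay  = NULL;  // decoding/playback thread
static int            g_width      = 640;   // negotiated video width (assumed)
static int            g_height     = 480;   // negotiated video height (assumed)
static int            screen_w, screen_h;
static SDL_Window    *screen       = NULL;
static SDL_Renderer  *sdlRenderer  = NULL;
static SDL_Texture   *sdlTexture   = NULL;
static SDL_Rect       sdlRect;
static SDL_Event      g_event;
#define SFM_BREAK_EVENT (SDL_USEREVENT + 1)  // custom event that ends the playback loop (value assumed)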
void CVideoDlg::FFmpegPlayer()
{
    int Ret = 0;
    int64_t pos = 0;
    int vid_idx, i;
    AVPacket pkt;
    av_init_packet(&pkt);
    // Look for a video stream in the format context
    for (i = 0; i < m_pFmtCtx->nb_streams; i++)
    {
        if (m_pFmtCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            vid_idx = i;
        }
    }
    // Render into the MFC picture control (IDC_SCREEN)
    screen_w = g_width;
    screen_h = g_height;
    screen = SDL_CreateWindowFrom(GetDlgItem(IDC_SCREEN)->GetSafeHwnd());
    if (!screen)
    {
        AfxMessageBox(_T("SDL: could not create window - exiting\n"));
        return;
    }
    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV,
                                   SDL_TEXTUREACCESS_STREAMING, screen_w, screen_h);
    sdlRect.x = 0;
    sdlRect.y = 0;
    sdlRect.w = screen_w;
    sdlRect.h = screen_h;
    while (1)
    {
        SDL_WaitEvent(&g_event);
        if (g_event.type != SFM_BREAK_EVENT)
        {
            if (!packetList.empty())
            {
                // Copy the next received packet into an AVPacket
                cs.Lock();
                pkt.size = packetList.front().length;
                pkt.data = packetList.front().buf;
                pkt.flags = packetList.front().flags;
                pkt.stream_index = 0;
                pkt.duration = 48000;
                pkt.pos += pos;
                pos = packetList.front().length;
                packetList.pop_front();
                cs.Unlock();
                // Decode the packet
                DecodeVideoFrame(&pkt);
            }
            // Release the packet
            av_free_packet(&pkt);
            Sleep(10);
        }
        else
        {
            break;
        }
    }
    Printf("Exited the playback loop; decoding finished.\n");
}
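The loop exits when SFM_BREAK_EVENT arrives. Where that event is pushed from is not shown; a typical choice is the dialog's close handler (the CDialogEx base class is an assumption):

void CVideoDlg::OnClose()
{
    SDL_Event event;
    event.type = SFM_BREAK_EVENT;   // wakes SDL_WaitEvent and ends the playback loop
    SDL_PushEvent(&event);
    // Waiting for Thread_Play to finish before the dialog is destroyed is omitted here.
    CDialogEx::OnClose();
}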
/* Decode the bitstream into a video frame, convert it to YUV420P,
   and display it in the MFC dialog through SDL. */
BOOL CVideoDlg::DecodeVideoFrame(AVPacket *pkt)
{
    int Ret = -1;
    int got_picture = 0;
    // Decode the packet into m_picture
    Ret = avcodec_decode_video2(m_pCodecCtx, m_picture, &got_picture, pkt);
    if (Ret < 0)
    {
        Printf("Error while decoding video frame\n");
        return FALSE;
    }
    if (1 != got_picture)
    {
        Printf("could not get decoded video frame\n");
        return FALSE;
    }
    int width = m_pCodecCtx->width;
    int height = m_pCodecCtx->height;
    // Size of one YUV420P frame in bytes
    m_PicBytes = avpicture_get_size(PIX_FMT_YUV420P, width, height);
    m_PicBuf = new uint8_t[m_PicBytes];
    // Attach the buffer to m_pFrameYUV's data planes
    avpicture_fill((AVPicture *)m_pFrameYUV, m_PicBuf, PIX_FMT_YUV420P, width, height);
    if (!m_pImgCtx)
    {
        // Conversion context: decoder's pixel format -> YUV420P
        m_pImgCtx = sws_getContext(width, height, m_pCodecCtx->pix_fmt,
                                   width, height, PIX_FMT_YUV420P,
                                   SWS_BICUBIC, NULL, NULL, NULL);
    }
    // Convert the decoded frame to YUV420P
    sws_scale(m_pImgCtx, (const uint8_t *const *)m_picture->data, m_picture->linesize,
              0, height, m_pFrameYUV->data, m_pFrameYUV->linesize);
    // SDL display ------------------------
    SDL_UpdateTexture(sdlTexture, NULL, m_pFrameYUV->data[0], m_pFrameYUV->linesize[0]);
    SDL_RenderClear(sdlRenderer);
    SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
    SDL_RenderPresent(sdlRenderer);
    // SDL End ----------------------------
    delete[] m_PicBuf;   // release the per-frame conversion buffer
    return TRUE;
}
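Note that SDL_UpdateTexture() is passed only the Y-plane pointer and pitch; this works because avpicture_fill() lays the Y, U and V planes out back to back in m_PicBuf. A more explicit equivalent passes each plane separately with SDL_UpdateYUVTexture(), which can replace the SDL_UpdateTexture() call above:

// Drop-in replacement for the SDL_UpdateTexture() call: pass each plane explicitly.
SDL_UpdateYUVTexture(sdlTexture, NULL,
                     m_pFrameYUV->data[0], m_pFrameYUV->linesize[0],   // Y
                     m_pFrameYUV->data[1], m_pFrameYUV->linesize[1],   // U
                     m_pFrameYUV->data[2], m_pFrameYUV->linesize[2]);  // V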
With this, the video chat software completes its full pipeline: video capture, encoding, bitstream transport over the network, decoding, and playback.