Building on the previous chapter, this version adds a packet queue and a frame queue; comments identical to the previous chapter are not repeated.
(FFmpeg) A simple audio/video player built with FFmpeg + SDL2
/*
1) Every callback declared inside the class must be static and can only touch static members directly, so the this pointer is passed into each of them.
2) When the window is closed, the stop flags are set first and then we wait for every worker thread to exit; the event-handling thread exits only after all other threads have finished.
3) Compared with version 1.0, packet reading and decoding are split into two threads and format conversion is done in the audio/video threads; in hindsight, decoding and conversion probably belong together, with the frame queue holding converted frames instead.
*/
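Note 1 is easiest to see in isolation. The sketch below is illustrative only (the Example class is made up and is not part of the player): a static member function serves as the SDL thread entry and recovers the instance from the void* argument, which is exactly how readPackLoop/decodeLoop/audioLoop/videoLoop receive their this pointer.

#include <SDL2/SDL.h>
class Example
{
public:
    //SDL thread entries must be plain "int fn(void *)" functions, hence static
    static int threadEntry(void *pthis)
    {
        Example *self = (Example *)pthis;  //recover the instance from the opaque pointer
        return self->run();                //non-static members are reachable again
    }
    int run(void) { return 0; }            //the real work would live here
};
int main(void)
{
    Example ex;
    SDL_Thread *t = SDL_CreateThread(Example::threadEntry, "example", (void *)&ex);
    int status = 0;
    SDL_WaitThread(t, &status);            //join the thread before leaving
    return status;
}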
#include "player.h"
using namespace player;
#define ASSERT(c) do{if(c == NULL){printf("%s is NULL\n",#c);return -1;}}while(0);
#if 1
#define dprintf(fmt,args...) printf(fmt,##args)
#else
#define dprintf(fmt,args...)
#endif
Player::Player(const char *file):
screenH(0)
,screenW(0)
,filename(NULL)
,isEventClose(true)
,pFormatCtx(NULL)
,pPacket(NULL)
,pCodec(NULL)
,isReadPackClose(true)
,isDecodeClose(true)
,isAudioClose(true)
,isVideoClose(true)
,audioStreamIndex(-1)
,videoStreamIndex(-1)
,eventThread(NULL)
,readPackThread(NULL)
,decodeThread(NULL)
,audioPlayerThread(NULL)
,videoPlayerThread(NULL)
{
if(file)
{
filename = (char *)malloc(strlen(file)+1);
if(filename)
sprintf(filename,"%s",file);
}
else
filename = NULL;
avformat_network_init();
pMutex = SDL_CreateMutex();
closeOver.allOver = 0;
}
Player::~Player()
{
if(filename)
free(filename);
SDL_DestroyMutex(pMutex);
}
int Player::versionShow(void)
{
dprintf(VERSION);
return 0;
}
//Same comments as 1.0; not repeated here
int Player::init(void)
{
pFormatCtx = avformat_alloc_context();
ASSERT(pFormatCtx)
dprintf("filename:%s\n",filename);
if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER | SDL_INIT_EVENTS)) {
dprintf( "Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}else
{
if(avformat_open_input(&pFormatCtx,filename,NULL,NULL)!=0){
dprintf("Couldn't open input stream.\n");
return -1;
}
if(avformat_find_stream_info(pFormatCtx,NULL)<0){
dprintf("Couldn't find stream information.\n");
return -1;
}
av_dump_format(pFormatCtx, 0, filename, false);
eventThread = SDL_CreateThread(eventLoop,"event",(void *)this);
ASSERT(eventThread)
isEventClose = false;
readPackThread = SDL_CreateThread(readPackLoop,"readpack",(void *)this);
ASSERT(readPackThread)
}
return 0;
}
int Player::exit(void)
{
//to stop the audio and video threads, setting their flags is enough
if(closeOver.closeSingle.audioOver == 0)
{
isAudioClose = true;
}
if(closeOver.closeSingle.videoOver == 0)
{
isVideoClose = true;
}
//the decode thread may be blocked on a condition variable, so set its flag first and then wake it up
if(closeOver.closeSingle.decodeOver == 0)
{
if(!isDecodeClose)
{
isDecodeClose = true;
SDL_LockMutex(pCodec[videoStreamIndex].pFrameQueue->pMutex);
SDL_CondSignal(pCodec[videoStreamIndex].pFrameQueue->pCond);
SDL_UnlockMutex(pCodec[videoStreamIndex].pFrameQueue->pMutex);
SDL_LockMutex(pCodec[audioStreamIndex].pFrameQueue->pMutex);
SDL_CondSignal(pCodec[audioStreamIndex].pFrameQueue->pCond);
SDL_UnlockMutex(pCodec[audioStreamIndex].pFrameQueue->pMutex);
}
}
//same for the packet-reading thread
if(closeOver.closeSingle.readPacketOver == 0)
{
isReadPackClose = true;
SDL_LockMutex(packetQueue.pMutex);
SDL_CondSignal(packetQueue.pCond);
SDL_UnlockMutex(packetQueue.pMutex);
}
//after the threads have been released in resource-dependency order, block until they have all finished
while(closeOver.allOver != 0xF);
SDL_Quit();
if(pFormatCtx)
{
avformat_close_input(&pFormatCtx);
pFormatCtx = NULL;
}
isEventClose = true;
return 0;
}
int Player::keyEvent(int key)
{
switch (key)
{
case SDLK_SPACE:
break;
default:
break;
}
return 0;
}
int Player::eventLoop(void *pthis)
{
ASSERT(pthis)
SDL_Event event;
Player *tmpthis = (Player *) pthis;
tmpthis->isEventClose = false;
while((!tmpthis->isEventClose) && (pthis !=NULL))
{
SDL_WaitEvent(&event);
switch (event.type)
{
case SDL_KEYDOWN:
tmpthis->keyEvent(event.key.keysym.sym);
break;
case SDL_QUIT:
tmpthis->exit();
break;
default:
break;
}
}
dprintf("leave eventLoop\n");
return 0;
}
//basically the same as 1.0, with frame-queue initialization added
int Player::codecContextInit(int streamIndex)
{
dprintf("Init %d\n",streamIndex);
pCodec[streamIndex].pCodeCtx = avcodec_alloc_context3(NULL);
ASSERT(pCodec[streamIndex].pCodeCtx)
avcodec_parameters_to_context(pCodec[streamIndex].pCodeCtx,pFormatCtx->streams[streamIndex]->codecpar);
pCodec[streamIndex].pCodeCtx->pkt_timebase = pFormatCtx->streams[streamIndex]->time_base;
pCodec[streamIndex].pAvCodec = avcodec_find_decoder(pCodec[streamIndex].pCodeCtx->codec_id);
ASSERT(pCodec[streamIndex].pAvCodec)
if(avcodec_open2(pCodec[streamIndex].pCodeCtx, pCodec[streamIndex].pAvCodec,NULL)<0){
dprintf("Could not open codec.\n");
return -1;
}
switch (pFormatCtx->streams[streamIndex]->codecpar->codec_type)
{
case AVMEDIA_TYPE_VIDEO:
dprintf("AVMEDIA_TYPE_VIDEO\n");
videoStreamIndex = streamIndex;
if(pCodec[streamIndex].pCodeCtx->pix_fmt == AV_PIX_FMT_YUV420P)
{
pCodec[streamIndex].pFrame = av_frame_alloc();
ASSERT(pCodec[streamIndex].pFrame)
pCodec[streamIndex].videoOutFmt.pOutFrame = av_frame_alloc();
ASSERT(pCodec[streamIndex].videoOutFmt.pOutFrame)
pCodec[streamIndex].pBuffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,1));
ASSERT(pCodec[streamIndex].pBuffer)
av_image_fill_arrays(pCodec[streamIndex].videoOutFmt.pOutFrame->data, pCodec[streamIndex].videoOutFmt.pOutFrame->linesize,pCodec[streamIndex].pBuffer, \
AV_PIX_FMT_YUV420P,pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,1);
pCodec[streamIndex].pSwsConvertCtx = sws_getContext(pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height, pCodec[streamIndex].pCodeCtx->pix_fmt, \
pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,AV_PIX_FMT_YUV420P , SWS_BICUBIC, NULL, NULL, NULL);
ASSERT(pCodec[streamIndex].pSwsConvertCtx)
}
else
{
pCodec[streamIndex].pFrame = av_frame_alloc();
ASSERT(pCodec[streamIndex].pFrame)
pCodec[streamIndex].videoOutFmt.pOutFrame = av_frame_alloc();
ASSERT(pCodec[streamIndex].videoOutFmt.pOutFrame)
pCodec[streamIndex].pBuffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,1));
ASSERT(pCodec[streamIndex].pBuffer)
av_image_fill_arrays(pCodec[streamIndex].videoOutFmt.pOutFrame->data, pCodec[streamIndex].videoOutFmt.pOutFrame->linesize,pCodec[streamIndex].pBuffer, \
AV_PIX_FMT_RGB24,pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,1);
pCodec[streamIndex].pSwsConvertCtx = sws_getContext(pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height, pCodec[streamIndex].pCodeCtx->pix_fmt, \
pCodec[streamIndex].pCodeCtx->width, pCodec[streamIndex].pCodeCtx->height,AV_PIX_FMT_RGB24 , SWS_BICUBIC, NULL, NULL, NULL);
ASSERT(pCodec[streamIndex].pSwsConvertCtx)
}
if(screenW < pCodec[streamIndex].pCodeCtx->width)
screenW = pCodec[streamIndex].pCodeCtx->width;
if(screenH < pCodec[streamIndex].pCodeCtx->height)
screenH = pCodec[streamIndex].pCodeCtx->height;
//initialize the frame queue
pCodec[streamIndex].pFrameQueue = (FrameQueue *)malloc(sizeof(FrameQueue));
ASSERT(pCodec[streamIndex].pFrameQueue);
memset(pCodec[streamIndex].pFrameQueue,0,sizeof(FrameQueue));
//frame-queue mutex
pCodec[streamIndex].pFrameQueue->pMutex = SDL_CreateMutex();
//frame-queue condition variable
pCodec[streamIndex].pFrameQueue->pCond = SDL_CreateCond();
//frame-queue capacity
pCodec[streamIndex].pFrameQueue->maxSize = FRAME_QUEUE_MAX_SIZE;
//allocate every frame-queue element up front instead of on demand
for(int i=0;i<pCodec[streamIndex].pFrameQueue->maxSize;i++)
{
pCodec[streamIndex].pFrameQueue->qFrame[i] = av_frame_alloc();
ASSERT(pCodec[streamIndex].pFrameQueue->qFrame[i])
}
break;
case AVMEDIA_TYPE_AUDIO:
dprintf("AVMEDIA_TYPE_AUDIO\n");
audioStreamIndex = streamIndex;
pCodec[streamIndex].pAudioOutFmt = (AudioOutFmt *)malloc(sizeof(AudioOutFmt));
ASSERT(pCodec[streamIndex].pAudioOutFmt)
memset(pCodec[streamIndex].pAudioOutFmt,0,sizeof(AudioOutFmt));
pCodec[streamIndex].pAudioOutFmt->outChLayout = AV_CH_LAYOUT_STEREO;
pCodec[streamIndex].pAudioOutFmt->outNbSamples = pCodec[streamIndex].pCodeCtx->frame_size;
pCodec[streamIndex].pAudioOutFmt->outNbSampleFmt = AV_SAMPLE_FMT_S16;
pCodec[streamIndex].pAudioOutFmt->outSamplesRate = pCodec[streamIndex].pCodeCtx->sample_rate;
pCodec[streamIndex].pAudioOutFmt->outChannels = av_get_channel_layout_nb_channels(pCodec[streamIndex].pAudioOutFmt->outChLayout);
pCodec[streamIndex].pAudioOutFmt->outFrameSize = av_samples_get_buffer_size(NULL,pCodec[streamIndex].pAudioOutFmt->outChannels ,pCodec[streamIndex].pAudioOutFmt->outNbSamples,pCodec[streamIndex].pAudioOutFmt->outNbSampleFmt, 1);
pCodec[streamIndex].audioBufCtrl.audioLen = 0;
pCodec[streamIndex].audioBufCtrl.pAudioChunk = NULL;
pCodec[streamIndex].audioBufCtrl.pAudioPos = NULL;
pCodec[streamIndex].pBuffer = (unsigned char *)av_malloc(MAX_AUDIO_FRAME_SIZE*2);
ASSERT(pCodec[streamIndex].pBuffer)
pCodec[streamIndex].pFrame = av_frame_alloc();
ASSERT(pCodec[streamIndex].pFrame)
//initialize the frame queue, same as above
pCodec[streamIndex].pFrameQueue = (FrameQueue *)malloc(sizeof(FrameQueue));
ASSERT(pCodec[streamIndex].pFrameQueue);
memset(pCodec[streamIndex].pFrameQueue,0,sizeof(FrameQueue));
pCodec[streamIndex].pFrameQueue->pMutex = SDL_CreateMutex();
pCodec[streamIndex].pFrameQueue->pCond = SDL_CreateCond();
pCodec[streamIndex].pFrameQueue->maxSize = FRAME_QUEUE_MAX_SIZE;
for(int i=0;i<pCodec[streamIndex].pFrameQueue->maxSize;i++)
{
pCodec[streamIndex].pFrameQueue->qFrame[i] = av_frame_alloc();
ASSERT(pCodec[streamIndex].pFrameQueue->qFrame[i])
}
break;
default:
break;
}
return 0;
}
int Player::codecContextExit(int streamIndex)
{
if(pCodec != NULL)
{
if(pCodec[streamIndex].pCodeCtx != NULL)
{
SDL_LockMutex(pMutex);
avcodec_close(pCodec[streamIndex].pCodeCtx);
pCodec[streamIndex].pCodeCtx = NULL;
SDL_UnlockMutex(pMutex);
}
}
return 0;
}
//mostly the same as 1.0; not repeated here
int Player::readPackLoop(void *pthis)
{
ASSERT(pthis)
Player *tmpthis = (Player *) pthis;
tmpthis->isReadPackClose = false;
tmpthis->pCodec = (Codec *)malloc(sizeof(Codec)*(tmpthis->pFormatCtx->nb_streams));
ASSERT(tmpthis->pCodec)
memset(tmpthis->pCodec,0,sizeof(Codec)*(tmpthis->pFormatCtx->nb_streams));
for(int i=0; i < tmpthis->pFormatCtx->nb_streams; i++)
{
if(tmpthis->codecContextInit(i)<0)
{
dprintf("Stream%d init failed\n",i);
return -1;
}
}
//packet-queue mutex
tmpthis->packetQueue.pMutex = SDL_CreateMutex();
//packet-queue condition variable
tmpthis->packetQueue.pCond = SDL_CreateCond();
//create the remaining threads
tmpthis->decodeThread = SDL_CreateThread(decodeLoop,"decode",(void *)pthis);
ASSERT(tmpthis->decodeThread)
tmpthis->isDecodeClose = false;
tmpthis->audioPlayerThread = SDL_CreateThread(audioLoop,"audio",(void *)pthis);
ASSERT(tmpthis->audioPlayerThread)
tmpthis->isAudioClose = false;
tmpthis->videoPlayerThread = SDL_CreateThread(videoLoop,"video",(void *)pthis);
ASSERT(tmpthis->videoPlayerThread)
tmpthis->isVideoClose = false;
//temporary packet that holds each read before it is pushed into the queue
tmpthis->pPacket = av_packet_alloc();
ASSERT(tmpthis->pPacket)
//these two could just as well be local variables
tmpthis->gotAudioPicture = -1;
tmpthis->gotVideoPicture = -1;
while(!tmpthis->isReadPackClose)
{ //check whether the packet queue is full
if(tmpthis->packetQueue.qPacket.size() < PACKET_QUEUE_MAX_SIZE)
{
if(av_read_frame(tmpthis->pFormatCtx, tmpthis->pPacket) == 0)
{
//lock before pushing into the queue
SDL_LockMutex(tmpthis->packetQueue.pMutex);
//allocate a new packet
AVPacket *tmppacket = av_packet_alloc();
if (!tmppacket) {
dprintf("tmppacket malloc failed\n");
SDL_UnlockMutex(tmpthis->packetQueue.pMutex);
return -1;
}
//move the packet just read into the newly allocated one
av_packet_move_ref(tmppacket, tmpthis->pPacket);
//enqueue it
tmpthis->packetQueue.qPacket.push(tmppacket);
SDL_UnlockMutex(tmpthis->packetQueue.pMutex);
//the temporary packet can now be released, ready for the next read
av_packet_unref(tmpthis->pPacket);
}
//the queue is not full but the read failed: assume the file has been read to the end and stop the thread
else
{
tmpthis->isReadPackClose = true;
dprintf("readPacket Over\n");
}
}
//the queue is full: wait on the condition variable, which the decode thread signals each time it removes a packet
else
{
SDL_LockMutex(tmpthis->packetQueue.pMutex);
SDL_CondWait(tmpthis->packetQueue.pCond,tmpthis->packetQueue.pMutex);
SDL_UnlockMutex(tmpthis->packetQueue.pMutex);
}
}
dprintf("leave readPackLoop\n");
//packet-queue elements are freed in the decode thread, so before this thread exits it must wait for the decode thread to finish, ensuring every queued packet has been released
while(tmpthis->closeOver.closeSingle.decodeOver == 0);
if(tmpthis->pCodec)
{
free(tmpthis->pCodec);
tmpthis->pCodec = NULL;
}
SDL_DestroyMutex(tmpthis->packetQueue.pMutex);
SDL_DestroyCond(tmpthis->packetQueue.pCond);
av_packet_free(&tmpthis->pPacket);
tmpthis->pPacket = NULL;
tmpthis->codecContextExit(tmpthis->audioStreamIndex);
tmpthis->codecContextExit(tmpthis->videoStreamIndex);
tmpthis->isReadPackClose = true;
tmpthis->closeOver.closeSingle.readPacketOver = 1;
dprintf("readPackLoop free over\n");
return 0;
}
int Player::decodeLoop(void *pthis)
{
ASSERT(pthis)
Player *tmpthis = (Player *)pthis;
tmpthis->isDecodeClose = false;
while (!tmpthis->isDecodeClose)
{
//check whether the packet queue has an element to take
if(tmpthis->packetQueue.qPacket.size()>0)
{
//an element is available; lock before touching the queue
SDL_LockMutex(tmpthis->packetQueue.pMutex);
//take the first element
AVPacket *tmpPacket = tmpthis->packetQueue.qPacket.front();
//pop it from the queue (size decreases by one), but its resources are not released yet
tmpthis->packetQueue.qPacket.pop();
//determine the packet type
switch (tmpthis->pFormatCtx->streams[tmpPacket->stream_index]->codecpar->codec_type)
{
case AVMEDIA_TYPE_AUDIO:
tmpthis->audioStreamIndex = tmpPacket->stream_index;
avcodec_send_packet(tmpthis->pCodec[tmpthis->audioStreamIndex].pCodeCtx,tmpPacket);
//release the packet
av_packet_free(&tmpPacket);
//signal the condition variable
SDL_CondSignal(tmpthis->packetQueue.pCond);
SDL_UnlockMutex(tmpthis->packetQueue.pMutex);
//check whether the frame queue is full
if(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qSize >= tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->maxSize)
{
//the frame queue is full: wait on the condition variable, which the corresponding playback thread signals
SDL_LockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
SDL_CondWait(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pCond,tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
}
//once signalled, one element has been taken out of the frame queue
SDL_LockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
//wrap the write index back to the start if necessary
if(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->writePrt > tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->maxSize-1)
{
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->writePrt = 0;
}
//decode
tmpthis->gotAudioPicture = avcodec_receive_frame(tmpthis->pCodec[tmpthis->audioStreamIndex].pCodeCtx,
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->writePrt]);
if(tmpthis->gotAudioPicture == 0)
{
//on success, queue size +1 and write index +1
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qSize++;
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->writePrt++;
}
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
break;
case AVMEDIA_TYPE_VIDEO:
//same as the audio branch above
tmpthis->videoStreamIndex = tmpPacket->stream_index;
avcodec_send_packet(tmpthis->pCodec[tmpthis->videoStreamIndex].pCodeCtx,tmpPacket);
av_packet_free(&tmpPacket);
SDL_CondSignal(tmpthis->packetQueue.pCond);
SDL_UnlockMutex(tmpthis->packetQueue.pMutex);
if(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qSize >= tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->maxSize)
{
SDL_LockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
SDL_CondWait(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pCond,tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
}
SDL_LockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
if(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->writePrt>tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->maxSize-1)
{
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->writePrt = 0;
}
tmpthis->gotVideoPicture = avcodec_receive_frame(tmpthis->pCodec[tmpthis->videoStreamIndex].pCodeCtx,
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->writePrt]);
if(tmpthis->gotVideoPicture == 0)
{
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qSize++;
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->writePrt++;
}
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
break;
default:
break;
}
}
else
{
if(tmpthis->isReadPackClose)
{
tmpthis->isDecodeClose = true;
dprintf("isDecodeClose is true\n");
}
}
}
dprintf("leave decodeLoop\n");
//wait for the audio and video threads to finish so no frame is still being used
while(!((tmpthis->closeOver.closeSingle.audioOver==1) && (tmpthis->closeOver.closeSingle.videoOver==1)));
//free every remaining element of the packet queue
while(!tmpthis->packetQueue.qPacket.empty())
{
AVPacket *tmpPacket = tmpthis->packetQueue.qPacket.front();
av_packet_free(&tmpPacket);
tmpthis->packetQueue.qPacket.pop();
}
//free every element of the audio frame queue
for(int z = 0;z<tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->maxSize;z++)
{
av_frame_free(&tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qFrame[z]);
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qFrame[z] = NULL;
}
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->maxSize = 0;
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qSize = 0;
SDL_DestroyMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
SDL_DestroyCond(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pCond);
//free every element of the video frame queue
for(int z = 0;z<tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->maxSize;z++)
{
av_frame_free(&tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qFrame[z]);
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qFrame[z] = NULL;
}
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->maxSize = 0;
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qSize = 0;
SDL_DestroyMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
SDL_DestroyCond(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pCond);
av_frame_free(&tmpthis->pCodec[tmpthis->audioStreamIndex].pFrame);
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrame = NULL;
av_frame_free(&tmpthis->pCodec[tmpthis->videoStreamIndex].pFrame);
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrame = NULL;
tmpthis->isDecodeClose = true;
tmpthis->closeOver.closeSingle.decodeOver = 1;
dprintf("decodeLoop free over\n");
return 0;
}
//same as 1.0
void Player::audioCallBack(void *pthis,unsigned char *stream,int len)
{
if(pthis == NULL)
return;
Player *tmpthis = (Player *)pthis;
SDL_memset(stream, 0, len);
if(tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen == 0)
return;
len=(len>tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen?tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen:len);
memcpy(stream, tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.pAudioPos, len);
tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.pAudioPos += len;
tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen -= len;
}
int Player::audioLoop(void *pthis)
{
ASSERT(pthis)
Player *tmpthis = (Player *)pthis;
tmpthis->isAudioClose = false;
SDL_AudioSpec audioSpec;
int i = tmpthis->audioStreamIndex;
audioSpec.freq = tmpthis->pCodec[i].pAudioOutFmt->outSamplesRate;
audioSpec.format = AUDIO_S16SYS;
audioSpec.channels = tmpthis->pCodec[i].pAudioOutFmt->outChannels;
audioSpec.silence = 0;
audioSpec.samples = tmpthis->pCodec[i].pAudioOutFmt->outNbSamples;
audioSpec.callback = audioCallBack;
audioSpec.userdata = pthis;
if (SDL_OpenAudio(&audioSpec, NULL)<0){
dprintf("can't open audio.\n");
return -1;
}
tmpthis->pCodec[i].pSwrConvertCtx = swr_alloc();
ASSERT(tmpthis->pCodec[i].pSwrConvertCtx)
tmpthis->pCodec[i].pSwrConvertCtx=swr_alloc_set_opts(tmpthis->pCodec[i].pSwrConvertCtx, \
tmpthis->pCodec[i].pAudioOutFmt->outChLayout, \
tmpthis->pCodec[i].pAudioOutFmt->outNbSampleFmt, \
tmpthis->pCodec[i].pAudioOutFmt->outSamplesRate, \
av_get_default_channel_layout(tmpthis->pCodec[i].pCodeCtx->channels), \
tmpthis->pCodec[i].pCodeCtx->sample_fmt , \
tmpthis->pCodec[i].pCodeCtx->sample_rate,0, NULL);
swr_init(tmpthis->pCodec[i].pSwrConvertCtx);
SDL_PauseAudio(0);
while(!tmpthis->isAudioClose)
{
//check whether a frame is available
if(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qSize>0)
{
//a frame is available: use the queue element at the read index directly
SDL_LockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
swr_convert(tmpthis->pCodec[tmpthis->audioStreamIndex].pSwrConvertCtx, \
&tmpthis->pCodec[tmpthis->audioStreamIndex].pBuffer, \
MAX_AUDIO_FRAME_SIZE, \
(const uint8_t **)tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->readPrt]->data, \
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->readPrt]->nb_samples);
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->readPrt++;
if(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->readPrt>tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->maxSize-1)
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->readPrt = 0;
tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->qSize--;
SDL_CondSignal(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pCond);
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->audioStreamIndex].pFrameQueue->pMutex);
while(tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen>0)
{
SDL_Delay(1);
}
tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.pAudioChunk = tmpthis->pCodec[tmpthis->audioStreamIndex].pBuffer;
tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.audioLen =tmpthis->pCodec[tmpthis->audioStreamIndex].pAudioOutFmt->outFrameSize;
tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.pAudioPos = tmpthis->pCodec[tmpthis->audioStreamIndex].audioBufCtrl.pAudioChunk;
}
}
dprintf("leave audioLoop\n");
if(tmpthis->pCodec[tmpthis->audioStreamIndex].pAudioOutFmt!=NULL)
{
free(tmpthis->pCodec[tmpthis->audioStreamIndex].pAudioOutFmt);
tmpthis->pCodec[tmpthis->audioStreamIndex].pAudioOutFmt = NULL;
}
swr_free(&tmpthis->pCodec[tmpthis->audioStreamIndex].pSwrConvertCtx);
tmpthis->pCodec[tmpthis->audioStreamIndex].pSwrConvertCtx = NULL;
av_free(tmpthis->pCodec[tmpthis->audioStreamIndex].pBuffer);
tmpthis->pCodec[tmpthis->audioStreamIndex].pBuffer = NULL;
SDL_CloseAudio();
tmpthis->isAudioClose = true;
tmpthis->closeOver.closeSingle.audioOver = 1;
dprintf("audioLoop free over\n");
return 0;
}
//handled the same way as the audio thread
int Player::videoLoop(void *pthis)
{
ASSERT(pthis)
Player *tmpthis = (Player *)pthis;
tmpthis->isVideoClose = false;
SDL_Window *screen;
screen = SDL_CreateWindow("Player",SDL_WINDOWPOS_UNDEFINED,SDL_WINDOWPOS_UNDEFINED,\
tmpthis->screenW,tmpthis->screenH,SDL_WINDOW_SHOWN);
if(!screen)
{
dprintf("Can't Create Window -- %s\n",SDL_GetError());
return -1;
}
SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
SDL_RenderClear(sdlRenderer);
SDL_Texture* sdlTexture = NULL;
if(tmpthis->pCodec[tmpthis->videoStreamIndex].pCodeCtx->pix_fmt == AV_PIX_FMT_YUV420P)
sdlTexture = SDL_CreateTexture(sdlRenderer,SDL_PIXELFORMAT_IYUV,\
SDL_TEXTUREACCESS_STREAMING,tmpthis->screenW,tmpthis->screenH);//SDL_PIXELFORMAT_RGB24 /SDL_PIXELFORMAT_IYUV
else
sdlTexture = SDL_CreateTexture(sdlRenderer,SDL_PIXELFORMAT_RGB24,\
SDL_TEXTUREACCESS_STREAMING,tmpthis->screenW,tmpthis->screenH);//SDL_PIXELFORMAT_RGB24 /SDL_PIXELFORMAT_IYUV
SDL_Rect sdlRect;
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.h = tmpthis->screenH;
sdlRect.w = tmpthis->screenW;
while(!tmpthis->isVideoClose)
{
if(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qSize>0)
{
SDL_LockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
sws_scale(tmpthis->pCodec[tmpthis->videoStreamIndex].pSwsConvertCtx,
(const unsigned char* const*)tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->readPrt]->data,
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qFrame[tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->readPrt]->linesize,
0,
tmpthis->pCodec[tmpthis->videoStreamIndex].pCodeCtx->height,
tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame->data,
tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame->linesize);
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->readPrt++;
if(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->readPrt>tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->maxSize-1)
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->readPrt = 0;
tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->qSize--;
SDL_CondSignal(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pCond);
SDL_UnlockMutex(tmpthis->pCodec[tmpthis->videoStreamIndex].pFrameQueue->pMutex);
SDL_UpdateTexture( sdlTexture, &sdlRect, \
tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame->data[0], \
tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame->linesize[0] );
SDL_RenderClear( sdlRenderer );
SDL_RenderCopy( sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent( sdlRenderer );
tmpthis->gotVideoPicture = -1;
}
}
dprintf("leave videoLoop\n");
av_free(tmpthis->pCodec[tmpthis->videoStreamIndex].pBuffer);
tmpthis->pCodec[tmpthis->videoStreamIndex].pBuffer = NULL;
av_frame_free(&tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame);
tmpthis->pCodec[tmpthis->videoStreamIndex].videoOutFmt.pOutFrame = NULL;
sws_freeContext(tmpthis->pCodec[tmpthis->videoStreamIndex].pSwsConvertCtx);
tmpthis->pCodec[tmpthis->videoStreamIndex].pSwsConvertCtx = NULL;
tmpthis->isVideoClose = true;
tmpthis->closeOver.closeSingle.videoOver = 1;
SDL_DestroyWindow(screen);
dprintf("videoLoop free over\n");
return 0;
}
#ifndef PLAYER_H
#define PLAYER_H
extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <SDL2/SDL.h>
}
#include <queue>
//version string
#define VERSION "2.0v\n"
//maximum data size of a single audio frame
#define MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio
//packet-queue capacity
#define PACKET_QUEUE_MAX_SIZE 8
//frame-queue capacity
#define FRAME_QUEUE_MAX_SIZE 16
//namespace
namespace player
{
//audio playback control
typedef struct AUDIOBUFCTRL
{
//start address of the audio playback buffer
unsigned char *pAudioChunk;
//current playback position within the buffer
unsigned char *pAudioPos;
//bytes of audio left to play
unsigned int audioLen;
}AudioBufCtrl;
//audio output format after conversion
typedef struct AUDIOOUTFMT
{
//samples per channel
int outNbSamples;
//sample rate
int outSamplesRate;
//number of output channels
int outChannels;
//output frame size in bytes
int outFrameSize;
//output channel layout
unsigned long outChLayout;
//sample format
AVSampleFormat outNbSampleFmt;
}AudioOutFmt;
//video output format after conversion
typedef struct VIDEOOUTFMT
{
//holds a single converted frame
AVFrame *pOutFrame;
}VideoOutFmt;
//packet queue
typedef struct PACKETQUEUE
{
//packet queue built on the C++ queue template, which is not thread-safe
std::queue<AVPacket *> qPacket;
//mutex
SDL_mutex *pMutex;
//condition variable
SDL_cond *pCond;
}PacketQueue;
//frame queue
typedef struct FRAMEQUEUE
{
//the frame queue is an array of frame pointers; all of them are allocated at initialization instead of on demand
AVFrame *qFrame[FRAME_QUEUE_MAX_SIZE];
//maximum number of elements
int maxSize;
//number of elements currently stored
int qSize;
//read index
int readPrt;
//write index
int writePrt;
//mutex
SDL_mutex *pMutex;
//condition variable
SDL_cond *pCond;
}FrameQueue;
//codec-related state
typedef struct CODEC
{
//codec context
AVCodecContext *pCodeCtx;
//decoder
const AVCodec *pAvCodec;
//temporary storage for a single frame
AVFrame *pFrame;
//video converter
struct SwsContext *pSwsConvertCtx;
//audio converter
struct SwrContext *pSwrConvertCtx;
//buffer for converted data
unsigned char *pBuffer;
//audio output format
AudioOutFmt *pAudioOutFmt;
//audio playback control
AudioBufCtrl audioBufCtrl;
//video output format
VideoOutFmt videoOutFmt;
//frame queue
FrameQueue *pFrameQueue;
}Codec;
//exit status of all threads
typedef union CLOSE
{
//individual status bits
struct CLOSESINGLE
{
//packet-reading thread finished
unsigned char readPacketOver:1;
//decode thread finished
unsigned char decodeOver:1;
//audio thread finished
unsigned char audioOver:1;
//video thread finished
unsigned char videoOver:1;
}closeSingle;
//combined status
unsigned char allOver;
}Close;
//player class
class Player
{
public:
//input file name
char *filename;
//whether the event thread has exited
bool isEventClose;
//format context
AVFormatContext *pFormatCtx;
//temporary storage for a single packet
AVPacket *pPacket;
//codec array
Codec *pCodec;
//constructor
Player(const char *file);
//destructor
~Player();
//print the version
int versionShow(void);
//initialization
int init(void);
//release the resources acquired in init
int exit(void);
//event loop
static int eventLoop(void *pthis);
//packet-reading loop
static int readPackLoop(void *pthis);
//decode loop
static int decodeLoop(void *pthis);
//audio loop
static int audioLoop(void *pthis);
//video loop
static int videoLoop(void *pthis);
//audio playback callback
static void audioCallBack(void *pthis,unsigned char *stream,int len);
//codec initialization
int codecContextInit(int streamIndex);
//codec resource release
int codecContextExit(int streamIndex);
private:
//flag telling the packet-reading thread to stop
bool isReadPackClose;
//flag telling the decode thread to stop
bool isDecodeClose;
//flag telling the audio playback thread to stop
bool isAudioClose;
//flag telling the video playback thread to stop
bool isVideoClose;
//thread exit status
Close closeOver;
//window width
int screenW;
//window height
int screenH;
//audio stream index
int audioStreamIndex;
//video stream index
int videoStreamIndex;
//status of receiving an audio frame
int gotAudioPicture;
//status of receiving a video frame
int gotVideoPicture;
//mutex
SDL_mutex *pMutex;
//thread handles
SDL_Thread *eventThread;
SDL_Thread *readPackThread;
SDL_Thread *decodeThread;
SDL_Thread *audioPlayerThread;
SDL_Thread *videoPlayerThread;
//packet queue
PacketQueue packetQueue;
//handle keyboard events
int keyEvent(int key);
};
}
#endif
#include "player.h"
using namespace player;
int main(int argc,char *argv[])
{
if(argc!=2)
{
printf("参数异常\n");
return 0;
}
Player player(argv[1]);
player.versionShow();
player.init();
while(!player.isEventClose);
return 0;
}
#! /bin/sh
g++ player.cpp player_main.cpp -g -o player.a -I /usr/local/ffmpeg/include -I ./ -L /usr/local/ffmpeg/lib \
-lSDL2main -lSDL2 -lavformat -lavcodec -lavutil -lswresample -lswscale -lm -lpthread
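Assuming the build above succeeds, a run just passes the media file as the single argument, e.g. ./player.a test.mp4 (test.mp4 being a placeholder file name).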
git@gitee.com:Yi-Deng-Da-Shi/player_v2.git
1. The frame queue currently stores frames exactly as received from the decoder; should it hold converted frames instead?
2. With some files, e.g. DuKou.flac in the gitee repo, frame_size is 0, which breaks playback. frame_size should instead be set after a frame has been read from the decoder; here, for example, a decoded frame holds 4096 samples, and playback is normal once that value is used (see the sketch below).
3. The code is bloated.
4. No audio/video synchronization yet.
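The workaround in issue 2 could look roughly like the helper below. This is only a sketch of the idea, not code from the repo: the function name is made up, it assumes player.h is included, and it would be called in audioLoop once the first frame has been decoded, before the output buffer size is relied on.

#include "player.h"
//hypothetical helper: derive the output frame size from the first decoded frame when the
//codec context reports frame_size == 0 (as happens with some FLAC streams)
static void fixAudioOutFmt(player::AudioOutFmt *fmt, const AVFrame *firstFrame)
{
    if (fmt->outNbSamples <= 0 && firstFrame->nb_samples > 0)
    {
        fmt->outNbSamples = firstFrame->nb_samples;   //e.g. 4096 for DuKou.flac
        fmt->outFrameSize = av_samples_get_buffer_size(NULL, fmt->outChannels,
                                                       fmt->outNbSamples,
                                                       fmt->outNbSampleFmt, 1);
    }
}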