《 FFmpeg学习(一) 》
本工程用于 FFmpeg 的开发学习
开发工具和库:VS2013 + ffmpeg-4.0 + SDL2.0.8
Git的学习请看我这篇:《Git上传代码到github上》
码云的工程地址:https://gitee.com/harray/MyQtFFmpegStudy
摄像头显示:https://gitee.com/harray/MyQtFFmpegStudy/tree/cammer_computer/
myqtffmpegstudy.cpp
#include "myqtffmpegstudy.h"
#include "mediamsg.h"
#include
HWND g_hwnd_widget_camera;
// Constructor: builds the UI, enumerates cameras into the combo box and
// wires the open/close buttons to the MediaMsg worker object.
MyQtFFmpegStudy::MyQtFFmpegStudy(QWidget *parent)
    : QWidget(parent)
{
    ui.setupUi(this);
    this->setFixedSize(629, 410);
    m_bIsOpen = false;

    // Collect the camera descriptions, then fill the combo box.
    getDeviceInfoToList();
    foreach(QVariant qvar, m_qlistCameraInfo)
    {
        // FIX: QVariant::value() is a template and requires an explicit
        // type argument — the original bare call would not compile.
        // toString() retrieves the stored QString directly.
        QString qstrCamera = qvar.toString();
        addCameraToComboBox(qstrCamera);
    }

    connect(ui.btnOpenCamera, &QPushButton::clicked, this, &MyQtFFmpegStudy::SlotOpenCamera);
    connect(ui.btnCloseCamera, &QPushButton::clicked, this, &MyQtFFmpegStudy::SlotCloseCamera);
    connect(ui.comboBox_Camera, SIGNAL(currentIndexChanged(int)), this, SLOT(SlotGetCameraComboBoxText(int)));
    // Cross-thread signals: MediaMsg lives on its own worker thread.
    connect(this, &MyQtFFmpegStudy::signal_send_open_camera, MediaMsgIns, &MediaMsg::SlotOpenCamera);
    connect(this, &MyQtFFmpegStudy::signal_send_close_camera, MediaMsgIns, &MediaMsg::SlotCloseCamera);

    // Native handle of the display widget; SDL renders into it.
    g_hwnd_widget_camera = (HWND)ui.widget_show->winId();
}
// Destructor: nothing to release explicitly — child widgets are owned
// and destroyed by the Qt parent/child mechanism.
MyQtFFmpegStudy::~MyQtFFmpegStudy()
{
}
// Returns the cached camera-description list wrapped in a QVariant.
QVariant MyQtFFmpegStudy::getCameraListInfo()
{
    QVariant qvarCameraList = QVariant::fromValue(m_qlistCameraInfo);
    return qvarCameraList;
}
// Queries the system for available cameras and appends each camera's
// human-readable description to m_qlistCameraInfo.
// (The original comment here was mojibake from a lost encoding.)
void MyQtFFmpegStudy::getDeviceInfoToList()
{
    /// Enumerate camera devices.
    // FIX: iterate by const reference to avoid copying each QCameraInfo.
    foreach(const QCameraInfo &cameraInfo, QCameraInfo::availableCameras())
    {
        m_qlistCameraInfo.append(QVariant::fromValue(cameraInfo.description()));
    }
}
// Appends one camera description to the camera-selection combo box.
void MyQtFFmpegStudy::addCameraToComboBox(QString qstrCamera)
{
ui.comboBox_Camera->addItem(qstrCamera);
}
// Slot: logs the camera name whenever the combo-box selection changes.
// @param index  index of the newly selected combo-box item.
void MyQtFFmpegStudy::SlotGetCameraComboBoxText(int index)
{
    QString qstrCamera = ui.comboBox_Camera->itemText(index);
    // FIX: never pass user/device-controlled text as the format string —
    // a '%' in the camera name would be parsed as a conversion specifier.
    qDebug("%s", qstrCamera.toLocal8Bit().constData());
}
// Slot: collects the selected camera and display-widget info, hands it to
// the MediaMsg worker, and signals it to start capturing.
void MyQtFFmpegStudy::SlotOpenCamera()
{
    // Ignore repeated clicks while the camera is already open.
    if (m_bIsOpen)
    {
        return;
    }

    VideoAudio videoaudio;
    // dshow device URLs take the form "video=<camera description>".
    QString qstrCamera = "video=";
    qstrCamera.append(ui.comboBox_Camera->currentText());

    // Refresh the native handle SDL will render into.
    g_hwnd_widget_camera = (HWND)ui.widget_show->winId();

    videoaudio.qstrCamera = qstrCamera;
    // FIX: fill in the window id BEFORE handing the struct to MediaMsg —
    // the original assigned lwinid after SetVideoAudio(), so the value
    // never reached the media thread.
    videoaudio.lwinid = (long)ui.widget_show->winId();
    MediaMsgIns->SetVideoAudio(videoaudio);
    MediaMsgIns->SetCamWidgetSize(ui.widget_show->width(), ui.widget_show->height());

    emit signal_send_open_camera();
    m_bIsOpen = true;
}
// Slot: flags the camera closed and asks the media thread to stop.
void MyQtFFmpegStudy::SlotCloseCamera()
{
    m_bIsOpen = false;
    emit signal_send_close_camera();

    // Give the capture loop a moment to wind down, then repaint this
    // widget so the last SDL frame is cleared.
    const int nRepaintDelayMs = 100;
    QTimer::singleShot(nRepaintDelayMs, this, &MyQtFFmpegStudy::SlotUpdateWnd);
}
void MyQtFFmpegStudy::SlotUpdateWnd()
{
update();
}
mediamsg.cpp
#include "mediamsg.h"
extern HWND g_hwnd_widget_camera;
//Refresh Event
#define REFRESH_EVENT (SDL_USEREVENT + 1)
//Break
#define BREAK_EVENT (SDL_USEREVENT + 2)
bool thread_exit = false;
int RefreshVideo(void *opaque)
{
thread_exit = false;
while (!thread_exit)
{
SDL_Event event;
event.type = REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(40);
}
thread_exit = false;
//Break
SDL_Event event;
event.type = BREAK_EVENT;
SDL_PushEvent(&event);
return 0;
}
// Singleton instance pointer, lazily created in MediaMsg::Instance().
MediaMsg * MediaMsg::m_Instance = NULL;
// Constructor: initializes state, moves this object onto its own worker
// thread, then initializes FFmpeg and SDL.
MediaMsg::MediaMsg(QObject *parent)
: QObject(parent)
{
// Not closed yet — the capture loop in SlotOpenCamera() polls this flag.
m_bIsClose = false;
// Move this QObject to a dedicated QThread so queued slot calls
// (open/close camera) run off the GUI thread.
addToThread();
// NOTE(review): InitMedia() executes on the constructing (caller's)
// thread, not the worker thread — presumably fine for SDL_Init; confirm.
InitMedia();
}
// Destructor: shuts down the worker thread and releases it.
MediaMsg::~MediaMsg()
{
    // Stop the worker thread's event loop and block until it exits.
    m_pThread->quit();
    m_pThread->wait();
    // FIX: the QThread was allocated with new in addToThread() and was
    // never freed — delete it once it has fully stopped.
    delete m_pThread;
    m_pThread = NULL;
}
// One-time initialization of FFmpeg (formats, codecs, network) and SDL
// (video, audio, timer subsystems). Exits the process if SDL fails.
void MediaMsg::InitMedia()
{
    // NOTE: av_register_all() is deprecated in FFmpeg 4.x but harmless;
    // kept for compatibility with this codebase's API level.
    av_register_all();
    avformat_network_init();

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        // FIX: the original sprintf'd into a stack buffer and then passed
        // that buffer as qDebug's FORMAT string (a '%' in the SDL error
        // would be misinterpreted). qDebug accepts printf-style args.
        qDebug("Could not initialize SDL - %s", SDL_GetError());
        exit(1);
    }
}
// Returns the process-wide MediaMsg singleton, creating it on first use.
// NOTE(review): this lazy initialization is not thread-safe; it is only
// safe if the first call happens on one thread (e.g. the GUI thread at
// startup) before any concurrent use — confirm call sites.
MediaMsg * MediaMsg::Instance()
{
if (m_Instance == NULL)
{
m_Instance = new MediaMsg;
}
return m_Instance;
}
// Creates the worker thread and moves this QObject onto it so that
// queued slot invocations execute off the GUI thread.
void MediaMsg::addToThread()
{
m_pThread = new QThread;
this->moveToThread(m_pThread);
m_pThread->start();
}
// Stores the selected device description (camera URL, window id) for the
// next SlotOpenCamera() call.
void MediaMsg::SetVideoAudio(VideoAudio videoaudio)
{
    this->m_videoAudioDevice = videoaudio;
}
void MediaMsg::SetCamWidgetSize(int nWidth, int nHeight)
{
m_nWinCamWidth = nWidth;
m_nWinCamHeight = nHeight;
}
void MediaMsg::SlotOpenCamera()
{
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVPacket packet;
m_bIsClose = false;// 处于打开状态
qDebug("SlotOpenCamera");
pFormatCtx = avformat_alloc_context();
//Register Device
avdevice_register_all();
AVInputFormat *ifmt = av_find_input_format("dshow");
//窗口
SDL_Window *sdlScreen = NULL;
//渲染器
SDL_Renderer* sdlRenderer = NULL;
//纹理
SDL_Texture* sdlTexture = NULL;
//矩形结构
SDL_Rect sdlRect;
SDL_Thread *sdlThread = NULL;
SDL_Event event;
uint8_t *out_buffer;
int frameFinished;
//Set own video device's name
if (avformat_open_input(&pFormatCtx, m_videoAudioDevice.qstrCamera.toLocal8Bit().data(), ifmt, NULL) != 0)
{
qDebug("Couldn't open input stream.");
return;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
qDebug("Couldn't find stream information.");
return;
}
videoindex = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++)
{
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
}
if (videoindex == -1)
{
qDebug("Couldn't find a video stream.");
return;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
qDebug("Codec not found.");
return;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
qDebug("Could not open codec.");
return;
}
AVFrame *pFrame, *pFrameYUV;
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
int screen_w = 0, screen_h = 0;
SDL_Surface *screen;
screen_w = pCodecCtx->width;
screen_h = pCodecCtx->height;
// AV_PIX_FMT_YUV420P(新版本) PIX_FMT_YUV420P(旧版本)
out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
struct SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
// Make a screen to put our video
sdlScreen = SDL_CreateWindowFrom(g_hwnd_widget_camera);
if (!sdlScreen) {
char buff_err[1024] = { 0 };
sprintf(buff_err, "SDL: could not create window - exiting:%s", SDL_GetError());
return;
}
sdlRenderer = SDL_CreateRenderer(sdlScreen, -1, 0);
Uint32 pixformat = 0;
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
pixformat = SDL_PIXELFORMAT_IYUV;
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
// Read frames and save first five frames to disk
while (av_read_frame(pFormatCtx, &packet) >= 0) {
// Is this a packet from the video stream?
if (m_bIsClose)
{
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
break;
}
if (packet.stream_index == videoindex) {
// Decode video frame
int ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (ret < 0){
printf("Decode Error.\n");
return;
}
// Did we get a video frame?
if (frameFinished)
{
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
//FIX: If window is resize
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = m_nWinCamWidth;
sdlRect.h = m_nWinCamHeight;
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);
av_free_packet(&packet);
SDL_Delay(20);//延迟一下,防止播放太快
}
}
else
{
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}
SDL_PollEvent(&event);
switch (event.type) {
case SDL_QUIT:
thread_exit = true;
SDL_Quit();
exit(0);
break;
default:
break;
}
}
avcodec_close(pCodecCtx);
// Close the video file
avformat_close_input(&pFormatCtx);
}
// Slot: asks the capture loop in SlotOpenCamera() to stop by raising the
// m_bIsClose flag; the loop itself performs the teardown.
void MediaMsg::SlotCloseCamera()
{
m_bIsClose = true;// close the camera
qDebug("SlotCloseCamera");
}
// Slot: start recording the camera stream. TODO: not implemented yet.
void MediaMsg::SlotStartRecordVideo()
{
}
// Slot: pause an in-progress recording. TODO: not implemented yet.
void MediaMsg::SlotPauseRecordVideo()
{
}
// Slot: finish and finalize a recording. TODO: not implemented yet.
void MediaMsg::SlotEndRecordVideo()
{
}
工程主要是在网上搜集,在网上都能找到,向前辈们学习。
欢迎大家加我的群:460952208