[Video Processing] Capturing USB Camera Data with FFmpeg

Up to now I had been capturing USB camera data through the raw V4L2 ioctl interface, which is fairly cumbersome. After porting FFmpeg to the board there is another option, and it is considerably more convenient than direct V4L2 capture.
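Before running the code below, it is worth confirming which pixel formats and resolutions the camera actually offers; the ffmpeg command-line tool can query the v4l2 device directly (assuming the camera sits at /dev/video0):

ffmpeg -f video4linux2 -list_formats all -i /dev/video0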
ffmpeg.cpp

#include "ffmpeg.h"


#define FFMPEG_MJPEG
//#define FFMPEG_H264
//#define FFMPEG_YUV

#define TIMEMS      qPrintable(QTime::currentTime().toString("HH:mm:ss zzz"))

ffmpeg::ffmpeg(QWidget *parent) :
    QThread(parent)
{
    framCount = 0;
    frameFinish = 0;
    saveFile = true;
    framIndex = 0;
    isOutputFileOpen = false;
    //these members are read before they are assigned elsewhere,
    //so initialize them here
    oldWidth = 0;
    oldHeight = 0;
    buffer = NULL;
}


ffmpeg::~ffmpeg()
{
}


/* Purpose: initialize the demuxing context, the decoder context,
 *          and the format conversion context (YUV to RGB)
 *      1 demuxing
 *      2 decoding
 *      3 pixel format conversion
 * Parameters: none
 * Return: 0 on success, -1 on failure
 */
int ffmpeg::initDecodeVideo()
{
    //register all container formats and codecs available in the library
    av_register_all();
    //register all devices, mainly for local camera capture support
    avdevice_register_all();

    qDebug() << TIMEMS << "init ffmpeg lib ok" << " version:" << FFMPEG_VERSION;


    AVDictionary *options = NULL;
    AVCodec *deCodec = NULL;       //decoder


    //allocate the demuxing context
    ifmt_ctx = avformat_alloc_context();
    //find the v4l2 input (demuxer) format
    AVInputFormat *ifmt = av_find_input_format("video4linux2");

    //set the capture options used when opening the input video stream
    av_dict_set(&options, "framerate", "30", 0);
    av_dict_set(&options, "video_size", "1280x720", 0);
#ifdef FFMPEG_MJPEG
    av_dict_set(&options, "input_format", "mjpeg", 0);
#endif

#ifdef FFMPEG_YUV
    av_dict_set(&options, "input_format", "yuyv422", 0);
#endif
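    //(these dictionary entries mirror the v4l2 demuxer's command-line flags,
    // e.g. -input_format mjpeg -video_size 1280x720 -framerate 30)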
    int result = avformat_open_input(&ifmt_ctx, inputFilename, ifmt, &options);
    if (result < 0) {
        qDebug() << TIMEMS << "open input error" << inputFilename;
        return -1;
    }
    //free the option dictionary
    if (options != NULL) {
        av_dict_free(&options);
    }

    //read the stream information
    result = avformat_find_stream_info(ifmt_ctx, NULL);
    if (result < 0) {
        qDebug() << TIMEMS << "find stream info error";
        return -1;
    }
    videoStreamIndex = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &deCodec, 0);

    if (videoStreamIndex < 0) {
        qDebug() << TIMEMS << "find video stream index error";
        return -1;
    }

    //get the input video stream from the demuxing context
    in_stream = ifmt_ctx->streams[videoStreamIndex];
    if (!in_stream)
    {
        qDebug() << TIMEMS << "failed to get input stream";
        return -1;
    }

    //get the decoder context of the video stream
    deCodecCtx = in_stream->codec;

    //get the resolution
    videoWidth = in_stream->codec->width;
    videoHeight = in_stream->codec->height;

    //bail out if the width/height could not be determined
    if (videoWidth == 0 || videoHeight == 0) {
        qDebug() << TIMEMS << "find width height error";
        return -1;
    }

    //get the stream's frame rate (fps); the values are sometimes reported
    //as 0, so guard against division by zero
    int num = in_stream->codec->framerate.num;
    int den = in_stream->codec->framerate.den;
    if (num != 0 && den != 0) {
        videoFps = num / den;
    }

    QString videoInfo = QString("video stream info -> index: %1  format: %2  duration: %3 s  fps: %4  resolution: %5*%6")
                        .arg(videoStreamIndex).arg(ifmt_ctx->iformat->name)
                        .arg((ifmt_ctx->duration) / 1000000).arg(videoFps).arg(videoWidth).arg(videoHeight);
    qDebug() << TIMEMS << videoInfo;

    //open the video decoder
    result = avcodec_open2(deCodecCtx, deCodec, NULL);
    if (result < 0) {
        qDebug() << TIMEMS << "open video codec error";
        return -1;
    }

    avDePacket = av_packet_alloc();
    avDeFrameYuv = av_frame_alloc();
    avDeFrameRgb = av_frame_alloc();

    //compare with the previous width/height and (re)allocate the RGB
    //buffer when the size has changed
    if (oldWidth != videoWidth || oldHeight != videoHeight) {
        if (buffer != NULL) {
            av_free(buffer);
        }
        int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
        buffer = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
        oldWidth = videoWidth;
        oldHeight = videoHeight;
    }

    //pixel formats for the conversion
    AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
    AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;

#ifdef FFMPEG_MJPEG
    srcFormat = AV_PIX_FMT_YUV420P;
#endif

#ifdef FFMPEG_YUV
    srcFormat = AV_PIX_FMT_YUYV422;
#endif

#ifdef FFMPEG_H264
    srcFormat = AV_PIX_FMT_YUV420P;
#endif
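    //NOTE: after avcodec_open2() the decoder reports its real output format
    //in deCodecCtx->pix_fmt; many UVC cameras deliver MJPEG that decodes to
    //YUVJ422P rather than YUV420P, so reading deCodecCtx->pix_fmt here would
    //be more robust than hard-coding srcFormat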
    av_image_fill_arrays(avDeFrameRgb->data, avDeFrameRgb->linesize, buffer, dstFormat, videoWidth, videoHeight, 1);
    int flags = SWS_FAST_BILINEAR;

    swsContextYuvtoRgb = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

    //open the file that the raw decoded frames are dumped to
    outFile.setFileName(outputFilename);
    isOutputFileOpen = outFile.open(QIODevice::WriteOnly);
    if (!isOutputFileOpen) {
        qDebug() << TIMEMS << "open output file error" << outputFilename;
    }


    qDebug() << TIMEMS << "init ffmpegVideo ok";

    return 0;
}

int ffmpeg::playVideo()
{
    if (initDecodeVideo() != 0) {
        qDebug() << TIMEMS << "init decode video error";
        return -1;
    }
    while (true)
    {
        if (av_read_frame(ifmt_ctx, avDePacket) >= 0) {
            //check which stream the current packet belongs to
            int index = avDePacket->stream_index;
            in_stream  = ifmt_ctx->streams[index];

            if (index == videoStreamIndex) {
                avcodec_decode_video2(deCodecCtx, avDeFrameYuv, &frameFinish, avDePacket);
                if (frameFinish)
                {

                    //convert the decoded YUV frame into an RGB image
                    sws_scale(swsContextYuvtoRgb, (const uint8_t *const *)avDeFrameYuv->data, avDeFrameYuv->linesize,\
                              0, videoHeight, avDeFrameRgb->data, avDeFrameRgb->linesize);

                    QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);

                    if (!image.isNull()) {
                        emit receiveImage(image);
                    }

                    framIndex++;
                    qDebug() << TIMEMS << "decoded frame" << framIndex;
                    //stop after 200 frames
                    if (framIndex > 200)
                    {
                        framIndex = 0;
                        break;
                    }

                    //append the Y plane line by line (linesize may include padding)
                    for (int i = 0; i < avDeFrameYuv->height; i++) {
                        outFile.write((char *)(avDeFrameYuv->data[0] + i * avDeFrameYuv->linesize[0]), avDeFrameYuv->width);
                    }

                    //append the U and V planes; for 4:2:0 sampling they are
                    //half the width and half the height of the Y plane
                    int loop = avDeFrameYuv->height / 2;
                    int len_uv = avDeFrameYuv->width / 2;

                    for (int i = 0; i < loop; i++) {
                        outFile.write((char *)(avDeFrameYuv->data[1] + i * avDeFrameYuv->linesize[1]), len_uv);
                    }
                    for (int i = 0; i < loop; i++) {
                        outFile.write((char *)(avDeFrameYuv->data[2] + i * avDeFrameYuv->linesize[2]), len_uv);
                    }
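                    //(the output file is a raw planar YUV dump; assuming
                    // yuv420p it can be inspected with:
                    // ffplay -f rawvideo -pixel_format yuv420p -video_size 1280x720 <file>)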


                }
            }
            //unref every packet (not just video packets), otherwise packets
            //from other streams leak; the packet struct itself is reused by
            //the next av_read_frame(), so it must not be freed here
            av_packet_unref(avDePacket);
        } else {
            //stop on read error / end of stream instead of spinning forever
            break;
        }
    }
    outFile.close();

    //free the decode packet and frames
    av_packet_free(&avDePacket);
    av_frame_free(&avDeFrameYuv);
    av_frame_free(&avDeFrameRgb);
    //close the decoder; deCodecCtx points at in_stream->codec, which is owned
    //and freed by the demuxing context, so it must not be freed separately
    avcodec_close(deCodecCtx);
    //close the input and free the demuxing context
    avformat_close_input(&ifmt_ctx);
    //free the format conversion context
    sws_freeContext(swsContextYuvtoRgb);

    qDebug() << TIMEMS << "stop ffmpeg thread";
    return 0;
}
void ffmpeg::run()
{
    playVideo();
}
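Note that avcodec_decode_video2() is deprecated in current FFmpeg releases. On newer versions the loop body can be ported to the send/receive API; a minimal sketch, reusing the deCodecCtx, avDePacket and avDeFrameYuv members from above:

    //feed one demuxed packet to the decoder
    if (avcodec_send_packet(deCodecCtx, avDePacket) == 0) {
        //a single packet may produce zero or more frames, so drain in a loop
        while (avcodec_receive_frame(deCodecCtx, avDeFrameYuv) == 0) {
            //...sws_scale(), emit receiveImage() and dump the planes as above...
        }
    }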

ffmpeg.h

#ifndef FFMPEG_H
#define FFMPEG_H

#include <QThread>
#include <QWidget>
#include <QImage>
#include <QFile>
#include <QTime>
#include <QDebug>

//pull in the FFmpeg headers (C library, so wrap in extern "C")
extern "C" {
#include "libavutil/opt.h"
#include "libavutil/time.h"
#include "libavutil/frame.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/ffversion.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"

#ifndef gcc45
#include "libavutil/hwcontext.h"
#endif
}


namespace Ui {
class ffmpeg;
}

class ffmpeg : public QThread
{
    Q_OBJECT

public:
    explicit ffmpeg(QWidget *parent = nullptr);
    ~ffmpeg();
    char *outputFilename;
    char *inputFilename;

protected:
    void run();
signals:
    //emitted whenever a new decoded image is ready
    void receiveImage(const QImage &image);

private:
    uint64_t framIndex;
    int lastMsec;
    int videoStreamIndex;               //video stream index
    int videoWidth;                     //video width
    int videoHeight;                    //video height
    int videoFps;                       //video stream frame rate
    int frameFinish;                    //set when a complete frame was decoded
    bool saveFile;
    bool isOutputFileOpen;
    uint64_t framCount;                 //frame counter

    uint8_t *buffer;                    //buffer holding the decoded RGB image
    AVOutputFormat *ofmt = NULL;        //output format

    AVPacket *avDePacket;               //decode packet

    AVFrame *avDeFrameYuv;              //decoded frame (YUV)
    AVFrame *avDeFrameRgb;              //converted frame (RGB)

    AVFormatContext *ifmt_ctx;          //input (demuxing) format context
    AVFormatContext *ofmt_ctx;          //output (muxing) format context

    AVStream *in_stream;                //input video stream
    AVStream *out_stream;               //output video stream

    AVCodecContext *deCodecCtx;         //decoder context

    SwsContext *swsContextYuvtoRgb;     //format conversion context (YUV to RGB)

    int oldWidth;                       //previous video width
    int oldHeight;                      //previous video height

    QFile outFile;

private:
    Ui::ffmpeg *ui;
    int initDecodeVideo();
    int playVideo();

};

#endif // FFMPEG_H

Qt has to be ported to the target board first; this capture path is also the foundation for the hardware-encoded RTMP streaming covered later.
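A minimal usage sketch from the GUI side (the device path, output file name and the QLabel named label are assumptions, not part of the class):

    ffmpeg *capture = new ffmpeg(this);
    capture->inputFilename = (char *)"/dev/video0";   //assumed v4l2 device node
    capture->outputFilename = (char *)"out.yuv";      //assumed raw YUV dump file
    //receiveImage fires for every decoded frame
    QObject::connect(capture, &ffmpeg::receiveImage, [=](const QImage &image) {
        label->setPixmap(QPixmap::fromImage(image));  //'label' is an assumed QLabel
    });
    capture->start();   //QThread::start() invokes run(), which calls playVideo()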
