// ===== main =====
// Encoding-thread entry: grabs the shared srcImg, rescales it to the canvas
// size and feeds it to the H.264 encoder at a fixed 25 fps (40 ms) cadence.
void code_h264()
{
	Encodeh264 code_obj;
	// NOTE(review): this mutex is function-local, so it cannot actually
	// synchronize with the thread that produces srcImg — to be effective it
	// must be the same mutex object that thread locks (e.g. a global).
	mutex mut;
	code_obj.init_encoder(AV_CODEC_ID_H264, CANVAS_WIDTH, CANVAS_HEIGHT, AV_PIX_FMT_BGR24, CANVAS_WIDTH, CANVAS_HEIGHT, AV_PIX_FMT_YUV420P);
	const int64_t frame_interval_us = 40000; // 40 ms per frame -> 25 fps
	int64_t start_ts = 0;                    // time (us) the previous frame was submitted
	while (1)
	{
		if (srcImg.empty())
		{
			// Nothing captured yet. (The original called mut.unlock() here,
			// which is undefined behavior on a mutex this thread never locked.)
			continue;
		}
		resize(srcImg, srcImg, cv::Size(CANVAS_WIDTH, CANVAS_HEIGHT));
		if (start_ts != 0)
		{
			// Pace the loop: sleep out the remainder of the 40 ms slot.
			int64_t elapsed = av_gettime_relative() - start_ts;
			if (elapsed < frame_interval_us)
				av_usleep(frame_interval_us - elapsed);
		}
		start_ts = av_gettime_relative();
		mut.lock();
		// cols is the width and rows the height; the original first-frame call
		// passed (rows, cols) and swapped them.
		code_obj.encode_H264(srcImg.data, srcImg.cols, srcImg.rows, AV_PIX_FMT_BGR24, start_ts);
		mut.unlock();
	}
}
// ===== Encodeh264.h =====
#pragma once
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/log.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/audio_fifo.h"
#include "libswresample/swresample.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
//混音
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
//时间
#include "libavutil/time.h"
};
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avdevice.lib")
#pragma comment(lib, "avfilter.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "postproc.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "swscale.lib")
// Minimal H.264 file encoder: converts raw frames (e.g. BGR24) to YUV420P and
// appends the encoded stream to "Afilm.h264" (plus an optional YUV dump).
class Encodeh264
{
public:
	Encodeh264();
	~Encodeh264();
	// Set up the encoder, the source/destination frames and the pixel-format
	// conversion context. Returns 0 on success, -1 on failure.
	int init_encoder(AVCodecID codec_id, int in_w, int in_h, AVPixelFormat src_pix_fmt, int dst_w, int dst_h, AVPixelFormat dst_pix_fmt);
	// Convert one raw frame (data, in_w x in_h, pix_fmt) to YUV420P, encode it
	// with pts = start_ts, and write the resulting packet(s) to the .h264 file.
	int encode_H264( uint8_t *data,int in_w, int in_h ,AVPixelFormat pix_fmt, int64_t start_ts);
private:
	// Encoder
	const AVCodec *_pCodec=NULL;
	// Encoder context
	AVCodecContext *_pCodecCtx = NULL;
	// Pixel-format conversion context. Was the only pointer member left
	// uninitialized; NULL-init makes teardown safe even if init_encoder()
	// never ran.
	SwsContext *_img_converT_ctx = NULL;
	AVFrame *_AVFrame = NULL;   // frame describing the raw source layout
	AVFrame *_YUVFrame=NULL;    // converted YUV420P frame fed to the encoder
	// Dump files, opened in the constructor
	FILE *fp_yuv=NULL;
	FILE *fp_h264 = NULL;
	AVPacket *pkt = NULL;       // reusable output packet
	int m_width = 0;            // width of the frames to encode
	int m_height = 0;           // height of the frames to encode
};
// ===== Encodeh264.cpp =====
#include "encodeH264.h"
#include <cstdio> // NOTE(review): the original '#include' lost its target during extraction — verify intended header
// Open the dump files for raw YUV frames and the encoded H.264 stream.
// fopen_s leaves the pointer NULL on failure; the original ignored the result,
// so a failed open went unnoticed until fclose(NULL) crashed in the destructor.
Encodeh264::Encodeh264()
{
	if (fopen_s(&fp_yuv, "Afilm.yuv", "wb+") != 0)
		printf("Encodeh264: could not open Afilm.yuv\n");
	if (fopen_s(&fp_h264, "Afilm.h264", "wb+") != 0)
		printf("Encodeh264: could not open Afilm.h264\n");
}
// Tear down: close dump files and release every FFmpeg resource acquired in
// init_encoder() (the original leaked all of them and called fclose(NULL)
// when an open had failed — undefined behavior).
Encodeh264::~Encodeh264()
{
	if (fp_yuv)
		fclose(fp_yuv);
	if (fp_h264)
		fclose(fp_h264);
	// The av_*_free helpers are NULL-safe and the members are declared with
	// NULL initializers, so these are no-ops when init_encoder() never ran.
	av_packet_free(&pkt);
	av_frame_free(&_AVFrame);
	av_frame_free(&_YUVFrame);
	avcodec_free_context(&_pCodecCtx);
	// NOTE(review): _img_converT_ctx has no NULL initializer in the original
	// header — ensure the class declaration initializes it before relying on
	// this guard when init_encoder() was never called.
	if (_img_converT_ctx)
		sws_freeContext(_img_converT_ctx);
}
// Configure the H.264 encoder and allocate the source frame, the YUV420P
// destination frame, the output packet and the BGR->YUV conversion context.
// in_w/in_h/src_pix_fmt describe the raw input; dst_w/dst_h/dst_pix_fmt the
// frames actually handed to the encoder. Returns 0 on success, -1 on failure.
// NOTE(review): the `&param` arguments below appeared as the mangled entity
// `¶m` in the pasted original and have been restored.
int Encodeh264::init_encoder(AVCodecID codec_id, int in_w, int in_h, AVPixelFormat src_pix_fmt, int dst_w, int dst_h, AVPixelFormat dst_pix_fmt)
{
	m_width = in_w;
	m_height = in_h;
	//<--------------- find the encoder -------------------->
	_pCodec = avcodec_find_encoder(codec_id);
	if (!_pCodec)
	{
		printf("Codec not found\n");
		return -1;
	}
	//<--------------- allocate the encoder context -------------------->
	_pCodecCtx = avcodec_alloc_context3(_pCodec);
	if (!_pCodecCtx)
	{
		printf("Could not allocate video codec context\n");
		return -1;
	}
	_pCodecCtx->bit_rate = 468000;
	// The encoder consumes the *converted* frames, so it must use the
	// destination geometry (the original used in_w/in_h, which breaks as soon
	// as the source and destination sizes differ).
	_pCodecCtx->width = dst_w;
	_pCodecCtx->height = dst_h;
	_pCodecCtx->time_base.num = 1;
	_pCodecCtx->time_base.den = 25;  // 25 fps
	_pCodecCtx->gop_size = 10;
	_pCodecCtx->max_b_frames = 0;    // no B-frames: lower latency
	_pCodecCtx->pix_fmt = dst_pix_fmt;
	AVDictionary *param = NULL;
	if (_pCodecCtx->codec_id == AV_CODEC_ID_H264) {
		// profile choices: baseline, main, high, high10, high422, high444
		av_dict_set(&param, "preset", "fast", 0);
		av_dict_set(&param, "tune", "zerolatency", 0);
		av_dict_set(&param, "profile", "high422", 0);
	}
	//<--------------- open the encoder -------------------->
	int ret = avcodec_open2(_pCodecCtx, _pCodec, &param);
	av_dict_free(&param); // the dictionary was leaked in the original
	if (ret < 0)
	{
		printf("Could not open codec\n");
		return -1;
	}
	//<--------------- allocate the source frame -------------------->
	_AVFrame = av_frame_alloc();
	if (!_AVFrame)
	{
		printf("_AVFrame Could not allocate video frame\n");
		return -1;
	}
	_AVFrame->format = src_pix_fmt;
	_AVFrame->width = in_w;
	_AVFrame->height = in_h;
	ret = av_frame_get_buffer(_AVFrame, 0);
	if (ret < 0)
	{
		printf(" _AVFrame Could not allocate the video frame data\n");
		return -1;
	}
	/* make sure the frame data is writable */
	ret = av_frame_make_writable(_AVFrame);
	if (ret < 0)
	{
		printf("_AVFrame set frame writable fail");
		return -1;
	}
	//<--------------- allocate the YUV420P destination frame -------------------->
	_YUVFrame = av_frame_alloc();
	if (!_YUVFrame)
	{
		printf("_YUVFrame Could not allocate video frame\n");
		return -1;
	}
	_YUVFrame->format = dst_pix_fmt;
	// Destination geometry, matching the sws_scale output and the encoder
	// (the original sized this frame with in_w/in_h).
	_YUVFrame->width = dst_w;
	_YUVFrame->height = dst_h;
	ret = av_frame_get_buffer(_YUVFrame, 0);
	if (ret < 0)
	{
		printf("_YUVFrame Could not allocate the video frame data\n");
		return -1;
	}
	/* make sure the frame data is writable */
	ret = av_frame_make_writable(_YUVFrame);
	if (ret < 0)
	{
		printf("_YUVFrame set frame writable fail");
		return -1;
	}
	//<--------------- allocate the output packet -------------------->
	pkt = av_packet_alloc();
	if (!pkt)
	{
		// Original only printed and fell through; a NULL pkt would crash later.
		printf("Could not allocate AVPacket\n");
		return -1;
	}
	// Pixel-format / size conversion context
	_img_converT_ctx = sws_getContext(in_w, in_h, src_pix_fmt, dst_w, dst_h, dst_pix_fmt, SWS_BICUBIC, NULL, NULL, NULL);
	if (!_img_converT_ctx)
	{
		printf("Could not create sws context\n");
		return -1;
	}
	return 0;
}
// Encode one raw frame: convert it to YUV420P if needed, send it to the
// encoder and drain ALL pending packets into the .h264 dump file (the
// original fetched at most one packet per frame, so buffered packets could
// accumulate, and it never unref'd the packet after writing it).
// Returns 0 on success, a negative AVERROR on failure (the original always
// returned 0, hiding errors from the caller).
int Encodeh264::encode_H264(uint8_t *data, int in_w, int in_h, AVPixelFormat src_pix_fmt, int64_t start_ts)
{
	_YUVFrame->pts = start_ts;
	_AVFrame->pts = start_ts;
	if (src_pix_fmt != AV_PIX_FMT_YUV420P)
	{
		// Convert the packed source (e.g. BGR24) to YUV420P.
		// NOTE(review): _AVFrame->linesize was computed in init_encoder for the
		// configured source size/format; this assumes (in_w, in_h) match it —
		// confirm callers always pass the init dimensions.
		sws_scale(_img_converT_ctx, &data, _AVFrame->linesize, 0, in_h, _YUVFrame->data, _YUVFrame->linesize);
	}
	int ret = avcodec_send_frame(_pCodecCtx, _YUVFrame);
	if (ret < 0)
	{
		printf("Error sending a frame for encoding :%d \n", ret);
		return ret;
	}
	// Drain every packet the encoder has ready for this frame.
	while (1)
	{
		ret = avcodec_receive_packet(_pCodecCtx, pkt);
		if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
		{
			// No more output for now — not an error.
			return 0;
		}
		if (ret < 0)
		{
			printf("Error during encoding\n");
			return ret;
		}
		// Encoded packet ready: append it to the dump file.
		if (pkt->size > 0 && pkt->data != NULL && fp_h264 != NULL)
		{
			fwrite(pkt->data, pkt->size, 1, fp_h264);
		}
		// Release the packet's buffer so it can be reused next iteration.
		av_packet_unref(pkt);
	}
	return 0;
}