title: ffmpeg_sample解读_vaapi_encode
date: 2020-10-28 10:15:02
tags: [读书笔记]
typora-copy-images-to: ./imgs
typora-root-url: ./imgs
# 总结
把yuv数据编码成h264数据,使用了 硬件编码,同时把硬件编码绑定到编解码器上下文上.
* 同时创建了一个硬件编码帧. 绑定硬件编码上下文后.使用就和之前的编解码器上下文一样了.
# 流程图
graph TB
fo[fopen]
-->ahcc[av_hwdevice_ctx_create]
-->afebn[avcodec_find_encoder_by_name]
-->aac[avcodec_alloc_context3]
-->shc[set_hwframe_ctx]
-->ahca[av_hwframe_ctx_alloc]
-->ahci[av_hwframe_ctx_init]
-->abr[av_buffer_ref]
-->aco[avcodec_open2]
-->afa[av_frame_alloc]
-->afgb[av_frame_get_buffer]
-->ahgb[av_hwframe_get_buffer]
-->ahtd[av_hwframe_transfer_data]
-->asf[avcodec_send_frame]
-->arp[avcodec_receive_packet]
-->fwite[fwrite]
-->release[release]
# 代码
/**
* @file
* Intel VAAPI-accelerated encoding example.
*
* @example vaapi_encode.c
* This example shows how to do VAAPI-accelerated encoding. Currently it only
* supports NV12 raw input. Usage: vaapi_encode 1920 1080 input.yuv output.h264
*
*/
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <libavcodec/avcodec.h>
#include <libavutil/pixdesc.h>
#include <libavutil/hwcontext.h>
static int width, height;
static AVBufferRef *hw_device_ctx = NULL;
/**
 * Build a VAAPI hardware frame pool on top of the given device and attach
 * it to the encoder context, so the encoder allocates surfaces from it.
 *
 * @param ctx            encoder context to receive the frames context
 * @param hw_device_ctx  reference to an opened VAAPI device context
 * @return 0 on success, a negative error code on failure
 */
static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(hw_device_ctx);
    AVHWFramesContext *fctx;
    int ret;

    if (!frames_ref) {
        fprintf(stderr, "Failed to create VAAPI frame context.\n");
        return -1;
    }

    /* Describe the pool: VAAPI surfaces whose software view is NV12,
     * sized to the input video, pre-allocating 20 surfaces. */
    fctx            = (AVHWFramesContext *)frames_ref->data;
    fctx->format    = AV_PIX_FMT_VAAPI;
    fctx->sw_format = AV_PIX_FMT_NV12;
    fctx->width     = width;
    fctx->height    = height;
    fctx->initial_pool_size = 20;

    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0) {
        fprintf(stderr, "Failed to initialize VAAPI frame context."
                "Error code: %s\n",av_err2str(ret));
        av_buffer_unref(&frames_ref);
        return ret;
    }

    /* Give the encoder its own reference to the pool; from here on the
     * codec context is used exactly like a software one. */
    ctx->hw_frames_ctx = av_buffer_ref(frames_ref);
    if (!ctx->hw_frames_ctx)
        ret = AVERROR(ENOMEM);

    /* Drop our local reference; the encoder's copy (if any) keeps it alive. */
    av_buffer_unref(&frames_ref);
    return ret;
}
/**
 * Send one frame (or NULL to flush) to the encoder and write every packet
 * it produces to @p fout.
 *
 * Fixes vs. the original: AVERROR_EOF (the normal end of a flush) is now
 * treated as success instead of being mapped to -1, and a short/failed
 * fwrite() is reported instead of being silently ignored.
 *
 * @param avctx  opened encoder context
 * @param frame  frame to encode, or NULL to start draining the encoder
 * @param fout   destination file for the raw bitstream
 * @return 0 on success, -1 on any error
 */
static int encode_write(AVCodecContext *avctx, AVFrame *frame, FILE *fout)
{
    int ret = 0;
    AVPacket enc_pkt;

    av_init_packet(&enc_pkt);
    enc_pkt.data = NULL;
    enc_pkt.size = 0;

    /* Submit the frame; a NULL frame signals end-of-stream (flush). */
    if ((ret = avcodec_send_frame(avctx, frame)) < 0) {
        fprintf(stderr, "Error code: %s\n", av_err2str(ret));
        goto end;
    }

    /* Drain all packets the encoder has ready and append them to the file. */
    while (1) {
        ret = avcodec_receive_packet(avctx, &enc_pkt);
        if (ret)
            break;  /* EAGAIN: needs more input; EOF: flush finished */

        enc_pkt.stream_index = 0;
        if (fwrite(enc_pkt.data, enc_pkt.size, 1, fout) != 1) {
            /* A short write means the output is broken; fail loudly. */
            fprintf(stderr, "Failed to write packet to output file.\n");
            av_packet_unref(&enc_pkt);
            ret = AVERROR(EIO);
            goto end;
        }
        av_packet_unref(&enc_pkt);
    }

end:
    /* EAGAIN (encoder wants more input) and EOF (fully drained) are the
     * expected loop-exit conditions, not errors. */
    ret = ((ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : -1);
    return ret;
}
/**
* 把yuv数据编码成h264数据,使用了 硬件编码,
* vaapi_encode 1920 1080 input.yuv output.h264
* @param argc
* @param argv
* @return
*/
int vaapi_encode_main(int argc, char *argv[])
{
int size, err;
FILE *fin = NULL, *fout = NULL;
AVFrame *sw_frame = NULL, *hw_frame = NULL;
AVCodecContext *avctx = NULL;
AVCodec *codec = NULL;
const char *enc_name = "h264_vaapi";
if (argc < 5) {
fprintf(stderr, "Usage: %s