1、使用sws_scale转换格式为yuv420p,保存在文件中
原mp4的宽高为640x640,改变了宽和高,为原来的1/2
测试播放命令:ffplay -video_size 320x320 -pix_fmt yuv420p -i 2.yuv
#include <cstdio>
#include <memory>
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
}
// Declare a raw pointer P (initialised to V) together with a shared_ptr guard
// whose deleter runs the cleanup expression Fn at scope exit when P is non-null.
// NOTE: expands to TWO statements — use only at block scope, never as the lone
// body of an unbraced if/for/while.
#define NEW_PTR(T, P, V, Fn) T *P = V; std::shared_ptr<T> P##P(P, [&P](T *){if(P != nullptr){Fn;}})
// Print an error message and bail out of the calling function with -1.
// BUGFIX: wrapped in do{}while(0) so it acts as a single statement; the original
// two-statement expansion would only guard the printf under an unbraced if.
#define FATAL(M, ...) do { printf(M, ##__VA_ARGS__); return -1; } while (0)
// Input MP4 path and output raw-YUV path.
const char *src_media = "D:/1.mp4";
const char *dst_media = "D:/2.yuv";
int main(int argc, char **argv) {
// 根据输入文件,打开媒体格式上下文
NEW_PTR(AVFormatContext, fmt_ctx, nullptr, avformat_close_input(&fmt_ctx));
if (avformat_open_input(&fmt_ctx, src_media, nullptr, nullptr) < 0) {
FATAL("open src media file failed.");
}
// 查询stream信息
if (avformat_find_stream_info(fmt_ctx, nullptr) < 0) {
FATAL("find stream info failed.");
}
// 查询视频stream的索引
int video_st_idx;
if ((video_st_idx = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0)) < 0) {
FATAL("find video stream failed.");
}
// 通过索引找到视频stream
AVStream *video_st;
video_st = fmt_ctx->streams[video_st_idx];
// 查询视频stream的解码器
AVCodec *video_dec;
if ((video_dec = avcodec_find_decoder(video_st->codecpar->codec_id)) == nullptr) {
FATAL("find video decoder failed.");
}
// 为解码器申请上下文
NEW_PTR(AVCodecContext, video_dec_ctx, nullptr, avcodec_free_context(&video_dec_ctx));
if ((video_dec_ctx = avcodec_alloc_context3(video_dec)) == nullptr) {
FATAL("allocate video dec context failed.");
}
// 从视频stream中拷贝解码参数到解码器的上下文
if (avcodec_parameters_to_context(video_dec_ctx, video_st->codecpar) < 0) {
FATAL("copy codec params failed.");
}
// 打开解码器
if (avcodec_open2(video_dec_ctx, video_dec, nullptr) < 0) {
FATAL("codec open failed.");
}
// 申请一个packet,并初始化
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = nullptr;
pkt.size = 0;
// 申请一个frame
NEW_PTR(AVFrame, video_frame, nullptr, av_frame_free(&video_frame));
if ((video_frame = av_frame_alloc()) == nullptr) {
FATAL("allocate frame failed.");
}
// 申请一个scale_frame
NEW_PTR(AVFrame, scale_frame, nullptr, av_frame_free(&scale_frame));
if ((scale_frame = av_frame_alloc()) == nullptr) {
FATAL("allocate filter frame failed.");
}
// scale_frame的data内存分配
scale_frame->format = AV_PIX_FMT_YUV420P;
scale_frame->width = video_dec_ctx->width / 2;
scale_frame->height = video_dec_ctx->height / 2;
if (av_frame_get_buffer(scale_frame, 32) < 0) {
FATAL("allocate scale frame data failed.");
}
// 申请一个sws视频转换器
NEW_PTR(SwsContext, sws_ctx, nullptr, sws_freeContext(sws_ctx));
sws_ctx = sws_getContext(video_dec_ctx->width, video_dec_ctx->height, video_dec_ctx->pix_fmt,
scale_frame->width, scale_frame->height, (AVPixelFormat) scale_frame->format,
SWS_BILINEAR, nullptr, nullptr, nullptr);
// 打开输出文件
NEW_PTR(FILE, output, nullptr, fclose(output));
if ((output = fopen(dst_media, "wb")) == nullptr) {
FATAL("no writable file.");
}
// 如果小于0,则表示读完了或者报错了,跳出循环
while (av_read_frame(fmt_ctx, &pkt) >= 0) {
// 我们只关心视频stream
if (pkt.stream_index == video_st_idx) {
// 发送一个视频包
if (avcodec_send_packet(video_dec_ctx, &pkt) < 0) {
FATAL("send packet exception.");
}
// 接受解码完的内容
while (true) {
auto frame_ret = avcodec_receive_frame(video_dec_ctx, video_frame);
// 判断是否完全接受了frame
if (frame_ret == AVERROR(EAGAIN) || frame_ret == AVERROR_EOF) {
break;
}
// 检查是否接受异常
if (frame_ret < 0) {
FATAL("receive frame exception.");
}
video_frame->pts = video_frame->best_effort_timestamp;
if (av_frame_make_writable(scale_frame) < 0) {
FATAL("scale frame is not writable");
}
sws_scale(sws_ctx, (const uint8_t *const *) video_frame->data,
video_frame->linesize, 0, video_dec_ctx->height, scale_frame->data,
scale_frame->linesize);
// 按照YUV420P的格式输出到文件中
for (int i = 0; i < scale_frame->height; i++) {
fwrite(scale_frame->data[0] + i * scale_frame->linesize[0], 1,
(size_t) scale_frame->width, output);
}
for (int i = 0; i < scale_frame->height / 2; i++) {
fwrite(scale_frame->data[1] + i * scale_frame->linesize[1], 1,
(size_t) scale_frame->width / 2, output);
}
for (int i = 0; i < scale_frame->height / 2; i++) {
fwrite(scale_frame->data[2] + i * scale_frame->linesize[2], 1,
(size_t) scale_frame->width / 2, output);
}
av_frame_unref(video_frame);
}
}
av_packet_unref(&pkt);
}
return 0;
}
2、使用filter转换格式为yuv420p,保存在文件中
原mp4的宽高为640x640,改变了宽和高,为原来的1/2
同时改变了fps为15,原fps为25,这样会使视频播放速度更快
解码后的yuv,虽然fps降低,视频速度变快了,但是编码的时候可以还原为正常速度
测试播放命令:ffplay -video_size 320x320 -pix_fmt yuv420p -i 2.yuv
#include
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavfilter/buffersrc.h"
#include "libavfilter/buffersink.h"
#include "libavutil/opt.h"
}
// Declare a raw pointer P (initialised to V) together with a shared_ptr guard
// whose deleter runs the cleanup expression Fn at scope exit when P is non-null.
// NOTE: expands to TWO statements — use only at block scope, never as the lone
// body of an unbraced if/for/while.
#define NEW_PTR(T, P, V, Fn) T *P = V; std::shared_ptr<T> P##P(P, [&P](T *){if(P != nullptr){Fn;}})
// Print an error message and bail out of the calling function with -1.
// BUGFIX: wrapped in do{}while(0) so it acts as a single statement; the original
// two-statement expansion would only guard the printf under an unbraced if.
#define FATAL(M, ...) do { printf(M, ##__VA_ARGS__); return -1; } while (0)
// Input MP4 path and output raw-YUV path.
const char *src_media = "D:/1.mp4";
const char *dst_media = "D:/2.yuv";
int main(int argc, char **argv) {
    // Open the demuxer context for the source file.
    NEW_PTR(AVFormatContext, fmt_ctx, nullptr, avformat_close_input(&fmt_ctx));
    if (avformat_open_input(&fmt_ctx, src_media, nullptr, nullptr) < 0) {
        FATAL("open src media file failed.");
    }
    // Probe the container for stream information.
    if (avformat_find_stream_info(fmt_ctx, nullptr) < 0) {
        FATAL("find stream info failed.");
    }
    // Pick the best video stream.
    int video_st_idx;
    if ((video_st_idx = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0)) < 0) {
        FATAL("find video stream failed.");
    }
    AVStream *video_st = fmt_ctx->streams[video_st_idx];
    // Look up a decoder matching the stream's codec id.
    AVCodec *video_dec;
    if ((video_dec = avcodec_find_decoder(video_st->codecpar->codec_id)) == nullptr) {
        FATAL("find video decoder failed.");
    }
    // Allocate the decoder context, copy the stream parameters in, and open it.
    NEW_PTR(AVCodecContext, video_dec_ctx, nullptr, avcodec_free_context(&video_dec_ctx));
    if ((video_dec_ctx = avcodec_alloc_context3(video_dec)) == nullptr) {
        FATAL("allocate video dec context failed.");
    }
    if (avcodec_parameters_to_context(video_dec_ctx, video_st->codecpar) < 0) {
        FATAL("copy codec params failed.");
    }
    if (avcodec_open2(video_dec_ctx, video_dec, nullptr) < 0) {
        FATAL("codec open failed.");
    }
    // Build the filter graph: buffer (src) -> "fps=15,scale=320:320" -> buffersink.
    NEW_PTR(AVFilterGraph, filter_graph, nullptr, avfilter_graph_free(&filter_graph));
    if ((filter_graph = avfilter_graph_alloc()) == nullptr) {
        FATAL("alloc graph failed.");
    }
    // The buffer source must be told the exact properties of incoming frames.
    char args[512] = {0};
    snprintf(args, sizeof(args), "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
             video_dec_ctx->width, video_dec_ctx->height, video_dec_ctx->pix_fmt,
             video_st->time_base.num, video_st->time_base.den,
             video_dec_ctx->sample_aspect_ratio.num, video_dec_ctx->sample_aspect_ratio.den);
    AVFilterContext *buffer_src_ctx = nullptr;
    const AVFilter *in = avfilter_get_by_name("buffer");
    // BUGFIX: avfilter_graph_create_filter() returns a negative AVERROR on
    // failure and >= 0 on success; the original treated any non-zero as error.
    if (avfilter_graph_create_filter(&buffer_src_ctx, in, "in", args, nullptr, filter_graph) < 0) {
        FATAL("create src filter failed.");
    }
    AVFilterContext *buffer_sink_ctx = nullptr;
    const AVFilter *out = avfilter_get_by_name("buffersink");
    if (avfilter_graph_create_filter(&buffer_sink_ctx, out, "out", nullptr, nullptr, filter_graph) < 0) {
        FATAL("create sink filter failed.");
    }
    // Restrict the sink to YUV420P so the plane-writing loop below is valid.
    enum AVPixelFormat pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE};
    if (av_opt_set_int_list(buffer_sink_ctx, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN) < 0) {
        FATAL("set sink filter options failed.");
    }
    // "inputs"/"outputs" describe the open pads of the parsed graph segment:
    // its output feeds our sink ("out"); its input is fed by our source ("in").
    NEW_PTR(AVFilterInOut, inputs, nullptr, avfilter_inout_free(&inputs));
    if ((inputs = avfilter_inout_alloc()) == nullptr) {
        FATAL("alloc inputs failed.");
    }
    inputs->name = av_strdup("out");
    inputs->filter_ctx = buffer_sink_ctx;
    inputs->pad_idx = 0;
    inputs->next = nullptr;
    NEW_PTR(AVFilterInOut, outputs, nullptr, avfilter_inout_free(&outputs));
    if ((outputs = avfilter_inout_alloc()) == nullptr) {
        FATAL("alloc outputs failed.");
    }
    outputs->name = av_strdup("in");
    outputs->filter_ctx = buffer_src_ctx;
    outputs->pad_idx = 0;
    outputs->next = nullptr;
    if (avfilter_graph_parse_ptr(filter_graph, "fps=fps=15,scale=320:320", &inputs, &outputs, nullptr) < 0) {
        FATAL("parse graph failed.");
    }
    if (avfilter_graph_config(filter_graph, nullptr) < 0) {
        FATAL("config graph failed.");
    }
    // Packet reused for every av_read_frame() call.
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = nullptr;
    pkt.size = 0;
    // Frame receiving decoded pictures.
    NEW_PTR(AVFrame, video_frame, nullptr, av_frame_free(&video_frame));
    if ((video_frame = av_frame_alloc()) == nullptr) {
        FATAL("allocate frame failed.");
    }
    // Frame receiving filtered (fps-converted, scaled) pictures.
    NEW_PTR(AVFrame, filter_frame, nullptr, av_frame_free(&filter_frame));
    if ((filter_frame = av_frame_alloc()) == nullptr) {
        FATAL("allocate filter frame failed.");
    }
    // Open the raw-YUV output file in binary mode.
    NEW_PTR(FILE, output, nullptr, fclose(output));
    if ((output = fopen(dst_media, "wb")) == nullptr) {
        FATAL("no writable file.");
    }
    // Pull every frame currently available from the buffer sink and append it
    // to the file as planar YUV420P. Returns 0 on success, -1 on error.
    auto drain_sink = [&]() -> int {
        while (true) {
            auto got_ret = av_buffersink_get_frame(buffer_sink_ctx, filter_frame);
            // EAGAIN: the graph needs more input; EOF: graph fully flushed.
            if (got_ret == AVERROR(EAGAIN) || got_ret == AVERROR_EOF) break;
            if (got_ret < 0) {
                printf("get filter frame failed.");
                return -1;
            }
            // Copy row by row: linesize may include alignment padding.
            for (int i = 0; i < filter_frame->height; i++) {
                fwrite(filter_frame->data[0] + i * filter_frame->linesize[0], 1,
                       (size_t) filter_frame->width, output);
            }
            for (int i = 0; i < filter_frame->height / 2; i++) {
                fwrite(filter_frame->data[1] + i * filter_frame->linesize[1], 1,
                       (size_t) filter_frame->width / 2, output);
            }
            for (int i = 0; i < filter_frame->height / 2; i++) {
                fwrite(filter_frame->data[2] + i * filter_frame->linesize[2], 1,
                       (size_t) filter_frame->width / 2, output);
            }
            av_frame_unref(filter_frame);
        }
        return 0;
    };
    // Decode one packet (nullptr flushes the decoder) and push every decoded
    // frame through the filter graph. Returns 0 on success, -1 on error.
    auto decode_and_filter = [&](const AVPacket *packet) -> int {
        if (avcodec_send_packet(video_dec_ctx, packet) < 0) {
            printf("send packet exception.");
            return -1;
        }
        while (true) {
            auto frame_ret = avcodec_receive_frame(video_dec_ctx, video_frame);
            if (frame_ret == AVERROR(EAGAIN) || frame_ret == AVERROR_EOF) {
                break;
            }
            if (frame_ret < 0) {
                printf("receive frame exception.");
                return -1;
            }
            video_frame->pts = video_frame->best_effort_timestamp;
            // BUGFIX: a failure to feed the graph was silently ignored before.
            if (av_buffersrc_add_frame_flags(buffer_src_ctx, video_frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
                printf("feed filter graph failed.");
                return -1;
            }
            if (drain_sink() < 0) {
                return -1;
            }
            av_frame_unref(video_frame);
        }
        return 0;
    };
    // Demux until EOF, feeding only packets of the chosen video stream.
    while (av_read_frame(fmt_ctx, &pkt) >= 0) {
        if (pkt.stream_index == video_st_idx) {
            if (decode_and_filter(&pkt) < 0) {
                av_packet_unref(&pkt);
                return -1;
            }
        }
        av_packet_unref(&pkt);
    }
    // BUGFIX: flush both stages at EOF. Without this, frames buffered in the
    // decoder and inside the fps filter were dropped from the output.
    if (decode_and_filter(nullptr) < 0) {
        return -1;
    }
    if (av_buffersrc_add_frame_flags(buffer_src_ctx, nullptr, 0) < 0) {
        FATAL("flush filter graph failed.");
    }
    if (drain_sink() < 0) {
        return -1;
    }
    return 0;
}