Audio/Video Application Development Series: Table of Contents
The puller follows the producer-consumer pattern: one thread pulls the raw audio/video stream and stores the packets in queues, an audio consumer thread takes packets from the audio queue for decoding and further processing, and a video consumer thread takes packets from the video queue for decoding and further processing.
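AvCommon.h is not listed here. Besides pulling in the FFmpeg headers, it is assumed to provide the AvPacketSafeQueue used for the two packet queues, as well as the CPP11_MSLEEP, MB and AVENGINE_MEDIA_DRIVER macros referenced below. As a rough idea of the queue, here is a minimal sketch built on std::mutex and std::queue; the eq/dq/size/empty/clear interface matches how the puller uses it, while the internals are my own assumption (in particular, size() here returns a packet count, whereas the real implementation may track total bytes, since the producer compares it against 1 * MB):

#include <mutex>
#include <queue>
extern "C" {
#include <libavcodec/avcodec.h>
}

// Hypothetical thread-safe packet queue with the interface the puller relies on.
class AvPacketSafeQueue {
public:
    // enqueue a packet (the AVPacket struct is copied; its buffer reference keeps the data alive)
    void eq(const AVPacket &p) {
        std::lock_guard<std::mutex> lk(mtx_);
        q_.push(p);
    }
    // dequeue a packet; returns an empty packet (data == NULL) when the queue is empty
    AVPacket dq() {
        std::lock_guard<std::mutex> lk(mtx_);
        AVPacket p;
        av_init_packet(&p);
        p.data = NULL;
        p.size = 0;
        if (!q_.empty()) { p = q_.front(); q_.pop(); }
        return p;
    }
    size_t size() { std::lock_guard<std::mutex> lk(mtx_); return q_.size(); }
    bool empty() { std::lock_guard<std::mutex> lk(mtx_); return q_.empty(); }
    // drop and unreference every cached packet
    void clear() {
        std::lock_guard<std::mutex> lk(mtx_);
        while (!q_.empty()) { av_packet_unref(&q_.front()); q_.pop(); }
    }
private:
    std::mutex mtx_;
    std::queue<AVPacket> q_;
};

The header, AvPuller.h, comes first.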
#ifndef _AV_PULLER_H_
#define _AV_PULLER_H_
/***********************************************************
** Author:kaychan
** Date:2019-11-04
** Mail:[email protected]
** Explain:an audio/video puller
***********************************************************/
#include "AvCommon.h"
// callback invoked for each decoded frame
typedef void(*on_frame)(AVStream *stream, AVFrame *frame, void *priv);
// error callback
typedef void(*on_error)(std::string error, void *priv);
typedef struct AvPullerParamters_S {
public:
void *priv = NULL;
std::string stream = "";
enum AVHWDeviceType hw_dev_type = AV_HWDEVICE_TYPE_NONE;
enum AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
AVBufferRef *hw_dev_ctx = NULL;
bool run = true;
AVFormatContext *fmt_ctx = NULL;
AVCodecContext *audio_cc = NULL;
AVCodecContext *video_cc = NULL;
int64_t cache_packets_size = 0;
AvPacketSafeQueue audio_packets;
AvPacketSafeQueue video_packets;
AVFrame *audio_frame = NULL;
AVFrame *video_frame = NULL;
AVFrame *video_hw2sw_frame = NULL;
int audio_idx = -1;
int video_idx = -1;
on_frame audio_on_frame = NULL;
on_frame video_on_frame = NULL;
on_error av_on_error = NULL;
}AvPullerParamters;
class AvPuller {
public:
AvPuller();
// start the puller
// stream: stream name or URL
// audio_on_frame: audio frame consumer callback
// video_on_frame: video frame consumer callback
// on_error: error callback
// hw_dev_type: hardware device type, to enable hardware-accelerated decoding
// (see the earlier post in this series on FFmpeg hardware-accelerated video decoding)
// priv: opaque pointer passed back to every callback
void start(const std::string stream,
on_frame audio_on_frame, on_frame video_on_frame, on_error av_on_error,
enum AVHWDeviceType hw_dev_type = AV_HWDEVICE_TYPE_NONE,
void *priv = NULL);
// stop the puller
void stop();
private:
std::thread tsk_p_av_;
std::thread tsk_c_a_;
std::thread tsk_c_v_;
AvPullerParamters paramters_;
};
#endif
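That is the complete header. The implementation, AvPuller.cpp, follows.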
#include "AvPuller.h"
// Convert an ANSI (local code-page) string to UTF-8 using the Win32 API; FFmpeg expects UTF-8 URLs/paths on Windows.
static std::string AnsiToUTF8(const char *_ansi, int _ansi_len) {
std::string str_utf8("");
wchar_t* pUnicode = NULL;
BYTE * pUtfData = NULL;
do {
int unicodeNeed = MultiByteToWideChar(CP_ACP, 0, _ansi, _ansi_len, NULL, 0);
pUnicode = new wchar_t[unicodeNeed + 1];
memset(pUnicode, 0, (unicodeNeed + 1) * sizeof(wchar_t));
int unicodeDone = MultiByteToWideChar(CP_ACP, 0, _ansi, _ansi_len, (LPWSTR)pUnicode, unicodeNeed);
if (unicodeDone != unicodeNeed) break;
int utfNeed = WideCharToMultiByte(CP_UTF8, 0, (LPWSTR)pUnicode, unicodeDone, (char *)pUtfData, 0, NULL, NULL);
pUtfData = new BYTE[utfNeed + 1];
memset(pUtfData, 0, utfNeed + 1);
int utfDone = WideCharToMultiByte(CP_UTF8, 0, (LPWSTR)pUnicode, unicodeDone, (char *)pUtfData, utfNeed, NULL, NULL);
if (utfNeed != utfDone) break;
str_utf8.assign((char *)pUtfData);
} while (false);
if (pUnicode) delete[] pUnicode;
if (pUtfData) delete[] pUtfData;
return str_utf8;
}
// pixel format chosen for hardware decoding, set once the hw device context is created
enum AVPixelFormat global_hw_pix_fmt = AV_PIX_FMT_NONE;
// get_format callback: pick the hardware pixel format when the decoder offers it
static enum AVPixelFormat on_get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) {
const enum AVPixelFormat *p;
for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
if (*p == global_hw_pix_fmt)
return *p;
}
return AV_PIX_FMT_NONE;
}
static inline void emit_error(int errcode, AvPullerParamters &paramters) {
paramters.run = false;
char error[256];
av_strerror(errcode, error, 256);
paramters.av_on_error(error, paramters.priv);
}
static inline void emit_error(std::string errstr, AvPullerParamters &paramters) {
paramters.run = false;
paramters.av_on_error(errstr, paramters.priv);
}
// producer thread: open the input, set up the decoders, then read packets into the audio/video queues
void AvPuller_p_av(AvPullerParamters &paramters) {
// open media input
AVInputFormat *ifmt = NULL;
// "audio=..." / "video=..." in the stream string means a local capture device, so force the capture input format
std::string::size_type sta = paramters.stream.find("audio=");
std::string::size_type stv = paramters.stream.find("video=");
if (sta != std::string::npos || stv != std::string::npos) {
ifmt = av_find_input_format(AVENGINE_MEDIA_DRIVER);
}
std::string url_utf8 = AnsiToUTF8(paramters.stream.c_str(), paramters.stream.length());
AVDictionary *dict = NULL;
av_dict_set(&dict, "stimeout", "2000000", 0); // rtsp socket timeout, in microseconds
av_dict_set(&dict, "buffer_size", "4096000", 0);
av_dict_set(&dict, "recv_buffer_size", "4096000", 0);
int r = avformat_open_input(&paramters.fmt_ctx, url_utf8.c_str(), ifmt, &dict);
av_dict_free(&dict); // release the options dictionary once the demuxer has been opened
if (r != 0) {
emit_error(r, paramters);
return;
}
r = avformat_find_stream_info(paramters.fmt_ctx, NULL);
if (r < 0) {
emit_error(r, paramters);
return;
}
// find media stream index
AVCodec *ac = NULL, *vc = NULL;
paramters.audio_idx = av_find_best_stream(paramters.fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, &ac, 0);
paramters.video_idx = av_find_best_stream(paramters.fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &vc, 0);
// open media stream decoder
if (paramters.audio_idx >= 0) {
paramters.audio_cc = avcodec_alloc_context3(ac);
avcodec_parameters_to_context(paramters.audio_cc, paramters.fmt_ctx->streams[paramters.audio_idx]->codecpar);
av_codec_set_pkt_timebase(paramters.audio_cc, paramters.fmt_ctx->streams[paramters.audio_idx]->time_base);
r = avcodec_open2(paramters.audio_cc, ac, NULL);
if (r == 0) {
paramters.audio_frame = av_frame_alloc();
if (!paramters.audio_frame) {
emit_error("av_frame_alloc failed", paramters);
return;
}
}
else {
emit_error(r, paramters);
return;
}
}
if (paramters.video_idx >= 0) {
paramters.video_cc = avcodec_alloc_context3(vc);
avcodec_parameters_to_context(paramters.video_cc, paramters.fmt_ctx->streams[paramters.video_idx]->codecpar);
av_codec_set_pkt_timebase(paramters.video_cc, paramters.fmt_ctx->streams[paramters.video_idx]->time_base);
// probe whether the decoder supports the requested hardware device type
if (paramters.hw_dev_type != AV_HWDEVICE_TYPE_NONE) {
paramters.hw_pix_fmt = AV_PIX_FMT_NONE;
int iii = 0;
for (;;) {
const AVCodecHWConfig *config = avcodec_get_hw_config(vc, iii++);
if (config) {
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
config->device_type == paramters.hw_dev_type) {
paramters.hw_pix_fmt = config->pix_fmt;
r = av_hwdevice_ctx_create(&paramters.hw_dev_ctx, paramters.hw_dev_type, NULL, NULL, 0);
if (r == 0) {
global_hw_pix_fmt = paramters.hw_pix_fmt;
paramters.video_cc->get_format = on_get_hw_format;
paramters.video_cc->hw_device_ctx = av_buffer_ref(paramters.hw_dev_ctx);
}
break; // supported
}
}
else break; // no more hw configs: this decoder does not support the requested device type
}
}
r = avcodec_open2(paramters.video_cc, vc, NULL);
if (r == 0) {
paramters.video_frame = av_frame_alloc();
paramters.video_hw2sw_frame = av_frame_alloc();
if (!paramters.video_frame || !paramters.video_hw2sw_frame) {
emit_error("av_frame_alloc failed", paramters);
return;
}
}
else {
emit_error(r, paramters);
return;
}
}
// producer loop: read packets and queue them for the consumers
while (paramters.run) {
AVPacket packet;
paramters.cache_packets_size = paramters.audio_packets.size() + paramters.video_packets.size();
if (paramters.cache_packets_size < 1 * MB) {
r = av_read_frame(paramters.fmt_ctx, &packet);
if (r >= 0) {
if (packet.stream_index == paramters.audio_idx) paramters.audio_packets.eq(packet);
else if (packet.stream_index == paramters.video_idx) paramters.video_packets.eq(packet);
else av_packet_unref(&packet); // packets from other streams (e.g. subtitles) are dropped
}
else {
// either the rtsp/rtmp connection has dropped or the file reached EOF
break;
}
}
else CPP11_MSLEEP(30); // the cache is full enough, back off before reading more
}
}
// audio consumer thread: dequeue audio packets, decode them and hand each frame to the callback
void AvPuller_c_a(AvPullerParamters &paramters) {
// start consume audio
while (paramters.run) {
if (!paramters.audio_packets.empty()) {
AVPacket p = paramters.audio_packets.dq();
if (!p.data) continue;
int r = avcodec_send_packet(paramters.audio_cc, &p);
av_packet_unref(&p);
if (r == 0) {
// a packet may decode into more than one audio frame, so drain the decoder
while (avcodec_receive_frame(paramters.audio_cc, paramters.audio_frame) == 0) {
paramters.audio_on_frame(paramters.fmt_ctx->streams[paramters.audio_idx],
paramters.audio_frame, paramters.priv);
}
}
}else CPP11_MSLEEP(30);
}
}
// video consumer thread: dequeue video packets, decode them (downloading hw frames when needed) and hand each frame to the callback
void AvPuller_c_v(AvPullerParamters &paramters) {
// start consume video
while (paramters.run) {
if (!paramters.video_packets.empty()) {
AVPacket p = paramters.video_packets.dq();
if (!p.data) continue;
int r = avcodec_send_packet(paramters.video_cc, &p);
av_packet_unref(&p);
if (r == 0) {
// drain every frame the decoder produced for this packet
while (avcodec_receive_frame(paramters.video_cc, paramters.video_frame) == 0) {
AVFrame *tf = paramters.video_frame;
if (paramters.video_frame->format == paramters.hw_pix_fmt) {
// the decoded frame lives in GPU memory; download it into a system-memory frame
r = av_hwframe_transfer_data(paramters.video_hw2sw_frame, paramters.video_frame, 0);
if (r == 0)
tf = paramters.video_hw2sw_frame;
}
// carry the timestamp over in case we hand out the downloaded frame
tf->best_effort_timestamp = paramters.video_frame->best_effort_timestamp;
paramters.video_on_frame(paramters.fmt_ctx->streams[paramters.video_idx],
tf, paramters.priv);
}
}
}
else CPP11_MSLEEP(30);
}
}
AvPuller::AvPuller() {
}
void AvPuller::start(const std::string stream,
on_frame audio_on_frame, on_frame video_on_frame, on_error av_on_error,
enum AVHWDeviceType hw_dev_type,
void *priv) {
paramters_.stream = stream;
paramters_.audio_on_frame = audio_on_frame;
paramters_.video_on_frame = video_on_frame;
paramters_.av_on_error = av_on_error;
paramters_.hw_dev_type = hw_dev_type;
paramters_.priv = priv;
paramters_.run = true;
tsk_p_av_ = std::thread(AvPuller_p_av, std::ref(paramters_));
tsk_c_a_ = std::thread(AvPuller_c_a, std::ref(paramters_));
tsk_c_v_ = std::thread(AvPuller_c_v, std::ref(paramters_));
}
// stop the worker threads and release all FFmpeg resources
void AvPuller::stop() {
paramters_.run = false;
if (tsk_p_av_.joinable()) tsk_p_av_.join();
if (tsk_c_a_.joinable()) tsk_c_a_.join();
if (tsk_c_v_.joinable()) tsk_c_v_.join();
paramters_.audio_packets.clear();
paramters_.video_packets.clear();
if (paramters_.audio_frame) {
av_frame_free(&paramters_.audio_frame);
paramters_.audio_frame = NULL;
}
if (paramters_.video_frame) {
av_frame_free(&paramters_.video_frame);
paramters_.video_frame = NULL;
}
if (paramters_.video_hw2sw_frame) {
av_frame_free(&paramters_.video_hw2sw_frame);
paramters_.video_hw2sw_frame = NULL;
}
if (paramters_.audio_cc) {
avcodec_close(paramters_.audio_cc);
avcodec_free_context(&paramters_.audio_cc);
paramters_.audio_cc = NULL;
}
if (paramters_.video_cc) {
avcodec_close(paramters_.video_cc);
avcodec_free_context(&paramters_.video_cc);
paramters_.video_cc = NULL;
}
if (paramters_.hw_dev_ctx) {
av_buffer_unref(&paramters_.hw_dev_ctx);
paramters_.hw_dev_ctx = NULL;
}
if (paramters_.fmt_ctx) {
avformat_close_input(&paramters_.fmt_ctx);
paramters_.fmt_ctx = NULL;
}
}
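Finally, a minimal usage sketch. The callback signatures come from AvPuller.h above; the my_* handlers, the RTSP URL and the ten-second run are placeholders, and CPP11_MSLEEP (from AvCommon.h) is assumed to sleep for the given number of milliseconds:

#include "AvPuller.h"
#include <cstdio>

// placeholder audio consumer: e.g. resample here and feed an audio renderer
static void my_audio_frame(AVStream *stream, AVFrame *frame, void *priv) {
    printf("audio frame: %d samples\n", frame->nb_samples);
}
// placeholder video consumer: e.g. convert with sws_scale here and display
static void my_video_frame(AVStream *stream, AVFrame *frame, void *priv) {
    printf("video frame: %dx%d\n", frame->width, frame->height);
}
static void my_error(std::string error, void *priv) {
    printf("puller error: %s\n", error.c_str());
}

int main() {
    AvPuller puller;
    // pass e.g. AV_HWDEVICE_TYPE_DXVA2 instead of AV_HWDEVICE_TYPE_NONE to enable hardware decoding
    puller.start("rtsp://127.0.0.1/live/test",
        my_audio_frame, my_video_frame, my_error,
        AV_HWDEVICE_TYPE_NONE, NULL);
    CPP11_MSLEEP(10 * 1000); // pull for about ten seconds
    puller.stop();
    return 0;
}

Note that the frames handed to the callbacks are the puller's internal, reused AVFrames, so copy or fully consume them before the callback returns.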