ffmpeg解码

本文分别介绍软解和硬解两种视频解码方式，并将获取的每一帧转换成二进制或图片格式。本文只介绍解码流程本身。

硬解码：下面这个类将解码得到的每一帧转换成二进制格式，然后存放到队列中。注意硬解码需要先安装 CUDA（安装方法可自行搜索）。

#include "hw_video_decoder.h"
#include <QDebug>
#include <thread>
#include <chrono>

AVPixelFormat HWVideoDecoder::hw_pix_fmt;

/// Initialize the hardware (CUDA/NVDEC) decoding pipeline for the given path.
/// Opens the input, locates the video stream, checks the decoder supports the
/// hw device, creates the hw device context and the NV12->BGR32 converter.
/// @param frameQueue  destination queue for decoded frames (not owned)
/// @param path        file path or RTSP URL
/// @return true on success; on failure, partially-initialized FFmpeg objects
///         are left for destroy() to release.
bool HWVideoDecoder::init(SharedQueue* frameQueue, const QString& path)
{
	this->frameQueue = frameQueue;
	bRunning = true;

	// Reset decoder state so a failed init leaves well-defined nullptrs.
	hw_device_ctx = nullptr;
	input_ctx = nullptr;
	decoder_ctx = nullptr;
	decoder = nullptr;

	// Look up the requested hardware decoding device.
	const char* device_name = "cuda";
	AVHWDeviceType hw_device_type = av_hwdevice_find_type_by_name(device_name);
	if (hw_device_type == AV_HWDEVICE_TYPE_NONE) {
		// qDebug does not interpolate printf-style %s; stream the value instead.
		qDebug() << "Device name is not supported:" << device_name;
		qDebug() << "Available device types:";
		while ((hw_device_type = av_hwdevice_iterate_types(hw_device_type)) != AV_HWDEVICE_TYPE_NONE)
			qDebug() << av_hwdevice_get_type_name(hw_device_type);
		return false;
	}

	// Open the input. Force TCP for RTSP to avoid UDP packet loss.
	AVDictionary *options = nullptr;
	av_dict_set(&options, "rtsp_transport", "tcp", 0);
	input_ctx = avformat_alloc_context();
	// Keep the std::string alive for the whole scope: storing the c_str() of
	// the temporary returned by toStdString() would leave a dangling pointer.
	const std::string input_file = path.toStdString();
	const int open_result = avformat_open_input(&input_ctx, input_file.c_str(), nullptr, &options);
	av_dict_free(&options);  // release option entries not consumed by the demuxer
	if (open_result != 0) {
		qDebug() << "Cannot open input file" << input_file.c_str();
		return false;
	}

	// Probe the container for stream information.
	if (avformat_find_stream_info(input_ctx, nullptr) < 0) {
		qDebug() << "Cannot find input stream information";
		return false;
	}

	// Locate the best video stream and its decoder.
	result = av_find_best_stream(input_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
	if (result < 0) {
		qDebug() << "Cannot find a video stream in the input file";
		return false;
	}
	stream_index = result;

	// Verify the decoder supports the chosen hw device type, and pick up the
	// hardware pixel format it will produce.
	for (int i = 0;; i++) {
		const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
		if (!config) {
			qDebug() << "Decoder does not support device hw_device_type"
				<< decoder->name << av_hwdevice_get_type_name(hw_device_type);
			return false;
		}
		if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
			config->device_type == hw_device_type) {
			hw_pix_fmt = config->pix_fmt;
			break;
		}
	}

	// Allocate the codec context. NOTE: the original returned
	// (bool)AVERROR(ENOMEM), which is a nonzero value and therefore *true* --
	// init must report failure here.
	if (!(decoder_ctx = avcodec_alloc_context3(decoder)))
		return false;

	// Copy the stream's codec parameters into the codec context.
	stream = input_ctx->streams[stream_index];
	if (avcodec_parameters_to_context(decoder_ctx, stream->codecpar) < 0)
		return false;

	// Let FFmpeg negotiate the pixel format through our callback.
	decoder_ctx->get_format = get_hw_format;

	// Create the hardware device context and attach it to the decoder.
	if ((result = av_hwdevice_ctx_create(&hw_device_ctx, hw_device_type, nullptr, nullptr, 0)) < 0) {
		qDebug() << "Failed to create specified HW device.";
		return false;
	}
	decoder_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);

	// Open the decoder.
	if ((result = avcodec_open2(decoder_ctx, decoder, nullptr)) < 0) {
		qDebug() << "Failed to open codec for stream" << stream_index;
		return false;
	}

	// Frame buffers: ping receives hw frames, pong holds the CPU copy after
	// av_hwframe_transfer_data, yuv_frame wraps the BGR output buffer.
	ping_frame = av_frame_alloc();
	pong_frame = av_frame_alloc();
	yuv_frame = av_frame_alloc();
	frame = nullptr;

	// Set up the NV12 (NVDEC output) -> BGR32 software converter.
	input_fmt = AV_PIX_FMT_NV12;
	output_fmt = AV_PIX_FMT_BGR32;
	size = av_image_get_buffer_size(output_fmt, decoder_ctx->width, decoder_ctx->height, 1);
	buffer = (uint8_t *)av_malloc(size);
	av_image_fill_arrays(yuv_frame->data, yuv_frame->linesize, buffer, output_fmt, decoder_ctx->width, decoder_ctx->height, 1);
	swsContext = sws_getContext(
		decoder_ctx->width, decoder_ctx->height, input_fmt,
		decoder_ctx->width, decoder_ctx->height, output_fmt,
		SWS_BICUBIC, NULL, NULL, NULL);

	return true;
}

// Stop the decoding thread and release all FFmpeg resources.
// Order matters: clear the run flag first so run() exits its loop, then
// wait() for the thread to finish before destroy() frees objects the
// thread may still be using.
void HWVideoDecoder::stop()
{
	bRunning = false;
	wait();
	destroy();
}

bool HWVideoDecoder::destroy()
{
	packet.data = nullptr;
	packet.size = 0;
	av_packet_unref(&packet);
	av_freep(&buffer);
	av_frame_free(&ping_frame);
	av_frame_free(&pong_frame);
	avcodec_free_context(&decoder_ctx);
	avformat_close_input(&input_ctx);
	av_buffer_unref(&hw_device_ctx);

	return true;
}

void HWVideoDecoder::run()
{
	static const int kWaitTime = 50;
	static const int kMaxQueueSize = 16;

	std::chrono::milliseconds duration(kWaitTime);
	while (bRunning)
	{
		if (frameQueue->size() > kMaxQueueSize)
		{
			std::this_thread::sleep_for(duration);
			continue;
		}

		result = av_read_frame(input_ctx, &packet);
		if (result == AVERROR_EOF)
		{
			avio_seek(input_ctx->pb, 0, SEEK_SET);
			avformat_seek_file(input_ctx, stream_index, 0, 0, stream->duration, 0);
			continue;
		}
		if (result < 0)
		{
			qDebug() << "Failed to read frame!";
			return;
		}

		if (stream_index == packet.stream_index)
		{
			result = avcodec_send_packet(decoder_ctx, &packet);
			if (result < 0)
			{
				//qDebug() << "Failed to send packet" << result;
				continue;
			}

			result = avcodec_receive_frame(decoder_ctx, ping_frame);
			if (result == AVERROR(EAGAIN) || result == AVERROR_EOF || result < 0)
			{
				qDebug() << "Failed to receive frame";
				return;
			}

			if (ping_frame->format == hw_pix_fmt) 
			{
				/* retrieve data from GPU to CPU */
				if (av_hwframe_transfer_data(pong_frame, ping_frame, 0) < 0) 
				{
					qDebug() << "Failed to transfer the data to system memory";
					return;
				}
				frame = pong_frame;
			}
			else
			{
				frame = ping_frame;
			}

			// 转换成yuv格式
			result = sws_scale(swsContext, (const uint8_t* const*)frame->data, frame->linesize, 0,
				decoder_ctx->height, yuv_frame->data, yuv_frame->linesize);

			// 填充到帧缓存队列中
			VideoFrame frame;
			frame.width = decoder_ctx->width;
			frame.height = decoder_ctx->height;
			frame.data = QByteArray((char *)buffer, size * sizeof(uint8_t));
			frameQueue->push_back(frame);

		}
		//av_packet_unref(&packet);
		av_free_packet(&packet);
	}
}

/// Milliseconds per frame derived from the stream's average frame rate.
/// Some streams (notably live RTSP) report an unknown (0/x) avg_frame_rate;
/// fall back to 40 ms (25 fps) instead of dividing by zero.
int HWVideoDecoder::getFrameTimeMs()
{
	const double fps = av_q2d(stream->avg_frame_rate);
	if (fps <= 0.0)
		return 40;  // 25 fps default when the rate is unknown
	return (int)(1000.0 / fps);
}

/// get_format callback: pick the hardware pixel format discovered in init()
/// from the decoder's list of supported formats.
/// @param pix_fmts  AV_PIX_FMT_NONE-terminated list offered by the decoder
/// @return hw_pix_fmt if offered, AV_PIX_FMT_NONE otherwise (decode will fail).
AVPixelFormat HWVideoDecoder::get_hw_format(AVCodecContext *, const AVPixelFormat *pix_fmts)
{
	// The list is terminated by AV_PIX_FMT_NONE; use the named constant
	// instead of the magic -1.
	for (const AVPixelFormat *p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
		if (*p == hw_pix_fmt)
			return *p;
	}

	//qDebug() << "Failed to get HW surface format";
	return AV_PIX_FMT_NONE;
}

软解码：下面这个类将每一帧转换成二进制和图片两种格式。

#include "video_decoder.h"
#include <QDebug>
#include <thread>
#include <chrono>

/// Initialize the software decoding pipeline for the given path.
/// Opens the input (TCP transport for RTSP), finds the first video stream,
/// opens its decoder and sets up the BGR32 converter and output buffer.
/// @param frameQueue  destination queue for decoded frames (not owned)
/// @param path        file path or RTSP URL
/// @return true on success.
bool VideoDecoder::init(SharedQueue* frameQueue, const QString& path)
{
	this->frameQueue = frameQueue;
	bRunning = true;
	nowPath = path;

	avformat_network_init();
	av_register_all(); // register all container formats (pre-FFmpeg-4 API)

	pFormatCtx = avformat_alloc_context();

	// RTSP over TCP with a small max delay for low-latency streaming.
	AVDictionary *avdic = NULL;
	char option_key[] = "rtsp_transport";
	char option_value[] = "tcp";
	av_dict_set(&avdic, option_key, option_value, 0);

	char option_key2[] = "max_delay";
	char option_value2[] = "100";
	av_dict_set(&avdic, option_key2, option_value2, 0);

	const int open_result = avformat_open_input(&pFormatCtx, path.toStdString().c_str(), NULL, &avdic);
	av_dict_free(&avdic);  // was leaked: entries not consumed must be freed
	if (open_result != 0)
	{
		qDebug() << "can't open the file:" << path;
		return false;
	}

	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
	{
		qDebug() << "can't find stream infomation";
		return false;
	}

	// Pick the first video stream. The original loop kept overwriting the
	// index, silently selecting the *last* video stream instead.
	videoStreamIndex = -1;
	for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
	{
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			videoStreamIndex = i;
			break;
		}
	}

	if (videoStreamIndex == -1)
	{
		qDebug() << "can't find a video stream";
		return false;
	}

	// Use the stream's embedded codec context (deprecated but consistent
	// with the rest of this class) and enable threaded, low-delay decoding.
	pCodecCtx = pFormatCtx->streams[videoStreamIndex]->codec;
	pCodecCtx->thread_count = 4;
	pCodecCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
	pCodec = findCodec(pCodecCtx);
	if (pCodec == NULL)
	{
		qDebug() << "can't find a codec";
		return false;
	}

	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
	{
		qDebug() << "can't open a codec";
		return false;
	}

	// pFrame receives decoded frames; pFrameBGR wraps the BGR32 output buffer.
	pFrame = av_frame_alloc();
	pFrameBGR = av_frame_alloc();

	imgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
		pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
		AV_PIX_FMT_BGR32, SWS_BICUBIC, NULL, NULL, NULL);

	byteNum = avpicture_get_size(AV_PIX_FMT_BGR32, pCodecCtx->width, pCodecCtx->height);

	outBuffer = (uint8_t *)av_malloc(byteNum * sizeof(uint8_t));
	avpicture_fill((AVPicture *)pFrameBGR, outBuffer, AV_PIX_FMT_BGR32,
		pCodecCtx->width, pCodecCtx->height);

	// Reusable packet; av_read_frame will (re)fill it each iteration.
	packet = (AVPacket *)malloc(sizeof(AVPacket));
	av_new_packet(packet, pCodecCtx->width * pCodecCtx->height);

	return true;
}

// Stop the decoding thread and release all FFmpeg resources.
// Order matters: clear the run flag first so run() exits its loop, then
// wait() for the thread to finish before destroy() frees objects the
// thread may still be using.
void VideoDecoder::stop()
{
	bRunning = false;
	wait();
	destroy();
}

/// Decoding thread body: read packets, decode in software, convert to BGR32
/// and push both the raw bytes and a QImage copy to frameQueue.
/// Loops the input on EOF; stops on read/decode errors or when bRunning clears.
void VideoDecoder::run()
{
	static const int kWaitTime = 5;
	static const int kMaxQueueSize = 30;

	std::chrono::milliseconds duration(kWaitTime);
	while (bRunning)
	{
		// Throttle when the consumer is falling behind.
		if (frameQueue->size() > kMaxQueueSize)
		{
			std::this_thread::sleep_for(duration);
			continue;
		}

		int nextFrame = av_read_frame(pFormatCtx, packet);
		if (nextFrame == AVERROR_EOF)
		{
			// Loop playback: rewind the IO context and seek to the start.
			auto stream = pFormatCtx->streams[videoStreamIndex];
			avio_seek(pFormatCtx->pb, 0, SEEK_SET);
			avformat_seek_file(pFormatCtx, videoStreamIndex, 0, 0, stream->duration, 0);
			continue;
		}

		if (nextFrame < 0)
		{
			qDebug() << "read frame error";
			bRunning = false;
			break;
		}

		if (packet->stream_index == videoStreamIndex)
		{
			int got_picture = 0;
			int ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);

			if (ret < 0)
			{
				qDebug() << "decode error";
				av_free_packet(packet);  // was leaked on this exit path
				break;
			}

			if (got_picture)
			{
				sws_scale(imgConvertCtx,
					(uint8_t const * const *)pFrame->data,
					pFrame->linesize, 0, pCodecCtx->height, pFrameBGR->data,
					pFrameBGR->linesize);

				// outBuffer and the QImage are sized by pCodecCtx dimensions,
				// so report those (pFrame->width may differ for some streams
				// and would mismatch the byte buffer).
				VideoFrame frame;
				frame.width = pCodecCtx->width;
				frame.height = pCodecCtx->height;
				frame.data = QByteArray((char *)outBuffer, byteNum * sizeof(uint8_t));
				QImage tmpImg((uchar *)outBuffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
				// Deep-copy via conversion so the image does not alias outBuffer.
				frame.img = tmpImg.convertToFormat(QImage::Format_RGB888, Qt::NoAlpha);

				lastTime = currentTime;
				frameQueue->push_back(frame);
			}
		}
		av_free_packet(packet);
		std::this_thread::sleep_for(duration);
	}
}

void VideoDecoder::destroy()
{
	av_free(outBuffer);
	av_free(pFrameBGR);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
}

/// Milliseconds per frame derived from the codec context's frame rate.
/// Some streams leave framerate unset (0/x); fall back to 40 ms (25 fps)
/// instead of dividing by zero.
int VideoDecoder::getFrameTimeMs()
{
	const double fps = av_q2d(pCodecCtx->framerate);
	if (fps <= 0.0)
		return 40;  // 25 fps default when the rate is unknown
	return (int)(1000.0 / fps);
}

/// Locate the software decoder registered for the context's codec id.
/// Caches the result in the pCodec member and returns it (NULL if not found).
AVCodec* VideoDecoder::findCodec(AVCodecContext *codecCtx)
{
	// Software decode path: use FFmpeg's default decoder for this codec id.
	pCodec = avcodec_find_decoder(codecCtx->codec_id);
	return pCodec;
}

你可能感兴趣的:(QT,qt,ffmpeg)