FFmpeg - 8. Sample Programs (2): Screen Capture, Saved as YUV and H.264 Files

The command-line form of screen capture is described at the following link:

https://blog.csdn.net/leixiaohua1020/article/details/38284961

Put simply, our sample program uses API calls in code to carry out the same operations as those command-line instructions.

Command-line usage

List devices

ffmpeg -list_devices true -f dshow -i dummy

List a device's options

ffmpeg -list_options true -f dshow -i video="Integrated Camera"

Play camera data

ffplay -s 1280x720 -f dshow -i video="Integrated Camera"

This sets the resolution to 1280x720.

Screen capture

Linux

On Linux, x11grab can be used:

ffmpeg -f x11grab -s 1600x900 -r 50 -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -crf 18 -f mpegts udp://localhost:1234

Windows

The screen-capture device on Windows: gdigrab

gdigrab is a GDI-based screen-capture device that can also grab a specific region of the screen. Its usage is noted here.
By choosing the input URL, gdigrab supports two capture modes:
(1) "desktop": grab the whole desktop, or a particular region of it.
(2) "title={window title}": grab one specific window (see the example after this list).
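
For example, to grab a single window by its title (the window name below is only a placeholder; substitute the exact text from the target window's title bar):

ffmpeg -f gdigrab -framerate 15 -i title="Untitled - Notepad" out.mpg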

The simplest capture:

ffmpeg -f gdigrab -i desktop out.mpg
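
In practice you will often choose the encoder explicitly rather than rely on the defaults of the .mpg container; one reasonable variant (the output name and preset here are just examples):

ffmpeg -f gdigrab -framerate 25 -i desktop -c:v libx264 -preset ultrafast out.mp4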

Grab a 640x480 region starting at point (10,20) on the screen, with the frame rate set to 5:

ffmpeg -f gdigrab -framerate 5 -offset_x 10 -offset_y 20 -video_size 640x480 -i desktop out.mpg

Program source

/**
 * Simplest FFmpeg AVDevice example (screen capture)
 * Simplest FFmpeg Device (Screen Capture)
 *
 * At bottom, this program implements the following screen-capture command:
 * ffmpeg -f gdigrab -i desktop out.mpg
 *
 * It captures the desktop and plays the captured data back. It is the
 * simplest example based on FFmpeg's libavdevice library, and shows how
 * to use libavdevice in FFmpeg.
 * On Windows the screen can be captured in two ways:
 *  1. gdigrab: the GDI-based screen-capture device on Win32.
 *              To grab the desktop, use the input URL "desktop".
 *  2. dshow: via DirectShow. Note that the extra software
 *            screen-capture-recorder must be installed.
 * On Linux, x11grab can be used to capture the screen.
 * On macOS, avfoundation can be used to capture the screen.
 */

#include <stdio.h>

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/dict.h"
#include "SDL2/SDL.h"
}

#define OUTPUT_YUV420P 1	// dump the converted frames to output.yuv
#define OUTPUT_H264 1		// encode the frames with libx264 into output.h264

int main(int argc, char *argv[])
{
	AVFormatContext *pFormatCtx;
	AVStream *videoStream;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	AVFrame *pFrame, *pFrameYUV;
	AVPacket *pPacket;
	SwsContext *pImgConvertCtx;

	int videoIndex = -1;
	unsigned int i = 0;

	SDL_Window *screen;
	SDL_Renderer *sdlRenderer;
	SDL_Texture *sdlTexture;
	SDL_Rect sdlRect;

	int screen_w = 0;
	int screen_h = 0;

	printf("Starting...\n");

	//register device
	avdevice_register_all();

	pFormatCtx = avformat_alloc_context();

	//use gdigrab
	AVInputFormat *ifmt = av_find_input_format("gdigrab");
	if (!ifmt)
	{
		printf("can't find input device.\n");
		return -1;
	}

	AVDictionary *options = NULL;
	if (avformat_open_input(&pFormatCtx, "desktop", ifmt, &options) != 0)
	{
		printf("can't open input stream.\n");
		return -1;
	}

	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
	{
		printf("can't find stream information.\n");
		return -1;
	}

	for (i = 0; i < pFormatCtx->nb_streams; i++)
	{
		if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			videoIndex = i;
			break;
		}
	}

	if (videoIndex == -1)
	{
		printf("can't find a video stream.\n");
		return -1;
	}

	videoStream = pFormatCtx->streams[videoIndex];
	pCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
	if (pCodec == NULL)
	{
		printf("codec not found.\n");
		return -1;
	}

	pCodecCtx = avcodec_alloc_context3(pCodec);
	if (!pCodecCtx)
	{
		printf("can't alloc codec context.\n");
		return -1;
	}

	avcodec_parameters_to_context(pCodecCtx, videoStream->codecpar);

	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
	{
		printf("can't open codec.\n");
		return -1;
	}

	pFrame = av_frame_alloc();
	pFrameYUV = av_frame_alloc();
	pPacket = av_packet_alloc();	// av_packet_alloc initializes the packet, unlike raw av_malloc

	unsigned char *outBuffer = (unsigned char*) av_malloc(
			av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
					pCodecCtx->height, 1));
	av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, outBuffer,
			AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);

	pImgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
			pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
			AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
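	// gdigrab typically delivers packed BGRA frames; the SwsContext above
	// converts them to planar YUV420P for the raw dump, the H.264 encoder
	// and the SDL display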

#if OUTPUT_YUV420P
	FILE *fpYUV = fopen("output.yuv", "wb+");
#endif

#if OUTPUT_H264
	AVCodecContext *pH264CodecCtx;
	AVCodec *pH264Codec;

	FILE *fpH264 = fopen("output.h264", "wb+");

	// find the H.264 encoder
	pH264Codec = avcodec_find_encoder(AV_CODEC_ID_H264);
	if (!pH264Codec)
	{
		printf("can't find h264 codec.\n");
		return -1;
	}

	pH264CodecCtx = avcodec_alloc_context3(pH264Codec);
	pH264CodecCtx->codec_id = AV_CODEC_ID_H264;
	pH264CodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
	pH264CodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
	pH264CodecCtx->width = pCodecCtx->width;
	pH264CodecCtx->height = pCodecCtx->height;
	pH264CodecCtx->time_base.num = 1;
	pH264CodecCtx->time_base.den = 15;	// frame rate (frames per second)
	pH264CodecCtx->bit_rate = 800000;	// bit rate (adjusting it trades encoded quality against file size)
	pH264CodecCtx->gop_size = 12;	// at most 12 frames between keyframes
	pH264CodecCtx->qmin = 10;	// quantizer range: lower qmin means better quality,
	pH264CodecCtx->qmax = 51;	// higher qmax permits stronger compression
	// Some container formats want stream headers to be separate: when muxing
	// into such a format (its AVOutputFormat sets AVFMT_GLOBALHEADER) you
	// would set AV_CODEC_FLAG_GLOBAL_HEADER on the encoder. The raw Annex B
	// .h264 file written here does not need it.

	// encoder options
	AVDictionary *params = NULL;
	// H.264: superfast preset, zerolatency tune for real-time encoding
	av_dict_set(&params, "preset", "superfast", 0);
	av_dict_set(&params, "tune", "zerolatency", 0);
	if (avcodec_open2(pH264CodecCtx, pH264Codec, &params) < 0)
	{
		printf("can't open video encoder.\n");
		return -1;
	}

#endif

	//initialize SDL for display
	if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
	{
		printf("can't initialize SDL - %s\n", SDL_GetError());
		return -1;
	}

	screen_w = pCodecCtx->width;
	screen_h = pCodecCtx->height;

	screen = SDL_CreateWindow("Simplest ffmpeg device(screen capture)",
	SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h,
			SDL_WINDOW_OPENGL);
	if (!screen)
	{
		printf("SDL: can't create window - exiting: %s\n", SDL_GetError());
		return -1;
	}

	sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
	sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV,
			SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);

	sdlRect.x = 0;
	sdlRect.y = 0;
	sdlRect.w = screen_w;
	sdlRect.h = screen_h;

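	// Main loop: read a raw packet from gdigrab, decode it into a frame,
	// convert it to YUV420P, optionally dump/encode it, then display it via SDL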
	while (av_read_frame(pFormatCtx, pPacket) >= 0)
	{
		if (pPacket->stream_index == videoIndex)
		{
			int ret = avcodec_send_packet(pCodecCtx, pPacket);
			if (ret < 0)
			{
				printf("Decode error.\n");
				return -1;
			}

			if (avcodec_receive_frame(pCodecCtx, pFrame) >= 0)
			{
				sws_scale(pImgConvertCtx,
						(const unsigned char* const*) pFrame->data,
						pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
						pFrameYUV->linesize);

#if OUTPUT_YUV420P
				int y_size = pCodecCtx->width * pCodecCtx->height;
				fwrite(pFrameYUV->data[0], 1, y_size, fpYUV);		//Y
				fwrite(pFrameYUV->data[1], 1, y_size / 4, fpYUV);	//U
				fwrite(pFrameYUV->data[2], 1, y_size / 4, fpYUV);	//V
#endif

#if OUTPUT_H264
				// pFrameYUV now holds YUV420P data produced by sws_scale, so
				// label it with that format (not the grabbed source format)
				pFrameYUV->format = AV_PIX_FMT_YUV420P;
				pFrameYUV->width = pCodecCtx->width;
				pFrameYUV->height = pCodecCtx->height;
				ret = avcodec_send_frame(pH264CodecCtx, pFrameYUV);
				if (ret < 0)
				{
					printf("failed to encode.\n");
					return -1;
				}

				// avcodec_receive_packet unrefs pPacket before filling it,
				// so reusing the decoded input packet here is safe
				if (avcodec_receive_packet(pH264CodecCtx, pPacket) >= 0)
				{
					// fwrite returns the number of bytes written, never a
					// negative error code, so compare against the packet size
					if (fwrite(pPacket->data, 1, pPacket->size, fpH264)
							!= (size_t) pPacket->size)
					{
						printf("write into output.h264 failed.\n");
					}
				}
#endif
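				// outBuffer is one contiguous allocation (align = 1), so all
				// three YUV planes can be uploaded in a single call via
				// data[0] and the luma pitch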
				SDL_UpdateTexture(sdlTexture, &sdlRect, pFrameYUV->data[0],
						pFrameYUV->linesize[0]);
				SDL_RenderClear(sdlRenderer);
				SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
				SDL_RenderPresent(sdlRenderer);
				SDL_Delay(40);
			}
		}

		av_packet_unref(pPacket);
	}
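
	// Note: a complete implementation would flush the decoder and the H.264
	// encoder here (send NULL via avcodec_send_packet / avcodec_send_frame
	// and drain the remaining output) so buffered data is not lost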

	sws_freeContext(pImgConvertCtx);

#if OUTPUT_YUV420P
	fclose(fpYUV);
#endif

#if OUTPUT_H264
	fclose(fpH264);
#endif

	SDL_Quit();

	av_packet_free(&pPacket);
	av_free(outBuffer);
	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);
	avcodec_free_context(&pCodecCtx);
#if OUTPUT_H264
	avcodec_free_context(&pH264CodecCtx);	// only defined when H.264 output is enabled
#endif
	avformat_close_input(&pFormatCtx);


	return 0;
}
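
A build command sketch for reference, assuming the FFmpeg and SDL2 development packages are installed and visible to pkg-config (the source file name screen_capture.cpp is just a placeholder):

g++ screen_capture.cpp -o screen_capture $(pkg-config --cflags --libs libavdevice libavformat libavcodec libswscale libavutil sdl2)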
