学习FFmpeg简单Samples之拉取网络流并显示

实例代码

int IpCamera()
{
	AVDictionary* options = NULL;
	av_dict_set(&options, "buffer_size", "4096000", 0); //设置缓存大小,1080p可将值调大
	av_dict_set(&options, "rtsp_transport", "udp", 0);
	av_dict_set(&options, "stimeout", "20000000", 0); //设置超时断开连接时间,单位微秒
	av_dict_set(&options, "max_delay", "500000", 0); //设置最大时延

	int ret = 0;

	//const char* url = "rtmp://192.168.0.188/app/rtmpstream0";
	const char* url = "rtsp://192.168.0.188:554/stream/main";
	//const char* url = "rtmp://192.168.0.112:1936/live/123";
	AVFormatContext* m_fmt_ctx = avformat_alloc_context();

	ret = avformat_open_input(&m_fmt_ctx, url, NULL, &options);
	if (ret != 0) {
		cout << "avformat_open_input is fail !!!" << endl;
		return -1;
	}

	//设置查找时间以避免耗时过长
	m_fmt_ctx->probesize = 1000;
	m_fmt_ctx->max_analyze_duration = 2048;//AV_TIME_BASE

	ret = avformat_find_stream_info(m_fmt_ctx, NULL);
	if (ret < 0) {
		cout << "avformat_find_stream_info is fail !!!" << endl;
		return -1;
	}

	av_dump_format(m_fmt_ctx, 0, url, 0);

	int video_stream = -1;
		
	video_stream =	av_find_best_stream(m_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

	if (video_stream < 0) {
		return -1;
	}

	AVCodecContext* _codec_ctx = m_fmt_ctx->streams[video_stream]->codec;
	AVCodec* _codec = avcodec_find_decoder(_codec_ctx->codec_id);
	if (_codec == NULL) {
		return -1;
	}

	ret = avcodec_open2(_codec_ctx, _codec, NULL);
	if (ret != 0) {
		return -1;
	}

	int width = m_fmt_ctx->streams[video_stream]->codecpar->width;
	int height = m_fmt_ctx->streams[video_stream]->codecpar->height;
	int pts = m_fmt_ctx->streams[video_stream]->codec->framerate.den;
	AVPixelFormat inputFormat = m_fmt_ctx->streams[video_stream]->codec->pix_fmt;

	ret = SDL_Init(SDL_INIT_VIDEO);

	SDL_Window*  window = SDL_CreateWindow("FFmpeg_Demo", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 800, 600, SDL_WINDOW_SHOWN);
	
	SDL_Renderer* renderer = NULL;

	if (window) {
		renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
		if (!renderer) {
			av_log(NULL, AV_LOG_WARNING, "Failed to initialize a hardware accelerated renderer: %s\n", SDL_GetError());
			renderer = SDL_CreateRenderer(window, -1, 0);
		}
	}

	SDL_Texture* texture = NULL;

	Uint32 format = FFmpegFmt_2_SDLFmt(inputFormat);

	if (!(texture = SDL_CreateTexture(renderer, format, SDL_TEXTUREACCESS_STREAMING, width, height)))
		return -1;

	int frameIndex = 0;

	int got_picture = 0;

	AVPacket* packet = av_packet_alloc();

	AVFrame *frame_yuv = av_frame_alloc();

	int64_t pre_pts = -1;

	int64_t start_time = av_gettime();

	while (frameIndex < 500)
	{

		ret = av_read_frame(m_fmt_ctx, packet);

		if (ret < 0) {
			break;
		}

		if (packet->stream_index == video_stream)
		{
			//FIX:No PTS (Example: Raw H.264)
			//Simple Write PTS
			if (packet->pts == AV_NOPTS_VALUE) {
				//Write PTS
				AVRational time_base1 = m_fmt_ctx->streams[video_stream]->time_base;
				//Duration between 2 frames (us)
				int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(m_fmt_ctx->streams[video_stream]->r_frame_rate);
				//Parameters
				packet->pts = (double)(frameIndex * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
				packet->dts= packet->pts;
				packet->duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
			}
			
			ret = avcodec_decode_video2(_codec_ctx, frame_yuv, &got_picture, packet);
			if (ret < 0) {
				printf("Decode Error.\n");
				return ret;
			}

			if (got_picture) {

				if (pre_pts == -1)
					pre_pts = frame_yuv->pts;

				//frame_yuv->pts -= pre_pts;

				AVRational time_base = m_fmt_ctx->streams[video_stream]->time_base;
				AVRational time_base_q = { 1,AV_TIME_BASE };
				int64_t pts_time = av_rescale_q(frame_yuv->pts, time_base, time_base_q);
				int64_t now_time = av_gettime() - start_time;
				if (pts_time > now_time) {
					cout << "delay time : " << pts_time - now_time << endl;
					av_usleep(pts_time - now_time );
				}
			
				cout << "index: " << frameIndex << " pts:" << frame_yuv->pts <<" size: "<<packet->size <<" playTime :" << pts_time << "  key" << frame_yuv->key_frame<< endl;

				//long long currentTime = av_gettime();
				//if (playTime > (currentTime - startTime))
				//	av_usleep(playTime - currentTime);

				ret = SDL_UpdateYUVTexture(texture, NULL,
					frame_yuv->data[0], frame_yuv->linesize[0],
					frame_yuv->data[1], frame_yuv->linesize[1],
					frame_yuv->data[2], frame_yuv->linesize[2]);

				

				if (ret < 0)
					break;

				ret = SDL_RenderClear(renderer);
				if (ret < 0)
					break;

				ret = SDL_RenderCopy(renderer, texture, NULL, NULL);
				if (ret < 0)
					break;

				SDL_RenderPresent(renderer);

				frameIndex++;
			}
		}

		av_packet_unref(packet);
	}

	SDL_DestroyTexture(texture);

	SDL_DestroyRenderer(renderer);

	SDL_DestroyWindow(window);

	SDL_Quit();

	av_packet_free(&packet);

	avformat_close_input(&m_fmt_ctx);

	return 0;
}

你可能感兴趣的:(c++, ffmpeg)