GStreamer: remote camera capture, UDP streaming, local display, and saving to an AVI file (sender side)

After two weeks of work I have finally finished the C program that uses GStreamer to capture video from a camera, stream it over UDP, display it locally, and save it to an AVI file. I am sharing it here; comments and corrections are welcome.

The program still has one problem: the actual recording time is short, but the saved file plays back for much longer. If anyone knows how to fix this, please let me know. Thanks in advance!

send:

gst-launch-0.10 -v gstrtpbin name=rtpbin v4l2src device=/dev/video0 ! videorate ! videoscale ! ffmpegcolorspace ! 'video/x-raw-yuv, width=(int)320, height=(int)240, framerate=(fraction)15/1' !  rtpvrawpay ! rtpbin.send_rtp_sink_0 rtpbin.send_rtp_src_0 ! multiudpsink clients="127.0.0.1:9996" rtpbin.send_rtcp_src_0 ! multiudpsink clients="127.0.0.1:9997" sync=false async=false udpsrc port=10000 ! rtpbin.recv_rtcp_sink_0
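For reference, a matching receive-side pipeline (local display plus saving to AVI) could look roughly like the sketch below. It is untested here: the caps on udpsrc (especially sampling and depth) must be copied from the caps printed by the sender's -v output, and jpegenc ! avimux is only one of several ways to produce an AVI file. The full receiver is described in the follow-up post linked at the end.

receive (sketch):

gst-launch-0.10 -v gstrtpbin name=rtpbin \
    udpsrc port=9996 caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)YCbCr-4:2:0, depth=(string)8, width=(string)320, height=(string)240, payload=(int)96" ! rtpbin.recv_rtp_sink_0 \
    rtpbin. ! rtpvrawdepay ! ffmpegcolorspace ! tee name=t \
    t. ! queue ! autovideosink \
    t. ! queue ! jpegenc ! avimux ! filesink location=recv.avi \
    udpsrc port=9997 ! rtpbin.recv_rtcp_sink_0 \
    rtpbin.send_rtcp_src_0 ! udpsink host=127.0.0.1 port=10000 sync=false async=false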

C code:
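To build the program against GStreamer 0.10 (assuming the source file is saved as sender.c):

gcc sender.c -o sender $(pkg-config --cflags --libs gstreamer-0.10)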

#include <string.h>
#include <math.h>

#include <gst/gst.h>
#define HOST_IP "127.0.0.1"      /* receiver address for the RTP stream */
#define PORT 9996                /* receiver RTP port */
#define Video_dev "/dev/video0"  /* V4L2 capture device */
#define Video_Caps "video/x-raw-yuv, width=(int)320, height=(int)240, framerate=(fraction)30/1"


int 
main(int argc,char *argv[])
{
	GstElement *vsource,*vrate,*vscale,*vconvert;
	GstElement *vrtpbin,*vrtpsink,*vrtppay;
	GstElement *pipeline;	
	GMainLoop *loop;
	GstCaps *caps;
	GstPad *srcpad,*sinkpad;

	gst_init(&argc,&argv);

	pipeline=gst_pipeline_new(NULL);
	g_assert(pipeline);

	/* capture and format-conversion elements */
	vsource=gst_element_factory_make("v4l2src","vsource");
	g_assert(vsource);
	vrate=gst_element_factory_make("videorate","vrate");
	g_assert(vrate);
	vscale=gst_element_factory_make("videoscale","vscale");
	g_assert(vscale);
	vconvert=gst_element_factory_make("ffmpegcolorspace","vconvert");
	g_assert(vconvert);
	


	/* RTP payloader for raw video; also point v4l2src at the capture device */
	vrtppay=gst_element_factory_make("rtpvrawpay","vrtppay");
	g_assert(vrtppay);
	g_object_set(G_OBJECT(vsource),"device",Video_dev,NULL);
	

	gst_bin_add_many(GST_BIN(pipeline),vsource,vrate,vscale,vconvert,vrtppay,NULL);

	/* force the format given by Video_Caps between the converter and the payloader */
	caps=gst_caps_from_string(Video_Caps);

	if(!gst_element_link_many(vsource,vrate,vscale,vconvert,NULL)){
		g_error("Failed to link capture elements");
	}
	if(!gst_element_link_filtered(vconvert,vrtppay,caps)){
		g_error("Failed to link converter to payloader with caps filter");
	}
	gst_caps_unref(caps);
	

	/* RTP session manager and the UDP sink that sends the RTP stream to the receiver */
	vrtpbin=gst_element_factory_make("gstrtpbin","vrtpbin");
	g_assert(vrtpbin);
	gst_bin_add(GST_BIN(pipeline),vrtpbin);

	vrtpsink=gst_element_factory_make("udpsink","vrtpsink");
	g_assert(vrtpsink);
	g_object_set(vrtpsink,"port",PORT,"host",HOST_IP,NULL);
	gst_bin_add_many(GST_BIN(pipeline),vrtpsink,NULL);

	/* link payloader -> rtpbin (request pad), then rtpbin -> udpsink */
	sinkpad=gst_element_get_request_pad(vrtpbin,"send_rtp_sink_0");
	srcpad=gst_element_get_static_pad(vrtppay,"src");
	if(gst_pad_link(srcpad,sinkpad)!=GST_PAD_LINK_OK)
		g_error("Failed to link video payloader to vrtpbin");
	gst_object_unref(srcpad);

	srcpad=gst_element_get_static_pad(vrtpbin,"send_rtp_src_0");
	sinkpad=gst_element_get_static_pad(vrtpsink,"sink");
	if(gst_pad_link(srcpad,sinkpad)!=GST_PAD_LINK_OK)
		g_error("Failed to link vrtpbin to vrtpsink");
	gst_object_unref(srcpad);
	gst_object_unref(sinkpad);


	g_print("starting sender pipeline\n");
//	gst_element_set_state(pipeline,SGT_STATE_PLAYING);
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	loop=g_main_loop_new(NULL,FALSE);
	g_main_loop_run(loop);
	g_print("stopping sender pipeline\n");
	gst_element_set_state(pipeline,GST_STATE_NULL);
	gst_object_unref(pipeline);
	return 0;

}
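One possible improvement (a sketch, not part of the original program): the main loop above has no bus watch, so pipeline errors or end-of-stream never stop it. A handler along these lines could be added, with gst_bus_add_watch() called in main() before g_main_loop_run():

/* Quit the main loop on error or end-of-stream; attach with gst_bus_add_watch(). */
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
	GMainLoop *loop = (GMainLoop *)data;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_ERROR: {
		GError *err = NULL;
		gchar *dbg = NULL;

		gst_message_parse_error(msg, &err, &dbg);
		g_printerr("Error: %s\n", err->message);
		g_error_free(err);
		g_free(dbg);
		g_main_loop_quit(loop);
		break;
	}
	case GST_MESSAGE_EOS:
		g_main_loop_quit(loop);
		break;
	default:
		break;
	}
	return TRUE;
}

/* In main(), after loop=g_main_loop_new(NULL,FALSE):
 *     GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
 *     gst_bus_add_watch(bus, bus_call, loop);
 *     gst_object_unref(bus);
 */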
If anything here is incomplete, please point it out. Thanks!

The receiver side is covered in the next post: http://blog.csdn.net/zhujinghao09/article/details/8528879

