First, test with the command-line tool:
gst-launch-1.0 -v v4l2src device=/dev/video0 ! 'video/x-h264, width=1280, height=720, framerate=30/1' ! queue ! h264parse ! flvmux ! rtmpsink location='rtmp://192.168.1.102/live'
Once this command is running, the stream can be viewed on the PC at 192.168.1.102 through a streaming media server. You can use nginx (with its RTMP module) or SRS as the server, and create an HTML page like the one below to watch it in a browser; a quick command-line check of the stream is also shown after the HTML.
<h1>01</h1>
<object width='640' height='377' id='SampleMediaPlayback' name='SampleMediaPlayback' type='application/x-shockwave-flash' classid='clsid:d27cdb6e-ae6d-11cf-96b8-444553540000' >
<param name='movie' value='swfs/SampleMediaPlayback.swf' />
<param name='quality' value='high' />
<param name='bgcolor' value='#000000' />
<param name='allowfullscreen' value='true' />
<embed src='SampleMediaPlayback.swf' width='640' height='377' id='SampleMediaPlayback' quality='high' bgcolor='#000000' name='SampleMediaPlayback' allowfullscreen='true' pluginspage='http://www.adobe.com/go/getflashplayer' flashvars='&src=rtmp://192.168.1.102:1935/live&autoHideControlBar=true&streamType=live&autoPlay=true&verbose=true' type='application/x-shockwave-flash'>
</embed>
</object>
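If you only want to confirm that the stream is actually reaching the server, a player that speaks RTMP can also pull it directly from the command line, without the Flash page. A minimal check, assuming ffplay (part of FFmpeg) is installed on the viewing PC and the URL matches the rtmpsink location used above:
ffplay rtmp://192.168.1.102/live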
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <glib.h>
#include <gst/gst.h>
//gst-launch-1.0 -v v4l2src device=/dev/video0 ! 'video/x-h264, width=640, height=360, framerate=30/1' ! queue ! h264parse ! flvmux ! rtmpsink location='rtmp://192.168.1.102/live'
typedef struct _GstDataStruct
{
GstElement *pipeline;
GstElement *v4l2src;
GstElement *queue;
GstElement *h264parse;
GstElement *flvmux;
GstElement *rtmpsink;
GstBus *bus;
guint bus_watch_id;
guint sourceid; /* To control the GSource */
GMainLoop *loop; /* GLib's Main Loop */
} GstDataStruct;
static GstDataStruct GstData;
static unsigned int frame_width;
static unsigned int frame_height;
static unsigned int frame_rate;
static unsigned int frame_bps;
static char devname[32] = {0};
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData)
{
gchar *debug;
GError *error;
GMainLoop *loop = pGstData->loop;
GST_DEBUG ("got message %s",gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
printf("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
default:
break;
}
return TRUE;
}
int main(int argc, char *argv[])
{
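// Usage: ./v4l2_rtmp <device> <width> <height> <framerate> <bitrate>
// With any other argument count the defaults below are used. The bitrate
// value is only printed, since the camera already delivers encoded H.264.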
if(argc != 6)
{
frame_width = 1280;
frame_height = 720;
frame_rate = 30;
frame_bps = 1500000;
sprintf(devname, "%s", "/dev/video0");
}
else
{
frame_width = atoi(argv[2]);
frame_height = atoi(argv[3]);
frame_rate = atoi(argv[4]);
frame_bps = atoi(argv[5]);
snprintf(devname, sizeof(devname), "%s", argv[1]); // avoid overflowing devname
}
printf("width:%d, height:%d, rate:%d, bps:%d, dev:%s\n", frame_width, frame_height, frame_rate, frame_bps, devname);
printf("============= v4l2 rtmp gst init start ============\n");
gst_init (NULL, NULL);
printf("=========== create v4l2 rtmp pipeline =============\n");
GstData.pipeline = gst_pipeline_new ("v4l2_rtmp");
GstData.v4l2src = gst_element_factory_make ("v4l2src", "v4l2src");
GstData.queue = gst_element_factory_make ("queue", "queue");
GstData.h264parse = gst_element_factory_make ("h264parse", "h264parse");
GstData.flvmux = gst_element_factory_make ("flvmux", "flvmux");
GstData.rtmpsink = gst_element_factory_make ("rtmpsink", "rtmpsink");
if (!GstData.pipeline || !GstData.v4l2src || !GstData.queue ||
!GstData.h264parse || !GstData.flvmux || !GstData.rtmpsink)
{
g_printerr ("One element could not be created... Exit\n");
return -1;
}
printf("============ link v4l2 rtmp pipeline ==============\n");
GstCaps *caps_v4l2src;
caps_v4l2src = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING,"byte-stream",
"alignment", G_TYPE_STRING, "au",
"width", G_TYPE_INT, frame_width,
"height", G_TYPE_INT, frame_height,
"framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);
GstCaps *caps_flv_sink;
caps_flv_sink = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING,"avc",
"alignment", G_TYPE_STRING, "au",
"width", G_TYPE_INT, frame_width,
"height", G_TYPE_INT, frame_height,
"framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);
g_object_set(G_OBJECT(GstData.v4l2src), "device", devname, NULL);
g_object_set(G_OBJECT(GstData.rtmpsink), "location", "rtmp://192.168.1.102/live", NULL);
// Note: the location property here is the RTMP URL; it must match the RTMP URL in the HTML file, otherwise the video cannot be viewed.
GstData.bus = gst_pipeline_get_bus(GST_PIPELINE(GstData.pipeline));
GstData.bus_watch_id = gst_bus_add_watch(GstData.bus, (GstBusFunc)bus_msg_call, (gpointer)&GstData);
gst_object_unref(GstData.bus);
gst_bin_add_many(GST_BIN(GstData.pipeline), GstData.v4l2src, GstData.queue,
GstData.h264parse, GstData.flvmux, GstData.rtmpsink,NULL);
if(gst_element_link_filtered(GstData.v4l2src, GstData.queue, caps_v4l2src) != TRUE)
{
g_printerr ("GstData.v4l2src could not link GstData.queue\n");
gst_object_unref (GstData.pipeline);
return -1;
}
gst_caps_unref (caps_v4l2src);
if(gst_element_link(GstData.queue, GstData.h264parse) != TRUE)
{
g_printerr ("GstData.queue could not link GstData.h264parse\n");
gst_object_unref (GstData.pipeline);
return -1;
}
if(gst_element_link_filtered(GstData.h264parse, GstData.flvmux, caps_flv_sink) != TRUE)
{
g_printerr ("GstData.h264parse could not link GstData.flvmux\n");
gst_object_unref (GstData.pipeline);
return -1;
}
gst_caps_unref (caps_flv_sink);
if(gst_element_link(GstData.flvmux, GstData.rtmpsink) != TRUE)
{
g_printerr ("GstData.h264parse could not link GstData.flvmux\n");
gst_object_unref (GstData.pipeline);
return -1;
}
printf("========= link v4l2 rtmp pipeline running ==========\n");
gst_element_set_state (GstData.pipeline, GST_STATE_PLAYING);
GstData.loop = g_main_loop_new(NULL, FALSE); // Create gstreamer loop
g_main_loop_run(GstData.loop); // Loop will run until receiving EOS (end-of-stream), will block here
printf("g_main_loop_run returned, stopping rtmp!\n");
gst_element_set_state (GstData.pipeline, GST_STATE_NULL); // Stop pipeline to be released
printf("Deleting pipeline\n");
gst_object_unref (GstData.pipeline); // This will also delete all pipeline elements
g_source_remove(GstData.bus_watch_id);
g_main_loop_unref(GstData.loop);
return 0;
}
This code is compiled with gcc on Ubuntu 16.04; the Makefile is shown below. The GStreamer libraries must first be copied from the system library directory into a libs_x86 directory under the current directory. Note that the system's GStreamer shared-library links all carry a .so.0 suffix; drop the trailing .0 and keep the name up to .so (a shell sketch of this step follows the Makefile).
CFLAGS = -v -g -Wall -Wno-shift-count-overflow -I./include
LDFLAGS = -L./libs_x86
CC = gcc
EXTRA_LIBS = -lstdc++ -lm -lpthread -lgstreamer-1.0 -lgstbase-1.0 -lgobject-2.0 -lgmodule-2.0 -lglib-2.0 -lpcre -lrt
SRC = v4l2_rtmp.c
TARGET = v4l2_rtmp
ALL:
$(CC) $(CFLAGS) $(LDFLAGS) $(SRC) -o $(TARGET) $(EXTRA_LIBS)
clean:
rm v4l2_rtmp *.raw *.mp4 *.wav -rf
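The library copy/rename step described above can be scripted. A rough sketch, assuming the libraries can be located via ldconfig (adjust the library list to whatever the linker actually asks for):
# Copy the shared libraries the Makefile links against into libs_x86/,
# dropping the trailing .0 so the file names end in .so
mkdir -p libs_x86
for lib in gstreamer-1.0 gstbase-1.0 gobject-2.0 gmodule-2.0 glib-2.0; do
    src=$(ldconfig -p | awk -v l="lib${lib}.so.0" '$1 == l && /x86-64/ {print $NF; exit}')
    cp "$src" "libs_x86/lib${lib}.so"
done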
This code is only a simple implementation: it does nothing beyond the basic pipeline, has not been verified for long-term network stability, and is meant only to demonstrate feasibility. Audio has not been added yet; I will add it in a few days.
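For reference, here is a sketch of what the command line could look like once audio is added, assuming an ALSA capture device and the voaacenc AAC encoder plugin are available (untested here; element names and devices may need adjusting for your board):
gst-launch-1.0 -v v4l2src device=/dev/video0 ! 'video/x-h264, width=1280, height=720, framerate=30/1' ! queue ! h264parse ! flvmux name=mux ! rtmpsink location='rtmp://192.168.1.102/live' alsasrc ! audioconvert ! audioresample ! voaacenc ! aacparse ! mux.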