Dynamically Recording From a Live Stream & EOS Handling
Hello all,
A little over a month ago I posted to the mailing list about a solution for
recording from a live stream while handling the EOS required by many muxers
for mp4/mkv formats. That previous solution only worked if you had just an
audio stream or just a video stream; all four different ways I tried to make
it work otherwise ended up with blocked threads and things going haywire.
Recently I received some help from thaytan on IRC that provided the solution.
Let me quickly draw a rudimentary diagram of the pipeline; hopefully it
doesn't get mangled too badly once sent.
           /--> video --> tee ---> blah blah --> xvimagesink
          /                   \
rtspsrc -<                     \-->|---------------------------------|
          \                        | inputs --> muxer --> filesink   |
           \--> audio --> tee ---->|---------------------------------|
                              \--> blah blah --> autoaudiosink
The basic idea is that the source is split into a viewing/listening branch
and a recording bin that encodes/muxes to a filesink. In my particular case
I'm reading mp4, so I branched prior to decoding and all I needed to do was
remux and save.
Initially the solution was to send a custom event through the pipeline. The
reason is that if you send EOS to just the recording bin, it is never
received in the bus call handler, since the pipeline only posts that message
once *every* sink in the pipeline has received the event, and here they
don't. It may be worth thinking about improving this in the 1.0 plans,
though I don't know what that would entail.
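To make the problem concrete, here is a minimal sketch of that naive
approach, using the recording bin and the ghost pad names ("vsink"/"asink")
from the attached example below. The muxer and filesink do finalize the
file, but bus_call() never sees a GST_MESSAGE_EOS because the viewing sinks
are still running:

  /* Sketch only: push EOS straight into the recording bin's ghost sink pads. */
  GstPad *vsink = gst_element_get_static_pad (GST_ELEMENT (recording), "vsink");
  GstPad *asink = gst_element_get_static_pad (GST_ELEMENT (recording), "asink");
  gst_pad_send_event (vsink, gst_event_new_eos ());
  gst_pad_send_event (asink, gst_event_new_eos ());
  gst_object_unref (vsink);
  gst_object_unref (asink);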
The issue with sending a custom event down the pipeline to signal stopping
the recording is a bit of missing code:
http://cgit.freedesktop.org/gstreamer/gstreamer/tree/libs/gst/base/gstbasesrc.c#n1639
I've filed an enhancement bug to see if that can get implemented [1]. It
basically means that my custom event never arrives and I'm in the same boat
as before. This is where thaytan was a huge help. The solution was to build
a custom GstBin subclass, something I would never have thought of, nor had
the expertise to do, at first.
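For reference, the custom-event approach would have looked roughly like the
sketch below (the structure name is only illustrative, and pipeline is the
top-level pipeline from the attached code). Because of the missing
forwarding code in basesrc, the event never makes it downstream:

  /* Sketch of the abandoned approach: a custom serialized downstream event. */
  GstStructure *s = gst_structure_new ("stop-recording", NULL);
  GstEvent *stop = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
  gst_element_send_event (pipeline, stop);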
The idea is to asynchronously block the tee src pads feeding the recording
bin and have the block callback send an EOS into the custom bin. Since that
EOS will never make it back to the bus call handler in the application, the
bin's _handle_message function catches it and signals the application via a
custom message. This all worked perfectly. I've attached simple code using
test sources. You will see that the video is displayed for longer than the
recording; we simulate stopping the recording with a g_timeout callback that
blocks the pads and instigates the end of the recording.
I just wanted to make sure this was documented, since I found many, many
people searching Google for something similar but no solutions.
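For anyone who wants to try the attached example: it should build against
GStreamer 0.10 with something along the lines of the following (the source
file name here is just whatever you save the attachment as):

  gcc -Wall eos-test.c -o eos-test $(pkg-config --cflags --libs gstreamer-0.10)

You will also need the plugins it uses (xvimagesink, pulsesink, lamemp3enc,
ffenc_mpeg4, mp4mux) installed at runtime.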
[1] https://bugzilla.gnome.org/show_bug.cgi?id=635718
Thanks for all the help, and thanks for GStreamer - it's great!
--
Nathanael d. Noblet
#include <stdlib.h>
#include <gst/gst.h>
#define INCLUDE_AUDIO 1
struct PipeData {
  GstElement *pipeline;
  GstPad *asrcpad;
  GstPad *vsrcpad;
};
typedef struct _RecordingBin
{
  GstBin parent;
} RecordingBin;

typedef struct _RecordingBinClass
{
  GstBinClass parent;
} RecordingBinClass;
#define GST_TYPE_RECORDING_BIN recording_bin_get_type()
GST_BOILERPLATE (RecordingBin, recording_bin, GstBin, GST_TYPE_BIN);
static void
recording_bin_handle_message (GstBin * bin, GstMessage * message)
{
  RecordingBin *recording = (RecordingBin *)(bin);
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS:
      g_print ("Got EOS in the recording bin\n");
      /* FIXME: Remove the bin from the pipeline and dispose of it */
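      /* One possible way to signal the application from here (a sketch, the
       * structure name is only illustrative): post a custom application
       * message, which bus_call() below already handles in its
       * GST_MESSAGE_APPLICATION case. */
      gst_element_post_message (GST_ELEMENT_CAST (recording),
          gst_message_new_application (GST_OBJECT_CAST (recording),
              gst_structure_new ("recording-stopped", NULL)));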
      break;
    default:
      break;
  }
  GST_BIN_CLASS (parent_class)->handle_message (bin, message);
}
static void
recording_bin_base_init (gpointer g_class)
{
}
static void
recording_bin_class_init (RecordingBinClass * klass)
{
  GstBinClass *gstbin_class = GST_BIN_CLASS (klass);
  gstbin_class->handle_message =
      GST_DEBUG_FUNCPTR (recording_bin_handle_message);
}
static void
recording_bin_init (RecordingBin * src,
RecordingBinClass * klass)
{
}
gboolean bus_call(GstBus *bus, GstMessage *msg, void *data)
{
  gchar *debug;
  GError *err;
  GMainLoop *loop = (GMainLoop*)data;
  switch (GST_MESSAGE_TYPE(msg))
  {
    case GST_MESSAGE_APPLICATION:
      g_print("APP received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
      break;
    case GST_MESSAGE_EOS:
      g_print("EOS received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:
      gst_message_parse_error(msg, &err, &debug);
      g_free(debug);
      g_print("BUS CALL %s\n", err->message);
      g_error_free(err);
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}
static void
handle_pad_block (GstPad *pad, gboolean blocked, gpointer user_data)
{
  GstEvent *event;
  if (blocked) {
    GstPad *peer = gst_pad_get_peer (pad);
    /* Unlink the tee branch and push EOS into the recording bin so the
     * muxer can finalize the file, then schedule the unblock. */
    gst_pad_unlink (pad, peer);
    event = gst_event_new_eos();
    gst_pad_send_event (peer, event);
    gst_pad_set_blocked_async (pad, FALSE, handle_pad_block, NULL);
    gst_object_unref (peer);
  }
  else {
    /* Unblock is finished */
  }
}
/**
 * stop_encoding:
 * @data: pointer to the main pipeline data (struct PipeData)
 *
 * GLib timeout callback that runs every 2 seconds. On the second call it
 * blocks the tee src pads feeding the recording bin; the pad-block callback
 * then sends EOS into the bin's sink pads.
 *
 * Returns: FALSE to stop the timeout from firing again, TRUE otherwise
 */
gboolean stop_encoding(gpointer data)
{
  static int called = 0;
  called++;
  g_print("CALLED!! %d times\n",called);
  if(called == 2)
  {
    struct PipeData *pipedata = (struct PipeData *)(data);
    /* Block the src pads of the 2 tees leading to the recording bin. */
    /* In the pad block callback, send EOS into the recording bin. */
    /* Catch EOS coming out of the recording sub-bin and remove the bin. */
    gst_pad_set_blocked_async (pipedata->asrcpad, TRUE, handle_pad_block, NULL);
    gst_pad_set_blocked_async (pipedata->vsrcpad, TRUE, handle_pad_block, NULL);
  }
  return (called == 2) ? FALSE : TRUE;
}
int main(int argc, char* argv[])
{
  GMainLoop *loop;
  gst_init(&argc,&argv);
  loop = g_main_loop_new (NULL, FALSE);
  GstElement *pipeline, *vsource, *vtee, *vqueue, *tover, *xvsink, *evqueue, *vencoder, *muxer, *filesink;
  GstBin *recording;
  GstBus *bus;
  GstPad *srcpad,*sinkpad;
  struct PipeData pipedata;
  // Create gstreamer elements
  pipedata.pipeline = pipeline = gst_pipeline_new ("eos-test-player");
  vsource = gst_element_factory_make ("videotestsrc", "viewing-file-source");
  vtee = gst_element_factory_make ("tee", "viewing-tee");
  vqueue = gst_element_factory_make ("queue2", "viewing-queue");
  tover = gst_element_factory_make ("timeoverlay", "viewing-overlay");
  xvsink = gst_element_factory_make ("xvimagesink", "viewing-xvsink");
  GstElement *asource, *atee, *aqueue, *aequeue, *aencoder, *asink;
  asource = gst_element_factory_make ("audiotestsrc", "viewing-audio-source");
  g_object_set(G_OBJECT(asource), "num-buffers",300, NULL);
  atee = gst_element_factory_make ("tee", "viewing-audio-tee");
  aqueue = gst_element_factory_make ("queue2", "viewing-audio-queue");
  asink = gst_element_factory_make ("pulsesink", "viewing-audio-sink");
  aequeue = gst_element_factory_make ("queue2", "encoding-audio-queue");
  aencoder = gst_element_factory_make ("lamemp3enc", "encoding-audio-encoder");
  recording = GST_BIN(g_object_new (GST_TYPE_RECORDING_BIN, "name", "recbin", NULL));
  evqueue = gst_element_factory_make ("queue2", "encoding-queue");
  vencoder = gst_element_factory_make ("ffenc_mpeg4", "encoding-encoder");
  muxer = gst_element_factory_make ("mp4mux", "encoding-muxer");
  filesink = gst_element_factory_make ("filesink", "encoding-filesink");
  if(!pipeline || !vsource || !xvsink || !tover )
  {
    g_print("Unable to create all necessary elements\n");
    return -1;
  }
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);
  g_object_set(G_OBJECT(vsource), "num-buffers",300, NULL);
  g_object_set(G_OBJECT(filesink),"location","/tmp/output.mp4", NULL);
  g_object_set (G_OBJECT (tover), "halign", "right", NULL);
  g_object_set (G_OBJECT (tover), "valign", "top", NULL);
  g_object_set (G_OBJECT (tover), "shaded-background", TRUE, NULL);
  /* create the recording bin */
  gst_bin_add_many (recording, aequeue, aencoder, evqueue, vencoder, muxer, filesink, NULL);
  sinkpad = gst_element_get_static_pad(evqueue,"sink");
  GstPad *ghost = gst_ghost_pad_new("vsink",sinkpad);
  if(ghost == NULL)
    g_error("Unable to create ghostpad!\n");
  gst_element_add_pad(GST_ELEMENT(recording),ghost);
  gst_object_unref(GST_OBJECT(sinkpad));
  gst_element_link_many(evqueue,vencoder,muxer,filesink,NULL);
  sinkpad = gst_element_get_static_pad(aequeue,"sink");
  gst_element_add_pad(GST_ELEMENT(recording),gst_ghost_pad_new("asink",sinkpad));
  gst_object_unref(GST_OBJECT(sinkpad));
  gst_element_link_many(aequeue,aencoder,muxer,NULL);
  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), asource, atee, aqueue, asink, vsource, vtee, vqueue, tover,
      xvsink, recording, NULL);
  /* link video elements */
  gst_element_link_many(vsource,tover,vtee,NULL);
  srcpad = gst_element_get_request_pad(vtee,"src0");
  sinkpad = gst_element_get_pad(vqueue,"sink");
  gst_pad_link(srcpad,sinkpad);
  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);
  gst_element_link(vqueue,xvsink);
  /* link the video tee into the recording bin */
  pipedata.vsrcpad = gst_element_get_request_pad(vtee,"src1");
  sinkpad = gst_element_get_pad(GST_ELEMENT(recording),"vsink");
  gst_pad_link(pipedata.vsrcpad,sinkpad);
  gst_object_unref (sinkpad);
  /* link audio elements */
  gst_element_link_many(asource,atee,NULL);
  srcpad = gst_element_get_request_pad(atee,"src0");
  sinkpad = gst_element_get_pad(aqueue,"sink");
  /* link first, then drop our pad references */
  gst_pad_link(srcpad,sinkpad);
  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);
  gst_element_link(aqueue,asink);
  /* link the audio tee into the recording bin */
  pipedata.asrcpad = gst_element_get_request_pad(atee,"src1");
  sinkpad = gst_element_get_pad(GST_ELEMENT(recording),"asink");
  gst_pad_link(pipedata.asrcpad,sinkpad);
  gst_object_unref (sinkpad);
  /* Iterate */
  g_print ("Running...\n");
  gst_element_set_state(pipeline,GST_STATE_PLAYING);
  g_timeout_add_seconds(2,(GSourceFunc)stop_encoding, &pipedata);
  g_main_loop_run (loop);
  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}