Implementing Video Mute During GStreamer Media Playback

The project plays video files with GStreamer, using v4l2sink as the videosink for rendering.
A video mute feature is needed.

Implementing audio mute is easy: audiosink elements implement a mute property, so setting that property is all it takes.
Videosink elements, however, generally do not implement a mute property. To get this feature, you can either implement mute-like behaviour in the corresponding videosink yourself, or find a way to do the work at the pipeline level.
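
For comparison, here is a minimal sketch of the audio case, assuming a sink that implements the GstStreamVolume interface and therefore exposes "mute" (pulsesink does; with playbin you would set "mute" on playbin itself):

/* minimal sketch: mute/unmute audio through the sink's "mute" property,
 * assuming the sink implements GstStreamVolume (e.g. pulsesink) */
g_object_set(g_audiosink, "mute", TRUE, NULL);   /* mute   */
g_object_set(g_audiosink, "mute", FALSE, NULL);  /* unmute */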

GStreamer supports making structural changes to a pipeline while it is in the PLAYING state. Based on this, when video should be muted we can replace the real videosink with a fakesink: video stops being rendered while audio is unaffected.
Because this requires structural manipulation of the pipeline, we can no longer use the high-level playbin element directly; at minimum we have to drop down to uridecodebin.
The steps for dynamically manipulating a pipeline are described in the official documentation; the core idea is to add a block probe at a suitable pad to dam the data flow, then swap the elements.
https://gstreamer.freedesktop.org/documentation/application-development/advanced/pipeline-manipulation.html

The code implementing this approach:

#include <gst/gst.h>

static GstElement*  g_pipeline = NULL;
static GstElement*  g_source = NULL;
static GstElement*  g_videoqueue = NULL;
static GstElement*  g_audioqueue = NULL;
static GstElement*  g_audiosink = NULL;
static GstElement*  g_videosink = NULL;
static GstElement*  g_videobin = NULL;
static GstElement*  g_audiobin = NULL;
static GstElement*  g_videofakesink = NULL;
static GMainLoop*   g_loop = NULL;

static void pad_added_handler(GstElement* src, GstPad* new_pad, gpointer data)
{
  GstCaps* new_pad_caps = gst_pad_query_caps(new_pad, NULL);
  GstStructure* new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
  const gchar* new_pad_type = gst_structure_get_name(new_pad_struct);

  /* link the new pad to the audio or video bin based on its caps */
  if (g_str_has_prefix(new_pad_type, "audio/x-raw")) {
    GstPad* sink_pad = gst_element_get_static_pad(GST_ELEMENT(g_audiobin), "sink");
    gst_pad_link(new_pad, sink_pad);
    gst_object_unref(sink_pad);
  }
  else if (g_str_has_prefix(new_pad_type, "video/x-raw")) {
    GstPad* sink_pad = gst_element_get_static_pad(GST_ELEMENT(g_videobin), "sink");
    gst_pad_link(new_pad, sink_pad);
    gst_object_unref(sink_pad);
  }
  gst_caps_unref(new_pad_caps);
}

static GstPadProbeReturn block_pad_probe_video_mute_cb(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
  /* remove this probe */
  gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));

  g_videofakesink = gst_element_factory_make("fakesink", NULL);
  g_assert(g_videofakesink != NULL);

  /* sync=TRUE keeps the fakesink consuming buffers at the normal playback rate */
  g_object_set(g_videofakesink, "sync", TRUE, NULL);

  /* swap the real videosink for the fakesink; gst_bin_remove() drops the bin's ref */
  gst_element_set_state(g_videosink, GST_STATE_NULL);
  gst_bin_remove(GST_BIN(g_videobin), g_videosink);
  gst_bin_add(GST_BIN(g_videobin), g_videofakesink);
  gst_element_link_many(g_videoqueue, g_videofakesink, NULL);
  gst_element_set_state(g_videofakesink, GST_STATE_PLAYING);

  return GST_PAD_PROBE_OK;
}

static GstPadProbeReturn block_pad_probe_video_unmute_cb(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
  /* remove this probe */
  gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));

  /* remove the fakesink */
  gst_element_set_state(g_videofakesink, GST_STATE_NULL);
  gst_bin_remove(GST_BIN(g_videobin), g_videofakesink);

  /* recreate and add the v4l2sink */
  g_videosink = gst_element_factory_make("v4l2sink", NULL);
  g_assert(g_videosink != NULL);
  gst_bin_add(GST_BIN(g_videobin), g_videosink);
  gst_element_link_many(g_videoqueue, g_videosink, NULL);
  gst_element_set_state(g_videosink, GST_STATE_PLAYING);

  return GST_PAD_PROBE_OK;
}

static void video_mute()
{
  GstPad* blockpad = gst_element_get_static_pad(g_videoqueue, "src");
  gst_pad_add_probe(blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, block_pad_probe_video_mute_cb, NULL, NULL);
  gst_object_unref(blockpad);
}

static void video_unmute()
{
  GstPad* blockpad = gst_element_get_static_pad(g_videoqueue, "src");
  gst_pad_add_probe(blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, block_pad_probe_video_unmute_cb, NULL, NULL);
  gst_object_unref(blockpad);
}

static gboolean timer_handler(gpointer data)
{
  static gboolean mute = TRUE;

  if (mute)
  {
    video_mute();
    mute = FALSE;
  }
  else
  {
    video_unmute();
    mute = TRUE;
  }
  return TRUE; /* keep the timer firing */
}

static gboolean bus_event_handler(GstBus* bus, GstMessage* msg, gpointer data)
{
  switch (GST_MESSAGE_TYPE(msg))
  {
    /* EOS: quit the main loop */
    case GST_MESSAGE_EOS:
      g_main_loop_quit(g_loop);
      break;
    default:
      break;
  }
  return TRUE; /* keep watching the bus */
}

int main(int argc, char *argv[])
{
  gst_init(&argc, &argv);
  
  /* create pipeline and components */
  g_pipeline = gst_pipeline_new("player");
  g_assert(g_pipeline != NULL);
  g_source = gst_element_factory_make("uridecodebin", NULL);
  g_assert(g_source != NULL);
  g_videoqueue = gst_element_factory_make("queue", NULL);
  g_assert(g_videoqueue != NULL);
  g_audioqueue = gst_element_factory_make("queue", NULL);
  g_assert(g_audioqueue != NULL);
  g_audiosink = gst_element_factory_make("alsasink", NULL);
  g_assert(g_audiosink != NULL);
  g_videosink = gst_element_factory_make("v4l2sink", NULL);
  g_assert(g_videosink != NULL);

  { /* create video bin */
    g_videobin = gst_bin_new("videobin");
    g_assert(g_videobin != NULL);
    gst_bin_add_many(GST_BIN(g_videobin), g_videoqueue, g_videosink,  NULL);
    gst_element_link_many(g_videoqueue, g_videosink, NULL);
    GstPad* pad = gst_element_get_static_pad(g_videoqueue, "sink");
    GstPad* ghost_pad = gst_ghost_pad_new ("sink", pad);
    gst_pad_set_active(ghost_pad, TRUE);
    gst_element_add_pad(g_videobin, ghost_pad);
    gst_object_unref(pad);
  }
  { /*create audio bin*/
    g_audiobin = gst_bin_new("audiobin");
    g_assert(g_audiobin != NULL);
    gst_bin_add_many(GST_BIN(g_audiobin), g_audioqueue, g_audiosink, NULL);
    gst_element_link_many(g_audioqueue, g_audiosink, NULL);
    GstPad* pad = gst_element_get_static_pad(g_audioqueue, "sink");
    GstPad* ghost_pad = gst_ghost_pad_new ("sink", pad);
    gst_pad_set_active(ghost_pad, TRUE);
    gst_element_add_pad(GST_ELEMENT(g_audiobin), ghost_pad);
    gst_object_unref(pad);
  }
  
  /* add components to pipeline*/
  gst_bin_add_many(GST_BIN(g_pipeline), g_source, g_audiobin, g_videobin, NULL);
  
  /* set uri to uridecodebin */
  g_object_set(g_source, "uri", "file:///home/test.mp4", NULL);

  /* register pad-added callback */
  g_signal_connect(g_source, "pad-added", G_CALLBACK(pad_added_handler), NULL);

  /* watch the bus event */
  guint bus_id = gst_bus_add_watch(GST_ELEMENT_BUS(g_pipeline), bus_event_handler, NULL);

  /* play */
  gst_element_set_state(g_pipeline, GST_STATE_PLAYING);

  /* set timer to switch video mute status */
  guint timer_id = g_timeout_add(3000, timer_handler, NULL);

  /* run the main loop*/
  g_loop = g_main_loop_new(NULL, FALSE);
  g_main_loop_run(g_loop);

  gst_element_set_state(g_pipeline, GST_STATE_NULL);
  gst_object_unref(g_pipeline);
  return 0;
}

In practice, the videobin and audiobin will likely contain additional elements such as convert, crop, and resample; add them and adapt the code as needed, as in the sketch below.
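
As an illustration, a minimal sketch of the videobin construction with a videoconvert inserted between the queue and the sink (the convert variable is illustrative):

  /* sketch: videobin with an extra videoconvert between queue and sink */
  GstElement* convert = gst_element_factory_make("videoconvert", NULL);
  g_assert(convert != NULL);
  gst_bin_add_many(GST_BIN(g_videobin), g_videoqueue, convert, g_videosink, NULL);
  gst_element_link_many(g_videoqueue, convert, g_videosink, NULL);
  /* ghost pad setup is unchanged: it still targets g_videoqueue's sink pad */

Note that with extra elements in the bin, the mute/unmute callbacks must relink the sink from the element directly upstream of it (here convert, not g_videoqueue).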

The benefit of the dynamic pipeline-modification approach is that it does not depend on the specific type of videosink.

Besides dynamically modifying the pipeline structure, video mute can also be implemented inside the videosink itself, i.e. by adding and implementing a mute property on the videosink, as mentioned above.
Taking v4l2sink as an example (without xwindow support),
we can act on the mute property's value inside gst_v4l2sink_show_frame: when mute is set to TRUE, replace the next frame with all-black data and drop subsequent frames (best if the driver offers a clear-screen API),
then when mute is set back to FALSE, simply resume pushing frames to v4l2sink.
The catch is that you have to generate an all-black frame yourself according to the raw data format (RGB, YUV, etc., also accounting for alignment); otherwise the last frame stays on screen after rendering stops.
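
As a sketch of the black-frame part, assuming I420 data with no row padding (the function name and the no-padding assumption are mine; real v4l2 buffers may carry per-plane strides that must be respected):

#include <string.h>
#include <gst/gst.h>

/* sketch: fill an I420 buffer with black (Y=16, U=V=128 in video range) */
static void fill_black_i420(GstBuffer* buf, gint width, gint height)
{
  GstMapInfo map;
  if (!gst_buffer_map(buf, &map, GST_MAP_WRITE))
    return;
  gsize y_size = (gsize)width * height;         /* luma plane          */
  memset(map.data, 16, y_size);                 /* Y: black            */
  memset(map.data + y_size, 128, y_size / 2);   /* U+V: neutral chroma */
  gst_buffer_unmap(buf, &map);
}

For RGB formats the equivalent is a plain memset to 0 over the mapped data.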
