Getting MIPI CSI camera video into OpenCV on the RK3399

At the request of a reader who commented on the previous post, here is part of the code:

Header file:

/* The blog engine stripped the angle-bracketed header names; the includes
 * below are a reconstruction of what this header needs, not the original list. */
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <glib.h>
 
#include <opencv2/opencv.hpp>
#include "gdk-pixbuf-2.0/gdk-pixbuf/gdk-pixbuf.h"
 
#include "../defines.h"
 
#ifndef GSTCODE_H
#define GSTCODE_H
 
 
#ifdef __JETSON_TX2__
    #define VIDEOSRC            "nvarguscamerasrc"
    #define VIDEOTYPE           "NV12"
    #define VIDEOCONVERT        "nvvidconv"
    #define VIDEOSINK           "nvoverlaysink"
    #define VIDEOSRCCAPS        "video/x-raw(memory:NVMM)"
    #define VIDEOENCODER        "nvv4l2h264enc"
#else
    #define VIDEOSRC            "rkisp"
    #define VIDEOTYPE           "NV12"
    #define VIDEOCONVERT        "videoconvert"
    #define VIDEOSINK           "rkximagesink"
    #define VIDEOSRCCAPS        "video/x-raw"
    #define VIDEOENCODER        "nvv4l2h264enc"   /* note: NVIDIA's encoder name, unused below; the Rockchip MPP H.264 encoder is typically mpph264enc */
#endif
#define FILESINK                "filesink"
 
typedef struct {
    GstPipeline *pipeline;
    GstElement   *src;
    GstElement  *srcconvert;
    GstCaps     *caps;
    GstElement  *tee;
        //
    GstElement  *queue;
    GstElement  *convert;
    GstElement  *videosink;
        //
    GstElement  *appqueue;
    GstElement  *appconvert;
    GstCaps     *appcaps;
    GstElement  *appsink;
        //
    GMainLoop   *loop;
    guint       sourceid;
}gst_app_t;
 
extern gst_app_t gst_app;
extern gst_app_t *app;
 
void init_gst_elements(int argc, char *argv[]);
 
void terminal_gst_element();
 
class gstcode
{
public:
    gstcode();
};
 
#endif // GSTCODE_H
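
For reference (not in the original post), the RK3399 branch of these macros corresponds roughly to the following pipeline description; before wiring the elements up one by one it can be handy to try it with gst_parse_launch(). The device and io-mode values are the same ones set on the rkisp source in the code below, and the 640x480@30 caps match the defaults in main.

/* sketch: build the display-only branch of the pipeline from a description string */
GError *err = NULL;
GstElement *pipe = gst_parse_launch(
    "rkisp device=/dev/video0 io-mode=4 ! "
    "video/x-raw,format=NV12,width=640,height=480,framerate=30/1 ! "
    "videoconvert ! rkximagesink", &err);
if(!pipe)
    g_print("parse failed: %s\n", err->message);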

Source file:

#include "gstcode.h"
 
#define CVWIN                   "cvwindows"
 
gst_app_t gst_app;
gst_app_t *app = &gst_app;
 
gstcode::gstcode()
{
 
}
 
static int idx = 0;
 
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer ptr)
{
    gst_app_t *app = (gst_app_t*)ptr;
 
    switch(GST_MESSAGE_TYPE(message)){
 
        case GST_MESSAGE_ERROR:{
                gchar *debug;
                GError *err;
 
                gst_message_parse_error(message, &err, &debug);
                g_print("Error %s\n", err->message);
                g_error_free(err);
                g_free(debug);
                g_main_loop_quit(app->loop);
            }
            break;
 
        case GST_MESSAGE_WARNING:{
                gchar *debug;
                GError *err;
                gchar *name;
 
                gst_message_parse_warning(message, &err, &debug);
                g_print("Warning %s\nDebug %s\n", err->message, debug);
 
                name = GST_MESSAGE_SRC_NAME(message);
 
                g_print("Name of src %s\n", name ? name : "nil");
                g_error_free(err);
                g_free(debug);
            }
            break;
 
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(app->loop);
            break;
 
        case GST_MESSAGE_STATE_CHANGED:
            break;
 
        default:
            g_print("got message %s\n", \
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
            break;
    }
 
    return TRUE;
}
 
static char filename[256] = {0};
 
static int exitticks = 0;
 
static int deel_appsink_buffer(GstAppSink * sink){
    GstSample *sample;
    GstBuffer *buffer;
    GstCaps *caps;
    GstStructure *s;
    gint width, height;
    bool res;
//        g_print("new sample %d\n", idx);
    sample = gst_app_sink_pull_sample(sink); //blocks
//    sample = gst_base_sink_get_last_sample(GST_BASE_SINK_CAST(self));
 
    if(sample){
 
        /* get the snapshot buffer format now. We set the caps on the appsink so
         * that it can only be an rgb buffer. The only thing we have not specified
         * on the caps is the height, which is dependant on the pixel-aspect-ratio
         * of the source material */
        caps = gst_sample_get_caps (sample);
        if (!caps) {
          g_print ("could not get snapshot format\n");
          exit (-1);
        }
        s = gst_caps_get_structure (caps, 0);
 
        /* we need to get the final caps on the buffer to get the size */
        res = gst_structure_get_int (s, "width", &width);
        res &= gst_structure_get_int (s, "height", &height);   /* both must succeed */
        if (!res) {
          g_print ("could not get snapshot dimension\n");
          exit (-1);
        }
 
        /* create pixmap from buffer and save, gstreamer video buffers have a stride
         * that is rounded up to the nearest multiple of 4 */
        buffer = gst_sample_get_buffer (sample);
 
//        GstMemory * mem = gst_buffer_get_all_memory(buffer);
        GstMapInfo map;
 
        /* Mapping a buffer can fail (non-readable) */
        if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
//            g_print("buffer size:%lu\n", map.size);
//            g_print("img width:%d, height:%d\n", width, height);
            if(map.data != NULL){
 
//                cv::Mat mat = cv::Mat(cv::Size(width, height), CV_8U, (char*)map.data);
//                cv::imshow(CVWIN, mat);                                         // test: display the frame in an OpenCV window
                gst_buffer_unmap (buffer, &map);
            }else{
                g_print("data is null!\n");
                if(gst_app_sink_is_eos(GST_APP_SINK_CAST(gst_app.appsink))){
                    g_warning("gst_app_sink_is_eos !\n");
                }
            }
//            g_print("deel frame: %d !\n", idx);
        }else{
            g_warning("buffer cant map, buffer size:%lu !\n", gst_buffer_get_size(buffer));
            if(exitticks > 1){
                g_main_loop_quit(gst_app.loop);
            }else{
                exitticks ++;
            }
        }
        /* the buffer is owned by the sample, so release the sample instead */
        gst_sample_unref(sample);
        idx++;
    }else{
        g_warning("cant get sample from appsink !\n");
    }
//    g_print("\n\n\n##################################################################################################\n\n\n");
    return 1;
}
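 
/* Not part of the original post: a minimal sketch of how the mapped data could
 * be handed to OpenCV. It assumes the appsink delivers NV12 frames of
 * VIDEO_WIDTH x VIDEO_HEIGHT (the RGB appconvert/appcaps branch below is never
 * linked), so the buffer holds width x height*3/2 bytes of Y plus interleaved UV. */
static void show_frame_in_opencv(const GstMapInfo *map, gint width, gint height){
    cv::Mat nv12(height * 3 / 2, width, CV_8UC1, (void*)map->data);   // wrap the NV12 planes
    cv::Mat bgr;
    cv::cvtColor(nv12, bgr, cv::COLOR_YUV2BGR_NV12);                  // expand to BGR
    cv::imshow(CVWIN, bgr);
    cv::waitKey(1);   // required so highgui actually refreshes the window
}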
 
/* matches the appsink "new-sample" callback signature:
 * GstFlowReturn (*new_sample)(GstAppSink *sink, gpointer user_data) */
static GstFlowReturn new_sample_callback(GstAppSink * self, gpointer /*user_data*/){
    deel_appsink_buffer(self);
    return GST_FLOW_OK;
}
 
/* idle handler for g_idle_add(); return TRUE so it keeps pulling frames */
gboolean frame_deel_thread(gpointer){
    deel_appsink_buffer(GST_APP_SINK_CAST(gst_app.appsink));
    return TRUE;
}
 
GstStateChangeReturn state_ret;
 
void init_gst_elements(int argc, char *argv[]){
    GstBus *bus;
    GstSample *sample;
    GSourceFunc func = frame_deel_thread;
    gst_init(&argc, &argv);
 
    app->pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
    /*bus = gst_pipeline_get_bus(app->pipeline);
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
    gst_object_unref(bus);*/
 
    app->src = gst_element_factory_make(VIDEOSRC, "mysrc");
#ifdef __JETSON_TX2__
    g_object_set(app->src, "sensor-id", 1, NULL);
#else
    g_object_set(app->src, "io-mode", 4, "device", "/dev/video0", NULL);
#endif
 
    app->srcconvert = gst_element_factory_make(VIDEOCONVERT, "mysrcconvert");
#ifdef __JETSON_TX2__
    g_object_set(app->srcconvert, "flip-method", 6, NULL);
    app->caps = gst_caps_new_simple("video/x-raw(memory:NVMM)",//
                                    "width", G_TYPE_INT, VIDEO_WIDTH,
                                    "height", G_TYPE_INT, VIDEO_HEIGHT,
                                    "framerate", GST_TYPE_FRACTION, 30, 1,
                                    "format", G_TYPE_STRING, "NV12",
                                    NULL);
#else
    app->caps = gst_caps_new_simple("video/x-raw",//
                                    "width", G_TYPE_INT, VIDEO_WIDTH,
                                    "height", G_TYPE_INT, VIDEO_HEIGHT,
                                    "framerate", GST_TYPE_FRACTION, 30, 1,
                                    "format", G_TYPE_STRING, "NV12",
                                    NULL);
#endif
 
 
    app->tee = gst_element_factory_make("tee", "mytee");
    app->convert = gst_element_factory_make("videoconvert", "myconvert");
    app->queue = gst_element_factory_make("queue2", "myqueue");
    app->videosink = gst_element_factory_make(VIDEOSINK, "myvsink");
 
    app->appqueue = gst_element_factory_make("queue2", "myappqueue");
    app->appconvert = gst_element_factory_make("videoconvert", "myappvconvert");
    app->appcaps = gst_caps_new_simple("video/x-raw",
                                       "width", G_TYPE_INT, VIDEO_WIDTH,
                                       "height", G_TYPE_INT, VIDEO_HEIGHT,
                                       "framerate", GST_TYPE_FRACTION, 30, 1,
                                       "format", G_TYPE_STRING, "RGB",
                                       NULL);
    app->appsink = gst_element_factory_make("appsink", "myappsink");
 
    g_assert(app->src);
    g_assert(app->tee);
    g_assert(app->srcconvert);
    g_assert(app->queue);
    g_assert(app->convert);
    g_assert(app->videosink);
    g_assert(app->appqueue);
    g_assert(app->appconvert);
    g_assert(app->appsink);
 
 
    g_object_set(app->appsink, "emit-signals", false, "drop", true, "max-buffers", 1, "buffer-list", false, NULL);
 
    g_object_set(app->appqueue, "max-size-time", (guint64)500, NULL);   /* max-size-time is a guint64, in nanoseconds */
 
    /* note: this re-enables emit-signals after it was set to false above;
     * harmless here because no "new-sample" handler is connected */
    gst_app_sink_set_emit_signals(GST_APP_SINK_CAST(app->appsink), true);
 
    gst_bin_add_many(GST_BIN(app->pipeline), (GstElement*)app->src,
    app->srcconvert, app->tee, app->queue, app->convert, app->videosink, app->appqueue, app->appconvert, app->appsink, NULL);
 
    if(!gst_element_link_filtered((GstElement*)app->src, app->srcconvert, app->caps)){
        g_warning("failed to link src and convert");
    }
    /*if(!gst_element_link((GstElement*)app->src, app->srcconvert)){
        g_warning("failed to link src and convert");
    }*/
 
    if(!gst_element_link(app->srcconvert, app->tee)){
        g_warning("failed to link convert and tee");
    }
 
    if(!gst_element_link(app->tee, app->queue)){
        g_warning("failed to link tee and queue");
    }
 
    if(!gst_element_link(app->queue, app->convert)){
        g_warning("failed to link queue and convert");
    }
 
    if(!gst_element_link(app->convert, app->videosink)){
        g_warning("failed to link convert and videosink");
    }
 
    if(!gst_element_link(app->tee, app->appqueue)){
        g_warning("failed to link tee and appqueue");
    }
 
 
    /* note: appconvert and appcaps are created above but never linked,
     * so the appsink receives NV12 straight from this tee branch */
    if(!gst_element_link(app->appqueue, app->appsink)){
        g_warning("failed to link appqueue and appsink");
    }
 
    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
    g_warning("set state returned %d\n", state_ret);
 
    app->loop = g_main_loop_new(NULL, FALSE);
 
    /* pull one sample to confirm the pipeline produces frames, then release it */
    sample = gst_app_sink_pull_sample(GST_APP_SINK_CAST(app->appsink));
    if(sample == NULL)
        g_warning("app sink sample is null");
    else
        gst_sample_unref(sample);
 
    if(gst_app_sink_get_emit_signals(GST_APP_SINK_CAST(app->appsink))){
        g_warning("app sink signal true");
    }
 
    if(!gst_app_sink_get_drop(GST_APP_SINK_CAST(app->appsink))){
        g_warning("app sink drop is false");
    }
 
 
    g_idle_add(func, app);
 
}
 
void terminal_gst_element(){
    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_NULL);
    g_warning("set state null returned %d\n", state_ret);
}
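
A note that is not in the original post: new_sample_callback above is defined but never connected; frames are instead pulled by the idle handler registered with g_idle_add(). If signal-driven delivery is preferred, the wiring would look roughly like this (a sketch, to be added in init_gst_elements after the appsink is created):

    g_object_set(app->appsink, "emit-signals", TRUE, NULL);
    g_signal_connect(app->appsink, "new-sample",
                     G_CALLBACK(new_sample_callback), app);
    /* the g_idle_add(func, app) call is then no longer needed */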

Main program:

/*
  I don't know if it is the syntax highlighter or the blogger, but I can't seem
  to put angle brackets around header file names properly. The includes below
  are a reconstruction of what main() needs, not the original list.
*/
#include <stdio.h>
#include <stdlib.h>
 
#include <X11/Xlib.h>
#include <gst/gst.h>
 
#include "defines.h"
#include "src/gstcode.h"
 
#define BUFF_SIZE               (1024)
 
#ifndef VIDEO_WIDTH
#define VIDEO_WIDTH             640
#define VIDEO_HEIGHT            480
#endif
 
 
int main(int argc, char *argv[])
{
    XInitThreads();
 
 
    init_gst_elements(argc, argv);
 
 
    g_main_loop_run(app->loop);
 
    terminal_gst_element();
 
    return 0;
}
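
One more note, not in the original: with the bus watch commented out in init_gst_elements, nothing quits the main loop except the map-failure path in deel_appsink_buffer, so terminal_gst_element() is normally never reached. A minimal sketch for quitting cleanly on Ctrl-C with GLib's g_unix_signal_add (the handler name is made up for illustration):

    #include <signal.h>
    #include <glib-unix.h>
     
    static gboolean on_sigint(gpointer data){
        g_main_loop_quit(((gst_app_t*)data)->loop);
        return G_SOURCE_REMOVE;    /* fire once, then remove the source */
    }
     
    /* in main(), before g_main_loop_run(app->loop): */
    g_unix_signal_add(SIGINT, on_sigint, app);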

 
