首页 > 编程语言 >c++ gstreamer使用2

c++ gstreamer使用2

时间:2023-02-07 14:47:58浏览次数:43  
标签:gst gstreamer c++ pad sink 使用 new data GST

1,播放教程playbin

复制代码
#include <gst/gst.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
    GstElement *playbin;  /* Our one and only element: playbin builds the whole pipeline internally */

    gint n_video;          /* Number of embedded video streams (read from playbin's "n-video") */
    gint n_audio;          /* Number of embedded audio streams (read from playbin's "n-audio") */
    gint n_text;           /* Number of embedded subtitle streams (read from playbin's "n-text") */

    gint current_video;    /* Currently playing video stream index */
    gint current_audio;    /* Currently playing audio stream index */
    gint current_text;     /* Currently playing subtitle stream index */

    GMainLoop *main_loop;  /* GLib's Main Loop; quit from handle_message() on error/EOS */
} CustomData;

/* playbin flags */
/* Local mirror of the playbin "flags" property bits this example toggles
 * (only the three lowest bits are redefined here). */
typedef enum {
    GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
    GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
    GST_PLAY_FLAG_TEXT = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data);

int main(int argc, char *argv[]) {
    CustomData data;
    GstBus *message_bus;
    GstStateChangeReturn state_ret;
    gint play_flags;
    GIOChannel *stdin_channel;

    /* Initialize the GStreamer library (also parses GStreamer CLI options). */
    gst_init(&argc, &argv);

    /* playbin is an all-in-one element: source, demuxer, decoders and sinks
     * are plugged automatically inside it. */
    data.playbin = gst_element_factory_make("playbin", "playbin");
    if (!data.playbin) {
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    /* Point playbin at the media to play. */
    g_object_set(data.playbin, "uri", "file:///D:/gstreamer/1.mp4", NULL);
    //rtsp://xxx:xxx@xxx/h264/ch1/main/av_stream

    /* Enable audio + video output and disable subtitle rendering. */
    g_object_get(data.playbin, "flags", &play_flags, NULL);
    play_flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
    play_flags &= ~GST_PLAY_FLAG_TEXT;
    g_object_set(data.playbin, "flags", play_flags, NULL);

    /* Hint the connection speed; influences some internal playbin decisions. */
    g_object_set(data.playbin, "connection-speed", 56, NULL);

    /* Watch the bus so handle_message() runs for every bus message. */
    message_bus = gst_element_get_bus(data.playbin);
    gst_bus_add_watch(message_bus, (GstBusFunc)handle_message, &data);

    /* Watch stdin so handle_keyboard() runs for each typed line. */
#ifdef G_OS_WIN32
    stdin_channel = g_io_channel_win32_new_fd(_fileno(stdin));
#else
    stdin_channel = g_io_channel_unix_new(fileno(stdin));
#endif
    g_io_add_watch(stdin_channel, G_IO_IN, (GIOFunc)handle_keyboard, &data);

    /* Start playback. */
    state_ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
    if (state_ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.playbin);
        return -1;
    }

    /* Spin the GLib main loop; the bus and keyboard watches fire from here
     * until handle_message() quits the loop. */
    data.main_loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(data.main_loop);

    /* Tear everything down. */
    g_main_loop_unref(data.main_loop);
    g_io_channel_unref(stdin_channel);
    gst_object_unref(message_bus);
    gst_element_set_state(data.playbin, GST_STATE_NULL);
    gst_object_unref(data.playbin);
    return 0;
}

/* Extract some metadata from the streams and print it on the screen */
/* Extract some metadata from the streams and print it on the screen.
 * Reads playbin's n-video/n-audio/n-text counters and per-stream tag lists,
 * then reports which streams are currently selected. */
static void analyze_streams(CustomData *data) {
    gint i;
    GstTagList *tags;
    gchar *str;
    guint rate;

    /* Read the stream counts from playbin */
    g_object_get(data->playbin, "n-video", &data->n_video, NULL);
    g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
    g_object_get(data->playbin, "n-text", &data->n_text, NULL);

    g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
        data->n_video, data->n_audio, data->n_text);

    g_print("\n");
    for (i = 0; i < data->n_video; i++) {
        tags = NULL;
        /* Retrieve the stream's video tags */
        g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);
        if (tags) {
            g_print("video stream %d:\n", i);
            /* BUG FIX: the return value was ignored here, so when the codec
             * tag was absent, 'str' was read and freed while uninitialized
             * (undefined behavior). Check it like the audio/text loops do. */
            if (gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str)) {
                g_print("  codec: %s\n", str);
                g_free(str);
            } else {
                g_print("  codec: %s\n", "unknown");
            }
            gst_tag_list_free(tags);
        }
    }

    g_print("\n");
    for (i = 0; i < data->n_audio; i++) {
        tags = NULL;
        /* Retrieve the stream's audio tags */
        g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
        if (tags) {
            g_print("audio stream %d:\n", i);
            if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
                g_print("  codec: %s\n", str);
                g_free(str);
            }
            if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
                g_print("  language: %s\n", str);
                g_free(str);
            }
            if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
                g_print("  bitrate: %d\n", rate);
            }
            gst_tag_list_free(tags);
        }
    }

    g_print("\n");
    for (i = 0; i < data->n_text; i++) {
        tags = NULL;
        /* Retrieve the stream's subtitle tags */
        g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
        if (tags) {
            g_print("subtitle stream %d:\n", i);
            if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
                g_print("  language: %s\n", str);
                g_free(str);
            }
            gst_tag_list_free(tags);
        }
    }

    /* Which streams are currently selected? */
    g_object_get(data->playbin, "current-video", &data->current_video, NULL);
    g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
    g_object_get(data->playbin, "current-text", &data->current_text, NULL);

    g_print("\n");
    g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
        data->current_video, data->current_audio, data->current_text);
    g_print("Type any number and hit ENTER to select a different audio stream\n");
}

/* Process messages from GStreamer */
/* Bus watch: react to errors, end-of-stream and state changes. */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
    GError *error;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
        gst_message_parse_error(msg, &error, &debug_info);
        g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), error->message);
        g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error(&error);
        g_free(debug_info);
        g_main_loop_quit(data->main_loop);
        break;
    case GST_MESSAGE_EOS:
        g_print("End-Of-Stream reached.\n");
        g_main_loop_quit(data->main_loop);
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        GstState old_state, new_state, pending_state;
        gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
        /* Once the playbin itself reaches PLAYING, inspect the streams. */
        if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin) && new_state == GST_STATE_PLAYING)
            analyze_streams(data);
        break;
    }
    default:
        break;
    }

    /* Keep the watch installed so we keep receiving messages. */
    return TRUE;
}

/* Process keyboard input */
/* stdin watch: read one line and, if it is a valid audio stream index,
 * switch playbin's current audio stream to it. */
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data) {
    gchar *str = NULL;

    if (g_io_channel_read_line(source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
        /* BUG FIX: use the signed parser. g_ascii_strtoull() is an unsigned
         * conversion, so negative input wraps around to a huge value and the
         * 'index < 0' bounds check below was not reliable. */
        int index = (int) g_ascii_strtoll(str, NULL, 0);
        if (index < 0 || index >= data->n_audio) {
            g_printerr("Index out of bounds\n");
        }
        else {
            /* Valid index: switch playbin's current audio stream. */
            g_print("Setting current audio stream to %d\n", index);
            g_object_set(data->playbin, "current-audio", index, NULL);
        }
    }
    g_free(str);
    return TRUE;
}
复制代码

此代码应该是和命令行里面的playbin一样的,啥都不需要你做,就能播放,但是这同样代表着什么你都无法优化,直接一个playbin管道就结束了。实测rtsp延时挺严重的。

2,自定义衬垫链接:

复制代码
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
    GstElement *pipeline;  /* Top-level pipeline holding the four elements below */
    GstElement *source;    /* rtspsrc: pulls the RTSP stream (pads created dynamically) */
    GstElement *decode;    /* decodebin: auto-plugs depayloader/parser/decoder */
    GstElement *convert;   /* videoconvert: raw-video colorspace/format conversion */
    GstElement *sink;      /* autovideosink: renders the video */
} CustomData;
//先建立一个结构,里面放了一个pipeline指针和四个元件指针

/* Handler for the pad-added signal */
static void pad_added_handler(GstElement *src, GstPad *pad, CustomData *data);
static void pad_added_handler2(GstElement *src, GstPad *pad, CustomData *data);

//声明两个回调函数,一个负责链接source和decode,另一个负责链接decode和convert
/* Build rtspsrc -> decodebin -> videoconvert -> autovideosink.
 * The first two links are dynamic (made in the pad-added callbacks); the
 * main loop then blocks on the bus until error or end-of-stream. */
int main(int argc, char *argv[]) {
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Create the elements */
    data.source = gst_element_factory_make("rtspsrc", "source");
    data.decode = gst_element_factory_make("decodebin", "decode");
    data.convert = gst_element_factory_make("videoconvert", "convert");
    data.sink = gst_element_factory_make("autovideosink", "sink");

    /* Create the empty pipeline */
    data.pipeline = gst_pipeline_new("test-pipeline");

    if (!data.pipeline || !data.source || !data.decode || !data.convert || !data.sink) {
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    /* Add everything to the pipeline but only link convert -> sink now.
     * rtspsrc and decodebin expose their source pads dynamically, so those
     * links are made later in the pad-added callbacks. */
    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.decode, data.convert, data.sink, NULL);
    if (!gst_element_link_many( data.convert, data.sink, NULL)) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Set the RTSP URL to pull from */
    g_object_set(data.source, "location", "rtsp://admin:[email protected]/h264/ch1/main/av_stream", NULL);

    /* Connect the pad-added signals: handler 1 links source -> decode,
     * handler 2 links decode -> convert. */
    g_signal_connect(data.source, "pad-added", G_CALLBACK(pad_added_handler), &data);
    g_signal_connect(data.decode, "pad-added", G_CALLBACK(pad_added_handler2), &data);

    /* Start playing */
    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Listen to the bus.
     * BUG FIX: this used GST_MESSAGE_ANY, which delivered every bus message,
     * printed an "error msg" line for each one, and hit the "should not reach
     * here" default branch constantly. GstMessageType is a flags type, so
     * OR-ing the types the switch handles is valid and filters the rest. */
    bus = gst_element_get_bus(data.pipeline);
    do {
        msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;

            switch (GST_MESSAGE_TYPE(msg)) {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(msg, &err, &debug_info);
                g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
                g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error(&err);
                g_free(debug_info);
                terminate = TRUE;
                break;
            case GST_MESSAGE_EOS:
                g_print("End-Of-Stream reached.\n");
                terminate = TRUE;
                break;
            case GST_MESSAGE_STATE_CHANGED:
                /* We are only interested in state-changed messages from the pipeline */
                if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data.pipeline)) {
                    GstState old_state, new_state, pending_state;
                    gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
                    g_print("Pipeline state changed from %s to %s:\n",
                        gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
                }
                break;
            default:
                /* Unreachable thanks to the filter above. */
                g_printerr("Unexpected message received.\n");
                break;
            }
            gst_message_unref(msg);
        }
    } while (!terminate);

    /* Free resources */
    gst_object_unref(bus);
    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

/* This function will be called by the pad-added signal */
/* pad-added handler for rtspsrc: links the dynamically created RTP pad
 * to decodebin's sink pad once a pad with application/x-rtp caps appears. */
static void pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad(data->decode, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* If decodebin's sink is already linked, there is nothing to do. */
    if (gst_pad_is_linked(sink_pad)) {
        g_print("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type.
     * BUG FIX: gst_pad_get_current_caps() returns NULL when caps are not yet
     * negotiated, and gst_caps_get_structure(NULL, 0) would crash. Fall back
     * to querying the pad's possible caps in that case. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    if (!g_str_has_prefix(new_pad_type, "application/x-rtp")) {
        g_print("It has type '%s' which is not raw rtsp. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link(new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret)) {
        g_print("Type is '%s' but link failed.\n", new_pad_type);
    }
    else {
        g_print("Link succeeded (type '%s').\n", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref(sink_pad);
}
/* pad-added handler for decodebin: links its dynamically created raw-video
 * pad (video/x-raw caps) to videoconvert's sink pad. */
static void pad_added_handler2(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad(data->convert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* If videoconvert's sink is already linked, there is nothing to do. */
    if (gst_pad_is_linked(sink_pad)) {
        g_print("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type.
     * BUG FIX: gst_pad_get_current_caps() returns NULL when caps are not yet
     * negotiated, and gst_caps_get_structure(NULL, 0) would crash. Fall back
     * to querying the pad's possible caps in that case. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    if (!g_str_has_prefix(new_pad_type, "video/x-raw")) {
        g_print("It has type '%s' which is not raw rtsp. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link(new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret)) {
        g_print("Type is '%s' but link failed.\n", new_pad_type);
    }
    else {
        g_print("Link222 succeeded (type '%s').\n", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref(sink_pad);
}
复制代码

 3,从rtsp解码视频,转码为jpg并且写出到本地文件,注意,文件会变得很大

复制代码
#include <gst/gst.h>
#include <iostream>
using namespace std;
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
    GstElement *pipeline;  /* Top-level pipeline holding the four elements below */
    GstElement *source;    /* rtspsrc: pulls the RTSP stream (pads created dynamically) */
    GstElement *decode;    /* decodebin: auto-plugs depayloader/parser/decoder */
    GstElement *convert;   /* jpegenc in this variant: encodes raw video to JPEG */
    GstElement *sink;      /* filesink: writes the encoded frames to a local file */
} CustomData;
//先建立一个结构,里面放了一个pipeline指针和四个元件指针

/* Handler for the pad-added signal */
static void pad_added_handler(GstElement *src, GstPad *pad, CustomData *data);
static void pad_added_handler2(GstElement *src, GstPad *pad, CustomData *data);
static void daqing_function(GstElement* object, GstBuffer* arg0, GstPad* arg1, gpointer user_data);

//声明两个回调函数,一个负责链接source和decode,另一个负责链接decode和convert
int main(int argc, char *argv[]) {
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;
     

    /* Initialize GStreamer */
    gst_init(&argc, &argv);
    //同样需要先初始化
    /* Create the elements */
    data.source = gst_element_factory_make("rtspsrc", "source");
    data.decode = gst_element_factory_make("decodebin", "decode");
    data.convert = gst_element_factory_make("jpegenc", "convert");//jpegenc avenc_bmp
    data.sink = gst_element_factory_make("filesink", "sink");


    /* Create the empty pipeline */
    data.pipeline = gst_pipeline_new("test-pipeline");
    //先把data里的信息创建出来,创建了一个pipeline和四个元件

    if (!data.pipeline || !data.source || !data.decode || !data.convert || !data.sink) {  // 
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    /* Build the pipeline. Note that we are NOT linking the source at this
    * point. We will do it later. */
    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.decode, data.convert, data.sink, NULL);//
                                                                                                      //把元件都添加到管道里
    if (!gst_element_link_many(data.convert, data.sink, NULL)) {  //data.convert,
                                                                  //把元件都链接起来,为什么不连接source和decode?因为这俩需要回调函数进行特殊的链接,一般的链接是要报错的
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Set the URI to play */
    g_object_set(data.source, "location", "rtsp://admin:[email protected]/h264/ch1/main/av_stream", NULL);
    g_object_set(data.sink, "location", "D:\\tmp\\test.jpg", NULL);

    
    //g_object_set(data.sink, "max-lateness", 1000000000, NULL);
    //g_object_set(data.sink, "blocksize", 900000, NULL);




    //大约是将source元件的数据源给怼进去

    /* Connect to the pad-added signal */
    g_signal_connect(data.source, "pad-added", G_CALLBACK(pad_added_handler), &data);
    g_signal_connect(data.decode, "pad-added", G_CALLBACK(pad_added_handler2), &data);
    //g_signal_connect(data.sink, "convert-sample", G_CALLBACK(daqing_function), &data);
    ////GstBuffer  buffer;
    //GstSample *sample;
    //g_signal_emit_by_name(data.sink, "convert-sample", &sample, NULL);
    //给source和decode添加衬垫,衬垫关联的是回调函数,我理解的回调函数的参数:源元件(给谁添加衬垫,就是谁),新添加的衬垫,用来传递数据的data


    /* Start playing */
    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Listen to the bus */
    //获取一个总线,总线可以监视pipeline的运行状态,是否播放完毕等,然后进行相应的处理。
    bus = gst_element_get_bus(data.pipeline);
    do {
        msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ANY);
        //等待执行结束并且返回
        //顺带说一句,以前的老语法是GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS这样的,所以下文中的case用的是这几个错误信息,但是现在这个语法不被支持了。嗯嗯,所以改用GST_MESSAGE_ANY
        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;
            

            switch (GST_MESSAGE_TYPE(msg)) {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(msg, &err, &debug_info);
                g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
                g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error(&err);
                g_free(debug_info);
                terminate = TRUE;
                break;
            case GST_MESSAGE_EOS:
                g_print("End-Of-Stream reached.\n");
                terminate = TRUE;
                break;
            case GST_MESSAGE_STATE_CHANGED:
                /* We are only interested in state-changed messages from the pipeline */
                if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data.pipeline)) {
                    GstState old_state, new_state, pending_state;
                    gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
                    g_print("Pipeline state changed from %s to %s:\n",
                        gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
                }
            case GST_MESSAGE_LATENCY:
                g_print("bus: error msg:%d\n", GST_MESSAGE_TYPE(msg));
                //GstMessage ftmsg;
                GstObject * src;
                src = msg->src;
                cout <<"message->src:"<< src->name << endl;
                break;
            default:
                /* We should not reach here */
                //g_printerr("Unexpected message received.\n");
                break;
            }
            gst_message_unref(msg);
        }
    } while (!terminate);
    //只要不中止,就一直监视执行结束的状态

    /* Free resources */
    gst_object_unref(bus);
    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

/* This function will be called by the pad-added signal */
/* pad-added handler for rtspsrc: links the dynamically created RTP pad
 * to decodebin's sink pad once a pad with application/x-rtp caps appears. */
static void pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad(data->decode, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* If decodebin's sink is already linked, there is nothing to do. */
    if (gst_pad_is_linked(sink_pad)) {
        g_print("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type.
     * BUG FIX: gst_pad_get_current_caps() returns NULL when caps are not yet
     * negotiated, and gst_caps_get_structure(NULL, 0) would crash. Fall back
     * to querying the pad's possible caps in that case. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    if (!g_str_has_prefix(new_pad_type, "application/x-rtp")) {
        g_print("It has type '%s' which is not raw rtsp. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link(new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret)) {
        g_print("Type is '%s' but link failed.\n", new_pad_type);
    }
    else {
        g_print("Link succeeded (type '%s').\n", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref(sink_pad);
}
/* pad-added handler for decodebin: links its dynamically created raw-video
 * pad (video/x-raw caps) to the jpegenc element's sink pad. */
static void pad_added_handler2(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad(data->convert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("22Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* If the encoder's sink is already linked, there is nothing to do. */
    if (gst_pad_is_linked(sink_pad)) {
        g_print("22We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type.
     * BUG FIX: gst_pad_get_current_caps() returns NULL when caps are not yet
     * negotiated, and gst_caps_get_structure(NULL, 0) would crash. Fall back
     * to querying the pad's possible caps in that case. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    if (!g_str_has_prefix(new_pad_type, "video/x-raw")) {
        g_print("22It has type '%s' which is not raw rtsp. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link(new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret)) {
        g_print("22Type is '%s' but link failed.\n", new_pad_type);
    }
    else {
        g_print("22Link succeeded (type '%s').\n", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref(sink_pad);
}

/* Experimental callback: prints the buffer's pool pointer and a few
 * throw-away characters. Signature matches the "convert-sample" hookup
 * that is commented out in main(); never actually connected there. */
void daqing_function(GstElement* object, GstBuffer* arg0, GstPad* arg1, gpointer user_data) {
    g_print("hello callback==============");

    /* Dump the buffer's pool pointer twice: once via iostream, once via printf. */
    GstBufferPool *pool = arg0->pool;
    cout << pool << endl;
    printf("%p ppp\n", pool);

    /* Prints the same character ('9', ASCII 57) four times — the pointer is
     * deliberately never advanced, matching the original demo loop. */
    int value = 57;
    int *cursor = &value;
    int round = 0;
    while (round < 4) {
        printf("%c cc\n", *cursor);
        round++;
    }
}
复制代码

标签:gst,gstreamer,c++,pad,sink,使用,new,data,GST
From: https://www.cnblogs.com/kn-zheng/p/17098315.html

相关文章

  • C#使用FFmpeg录制视频
    FFmpeg通过控制台命令可以方便地录制USB摄像头和屏幕,本文使用C#中的Process类调用FFmpeg命令,并通过获取控制台输出参数实现录制视频的完整功能。Gitee完整代码:FFmpegWrapp......
  • 数据采集技术之在Python中Libxml模块安装与使用XPath
    为了使用XPath技术,对爬虫抓取的网页数据进行抽取(如标题、正文等等),之后在Windows下安装libxml2模块(安装后使用的是Libxml模块),该模块含有xpath。准备需要的软件包:Python2.7......
  • Scaffold-DbContext使用教程
    1.基础准备   安装对应的三个nuget包,第二个nuget由你使用数据库决定 2.命令说明 Scaffold-DbContext为DbContext数据库的和实体类型生成代码。为了使Sca......
  • 【android 】android 如何使用服务器进行版本更新
    今天,简单讲讲android里如何在app内部直接使用服务器进行版本更新。昨天,我讲了如何使用应用市场进行版本更新。但是使用应用市场进行版本更新存在一个问题,就是app无法获取......
  • 类与对象的创建与使用 四种方式 js 0207
    使用字类典创建对象利用Object来创建对象使用构造方法创建对象使用class创建对象......
  • 使用PHP连接、操纵Memcached的原理和教程
    Memcahced开源分布式内存对象缓存系统通过减少数据库的负担,从而能够加速你的web应用。在本文中我将解释怎样实现一个基于Memcahced的缓存系统。数据库实例中使用的数据库表......
  • vue项目使用sha256加密
    sha256:1.中文名、英文名,全都叫sha2562.因为哈希值是固定大小的 256位所以名字有个2563.听说最近流行的比特币,区块链中挺多地方都用到了这个加密算法安装:npminsta......
  • vue中 watch 监听器的使用
    watchwatch:一个对象,键是需要观察的表达式,值是对应回调函数,也可以是是方法吗或者包含选项的对象。vue实例将会载实例化是调用$watch(),遍历watch对象的每一个property。......
  • PHP数组使用、特性、函数的总结
    包含其他数组的数组成为多维数组关联数组(即非数字索引数组)使用更有意义的数据作为数组的索引1、数组索引数组$products = array(‘a’, ‘b’, ‘c’);//就像一个......
  • Python中Gevent的使用
    1、可以通过gevent轻松实现并发同步或异步编程。gevent中使用的主要模式是Greenlet,它是以C扩展模块的形式访问Python的轻量级协程。2、Greenlet全部运行在主程序操作系统的......