GStreamer: how to correctly link udpsrc - ximagesink

I'm writing an application that receives an RTP video stream using GStreamer. I'm getting the following error:

"Error received from element udpsrc0: Internal data stream error.Debugging information: …/gstreamer/subprojects/gstreamer/libs/gst/base/gstbasesrc.c(3177): gst_base_src_loop (): /GstPipeline:test-pipeline/GstPipeline:pipeline0/GstUDPSrc:udpsrc0:streaming stopped, reason not-linked (-1)Error: “Internal data stream error.””."

Here is the code:

    gst_init (nullptr, nullptr);

    const auto source = gst_element_factory_make("udpsrc", "source");
    const auto sink = gst_element_factory_make("autovideosink", "sink");
    const auto decodebin = gst_element_factory_make("decodebin", "decodebin");
    const auto videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
    const auto rtph264depay = gst_element_factory_make("rtph264depay", "rtph264depay");
    const auto pipeline = gst_pipeline_new("test-pipeline");
    const auto caps =  gst_caps_new_simple ("application/x-rtp",
      "media", G_TYPE_STRING, "video",
      "clock-rate", G_TYPE_INT, 90000,
      "encoding-name", G_TYPE_STRING,"H264",
      "payload", G_TYPE_INT, 96,
      NULL);
    g_object_set(source, "port", 5000, NULL);
    g_object_set(source, "caps", caps, NULL);
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), windId);
    // g_object_set (appsrc, "caps",
    //           gst_caps_new_simple ("application/x-rtp",
    //               "media", G_TYPE_STRING, "video",
    //               "clock-rate", G_TYPE_INT, 90000,
    //               "encoding-name", G_TYPE_STRING, "H264",
    //               "payload", G_TYPE_INT, 96,
    //               NULL),
    //           NULL);
    // g_object_set (G_OBJECT (appsrc), "caps",
    //           gst_caps_new_simple ("video/x-raw",
    //               "format", G_TYPE_STRING, "RGB16",
    //               "width", G_TYPE_INT, 384,
    //               "height", G_TYPE_INT, 288,
    //               "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
    //               "framerate", GST_TYPE_FRACTION, 0, 1,
    //               NULL), NULL);
    if (!pipeline || !source || !sink || !videoconvert || !decodebin || !rtph264depay) {
      g_printerr ("Not all elements could be created.\n");
      return;
    }
    gst_bin_add_many (GST_BIN (pipeline), source, rtph264depay, decodebin, videoconvert, sink, NULL);
    gst_element_link(source, rtph264depay);
    gst_element_link(rtph264depay, decodebin);
    gst_element_link(decodebin, videoconvert);
    gst_element_link(videoconvert, sink);
    gst_element_link_many(source, rtph264depay, decodebin, videoconvert, sink, NULL);
    const auto ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
      g_printerr ("Unable to set the pipeline to the playing state.\n");
      gst_object_unref (pipeline);
      return;
    }

    /* Wait until error or EOS */
    GstBus* bus = gst_element_get_bus(pipeline);
    GstMessage* msg = gst_bus_timed_pop_filtered(
      bus,
      GST_CLOCK_TIME_NONE,
      static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Parse message */
    if (msg != nullptr) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n",
              GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n",
              debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          break;
        default:
          /* We should not reach here because we only asked for ERRORs and EOS */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }

    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
  }

What am I doing wrong? Please advise, kind people :)
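From what I can tell from the documentation, decodebin creates its source pads dynamically, so gst_element_link(decodebin, videoconvert) cannot succeed while the pipeline is still being built; the examples I found connect to decodebin's "pad-added" signal instead. A rough sketch of that approach (I have not verified it in my application):

static void on_pad_added (GstElement * /*decodebin*/, GstPad *new_pad, gpointer user_data)
{
  // Link decodebin to videoconvert once the decoded video pad appears.
  const auto videoconvert = GST_ELEMENT (user_data);
  const auto sink_pad = gst_element_get_static_pad (videoconvert, "sink");
  if (!gst_pad_is_linked (sink_pad))
    gst_pad_link (new_pad, sink_pad);
  gst_object_unref (sink_pad);
}

// During construction only the static links are made up front:
//   gst_element_link (source, rtph264depay);
//   gst_element_link (rtph264depay, decodebin);
//   gst_element_link (videoconvert, sink);
//   g_signal_connect (decodebin, "pad-added", G_CALLBACK (on_pad_added), videoconvert);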

When I use gst_parse_launch, everything works fine, but then I can't attach the overlay to a QWidget.

const auto parserElement = gst_parse_launch("udpsrc port=5000 caps = \"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert", nullptr);
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (parserElement), windId);

upd:

I haven't found an answer on how to link the elements manually, but I did find out how to get the element I need out of the gst_parse_launch result.

const auto parserElement = gst_parse_launch("udpsrc port=5000 caps = \"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! ximagesink name=sink", nullptr);
    const auto pipeline = gst_pipeline_new("test-pipeline");
    gst_bin_add_many (GST_BIN (pipeline), parserElement, NULL);
    const auto ximagesink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(ximagesink), windId);
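For reference, windId here is assumed to come from the QWidget that should display the video (videoWidget below is a hypothetical name); a minimal sketch of attaching the named sink to the widget:

#include <QWidget>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>

// Hypothetical helper: find the sink named "sink" in the parsed pipeline and
// hand it the widget's native window handle so ximagesink draws into the widget.
static void attach_sink_to_widget (GstElement *pipeline, QWidget *videoWidget)
{
  const auto sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
  if (!sink)
    return;
  // winId() forces creation of a native window for the widget.
  const auto windId = static_cast<guintptr> (videoWidget->winId ());
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), windId);
  gst_object_unref (sink);  // gst_bin_get_by_name() returns a new reference
}

Note that gst_bin_get_by_name() returns an extra reference, so the ximagesink obtained in the snippet above should eventually be unreffed as well.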
