How can I convert a GStreamer program that streams video over UDP into a Qt program?
I have a GStreamer program that streams video from a UDP source. I need to implement the same program in Qt. Please let me know how I can do it.
The program I'm using is shown below.
#include <gst/gst.h>
#include <stdio.h>
#include <stdlib.h>
GstElement *pipeline, *source, *decoder, *video_sink, *text, *audio_sink;
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End-of-stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:
    {
      gchar *debug;
      GError *err;

      gst_message_parse_error (msg, &err, &debug);
      g_free (debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
static void
new_pad (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad = NULL;
  const gchar *mime;
  GstCaps *caps;

  // get capabilities
  caps = gst_pad_get_caps (pad);

  // get mime type
  mime = gst_structure_get_name (gst_caps_get_structure (caps, 0));
  g_print ("Dynamic pad %s:%s created with mime-type %s\n",
      GST_OBJECT_NAME (element), GST_OBJECT_NAME (pad), mime);

  if (g_strrstr (mime, "video"))
  {
    g_print ("Linking video...\n");
    sinkpad = gst_element_get_static_pad (text, "video_sink");
  }
  if (g_strrstr (mime, "audio"))
  {
    g_print ("Linking audio...\n");
    sinkpad = gst_element_get_static_pad (audio_sink, "sink");
  }
  if (sinkpad != NULL)
  {
    // link
    gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
  }
  gst_caps_unref (caps);
}
int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;

  // initialize GStreamer
  gst_init (&argc, &argv);
  printf ("step 0\n");
  loop = g_main_loop_new (NULL, FALSE);

  /* check input arguments
  if (argc != 2)
  {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }*/
  // argv[1]="http://192.168.0.247:1234/Documents/6.mpg";
  //"udp://192.168.0.247:1234";
  //"/home/quarkgluon/Documents/rajesh/gstreamer/Serenity.mp4";
  printf ("step 1\n");

  // create elements
  pipeline = gst_pipeline_new ("video-player");
  source = gst_element_factory_make ("udpsrc", "source");
  decoder = gst_element_factory_make ("decodebin2", "decoder");
  text = gst_element_factory_make ("textoverlay", "text");
  video_sink = gst_element_factory_make ("xvimagesink", "vsink");
  audio_sink = gst_element_factory_make ("alsasink", "asink");
  if (!pipeline || !source || !decoder || !video_sink || !text || !audio_sink)
  {
    g_print ("One element could not be created\n");
    return -1;
  }

  // set the UDP port on the source. Also add a message handler.
  g_object_set (G_OBJECT (source), "port", 1234, NULL);
  // g_object_set (G_OBJECT (text), "text", "hello world awertyuiop!!!", NULL);
  // g_object_set (G_OBJECT (text), "italic", 1, NULL);
  // g_object_set (G_OBJECT (text), "bold", 1, NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  // put all elements in a bin
  gst_bin_add_many (GST_BIN (pipeline), source, decoder, video_sink, text, audio_sink, NULL);

  // link together - note that we cannot link the decoder and
  // sink yet, because the decoder uses dynamic pads. For that,
  // we set a pad-added signal handler.
  gst_element_link (source, decoder);
  gst_element_link (text, video_sink);
  g_signal_connect (decoder, "pad-added", G_CALLBACK (new_pad), NULL);
  printf ("step 2\n");

  // Now set to playing and iterate.
  g_print ("Setting to PLAYING\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("Running\n");
  g_main_loop_run (loop);

  // clean up nicely
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}
1 comment:
Have a look at QtGStreamer, the Qt bindings for GStreamer:
http://cgit.freedesktop.org/gstreamer/qt-gstreamer/
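With the QtGStreamer bindings the pipeline itself stays almost identical; you mainly swap the GLib main loop for Qt's event loop and render the video inside a QGst::Ui::VideoWidget. Below is a minimal, untested sketch against the 0.10-era QtGStreamer API. The Player class name is mine, only the video branch is wired up (the textoverlay and ALSA audio branches from your program are left out), and the "link every new pad to the video sink" shortcut in onPadAdded is a deliberate simplification.

// Sketch only: assumes the qt-gstreamer (QtGStreamer 0.10) bindings are installed.
#include <QApplication>
#include <QGlib/Connect>
#include <QGst/Init>
#include <QGst/Pipeline>
#include <QGst/Element>
#include <QGst/ElementFactory>
#include <QGst/Pad>
#include <QGst/Ui/VideoWidget>

// Hypothetical widget that owns the pipeline and shows the decoded video.
class Player : public QGst::Ui::VideoWidget
{
public:
    explicit Player(QWidget *parent = 0) : QGst::Ui::VideoWidget(parent)
    {
        m_pipeline = QGst::Pipeline::create("video-player");
        m_source   = QGst::ElementFactory::make("udpsrc");
        m_decoder  = QGst::ElementFactory::make("decodebin2");
        m_vsink    = QGst::ElementFactory::make("xvimagesink");

        m_source->setProperty("port", 1234);   // same port as the C version

        m_pipeline->add(m_source);
        m_pipeline->add(m_decoder);
        m_pipeline->add(m_vsink);
        m_source->link(m_decoder);

        // decodebin2 still creates its source pads dynamically,
        // so the decoder is linked in the "pad-added" handler below.
        QGlib::connect(m_decoder, "pad-added", this, &Player::onPadAdded);

        // render the sink's output inside this widget instead of a separate X window
        setVideoSink(m_vsink);
    }

    void play() { m_pipeline->setState(QGst::StatePlaying); }
    void stop() { m_pipeline->setState(QGst::StateNull); }

private:
    void onPadAdded(const QGst::PadPtr &pad)
    {
        // Simplification: try to link every new decoder pad to the video sink.
        // An audio pad will simply fail to link and is ignored in this sketch;
        // a real player would inspect the pad's caps as the C code does.
        pad->link(m_vsink->getStaticPad("sink"));
    }

    QGst::PipelinePtr m_pipeline;
    QGst::ElementPtr m_source, m_decoder, m_vsink;
};

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    QGst::init(&argc, &argv);      // initialize GStreamer/QtGStreamer

    Player player;
    player.resize(640, 480);
    player.show();
    player.play();

    int ret = app.exec();          // Qt's event loop replaces g_main_loop_run()
    player.stop();                 // set the pipeline back to NULL before exit
    return ret;
}

In a qmake project this would typically be built with CONFIG += link_pkgconfig and PKGCONFIG += QtGStreamer-0.10 QtGStreamerUi-0.10 (the exact module names depend on the installed version). Alternatively, you can keep your C code largely unchanged inside a Qt application: on Linux builds of Qt with GLib support, gst_bus_add_watch() works without a separate GMainLoop because Qt's event loop drives the default GLib main context, and the xvimagesink output can be embedded into a QWidget by passing widget->winId() to gst_x_overlay_set_xwindow_id() (the GStreamer 0.10 GstXOverlay interface).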