GStreamer:ximagesink 在嵌入 GTK+ 窗口时不起作用



我正在学习如何在Linux平台上使用GStreamer。我已经看过了基本的教程,我想我明白我在那里做了什么。

我现在正试图修改GTK+集成教程(#5),使其使用实时视频管道(v4l2src ! videoconvert ! ximagesink)而不是 playbin。

当我运行它时,我的GTK+应用程序窗口打开,流进入播放状态,但我看不到任何视频。如果我注释掉对 gst_video_overlay_set_window_handle 的调用,那么 ximagesink 元素会打开另一个窗口,在那里我可以看到视频按预期工作。

因此,我认为我对管道本身没有任何问题,但我还没有弄清楚如何在GTK+绘图区域小部件中将内容显示为覆盖。

这是我目前拥有的应用程序的精简版本:

#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Holds the pipeline elements and state shared between the GTK callbacks.
typedef struct CustomData
{
GstElement *pipeline;      // Top-level pipeline containing the elements below
GstElement *source;        // v4l2src capture element
GstElement *convert;       // videoconvert element
GstElement *sink;          // ximagesink video output element
GstState    state;         // Current state of the pipeline
} CustomData;
// "realize" handler for the drawing area: obtains the native X window and
// hands its XID to the video sink as an overlay target.
// NOTE(review): as explained in the answer below, binding the overlay here
// is too early, and data->sink may not even be the element that needs the
// handle — which is why this version shows no video in the GTK+ window.
static void realize_cb(GtkWidget *widget, CustomData *data)
{
GdkWindow *window;
guintptr   window_handle;
window = gtk_widget_get_window(widget);
// GstVideoOverlay requires a real (server-side) X window, not a
// client-side GDK window; g_error() aborts if one cannot be created.
if (!gdk_window_ensure_native(window))
g_error ("Couldn't create native window needed for GstVideoOverlay!");
window_handle = GDK_WINDOW_XID(window);
// Comment off the next line and the app works, opening a new window
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(data->sink),
window_handle);
}
// "delete-event" handler for the main window: quits the GTK main loop so
// main() can shut the pipeline down and exit.
static void delete_event_cb(GtkWidget  *widget,
GdkEvent   *event,
CustomData *data)
{
gtk_main_quit();
}
/* "draw" handler for the video widget: paints it black while no video is
 * being rendered yet (pipeline below PAUSED).  Returns FALSE so GTK+
 * continues normal event propagation. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
    GtkAllocation alloc;

    /* From PAUSED onward the sink draws into the widget itself. */
    if (data->state >= GST_STATE_PAUSED)
        return FALSE;

    /* No frames yet: fill the whole allocated area with black. */
    gtk_widget_get_allocation(widget, &alloc);
    cairo_set_source_rgb(cr, 0, 0, 0);
    cairo_rectangle(cr, 0, 0, alloc.width, alloc.height);
    cairo_fill(cr);

    return FALSE;
}
/* Builds the application UI: a top-level window holding a vertical box
 * with the drawing area that the video overlay will render into.
 * (Removed the unused locals controls/play_button/pause_button/stop_button
 * that were declared but never used.) */
static void create_ui(CustomData *data)
{
    GtkWidget *main_window;   // The uppermost window, containing all others
    GtkWidget *video_window;  // The drawing area where the video will be shown
    GtkWidget *main_box;      // VBox to hold video window and controls

    main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    g_signal_connect(G_OBJECT(main_window), "delete-event",
                     G_CALLBACK(delete_event_cb), data);

    video_window = gtk_drawing_area_new();
    // "realize" fires once the widget has a window we can hand to GStreamer
    g_signal_connect(G_OBJECT(video_window), "realize",
                     G_CALLBACK(realize_cb), data);
    g_signal_connect(G_OBJECT(video_window), "draw",
                     G_CALLBACK(draw_cb), data);

    main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
    gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
    gtk_container_add(GTK_CONTAINER(main_window), main_box);
    gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
    gtk_widget_show_all(main_window);
}
/* Bus callback for ERROR messages: prints the error and its debug info,
 * then quits the GTK main loop.
 * Fixes: the format strings had lost their newline escapes ("%sn" printed
 * a literal 'n'); punctuation typo; removed the stray ';' after the
 * closing brace. */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GError *err;
    gchar  *debug_info;

    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), err->message);
    g_printerr("Debugging information: %s\n",
               debug_info ? debug_info : "none");
    g_clear_error(&err);
    g_free(debug_info);  // g_free(NULL) is a no-op

    gtk_main_quit();
}
/* Bus callback for STATE_CHANGED messages: records the pipeline's new
 * state in CustomData (used by draw_cb) and logs it.
 * Fix: the format string "State set to %s:n" was a garbled "%s\n". */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GstState old_state, new_state, pending_state;

    gst_message_parse_state_changed(msg, &old_state, &new_state,
                                    &pending_state);
    /* Only track state changes of the pipeline itself, not its children. */
    if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline))
    {
        data->state = new_state;
        g_print("State set to %s\n", gst_element_state_get_name(new_state));
    }
}
/* Entry point: builds the v4l2src ! videoconvert ! ximagesink pipeline,
 * creates the GTK+ UI, wires up bus callbacks, and runs the main loop.
 * Fixes: "CustomData data = {};" is only valid from C23 — use {0};
 * check element creation and linking instead of silently continuing. */
int main(int argc, char *argv[])
{
    CustomData data = { 0 };   // {0} is portable; {} requires C23
    GstBus    *bus;

    gtk_init(&argc, &argv);
    gst_init(&argc, &argv);

    data.source   = gst_element_factory_make("v4l2src", "source");
    data.convert  = gst_element_factory_make("videoconvert", "convert");
    data.sink     = gst_element_factory_make("ximagesink", "sink");
    data.pipeline = gst_pipeline_new("pipeline");
    if (!data.source || !data.convert || !data.sink || !data.pipeline) {
        g_printerr("Not all pipeline elements could be created.\n");
        return -1;
    }

    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
                     data.sink, NULL);
    if (!gst_element_link_many(data.source, data.convert, data.sink, NULL)) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }
    g_object_set(data.source, "device", "/dev/video0", NULL);

    create_ui(&data);

    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(G_OBJECT(bus), "message::error",
                     (GCallback)error_cb, &data);
    g_signal_connect(G_OBJECT(bus), "message::state-changed",
                     (GCallback)state_changed_cb, &data);
    gst_object_unref(bus);

    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    gtk_main();

    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

在这方面的任何帮助都将不胜感激。

后来我从这个网站之外的人那里得到了答案,内容如下:在 X 窗口被实现(realize)时就把视频接收器绑定到覆盖上可能为时过早,而且需要绑定的具体 GStreamer 元素可能并不是您自己创建的那个(例如,它可能是由您创建的接收器元素在内部创建的)。

为了解决这个问题,支持覆盖的 GStreamer 接收器会在合适的时间通过总线同步机制发出显式通知。应用程序应该注册一个总线同步处理程序,当收到相应的消息(prepare-window-handle)时,把 X 窗口绑定到该消息的来源元素上。

请参阅https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c

以下是有效的更新代码(注意 realize_cb 的更改以及新增的 bus_sync_handler 函数):

#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Holds the pipeline elements and state shared between the GTK callbacks
// and the bus sync handler.
typedef struct CustomData
{
GstElement *pipeline;      // Top-level pipeline containing the elements below
GstElement *source;        // v4l2src capture element
GstElement *convert;       // videoconvert element
GstElement *sink;          // ximagesink video output element
GstState    state;         // Current state of the pipeline
guintptr    video_window_handle;  // XID saved in realize_cb, consumed by bus_sync_handler
} CustomData;
/* "realize" handler for the drawing area: stores the native X window's
 * XID in CustomData.  The overlay itself is bound later, in
 * bus_sync_handler, when the sink asks for a window handle. */
static void realize_cb(GtkWidget *widget, CustomData *data)
{
    GdkWindow *gdk_win = gtk_widget_get_window(widget);

    /* GstVideoOverlay needs a real (server-side) X window. */
    if (!gdk_window_ensure_native(gdk_win))
    {
        g_error ("Couldn't create native window needed for GstVideoOverlay!");
    }

    data->video_window_handle = GDK_WINDOW_XID(gdk_win);
}
/* Synchronous bus handler: when the sink posts its prepare-window-handle
 * message, bind the X window saved by realize_cb to the message's source
 * element (which may not be the sink we created ourselves).
 * Fix: the original fell off the end of this non-void function — undefined
 * behavior, and the caller would read garbage as the GstBusSyncReply.  Per
 * the GstVideoOverlay documentation the handled message must be unreffed
 * and GST_BUS_DROP returned so the async watch never sees it. */
static GstBusSyncReply bus_sync_handler(GstBus     *bus,
                                        GstMessage *message,
                                        CustomData *data)
{
    // Ignore all but prepare-window-handle messages
    if (!gst_is_video_overlay_prepare_window_handle_message(message))
    {
        return GST_BUS_PASS;
    }

    if (data->video_window_handle)
    {
        g_print("About to assign window to overlay\n");
        gst_video_overlay_set_window_handle(
            GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message)),
            data->video_window_handle);
    }
    else
    {
        g_warning("Should have gotten a video window handle by now\n");
    }

    // Message fully handled here: drop it (GST_BUS_DROP requires the
    // handler to unref the message itself).
    gst_message_unref(message);
    return GST_BUS_DROP;
}
// "delete-event" handler for the main window: quits the GTK main loop so
// main() can shut the pipeline down and exit.
static void delete_event_cb(GtkWidget  *widget,
GdkEvent   *event,
CustomData *data)
{
gtk_main_quit();
}
/* "draw" handler for the video widget: while the pipeline is below
 * PAUSED there is no video yet, so paint the widget black.  Returns
 * FALSE to let GTK+ continue normal event propagation. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
    GtkAllocation area;

    /* The sink takes over rendering once the pipeline reaches PAUSED. */
    if (data->state >= GST_STATE_PAUSED)
        return FALSE;

    gtk_widget_get_allocation(widget, &area);
    cairo_set_source_rgb(cr, 0, 0, 0);
    cairo_rectangle(cr, 0, 0, area.width, area.height);
    cairo_fill(cr);

    return FALSE;
}
/* Builds the application UI: a top-level window holding a vertical box
 * with the drawing area that the video overlay will render into.
 * (Removed the unused locals controls/play_button/pause_button/stop_button
 * that were declared but never used.) */
static void create_ui(CustomData *data)
{
    GtkWidget *main_window;   // The uppermost window, containing all others
    GtkWidget *video_window;  // The drawing area where the video will be shown
    GtkWidget *main_box;      // VBox to hold video window and controls

    main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    g_signal_connect(G_OBJECT(main_window), "delete-event",
                     G_CALLBACK(delete_event_cb), data);

    video_window = gtk_drawing_area_new();
    // "realize" fires once the widget has a window whose XID we can record
    g_signal_connect(G_OBJECT(video_window), "realize",
                     G_CALLBACK(realize_cb), data);
    g_signal_connect(G_OBJECT(video_window), "draw",
                     G_CALLBACK(draw_cb), data);

    main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
    gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
    gtk_container_add(GTK_CONTAINER(main_window), main_box);
    gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
    gtk_widget_show_all(main_window);
}
/* Bus callback for ERROR messages: prints the error and its debug info,
 * then quits the GTK main loop.
 * Fixes: the format strings had lost their newline escapes ("%sn" printed
 * a literal 'n'); punctuation typo; removed the stray ';' after the
 * closing brace. */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GError *err;
    gchar  *debug_info;

    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), err->message);
    g_printerr("Debugging information: %s\n",
               debug_info ? debug_info : "none");
    g_clear_error(&err);
    g_free(debug_info);  // g_free(NULL) is a no-op

    gtk_main_quit();
}
/* Bus callback for STATE_CHANGED messages: records the pipeline's new
 * state in CustomData (used by draw_cb) and logs it.
 * Fix: the format string "State set to %s:n" was a garbled "%s\n". */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GstState old_state, new_state, pending_state;

    gst_message_parse_state_changed(msg, &old_state, &new_state,
                                    &pending_state);
    /* Only track state changes of the pipeline itself, not its children. */
    if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline))
    {
        data->state = new_state;
        g_print("State set to %s\n", gst_element_state_get_name(new_state));
    }
}
/* Entry point: builds the v4l2src ! videoconvert ! ximagesink pipeline,
 * creates the GTK+ UI, installs the synchronous bus handler that binds
 * the overlay at prepare-window-handle time, and runs the main loop.
 * Fixes: "CustomData data = {};" is only valid from C23 — use {0};
 * check element creation and linking instead of silently continuing. */
int main(int argc, char *argv[])
{
    CustomData data = { 0 };   // {0} is portable; {} requires C23
    GstBus    *bus;

    gtk_init(&argc, &argv);
    gst_init(&argc, &argv);

    data.source   = gst_element_factory_make("v4l2src", "source");
    data.convert  = gst_element_factory_make("videoconvert", "convert");
    data.sink     = gst_element_factory_make("ximagesink", "sink");
    data.pipeline = gst_pipeline_new("pipeline");
    if (!data.source || !data.convert || !data.sink || !data.pipeline) {
        g_printerr("Not all pipeline elements could be created.\n");
        return -1;
    }

    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
                     data.sink, NULL);
    if (!gst_element_link_many(data.source, data.convert, data.sink, NULL)) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }
    g_object_set(data.source, "device", "/dev/video0", NULL);

    create_ui(&data);

    bus = gst_element_get_bus(data.pipeline);
    // Sync handler runs in the streaming thread, before async delivery:
    // this is where the overlay window handle gets bound.
    gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler,
                             &data, NULL);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(G_OBJECT(bus), "message::error",
                     (GCallback)error_cb, &data);
    g_signal_connect(G_OBJECT(bus), "message::state-changed",
                     (GCallback)state_changed_cb, &data);
    gst_object_unref(bus);

    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    gtk_main();

    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

最新更新