-
-
Save floe/e35100f091315b86a5bf to your computer and use it in GitHub Desktop.
// example appsrc for gstreamer 1.0 with own mainloop & external buffers. based on example from gstreamer docs.
// public domain, 2015 by Florian Echtler <floe@butterbrot.org>. compile with:
// gcc --std=c99 -Wall $(pkg-config --cflags gstreamer-1.0) -o gst-appsrc gst-appsrc.c $(pkg-config --libs gstreamer-1.0) -lgstapp-1.0
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <stdint.h>

/* Set to 1 by the need-data callback when appsrc can accept another buffer;
 * cleared by prepare_buffer() right before a buffer is pushed. */
int want = 1;

/* Two static 384x288 RGB16 test frames (pixel data filled in main()). */
uint16_t b_white[384*288];
uint16_t b_black[384*288];
/* Push one frame into the appsrc, alternating between the all-white and
 * all-black test frames. Does nothing unless the need-data callback has set
 * the global 'want' flag, so we never outrun the appsrc queue.
 * The static pixel arrays are wrapped without copying; the NULL
 * destroy-notify means GStreamer will not attempt to free them. */
static void prepare_buffer(GstAppSrc* appsrc) {

  static gboolean white = FALSE;      /* which frame goes out next */
  static GstClockTime timestamp = 0;  /* running PTS, advances 1/4 s per frame */

  if (!want) return;
  want = 0;

  guint size = 384 * 288 * 2;  /* RGB16: two bytes per pixel */

  /* Zero-copy wrap of the external buffer (flags 0, no destroy notify). */
  GstBuffer *buffer = gst_buffer_new_wrapped_full( 0,
      (gpointer)(white ? b_white : b_black), size, 0, size, NULL, NULL );
  white = !white;

  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 4);  /* 4 fps */
  timestamp += GST_BUFFER_DURATION (buffer);

  GstFlowReturn ret = gst_app_src_push_buffer(appsrc, buffer);
  if (ret != GST_FLOW_OK) {
    /* Downstream refused the buffer (pipeline flushing or shutting down).
     * Report it instead of silently ignoring the flow return. */
    g_printerr ("gst_app_src_push_buffer failed: %s\n", gst_flow_get_name (ret));
  }
}
/* "need-data" signal handler: appsrc's queue is running low. Buffers are
 * pushed from the main loop (see main), so here we only raise the flag that
 * tells the loop to push another frame on its next iteration. */
static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data) {
  (void)appsrc;       /* unused: the push happens in prepare_buffer() */
  (void)unused_size;
  (void)user_data;
  want = 1;
}
gint main (gint argc, gchar *argv[]) {
  GstElement *pipeline, *appsrc, *conv, *videosink;

  /* Pre-fill the two test frames: all-black and all-white RGB16. */
  for (int i = 0; i < 384*288; i++) { b_black[i] = 0; b_white[i] = 0xFFFF; }

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* setup pipeline: appsrc -> videoconvert -> xvimagesink */
  pipeline  = gst_pipeline_new ("pipeline");
  appsrc    = gst_element_factory_make ("appsrc", "source");
  conv      = gst_element_factory_make ("videoconvert", "conv");
  videosink = gst_element_factory_make ("xvimagesink", "videosink");
  if (!pipeline || !appsrc || !conv || !videosink) {
    g_printerr ("failed to create pipeline elements - are gst-plugins-base installed?\n");
    return 1;
  }

  /* caps: 384x288 RGB16; framerate 0/1 = variable rate, we timestamp by hand */
  g_object_set (G_OBJECT (appsrc), "caps",
  		gst_caps_new_simple ("video/x-raw",
					 "format", G_TYPE_STRING, "RGB16",
					 "width", G_TYPE_INT, 384,
					 "height", G_TYPE_INT, 288,
					 "framerate", GST_TYPE_FRACTION, 0, 1,
					 NULL), NULL);

  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, videosink, NULL);
  if (!gst_element_link_many (appsrc, conv, videosink, NULL)) {
    g_printerr ("failed to link pipeline elements\n");
    gst_object_unref (GST_OBJECT (pipeline));
    return 1;
  }

  /* setup appsrc: live source in TIME format so PTS/duration are honoured */
  g_object_set (G_OBJECT (appsrc),
			"stream-type", 0, // GST_APP_STREAM_TYPE_STREAM
			"format", GST_FORMAT_TIME,
			"is-live", TRUE,
			NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* hand-rolled main loop: push a frame whenever need-data asked for one,
   * then let GLib dispatch pending events (FALSE = do not block). */
  while (1) {
    prepare_buffer((GstAppSrc*)appsrc);
    g_main_context_iteration(g_main_context_default(),FALSE);
  }

  /* clean up (unreachable with the endless loop above; kept for reference) */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}
If I set g_main_context_iteration(g_main_context_default(), TRUE);
it blocks forever. How were you able to get that to work?
Ась?
@zacwitte g_main_context_wakeup (NULL)
I'm having issues running the script on Ubuntu 18.04 (on CentOS 8 it's working as is).
I have all of the gst libraries/headers installed.
When trying to run the executable, nothing happens: the script does compile and run, but the window with the black/white screen does not pop up.
Any idea what's the issue?
No idea, sorry. Just tested on a default 20.04 install, and everything works as expected...
I was able to compile the following with Ubuntu 20.04 in separate folder /usr/src/gst-appsrc/gst-appsrc.c and this produced outfile gst-appsrc.
gcc --std=c99 -Wall
How do I run the outfile gst-appsrc?
How do I instruct this to capture RTSP or UDP live streams?
Will this gst-appsrc.c provide example of how to create x-window name containing the live video stream with minimize, restore, maximize screen option?
@bobdavis512 this is just a minimal example for the appsrc component, with a static pipeline. You can start it via ./gst-appsrc
. It can't run anything else than pushing sample image data to an xvimagesink.
@floe, how come the buffer has a size of (width + 1) * height and not just width * height?
@blakat360 - good question, I would suppose that's just an old off-by-one error...
EDIT: tested and yes, no need for the extra width pixel, works fine with the correct width. Fixed.
@floe can you take a look at this? I've adapted your example but can't seem to flash between single-channel images (e.g. red/blue). If I use the RGB format it insists I pass a size of WIDTH * HEIGHT * CHANNELS + HEIGHT. Hence, I thought that your larger buffer related to that.
I'm not sure why this is the case that the docs seem to suggest it should just be straight RGB with no padding.
EDIT: My bad, if the width is a multiple of 3 it works.
Thanks for the code. I was getting 100% CPU usage though until I changed the while loop to use
g_main_context_iteration(g_main_context_default(), TRUE);