#audio #stream #gstreamer #delay #rtmp
Вопрос:
Я хотел бы добавить задержку звука (до 10/15 секунд) в прямом эфире с помощью rtmpsink gstreamer. Это моя линия
gst-launch-1.0 -vvv flvmux streamable=true name=mux ! rtmpsink location="rtmp://localhost/live"
souphttpsrc location="http://<url video h264>" ! tsdemux ! h264parse ! queue ! mux.
souphttpsrc location="https://<url audio aac>" ! icydemux ! aacparse ! queue ! mux.
действуя непосредственно в строке, я попытался добавить к элементу queue параметры «max-size-buffers=0 max-size-time=0 max-size-bytes=0 min-threshold-time=15000000000» (после aacparse), но весь поток заблокирован таким образом
действуя в c, я попытался изменить временную метку pts в буфере aacparse pad, но я могу изменить любое значение, и это не имеет никакого эффекта
gst_pad_add_probe(line->aacparse_srcpad, GST_PAD_PROBE_TYPE_BUFFER, cb_have_data_audio, NULL, NULL);
..
static GstPadProbeReturn
cb_have_data_audio (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
GstMapInfo map;
GstBuffer *buffer;
buffer = GST_PAD_PROBE_INFO_BUFFER (info);
buffer = gst_buffer_make_writable (buffer);
if (buffer == NULL)
return GST_PAD_PROBE_OK;
GstClockTime pts = GST_BUFFER_PTS(buffer);
GST_BUFFER_PTS(buffer) = pts + 100000000000;
GST_PAD_PROBE_INFO_DATA (info) = buffer;
return GST_PAD_PROBE_OK;
}
Я также попытался использовать gst_pad_set_offset(), но опять безрезультатно
gst_pad_set_offset(line->aacparse_srcpad, 1000000000);
даже при игре с пэдами flvmux и установке «streamable=false» эффекта нет, каким должен быть правильный подход, чтобы добавить задержку только к звуку?
Ответ №1:
#include <gst/gst.h>
#include<stdio.h>
/* Global pipeline state shared between the callbacks below. */
static GMainLoop *loop;
static gint counter;                 /* pads still waiting to preroll; see dec_counter() */
static GstBus *bus;
static gboolean prerolled = FALSE;   /* set TRUE once every probe has blocked */
static GstPad *sinkpad,*ident_sink;  /* ident_sink is only used in commented-out code */
static GstClockTime ptimestamp=(guint64)1000;  /* PTS written by change_time() */
static GstClockTime dtimestamp= 0;             /* DTS written by change_time() */
/* Debug pad-probe callback: was used to dump/override buffer timestamps.
 * All experimental code is intentionally disabled; the probe just passes
 * buffers through.
 *
 * NOTE(review): for a GST_PAD_PROBE_TYPE_BUFFER probe the second parameter
 * should be a GstPadProbeInfo*, not a GstBuffer* — if this probe is ever
 * re-enabled, fetch the buffer with GST_PAD_PROBE_INFO_BUFFER (info).
 */
static GstPadProbeReturn
display_data (GstPad *pad,
    GstBuffer *apsInfo,
    gpointer user_data)
{
  /* Experimental timestamp code, deliberately disabled:
   *
   *   apsInfo = gst_buffer_ref (apsInfo);
   *   apsInfo = gst_buffer_make_writable (apsInfo);
   *   int fps = 30;
   *   ptimestamp = gst_util_uint64_scale_int (1, GST_SECOND, fps);
   *   GST_BUFFER_PTS (apsInfo) = ptimestamp;
   *   GST_BUFFER_DTS (apsInfo) = ptimestamp;
   *   GST_BUFFER_DURATION (apsInfo) = gst_util_uint64_scale_int (1, GST_SECOND, fps);
   */

  /* Fix: the function is declared to return GstPadProbeReturn but had no
   * return statement at all — reading the return value was undefined
   * behavior.  Always let the buffer pass. */
  return GST_PAD_PROBE_OK;
}
/* "handoff" callback connected to the identity element: stamps every buffer
 * with a fixed 1/30 s PTS/DTS/duration and marks it DISCONT.
 *
 * NOTE(review): the identity "handoff" signal actually has the signature
 * void handoff (GstElement *identity, GstBuffer *buffer, gpointer udata);
 * this function is invoked through a (GCallback) cast, so the declared
 * GstPad* parameter really receives the identity element — confirm before
 * dereferencing `pad`.
 *
 * Original experiment note (translated): adding gst_buffer_ref() +
 * gst_buffer_make_writable() here made the buffer report writable, yet the
 * edits stopped affecting the video (make_writable may return a copy);
 * without those calls the edits do take effect.
 */
static GstPadProbeReturn
change_time (GstPad *pad,
    GstBuffer *apsInfo,
    gpointer user_data)
{
  GST_BUFFER_FLAG_SET (apsInfo, GST_BUFFER_FLAG_DISCONT);

  const int fps = 30;
  dtimestamp = gst_util_uint64_scale_int (1, GST_SECOND, fps);
  ptimestamp = gst_util_uint64_scale_int (1, GST_SECOND, fps);

  GST_BUFFER_PTS (apsInfo) = ptimestamp;
  GST_BUFFER_DTS (apsInfo) = dtimestamp;

  /* Fix: was printf("%d %d n", a, b) — it truncated the 64-bit timestamps
   * to int and the newline escape had been lost. */
  printf ("%" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT "\n",
      (guint64) ptimestamp, (guint64) dtimestamp);

  GST_BUFFER_DURATION (apsInfo) = gst_util_uint64_scale_int (1, GST_SECOND, fps);

  /* Fix: missing return in a non-void function (undefined behavior). */
  return GST_PAD_PROBE_OK;
}
/* Decrement the preroll counter; when it hits zero, every decodebin pad has
 * been blocked and "no-more-pads" was seen, so post an application message
 * ("ExPrerolled") on the bus to trigger the seek in cb_message().
 */
static void
dec_counter (GstElement * pipeline)
{
  if (prerolled)
    return;

  /* Fix: "amp;counter" was an HTML-escaping artifact of "&counter". */
  if (g_atomic_int_dec_and_test (&counter)) {
    /* all probes blocked and no-more-pads signaled, post
     * message on the bus. */
    prerolled = TRUE;
    gst_bus_post (bus, gst_message_new_application (
        GST_OBJECT_CAST (pipeline),
        gst_structure_new_empty ("ExPrerolled")));
  }
}
/* Pad probe fired when a uridecodebin source pad has been blocked.
 * While prerolling, each blocked pad counts down the preroll counter and
 * the probe stays installed; once prerolled, the probe removes itself so
 * data can flow. */
static GstPadProbeReturn
cb_blocked (GstPad *pad,
    GstPadProbeInfo *info,
    gpointer user_data)
{
  GstElement *pipe = GST_ELEMENT (user_data);

  if (!prerolled) {
    dec_counter (pipe);
    return GST_PAD_PROBE_OK;
  }

  /* Preroll finished — this probe is no longer needed. */
  return GST_PAD_PROBE_REMOVE;
}
/* "pad-added" handler: uridecodebin exposed a new source pad.  Bump the
 * preroll counter, install a blocking probe on the pad, then try to link
 * it to the videoconvert sink pad (non-video pads simply fail to link). */
static void
cb_pad_added (GstElement *element,
    GstPad *pad,
    gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return;

  /* Fix: "amp;counter" was an HTML-escaping artifact of "&counter". */
  g_atomic_int_inc (&counter);

  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      (GstPadProbeCallback) cb_blocked, pipeline, NULL);

  /* try to link to the video pad */
  gst_pad_link (pad, sinkpad);
}
/* "no-more-pads" handler: uridecodebin has created all of its pads, so
 * release the initial counter reference taken in main(). */
static void
cb_no_more_pads (GstElement *element,
    gpointer user_data)
{
  if (!prerolled)
    dec_counter (GST_ELEMENT (user_data));
}
/* Bus message handler: quits the main loop on ERROR/EOS, and when the
 * custom "ExPrerolled" application message arrives (posted by
 * dec_counter()), flush-seeks to 0 and switches the pipeline to PLAYING. */
static void
cb_message (GstBus *bus,
    GstMessage *message,
    gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
      /* Fix: the "\n" escapes in these messages had been lost ("error!n"). */
      g_print ("we received an error!\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("we reached EOS\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_APPLICATION:
    {
      if (gst_message_has_name (message, "ExPrerolled")) {
        /* it's our message */
        g_print ("we are all prerolled, do seek\n");
        gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
            GST_SEEK_TYPE_SET, 0,
            GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
        gst_element_set_state (pipeline, GST_STATE_PLAYING);
      }
      break;
    }
    default:
      break;
  }
}
/* Builds uridecodebin ! videoconvert ! videoscale ! identity ! autovideosink,
 * blocks the decodebin source pads until everything has prerolled (the
 * classic "block-and-preroll" pattern), then seeks to 0 and plays.  The
 * identity element's "handoff" signal rewrites buffer timestamps via
 * change_time(). */
gint
main (gint argc,
    gchar *argv[])
{
  GstElement *pipeline, *src, *csp, *vs, *sink, *idelem;

  /* init GStreamer (fix: "amp;" HTML artifacts restored to "&") */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* build */
  pipeline = gst_pipeline_new ("my-pipeline");
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", (GCallback) cb_message, pipeline);

  src = gst_element_factory_make ("uridecodebin", "src");
  if (src == NULL)
    g_error ("Could not create 'uridecodebin' element");
  g_object_set (src, "uri", "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4", NULL);

  csp = gst_element_factory_make ("videoconvert", "csp");
  if (csp == NULL)
    g_error ("Could not create 'videoconvert' element");

  vs = gst_element_factory_make ("videoscale", "vs");
  /* Fix: originally re-checked `csp == NULL` here (copy-paste bug). */
  if (vs == NULL)
    g_error ("Could not create 'videoscale' element");

  idelem = gst_element_factory_make ("identity", "identity-elem");
  if (idelem == NULL)
    g_error ("Could not create 'idelem' ");

  sink = gst_element_factory_make ("autovideosink", "sink");
  if (sink == NULL)
    g_error ("Could not create 'autovideosink' element");

  gst_bin_add_many (GST_BIN (pipeline), src, csp, vs, idelem, sink, NULL);
  /* can't link src yet, it has no pads */
  gst_element_link_many (csp, vs, idelem, sink, NULL);

  sinkpad = gst_element_get_static_pad (csp, "sink");

  /* Fix: we already hold `idelem`, so configure it directly.  The original
   * re-fetched it with gst_bin_get_by_name(), which returns an extra
   * reference that was never unreffed — a leak. */
  g_object_set (G_OBJECT (idelem), "signal-handoffs", TRUE, NULL); /* change time */
  g_signal_connect (idelem, "handoff", (GCallback) change_time, pipeline);

  g_atomic_int_set (&counter, 1);

  g_signal_connect (src, "pad-added",
      (GCallback) cb_pad_added, pipeline);
  g_signal_connect (src, "no-more-pads",
      (GCallback) cb_no_more_pads, pipeline);

  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (sinkpad);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);

  return 0;
}