Question:

GStreamer RTSP application for audio and video

李星波
2023-03-14

I am trying to develop an application for the following pipeline:

gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
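For reference, the same pipeline string can be driven from C with gst_parse_launch() before wiring the elements by hand; a minimal sketch, assuming the same URL as above:

#include <gst/gst.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    /* Build the whole pipeline from the gst-launch description. */
    GError *error = NULL;
    GstElement *pipeline = gst_parse_launch(
        "rtspsrc location=rtsp://192.168.3.30:8554/rajvi latency=0 name=demux "
        "demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert "
        "! audioresample ! autoaudiosink "
        "demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert "
        "! videoscale ! video/x-raw,width=176,height=144 ! ximagesink",
        &error);
    if (pipeline == NULL) {
        g_printerr("Parse error: %s\n", error->message);
        g_clear_error(&error);
        return -1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Block until an error or end-of-stream message arrives on the bus. */
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}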

Here is the code I have implemented:

#include <gst/gst.h>

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    gchar *name;

    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);

    GstCaps *p_caps = gst_pad_get_pad_template_caps (pad);
    gchar *description = gst_caps_to_string(p_caps);
    g_print("Pad caps: %s\n", description);
    g_free(description);
    gst_caps_unref(p_caps);

    GstElement *depay = GST_ELEMENT(data);
    if (!gst_element_link_pads(element, name, depay, "sink"))
    {
            g_print("cb_new_rtspsrc_pad : failed to link elements \n");
    }

    g_free(name);
}

int main(int argc, char *argv[]) {
    GstElement *source, *video, *pipeline, *audioDepay, *audioQueue, *videoQueue,
               *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
               *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *sinkpad, *ghost_sinkpad;
    gboolean link_ok;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");


    /*audio bin*/
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
            g_printerr("Cannot create audio elements \n");
            return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                    audioConvert, audioResample, audioSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample,  audioSink, NULL))
    {
            g_printerr("Error linking fields ...1 \n");
            return 0;
    }

    video  = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                    "width", G_TYPE_INT, 176,
                    "height", G_TYPE_INT, 144,
                    NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
            g_printerr("Cannot create video elements \n");
            return 0;
    }

    gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                    videoSink, NULL);
    /* set property value */
    link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
            g_warning ("Failed to link element1 and element2!");
    }

    sinkpad = gst_element_get_static_pad (videoConvert, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
    {
            g_printerr("Error linking fields... 2 \n");
            return 0;
    }

    gst_bin_add_many (GST_BIN(pipeline), video,NULL);
  /* Start playing */
    gst_element_set_state ( pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
            gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}

The error occurs when linking the pipeline -

1 answer in total

公良高刚
2023-03-14

If you put both the video and the audio into the pipeline bin, you can make this work. Figure out what the caps for the video and the audio are, and you should then be able to link them.
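To see what those caps actually are, you can print them from the pad-added callback; a minimal sketch, assuming rtspsrc has negotiated caps for the new pad by the time the signal fires (with a caps query as fallback). inspect_pad is a hypothetical helper you would call from onPadAdded:

#include <gst/gst.h>

/* Hypothetical helper: dump a new pad's caps. For rtspsrc the caps are
 * application/x-rtp, and the "media" field is "audio" or "video". */
static void inspect_pad(GstPad *pad)
{
    GstCaps *caps = gst_pad_get_current_caps(pad);  /* negotiated caps */
    if (caps == NULL)
        caps = gst_pad_query_caps(pad, NULL);       /* fallback */

    GstStructure *s = gst_caps_get_structure(caps, 0);
    const gchar *media = gst_structure_get_string(s, "media");
    gchar *desc = gst_caps_to_string(caps);
    g_print("new pad caps: %s (media=%s)\n", desc, media ? media : "unknown");
    g_free(desc);
    gst_caps_unref(caps);
}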

#include <gst/gst.h>
#include <string.h>
#include <iostream>

// ----------------------------------
//  pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
  gchar *name;
  gchar *description;
  GstCaps *p_caps;
  GstElement *nextElement = NULL;  /* initialized so the check below is safe */
  GstElement *pipeline = (GstElement*)user_data;

  name = gst_pad_get_name(pad);
  g_print("A new pad %s was created\n", name);

  /* Use the negotiated caps: the pad template caps of rtspsrc are just
   * "application/x-rtp" and do not tell audio and video apart. */
  p_caps = gst_pad_get_current_caps(pad);
  if (p_caps == NULL)
    p_caps = gst_pad_query_caps(pad, NULL);
  description = gst_caps_to_string(p_caps);
  g_print("Pad caps: %s\n", description);

  /* Replace the bracketed placeholders with substrings that occur in the
   * caps printed above. */
  if (strstr(description, "[CAPS FOR VIDEO CONTAIN]") != NULL)
  {
    std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;

    nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
  }
  else if (strstr(description, "[CAPS FOR AUDIO CONTAIN]") != NULL)
  {
    std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;

    nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");

  }
  if (nextElement != NULL)
  {
    if (!gst_element_link_filtered(element, nextElement, p_caps))
        //if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
    {
      std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
    }
    gst_object_unref(nextElement);
  }

  g_free(name);
  g_free(description);
  gst_caps_unref(p_caps);
}
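Concretely, the bracketed placeholders should be replaced with substrings that occur in the caps you printed. For rtspsrc the pads carry application/x-rtp caps whose media field names the stream type, so "media=(string)video" and "media=(string)audio" typically work. An illustrative serialized caps string (not output from the poster's server):

application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)H264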

// ----------------------------------
//  main
// ----------------------------------
int main(int argc, char *argv[]) 
{
    GstElement *source, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
        *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    gboolean link_ok;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make("rtspsrc", "source");


    /*audio bin*/
    audioQueue = gst_element_factory_make("queue", "audio-queue");
    audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make("audioconvert", "aconv");
    audioResample = gst_element_factory_make("audioresample", "audio-resample");
    audioSink = gst_element_factory_make("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
        audioConvert, audioResample, audioSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    videoQueue = gst_element_factory_make("queue", "video-queue");
    videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make("h264parse", "video-parser");
    videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 176,
        "height", G_TYPE_INT, 144,
        NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
        videoSink, NULL);

    /* Link the video chain up to videoscale, then apply the caps filter
     * between videoscale and the sink, mirroring the gst-launch pipeline. */
    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }

    link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
    gst_caps_unref(capsFilter);
    if (!link_ok) {
        g_warning("Failed to link videoscale and ximagesink!");
    }

    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
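A build-and-run sketch, assuming GStreamer 1.x development files are installed and the file is saved as rtsp-app.cpp (a hypothetical name; g++ because of the std::cout calls):

g++ rtsp-app.cpp -o rtsp-app $(pkg-config --cflags --libs gstreamer-1.0)
./rtsp-app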