GStreamer and gdkpixbuf sink consuming huge amounts of memory - possible leak



I have a simple program that reads GdkPixbufs frame by frame from a video stream. The problem is that memory usage grows by 300-400 MB per second, so my 8 GB of RAM are used up in no time. Valgrind/Cachegrind don't point me at anything, so maybe this is a bug in GStreamer?

    // compiled with:
    //  g++ -std=c++11 `pkg-config --cflags gstreamer-1.0 gdk-pixbuf-2.0` test_c.cpp -o test `pkg-config --libs gstreamer-1.0 gdk-pixbuf-2.0`
    // usage:
    //  ./test -i video_file.mpg # MUST BE SAME FORMAT AS TEST IMAGE DATA PROVIDED IN main.cpp BELOW!!!
    #include <stdio.h>
    #include <stdlib.h>
    #include <string>
    #include <memory.h>
    #include <vector>
    #include <gst/gst.h>
    #include <gdk-pixbuf/gdk-pixbuf.h>
    #include <gst/controller/controller.h>  // GstPlugin
    #include <glib.h>
    #define APP_NAME    "test"
    typedef struct
    {
        int                                 nWidth;
        int                                 nHeight;
        GdkColorspace               ColorSpace;
        int                                 nBitsPerChannel;
        int                                 nChannels;
        gboolean                        bHasAlpha;
        int                                 nRowStride;
        gsize                               nByteLength;
        guchar                          *pPixels;
    } tSImage;
    typedef std::vector<tSImage *> tImagesList;
    GMainLoop                       *g_pMainLoop;
    gchar                               *g_pszInFile;
    GstElement                  *g_pPipeline;
    GstElement                  *g_pElementVideoSink;
    tImagesList                 g_ImagesList;
    // ----------------------------------------------------------------------------
    void EatLotsOfMemory(tSImage *pFrame)
    // ----------------------------------------------------------------------------
    {
        // access to pixel buffers leads to huge memory consumption!
        for(tImagesList::const_iterator iImage=g_ImagesList.begin(); iImage!=g_ImagesList.end(); iImage++)
        {
            unsigned long test = 0;
            for(int nPos=0; nPos < pFrame->nHeight * pFrame->nRowStride; nPos++)
            {
                if(pFrame->pPixels[nPos] == (*iImage)->pPixels[nPos])
                {
                    test++;
                }
            }
        }
    }
    ///////////////////////////////////////////////////////////////////////////////
    // ----------------------------------------------------------------------------
    bool ParsePixBufMessage(const GValue *val)
    // ----------------------------------------------------------------------------
    {
        gint64 pos;
        if(gst_element_query_position(g_pPipeline, GST_FORMAT_TIME, &pos))
            g_print("Time: %" GST_TIME_FORMAT " \r", GST_TIME_ARGS(pos));
        GdkPixbuf *pPixBuf = GDK_PIXBUF(g_value_dup_object(val));
        tSImage Image;
        Image.ColorSpace                = gdk_pixbuf_get_colorspace(pPixBuf);
        Image.nChannels                 = gdk_pixbuf_get_n_channels(pPixBuf);
        Image.bHasAlpha                 = gdk_pixbuf_get_has_alpha(pPixBuf);
        Image.nBitsPerChannel       = gdk_pixbuf_get_bits_per_sample(pPixBuf);
        Image.nWidth                        = gdk_pixbuf_get_width(pPixBuf);
        Image.nHeight                       = gdk_pixbuf_get_height(pPixBuf);
        Image.nRowStride                = gdk_pixbuf_get_rowstride(pPixBuf);
        Image.nByteLength               = gdk_pixbuf_get_byte_length(pPixBuf);
        Image.pPixels                       = gdk_pixbuf_get_pixels(pPixBuf);
        EatLotsOfMemory(&Image);
        g_object_unref(pPixBuf);
        return true;
    }
    // ----------------------------------------------------------------------------
    static gboolean BusCallback(GstBus *bus, GstMessage *msg, gpointer data)
    // ----------------------------------------------------------------------------
    {
        switch(GST_MESSAGE_TYPE(msg))
        {
            // ------------------------------------------------------------------------
            case GST_MESSAGE_EOS:
                g_print("End of stream\n");
                g_main_loop_quit(g_pMainLoop);
            break;
            // ------------------------------------------------------------------------
            case GST_MESSAGE_ERROR:
            {
                gchar  *debug;
                GError *error;
                gst_message_parse_error(msg, &error, &debug);
                g_free(debug);
                g_printerr("Error: %s\n", error->message);
                g_error_free(error);
                g_main_loop_quit(g_pMainLoop);
            }
            break;
            // ------------------------------------------------------------------------
            case GST_MESSAGE_ELEMENT:
            {
                if(GST_MESSAGE_SRC(msg) != GST_OBJECT_CAST(g_pElementVideoSink))
                    break;
                const GstStructure *pMsgStructure = gst_message_get_structure(msg);
                if(!gst_structure_has_name(pMsgStructure, "preroll-pixbuf") && !gst_structure_has_name (pMsgStructure, "pixbuf"))
                    break;
                const GValue *val = gst_structure_get_value(pMsgStructure, "pixbuf");
                if(val != NULL)
                    if(!ParsePixBufMessage(val))
                        g_main_loop_quit(g_pMainLoop);
            }
            break;
            // ------------------------------------------------------------------------
            default:
    //          g_print("msg %u: \"%s\"\n", GST_MESSAGE_TYPE(msg), GST_MESSAGE_TYPE_NAME(msg));
            break;
        }
        return TRUE;
    }
    // ----------------------------------------------------------------------------
    static void on_decoder_pad_added(GstElement *element, GstPad *pPad, gpointer data)
    // ----------------------------------------------------------------------------
    {
        GstElement *decoder = (GstElement *) data;
        GstCaps *pCaps = gst_pad_get_current_caps(pPad);
        GstStructure *pCapsStructure = gst_caps_get_structure(pCaps, 0);
        if(pCapsStructure != NULL)
        {
            const gchar *pszStructName = gst_structure_get_name(pCapsStructure);
            if(g_str_has_prefix(pszStructName, "video"))
            {
                GstPad *pSinkpad = gst_element_get_static_pad(decoder, "sink");
                if(pSinkpad != NULL)
                {
                    GstPadLinkReturn Ret = gst_pad_link(pPad, pSinkpad);
                    if(Ret != GST_PAD_LINK_OK)
                        g_print("failed to link pad! (%d)", Ret);
                    gst_object_unref(pSinkpad);
                }
            }
        }
    }
    // ----------------------------------------------------------------------------
    void Decode(gchar *pszFile)
    // ----------------------------------------------------------------------------
    {
        g_pMainLoop = g_main_loop_new(NULL, FALSE);
        g_pPipeline                                                 = gst_pipeline_new("pipeline");
        GstBin          *pBin                                       = GST_BIN(g_pPipeline);
        GstElement  *pElementSource                 = gst_element_factory_make("filesrc", "file-source");
        GstElement  *pElementDecoder                = gst_element_factory_make("decodebin", "decoder");
        GstElement  *pElementConvert                = gst_element_factory_make("videoconvert", "convert");  // "autovideoconvert" ?
        g_pElementVideoSink                                 = gst_element_factory_make("gdkpixbufsink", "pixbufsink");  // "imagefreeze" ? "autovideosink" ?
    //  g_pElementVideoSink                                 = gst_element_factory_make("fakesink", "fakesink"); // test
        if(pElementSource == NULL || pElementDecoder == NULL || pElementConvert == NULL || g_pElementVideoSink == NULL)
        {
            g_print("Failed to create elements\n");
            return;
        }
        // configure elements
        g_object_set(G_OBJECT(pElementSource),          "location", pszFile,    NULL);
        g_object_set(G_OBJECT(g_pElementVideoSink), "qos", FALSE, "max-lateness", (gint64) -1, "sync", FALSE, "async", FALSE, NULL);
        // set up pipeline
        gst_bin_add(pBin, pElementSource);
        // add elements to pipeline
        gst_bin_add_many(pBin, pElementDecoder, pElementConvert, g_pElementVideoSink, NULL);
        // link the elements together
        gboolean bOK;
        bOK = gst_element_link(pElementSource, pElementDecoder);
        bOK &= gst_element_link(pElementConvert, g_pElementVideoSink);
        if(!bOK)
        {
            g_print("Failed to link elements\n");
            return;
        }
        // link decoder's video-stream pad dynamically
        g_signal_connect(pElementDecoder, "pad-added", G_CALLBACK(on_decoder_pad_added), pElementConvert);
        // add message handler
        GstBus *pBus = gst_element_get_bus(g_pPipeline);
        guint bus_watch_id = gst_bus_add_watch(pBus, BusCallback, NULL /* app data */);
        // set the pipeline to "playing" state
        gst_element_set_state(g_pPipeline, GST_STATE_PLAYING);
        // iterate
        g_main_loop_run(g_pMainLoop);
        // out of the main loop, clean up nicely
        gst_element_set_state(g_pPipeline, GST_STATE_NULL);
        gst_object_unref(pBus);
        gst_object_unref(GST_OBJECT(g_pPipeline));
        g_source_remove(bus_watch_id);
        g_main_loop_unref(g_pMainLoop);
    }
    ///////////////////////////////////////////////////////////////////////////////
    // main
    ///////////////////////////////////////////////////////////////////////////////
    // ----------------------------------------------------------------------------
    bool ParseOptions(int argc, char *argv[])
    // ----------------------------------------------------------------------------
    {
        GError                  *err            = NULL;
        GOptionContext  *ctx;
        GOptionEntry        entries[] =
        {
            { "input",      'i', G_OPTION_FLAG_FILENAME,    G_OPTION_ARG_FILENAME,              &g_pszInFile,       "input video file",                                     NULL },
            { NULL }
        };
        ctx = g_option_context_new("- " APP_NAME);
        g_option_context_add_main_entries(ctx, entries, NULL);
        g_option_context_add_group(ctx, gst_init_get_option_group());
        if(!g_option_context_parse(ctx, &argc, &argv, &err))
        {
            g_print("Failed to initialize: %s\n", err->message);
            g_error_free(err);
            g_option_context_free(ctx);
            return false;
        }
        if(g_pszInFile == NULL)
        {
            g_print("%s", g_option_context_get_help(ctx, TRUE, NULL));
            g_option_context_free(ctx);
            return false;
        }
        g_option_context_free(ctx);
        return true;
    }
    // ----------------------------------------------------------------------------
    int main(int argc, char *argv[])
    // ----------------------------------------------------------------------------
    {
        gst_init(&argc, &argv); // must be called here
        if(!ParseOptions(argc, argv))
            return EXIT_FAILURE;
        // !!! test image data - must match video input format !!!
        for(int i=0; i<3; i++)
        {
            tSImage *pImage = new tSImage();
            pImage->nWidth = 720;
            pImage->nHeight = 576;
            pImage->ColorSpace = GDK_COLORSPACE_RGB;
            pImage->nBitsPerChannel = 8;
            pImage->nChannels = 3;
            pImage->bHasAlpha = false;
            pImage->nRowStride = pImage->nWidth * pImage->nChannels;
            pImage->nByteLength = 0;
            pImage->pPixels = new guchar[pImage->nHeight * pImage->nRowStride];
            memset(pImage->pPixels, (unsigned char) rand(), pImage->nHeight * pImage->nRowStride);
            g_ImagesList.push_back(pImage);
        }
        Decode(g_pszInFile);
        // clear images list
        for(tImagesList::iterator iImage=g_ImagesList.begin(); iImage!=g_ImagesList.end(); iImage++)
        {
            if((*iImage)->pPixels != NULL)
                delete [] (*iImage)->pPixels;
            delete (*iImage);
        }
        g_ImagesList.clear();
        return EXIT_SUCCESS;
    }

Running the application with --gst-debug-level=5 can help you spot the allocation and finalization of GST_BUFFER objects. It also shows when messages posted on the bus actually get consumed. I suspect something in here accumulates messages and never releases them, so your program's virtual memory image keeps growing while everything is still referenced - which is why valgrind does not report any real leak.
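For reference, the same thresholds can also be raised from code right after gst_init() instead of on the command line. A minimal sketch: gst_debug_set_default_threshold() and gst_debug_set_threshold_for_name() are core GStreamer calls, but treating the "GST_BUFFER" category at LOG level as the place where the per-buffer traces you need show up is an assumption to verify against your build.

    #include <gst/gst.h>

    int main(int argc, char *argv[])
    {
        gst_init(&argc, &argv);
        // same effect as passing --gst-debug-level=5 (GST_LEVEL_DEBUG == 5)
        gst_debug_set_default_threshold(GST_LEVEL_DEBUG);
        // or raise only the buffer category if the global log is too noisy
        // (assumption: buffer creation/finalization is traced here at LOG level)
        gst_debug_set_threshold_for_name("GST_BUFFER", GST_LEVEL_LOG);
        // ... build and run the pipeline exactly as in the program above ...
        return 0;
    }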

Pay particular attention to the bus: it tends to carry a lot of messages, and unless it is managed it will keep them around until someone asks for them...
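One thing that stands out in the program above: the sink is configured with "sync" and "async" disabled, so gdkpixbufsink posts a "pixbuf" message (each one holding a complete GdkPixbuf) for every frame as fast as decodebin can decode, while the GMainLoop drains the bus far more slowly. Below is a minimal sketch of two ways to keep that queue bounded, reusing the globals from the program above; whether re-enabling "sync" is acceptable for your use case is an assumption.

    // 1) Pace the sink against the pipeline clock so "pixbuf" messages are
    //    posted at roughly the stream's frame rate instead of at full
    //    decoding speed ("sync"/"async" are ordinary GstBaseSink properties).
    g_object_set(G_OBJECT(g_pElementVideoSink),
                 "sync", TRUE, "async", TRUE, NULL);

    // 2) On shutdown, flush anything still queued on the bus so pending
    //    messages - and the pixbufs they hold - are unreffed right away.
    GstBus *pBus = gst_element_get_bus(g_pPipeline);
    gst_bus_set_flushing(pBus, TRUE);
    gst_object_unref(pBus);

If every frame really must be processed faster than real time, the alternative is to pull messages more aggressively yourself, e.g. a dedicated thread looping on gst_bus_timed_pop_filtered(), rather than relying on the default GMainLoop bus watch.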
