Wie push OpenCV Bilder in GStreamer-Pipeline zu streamen über den tcpserver gestartet-Spüle

Ich versuche zu schieben Bilder erstellt von OpenCV in der GStreamer-Pipeline, um das Streaming eines Videos über das GStreamer-TCPServerSink.

Meine GStreamer-Pipeline sieht wie folgt aus: AppSrc -> FFMpegColorSpace -> VP8Enc -> WebMMux -> TCPServerSink
Die AppSrc Zufuhr ist durch die Schaffung von OpenCV-Bilder, und schieben Sie Sie in Appsrc über gst_app_src_push_buffer.

Ich geschrieben habe ein kleines Testprogramm namens "Sandbox", zeigt sich der Fehler im GStreamer, wenn das Programm ausgeführt wird.

Mein Testsystem sieht wie folgt aus:

  • Gentoo
  • Kernel: 3.10.0-rc3
  • OpenCV: 2.4.5
  • GStreamer: 0.10.36

Ich möchte verstehen, warum dieser Fehler Auftritt, was ich falsch mache und wie eine funktionierende Lösung Aussehen könnte?

Ich hoffe, dass Sie mir helfen können das Problem zu lösen.

Ich habe eine Mini-CMake-Projekt mit dem code der den Fehler erzeugt. Es besteht aus zwei Dateien, nämlich CMakeLists.txt und Sandbox.cpp. CMakeLists.txt ist die Konfiguration über howto erstellen der Sandbox-Programm enthalten in Sandbox.cpp.
Bauen Sie einfach ein Verzeichnis erstellen, z.B. "Build", wechseln Sie in dieses und starten Sie eine Out-of-Source Build mit:

cmake ../

Nun CMake erstellt alle benötigten Dateien, wenn Sie die Abhängigkeiten installiert haben, so können Sie einfach geben:

make

das Programm selbst zu erstellen.

####################################################################################################
# Projectinformations
####################################################################################################
project( Sandbox CXX )
cmake_minimum_required( VERSION 2.8 )

####################################################################################################
# G++ Options
####################################################################################################
set( CMAKE_C_COMPILER "gcc" )
set( CMAKE_CXX_COMPILER "g++" )

set( CMAKE_CXX_FLAGS "-g -O0 -std=c++11 -ggdb -Wall -W -Wunused-variable -Wunused-parameter -Wunused-function -Wunused -Woverloaded-virtual -Wwrite-strings --coverage" )
set( CMAKE_C_FLAGS ${CMAKE_CXX_FLAGS} )

####################################################################################################
# Resolve Dependencies
####################################################################################################
set( Boost_DEBUG 1 )
set( Boost_USE_MULTITHREADED 1 )
find_package( Boost 1.53 REQUIRED system thread timer )

find_package( PkgConfig )
pkg_check_modules( GSTREAMER_0_10 gstreamer-0.10 )
pkg_check_modules( GSTREAMER_0_10_APP gstreamer-app-0.10 )
include_directories( ${GSTREAMER_0_10_INCLUDE_DIRS} )
include_directories( ${GSTREAMER_0_10_APP_INCLUDE_DIRS} )

find_package( OpenCV REQUIRED )

####################################################################################################
# Project
####################################################################################################
add_executable( Sandbox Sandbox.cpp )

Das Programm selbst sieht wie folgt aus:

//Standard C++ Libraries
#include <iostream>
#include <sstream>
#include <string>

//Boost Libraries
#include <boost/asio.hpp>
#include <boost/make_shared.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/thread.hpp>

//GStreamer
#include <gstreamer-0.10/gst/gst.h>
#include <gstreamer-0.10/gst/gstelement.h>
#include <gstreamer-0.10/gst/gstpipeline.h>
#include <gstreamer-0.10/gst/gstutils.h>
#include <gstreamer-0.10/gst/app/gstappsrc.h>

//OpenCV
//#include "cv.h"
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>


GMainLoop *glib_MainLoop;
unsigned int heartbeat_Intervall; ///< In Milliseconds
boost::shared_ptr<boost::asio::deadline_timer> heartbeat;

GstElement *source_OpenCV;
guint64 imagecounter;

void
GLib_MainLoop() {
    if( !g_main_loop_is_running( glib_MainLoop ) ) {
        std::cout << "Starting glib_MainLoop..." << std::endl;
        g_main_loop_run( glib_MainLoop );
        std::cout << "Starting glib_MainLoop stopped." << std::endl;
    }
};

///Creates an Image with a red filled Circle and the current Time displayed in it.
cv::Mat
Create_Image() {
    cv::Size size = cv::Size( 640, 480 );
    cv::Mat image = cv::Mat::zeros( size, CV_8UC3 );
    cv::Point center = cv::Point( size.width / 2, size.height / 2 );
    int thickness = -1;
    int lineType = 8;

    cv::circle( image, center, size.width / 4.0, cv::Scalar( 0, 0, 255 ), thickness, lineType );

    std::stringstream current_Time;
    boost::posix_time::time_facet *facet = new boost::posix_time::time_facet( "%Y.%m.%d %H:%M:%S.%f" );
    current_Time.imbue( std::locale( current_Time.getloc(), facet ) );
    current_Time << boost::posix_time::microsec_clock::universal_time();

    int font = cv::FONT_HERSHEY_SCRIPT_SIMPLEX;
    double fontsize = 1;
    int font_Thickness = 1;

    int baseline = 0;
    cv::Size textSize_01 = cv::getTextSize( current_Time.str(), font, fontsize, font_Thickness , &baseline );
    baseline += font_Thickness;

    cv::Point textOrg_01( ( image.cols - textSize_01.width ) / 2, ( image.rows + textSize_01.height * 2 ) / 2 );
    cv::Scalar textcolour = cv::Scalar( 0, 255, 0 );
    cv::putText( image, current_Time.str(), textOrg_01, font, fontsize, textcolour , font_Thickness , 1 );

    return image;
}

///Creates a Graph of the created Pipeline including the contained Elements. The environment variable "GST_DEBUG_DUMP_DOT_DIR" must be set, e.g to /tmp/to actually create the Graph.
///Furthermore GST_DEBUG needs to be activated, e.g. with "GST_DEBUG=3".
///So "GST_DEBUG=3 GST_DEBUG_DUMP_DOT_DIR=/tmp/" ./Sandbox would work.
///The .dot file can be converted to a e.g. svg-Graphic with the following command (Package GraphViz): dot -Tsvg -oPipelineGraph.svg PipelineGraph.dot
void
Create_PipelineGraph( GstElement *pipeline ) {
    bool debug_active = gst_debug_is_active();
    gst_debug_set_active( 1 );
    GST_DEBUG_BIN_TO_DOT_FILE( GST_BIN( pipeline ), GST_DEBUG_GRAPH_SHOW_ALL, "PipelineGraph" );
    gst_debug_set_active( debug_active );
}

void
Push_new_Image( const boost::system::error_code &error ) {
    if( error != 0 ) {
        std::cout << "Error in Timer: " << error.message() << std::endl;
        return;
    }

    cv::Mat image = Create_Image();

    ///OpenCV handles image in BGR, so to get RGB, Channels R and B needs to be swapped.
    cv::cvtColor( image, image, CV_CVTIMG_SWAP_RB );

    {
        ///How do i get the actual bpp and depth out of the cv::Mat?
        GstCaps *caps = gst_caps_new_simple( "video/x-raw-rgb", "width", G_TYPE_INT, image.cols, "height", G_TYPE_INT, image.rows, "framerate", GST_TYPE_FRACTION, 0, 1, NULL );
        g_object_set( G_OBJECT( source_OpenCV ), "caps", caps, NULL );
        gst_caps_unref( caps );

        IplImage* img = new IplImage( image );
        uchar *IMG_data = ( uchar* ) img->imageData;

        GstBuffer *buffer;
        {
            int bufferlength = image.cols * image.rows * image.channels();
            buffer = gst_buffer_new_and_alloc( bufferlength );

            ///Copy Data from OpenCV to GStreamer
            memcpy( GST_BUFFER_DATA( buffer ), IMG_data, GST_BUFFER_SIZE( buffer ) );

            GST_BUFFER_DURATION( buffer ) = gst_util_uint64_scale( bufferlength, GST_SECOND, 1 );
        }

        ///Setting the Metadata for the image to be pushed.
        {
            GstCaps *caps_Source = NULL;

            std::stringstream video_caps_text;
            video_caps_text << "video/x-raw-rgb,width=(int)" << image.cols << ",height=(int)" << image.rows << ",framerate=(fraction)0/1";
            caps_Source = gst_caps_from_string( video_caps_text.str().c_str() );

            if( !GST_IS_CAPS( caps_Source ) ) {
                std::cout << "Error creating Caps for OpenCV-Source, exiting...";
                exit( 1 );
            }

            gst_app_src_set_caps( GST_APP_SRC( source_OpenCV ), caps_Source );
            gst_buffer_set_caps( buffer, caps_Source );
            gst_caps_unref( caps_Source );
        }

        ///Setting a continues timestamp
        GST_BUFFER_TIMESTAMP( buffer ) = gst_util_uint64_scale( imagecounter * 20, GST_MSECOND, 1 );
        imagecounter += 1;

        ///Push Buffer into GStreamer-Pipeline
        GstFlowReturn rw;
        rw = gst_app_src_push_buffer( GST_APP_SRC( source_OpenCV ), buffer );

        if( rw != GST_FLOW_OK ) {
            std::cout << "Error push buffer to GStreamer-Pipeline, exiting...";

            exit( 1 );
        } else {
            std::cout << "GST_FLOW_OK " << "imagecounter: " << imagecounter << std::endl;
        }

    }

    ///Renew the Heartbeat-Timer
    heartbeat->expires_from_now( boost::posix_time::milliseconds( heartbeat_Intervall ) );
    heartbeat->async_wait( Push_new_Image );
}

int
main( int argc, char **argv ) {
    std::cout << "Sandbox started." << std::endl;

    ///####################
    ///Initialise Sandbox
    ///####################
    boost::shared_ptr<boost::asio::io_service> io_service = boost::make_shared<boost::asio::io_service>();
    boost::shared_ptr<boost::asio::io_service::work> work = boost::make_shared<boost::asio::io_service::work>( *io_service );
    boost::shared_ptr<boost::thread_group> threadgroup = boost::make_shared<boost::thread_group>();

    ///io_service Callback for continuously feeding into the pipeline of GStreamer.
    ///I've using to push the Buffer into GStreamer as i come available instead of getting informed about an empty pipeline by GStreamer-Signals.
    heartbeat_Intervall = 1000; ///< In Milliseconds
    heartbeat = boost::make_shared<boost::asio::deadline_timer>( ( *( io_service.get() ) ) );

    std::cout << "Initialise GStreamer..." << std::endl;
    gst_init( &argc, &argv );

    glib_MainLoop = g_main_loop_new( NULL, 0 );

    std::cout << "Start GLib_MainLoop..." << std::endl;
    io_service->post( GLib_MainLoop );

    ///Create some Workerthreads
    for( std::size_t i = 0; i < 3; ++i )  {
        threadgroup->create_thread( boost::bind( &boost::asio::io_service::run, &( *io_service ) ) );
    }

    ///####################
    ///Do the actual Work
    ///####################
    GstElement *pipeline;
    GstElement *converter_FFMpegColorSpace;
    GstElement *converter_VP8_Encoder;
    GstElement *muxer_WebM;
    GstElement *sink_TCPServer;


    ///Create GStreamer Elements

    pipeline = gst_pipeline_new( "OpenCV_to_TCPServer" );

    if( !pipeline ) {
        std::cout << "Error creating Pipeline, exiting...";
        return 1;
    }

    {
        source_OpenCV = gst_element_factory_make( "appsrc", "Source_OpenCV" );

        if( !source_OpenCV ) {
            std::cout << "Error creating OpenCV-Source, exiting...";
            return 1;
        }

        gst_bin_add( GST_BIN( pipeline ), source_OpenCV );
    }

    {
        converter_FFMpegColorSpace = gst_element_factory_make( "ffmpegcolorspace", "Converter_FFMpegColorSpace" );

        if( !converter_FFMpegColorSpace ) {
            std::cout << "Error creating Converter_FFMpegColorSpace, exiting...";
            return 1;
        }

        gst_bin_add( GST_BIN( pipeline ), converter_FFMpegColorSpace );
    }

    {
        converter_VP8_Encoder = gst_element_factory_make( "vp8enc", "Converter_VP8_Encoder" );

        if( !converter_VP8_Encoder ) {
            std::cout << "Error creating Converter_VP8_Encoder, exiting...";
            return 1;
        }

        gst_bin_add( GST_BIN( pipeline ), converter_VP8_Encoder );
    }

    {
        muxer_WebM = gst_element_factory_make( "webmmux", "Muxer_WebM" );

        if( !muxer_WebM ) {
            std::cout << "Error creating Muxer_WebM, exiting...";
            return 1;
        }

        gst_bin_add( GST_BIN( pipeline ), muxer_WebM );
    }

    {
        sink_TCPServer = gst_element_factory_make( "tcpserversink", "Sink_TCPServer" );

        if( !sink_TCPServer ) {
            std::cout << "Error creating Sink_TCPServer, exiting...";
            return 1;
        }

        gst_bin_add( GST_BIN( pipeline ), sink_TCPServer );
    }


    ///Link GStreamer Elements

    if( !gst_element_link( source_OpenCV, converter_FFMpegColorSpace ) ) {
        std::cout << "Error linking creating source_OpenCV to converter_FFMpegColorSpace, exiting...";
        return 2;
    }

    if( !gst_element_link( converter_FFMpegColorSpace, converter_VP8_Encoder ) ) {
        std::cout << "Error linking creating converter_FFMpegColorSpace to converter_VP8_Encoder, exiting...";
        return 2;
    }

    if( !gst_element_link( converter_VP8_Encoder, muxer_WebM ) ) {
        std::cout << "Error linking creating converter_VP8_Encoder to muxer_WebM, exiting...";
        return 2;
    }

    if( !gst_element_link( muxer_WebM, sink_TCPServer ) ) {
        std::cout << "Error linking creating muxer_WebM to sink_TCPServer, exiting...";
        return 2;
    }


    ///Set State of the GStreamer Pipeline to Playing
    GstStateChangeReturn ret = gst_element_set_state( pipeline, GST_STATE_PLAYING );

    if( ret == GST_STATE_CHANGE_FAILURE ) {
        std::cout << "Error setting GStreamer-Pipeline to playing.";
        return 2;
    }

    Create_PipelineGraph( pipeline );


    ///Start the Heartbeat, that continously creates new Images
    heartbeat->expires_from_now( boost::posix_time::milliseconds( heartbeat_Intervall ) );
    heartbeat->async_wait( Push_new_Image );

    ///####################
    ///Shutdown the Sandbox
    ///####################
    std::cout << "Wait some Seconds before joining all Threads and shutdown the Sandbox..." << std::endl;
    boost::this_thread::sleep( boost::posix_time::seconds( 4 ) );

    std::cout << "Shutdown Sandbox..." << std::endl;
    g_main_loop_quit( glib_MainLoop );
    io_service->stop();

    while( !io_service->stopped() ) {
        boost::this_thread::sleep( boost::posix_time::seconds( 1 ) );
    }

    work.reset();
    threadgroup->join_all();

    g_main_loop_unref( glib_MainLoop );

    threadgroup.reset();
    work.reset();
    io_service.reset();

    std::cout << "Sandbox stopped" << std::endl;
}

target_link_libraries( Sandbox ${Boost_LIBRARIES} ${GSTREAMER_0_10_LIBRARIES} ${GSTREAMER_0_10_APP_LIBRARIES} ${OpenCV_LIBS} )

set_target_properties( Sandbox PROPERTIES LINKER_LANGUAGE CXX )

Ich bin ab dem Sandbox-Programm wie folgt:

LC_ALL="C" GST_DEBUG=3 GST_DEBUG_DUMP_DOT_DIR=/tmp/ ./Sandbox

Dann ein Diagramm der aktuellen Pipeline soll erstellt werden in /tmp/. Diese .dot-Datei umgewandelt werden kann, z.B. in eine svg-Grafik mit:

dot -Tsvg -oPipelineGraph.svg PipelineGraph.dot

Direkt nachdem der Fehler Auftritt. Hier ist eine kurze Botschaft, eingeholt über GST_DEBUG=3:

...
0:00:00.141888460 28057      0x2245d90 INFO                 basesrc gstbasesrc.c:2562:gst_base_src_loop:<Source_OpenCV> pausing after gst_pad_push() = not-negotiated
0:00:00.141924274 28057      0x2245d90 WARN                 basesrc gstbasesrc.c:2625:gst_base_src_loop:<Source_OpenCV> error: Internal data flow error.
0:00:00.141937917 28057      0x2245d90 WARN                 basesrc gstbasesrc.c:2625:gst_base_src_loop:<Source_OpenCV> error: streaming task paused, reason not-negotiated (-4)
0:00:00.141965714 28057      0x2245d90 INFO        GST_ERROR_SYSTEM gstelement.c:1964:gst_element_message_full:<Source_OpenCV> posting message: Internal data flow error.
0:00:00.141998959 28057      0x2245d90 INFO        GST_ERROR_SYSTEM gstelement.c:1987:gst_element_message_full:<Source_OpenCV> posted error message: Internal data flow error.
0:00:00.142018539 28057      0x2245d90 ERROR                 vp8enc gstvp8enc.c:1028:gst_vp8_enc_finish:<Converter_VP8_Encoder> encode returned 1 error
0:00:00.142053733 28057      0x2245d90 INFO             matroskamux matroska-mux.c:2226:gst_matroska_mux_start:<ebmlwrite0> DocType: webm, Version: 2
0:00:00.142082043 28057      0x2245d90 INFO               ebmlwrite ebml-write.c:218:gst_ebml_writer_send_new_segment_event: seeking to 0
0:00:00.142093688 28057      0x2245d90 INFO               GST_EVENT gstevent.c:606:gst_event_new_new_segment_full: creating newsegment update 0, rate 1.000000, format bytes, start 0, stop -1, position 0
...

Mit GST_DEBUG=4 sieht es so aus:

...
0:00:02.464122744 28483      0x24b8590 DEBUG               GST_CAPS gstpad.c:2925:gst_pad_get_allowed_caps:<Converter_VP8_Encoder:src> allowed caps video/x-vp8, width=(int)[ 16, 4096 ], height=(int)[ 16, 4096 ], framerate=(fraction)[ 0/1, 2147483647/1 ]
0:00:02.464152446 28483      0x24b8590 DEBUG               GST_CAPS gstpad.c:2263:gst_pad_get_caps_unlocked:<Converter_VP8_Encoder:sink> pad getcaps returned video/x-raw-yuv, width=(int)[ 16, 4096 ], height=(int)[ 16, 4096 ], framerate=(fraction)[ 0/1, 2147483647/1 ], format=(fourcc)I420
0:00:02.464174847 28483      0x24b8590 DEBUG               GST_PADS gstpad.c:2577:gst_pad_acceptcaps_default:<Converter_VP8_Encoder:sink> allowed caps video/x-raw-yuv, width=(int)[ 16, 4096 ], height=(int)[ 16, 4096 ], framerate=(fraction)[ 0/1, 2147483647/1 ], format=(fourcc)I420
0:00:02.464198411 28483      0x24b8590 DEBUG               GST_PADS gstpad.c:2629:gst_pad_accept_caps:<Converter_VP8_Encoder:sink> acceptfunc returned 1
0:00:02.464210382 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:1082:gst_base_transform_find_transform:<Converter_FFMpegColorSpace> Input caps were video/x-raw-rgb, width=(int)640, height=(int)480, framerate=(fraction)0/1, and got final caps video/x-raw-yuv, width=(int)640, height=(int)480, framerate=(fraction)0/1, format=(fourcc)I420
0:00:02.464236195 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:810:gst_base_transform_configure_caps:<Converter_FFMpegColorSpace> in caps:  video/x-raw-rgb, width=(int)640, height=(int)480, framerate=(fraction)0/1
0:00:02.464252757 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:811:gst_base_transform_configure_caps:<Converter_FFMpegColorSpace> out caps: video/x-raw-yuv, width=(int)640, height=(int)480, framerate=(fraction)0/1, format=(fourcc)I420
0:00:02.464271709 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:819:gst_base_transform_configure_caps:<Converter_FFMpegColorSpace> have_same_caps: 0
0:00:02.464282177 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:2921:gst_base_transform_set_in_place:<Converter_FFMpegColorSpace> setting in_place FALSE
0:00:02.464292338 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:2860:gst_base_transform_set_passthrough:<Converter_FFMpegColorSpace> set passthrough 0
0:00:02.464302369 28483      0x24b8590 DEBUG          basetransform gstbasetransform.c:834:gst_base_transform_configure_caps:<Converter_FFMpegColorSpace> Calling set_caps method to setup caps
0:00:02.464318339 28483      0x24b8590 DEBUG       ffmpegcolorspace gstffmpegcolorspace.c:320:gst_ffmpegcsp_set_caps:<Converter_FFMpegColorSpace> could not configure context for input format
0:00:02.464329667 28483      0x24b8590 WARN           basetransform gstbasetransform.c:1311:gst_base_transform_setcaps:<Converter_FFMpegColorSpace> FAILED to configure caps <Converter_FFMpegColorSpace:src> to accept video/x-raw-yuv, width=(int)640, height=(int)480, framerate=(fraction)0/1, format=(fourcc)I420
0:00:02.464352143 28483      0x24b8590 DEBUG               GST_CAPS gstpad.c:2773:gst_pad_set_caps:<Converter_FFMpegColorSpace:sink> caps video/x-raw-rgb, width=(int)640, height=(int)480, framerate=(fraction)0/1 could not be set
0:00:02.464504648 28483      0x24b8590 INFO                 basesrc gstbasesrc.c:2562:gst_base_src_loop:<Source_OpenCV> pausing after gst_pad_push() = not-negotiated
0:00:02.464517617 28483      0x24b8590 DEBUG                basesrc gstbasesrc.c:2588:gst_base_src_loop:<Source_OpenCV> pausing task, reason not-negotiated
0:00:02.464531137 28483      0x24b8590 DEBUG               GST_PADS gstpad.c:5646:gst_pad_pause_task:<Source_OpenCV:src> pause task
0:00:02.464543705 28483      0x24b8590 DEBUG                   task gsttask.c:698:gst_task_set_state:<Source_OpenCV:src> Changing task 0x24c9000 to state 2
0:00:02.464585246 28483      0x24b8590 DEBUG              GST_EVENT gstevent.c:269:gst_event_new: creating new event 0x24b8940 eos 86
0:00:02.464618078 28483      0x24b8590 WARN                 basesrc gstbasesrc.c:2625:gst_base_src_loop:<Source_OpenCV> error: Internal data flow error.
0:00:02.464631538 28483      0x24b8590 WARN                 basesrc gstbasesrc.c:2625:gst_base_src_loop:<Source_OpenCV> error: streaming task paused, reason not-negotiated (-4)
0:00:02.464645609 28483      0x24b8590 DEBUG            GST_MESSAGE gstelement.c:1933:gst_element_message_full:<Source_OpenCV> start
0:00:02.464672918 28483      0x24b8590 INFO        GST_ERROR_SYSTEM gstelement.c:1964:gst_element_message_full:<Source_OpenCV> posting message: Internal data flow error.
0:00:02.464697059 28483      0x24b8590 DEBUG                GST_BUS gstbus.c:308:gst_bus_post:<bus0> [msg 0x2325680] posting on bus, type error, GstMessageError, gerror=(GError)NULL, debug=(string)"gstbasesrc.c\(2625\):\ gst_base_src_loop\ \(\):\ /GstPipeline:OpenCV_to_TCPServer/GstAppSrc:Source_OpenCV:\012streaming\ task\ paused\,\ reason\ not-negotiated\ \(-4\)"; from source <Source_OpenCV>
0:00:02.464727109 28483      0x24b8590 DEBUG                    bin gstbin.c:3164:gst_bin_handle_message_func:<OpenCV_to_TCPServer> [msg 0x2325680] handling child Source_OpenCV message of type error
0:00:02.464739521 28483      0x24b8590 DEBUG                    bin gstbin.c:3171:gst_bin_handle_message_func:<OpenCV_to_TCPServer> got ERROR message, unlocking state change
0:00:02.464750463 28483      0x24b8590 DEBUG                    bin gstbin.c:3441:gst_bin_handle_message_func:<OpenCV_to_TCPServer> posting message upward
0:00:02.464770307 28483      0x24b8590 DEBUG                GST_BUS gstbus.c:308:gst_bus_post:<bus1> [msg 0x2325680] posting on bus, type error, GstMessageError, gerror=(GError)NULL, debug=(string)"gstbasesrc.c\(2625\):\ gst_base_src_loop\ \(\):\ /GstPipeline:OpenCV_to_TCPServer/GstAppSrc:Source_OpenCV:\012streaming\ task\ paused\,\ reason\ not-negotiated\ \(-4\)"; from source <Source_OpenCV>
0:00:02.464792364 28483      0x24b8590 DEBUG                GST_BUS gstbus.c:338:gst_bus_post:<bus1> [msg 0x2325680] pushing on async queue
0:00:02.464804471 28483      0x24b8590 DEBUG                GST_BUS gstbus.c:343:gst_bus_post:<bus1> [msg 0x2325680] pushed on async queue
0:00:02.464824022 28483      0x24b8590 DEBUG                GST_BUS gstbus.c:334:gst_bus_post:<bus0> [msg 0x2325680] dropped
0:00:02.464835553 28483      0x24b8590 INFO        GST_ERROR_SYSTEM gstelement.c:1987:gst_element_message_full:<Source_OpenCV> posted error message: Internal data flow error.
...

GST_DEBUG=5 überschritten würde von diesem Post. 🙂

Bin ich beim Lesen des Log-in, die Art und Weise, dass es ein problem mit der übertragung des Bildes aus dem Source_OpenCV der Converter_FFMpegColorSpace Element? Aber warum kann nicht converter_FFMpegColorSpace die Daten zu verarbeiten? Ich dachte, dass ich x-raw-rgb zu Converter_FFMpegColorSpace, so dass Sie konvertieren können, es zum x-raw-yuv für Converter_VP8_Encoder. Habe ich angegeben, die geschoben Bild in einem falschen Weg?

Wenn ich bin fehlen einige Informationen, ich bin froh um Tipp, so kann ich hinzufügen.

Vielen Dank im Voraus!

InformationsquelleAutor Angstgeist | 2013-11-26
Schreibe einen Kommentar