My NVXIO GStreamer FrameSource impl does not work.

Hi,
I want to use VisionWorks in my application to process video from an IP camera.
I can get the video stream with GStreamer; the pipeline is:
rtspsrc -> rtph264depay -> h264parse -> omxh264dec -> [nvvidconv] -> appsink
So I created my own GStreamerRtspFrameSourceImpl in nvxio, but it doesn't work.
Is there any advice?
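
To rule out camera/codec issues, the decode chain can be verified outside nvxio with a minimal standalone GStreamer program. This is just a sketch: fakesink stands in for appsink so it runs headless, and the URL/credentials are the ones hardcoded in the code below.

#include <gst/gst.h>
#include <iostream>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    GError *err = nullptr;
    // gst_parse_launch() handles rtspsrc's dynamic pad linking automatically
    GstElement *pipeline = gst_parse_launch(
        "rtspsrc location=rtsp://192.168.99.202:554/Streaming/Channels/102 "
        "user-id=admin user-pw=admin12345 latency=100 "
        "! rtph264depay ! h264parse ! omxh264dec ! nvvidconv ! fakesink",
        &err);
    if (!pipeline)
    {
        std::cout << "Parse error: " << (err ? err->message : "unknown") << std::endl;
        g_clear_error(&err);
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // block until an error or EOS is posted on the bus
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (msg)
        gst_message_unref(msg);

    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}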

The code is:

#ifdef USE_RTSPCAMERA

#include <memory>
#include <map>
#include <gst/gst.h>
#include <NVX/Application.hpp>
#include "GStreamerRtspFrameSourceImpl.hpp"
#include <cuda_runtime_api.h>
#include <sstream>

namespace nvidiaio
{

struct NvRtspCameraConfigs
{
    vx_uint32 frameWidth, frameHeight, fps;
};
static const NvRtspCameraConfigs configs[6] =
{
    { vx_uint32(1920), vx_uint32(1080), vx_uint32(25) }, // 0
    { vx_uint32(1280), vx_uint32(720),  vx_uint32(25) }, // 1
    { vx_uint32(704),  vx_uint32(576) , vx_uint32(25) }, // 2
    { vx_uint32(1920), vx_uint32(1080), vx_uint32(50) }, // 3
    { vx_uint32(1280), vx_uint32(720),  vx_uint32(50) }, // 4
    { vx_uint32(704),  vx_uint32(576),  vx_uint32(50) }  // 5
};

GStreamerRtspFrameSourceImpl::GStreamerRtspFrameSourceImpl(const std::string & path) :
    GStreamerEGLStreamSinkFrameSourceImpl(nvxio::FrameSource::CAMERA_SOURCE, "GStreamerRtspFrameSource", false),
    rtspLinkAddress(path)
{
}
void GStreamerRtspFrameSourceImpl::RtspSrcPadAdded(GstElement *src, GstPad *pad, gpointer data)
{
    // the user data carries the depayloader element passed to g_signal_connect()
    GstElement *depay = (GstElement *)data;
    std::unique_ptr<GstPad, GStreamerObjectDeleter> sinkpad(gst_element_get_static_pad(depay, "sink"));

    if(!sinkpad)
    {
        std::cout<<"Cannot get sinkpad(rtspdepay)!"<<std::endl;
        return; // nothing to link against
    }
    else
    {
        // GST_PAD_NAME() avoids leaking the string gst_pad_get_name() allocates
        std::cout<<"The sinkpad name is:"<<GST_PAD_NAME(sinkpad.get())<<std::endl;
    }
    if(pad)
    {
        std::cout<<"The new pad name is:"<<GST_PAD_NAME(pad)<<std::endl;
    }
    if(!gst_pad_is_linked(sinkpad.get()))
    {
        GstPadLinkReturn ret = gst_pad_link(pad,sinkpad.get());
        if(ret == GST_PAD_LINK_OK)
        {
            std::cout<<std::endl<<"Link rtspsrc pad succeed!"<<std::endl;
        }
        else
        {
            // gst_pad_link_get_name() maps the GstPadLinkReturn code to a readable string
            std::cout<<std::endl<<"Link rtspsrc pad failed: "
                     <<gst_pad_link_get_name(ret)<<std::endl;
        }
    }
    else
    {
        std::cout<<std::endl<<"rtspsrc pad has been linked!"<<std::endl;
    }
}
bool GStreamerRtspFrameSourceImpl::setConfiguration(const FrameSource::Parameters& params)
{
    NVXIO_ASSERT(end);

    configuration.frameHeight = params.frameHeight;
    configuration.frameWidth = params.frameWidth;
    configuration.fps = params.fps;

    NVXIO_ASSERT((params.format == NVXCU_DF_IMAGE_NV12) ||
                 (params.format == NVXCU_DF_IMAGE_U8) ||
                 (params.format == NVXCU_DF_IMAGE_RGB) ||
                 (params.format == NVXCU_DF_IMAGE_RGBX)||
                 (params.format == NVXCU_DF_IMAGE_NONE));

    configuration.format = params.format;

    return true;
}

bool GStreamerRtspFrameSourceImpl::InitializeGstPipeLine()
{
    // default configuration: 704x576 @ 25 fps (configs[2])
    NvRtspCameraConfigs rtspcameraconfig = configs[2];
    if ((configuration.frameWidth != (vx_uint32)-1)&&
        (configuration.frameHeight != (vx_uint32)-1))
    {
        rtspcameraconfig.frameWidth = configuration.frameWidth;
        rtspcameraconfig.frameHeight = configuration.frameHeight;
        rtspcameraconfig.fps = 25;
    }
    // select FPS default for the specified config
    for (vx_size i = 0; i < ovxio::dimOf(configs); ++i)
    {
        if ((rtspcameraconfig.frameWidth == configs[i].frameWidth) &&
            (rtspcameraconfig.frameHeight == configs[i].frameHeight))
        {
           rtspcameraconfig.fps = configs[i].fps;
           break;
        }
    }
    if (configuration.fps == (vx_uint32)-1)
    {
        configuration.fps = rtspcameraconfig.fps; 
    }
    end = true;
    
    //init gst pipeline
    GstStateChangeReturn status;
    pipeline = GST_PIPELINE(gst_pipeline_new(nullptr));
    if (!pipeline)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }
    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    GstElement *m_rtspsrc = gst_element_factory_make("rtspsrc","rtspsrc");
    GstElement *m_rtspdepay = gst_element_factory_make("rtph264depay","rtspdepay");
    GstElement *m_h264parse = gst_element_factory_make("h264parse",nullptr);
    GstElement *m_h264dec = gst_element_factory_make("omxh264dec",nullptr);
    GstElement *m_videoconvert = gst_element_factory_make("nvvidconv",nullptr);  
    GstElement *m_nvvideosink = gst_element_factory_make("nvvideosink", nullptr);

    if(!m_rtspsrc || !m_rtspdepay || !m_h264parse || ! m_h264dec || !m_videoconvert || !m_nvvideosink)
    {
        std::cout<<"Cannot create Gstreamer Elements"<<std::endl;
        FinalizeGstPipeLine();
        return false;
    }
    //add elements to pipeline  
    gst_bin_add_many(GST_BIN(pipeline),m_rtspsrc,m_rtspdepay,m_h264parse,m_h264dec,m_videoconvert,m_nvvideosink,NULL);
    //Config rtspsrc elements
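    // NOTE: the RTSP address and credentials are hardcoded here for testing;
    // rtspLinkAddress from the constructor is not used yet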
    g_object_set(G_OBJECT(m_rtspsrc),"location","rtsp://192.168.99.202:554/Streaming/Channels/102",
                 "latency",100,
                 "user-id","admin",
                 "user-pw","admin12345",
                 nullptr);
   
    //Config nvvideosink element
    std::ostringstream stream;
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << rtspcameraconfig.frameWidth << ", "
              "height=(int)" << rtspcameraconfig.frameHeight << ", format=(string){I420}, "
              "framerate=(fraction)" << rtspcameraconfig.fps << "/1;";
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string(stream.str().c_str()));
    if (!caps_nvvidconv)
    {
        std::cout<<"Failed to create caps"<<std::endl;
        FinalizeGstPipeLine();
        return false;
    }
    std::cout<< std::endl <<"The filter caps is:"<<gst_caps_to_string(caps_nvvidconv.get())<<std::endl;
    
    gboolean ret = gst_element_link_filtered(m_videoconvert,m_nvvideosink,caps_nvvidconv.get());
    if(!ret)
    {
        std::cout<<"hello,link filtered error!"<<std::endl;
        FinalizeGstPipeLine();
        return false;
    }    
   
    g_object_set(G_OBJECT(m_nvvideosink),
               "display", context.display,
               "stream", context.stream,
               "fifo", fifoMode,
               "max-lateness", G_GINT64_CONSTANT(-1),
               "throttle-time", G_GUINT64_CONSTANT(0),
               "render-delay", G_GUINT64_CONSTANT(0),
               "qos", FALSE,
               "sync", FALSE,
               "async",TRUE,
                nullptr);     
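    // mirror the negotiated caps on nvvideosink's "outcaps" property
    // (presumably how the stock nvxio EGL sink sources configure it as well)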
    g_object_set(G_OBJECT(m_nvvideosink),"outcaps",caps_nvvidconv.get(),nullptr);

// link elements
    if (!gst_element_link_many(m_rtspdepay,m_h264parse,m_h264dec,m_videoconvert,NULL))
    {
        std::cout<<"GStreamer: cannot link color -> sink"<<std::endl;
        FinalizeGstPipeLine();
        return false;
    }
    //link the pad-added signal for rtspsrc --> rtph264depay
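    // (rtspsrc creates its src pads dynamically once RTSP negotiation finishes,
    // so the depayloader can only be linked in this callback)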
    g_signal_connect(m_rtspsrc,"pad-added",G_CALLBACK(GStreamerRtspFrameSourceImpl::RtspSrcPadAdded),m_rtspdepay);
    
    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, nullptr);
    status = gst_element_set_state((GstElement*)pipeline, GST_STATE_PLAYING);
    handleGStreamerMessages();
    if (status == GST_STATE_CHANGE_ASYNC)
    {   
        std::cout<<"The pipeline will start async!"<<std::endl;
        //wait for status update
        status = gst_element_get_state((GstElement*)pipeline, nullptr, nullptr, GST_CLOCK_TIME_NONE);
    }
    
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        std::cout<<"Can not change to PLAY state!"<<std::endl;
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();
        return false;
    }

    vx_uint32 initialFPS = configuration.fps;

    if (!updateConfiguration(m_nvvideosink, m_videoconvert, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    // if an initial FPS was specified, prefer it: the FPS retrieved via
    // updateConfiguration() corresponds to the camera config FPS
    if (initialFPS != (vx_uint32)-1)
        configuration.fps = initialFPS;
    std::cout<<"init success!!!!"<<std::endl;
    end = false;

    return true;
}

} // namespace nvidiaio

#endif // defined USE_RTSPCAMERA

In order to use my “GStreamerRtspFrameSourceImpl”, I modified the preprocessor guard in GStreamerEGLStreamSinkFrameSourceImpl.hpp and GStreamerEGLStreamSinkFrameSourceImpl.cpp so that the EGL-stream sink base class is also built when USE_RTSPCAMERA is defined:

#if defined USE_GSTREAMER_OMX && defined USE_GLES || defined USE_GSTREAMER_NVMEDIA || defined USE_NVGSTCAMERA || defined USE_RTSPCAMERA

In FrameSource.cpp and FrameSourceOVX.cpp I added:

# ifdef USE_RTSPCAMERA
# include "FrameSource/GStreamer/GStreamerRtspFrameSourceImpl.hpp"
# endif

and in the function std::unique_ptr<FrameSource> createDefaultFrameSource(const std::string& uri), I create the GStreamerRtspFrameSourceImpl:

if (path == "rtsp")
    	{
#ifdef USE_RTSPCAMERA
	std::cout<<"Make UP RTSP IMPL!"<<std::endl;
	return makeUP<GStreamerRtspFrameSourceImpl>("hello");
#endif
    	}

and added -DUSE_RTSPCAMERA=1 -DUSE_GLES=1 to the Makefile, then ran make.

I recompiled the sample nvx_sample_player (adding “LIBRARIES += -lEGL -lcuda” to its Makefile) and ran:
./nvx_sample_player --source=device:///rtsp?rtspsrc=xxx
The GStreamerRtspFrameSourceImpl is created, but the GStreamer pipeline cannot change to the PLAYING state.

The output is below:

nvidia@tegra-ubuntu:/media/nvidia/hcdisk/visionworks/sources/bin/aarch64/linux/release$ ./nvx_sample_player --source=device:///rtsp?rtspsrc=xxx
VisionWorks library info:
VisionWorks version : 1.6.0
OpenVX Standard version : 1.1.0

Make UP RTSP IMPL!

The filter caps is:video/x-raw(memory:NVMM), width=(int)704, height=(int)576, format=(string)I420, framerate=(fraction)25/1
The pipeline will start async!

Linking rtspsrc pad!
The source name is:rtspsrc
The dest name is:rtspdepay
The sinkpad name is:sink
The new pad name is:recv_rtp_src_0_710463277_96

Link rtspsrc pad succeed!
NvMMLiteOpen : Block : BlockType = 261
TVMR: NvMMLiteTVMRDecBlockOpen: 7907: NvMMLiteBlockOpen
NvMMLiteBlockCreate : Block : BlockType = 261
TVMR: cbBeginSequence: 1223: BeginSequence 704x576, bVPR = 0
TVMR: LowCorner Frequency = 100000
TVMR: cbBeginSequence: 1622: DecodeBuffers = 6, pnvsi->eCodec = 4, codec = 0
TVMR: cbBeginSequence: 1693: Display Resolution : (704x576)
TVMR: cbBeginSequence: 1694: Display Aspect Ratio : (704x576)
TVMR: cbBeginSequence: 1762: ColorFormat : 5
TVMR: cbBeginSequence:1767 ColorSpace = NvColorSpace_YCbCr709_ER
TVMR: cbBeginSequence: 1904: SurfaceLayout = 3
TVMR: cbBeginSequence: 2005: NumOfSurfaces = 13, InteraceStream = 0, InterlaceEnabled = 0, bSecure = 0, MVC = 0 Semiplanar = 1, bReinit = 1, BitDepthForSurface = 8 LumaBitDepth = 8, ChromaBitDepth = 8, ChromaFormat = 5
TVMR: cbBeginSequence: 2007: BeginSequence ColorPrimaries = 1, TransferCharacteristics = 1, MatrixCoefficients = 1
Allocating new output: 704x576 (x 13), ThumbnailMode = 0
OPENMAX: HandleNewStreamFormat: 3464: Send OMX_EventPortSettingsChanged : nFrameWidth = 704, nFrameHeight = 576
Can not change to PLAY state!
TVMR: TVMRFrameStatusReporting: 6369: Closing TVMR Frame Status Thread -------------
TVMR: TVMRVPRFloorSizeSettingThread: 6179: Closing TVMRVPRFloorSizeSettingThread -------------
TVMR: TVMRFrameDelivery: 6219: Closing TVMR Frame Delivery Thread -------------
TVMR: NvMMLiteTVMRDecBlockClose: 8105: Done
Error: Can’t open source URI device:///rtsp?rtspsrc=xxx

Hi, are you on r28.1?

Yes, I just upgraded my TX2 from JetPack 3.0 to JetPack 3.1.

Hi,

You can check this topic:
[url]https://devtalk.nvidia.com/default/topic/1019753/jetson-tx2/how-to-use-onboard-camera-on-tx2-dev-kit-with-visionworks-1-6-jetpack-3-1-/post/5203955/#5203955[/url]

The user also modifies GStreamerCameraFrameSourceImpl.cpp to get the onboard camera to work.

Hi,
I solved this problem by removing the framerate from the nvvidconv src caps, but I do not know why…

//Config nvvideosink element
    std::ostringstream stream;
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << rtspcameraconfig.frameWidth << ", "
              "height=(int)" << rtspcameraconfig.frameHeight << ", format=(string){I420}, "
              "framerate=(fraction)" << rtspcameraconfig.fps << "/1;";
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string(stream.str().c_str()));

After removing the framerate (note that the working caps also switch the format from {I420} to NV12):

//Config nvvideosink element
    std::ostringstream stream;
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << rtspcameraconfig.frameWidth << ","
              "height=(int)" << rtspcameraconfig.frameHeight << ",format=(string)NV12";
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string(stream.str().c_str()));