EGL: acquiring two adjacent frames and querying EGL_STREAM_TIME_PRODUCER_KHR — why do the timestamps differ by about 80 ms?

We use nvvideosink → EGLStream, and we set the nvvideosink property “sync” = true. The frame rate is 25 fps. When we acquire a frame from EGL and query EGL_STREAM_TIME_PRODUCER_KHR, then compare two adjacent frames, we find they differ by 80 milliseconds. Theoretically, they should differ by 40 ms.

We changed nvvideosink to appsink, and in the new_sample function we compared the adjacent buffers' timestamps and printed them out; the difference is always 40 milliseconds, which verifies our hypothesis. So why does the timestamp from nvvideosink to EGL differ by 80 ms (not always — it may vary from 60 to 120 ms)?

// Appsink "new-sample" callback used to verify buffer timestamps: pulls the
// next sample and prints the PTS delta against the previous buffer.
// With a 25 fps source the delta is expected to be ~40 ms.
GstFlowReturn Stream::new_sample(GstElement *sink, gpointer data)
{
    GstSample *sample;
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);

    if(sample == NULL) {
        return GST_FLOW_ERROR;
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    if(buffer == NULL) {
        // A sample without a buffer carries no timestamp; drop it.
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstClockTime now_timestamp = GST_BUFFER_PTS(buffer);
    GstClockTime diff = now_timestamp - pre_timestamp;

    // BUG FIX: the original logged the undeclared identifiers `now`/`pre`;
    // use the actual variables. GstClockTime is a guint64, so format it
    // with G_GUINT64_FORMAT instead of %ld.
    dbgError("diff timestamp = %" G_GUINT64_FORMAT ", now = %" G_GUINT64_FORMAT ", pre = %" G_GUINT64_FORMAT ".\n",
             diff/1000000, now_timestamp/1000000, pre_timestamp/1000000);
    pre_timestamp = now_timestamp;

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

I think nvvideosink may have a bug. It could be calculating the timestamp as: timestamp = (absolute_time − base_time) + absolute_time = running_time + absolute_time, which is wrong. It should be timestamp = running_time + base_time.

Hi ClancyLian,
Please share a full sample to reproduce the issue.

main code

#include "stream.h"
#include <gst/gst.h>
#include <cuda_runtime.h>
#include <opencv2/opencv.hpp>
#include "dbg.h"
#include <thread>
using namespace cv;
using namespace std;
using namespace std::chrono;

int main(int argc, char *argv[])
{   
    //thread *panoThread;
    gst_init(&argc, &argv);

    Stream *pStream = new Stream();

    while( !pStream->ready() ) {
        std::this_thread::sleep_for(milliseconds(100));
    }

    time_point<steady_clock, nanoseconds> timerPoint(steady_clock::now());
    int delta = 1000 / 25; // 40ms
    int count = 0;
    while(1) {
        timerPoint += milliseconds(delta);
        std::this_thread::sleep_until(timerPoint);
        // every 40ms fetch new frame
        CUeglFrame *frames = pStream->fetchFrames();
        if(frames != nullptr)
        {
            std::this_thread::sleep_for(milliseconds(10));
            pStream->releaseFrames();
        }
        count++;

    }

    delete pStream;
    return 0;
}

Stream.h code

#ifndef STREAM_H
#define STREAM_H

#include <gst/gst.h>
#include "eglframeconsumer.h"
#include "eglapiaccessors.h"
#include <vector>

// Wraps a GStreamer pipeline (rtspsrc -> decodebin -> nvvidconv ->
// nvvideosink) that feeds decoded video frames into an EGLStream consumed
// on the CUDA side via EGLFrameConsumer.
class Stream
{
public:
    Stream();
    ~Stream();

public:

    // Builds and links the pipeline elements; returns false on failure.
    bool setPipeline();

    // Returns true once the pipeline has reached GST_STATE_PLAYING.
    bool ready();
    // Acquires the newest frame from the EGL stream (nullptr if none).
    CUeglFrame *fetchFrames();
    // Releases the frame previously returned by fetchFrames().
    void releaseFrames();
    void resetStream();

    // Prints pipeline clock / base-time / latency diagnostics.
    void dumpClocks();
private:

    // appsink "new-sample" callback (used when appsink replaces nvvideosink).
    static GstFlowReturn new_sample(GstElement *sink, gpointer data);
    // rtspsrc dynamic pad -> decodebin sink pad.
    static void onSourcePadAdded(GstElement *element, GstPad *pad, gpointer data);
    // decodebin dynamic pad -> nvvidconv sink pad.
    static void onDbinPadAdded(GstElement *element, GstPad *pad, gpointer data);

private:
    GstElement *videoPipeline;    // owning reference to the pipeline
    EGLFrameConsumer *consumer;   // CUDA-side EGLStream consumer (owned)
    CUeglFrame *frame;            // last frame handed out by fetchFrames()
    bool videoPlaying;            // latched once PLAYING is observed
    guint videoBusID;             // bus watch GSource id

};

#endif // STREAM_H

Stream.cpp code

#include "stream.h"

#include "stdio.h"
#include <string.h>    // strcmp() in onSourcePadAdded()

#include <memory>
#include <stdexcept>   // std::runtime_error thrown on pipeline setup failure

#include "dbg.h"
#include "gstreamerutils.h"

using namespace std;

// Constructs and starts the pipeline. Throws std::runtime_error on any
// setup failure.
//
// BUG FIX: the original used bare `throw;` statements; re-throwing with no
// exception in flight calls std::terminate() instead of raising an error.
Stream::Stream()
{
    videoPlaying = false;
    frame = nullptr;   // BUG FIX: releaseFrames() reads this; it was never initialized

    videoPipeline = gst_pipeline_new("video");
    if (videoPipeline == nullptr) {
        printf("Create video pipeline failed.\n");
        throw std::runtime_error("Create video pipeline failed");
    }

    // FIFO of 4 frames, no extra consumer latency.
    consumer = new EGLFrameConsumer(4, 0);

    if(!setPipeline()) {
        printf("set pipeline failed.\n");
        throw std::runtime_error("set pipeline failed");
    }

    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(videoPipeline));
    videoBusID = gst_bus_add_watch(bus, pipelineBusCb, nullptr);
    gst_object_unref(bus);

    GstStateChangeReturn ret = gst_element_set_state(videoPipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        printf("Change video pipeline state to %s failed.\n",
                 gst_element_state_get_name(GST_STATE_PLAYING));
        gst_object_unref(videoPipeline);
        throw std::runtime_error("Change video pipeline state failed");
    }
}

// Stops the pipeline, removes the bus watch installed in the constructor
// and destroys the EGL consumer.
Stream::~Stream()
{
    gst_element_set_state(videoPipeline, GST_STATE_NULL);

    // BUG FIX: the GSource installed by gst_bus_add_watch() was never
    // removed; drop it so the callback cannot fire after destruction.
    if (videoBusID != 0) {
        g_source_remove(videoBusID);
    }

    gst_object_unref(GST_OBJECT(videoPipeline));

    if( consumer != NULL ) {
        delete consumer;
    }
}

bool Stream::setPipeline()
{
    printf("Setting up Pipeline.\n");

    GstElement *source = gst_element_factory_make("rtspsrc", nullptr);
    if (source == nullptr) {       
        printf("Create source failed.\n");
        return false;
    }
    //std::string strRtsp = "rtsp://192.168.12.100:554/Onvif/live/4/1";
    std::string strRtsp = "rtsp://192.168.22.55:9001/bs0";
    printf("strRtsp = %s.\n", strRtsp.c_str());
    g_object_set(G_OBJECT(source), "location", strRtsp.c_str(), nullptr);
    g_object_set(G_OBJECT(source), "protocols", 0x4, nullptr);
    g_object_set(G_OBJECT(source), "timeout", 10000000, nullptr);

    g_object_set(source, "ntp-time-source", 3, nullptr);
    g_object_set(source, "buffer-mode", 4, nullptr);
    g_object_set(source, "ntp-sync", true, nullptr);

    GstElement *dbin = gst_element_factory_make("decodebin", nullptr);
    if (dbin == nullptr) {
        printf("Create decodebin failed.\n");
        return false;
    }

    //g_object_set(dbin, "max-size-buffers", 10, nullptr);

    GstElement *convert = gst_element_factory_make("nvvidconv", nullptr);
    if (convert == nullptr) {
        printf("Create nvvidconv failed.\n");
        return false;
    }

    g_object_set(convert, "output-buffers", 4, nullptr);

    GstElement *sink = gst_element_factory_make("nvvideosink", nullptr);
    if (sink == nullptr) {
        printf("Create sink failed.\n");
        return false;
    }

    g_object_set(sink, "display", consumer->getEGLDisplay(), nullptr);
    g_object_set(sink, "stream", consumer->getEGLStream(), nullptr);

    if (consumer->isFIFOMode()) {
        g_object_set(sink, "fifo", true, nullptr);
        g_object_set(sink, "fifo-size", consumer->getFIFOLen(), nullptr);
    }

//    GstElement *sink = gst_element_factory_make("appsink", nullptr);
//    if (sink == nullptr) {
//        printf("Create sink failed.\n");
//        return false;
//    }

//    const char *appsinkcaps = "video/x-raw,format=NV12";
//    GstCaps *caps = gst_caps_from_string(appsinkcaps);
//    g_object_set(sink,"emit-signals", true, "caps", caps, nullptr);
//    g_signal_connect(sink, "new-sample", G_CALLBACK(new_sample), NULL);

    g_signal_connect(source, "pad-added", G_CALLBACK(&Stream::onSourcePadAdded), dbin);
    g_signal_connect(dbin, "pad-added", G_CALLBACK(&Stream::onDbinPadAdded), convert);

    gst_bin_add_many(GST_BIN(videoPipeline), source, dbin, convert, sink, nullptr);
//    if (!gst_element_link(convert, sink)) {
//        return false;
//    }
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> capsNvvidconv(
                gst_caps_from_string("video/x-raw(memory:NVMM), format=(string){I420}"));
    if (!gst_element_link_filtered(convert, sink, capsNvvidconv.get())) {
        printf("Link elememt convert <-> sink failed.\n");
        gst_object_unref(videoPipeline);
        return false;
    }

    return true;

}

static GstClockTime pre = 0;   // PTS of the previously pulled buffer

// appsink "new-sample" callback: pulls the sample and prints the PTS delta
// against the previous buffer (expected ~40 ms at 25 fps).
GstFlowReturn Stream::new_sample(GstElement *sink, gpointer data)
{
    GstSample *sample;
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);

    if(sample == NULL) {
        return GST_FLOW_ERROR;
    }

    GstCaps *caps = gst_sample_get_caps(sample);
    if(caps == NULL) {
        // BUG FIX: the original returned here without unreffing the sample.
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    if(buffer == NULL) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstClockTime now = GST_BUFFER_PTS(buffer);
    GstClockTime diff = now - pre;

    // GstClockTime is guint64 -> use G_GUINT64_FORMAT, not %ld.
    printf("diff timestamp = %" G_GUINT64_FORMAT ", now = %" G_GUINT64_FORMAT ", pre = %" G_GUINT64_FORMAT ".\n",
           diff/1000000, now/1000000, pre/1000000);
    pre = now;

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

// rtspsrc "pad-added" handler: links the first video pad to the sink pad of
// the element passed via `data` (decodebin) and ignores every other stream
// (e.g. audio).
void Stream::onSourcePadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    GstCaps *caps = NULL;
    const GstStructure *str = NULL;
    const gchar *type = NULL;

    GstPad *sinkpad = gst_element_get_static_pad(GST_ELEMENT(data), "sink");
    if(gst_pad_is_linked(sinkpad)) {
        printf("sink pads is linked.\n");
        goto exit;
    }

    caps = gst_pad_query_caps(pad, nullptr);
    str = gst_caps_get_structure(caps, 0);
    type = gst_structure_get_string(str, "media");
    // BUG FIX: the "media" field may be absent, in which case the original
    // passed NULL to strcmp() (undefined behaviour).
    if (type == NULL || strcmp(type, "video") != 0) {
        goto exit;
    }

    if (GST_PAD_LINK_FAILED(gst_pad_link(pad, sinkpad))) {
        printf("Link pads failed.\n");
    }

exit:
    if(caps != nullptr) {
        gst_caps_unref(caps);
    }

    gst_object_unref(sinkpad);
}

// decodebin "pad-added" handler: connects the freshly exposed decoder pad
// to the sink pad of the downstream element passed via `data` (nvvidconv).
void Stream::onDbinPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    printf("Setting up pads for decodebin.\n");

    GstPad *target = gst_element_get_static_pad(GST_ELEMENT(data), "sink");
    if (target == nullptr) {
        printf("Get sink pad failed.\n");
        return;
    }

    const GstPadLinkReturn status = gst_pad_link(pad, target);
    if (GST_PAD_LINK_FAILED(status)) {
        printf("Link pads failed %d.\n", status);
    }

    gst_object_unref(target);
}

bool Stream::ready()
{
    if (videoPlaying) {
        return true;
    }

    GstState state, pending;
    GstStateChangeReturn ret = gst_element_get_state(videoPipeline, &state, &pending, 0);
    if (ret == GST_STATE_CHANGE_SUCCESS && state == GST_STATE_PLAYING) {
        printf("IP cameras start playing.\n");
        videoPlaying = true;
        return true;
    }

    return false;
}

// Pulls the newest frame from the EGL consumer and remembers it so that
// releaseFrames() can return it later. Returns nullptr when no frame is
// available.
CUeglFrame* Stream::fetchFrames()
{
    return (frame = consumer->fetch());
}

// Returns the frame obtained by fetchFrames() to the EGL stream. Safe to
// call when no frame is currently held.
void Stream::releaseFrames()
{
    if (frame != nullptr) {
        consumer->release();
        // BUG FIX: forget the frame so a second call cannot release it twice.
        frame = nullptr;
    }
}

// Forwards a stream-reset request to the EGL consumer (NV reset extension).
void Stream::resetStream()
{
    consumer->resetStream();
}

void Stream::dumpClocks()
{
    GstClock *clock;

    //printf("IP camera %d clock information:\n", index);
    clock = gst_pipeline_get_clock(GST_PIPELINE(videoPipeline));
    printf("Clock time: %llu.\n", gst_clock_get_time(clock) / 1000000);
    //clock = gst_pipeline_get_pipeline_clock(GST_PIPELINE(videoPipeline));
    //printf("Pipeline clock time: %llu.\n", gst_clock_get_time(clock) / 1000000);
    printf("Pipeline base time %llu, start time %llu, delay %llu, latency %llu.\n",
            gst_element_get_base_time(videoPipeline) / 1000000,
            gst_element_get_start_time(videoPipeline) / 1000000,
            gst_pipeline_get_delay(GST_PIPELINE(videoPipeline)) / 1000000,
            gst_pipeline_get_latency(GST_PIPELINE(videoPipeline)) / 1000000);

    printf("Pipeline clock time minus base time: %llu.\n",
           (gst_clock_get_time(clock) - gst_element_get_base_time(videoPipeline)) / 1000000);
}

eglframeconsumer.h code

#ifndef EGLFRAMECONSUMER_H
#define EGLFRAMECONSUMER_H


#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <cudaEGL.h>

// Consumer side of an EGLStream: creates the stream, connects a CUDA
// consumer to it and hands decoded frames out as CUeglFrame pointers.
class EGLFrameConsumer
{
public:
    /**
     * @brief EGLFrameConsumer
     * @param fifoLength: FIFO depth; 0 selects mailbox (latest-frame) mode
     * @param latency: consumer latency hint, microseconds
     */
    EGLFrameConsumer(int fifoLength, int latency);
    ~EGLFrameConsumer();

    // Acquires the next frame (nullptr if none); pair with release().
    CUeglFrame* fetch();
    // Returns the frame acquired by fetch() to the stream.
    void release();
    // Resets the stream via the NV reset extension.
    void resetStream();
    // Drains one pending FIFO frame without processing it.
    void clear();

    EGLDisplay getEGLDisplay() {
        return display;
    }

    EGLStreamKHR getEGLStream() {
        return stream;
    }

    bool isFIFOMode() {
        return fifoMode;
    }

    int getFIFOLen() {
        return fifoLength;
    }

private:
    bool initEGLDisplay();
    bool initEGLStream();
    void finalizeEGLStream();
    bool initEGLCudaConsumer();
    void finalizeEGLCudaConsumer();

    EGLDisplay display;        // shared display from EGLDisplayAccessor
    EGLStreamKHR stream;       // the EGL stream itself
    int fifoLength;            // effective FIFO length (0 = mailbox)
    bool fifoMode;             // true when fifoLength > 0
    int latency;               // consumer latency, microseconds

    CUeglStreamConnection cudaConnection; // CUDA consumer connection handle
    CUgraphicsResource cudaResource;      // resource of the acquired frame
    CUeglFrame eglFrame;                  // mapped frame returned by fetch()
    EGLTimeKHR timeStamp;                 // previous frame's consumer timestamp
};

#endif // EGLFRAMECONSUMER_H

eglconsumer.cpp code

#include <assert.h>
#include <stdio.h>
#include <cuda_runtime.h>

#include "dbg.h"
#include "eglapiaccessors.h"
#include "eglframeconsumer.h"

// Builds the consumer side of the EGLStream: obtains the EGL display,
// creates the stream (FIFO mode when fifoLength > 0, mailbox otherwise)
// and connects the CUDA consumer.
//
// NOTE(review): the bare `throw;` statements below re-throw the "current"
// exception, but no exception is active here, so they call std::terminate().
// Throwing a concrete exception (e.g. std::runtime_error) would be safer.
EGLFrameConsumer::EGLFrameConsumer(int fifoLength, int latency)
{
    printf("Initialize EGL frame consumer.\n");

    display = EGL_NO_DISPLAY;
    stream = EGL_NO_STREAM_KHR;

    // FIFO mode is implied by a positive FIFO length.
    this->fifoLength = fifoLength;
    if (fifoLength > 0) {
        fifoMode = true;
    } else {
        fifoMode = false;
    }
    this->latency = latency;

    if (!initEGLDisplay()) {
        printf("Cannot initialize EGL display.\n");
        throw;
    }

    if (!initEGLStream()) {
        printf("Cannot initialize EGL Stream.\n");
        throw;
    }

    if (!initEGLCudaConsumer()) {
        printf("Cannot initialize CUDA consumer.\n");
        throw;
    }
}

// Tears down the CUDA consumer first, then destroys the EGL stream itself
// (reverse of construction order).
EGLFrameConsumer::~EGLFrameConsumer()
{
    printf("Destroy EGL frame consumer.\n");
    finalizeEGLCudaConsumer();
    finalizeEGLStream();
}

// Acquires the next frame from the EGL stream into `eglFrame` and returns a
// pointer to it, or nullptr when no new frame is available / on error.
// Also prints the delta between consecutive consumer-side timestamps.
CUeglFrame* EGLFrameConsumer::fetch()
{
    // cudaFree(nullptr) lazily creates/binds a CUDA context on this thread.
    if (cudaFree(nullptr) != cudaSuccess) {
        printf("Failed to initialize CUDA context");
        return nullptr;
    }

    EGLint streamState = 0;
    if (!eglQueryStreamKHR(display, stream, EGL_STREAM_STATE_KHR, &streamState)) {
        printf("Cuda consumer, eglQueryStreamKHR EGL_STREAM_STATE_KHR failed.\n");
        return nullptr;
    }

    if (streamState != EGL_STREAM_STATE_NEW_FRAME_AVAILABLE_KHR) {
        printf("No new EGL stream frame available.\n");
        return nullptr;
    }

    // BUG FIX: the error messages below named EGL_STREAM_STATE_KHR although
    // the queried attributes are the stream *time* attributes.
    EGLTimeKHR timeStampConsumer;
    if (!eglQueryStreamTimeKHR(display, stream, EGL_STREAM_TIME_CONSUMER_KHR, &timeStampConsumer)) {
        printf("Cuda consumer, eglQueryStreamTimeKHR EGL_STREAM_TIME_CONSUMER_KHR failed.\n");
        return nullptr;
    }
    EGLTimeKHR timeStampNow = 0;
    if (!eglQueryStreamTimeKHR(display, stream, EGL_STREAM_TIME_NOW_KHR, &timeStampNow)) {
        printf("Cuda consumer, eglQueryStreamTimeKHR EGL_STREAM_TIME_NOW_KHR failed.\n");
        return nullptr;
    }

    // EGLTimeKHR is a 64-bit nanosecond value; cast so %lld is well-defined
    // (the original used %ld on a 64-bit type).
    printf("two frames differ %lldms, %lldms.\n",
           (long long)((timeStampConsumer - timeStamp)/1000000),
           (long long)(timeStampConsumer/1000000));
    timeStamp = timeStampConsumer;

    CUresult cuStatus = cuEGLStreamConsumerAcquireFrame(&cudaConnection, &cudaResource, nullptr, 0);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda Acquire EGL stream frame failed cuStatus=%d.\n", cuStatus);
        return nullptr;
    }

    cuStatus = cuGraphicsResourceGetMappedEglFrame(&eglFrame, cudaResource, 0, 0);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda get resource failed with %d.\n", cuStatus);
        cuEGLStreamConsumerReleaseFrame(&cudaConnection, cudaResource, nullptr);
        return nullptr;
    }

    // The consumer expects pitched RGBA frames; anything else is a bug
    // (note: asserts compile out in release builds).
    assert(eglFrame.cuFormat == CU_AD_FORMAT_UNSIGNED_INT8);
    assert(eglFrame.eglColorFormat == CU_EGL_COLOR_FORMAT_RGBA);
    assert(eglFrame.planeCount == 1);
    assert(eglFrame.numChannels == 4);
    assert(eglFrame.frameType == CU_EGL_FRAME_TYPE_PITCH);
    assert(eglFrame.pitch == eglFrame.width * 4);
    return &eglFrame;
}

void EGLFrameConsumer::resetStream()
{
    EGLBoolean ret = eglResetStreamNV(display, stream);
    printf("eglResetStreamNV = %d.\n", ret);
}

void EGLFrameConsumer::release()
{
    CUresult cuStatus = cuEGLStreamConsumerReleaseFrame(&cudaConnection, cudaResource, nullptr);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda release frame failed cuStatus=%d.\n", cuStatus);
    }
}

void EGLFrameConsumer::clear()
{
    if (!fifoMode) {
        return;
    }

    if (cudaFree(nullptr) != cudaSuccess) {
        printf("Failed to initialize CUDA context");
        return;
    }

    EGLint streamState = 0;
    if (!eglQueryStreamKHR(display, stream, EGL_STREAM_STATE_KHR, &streamState)) {
        printf("Cuda consumer, eglQueryStreamKHR EGL_STREAM_STATE_KHR failed.\n");
        return;
    }

    if (streamState != EGL_STREAM_STATE_NEW_FRAME_AVAILABLE_KHR) {
        return;
    }

    CUresult cuStatus = cuEGLStreamConsumerAcquireFrame(&cudaConnection, &cudaResource, nullptr, 0);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda Acquire failed cuStatus=%d.\n", cuStatus);
        return;
    }

    cuStatus = cuEGLStreamConsumerReleaseFrame(&cudaConnection, cudaResource, nullptr);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda release frame failed cuStatus=%d.\n", cuStatus);
    }
}

// Fetches the process-wide EGL display from the singleton accessor.
bool EGLFrameConsumer::initEGLDisplay()
{
    display = EGLDisplayAccessor::getInstance();
    if (display != EGL_NO_DISPLAY) {
        return true;
    }
    printf("Obtain EGL display failed.\n");
    return false;
}

bool EGLFrameConsumer::initEGLStream()
{
    const EGLint streamAttrMailboxMode[] = { /*EGL_SUPPORT_REUSE_NV, EGL_FALSE,*/ EGL_NONE };
    const EGLint streamAttrFIFOMode[] = { EGL_STREAM_FIFO_LENGTH_KHR, fifoLength, EGL_SUPPORT_RESET_NV, EGL_TRUE, EGL_NONE };

    if (!setupEGLExtensions()) {
        return false;
    }

    stream = eglCreateStreamKHR(display, fifoMode ? streamAttrFIFOMode : streamAttrMailboxMode);
    if (stream == EGL_NO_STREAM_KHR) {
        printf("Couldn't create stream.\n");
        return false;
    }

    if (!eglStreamAttribKHR(display, stream, EGL_CONSUMER_LATENCY_USEC_KHR, latency)) {
        printf("Consumer: streamAttribKHR EGL_CONSUMER_LATENCY_USEC_KHR failed.\n");
    }
    if (!eglStreamAttribKHR(display, stream, EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR, 0)) {
        printf("Consumer: streamAttribKHR EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR failed.\n");
    }

    // Get stream attributes
    if (!eglQueryStreamKHR(display, stream, EGL_STREAM_FIFO_LENGTH_KHR, &fifoLength)) {
        printf("Consumer: eglQueryStreamKHR EGL_STREAM_FIFO_LENGTH_KHR failed.\n");
    }
    if (!eglQueryStreamKHR(display, stream, EGL_CONSUMER_LATENCY_USEC_KHR, &latency)) {
        printf("Consumer: eglQueryStreamKHR EGL_CONSUMER_LATENCY_USEC_KHR failed.\n");
    }

    if (fifoMode != (fifoLength > 0)) {
        printf("EGL Stream consumer - Unable to set FIFO mode.\n");
        fifoMode = false;
    }
    if (fifoMode) {
        printf("EGL Stream consumer - Mode: FIFO, Length: %d, latency %d.\n", fifoLength, latency);
    } else {
        printf("EGL Stream consumer - Mode: Mailbox.\n");
    }

    return true;
}

// Destroys the EGL stream if one was created.
// Note: frames still queued in the stream are not drained here.
void EGLFrameConsumer::finalizeEGLStream()
{
    if (stream == EGL_NO_STREAM_KHR) {
        return;
    }
    eglDestroyStreamKHR(display, stream);
    stream = EGL_NO_STREAM_KHR;
}

bool EGLFrameConsumer::initEGLCudaConsumer()
{
    if (cudaFree(nullptr) != cudaSuccess) {
        printf("Failed to initialize CUDA context.\n");
        return false;
    }

    printf("Connect CUDA consumer.\n");
    CUresult curesult = cuEGLStreamConsumerConnect(&cudaConnection, stream);
    if (curesult != CUDA_SUCCESS) {
        printf("Connect CUDA consumer ERROR %d.\n", curesult);
        return false;
    }

    return true;
}

// Disconnects the CUDA consumer, if one was connected.
void EGLFrameConsumer::finalizeEGLCudaConsumer()
{
    if (!cudaConnection) {
        return;
    }
    if (cudaFree(nullptr) != cudaSuccess) {
        printf("Failed to initialize CUDA context.\n");
        return;
    }
    cuEGLStreamConsumerDisconnect(&cudaConnection);
    cudaConnection = nullptr;
}

eglapiaccessors.h code

#ifndef EGLAPIACCESSORS_H
#define EGLAPIACCESSORS_H


#include <EGL/egl.h>
#include <EGL/eglext.h>

#if !defined EGL_KHR_stream || !defined EGL_KHR_stream_fifo || !defined EGL_KHR_stream_consumer_gltexture
# error "EGL_KHR_stream extensions are not supported!"
#endif

// Lazily-initialized singleton owning the process-wide EGLDisplay.
class EGLDisplayAccessor
{
public:
    // Returns the shared display (EGL_NO_DISPLAY if initialization failed).
    static EGLDisplay getInstance();

private:
    EGLDisplayAccessor();
    ~EGLDisplayAccessor();

    EGLDisplay eglDisplay;
};

// X-macro listing every EGL extension entry point the project resolves at
// runtime via eglGetProcAddress(); T(function-pointer-type, name).
#define EXTENSION_LIST_MY(T)                                     \
    T( PFNEGLCREATESTREAMKHRPROC,          eglCreateStreamKHR )  \
    T( PFNEGLDESTROYSTREAMKHRPROC,         eglDestroyStreamKHR ) \
    T( PFNEGLQUERYSTREAMKHRPROC,           eglQueryStreamKHR )   \
    T( PFNEGLSTREAMATTRIBKHRPROC,          eglStreamAttribKHR )  \
    T( PFNEGLQUERYSTREAMU64KHRPROC,        eglQueryStreamu64KHR ) \
    T( PFNEGLRESETSTREAMNVPROC,            eglResetStreamNV )    \
    T( PFNEGLQUERYSTREAMTIMEKHRPROC,       eglQueryStreamTimeKHR )


// Declares an `extern` function pointer for each entry above; the matching
// definitions live in eglapiaccessors.cpp.
#define EXTLST_EXTERN(tx, x) extern tx x;

EXTENSION_LIST_MY(EXTLST_EXTERN)

// Resolves all entry points once; returns false if any lookup fails.
bool setupEGLExtensions();


#endif // EGLAPIACCESSORS_H

eglapiaccessors.cpp code

#include <stdio.h>

#include "dbg.h"
#include "eglapiaccessors.h"


// Meyers singleton: the display is created on first use and torn down at
// process exit by the static object's destructor.
EGLDisplay EGLDisplayAccessor::getInstance()
{
    static EGLDisplayAccessor accessor;
    return accessor.eglDisplay;
}

// Obtains and initializes the default EGL display. On any failure,
// eglDisplay is left as EGL_NO_DISPLAY.
EGLDisplayAccessor::EGLDisplayAccessor()
{
    if ((eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY) {
        printf("EGL obtain display failed.\n");
        // BUG FIX: the original fell through and called eglInitialize() /
        // eglTerminate() on EGL_NO_DISPLAY.
        return;
    }

    EGLint major, minor;
    if (!eglInitialize(eglDisplay, &major, &minor)) {
        printf("EGL initialize failed.\n");
        eglTerminate(eglDisplay);
        eglDisplay = EGL_NO_DISPLAY;
    } else {
        printf("EGL API: %d.%d\n", major, minor);
    }
}

// Releases the display at static-destruction time.
EGLDisplayAccessor::~EGLDisplayAccessor()
{
    if (eglDisplay == EGL_NO_DISPLAY) {
        return;
    }
    eglTerminate(eglDisplay);
    eglDisplay = EGL_NO_DISPLAY;
    printf("Terminate EGL display.\n");
}


// Set once every extension entry point has been resolved successfully.
static bool initialized = false;

// Define one (initially null) function pointer per extension entry point.
#define EXTLST_IMPL_MY(tx, x) tx x = nullptr;
EXTENSION_LIST_MY(EXTLST_IMPL_MY)

typedef void (* extlst_fnptr_t)(void);
// Table entry: address of the function pointer plus the symbol name used
// for the eglGetProcAddress() lookup.
#define EXTLST_ENTRY_MY(tx, x) { ( extlst_fnptr_t *)&x, #x },

static struct {
    extlst_fnptr_t * fnptr;
    char const * name;
} extensionList[] = { EXTENSION_LIST_MY(EXTLST_ENTRY_MY) };

// Resolves every extension entry point exactly once. Returns false (and
// leaves `initialized` unset, so a later call retries) if any symbol
// cannot be found.
bool setupEGLExtensions()
{
    if (initialized) {
        return true;
    }

    const size_t count = sizeof(extensionList) / sizeof(extensionList[0]);
    for (size_t i = 0; i < count; i++) {
        *extensionList[i].fnptr = eglGetProcAddress(extensionList[i].name);
        if (*extensionList[i].fnptr == nullptr) {
            printf("Couldn't get address of %s()\n", extensionList[i].name);
            return false;
        }
    }

    initialized = true;
    return true;
}

In Stream.cpp, we setup the pipeline. In main.cpp we fetch a frame per 40ms. And In eglconsumer.cpp line 77 we printf two adjacent frames, and found they differ 80ms.

Hi ClancyLian, can you please also share the build command?

I have pack it in attachments. In stream.cpp function setPipeline() ,you should set your ip camera rtsp uri. the camera framerate is 25 fps.

current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616143963ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616144043ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144123ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144203ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144283ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616144362ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144443ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144523ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616144602ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144683ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616144763ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616144843ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616144922ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616145003ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616145083ms.
current frame's timestamp minus previous frame's timestamp: 81ms, current frame's timestamp: 616145164ms.
current frame's timestamp minus previous frame's timestamp: 78ms, current frame's timestamp: 616145243ms.
TVMR: FrameRate = 25.000000 
current frame's timestamp minus previous frame's timestamp: 81ms, current frame's timestamp: 616145324ms.
current frame's timestamp minus previous frame's timestamp: 78ms, current frame's timestamp: 616145403ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616145482ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616145563ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616145643ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616145723ms.
current frame's timestamp minus previous frame's timestamp: 81ms, current frame's timestamp: 616145804ms.
current frame's timestamp minus previous frame's timestamp: 81ms, current frame's timestamp: 616145885ms.
current frame's timestamp minus previous frame's timestamp: 77ms, current frame's timestamp: 616145963ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616146043ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616146123ms.
current frame's timestamp minus previous frame's timestamp: 79ms, current frame's timestamp: 616146203ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616146283ms.
current frame's timestamp minus previous frame's timestamp: 80ms, current frame's timestamp: 616146363ms.

Hi clancyLian,
On r28.1/TX2, I set up a rtsp server:
https://devtalk.nvidia.com/default/topic/1014789/jetson-tx1/-the-cpu-usage-cannot-down-use-cuda-decode-/post/5188538/#5188538

But hit error in running the app:

nvidia@tegra-ubuntu:~/nvvideosinktimestamptest$ ./nvvideosinktimestamptest
Initialize EGL frame consumer.
EGL API: 1.5
EGL Stream consumer - Mode: FIFO, Length: 4, latency 0.
Connect CUDA consumer.
Setting up Pipeline.
strRtsp = rtsp://127.0.0.1:8554/test.
sink pads is linked.
Setting up pads for decodebin.
Link pads failed -4.

Ir run OK in one line command:

nvidia@tegra-ubuntu:~/nvvideosinktimestamptest$ gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test ! decodebin ! nvvidconv ! nvoverlaysink
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Progress: (open) Opening Stream
Progress: (connect) Connecting to rtsp://127.0.0.1:8554/test
Progress: (open) Retrieving server options
Progress: (open) Retrieving media info
Progress: (request) SETUP stream 0
Progress: (request) SETUP stream 1
Progress: (open) Opened Stream
Setting pipeline to PLAYING ...
New clock: GstSystemClock
Progress: (request) Sending PLAY request
Progress: (request) Sending PLAY request
Progress: (request) Sent PLAY request
NvMMLiteOpen : Block : BlockType = 261
TVMR: NvMMLiteTVMRDecBlockOpen: 7907: NvMMLiteBlockOpen
NvMMLiteBlockCreate : Block : BlockType = 261
TVMR: cbBeginSequence: 1223: BeginSequence  1920x816, bVPR = 0
TVMR: LowCorner Frequency = 180000
TVMR: cbBeginSequence: 1622: DecodeBuffers = 2, pnvsi->eCodec = 4, codec = 0
TVMR: cbBeginSequence: 1693: Display Resolution : (1920x816)
TVMR: cbBeginSequence: 1694: Display Aspect Ratio : (1920x816)
TVMR: cbBeginSequence: 1762: ColorFormat : 5
TVMR: cbBeginSequence:1776 ColorSpace = NvColorSpace_YCbCr601
TVMR: cbBeginSequence: 1904: SurfaceLayout = 3
TVMR: cbBeginSequence: 2005: NumOfSurfaces = 9, InteraceStream = 0, InterlaceEnabled = 0, bSecure = 0, MVC = 0 Semiplanar = 1, bReinit = 1, BitDepthForSurface = 8 LumaBitDepth = 8, ChromaBitDepth = 8, ChromaFormat = 5
TVMR: cbBeginSequence: 2007: BeginSequence  ColorPrimaries = 2, TransferCharacteristics = 2, MatrixCoefficients = 2
Allocating new output: 1920x816 (x 9), ThumbnailMode = 0
OPENMAX: HandleNewStreamFormat: 3464: Send OMX_EventPortSettingsChanged : nFrameWidth = 1920, nFrameHeight = 816
TVMR: FrameRate = 24
TVMR: NVDEC LowCorner Freq = (144000 * 1024)
TVMR: FrameRate = 23.986280
TVMR: FrameRate = 23.975698
TVMR: FrameRate = 23.976101
TVMR: FrameRate = 23.976043
TVMR: FrameRate = 23.975986
TVMR: FrameRate = 23.976101
TVMR: FrameRate = 23.976101
TVMR: FrameRate = 23.975871
TVMR: FrameRate = 23.975871
TVMR: FrameRate = 23.976158
TVMR: FrameRate = 23.975928
TVMR: FrameRate = 23.976158
TVMR: FrameRate = 23.976273
TVMR: FrameRate = 23.975928
TVMR: FrameRate = 23.975928
TVMR: FrameRate = 23.976043
TVMR: FrameRate = 23.976101
TVMR: NvMMLiteTVMRDecDoWork: 6768: NVMMLITE_TVMR: EOS detected
TVMR: TVMRBufferProcessing: 5723: Processing of EOS
TVMR: TVMRBufferProcessing: 5800: Processing of EOS Done
Got EOS from element "pipeline0".
Execution ended after 0:01:31.048947615
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
TVMR: TVMRFrameStatusReporting: 6369: Closing TVMR Frame Status Thread -------------
TVMR: TVMRVPRFloorSizeSettingThread: 6179: Closing TVMRVPRFloorSizeSettingThread -------------
TVMR: TVMRFrameDelivery: 6219: Closing TVMR Frame Delivery Thread -------------
TVMR: NvMMLiteTVMRDecBlockClose: 8105: Done
Setting pipeline to NULL ...
Freeing pipeline ...

Any idea about the error?

Hi,DaneLLL

In the stream.cpp ‘onSourcePadAdded’ function, the stream type should be video; your camera may also expose an audio stream. You can do

if (strcmp(type, "video") != 0) {
        goto exit;
    }

And add file

#include <string.h>

Hi ClancyLian, please check the difference of the attached code
main.cpp (792 Bytes)
eglframeconsumer.cpp (6.86 KB)
stream.cpp (7.85 KB)

Hi, DaneLLL
you can try it again with attachment stream.cpp.
stream.cpp (7.87 KB)

Hi ClancyLian, the attachment in #11 is the fix to the issue.

Hi, DaneLLL

do you have replaced the stream.cpp in the attachment in #12 . The error is also below ?

nvidia@tegra-ubuntu:~/nvvideosinktimestamptest$ ./nvvideosinktimestamptest
Initialize EGL frame consumer.
EGL API: 1.5
EGL Stream consumer - Mode: FIFO, Length: 4, latency 0.
Connect CUDA consumer.
Setting up Pipeline.
strRtsp = rtsp://127.0.0.1:8554/test.
sink pads is linked.
Setting up pads for decodebin.
Link pads failed -4.

Could you try another IP camrea ?

Hi ClancyLian, I avoid it with a video-only source and have modification to your code in #11. By applying the difference to your code, you should get correct frame rate.

Hi DaneLLL

I don’t understand what you said. Do the demo now can run ? What’s the issue to you now ? what does it mean " I should get correct frame rate" ?

Hi ClancyLian,
Can you see the attached cpp in #11? It is modified from your original code. Please check the difference and apply it to your code.

I have tried it. But in function

// (Quoted from the modified eglframeconsumer.cpp in #11.) Variant of fetch()
// that blocks inside cuEGLStreamConsumerAcquireFrame() with a 50 ms timeout
// instead of polling for EGL_STREAM_STATE_NEW_FRAME_AVAILABLE_KHR, and then
// queries EGL_STREAM_TIME_NOW_KHR (the current stream time, not the acquired
// frame's timestamp -- see the discussion below).
CUeglFrame* EGLFrameConsumer::fetch()
{
    // cudaFree(nullptr) lazily creates/binds a CUDA context on this thread.
    if (cudaFree(nullptr) != cudaSuccess) {
        printf("Failed to initialize CUDA context");
        return nullptr;
    }

    // streamState is queried but no longer checked here; the acquire call
    // below does the waiting.
    EGLint streamState = 0;
    if (!eglQueryStreamKHR(display, stream, EGL_STREAM_STATE_KHR, &streamState)) {
        printf("Cuda consumer, eglQueryStreamKHR EGL_STREAM_STATE_KHR failed.\n");
        return nullptr;
    }

    // 50000 us timeout; at 25 fps a new frame should arrive within 40 ms.
    CUresult cuStatus = cuEGLStreamConsumerAcquireFrame(&cudaConnection, &cudaResource, nullptr, 50000);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda Acquire EGL stream frame failed cuStatus=%d.\n", cuStatus);
        return nullptr;
    }

    EGLTimeKHR timeStampNow = 0;
    // NOTE(review): the error message names EGL_STREAM_STATE_KHR although
    // the queried attribute is EGL_STREAM_TIME_NOW_KHR.
    if (!eglQueryStreamTimeKHR(display, stream, EGL_STREAM_TIME_NOW_KHR, &timeStampNow)) {
        printf("Cuda consumer, eglQueryStreamTimeKHR EGL_STREAM_STATE_KHR failed.\n");
        return nullptr;
    }
    printf("timestamp: %ldms, diff %ldms \n", timeStampNow/1000000, (timeStampNow-timeStamp)/1000000);
    timeStamp = timeStampNow;

    cuStatus = cuGraphicsResourceGetMappedEglFrame(&eglFrame, cudaResource, 0, 0);
    if (cuStatus != CUDA_SUCCESS) {
        printf("Cuda get resource failed with %d.\n", cuStatus);
        cuEGLStreamConsumerReleaseFrame(&cudaConnection, cudaResource, nullptr);
        return nullptr;
    }

    // The consumer expects pitched RGBA frames (asserts compile out in
    // release builds).
    assert(eglFrame.cuFormat == CU_AD_FORMAT_UNSIGNED_INT8);
    assert(eglFrame.eglColorFormat == CU_EGL_COLOR_FORMAT_RGBA);
    assert(eglFrame.planeCount == 1);
    assert(eglFrame.numChannels == 4);
    assert(eglFrame.frameType == CU_EGL_FRAME_TYPE_PITCH);
    assert(eglFrame.pitch == eglFrame.width * 4);
    return &eglFrame;
}

I want to print out the frame’s timestamp and compare them but not now timestamp, so you should change EGL_STREAM_TIME_NOW_KHR to EGL_STREAM_TIME_CONSUMER_KHR

Hi ClancyLian,
40ms is expected for 25fps. Setting timeout to 50000us may not be good enough for 25 fps, you can try 60000, 70000.

Hi, DaneLLL
I have changed my comment in #18. I want to use frame’s timestamp to do sync, but the frame’s timestamp may have issue.