• Hardware Platform (Jetson / GPU): Jetson Nano
• DeepStream Version: 6.2
I have the following pipeline:
appsrc do-timestamp=true caps="video/x-raw(memory:NVMM),width=(int)640,height=(int)480,format=(string)NV12,framerate=(fraction)25/2" ! nvjpegenc ! multifilesink location=out%d.jpg
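For comparison, the encode branch on its own can be approximated with a test source instead of appsrc (videotestsrc and nvvideoconvert here are only my stand-in for producing NVMM NV12 input):
gst-launch-1.0 videotestsrc num-buffers=50 ! video/x-raw,width=640,height=480 ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=NV12' ! nvjpegenc ! multifilesink location=test%d.jpg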
And I push frames into it at specific time intervals with this code:
#include "interprocess_info.h"
#include "socket_helpers.h"
#include "gstpipeline.h"
#include <boost/asio.hpp>
#include <boost/interprocess/sync/interprocess_semaphore.hpp>
#include <nvbufsurface.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <getopt.h>
#include <iostream>
#include <fstream>
#include <chrono>
#include <memory>
#include <iomanip>
#include <sstream> // std::stringstream
#include <cstring> // std::memset / memcpy
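// Buffer release callback: recover the NvBufSurface from its dmabuf fd and destroy it
// once downstream is done with the wrapped GstBuffer.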
static void notify_to_destroy(gpointer user_data)
{
uint64_t dmabuf_fd;
NvBufSurface *surface;
dmabuf_fd = *(uint64_t *)user_data;
GST_INFO("NvBufSurfaceDestroy %lu", dmabuf_fd);
NvBufSurfaceFromFd(dmabuf_fd, (void **)(&surface));
NvBufSurfaceDestroy(surface);
g_free(user_data);
}
static bool execute(const std::string &pipeline_desc)
{
const int frame_w = 640, frame_h = 480;
unsetenv("DISPLAY");
gst_init(nullptr, nullptr);
std::stringstream command;
command << "appsrc name=mysource";
command << pipeline_desc;
std::cout << command.str() << std::endl;
std::string cur_time_str;
Pipeline pipeline(command.str(), "./build/results/dmafdgst" + cur_time_str + ".txt");
auto appsrc_ = pipeline.getElement("mysource");
pipeline.launch();
size_t last_frame{0};
auto start = std::chrono::system_clock::now();
while (std::chrono::duration_cast<std::chrono::seconds>(std::chrono::system_clock::now() - start).count() < 600) {
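// Allocate a fresh NVMM surface for this frame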
NvBufSurface *nvbuf;
{
NvBufSurfaceCreateParams localCreateParams{
.gpuId = 0,
.width = frame_w,
.height = frame_h,
.size = 0,
.isContiguous = false,
.colorFormat = NVBUF_COLOR_FORMAT_NV12,
.layout = NVBUF_LAYOUT_PITCH,
.memType = NVBUF_MEM_DEFAULT};
if (NvBufSurfaceCreate(&nvbuf, 1, &localCreateParams)) {
printf("Failed to create nvbufsurface");
return false;
}
nvbuf->numFilled = 1;
}
{
if (NvBufSurfaceMap(nvbuf, 0, -1, NVBUF_MAP_READ_WRITE)) {
printf("Failed to map nvbufsurface");
return false;
}
if (NvBufSurfaceSyncForCpu(nvbuf, 0, -1)) {
printf("Failed to sync for cpu\n");
return false;
}
// Fill the frame with a gray level that changes per frame, so each output image should look different
std::memset((char *)nvbuf->surfaceList[0].mappedAddr.addr[0], (last_frame * 32) % 256, nvbuf->surfaceList[0].planeParams.psize[0]);
std::memset((char *)nvbuf->surfaceList[0].mappedAddr.addr[1], 128, nvbuf->surfaceList[0].planeParams.psize[1]);
if (NvBufSurfaceUnMap(nvbuf, 0, -1)) {
printf("Failed to map nvbufsurface");
return false;
}
}
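// Wrap the NvBufSurface struct in a GstBuffer and push it to appsrc;
// notify_to_destroy frees the surface when the buffer is released.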
{
GstMapInfo map;
GstFlowReturn ret;
auto user_data = g_malloc(sizeof(uint64_t));
GST_INFO("NvBufSurfaceAllocate %lu", nvbuf->surfaceList[0].bufferDesc);
*(uint64_t *)user_data = nvbuf->surfaceList[0].bufferDesc;
auto data = g_malloc(sizeof(NvBufSurface));
auto buffer = gst_buffer_new_wrapped_full(
(GstMemoryFlags)0, data, sizeof(NvBufSurface), 0, sizeof(NvBufSurface), user_data, notify_to_destroy);
GST_BUFFER_PTS(buffer) = std::chrono::duration_cast<std::chrono::nanoseconds>(std::chrono::system_clock::now() - start).count();
GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_LIVE);
GST_BUFFER_OFFSET(buffer) = last_frame;
printf("PTS = %lu, OFFSET = %lu\n", GST_BUFFER_PTS(buffer), GST_BUFFER_OFFSET(buffer));
gst_buffer_map(buffer, &map, GST_MAP_WRITE);
memcpy(map.data, nvbuf, sizeof(NvBufSurface));
gst_buffer_unmap(buffer, &map);
g_signal_emit_by_name(appsrc_.get(), "push-buffer", buffer, &ret);
gst_buffer_unref(buffer);
last_frame++;
}
usleep(1000); // Push period: ~1 ms between frames
}
return true;
}
int main(int argc, char *argv[])
{
std::stringstream pipeline_desc{};
for (int i = 1; i < argc; i++) {
pipeline_desc << " " << argv[i];
}
if (!execute(pipeline_desc.str())) return EXIT_FAILURE;
return EXIT_SUCCESS;
}
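main() just appends the command-line arguments after "appsrc name=mysource", so I start the program roughly like this (the binary name is only a placeholder for whatever my build produces):
./build/dmafd_appsrc 'do-timestamp=true caps="video/x-raw(memory:NVMM),width=(int)640,height=(int)480,format=(string)NV12,framerate=(fraction)25/2" ! nvjpegenc ! multifilesink location=out%d.jpg'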
Sometimes nvjpegenc outputs a copy of the first frame it received instead of the frame that was pushed at that moment.
The longer the push period, the more copies of the first frame are produced and the fewer correct frames come out.
If the period is longer than 1 millisecond, nvjpegenc just repeats the first received frame.
If I add an element to the pipeline that modifies the buffer, like this:
appsrc do-timestamp=true caps="video/x-raw(memory:NVMM),width=(int)640,height=(int)480,format=(string)NV12,framerate=(fraction)25/2" ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=NV12,colorimetry=bt601' ! nvjpegenc ! multifilesink location=out%d.jpg
nvjpegenc works correctly (no copies of the first frame).