Output video FPS not matching with the input video FPS

• Hardware Platform : DGPU
• DeepStream Version : 6.2.0
• TensorRT Version : 8.5.2.2

• NVIDIA GPU Driver Version : 530.30.02
• Issue Type
Input and output fps is not matching when used with the nvstreammux plugin
• code for reproducibility :
Pipeline only with decoder → encoder in which the input and output video FPS are matching

import os
import sys
import time

import gi

# The GObject introspection version must be pinned before importing Gst.
gi.require_version("Gst", "1.0")
from gi.repository import GLib, Gst  # noqa: E402

from common.bus_call import bus_call  # noqa: E402
import pyds  # noqa: E402

def cb_new_pad(qtdemux, pad, data):
    """Handle qtdemux "pad-added": link its video pad to the H.264 parser.

    Args:
        qtdemux: The qtdemux element that emitted the signal.
        pad: The newly added source pad.
        data: The h264parse element whose static "sink" pad is the target.
    """
    h264parser = data
    name = pad.get_name()
    # pad.link() returns Gst.PadLinkReturn.OK (== 0) on success, so a
    # truthy return value here means the link attempt failed.
    if "video_0" in name and pad.link(h264parser.get_static_pad("sink")):
        print(f"Could not link {name} pad of qtdemux to sink pad of h264parser")

Gst.init(None)
pipeline = Gst.Pipeline()

# Create the elements: file -> demux -> parse -> decode -> (nvstreammux)
# -> encode -> parse -> mux -> file.
source = Gst.ElementFactory.make("filesrc", "file-source")
demux = Gst.ElementFactory.make("qtdemux", "demux")
h264parser = Gst.ElementFactory.make("h264parse", "h264-parser")
decoder = Gst.ElementFactory.make("nvv4l2decoder", "nvv4l2-decoder")
streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
encoder = Gst.ElementFactory.make("nvv4l2h264enc", "nvv4l2-h264-encoder")
h264parser2 = Gst.ElementFactory.make("h264parse", "h264-parser2")
qtmux = Gst.ElementFactory.make("qtmux", "qtmux")
sink = Gst.ElementFactory.make("filesink", "filesink")

opFileName = "sample_1.mp4"
sink.set_property("location", opFileName)
source.set_property("location", "input.mp4")  # replace with your mp4 file
streammux.set_property("batch-size", 1)
# width/height are properties of the legacy nvstreammux only; the new
# mux (USE_NEW_NVSTREAMMUX=yes) does not scale and rejects them.
if os.environ.get("USE_NEW_NVSTREAMMUX") != "yes":
    streammux.set_property("width", 1280)
    streammux.set_property("height", 720)

# Add every element to the pipeline before linking.
for element in (source, demux, h264parser, decoder, streammux,
                encoder, h264parser2, qtmux, sink):
    pipeline.add(element)

print("Linking elements in the Pipeline \n")

# qtdemux creates its source pads dynamically, so the demux -> parser
# link is made from the "pad-added" callback; all other links are static.
source.link(demux)
demux.connect("pad-added", cb_new_pad, h264parser)
h264parser.link(decoder)
# NOTE(review): streammux is added to the pipeline but deliberately
# bypassed in this variant — the decoder feeds the encoder directly.
decoder.link(encoder)
encoder.link(h264parser2)
h264parser2.link(qtmux)
qtmux.link(sink)

loop = GLib.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
# bus_call quits the main loop on EOS or ERROR bus messages.
bus.connect("message", bus_call, loop)
print("Starting pipeline (First Time)\n")
time_run_pipeline_start = time.time()  # start timestamp for profiling
pipeline.set_state(Gst.State.PLAYING)
try:
    loop.run()
except KeyboardInterrupt:
    # Allow Ctrl-C to stop the pipeline without a traceback.
    pass
finally:
    # Drop to NULL so qtmux/filesink are finalized and the mp4 is playable.
    pipeline.set_state(Gst.State.NULL)

Pipeline with decoder-> nvstreammux ->encoder in which the input and output video FPS are not matching
import os
import sys
import time

import gi

# The GObject introspection version must be pinned before importing Gst.
gi.require_version("Gst", "1.0")
from gi.repository import GLib, Gst  # noqa: E402

from common.bus_call import bus_call  # noqa: E402
import pyds  # noqa: E402

def cb_new_pad(qtdemux, pad, data):
    """Handle qtdemux "pad-added": link its video pad to the H.264 parser.

    Args:
        qtdemux: The qtdemux element that emitted the signal.
        pad: The newly added source pad.
        data: The h264parse element whose static "sink" pad is the target.
    """
    h264parser = data
    name = pad.get_name()
    # pad.link() returns Gst.PadLinkReturn.OK (== 0) on success, so a
    # truthy return value here means the link attempt failed.
    if "video_0" in name and pad.link(h264parser.get_static_pad("sink")):
        print(f"Could not link {name} pad of qtdemux to sink pad of h264parser")

Gst.init(None)
pipeline = Gst.Pipeline()

# Create the elements: file -> demux -> parse -> decode -> nvstreammux
# -> encode -> parse -> mux -> file.
source = Gst.ElementFactory.make("filesrc", "file-source")
demux = Gst.ElementFactory.make("qtdemux", "demux")
h264parser = Gst.ElementFactory.make("h264parse", "h264-parser")
decoder = Gst.ElementFactory.make("nvv4l2decoder", "nvv4l2-decoder")
streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
encoder = Gst.ElementFactory.make("nvv4l2h264enc", "nvv4l2-h264-encoder")
h264parser2 = Gst.ElementFactory.make("h264parse", "h264-parser2")
qtmux = Gst.ElementFactory.make("qtmux", "qtmux")
sink = Gst.ElementFactory.make("filesink", "filesink")

opFileName = "sample_1.mp4"
sink.set_property("location", opFileName)
source.set_property("location", "input.mp4")  # replace with your mp4 file
streammux.set_property("batch-size", 1)
# width/height are properties of the legacy nvstreammux only; the new
# mux (USE_NEW_NVSTREAMMUX=yes) does not scale and rejects them.
if os.environ.get("USE_NEW_NVSTREAMMUX") != "yes":
    streammux.set_property("width", 1280)
    streammux.set_property("height", 720)

# Add every element to the pipeline before linking.
for element in (source, demux, h264parser, decoder, streammux,
                encoder, h264parser2, qtmux, sink):
    pipeline.add(element)

print("Linking elements in the Pipeline \n")

# qtdemux creates its source pads dynamically, so the demux -> parser
# link is made from the "pad-added" callback; all other links are static.
source.link(demux)
demux.connect("pad-added", cb_new_pad, h264parser)
h264parser.link(decoder)

# nvstreammux exposes request sink pads (sink_%u); request one and link
# the decoder's static src pad to it manually.
sinkpad = streammux.get_request_pad("sink_0")
if not sinkpad:
    sys.stderr.write(" Unable to get the sink pad of streammux \n")
srcpad = decoder.get_static_pad("src")
if not srcpad:
    sys.stderr.write(" Unable to get source pad of decoder \n")
srcpad.link(sinkpad)

streammux.link(encoder)
encoder.link(h264parser2)
h264parser2.link(qtmux)
qtmux.link(sink)
loop = GLib.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
# bus_call quits the main loop on EOS or ERROR bus messages.
bus.connect("message", bus_call, loop)
print("Starting pipeline (First Time)\n")
time_run_pipeline_start = time.time()  # start timestamp for profiling
pipeline.set_state(Gst.State.PLAYING)
try:
    loop.run()
except KeyboardInterrupt:
    # Allow Ctrl-C to stop the pipeline without a traceback.
    pass
finally:
    # Drop to NULL so qtmux/filesink are finalized and the mp4 is playable.
    pipeline.set_state(Gst.State.NULL)

• Requirement details

  • This is sample working code snippet provided.
  • nvstreammux is used prior to nvinfer in the actual pipeline. Upon experimentation we have cornered the issue to nvstreammux causing the FPS variation.
  • Please suggest methods in which we can maintain the same FPS for output video.
    Eg : When a video with 18.57FPS is passed to the pipeline with
    decoder-> nvstreammux ->encoder : Its providing a fps of 30.15,
    But the same video passed to the pipeline with
    decoder->encoder : Its providing the same FPS as input
1 Like

Could you try to set the frame-duration parameter for nvstreammux?

Tried using them yet there is mismatch in the fps and duration

  1. streammux.set_property(‘frame-duration’, 10000) # 10000 is in ms , intended to output video at 10 seconds

Also is there any method to set the FPS in the encoder?
Could not find any properties related to FPS in the encoder.

The fps is controlled by nvstreammux, please refer to the link to learn how to set the timestamp for nvstreammux.
Fps is not calculated based on the interval frame of output video, but rather on the timestamp of each frame.

In the documentation i can see that only “attach-sys-ts” has the property to set the timestamp.
I have included this in the pipeline : streammux.set_property(‘attach-sys-ts’, True)
But with this configuration also I am seeing different FPS

Please provide the specific property using which we can match the FPS of the output.

Iterating Again
When nvstreammux is excluded in the pipeline the output FPS is matching
When included, nvstreammux is altering the FPS component.

The parameter is for an rtp source. In theory, you don’t need to set any parameters related to pts. I have tried the pipeline below and it will not change the frame rate. Could you try it with your stream?

gst-launch-1.0 uridecodebin uri=file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264_15fps.mp4 ! m.sink_0 nvstreammux name=m width=1920 height=1080 batch-size=1 ! nvv4l2h264enc ! h264parse ! qtmux ! filesink location=test0.mp4

Hi @yuweiw,
Thanks for your response.
Using the command provided also same issue persists for our video.

I/p video had 18.453 FPS and 191 frame count
Output video has 30.158 FPS and 191 frame count

In the experiment we have done, out of 200 videos, 179 videos have this output FPS mismatch.

Is there any other option we can try to set the FPS ?
Or are we missing some meta information in the source video

Could you attach or message to me your video?

I am sorry, the video is confidential and can't be shared.
Can you please share a workaround to set the FPS?

These are the findings Using : fpsdisplaysink video-sink=fakesink text-overlay=false sync=false -v 2>&1 at the tail of the command:

1. The frame rate are 0/1 in all the elements of the pipeline for which there is a output FPS mismatch

/GstPipeline:pipeline0/GstH264Parse:h264parse0.GstPad:src: caps = video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)3.1, profile=(string)high, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, colorimetry=(string)2:4:5:3, framerate=(fraction)0/1, interlace-mode=(string)progressive, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true

GstPipeline:pipeline0/nvv4l2decoder:nvv4l2decoder0.GstPad:sink: caps = video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)3.1, profile=(string)high, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, colorimetry=(string)2:4:5:3, framerate=(fraction)0/1, interlace-mode=(string)

2. The frame rate are inferred from the source video in all the elements of the pipeline for which there is a output FPS match

/GstPipeline:pipeline0/GstH264Parse:h264parse0.GstPad:src: caps = video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)3.1, profile=(string)high, width=(int)1280, height=(int)720, framerate=(fraction)75/4, pixel-aspect-ratio=(fraction)1/1, colorimetry=(string)2:4:5:3, interlace-mode=(string)progressive, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true

/GstPipeline:pipeline0/nvv4l2decoder:nvv4l2decoder0.GstPad:sink: caps = video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)3.1, profile=(string)high, width=(int)1280, height=(int)720, framerate=(fraction)75/4, pixel-aspect-ratio=(fraction)1/1, colorimetry=(string)2:4:5:3, interlace-mode=(string)progressive, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true

For the videos which do not have the Same FPS as input , FPS information is not passed downstream.
Probably we are missing this metadata information from stream

Can this be resolved using some parameter in nvstreammux?
Or is there any alternative plugins that can used to get the FPS information

The fpsdisplaysink just show the current and average framerate for the running time. Your problem is that the fps in the video file do not match, right?
As the pipeline I attached before, the fps of the sample_1080p_h264_15fps.mp4 is same as the test0.mp4. You can try that with our demo video in the /opt/nvidia/deepstream/deepstream/samples/streams/ path. If there is only a problem with your video source, we need the video source to analyze.

Hi yuweiw,
Thanks for your valuable inputs.
We will try to Analyse the source video

Thank you :)

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.