Dear experts,
I’m currently porting an HDMI-to-4-lane-CSI bridge driver from the Xavier AGX to the Xavier NX and trying to H.264-encode the video provided by a 4K camera, but I cannot reach the camera’s maximum frame rate (30 fps).
The camera nodes in the dtsi are as follows:
/ {
host1x {
vi@15c10000 {
num-channels = <2>;
ports {
#address-cells = <1>;
#size-cells = <0>;
vi_port0: port@0 {
reg = <0>;
toto_vi_in0: endpoint {
port-index = <0>;
bus-width = <4>;
remote-endpoint = <&toto_csi_out0>;
};
};
vi_port1: port@1 {
reg = <1>;
toto_vi_in1: endpoint {
port-index = <2>;
bus-width = <4>;
remote-endpoint = <&toto_csi_out1>;
};
};
};
};
nvcsi@15a00000 {
num-channels = <2>;
#address-cells = <1>;
#size-cells = <0>;
csi_chan0: channel@0 {
reg = <0>;
ports {
#address-cells = <1>;
#size-cells = <0>;
csi_chan0_port0: port@0 {
reg = <0>;
toto_csi_in0: endpoint@0 {
port-index = <0>;
bus-width = <4>;
remote-endpoint = <&toto_out0>;
};
};
csi_chan0_port1: port@1 {
reg = <1>;
toto_csi_out0: endpoint@1 {
remote-endpoint = <&toto_vi_in0>;
};
};
};
};
csi_chan1: channel@1 {
reg = <1>;
ports {
#address-cells = <1>;
#size-cells = <0>;
csi_chan1_port0: port@0 {
reg = <0>;
toto_csi_in1: endpoint@2 {
port-index = <2>;
bus-width = <4>;
remote-endpoint = <&toto_out1>;
};
};
csi_chan1_port1: port@1 {
reg = <1>;
toto_csi_out1: endpoint@3 {
remote-endpoint = <&toto_vi_in1>;
};
};
};
};
};
};
cam_i2cmux {
i2c_0:i2c@0 {
toto_a@2b {
compatible = "nvidia,toto";
/* I2C device address */
reg = <0x2b>;
/* V4L2 device node location */
devnode = "video0";
/* Physical dimensions of sensor */
physical_w = "3.674";
physical_h = "2.738";
/* Define any required hw resources needed by driver */
/* ie. clocks, io pins, power sources */
avdd-reg = "vana";
iovdd-reg = "vif";
dvdd-reg = "vdig";
/* Sensor output flip settings */
vertical-flip = "true";
/* if true, delay gain setting by one frame to be in sync with exposure */
delayed_gain = "true";
clocks = <&bpmp_clks TEGRA194_CLK_EXTPERIPH1>,
<&bpmp_clks TEGRA194_CLK_PLLP_OUT0>;
clock-names = "extperiph1", "pllp_grtba";
/* clock-frequency = <24000000>; */
status = "okay";
mclk = "extperiph1";
clock-frequency = <24000000>;
/*vana-supply = <&p2822_avdd_cam_2v8>;
vif-supply = <&p2822_vdd_1v8_cvb>;
vdig-supply = <&p2822_vdd_1v8_cvb>;*/
mode0 { // TOTO_MODE_3840X2160
mclk_khz = "24000";
num_lanes = "4";
tegra_sinterface = "serial_a";
phy_mode = "DPHY";
discontinuous_clk = "yes";
dpcm_enable = "false";
cil_settletime = "0";
active_w = "3840";
active_h = "2160";
mode_type = "yuv";
pixel_phase = "uyvy";
csi_pixel_bit_depth = "16";
readout_orientation = "0";
line_length = "3840";
inherent_gain = "1";
mclk_multiplier = "24";
pix_clk_hz = "312500000";
gain_factor = "1000000";
min_gain_val = "1000000";
max_gain_val = "44400000";
step_gain_val = "1";
default_gain = "1000000";
min_hdr_ratio = "1";
max_hdr_ratio = "1";
framerate_factor = "1000000";
min_framerate = "1500000";
max_framerate = "30000000";
step_framerate = "1";
default_framerate= "30000000";
exposure_factor = "1000000";
min_exp_time = "44";
max_exp_time = "478696";
step_exp_time = "1";
default_exp_time = "16667";/* us */
embedded_metadata_height = "0";
};
ports {
#address-cells = <1>;
#size-cells = <0>;
port@0 {
reg = <0>;
toto_out0: endpoint {
port-index = <0>;
bus-width = <4>;
remote-endpoint = <&toto_csi_in0>;
};
};
};
};
};
i2c_1:i2c@1 {
toto_c@2b {
compatible = "nvidia,toto";
/* I2C device address */
reg = <0x2b>;
/* V4L2 device node location */
devnode = "video1";
/* Physical dimensions of sensor */
physical_w = "3.674";
physical_h = "2.738";
/* Define any required hw resources needed by driver */
/* ie. clocks, io pins, power sources */
avdd-reg = "vana";
iovdd-reg = "vif";
dvdd-reg = "vdig";
/* Sensor output flip settings */
vertical-flip = "true";
/* if true, delay gain setting by one frame to be in sync with exposure */
delayed_gain = "true";
clocks = <&bpmp_clks TEGRA194_CLK_EXTPERIPH1>,
<&bpmp_clks TEGRA194_CLK_PLLP_OUT0>;
clock-names = "extperiph1", "pllp_grtba";
/* clock-frequency = <24000000>; */
status = "okay";
mclk = "extperiph1";
/*vana-supply = <&p2822_avdd_cam_2v8>;
vif-supply = <&p2822_vdd_1v8_cvb>;
vdig-supply = <&p2822_vdd_1v8_cvb>;*/
mode0 { // TOTO_MODE_3840X2160
mclk_khz = "24000";
num_lanes = "4";
tegra_sinterface = "serial_c";
phy_mode = "DPHY";
discontinuous_clk = "yes";
dpcm_enable = "false";
cil_settletime = "0";
active_w = "3840";
active_h = "2160";
mode_type = "yuv";
pixel_phase = "uyvy";
csi_pixel_bit_depth = "16";
readout_orientation = "0";
line_length = "3840";
inherent_gain = "1";
mclk_multiplier = "24";
pix_clk_hz = "312500000";
gain_factor = "1000000";
min_gain_val = "1000000";
max_gain_val = "44400000";
step_gain_val = "1";
default_gain = "1000000";
min_hdr_ratio = "1";
max_hdr_ratio = "1";
framerate_factor = "1000000";
min_framerate = "1500000";
max_framerate = "30000000";
step_framerate = "1";
default_framerate= "30000000";
exposure_factor = "1000000";
min_exp_time = "44";
max_exp_time = "478696";
step_exp_time = "1";
default_exp_time = "16667";/* us */
embedded_metadata_height = "0";
};
ports {
#address-cells = <1>;
#size-cells = <0>;
port@0 {
reg = <0>;
toto_out1: endpoint {
port-index = <2>;
bus-width = <4>;
remote-endpoint = <&toto_csi_in1>;
};
};
};
};
};
};
tcp: tegra-camera-platform {
compatible = "nvidia, tegra-camera-platform";
/**
* Physical settings to calculate max ISO BW
*
* num_csi_lanes = <>;
* Total number of CSI lanes when all cameras are active
*
* max_lane_speed = <>;
* Max lane speed in Kbit/s
*
* min_bits_per_pixel = <>;
* Min bits per pixel
*
* vi_peak_byte_per_pixel = <>;
* Max byte per pixel for the VI ISO case
*
* vi_bw_margin_pct = <>;
* Vi bandwidth margin in percentage
*
* max_pixel_rate = <>;
* Max pixel rate in Kpixel/s for the ISP ISO case
*
* isp_peak_byte_per_pixel = <>;
* Max byte per pixel for the ISP ISO case
*
* isp_bw_margin_pct = <>;
* Isp bandwidth margin in percentage
*/
num_csi_lanes = <8>;
max_lane_speed = <2500000>;
min_bits_per_pixel = <10>;
vi_peak_byte_per_pixel = <2>;
vi_bw_margin_pct = <25>;
max_pixel_rate = <750000>;
isp_peak_byte_per_pixel = <5>;
isp_bw_margin_pct = <25>;
/**
* The general guideline for naming badge_info contains 3 parts, and is as follows,
* The first part is the camera_board_id for the module; if the module is in a FFD
* platform, then use the platform name for this part.
* The second part contains the position of the module, ex. "rear" or "front".
* The third part contains the last 6 characters of a part number which is found
* in the module's specsheet from the vendor.
*/
modules {
cam_module0: module0 {
badge = "camera_front_123";
position = "front";
orientation = "1";
cam_module0_drivernode0: drivernode0 {
pcl_id = "v4l2_sensor";
devname = "toto 9-002b";
proc-device-tree = "/proc/device-tree/cam_i2cmux/i2c@0/toto_a@2b";
};
};
cam_module1: module1 {
badge = "camera_rear_123";
position = "rear";
orientation = "1";
cam_module1_drivernode0: drivernode0 {
pcl_id = "v4l2_sensor";
devname = "toto 10-002b";
proc-device-tree = "/proc/device-tree/cam_i2cmux/i2c@1/toto_c@2b";
};
};
};
};
};
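As a first sanity check, to confirm that the driver really advertises the 3840x2160 mode defined in the dtsi above, I list the formats exposed by the bridge with the standard v4l2-ctl tool from v4l-utils:
$ v4l2-ctl -d /dev/video0 --list-formats-ext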
I calculated pix_clk_hz in two different ways.
Based on the sensor CSI lane output rate:
pix_clk_hz = 2500 Mbps * 4 lanes / 8 = 1250000000 (8 bits per pixel)
pix_clk_hz = 2500 Mbps * 4 lanes / 16 = 625000000 (16 bits per pixel)
Based on the frame size and frame rate:
pix_clk_hz = 3840 x 2160 x 30 x 1 = 248832000 (8 bits per pixel)
pix_clk_hz = 3840 x 2160 x 30 x 2 = 497664000 (16 bits per pixel)
The command I use to save the H.264-encoded video:
$ gst-launch-1.0 v4l2src device=/dev/video0 ! videoconvert ! omxh264enc ! h264parse ! qtmux ! filesink location=toto.h264 -e
Setting pipeline to PAUSED …
Pipeline is live and does not need PREROLL …
Setting pipeline to PLAYING …
New clock: GstSystemClock
Framerate set to : 30 at NvxVideoEncoderSetParameterNvMMLiteOpen : Block : BlockType = 4
===== NVMEDIA: NVENC =====
NvMMLiteBlockCreate : Block : BlockType = 4
H264: Profile = 66, Level = 40
(NVENC runs at 499 MHz during the encoding.)
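Since videoconvert does the 4K UYVY-to-I420 conversion on the CPU, I suspect it may itself limit the rate. A variant I plan to try next keeps the conversion and encoding on the hardware engines (only a sketch: it assumes the stock JetPack elements nvvidconv and nvv4l2h264enc, and the source caps and output filename may need adjusting to what the bridge actually reports):
$ gst-launch-1.0 -e v4l2src device=/dev/video0 \
    ! 'video/x-raw, format=UYVY, width=3840, height=2160, framerate=30/1' \
    ! nvvidconv ! 'video/x-raw(memory:NVMM), format=NV12' \
    ! nvv4l2h264enc ! h264parse ! qtmux ! filesink location=toto.mp4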
The command I use to inspect the recorded H.264 stream:
$ gst-discoverer-1.0 toto.h264 -v
Analyzing file:///home/nvidia/toto.h264
Opening in BLOCKING MODE
NvMMLiteOpen : Block : BlockType = 261
NVMEDIA: Reading vendor.tegra.display-size : status: 6
NvMMLiteBlockCreate : Block : BlockType = 261
Done discovering file:///home/nvidia/toto.h264

Topology:
  container: video/quicktime
    video: video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)5.1, profile=(string)constrained-baseline, width=(int)3840, height=(int)2160, framerate=(fraction)100000/13307, pixel-aspect-ratio=(fraction)1/1, interlace-mode=(string)progressive, colorimetry=(string)2:6:0:7, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true
      Tags:
        video codec: H.264 / AVC
        maximum bitrate: 34258320
        bitrate: 15566333
        datetime: 2020-11-09T08:32:01Z
        container format: Quicktime
      Codec:
        video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)5.1, profile=(string)constrained-baseline, width=(int)3840, height=(int)2160, framerate=(fraction)100000/13307, pixel-aspect-ratio=(fraction)1/1, interlace-mode=(string)progressive, colorimetry=(string)2:6:0:7, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true
      Additional info:
        None
      Stream ID: 8fdbb59d0bae59b8c625e0f21ceac4b42887b4efa066081575d45c6969026e25/001
      Width: 3840
      Height: 2160
      Depth: 24
      Frame rate: 100000/13307
      Pixel aspect ratio: 1/1
      Interlaced: false
      Bitrate: 15566333
      Max bitrate: 34258320

Properties:
  Duration: 0:01:00.740333333
  Seekable: yes
  Live: no
  Tags:
      video codec: H.264 / AVC
      maximum bitrate: 34258320
      bitrate: 15566333
      datetime: 2020-11-09T08:32:01Z
      container format: Quicktime
In every case, the reported frame rate is around 100000/13307 ≈ 7.5 fps, as shown above.
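To separate the capture side from the conversion/encoding side, I also intend to measure the raw capture rate with v4l2-ctl (again only a sketch: bypass_mode is the Tegra-specific control that NVIDIA's examples set to 0 for raw V4L2 capture, and the pixel format may need adjusting for this bridge):
$ v4l2-ctl -d /dev/video0 --set-fmt-video=width=3840,height=2160,pixelformat=UYVY \
      --set-ctrl bypass_mode=0 --stream-mmap --stream-count=300
If this already reports about 7.5 fps, the limitation would be upstream of videoconvert/NVENC.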
Camera spec :
QFHD: 2160p/29.97, 2160p/25, 2160p/23.98
FHD: 1080p/59.94, 1080p/50, 1080p/29.97 (2-2), 1080p/25 (2-2), 1080i/59.94, 1080i/50,
HD: 720p/59.94, 720p/50, 720p/23.98
SD: 480p/59.94, 576p/50
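In case part of this is simply a clock/power-mode issue, I will also make sure the clocks are locked while testing: sudo nvpmodel -q shows the active power mode and sudo jetson_clocks pins the clocks to their maximum. In addition, my understanding is that the VI/NVCSI clocks can be locked through debugfs as root (the paths below are my assumption for the Xavier NX, taken from similar Xavier threads):
$ sudo nvpmodel -q
$ sudo jetson_clocks
$ sudo su
# echo 1 > /sys/kernel/debug/bpmp/debug/clk/vi/mrq_rate_locked
# echo 1 > /sys/kernel/debug/bpmp/debug/clk/nvcsi/mrq_rate_locked
# cat /sys/kernel/debug/bpmp/debug/clk/vi/max_rate > /sys/kernel/debug/bpmp/debug/clk/vi/rate
# cat /sys/kernel/debug/bpmp/debug/clk/nvcsi/max_rate > /sys/kernel/debug/bpmp/debug/clk/nvcsi/rate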
Could anyone point out what configuration or setting I am missing in order to reach the camera's maximum frame rate (30 fps), please?
Thanks in advance,
K.