Should I add the MIPI data type (0x2D) in the kernel code to support the RAW14 format?

Hi,

I added the RAW14 format by following other forum topics,
but I could not find the RAW14 MIPI data type (0x2D) in the kernel code.

Should I add the 0x2D MIPI data type to support RAW14?

Below is our code to support the RGGB RAW14 format.

diff --git a/sources/kernel/kernel-4.9/drivers/media/v4l2-core/v4l2-ioctl.c b/sources/kernel/kernel-4.9/drivers/media/v4l2-core/v4l2-ioctl.c
index 222b270fe..a72b2d86c 100644
--- a/sources/kernel/kernel-4.9/drivers/media/v4l2-core/v4l2-ioctl.c
+++ b/sources/kernel/kernel-4.9/drivers/media/v4l2-core/v4l2-ioctl.c
@@ -1228,6 +1228,7 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt)
 	case V4L2_PIX_FMT_SGBRG12:	descr = "12-bit Bayer GBGB/RGRG"; break;
 	case V4L2_PIX_FMT_SGRBG12:	descr = "12-bit Bayer GRGR/BGBG"; break;
 	case V4L2_PIX_FMT_SRGGB12:	descr = "12-bit Bayer RGRG/GBGB"; break;
+	case V4L2_PIX_FMT_SRGGB14:	descr = "14-bit Bayer RGRG/GBGB"; break;
 	case V4L2_PIX_FMT_SBGGR10P:	descr = "10-bit Bayer BGBG/GRGR Packed"; break;
 	case V4L2_PIX_FMT_SGBRG10P:	descr = "10-bit Bayer GBGB/RGRG Packed"; break;
 	case V4L2_PIX_FMT_SGRBG10P:	descr = "10-bit Bayer GRGR/BGBG Packed"; break;
diff --git a/sources/kernel/kernel-4.9/include/uapi/linux/videodev2.h b/sources/kernel/kernel-4.9/include/uapi/linux/videodev2.h
index ae04e9e06..381691907 100644
--- a/sources/kernel/kernel-4.9/include/uapi/linux/videodev2.h
+++ b/sources/kernel/kernel-4.9/include/uapi/linux/videodev2.h
@@ -589,6 +589,7 @@ struct v4l2_pix_format {
 #define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12  GBGB.. RGRG.. */
 #define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12  GRGR.. BGBG.. */
 #define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12  RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SRGGB14 v4l2_fourcc('R', 'G', '1', '4') /* 14  RGRG.. GBGB.. */
 #define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16  BGBG.. GRGR.. */
 #define V4L2_PIX_FMT_SGBRG16 v4l2_fourcc('G', 'B', '1', '6') /* 16  GBGB.. RGRG.. */
 #define V4L2_PIX_FMT_SGRBG16 v4l2_fourcc('G', 'R', '1', '6') /* 16  GRGR.. BGBG.. */
diff --git a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/camera_common.c b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/camera_common.c
index 2f9fac5b2..6edfba645 100644
--- a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/camera_common.c
+++ b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/camera_common.c
@@ -41,6 +41,11 @@
 #define HDR_ENABLE		0x1
 
 static const struct camera_common_colorfmt camera_common_color_fmts[] = {
+	{
+		MEDIA_BUS_FMT_SRGGB14_1X14,
+		V4L2_COLORSPACE_SRGB,
+		V4L2_PIX_FMT_SRGGB14
+	},
 	{
 		MEDIA_BUS_FMT_SRGGB12_1X12,
 		V4L2_COLORSPACE_SRGB,
diff --git a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/sensor_common.c b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/sensor_common.c
index 8addb9dec..04750ec67 100644
--- a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/sensor_common.c
+++ b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/sensor_common.c
@@ -209,6 +209,8 @@ static int extract_pixel_format(
 		*format = V4L2_PIX_FMT_SBGGR12;
 	else if (strncmp(pixel_t, "bayer_rggb12", size) == 0)
 		*format = V4L2_PIX_FMT_SRGGB12;
+	else if (strncmp(pixel_t, "bayer_rggb14", size) == 0)
+		*format = V4L2_PIX_FMT_SRGGB14;
 	else if (strncmp(pixel_t, "bayer_gbrg12", size) == 0)
 		*format = V4L2_PIX_FMT_SGBRG12;
 	else if (strncmp(pixel_t, "bayer_grbg12", size) == 0)
diff --git a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi2_formats.h b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi2_formats.h
index fed27b9bf..5a42bae0a 100644
--- a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi2_formats.h
+++ b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi2_formats.h
@@ -108,6 +108,10 @@ static const struct tegra_video_format vi2_video_formats[] = {
 	TEGRA_VIDEO_FORMAT(RAW12, 12, SBGGR12_1X12, 2, 1, T_R16_I,
 				RAW12, SBGGR12, "BGBG.. GRGR.."),
 
+	/* RAW 14 */
+	TEGRA_VIDEO_FORMAT(RAW14, 14, SRGGB14_1X14, 2, 1, T_R16_I,
+				RAW14, SRGGB14, "RGRG.. GBGB.."),
+
 	/* RGB888 */
 	TEGRA_VIDEO_FORMAT(RGB888, 24, RGB888_1X24, 4, 1, T_A8R8G8B8,
 				RGB888, ABGR32, "BGRA-8-8-8-8"),
diff --git a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi5_formats.h b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi5_formats.h
index 07254b134..b2250be41 100644
--- a/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi5_formats.h
+++ b/sources/kernel/nvidia/drivers/media/platform/tegra/camera/vi/vi5_formats.h
@@ -116,6 +116,10 @@ static const struct tegra_video_format vi5_video_formats[] = {
 	TEGRA_VIDEO_FORMAT(RAW12, 12, SBGGR12_1X12, 2, 1, T_R16,
 				RAW12, SBGGR12, "BGBG.. GRGR.."),
 
+	/* RAW 14 */
+	TEGRA_VIDEO_FORMAT(RAW14, 14, SRGGB14_1X14, 2, 1, T_R16,
+				RAW14, SRGGB14, "RGRG.. GBGB.."),
+
 	/* RGB888 */
 	TEGRA_VIDEO_FORMAT(RGB888, 24, RGB888_1X24, 4, 1, T_A8R8G8B8,
 				RGB888, ABGR32, "BGRA-8-8-8-8"),

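For reference, here is a minimal user-space sketch (not part of the patch above) to check that the new format is actually exposed once the kernel is rebuilt; the /dev/video0 node and the 1920x1080 mode are placeholders for the real sensor.

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Fall back to the fourcc from the patch if the installed header is older. */
#ifndef V4L2_PIX_FMT_SRGGB14
#define V4L2_PIX_FMT_SRGGB14 v4l2_fourcc('R', 'G', '1', '4')
#endif

int main(void)
{
	int fd = open("/dev/video0", O_RDWR);
	if (fd < 0) {
		perror("open");
		return 1;
	}

	/* Enumerate capture formats and look for the new 14-bit Bayer entry. */
	struct v4l2_fmtdesc desc;
	memset(&desc, 0, sizeof(desc));
	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	for (desc.index = 0; ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0; desc.index++)
		printf("format %u: %s (0x%08x)\n", desc.index,
		       (char *)desc.description, desc.pixelformat);

	/* Request the 14-bit Bayer RGGB format added by the patch. */
	struct v4l2_format fmt;
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 1920;
	fmt.fmt.pix.height = 1080;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_SRGGB14;
	fmt.fmt.pix.field = V4L2_FIELD_NONE;
	if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
		perror("VIDIOC_S_FMT");
	else if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_SRGGB14)
		printf("driver fell back to fourcc 0x%08x\n",
		       fmt.fmt.pix.pixelformat);
	else
		printf("SRGGB14 accepted, bytesperline=%u sizeimage=%u\n",
		       fmt.fmt.pix.bytesperline, fmt.fmt.pix.sizeimage);

	close(fd);
	return 0;
}

If the patch works, VIDIOC_S_FMT should return the SRGGB14 fourcc unchanged, and bytesperline should be roughly width * 2, since the VI writes RAW14 into 16-bit memory words (T_R16 / T_R16_I in the format tables above).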
hello tagoshi,

Yes, please extend the supported formats on the driver side. According to the specification, RAW14 is supported on AGX Xavier.

Hi JerryChang,
Thank you for your reply.

Do you mean that the code snippets I showed above are enough to support the RAW14 format?
I would like to know whether I need to add the RAW14 MIPI data type value (0x2D) somewhere in the kernel driver.

hello tagoshi,

Yes, those code snippets on the VI driver side should be enough; there is already a RAW14 format definition as an NVCSI data type.
For example, see $public_sources/kernel_src/kernel/nvidia/include/soc/tegra/camrtc-capture.h:

#define NVCSI_DATATYPE_RAW14            MK_U32(45)

Note that 45 decimal is 0x2D, the MIPI CSI-2 RAW14 data type, so it does not need to be added separately.

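A tiny sanity check makes that connection explicit. This is hypothetical and not from the thread: MK_U32 is re-declared here on the assumption that it is a plain 32-bit cast, as in the real header.

#include <stdint.h>

#define MK_U32(v)               ((uint32_t)(v))   /* assumed to match camrtc-capture.h */
#define NVCSI_DATATYPE_RAW14    MK_U32(45)        /* value quoted from the header */

/* 45 decimal == 0x2D, the MIPI CSI-2 RAW14 data type, so nothing new to add. */
_Static_assert(NVCSI_DATATYPE_RAW14 == 0x2d,
               "NVCSI RAW14 already equals MIPI CSI-2 data type 0x2D");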
Thank you for the helpful information.
That clears it up.
