For some reason the ISP is producing washed-out images for my sensor, and I want to figure out what I'm doing wrong.
The first image was taken raw with the following pipeline:
v4l2-ctl -d /dev/video0 --set-fmt-video=width=1280,height=720,pixelformat=RG12 --set-ctrl exposure=90000,bypass_mode=0 --stream-mmap --stream-count=1 --stream-to=test.raw
We then wrote a simple JavaScript debayer script to read in the raw file and convert it to a PNG:
// Simple bilinear debayer: reads a 12-bit little-endian GRBG Bayer raw frame
// from "data.raw" (RG12, low-justified in 16-bit words) and writes an 8-bit
// RGBA PNG to "out.png".
const fs = require("fs");
const PNG = require("pngjs2").PNG;

// Frame geometry — must match the v4l2 capture settings.
const width = 1280;
let height = 720; // may be reduced below if the input file is short

// Blank PNG to receive the demosaiced output (RGBA, 4 bytes per pixel).
const png = new PNG({ width: width, height: height });

// One entry per Bayer sample, already scaled from 12-bit to 8-bit range.
const samples = [];

// A stream chunk can split a 16-bit sample across a boundary; `pending`
// carries the odd trailing byte into the next chunk instead of asserting.
let pending = null;

const src = fs.createReadStream("data.raw");
src.on("data", (chunk) => {
  const buf = pending ? Buffer.concat([pending, chunk]) : chunk;
  const even = buf.length & ~1; // largest even prefix we can decode now
  for (let n = 0; n < even; n += 2) {
    const val = buf.readUInt16LE(n);
    // Scale 12-bit sensor data (0..0xFFF) into the 8-bit PNG range.
    samples.push((val / 0xFFF) * 0xFF);
  }
  pending = buf.length & 1 ? buf.slice(even) : null;
})
// Once the stream is drained, demosaic samples[] into the PNG.
.on("end", () => {
  if (pending) {
    console.log("Input ended mid-sample; dropping one trailing byte");
  }
  // Make sure we have enough data to finish! This allows us to play with
  // image sizes without overrunning the sample buffer.
  const fullRows = Math.floor(samples.length / width);
  if (fullRows < height) {
    console.log(`We do not have enough input data to make a full frame. Stopping at y=${fullRows}`);
    height = fullRows;
  }
  let n = 0;
  for (let y = 0; y < height; y++) {
    // GRBG pattern: even rows alternate G,R; odd rows alternate B,G.
    const rowColors = y % 2 === 0 ? ["g", "r"] : ["b", "g"];
    for (let x = 0; x < width; x++) {
      // BUG FIX: the original assigned the whole array to `color`, so the
      // string comparisons below never matched and no RGB was ever written.
      const color = rowColors[x % 2];
      const idx = (y * width + x) << 2; // RGBA byte offset for this pixel
      const val = Math.round(samples[n]); // value at the pixel itself
      let horiz = 0; // average of left/right neighbors
      let diag = 0;  // average of two diagonal neighbors
      let vert = 0;  // average of up/down neighbors
      const xInterior = x > 0 && x < width - 1;
      const yInterior = y > 0 && y < height - 1;
      if (xInterior) horiz = (samples[n + 1] + samples[n - 1]) / 2;
      // Diagonals step both a row and a column, so both axes must be interior
      // (the original only checked y, reading wrong-row pixels at x edges).
      if (xInterior && yInterior) diag = (samples[n + width + 1] + samples[n - width - 1]) / 2;
      if (yInterior) vert = (samples[n + width] + samples[n - width]) / 2;
      if (color === "r") {
        png.data[idx] = val;                  // Red: native sample
        png.data[idx + 1] = (horiz + vert) / 2; // Green: 4-neighbor average
        png.data[idx + 2] = diag;             // Blue: diagonal average
      } else if (color === "b") {
        png.data[idx] = diag;                 // Red: diagonal average
        png.data[idx + 1] = (horiz + vert) / 2; // Green: 4-neighbor average
        png.data[idx + 2] = val;              // Blue: native sample
      } else {
        // Green pixel: red/blue come from row neighbors, depending on
        // whether this is a GR row (even y) or a BG row (odd y).
        png.data[idx + 1] = (val + diag) / 2;
        if (y % 2 === 0) {
          png.data[idx] = horiz;     // Red sits left/right on GR rows
          png.data[idx + 2] = vert;  // Blue sits above/below
        } else {
          png.data[idx] = vert;      // Red sits above/below on BG rows
          png.data[idx + 2] = horiz; // Blue sits left/right
        }
      }
      png.data[idx + 3] = 0xFF; // Opaque alpha
      n++;
    }
  }
  png.pack().pipe(fs.createWriteStream("out.png"));
});
The image looks great, it perfectly captures the cavernous interior of our warehouse.
However, if I stream the image through the ISP — in this case sending it to VLC with auto-exposure turned off and manually setting the same exposure and gain values — I get the second image, which is super pink and washed out. The edges of objects also look strange, developing thick, pixelated borders.
gst-launch-1.0 -v nvcamerasrc auto-exposure=1 sensor_id=$1 fpsRange="30 30" intent=3 \
! 'video/x-raw(memory:NVMM), width=(int)1280, height=(int)720, format=(string)I420, framerate=(fraction)30/1' \
! omxh264enc \
! 'video/x-h264, stream-format=(string)byte-stream' \
! h264parse \
! mpegtsmux \
! rtpmp2tpay \
! udpsink host=${IP} port=${PORT} sync=false async=false
Has anyone encountered this sort of behavior? Is it some problem with my device tree settings?
code: GitHub - Daxbot/daxc02: Nvidia Jetson TX1/TX2 Kernel Driver for Leopard Imaging LI-M021C-MIPI