I am using the following code to capture video from two cameras for a fixed duration and then save the result. However, the recorded videos come out with random lengths. For example, with the duration set to 60 seconds, I sometimes get a 1 minute 14 second video on the Jetson Orin board, while on a Windows laptop the same script gives me, say, a 40 second video. How do I set the timing so that I always get a fixed-length video on the Jetson when using this script? Thank you.
#!/usr/bin/env python3
import cv2
import depthai as dai
import contextlib
import time
def createPipeline():
    # Start defining a pipeline
    pipeline = dai.Pipeline()

    # Define a source - color camera
    camRgb = pipeline.create(dai.node.ColorCamera)
    camRgb.setPreviewSize(300, 300)
    camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
    camRgb.setInterleaved(False)
    camRgb.setVideoSize(1920, 1080)

    # Create output
    xoutRgb = pipeline.create(dai.node.XLinkOut)
    xoutRgb.setStreamName("rgb")
    xoutRgb.input.setBlocking(False)
    xoutRgb.input.setQueueSize(1)

    # Mono cameras (defined here but not linked to any output in this script)
    monoLeft = pipeline.create(dai.node.MonoCamera)
    monoRight = pipeline.create(dai.node.MonoCamera)
    monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
    monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
    monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
    monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)

    camRgb.video.link(xoutRgb.input)
    return pipeline
with contextlib.ExitStack() as stack:
    deviceInfos = dai.Device.getAllAvailableDevices()
    usbSpeed = dai.UsbSpeed.SUPER
    openVinoVersion = dai.OpenVINO.Version.VERSION_2021_4

    qRgbMap = []
    devices = []

    for deviceInfo in deviceInfos:
        deviceInfo: dai.DeviceInfo
        device: dai.Device = stack.enter_context(dai.Device(openVinoVersion, deviceInfo, usbSpeed))
        devices.append(device)

        mxId = device.getMxId()
        cameras = device.getConnectedCameras()
        usbSpeed = device.getUsbSpeed()
        eepromData = device.readCalibration2().getEepromData()

        pipeline = createPipeline()
        device.startPipeline(pipeline)

        # Output queue will be used to get the rgb frames from the output defined above
        timestr = time.strftime("%Y%m%d-%H%M%S")
        q_rgb = device.getOutputQueue(name="rgb", maxSize=4, blocking=False)
        stream_name = "rgb-" + mxId + "-" + eepromData.productName
        recording = cv2.VideoWriter(f'filename_{mxId}_{timestr}.avi', cv2.VideoWriter_fourcc(*'MJPG'), 20, (1920, 1080))
        qRgbMap.append((q_rgb, stream_name, recording))

    start_time = time.time()
    while time.time() - start_time < 60:
        for q_rgb, stream_name, recording in qRgbMap:
            if q_rgb.has():
                video_in = q_rgb.get()
                recording.write(video_in.getCvFrame())
        if cv2.waitKey(1) == ord('q'):
            break

    # Finalize the AVI files so their headers are written correctly
    for _, _, recording in qRgbMap:
        recording.release()
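For what it's worth, below is a rough sketch of the kind of behaviour I am after, counting frames instead of wall-clock time so that the playback length is exactly frames_to_write / TARGET_FPS. The names TARGET_FPS, DURATION_S, frames_to_write and written are just illustrative and not part of my actual script, and the snippet assumes the qRgbMap list built above. I am not sure this is the right approach (if the cameras do not actually deliver 20 frames per second, the file would be exactly 60 seconds long on playback but would not run at real-time speed), which is why I am asking.

# Illustrative sketch only, not my current script: write a fixed number of
# frames so that playback length = frames_to_write / TARGET_FPS exactly.
TARGET_FPS = 20          # must match the FPS passed to cv2.VideoWriter above
DURATION_S = 60
frames_to_write = TARGET_FPS * DURATION_S

written = 0
while written < frames_to_write:
    for q_rgb, stream_name, recording in qRgbMap:
        frame = q_rgb.get()              # blocking get: wait for the next frame
        recording.write(frame.getCvFrame())
    written += 1

for _, _, recording in qRgbMap:
    recording.release()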