Hello everyone,
I recently followed the guide for creating an RTX LiDAR sensor in Isaac Sim. However, I encountered some strange behaviour when I try to create the RTX LiDAR sensor inside a function: the sensor is created, but it does not shoot out any rays and therefore produces an empty point cloud. When the sensor is created outside a function, everything works perfectly fine.
I used the following standalone Python script to run a headless Isaac Sim instance inside an Isaac Sim Docker container:
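To make the difference concrete, this is the pattern that fails, as a trimmed-down sketch of the full script below (same calls and names, with the translation/orientation arguments left out for brevity):

def rtx():
    # identical calls to the ones in the full script, just wrapped in a function
    _, sensor = omni.kit.commands.execute("IsaacSensorCreateRtxLidar", path="/sensor", parent=None, config="Example_Rotary")
    _, render_product_path = create_hydra_texture([1, 1], sensor.GetPath().pathString)
    writer = rep.writers.get("RtxLidarDebugDrawPointCloud")
    writer.attach([render_product_path])

rtx()  # the sensor prim appears on the stage, but the point cloud stays empty

The exact same calls at module level (the "/sensor2" block in the full script) produce points as expected.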
from omni.isaac.kit import SimulationApp
CONFIG = {
"width": 1280,
"height": 720,
"window_width": 1920,
"window_height": 1080,
"headless": True,
"renderer": "RayTracedLighting",
"display_options": 3286, # Set display options to show default grid
}
# Example for creating a RTX lidar sensor and publishing PCL data
simulation_app = SimulationApp(launch_config=CONFIG)
import omni
import omni.kit.viewport.utility
import omni.replicator.core as rep
from omni.isaac.core import World
from omni.isaac.core.utils.stage import create_new_stage
from omni.isaac.core.utils.extensions import enable_extension
from omni.isaac.core.utils.render_product import create_hydra_texture
from pxr import Gf
simulation_app.update()
# enable livestream
simulation_app.set_setting("/app/window/drawMouse", True)
simulation_app.set_setting("/app/livestream/proto", "ws")
simulation_app.set_setting("/app/livestream/websocket/framerate_limit", 120)
simulation_app.set_setting("/ngx/enabled", False)
enable_extension("omni.kit.livestream.native")
def load():
    world_settings = {"physics_dt": 1.0 / 60.0, "stage_units_in_meters": 1.0, "rendering_dt": 1.0 / 60.0}
    create_new_stage()
    world = World(**world_settings)
    world.scene.add_default_ground_plane()
def rtx():
    # Create the lidar sensor that generates data into "RtxSensorCpu"
    # Sensor needs to be rotated 90 degrees about X so that its Z axis points up
    # Possible options are Example_Rotary and Example_Solid_State
    # Drive Sim applies 0.5, -0.5, -0.5, w(-0.5), so we have to apply the reverse
    lidar_config = "Example_Rotary"
    _, sensor = omni.kit.commands.execute(
        "IsaacSensorCreateRtxLidar",
        path="/sensor",
        parent=None,
        config=lidar_config,
        translation=(0, 0, 1.0),
        orientation=Gf.Quatd(0.5, 0.5, -0.5, -0.5),  # Gf.Quatd is w,i,j,k
    )
    _, render_product_path = create_hydra_texture([1, 1], sensor.GetPath().pathString)
    # Create the debug draw pipeline in the post process graph
    writer = rep.writers.get("RtxLidar" + "DebugDrawPointCloud")
    writer.attach([render_product_path])
    print("rtx lidar 1 done")
load()
rtx()
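# Second lidar: the same calls, but at module level (outside any function) - this one produces points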
lidar_config = "Example_Rotary"
_, sensor = omni.kit.commands.execute(
"IsaacSensorCreateRtxLidar",
path="/sensor2",
parent=None,
config=lidar_config,
translation=(2.0, 0, 1.0),
orientation=Gf.Quatd(0.5, 0.5, -0.5, -0.5), # Gf.Quatd is w,i,j,k
)
_, render_product_path = create_hydra_texture([1, 1], sensor.GetPath().pathString)
# Create the debug draw pipeline in the post process graph
writer = rep.writers.get("RtxLidar" + "DebugDrawPointCloud")
writer.attach([render_product_path])
print("rtx lidar 2 done")
print("Is running...")
while simulation_app.is_running():
    simulation_app.update()
# cleanup and shutdown
simulation_app.close()
Here is what it looks like when running the standalone code:
I'm curious whether there is an explanation for this, since this behaviour seems very strange to me.
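One guess on my side, which I have not been able to verify: inside rtx() everything returned by the create calls is a local variable, including the writer and the first return value of create_hydra_texture (which I assume is the texture handle and which I currently discard as _). Could those being garbage collected when rtx() returns explain the missing rays? If so, I would expect a variant like the following, which keeps the handles alive at module scope, to behave differently (just a sketch of what I mean, not something I have confirmed):

def rtx():
    lidar_config = "Example_Rotary"
    _, sensor = omni.kit.commands.execute(
        "IsaacSensorCreateRtxLidar",
        path="/sensor",
        parent=None,
        config=lidar_config,
        translation=(0, 0, 1.0),
        orientation=Gf.Quatd(0.5, 0.5, -0.5, -0.5),  # Gf.Quatd is w,i,j,k
    )
    # keep the first return value instead of discarding it as "_"
    # (assumption: this is the hydra texture handle that must stay referenced)
    texture, render_product_path = create_hydra_texture([1, 1], sensor.GetPath().pathString)
    writer = rep.writers.get("RtxLidarDebugDrawPointCloud")
    writer.attach([render_product_path])
    # hand everything back so the caller can hold on to the references
    return texture, writer, render_product_path

# module-level variable keeps the handles alive for the lifetime of the app
lidar_handles = rtx()

Is holding on to these references actually required, or is something else going on?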
Many thanks in advance