False semantic data for RTX lidar range sensor

I added a lidar to the warehouse scene, split the resulting point cloud by semantic label, and saved each class to its own file. However, the floor label comes out wrong: almost no points end up in the floor cloud, while all the other classes look normal. Why does this happen, and how can I fix it?
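One way to narrow this down is to check which prims in the warehouse actually carry a semantic label, since the lidar can only report a floor label if the floor prims are tagged. Below is a minimal diagnostic sketch (my assumption: it runs in Isaac Sim's Script Editor with the warehouse stage already open; it only uses `prim.GetAppliedSchemas()` and the pxr `Semantics` schema):

```python
from pxr import Semantics
import omni.usd

# walk the stage and print every applied SemanticsAPI instance
stage = omni.usd.get_context().get_stage()
for prim in stage.Traverse():
    for schema in prim.GetAppliedSchemas():
        # applied multi-apply schemas are listed as "SemanticsAPI:<instance name>"
        if schema.startswith("SemanticsAPI:"):
            instance = schema.split(":", 1)[1]
            sem = Semantics.SemanticsAPI.Get(prim, instance)
            print(prim.GetPath(), sem.GetSemanticTypeAttr().Get(), sem.GetSemanticDataAttr().Get())
```

If no floor prim shows up in this listing, the sensor has no label to report for the floor hits, which would match the nearly empty floor cloud. The full script is below.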


```python
import sys

import carb
from omni.isaac.kit import SimulationApp

# Example for creating a lidar sensor and saving labeled point cloud data
simulation_app = SimulationApp({"headless": False})
import omni
import omni.kit.viewport.utility
import omni.replicator.core as rep
from omni.isaac.core import SimulationContext
from omni.isaac.core.utils import nucleus, stage
from omni.isaac.core.utils.extensions import enable_extension
from omni.isaac.range_sensor import _range_sensor

from pxr import UsdGeom, Gf, UsdPhysics, Semantics  # pxr USD imports used to transform and label prims
import numpy as np
import open3d as o3d
import os

# enable the debug draw extension (used to visualize the lidar points)
enable_extension("omni.isaac.debug_draw")

simulation_app.update()

# lidarInterface = _range_sensor.acquire_lidar_sensor_interface()  # Used to interact with the LIDAR

# Locate Isaac Sim assets folder to load environment and robot stages
assets_root_path = nucleus.get_assets_root_path()
if assets_root_path is None:
    carb.log_error("Could not find Isaac Sim assets folder")
    simulation_app.close()
    sys.exit()

simulation_app.update()
# Load the full_warehouse environment
warehouse = stage.add_reference_to_stage(
    assets_root_path + "/Isaac/Environments/Simple_Warehouse/full_warehouse.usd", "/background"
)


# ://localhost/NVIDIA/Assets/Isaac/2023.1.1/Isaac/Environments/Simple_Warehouse/full_warehouse.usd
asset_path_forklift = "http://omniverse-content-production.s3-us-west-2.amazonaws.com/Assets/DigitalTwin/Assets/Warehouse/Equipment/Forklifts/Forklift_A/Forklift_A01_PR_V_NVD_01.usd"
fork = stage.add_reference_to_stage(usd_path=asset_path_forklift, prim_path="/ForkLift")
forklift_prim = fork.GetPrimAtPath("/ForkLift")
collisionAPI = UsdPhysics.CollisionAPI.Apply(fork)
UsdGeom.XformCommonAPI(forklift_prim).SetScale((0.01, 0.01, 0.01))
UsdGeom.XformCommonAPI(forklift_prim).SetTranslate((4, 0.01, 0.01))

background_prim = fork.GetPrimAtPath("/background")

# print the top-level children of the warehouse for inspection
for child in background_prim.GetChildren():
    print("background child:", child.GetPath())



simulation_app.update()

lidar_config = "Example_Rotary"
if len(sys.argv) == 2:
    lidar_config = sys.argv[1]

# Create the lidar sensor that generates data into "RtxSensorCpu"
# Sensor needs to be rotated 90 degrees about X so that its Z is up

# Possible options are Example_Rotary and Example_Solid_State
# drive sim applies 0.5,-0.5,-0.5,w(-0.5), we have to apply the reverse

# _, sensor = omni.kit.commands.execute(
#     "IsaacSensorCreateRtxLidar",
#     path="/sensor",
#     parent=None,
#     config=lidar_config,
#     translation=(0, 0, 1.0),
#     orientation=Gf.Quatd(1.0, 0.0, 0.0, 0.0),  # Gf.Quatd is w,i,j,k
# )


_, sensor = omni.kit.commands.execute(
    "RangeSensorCreateLidar",
    path="/sensor",
    parent=None,
    min_range=0.2,
    max_range=50.0,
    draw_points=True,
    draw_lines=False,
    horizontal_fov=360.0,
    vertical_fov=60.0,
    horizontal_resolution=0.4,
    vertical_resolution=0.4,
    rotation_rate=0.0,
    high_lod=True,
    yaw_offset=0.0,
    enable_semantics=True,
)





# if lidarInterface.is_lidar_sensor("/World/sensor"):
#     print("lidar sensor is valid")
#     simulation_context.stop()
#     simulation_app.close()

hydra_texture = rep.create.render_product(sensor.GetPath(), [1, 1], name="Isaac")

simulation_context = SimulationContext(physics_dt=1.0 / 60.0, rendering_dt=1.0 / 60.0, stage_units_in_meters=1.0)


# Create the debug draw pipeline in the post process graph
writer = rep.writers.get("RtxLidarDebugDrawPointCloudBuffer")
writer.attach([hydra_texture])


while simulation_app.is_running():
    # random translations are computed here but not currently applied to the sensor
    x_trans = np.random.uniform(low=-10.0, high=10.0)
    y_trans = np.random.uniform(low=-10.0, high=10.0)
    UsdGeom.XformCommonAPI(sensor).SetTranslate((0, 0, 1.5))
    lidarInterface = _range_sensor.acquire_lidar_sensor_interface()
    simulation_app.update()
    simulation_context.play()

    # label the forklift so it shows up in the per-beam semantic data
    sem = Semantics.SemanticsAPI.Apply(forklift_prim, "Semantics")
    sem.CreateSemanticTypeAttr()
    sem.CreateSemanticDataAttr()
    sem.GetSemanticTypeAttr().Set("class")
    sem.GetSemanticDataAttr().Set("/ForkLift")
    simulation_app.update()
    pointcloud = lidarInterface.get_point_cloud_data("/sensor")
    semantics = lidarInterface.get_prim_data("/sensor")
    # print("---------------------pointcloud", pointcloud)
    # print("---------------------Semantic ID", semantics)
    num = pointcloud.shape[0] * pointcloud.shape[1]
    pointcloud = pointcloud.reshape(num, 3)

    # one array per class; beams that never match keep the (1, 1, 1) fill value
    points = np.ones((pointcloud.shape[0], 3), np.float32)    # forklift
    points_1 = np.ones((pointcloud.shape[0], 3), np.float32)  # walls
    points_2 = np.ones((pointcloud.shape[0], 3), np.float32)  # everything else
    points_3 = np.ones((pointcloud.shape[0], 3), np.float32)  # boxes
    points_4 = np.ones((pointcloud.shape[0], 3), np.float32)  # floor
    for i in range(pointcloud.shape[0]):
        label = semantics[i]
        if "ForkLift" in label or "Forklift" in label:
            points[i] = pointcloud[i]
            # pcd = o3d.t.geometry.PointCloud()
            # pcd.point["positions"] = o3d.core.Tensor(pointcloud[i])
        elif "Wall" in label:
            points_1[i] = pointcloud[i]
        elif "floor" in label or "Floor" in label:
            points_4[i] = pointcloud[i]
        elif "Box" in label:
            points_3[i] = pointcloud[i]
        else:
            points_2[i] = pointcloud[i]

    # make sure the output directory exists before writing
    os.makedirs("./point_dir", exist_ok=True)

    pcd = o3d.t.geometry.PointCloud()
    pcd.point["positions"] = o3d.core.Tensor(points)
    o3d.t.io.write_point_cloud("./point_dir/pointcloud.pcd", pcd)

    pcd_1 = o3d.t.geometry.PointCloud()
    pcd_1.point["positions"] = o3d.core.Tensor(points_1)
    o3d.t.io.write_point_cloud("./point_dir/wall.pcd", pcd_1)

    pcd_2 = o3d.t.geometry.PointCloud()
    pcd_2.point["positions"] = o3d.core.Tensor(points_2)
    o3d.t.io.write_point_cloud("./point_dir/other.pcd", pcd_2)

    pcd_3 = o3d.t.geometry.PointCloud()
    pcd_3.point["positions"] = o3d.core.Tensor(points_3)
    o3d.t.io.write_point_cloud("./point_dir/box.pcd", pcd_3)

    pcd_4 = o3d.t.geometry.PointCloud()
    pcd_4.point["positions"] = o3d.core.Tensor(points_4)
    o3d.t.io.write_point_cloud("./point_dir/floor.pcd", pcd_4)



# cleanup and shutdown
simulation_context.stop()
simulation_app.close()


# helper (currently unused): list every model prim on a stage
def list_usd_model(stage):
    for prim in stage.Traverse():
        if UsdGeom.ModelAPI(prim).IsAModel():
            print(f"Found model: {prim.GetPath()}")
```
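A second quick check is to histogram the per-beam prim paths coming back from `get_prim_data`, to see how often the floor is actually reported at all. This is a sketch under the assumption that `lidarInterface` and the `/sensor` prim from the script above are still live (the flattening guard is mine, since I am not certain whether the interface returns a flat list or one list per scan row):

```python
import numpy as np
from collections import Counter

# one prim path per beam; flatten in case the interface returns one list per scan row
prim_paths = lidarInterface.get_prim_data("/sensor")
if len(prim_paths) and isinstance(prim_paths[0], (list, tuple, np.ndarray)):
    prim_paths = [p for row in prim_paths for p in row]

# print the 20 most frequently hit prim paths
for path, n in Counter(prim_paths).most_common(20):
    print(f"{n:6d}  {path}")
```

If the floor prims rarely or never appear here, the problem is upstream of the saving code: either the floor geometry is not labeled, or the rays are not registering hits on its collision mesh.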