I’m trying to adapt the follow target example from Isaac Sim Tutorial 9 to a mobile manipulator. I have tried creating a new USD that instantiates two UR10E arms on a rigid body. This is what I did:
- Move the two arms to their own Xform and adjust it accordingly
- Add a rigid body with its own Xform
- Remove the fixed link and articulation roots from the two arms and add them to the rigid body so that the two arms are connected to the rigid body
- Assign the new articulation root to the rigid body xform
So basically I want a rigid body attached with two arms for now, and I want the two arms to be controllable relative to the rigid body, so if that rigid body moves, all arm commands will still be in that local coordinate frame.
I modified the set_robot function in the follow_target.py like so:
def set_robot(self) -> SingleManipulator:
    """Load the dual-arm mobile manipulator USD and wrap it as a SingleManipulator.

    Returns:
        SingleManipulator: wrapper around the articulation rooted at ``/robot``,
        using the left arm's Robotiq gripper as the end effector.

    Raises:
        Exception: if the Isaac Sim assets root folder cannot be located.
    """
    # The assets root is only validated here; the asset itself is loaded from a
    # local path below. NOTE(review): consider resolving the USD relative to
    # assets_root_path instead of hard-coding an absolute path.
    assets_root_path = get_assets_root_path()
    if assets_root_path is None:
        raise Exception("Could not find Isaac Sim assets folder")
    asset_path = "/home/jb2270/Projects/IsaacLab/isaacsim/ur_gripper_two.usd"
    add_reference_to_stage(usd_path=asset_path, prim_path="/robot")
    stage_utils.print_stage_prim_paths()  # debug aid: dump the prim tree
    # Gripper joint values were chosen while inspecting the articulation.
    gripper = ParallelGripper(
        end_effector_prim_path="/robot/left_arm/ee_link/robotiq_arg2f_base_link",
        joint_prim_names=["finger_joint"],
        joint_opened_positions=np.array([0]),
        joint_closed_positions=np.array([40]),
        action_deltas=np.array([-40]),
        use_mimic_joints=True,
    )
    # The manipulator wraps the whole /robot articulation; the end effector is
    # the left arm's gripper base link.
    manipulator = SingleManipulator(
        prim_path="/robot",
        name="ur10_robot",
        end_effector_prim_path="/robot/left_arm/ee_link/robotiq_arg2f_base_link",
        gripper=gripper,
    )
    return manipulator
And modified the example like so:
from isaacsim import SimulationApp

# SimulationApp configuration; must be constructed before importing any other
# isaacsim modules, which is why the remaining imports appear below it.
CONFIG = {
    "width": 1280,
    "height": 720,
    "window_width": 1920,
    "window_height": 1080,
    "headless": True,
    "hide_ui": False,  # Show the GUI
    "renderer": "RaytracedLighting",
    "display_options": 3286,  # Set display options to show default grid
}
simulation_app = SimulationApp(CONFIG)

import numpy as np
from ik_solver import KinematicsSolver
from isaacsim.core.api import World
from isaacsim.core.utils.extensions import enable_extension
from isaacsim.core.utils.transformations import get_relative_transform
from isaacsim.core.utils.xforms import get_world_pose
from isaacsim.core.utils.rotations import quat_to_rot_matrix, rot_matrix_to_quat
from isaacsim.core.utils.transformations import tf_matrix_from_pose, pose_from_tf_matrix
from follow_target import FollowTarget

# Enable livestreaming (the app runs headless above).
enable_extension("omni.services.livestream.nvcf")

my_world = World(stage_units_in_meters=1.0)
# Initialize the Follow Target task with a target location for the cube to be followed by the end effector
my_task = FollowTarget(name="ur10e_follow_target", target_position=np.array([0.5, 0, 0.5]))
my_world.add_task(my_task)
my_world.reset()

# Look up the task's target and robot names so the loop can query observations.
task_params = my_world.get_task("ur10e_follow_target").get_params()
target_name = task_params["target_name"]["value"]
ur10e_name = task_params["robot_name"]["value"]
my_ur10e = my_world.scene.get_object(ur10e_name)

# initialize the ik solver
ik_solver = KinematicsSolver(my_ur10e)
articulation_controller = my_ur10e.get_articulation_controller()
def transform_world_to_arm_local(world_position, world_orientation, arm_base_path="/robot/left_arm"):
    """Re-express a world-frame target pose in the arm base's local frame.

    Args:
        world_position: target translation in world coordinates.
        world_orientation: target orientation (quaternion) in world coordinates.
        arm_base_path: stage path of the arm base prim.

    Returns:
        Tuple of (position, orientation) of the target relative to the arm base.
    """
    # Current world pose of the arm base prim.
    base_pose = get_world_pose(arm_base_path)
    print("Arm base", base_pose)
    # Homogeneous 4x4 transforms: world->base and world->target.
    world_T_base = tf_matrix_from_pose(base_pose[0], base_pose[1])
    world_T_target = tf_matrix_from_pose(world_position, world_orientation)
    # base_T_target = inv(world_T_base) @ world_T_target
    base_T_target = np.linalg.inv(world_T_base) @ world_T_target
    # Convert the local transform back to a (position, orientation) pair.
    return pose_from_tf_matrix(base_T_target)
# run the simulation
while simulation_app.is_running():
    my_world.step(render=True)
    if my_world.is_playing():
        # Time-step index 0 means the timeline was just (re)started, so the
        # world state is reset before acting.
        if my_world.current_time_step_index == 0:
            my_world.reset()
        observations = my_world.get_observations()
        # Transform world coordinates to arm-local coordinates
        world_target_pos = observations[target_name]["position"]
        world_target_orient = observations[target_name]["orientation"]
        local_target_pos, local_target_orient = transform_world_to_arm_local(
            world_target_pos, world_target_orient
        )
        # Solve IK against the local-frame target and apply the joint action
        # only when the solver reports success.
        actions, succ = ik_solver.compute_inverse_kinematics(
            target_position=local_target_pos,
            target_orientation=local_target_orient,
        )
        if succ:
            articulation_controller.apply_action(actions)
        else:
            print("IK did not converge to a solution. No action is being taken.")
simulation_app.close()
This generally seems to work, but two issues remain. First, an offset appeared as soon as I moved the articulation root from the left arm to the rigid body. Second, if the rigid body moves around due to physics, the inverse kinematics results are off — apparently the physics simulation does not write its motion back into the Xform tree, so my world-to-local transform is computed from a stale base pose.
I would appreciate some pointers towards how one would set up a simple mobile manipulator. What am I missing?
Best,
Jan
Isaac Sim Version
X 5.1.0
Operating System
Ubuntu 24.04
GPU Information
- Model: RTX 5090
- Driver Version:
