In what format are the rotations in rtde_receiver.getActualTCPPose() in Python?

        # Define the movement distance in the K1 (TCP) coordinate system
        move_distance_x = 0.0  # offset along the x-axis of K1 in m
        move_distance_y = 0.0  # offset along the y-axis of K1 in m
        move_distance_z = 0.1  # offset along the z-axis of K1 in m
        tcp_position = rtde_receiver.getActualTCPPose()
        # an example value could look like this:
        # tcp_position = [0.11089816829193276, 0.47492341756375106, 0.6089207876444199, 0.9147791731417009, -1.5661091016471673, 1.5132271359967038]

        # Matrix transformation
        origin_K1_wrt_K0 = tcp_position[:3]  # Origin of the TCP w.r.t. the Robot Base
        angles_K1_wrt_K0 = tcp_position[3:6]  # Orientation of the origin of the TCP w.r.t. the Robot Base

        # Create a rotation matrix based on the angles with scipy transform
        rot_K0_to_K1 = tf.Rotation.from_rotvec(angles_K1_wrt_K0)
        rot_mat_K0_to_K1 = rot_K0_to_K1.as_matrix()
        print(f"Rotation Matrix K0 to K1: {rot_mat_K0_to_K1}")
        rot_mat_K1_to_K0 = rot_mat_K0_to_K1.T

        # Create the movement vector in the K1 coordinate system
        move_vector_K1 = np.array([move_distance_x, move_distance_y, move_distance_z])

        # Convert the movement vector from K1 to K0 coordinate system
        move_vector_K0 = rot_mat_K1_to_K0 @ move_vector_K1 # This is the original line
        print(f"Move Vector K0: {move_vector_K0}", f"Shape of Move Vector K0: {move_vector_K0.shape}")
        # Calculate the new TCP position in the K0 coordinate system
        new_tcp_position_K0 = origin_K1_wrt_K0 + move_vector_K0

        # Move the robot linearly to the new TCP position, keeping the current orientation
        rtde_controler.moveL(new_tcp_position_K0.tolist() + tcp_position[3:6], 0.1, 0.2)

This is part of my Python code for my UR5 CB3. I know the location of an object I want to pick up relative to the TCP; in this example it is at position [0, 0, 0.1] from the TCP. Now I want to transform the location of the object into base-frame coordinates, K0. I thought I could use scipy's Rotation.from_rotvec for this, but it makes the robot move in a nonsensical direction, not to a place 10 cm in front of the TCP as it should. What am I doing wrong? Are the angles_K1_wrt_K0 not given as a rotation vector? Are they Euler angles? I can't find a definitive description; everything I find just says “these values are the rotation” or something similarly imprecise. Maybe I am making another obvious mistake.
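
For anyone who wants to run the transform step without the robot attached, here it is isolated, using the example pose from above and the same logic as in the snippet (including the transpose):

import numpy as np
from scipy.spatial import transform as tf

# example pose from rtde_receiver.getActualTCPPose(): [x, y, z, rx, ry, rz]
tcp_position = [0.11089816829193276, 0.47492341756375106, 0.6089207876444199,
                0.9147791731417009, -1.5661091016471673, 1.5132271359967038]

origin_K1_wrt_K0 = np.array(tcp_position[:3])
rot_mat_K0_to_K1 = tf.Rotation.from_rotvec(tcp_position[3:6]).as_matrix()

move_vector_K1 = np.array([0.0, 0.0, 0.1])             # 10 cm along the TCP z-axis
move_vector_K0 = rot_mat_K0_to_K1.T @ move_vector_K1   # transposed, exactly as in my code above
new_tcp_position_K0 = origin_K1_wrt_K0 + move_vector_K0
print(new_tcp_position_K0)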

Help is highly appreciated!!!

There are probably easier methods for your use-case but here is the code I used for a similar process.
It uses numpy.
This version was annotated and refactored by ChatGPT; the original code was cobbled together from multiple sources.
I don't know if the nomenclature is entirely correct.
You can find Universal Robots' explanation of robot orientation here (in short: the rotation part of a UR pose is an axis-angle rotation vector, not Euler angles):
https://www.universal-robots.com/articles/ur/application-installation/explanation-on-robot-orientation/

These are the steps:

  • create the 4x4 homogeneous transformation matrix of your offset
  • create the 4x4 homogeneous transformation matrix of your robot pose
  • matrix-multiply the robot pose matrix by the inverse of the offset matrix
  • convert the resulting 4x4 matrix back to a robot pose
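
As an aside, if scipy is already available the same chain can be sketched in a few lines with scipy.spatial.transform.Rotation. This is only an illustrative sketch (pose_to_htm / htm_to_pose are made-up names, not part of my original code):

import numpy as np
from scipy.spatial.transform import Rotation

def pose_to_htm(pose):
    """UR pose [x, y, z, rx, ry, rz] -> 4x4 homogeneous transformation matrix."""
    pose = np.asarray(pose, dtype=float)
    T = np.eye(4)
    T[:3, :3] = Rotation.from_rotvec(pose[3:]).as_matrix()
    T[:3, 3] = pose[:3]
    return T

def htm_to_pose(T):
    """4x4 homogeneous transformation matrix -> UR pose [x, y, z, rx, ry, rz]."""
    return np.concatenate([T[:3, 3], Rotation.from_matrix(T[:3, :3]).as_rotvec()])

robot_htm = pose_to_htm([0.2, 0.2, 0.3, np.pi / 2, np.pi / 2, 0.0])
offset_htm = pose_to_htm([0.0, 0.0, 0.1, 0.0, 0.0, 0.0])
new_pose = htm_to_pose(robot_htm @ np.linalg.inv(offset_htm))
print(new_pose)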

Here are the necessary functions:

import numpy as np

def pose_to_transformation_matrix(pose):
    """
    Converts a 6D pose vector (position + axis-angle rotation) to a 4x4 homogeneous transformation matrix.
    Parameters:
        pose (tuple or list): 6D pose vector (x, y, z, rx, ry, rz)
    Returns:
        np.array: 4x4 homogeneous transformation matrix
    """
    position = pose[:3]
    axis_angle = pose[3:]

    rotation_matrix = axis_angle_to_rotation_matrix(axis_angle)
    transformation_matrix = np.eye(4)
    transformation_matrix[:3, :3] = rotation_matrix
    transformation_matrix[:3, 3] = position

    return transformation_matrix

def axis_angle_to_rotation_matrix(axis_angle):
    """
    Converts an axis-angle rotation to a 3x3 rotation matrix using Rodrigues' rotation formula.
    Parameters:
        axis_angle (tuple or list): Axis-angle rotation vector (rx, ry, rz)
    Returns:
        np.array: 3x3 rotation matrix
    """
    if np.all(np.array(axis_angle) == 0):
        return np.identity(3)
    
    axis = np.array(axis_angle) / np.linalg.norm(axis_angle)
    angle = np.linalg.norm(axis_angle)
    
    cos_angle = np.cos(angle)
    sin_angle = np.sin(angle)
    one_minus_cos = 1 - cos_angle
    x, y, z = axis
    
    rotation_matrix = np.array([
        [cos_angle + x**2 * one_minus_cos, x*y*one_minus_cos - z*sin_angle, x*z*one_minus_cos + y*sin_angle],
        [y*x*one_minus_cos + z*sin_angle, cos_angle + y**2 * one_minus_cos, y*z*one_minus_cos - x*sin_angle],
        [z*x*one_minus_cos - y*sin_angle, z*y*one_minus_cos + x*sin_angle, cos_angle + z**2 * one_minus_cos]
    ])
    
    return rotation_matrix

def transformation_matrix_to_pose(matrix):
    """
    Converts a 4x4 homogeneous transformation matrix to a 6D pose vector (position + axis-angle rotation).
    Parameters:
        matrix (np.array): 4x4 homogeneous transformation matrix
    Returns:
        np.array: 6D pose vector (x, y, z, rx, ry, rz)
    """
    position = matrix[:3, 3]
    rotation_matrix = matrix[:3, :3]
    axis_angle = rotation_matrix_to_axis_angle(rotation_matrix)
    pose = np.concatenate([position, axis_angle])
    
    return pose

def rotation_matrix_to_axis_angle(rotation_matrix):
    """
    Converts a 3x3 rotation matrix to an axis-angle rotation vector.
    Parameters:
        rotation_matrix (np.array): 3x3 rotation matrix
    Returns:
        np.array: Axis-angle rotation vector (rx, ry, rz)
    """
    assert np.allclose(np.dot(rotation_matrix, rotation_matrix.T), np.eye(3)), "Input is not a valid rotation matrix"
    
    # Clamp to [-1, 1] so floating-point noise cannot push arccos outside its domain
    angle = np.arccos(np.clip((np.trace(rotation_matrix) - 1) / 2, -1.0, 1.0))
    
    if np.isclose(angle, 0):
        # no rotation
        return np.array([0.0, 0.0, 0.0])
    elif np.isclose(angle, np.pi):
        # 180 degree rotation: the general formula below divides by sin(angle) ~ 0,
        # so recover the axis from the diagonal instead (sign recovery assumes x != 0)
        x = np.sqrt((rotation_matrix[0, 0] + 1) / 2)
        y = np.sqrt((rotation_matrix[1, 1] + 1) / 2) * np.sign(rotation_matrix[0, 1])
        z = np.sqrt((rotation_matrix[2, 2] + 1) / 2) * np.sign(rotation_matrix[0, 2])
        return np.array([x, y, z]) * angle
    else:
        rx = rotation_matrix[2, 1] - rotation_matrix[1, 2]
        ry = rotation_matrix[0, 2] - rotation_matrix[2, 0]
        rz = rotation_matrix[1, 0] - rotation_matrix[0, 1]
        axis = np.array([rx, ry, rz]) / (2 * np.sin(angle))
        return axis * angle
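
Since you are already using scipy: it uses the same axis-angle / rotation-vector convention, so you can sanity-check the two conversion helpers above (and the pose format itself) against it. A quick check, assuming the functions above are in scope:

import numpy as np
from scipy.spatial.transform import Rotation

# rotation part of the example pose from the question
rotvec = np.array([0.9147791731417009, -1.5661091016471673, 1.5132271359967038])

R_manual = axis_angle_to_rotation_matrix(rotvec)
R_scipy = Rotation.from_rotvec(rotvec).as_matrix()
print(np.allclose(R_manual, R_scipy))                                 # should print True

print(np.allclose(rotation_matrix_to_axis_angle(R_manual), rotvec))   # should print True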

Here is how to use these functions:

# Example usage
robot_pose = (0.2, 0.2, 0.3, np.pi/2, np.pi/2, 0)
robot_htm = pose_to_transformation_matrix(robot_pose)
tcp_offset_pose = (0, 0, 0.1, 0, 0, 0)
tcp_offset_htm = pose_to_transformation_matrix(tcp_offset_pose)

inverse_robot_htm = np.linalg.inv(robot_htm)  # not actually used in this example
inverse_offset_htm = np.linalg.inv(tcp_offset_htm)
combined_htm = np.dot(robot_htm, inverse_offset_htm)  # pose shifted back by the TCP offset
new_robot_pose = transformation_matrix_to_pose(combined_htm)

print("Combined Transformation Matrix:\n", combined_htm)
print("New Robot Pose:", new_robot_pose)

First of all: Thank you so much for being so immensely helpful!!!

But just in case anyone finds this later and is losing their mind like me: I found the actual mistake. Someone had configured the TCP position and orientation in a nonsensical way before I started using the robot, so all my motions defined in the coordinate system of the TCP were following this wrong configuration.
Well, at least I learned a lot about kinematics on my way to solving this issue, I guess …
Thanks again, nonetheless!
