Python geometry_msgs.msg.Transform() Examples

The following are 18 code examples of geometry_msgs.msg.Transform(), collected from open-source ROS projects; the original project and source file are noted above each example. You may also want to check out the other functions and classes available in the geometry_msgs.msg module.
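For orientation, here is a minimal sketch of constructing a Transform message by hand; the field names come from the message definition, while the numeric values are arbitrary:

from geometry_msgs.msg import Transform, Vector3, Quaternion

t = Transform(
    translation=Vector3(1.0, 2.0, 3.0),        # offset in metres
    rotation=Quaternion(0.0, 0.0, 0.0, 1.0)    # identity rotation (x, y, z, w)
)
print(t.translation.x, t.rotation.w)           # 1.0 1.0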
Example #1
Source File: geometry.py    From ros_numpy with MIT License
import numpy as np
from geometry_msgs.msg import Transform, Vector3, Quaternion

def numpy_to_transform(arr):
	from tf import transformations

	shape, rest = arr.shape[:-2], arr.shape[-2:]
	assert rest == (4, 4)

	if len(shape) == 0:
		trans = transformations.translation_from_matrix(arr)
		quat = transformations.quaternion_from_matrix(arr)

		return Transform(
			translation=Vector3(*trans),
			rotation=Quaternion(*quat)
		)
	else:
		res = np.empty(shape, dtype=np.object_)
		for idx in np.ndindex(shape):
			res[idx] = Transform(
				translation=Vector3(*transformations.translation_from_matrix(arr[idx])),
				rotation=Quaternion(*transformations.quaternion_from_matrix(arr[idx]))
			)
		# return the array of Transform messages
		return res
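For reference, a hedged usage sketch of the helper above on a single 4x4 homogeneous matrix (the values are arbitrary):

import numpy as np

mat = np.eye(4)
mat[:3, 3] = [1.0, 2.0, 3.0]   # pure translation, identity rotation
msg = numpy_to_transform(mat)  # -> translation (1, 2, 3), rotation (0, 0, 0, 1)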
Example #2
Source File: test_geometry.py    From ros_numpy with MIT License
def test_transform(self):
        t = Transform(
            translation=Vector3(1, 2, 3),
            rotation=Quaternion(*transformations.quaternion_from_euler(np.pi, 0, 0))
        )

        t_mat = ros_numpy.numpify(t)

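        # quaternion_from_euler(pi, 0, 0) is a half-turn about x, mapping the
        # point (0, 0, 1) to (0, 0, -1); adding the translation (1, 2, 3)
        # gives the homogeneous point (1, 2, 2, 1) asserted below.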
        np.testing.assert_allclose(t_mat.dot([0, 0, 1, 1]), [1.0, 2.0, 2.0, 1.0])

        msg = ros_numpy.msgify(Transform, t_mat)

        np.testing.assert_allclose(msg.translation.x, t.translation.x)
        np.testing.assert_allclose(msg.translation.y, t.translation.y)
        np.testing.assert_allclose(msg.translation.z, t.translation.z)
        np.testing.assert_allclose(msg.rotation.x, t.rotation.x)
        np.testing.assert_allclose(msg.rotation.y, t.rotation.y)
        np.testing.assert_allclose(msg.rotation.z, t.rotation.z)
        np.testing.assert_allclose(msg.rotation.w, t.rotation.w) 
Example #3
Source File: tf_bag.py    From tf_bag with GNU Lesser General Public License v3.0
def replicateTransformOverTime(self, transf, orig_frame, dest_frame, frequency, start_time=None, end_time=None):
        """
        Adds a new transform to the graph with the specified value

        This can be useful, for example, to add a calibration a posteriori.

        :param transf: value of the transform
        :param orig_frame: the source tf frame of the transform of interest
        :param dest_frame: the target tf frame of the transform of interest
        :param frequency: frequency at which the transform should be published
        :param start_time: the time the transform should be published from
        :param end_time: the time the transform should be published until
        :return:
        """
        if start_time is None:
            start_time = self.getStartTime()
        if end_time is None:
            end_time = self.getEndTime()
        transl, quat = transf
        time_delta = rospy.Duration(1.0 / frequency)  # 1.0 avoids integer division under Python 2

        t_msg = TransformStamped(header=Header(frame_id=orig_frame),
                                 child_frame_id=dest_frame,
                                 transform=Transform(translation=Vector3(*transl), rotation=Quaternion(*quat)))

        def createMsg(time_nsec):
            # split into whole seconds and remaining nanoseconds so that no
            # sub-second precision is lost in the conversion
            time = rospy.Time(time_nsec // 1000000000, time_nsec % 1000000000)
            t_msg2 = copy.deepcopy(t_msg)
            t_msg2.header.stamp = time
            return t_msg2

        new_msgs = [createMsg(t) for t in range(start_time.to_nsec(), end_time.to_nsec(), time_delta.to_nsec())]
        self.tf_messages += new_msgs
        self.tf_messages.sort(key=lambda tfm: tfm.header.stamp.to_nsec())
        self.tf_times = np.array([tfm.header.stamp.to_nsec() for tfm in self.tf_messages])
        self.all_transform_tuples.add((orig_frame, dest_frame)) 
Example #4
Source File: transforms.py    From ros-bridge with MIT License
def carla_transform_to_ros_transform(carla_transform):
    """
    Convert a carla transform to a ROS transform

    See carla_location_to_ros_vector3() and carla_rotation_to_ros_quaternion() for details

    :param carla_transform: the carla transform
    :type carla_transform: carla.Transform
    :return: a ROS transform
    :rtype: geometry_msgs.msg.Transform
    """
    ros_transform = Transform()

    ros_transform.translation = carla_location_to_ros_vector3(
        carla_transform.location)
    ros_transform.rotation = carla_rotation_to_ros_quaternion(
        carla_transform.rotation)

    return ros_transform 
Example #5
Source File: transforms.py    From ros-bridge with MIT License
def carla_transform_to_ros_pose(carla_transform):
    """
    Convert a carla transform to a ROS pose

    See carla_location_to_ros_point() and carla_rotation_to_ros_quaternion() for details

    :param carla_transform: the carla transform
    :type carla_transform: carla.Transform
    :return: a ROS pose
    :rtype: geometry_msgs.msg.Pose
    """
    ros_pose = Pose()

    ros_pose.position = carla_location_to_ros_point(
        carla_transform.location)
    ros_pose.orientation = carla_rotation_to_ros_quaternion(
        carla_transform.rotation)

    return ros_pose 
Example #6
Source File: transforms.py    From ggcnn_kinova_grasping with BSD 3-Clause "New" or "Revised" License
def publish_transform(transform, reference_frame, name, seconds=1):
    """
    Publish a Transform for debugging purposes.
        transform           -> A geometry_msgs/Transform to be published
        reference_frame     -> A string defining the reference frame of the transform
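        name                -> A string defining the child frame id of the transform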
        seconds             -> An int32 that defines the duration the transform will be broadcast for
    """
    # Create a stamped_transform and store the transform in it
    st = gmsg.TransformStamped()
    st.transform = transform
    st.header.frame_id = reference_frame
    st.child_frame_id = name

    # Call the publish_stamped_transform function
    publish_stamped_transform(st, seconds) 
Example #7
Source File: transforms.py    From ggcnn_kinova_grasping with BSD 3-Clause "New" or "Revised" License
def publish_tf_quaterion_as_transform(translation, quaternion, reference_frame, name, seconds=1):
    qm = gmsg.Transform()
    qm.translation.x = translation[0]
    qm.translation.y = translation[1]
    qm.translation.z = translation[2]
    qm.rotation.x = quaternion[0]
    qm.rotation.y = quaternion[1]
    qm.rotation.z = quaternion[2]
    qm.rotation.w = quaternion[3]
    publish_transform(qm, reference_frame, name, seconds) 
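A hypothetical call, assuming a ROS node is up and publish_stamped_transform() is available from the same module (the frame names are placeholders):

publish_tf_quaterion_as_transform([0.1, 0.0, 0.5], [0.0, 0.0, 0.0, 1.0],
                                  'base_link', 'grasp_target', seconds=2)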
Example #8
Source File: tf_helpers.py    From mvp_grasp with BSD 3-Clause "New" or "Revised" License
def publish_transform(transform, reference_frame, name, seconds=1):
    """
    Publish a Transform for debugging purposes.
        transform           -> A geometry_msgs/Transform to be published
        reference_frame     -> A string defining the reference frame of the transform
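        name                -> A string defining the child frame id of the transform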
        seconds             -> An int32 that defines the duration the transform will be broadcast for
    """
    # Create a stamped_transform and store the transform in it
    st = gmsg.TransformStamped()
    st.transform = transform
    st.header.frame_id = reference_frame
    st.child_frame_id = name

    # Call the publish_stamped_transform function
    publish_stamped_transform(st, seconds) 
Example #9
Source File: tf_helpers.py    From mvp_grasp with BSD 3-Clause "New" or "Revised" License
def publish_tf_quaterion_as_transform(translation, quaternion, reference_frame, name, seconds=1):
    qm = gmsg.Transform()
    qm.translation.x = translation[0]
    qm.translation.y = translation[1]
    qm.translation.z = translation[2]
    qm.rotation.x = quaternion[0]
    qm.rotation.y = quaternion[1]
    qm.rotation.z = quaternion[2]
    qm.rotation.w = quaternion[3]
    publish_transform(qm, reference_frame, name, seconds) 
Example #10
Source File: handeye_calibration.py    From easy_handeye with GNU Lesser General Public License v3.0
def from_dict(self, in_dict):
        """
        Sets values parsed from a given dictionary.

        :param in_dict: input dictionary.
        :type in_dict: dict[string, string|dict[string,float]]

        :rtype: None
        """
        self.eye_on_hand = in_dict['eye_on_hand']
        self.transformation = TransformStamped(
            child_frame_id=in_dict['tracking_base_frame'],
            transform=Transform(
                Vector3(in_dict['transformation']['x'],
                        in_dict['transformation']['y'],
                        in_dict['transformation']['z']),
                Quaternion(in_dict['transformation']['qx'],
                           in_dict['transformation']['qy'],
                           in_dict['transformation']['qz'],
                           in_dict['transformation']['qw'])
            )
        )
        if self.eye_on_hand:
            self.transformation.header.frame_id = in_dict['robot_effector_frame']
        else:
            self.transformation.header.frame_id = in_dict['robot_base_frame'] 
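The expected layout of in_dict can be read off the keys above; a hypothetical example (the frame names are placeholders):

in_dict = {
    'eye_on_hand': True,
    'tracking_base_frame': 'tracking_origin',
    'robot_effector_frame': 'tool0',  # read because eye_on_hand is True
    'transformation': {'x': 0.0, 'y': 0.0, 'z': 0.1,
                       'qx': 0.0, 'qy': 0.0, 'qz': 0.0, 'qw': 1.0},
}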
Example #11
Source File: environment.py    From Training-Neural-Networks-for-Event-Based-End-to-End-Robot-Control with GNU General Public License v3.0
def step(self, action):
		# Publish the motor speeds for the chosen action (0: turn left, 1: straight ahead, 2: turn right)
		if action==0:
			self.left_pub.publish(self.v_forward-self.v_turn)
			self.right_pub.publish(self.v_forward+self.v_turn)
			self.rate.sleep()
		elif action==1:
			self.left_pub.publish(self.v_forward)
			self.right_pub.publish(self.v_forward)
			self.rate.sleep()
		elif action==2:
			self.left_pub.publish(self.v_forward+self.v_turn)
			self.right_pub.publish(self.v_forward-self.v_turn)
			self.rate.sleep()
		# Get transform data
		p = rospy.wait_for_message('transformData', Transform).translation
		p = np.array([p.x,p.y])
		# Calculate robot position and distance
		d, p = self.getDistance(p)
		# Calculate reward (normpdf of the distance: highest at the lane center)
		r = normpdf(d)
		# Translate DVS data from FIFO queue into state image
		s = self.getState()
		# Check if distance causes reset
		if abs(d) > self.reset_distance:
			return s, r, True, d, p
		else:
			return s, r, False, d, p 
Example #12
Source File: tf_bag.py    From tf_bag with GNU Lesser General Public License v3.0
def waitForTransform(self, orig_frame, dest_frame, start_time=None):
        """
        Returns the first time for which at least one tf message is available for the whole chain between \
        the two provided frames

        :param orig_frame: the source tf frame of the transform of interest
        :param dest_frame: the target tf frame of the transform of interest
        :param start_time: the first time at which the messages should be considered; if None, all recorded messages
        :return: the ROS time at which the transform is available
        """
        if orig_frame == dest_frame:
            return self.tf_messages[0].header.stamp
        if start_time is not None:
            # keep only the messages recorded after start_time
            messages = filter(lambda m: m.header.stamp > start_time, self.tf_messages)
        else:
            messages = self.tf_messages
        missing_transforms = set(self.getChainTuples(orig_frame, dest_frame)) - self.static_transform_tuples
        message = iter(messages)
        ret = rospy.Time(0)
        try:
            while missing_transforms:
                m = next(message)
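                # A tf edge can be traversed in either direction, so either
                # orientation of the (parent, child) pair resolves it.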
                if (m.header.frame_id, m.child_frame_id) in missing_transforms:
                    missing_transforms.remove((m.header.frame_id, m.child_frame_id))
                    ret = max(ret, m.header.stamp)
                if (m.child_frame_id, m.header.frame_id) in missing_transforms:
                    missing_transforms.remove((m.child_frame_id, m.header.frame_id))
                    ret = max(ret, m.header.stamp)
        except StopIteration:
            raise ValueError('Transform not found between {} and {}'.format(orig_frame, dest_frame))
        return ret 
Example #13
Source File: ego_vehicle.py    From ros-bridge with MIT License
def update(self, frame, timestamp):
        """
        Function (override) to update this object.

        On update, the ego vehicle calculates and sends the new values for VehicleControl()

        :return:
        """
        self.send_vehicle_msgs()
        super(EgoVehicle, self).update(frame, timestamp)
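        # A default-constructed Transform carries an all-zero quaternion, which
        # is not a valid rotation; setting w = 1.0 turns it into the identity.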
        no_rotation = Transform()
        no_rotation.rotation.w = 1.0
        self.publish_transform(self.get_ros_transform(
            no_rotation, frame_id=str(self.get_id()), child_frame_id=self.get_prefix())) 
Example #14
Source File: kitti2bag.py    From kitti2bag with MIT License
def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
    print("Exporting time dependent transformations")
    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            tf_oxts_msg = TFMessage()
            tf_oxts_transform = TransformStamped()
            tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
            tf_oxts_transform.header.frame_id = 'world'
            tf_oxts_transform.child_frame_id = 'base_link'

            transform = oxts.T_w_imu
            t = transform[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(transform)
            oxts_tf = Transform()

            oxts_tf.translation.x = t[0]
            oxts_tf.translation.y = t[1]
            oxts_tf.translation.z = t[2]

            oxts_tf.rotation.x = q[0]
            oxts_tf.rotation.y = q[1]
            oxts_tf.rotation.z = q[2]
            oxts_tf.rotation.w = q[3]

            tf_oxts_transform.transform = oxts_tf
            tf_oxts_msg.transforms.append(tf_oxts_transform)

            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)

    elif kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = 'world'
            tf_stamped.child_frame_id = 'camera_left'
            
            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()

            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]

            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]

            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)

            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp) 
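As an aside, the field-by-field assignment above can be written more compactly with the message constructors, as Examples #1 to #3 do; a sketch, assuming Vector3 and Quaternion are imported from geometry_msgs.msg:

transform = Transform(translation=Vector3(*t), rotation=Quaternion(*q))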
Example #15
Source File: kitti2bag.py    From SARosPerceptionKitti with MIT License
def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
    print("Exporting time dependent transformations")
    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            tf_oxts_msg = TFMessage()
            tf_oxts_transform = TransformStamped()
            tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
            tf_oxts_transform.header.frame_id = 'world'
            tf_oxts_transform.child_frame_id = 'base_link'

            transform = oxts.T_w_imu
            t = transform[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(transform)
            oxts_tf = Transform()

            oxts_tf.translation.x = t[0]
            oxts_tf.translation.y = t[1]
            oxts_tf.translation.z = t[2]

            oxts_tf.rotation.x = q[0]
            oxts_tf.rotation.y = q[1]
            oxts_tf.rotation.z = q[2]
            oxts_tf.rotation.w = q[3]

            tf_oxts_transform.transform = oxts_tf
            tf_oxts_msg.transforms.append(tf_oxts_transform)

            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)

    elif kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = 'world'
            tf_stamped.child_frame_id = 'camera_left'
            
            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()

            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]

            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]

            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)

            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp) 
Example #16
Source File: handeye_calibration.py    From easy_handeye with GNU Lesser General Public License v3.0
def __init__(self,
                 eye_on_hand=False,
                 robot_base_frame=None,
                 robot_effector_frame=None,
                 tracking_base_frame=None,
                 transformation=None):
        """
        Creates a HandeyeCalibration object.

        :param eye_on_hand: if false, it is an eye-on-base calibration
        :type eye_on_hand: bool
        :param robot_base_frame: needed only for eye-on-base calibrations: robot base link tf name
        :type robot_base_frame: string
        :param robot_effector_frame: needed only for eye-on-hand calibrations: robot tool tf name
        :type robot_effector_frame: string
        :param tracking_base_frame: tracking system tf name
        :type tracking_base_frame: string
        :param transformation: transformation between optical origin and base/tool robot frame as tf tuple
        :type transformation: ((float, float, float), (float, float, float, float))
        :return: a HandeyeCalibration object

        :rtype: easy_handeye.handeye_calibration.HandeyeCalibration
        """

        if transformation is None:
            transformation = ((0, 0, 0), (0, 0, 0, 1))

        self.eye_on_hand = eye_on_hand
        """
        if false, it is an eye-on-base calibration

        :type: bool
        """

        self.transformation = TransformStamped(transform=Transform(
            Vector3(*transformation[0]), Quaternion(*transformation[1])))
        """
        transformation between optical origin and base/tool robot frame

        :type: geometry_msgs.msg.TransformStamped
        """

        # tf names
        if self.eye_on_hand:
            self.transformation.header.frame_id = robot_effector_frame
        else:
            self.transformation.header.frame_id = robot_base_frame
        self.transformation.child_frame_id = tracking_base_frame

        self.filename = HandeyeCalibration.DIRECTORY + '/' + rospy.get_namespace().rstrip('/').split('/')[-1] + '.yaml' 
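A hypothetical instantiation matching the signature above; it assumes a ROS node is initialised (the constructor reads the current namespace), and the frame names are placeholders:

calib = HandeyeCalibration(eye_on_hand=True,
                           robot_effector_frame='tool0',
                           tracking_base_frame='tracking_origin',
                           transformation=((0.0, 0.0, 0.1), (0.0, 0.0, 0.0, 1.0)))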
Example #17
Source File: environment.py    From Training-Neural-Networks-for-Event-Based-End-to-End-Robot-Control with GNU General Public License v3.0
def __init__(self):
		self.dvs_sub = rospy.Subscriber('dvsData', Int8MultiArray, self.dvs_callback)
		self.pos_sub = rospy.Subscriber('transformData', Transform, self.pos_callback)
		self.left_pub = rospy.Publisher('leftMotorSpeed', Float32, queue_size=1)
		self.right_pub = rospy.Publisher('rightMotorSpeed', Float32, queue_size=1)
		self.reset_pub = rospy.Publisher('resetRobot', Bool, queue_size=None)
		self.dvs_data = np.array([0,0])
		self.pos_data = []
		self.v_pre = v_pre
		self.turn_pre = turn_pre
		self.resize_factor = [dvs_resolution[0]//resolution[0], (dvs_resolution[1]-crop_bottom-crop_top)//resolution[1]]
		self.outer = False
		rospy.init_node('dvs_controller')
		self.rate = rospy.Rate(rate)
		
		# Some values for calculating the distance to the center of the lane
		self.v1 = 2.5
		self.v2 = 7.5
		self.scale = 1.0
		self.c1 = np.array([self.scale*self.v1,self.scale*self.v1])
		self.c2 = np.array([self.scale*self.v2,self.scale*self.v2])
		self.c3 = np.array([self.scale*self.v2,self.scale*self.v1])
		self.c4 = np.array([self.scale*self.v1,self.scale*self.v2])
		self.r1_outer = self.scale*(self.v1-0.25)
		self.r2_outer = self.scale*(self.v1+0.25)
		self.l1_outer = self.scale*(self.v1+self.v2-0.25)
		self.l2_outer = self.scale*(0.25)
		self.r1_inner = self.scale*(self.v1-0.75)
		self.r2_inner = self.scale*(self.v1+0.75)
		self.l1_inner = self.scale*(self.v1+self.v2-0.75)
		self.l2_inner = self.scale*(0.75)
		self.d1_outer = 5.0
		self.d2_outer = 2*math.pi*self.r1_outer*0.25
		self.d3_outer = 5.0
		self.d4_outer = 2*math.pi*self.r1_outer*0.5
		self.d5_outer = 2*math.pi*self.r2_outer*0.25
		self.d6_outer = 2*math.pi*self.r1_outer*0.5
		self.d1_inner = 5.0
		self.d2_inner = 2*math.pi*self.r1_inner*0.25
		self.d3_inner = 5.0
		self.d4_inner = 2*math.pi*self.r1_inner*0.5
		self.d5_inner = 2*math.pi*self.r2_inner*0.25
		self.d6_inner = 2*math.pi*self.r1_inner*0.5
		self.d_outer = self.d1_outer + self.d2_outer + self.d3_outer + self.d4_outer + self.d5_outer + self.d6_outer
		self.d_inner = self.d1_inner + self.d2_inner + self.d3_inner + self.d4_inner + self.d5_inner + self.d6_inner 
Example #18
Source File: environment.py    From Training-Neural-Networks-for-Event-Based-End-to-End-Robot-Control with GNU General Public License v3.0
def __init__(self):
		self.dvs_sub = rospy.Subscriber('dvsData', Int8MultiArray, self.dvs_callback)
		self.pos_sub = rospy.Subscriber('transformData', Transform, self.pos_callback)
		self.left_pub = rospy.Publisher('leftMotorSpeed', Float32, queue_size=1)
		self.right_pub = rospy.Publisher('rightMotorSpeed', Float32, queue_size=1)
		self.reset_pub = rospy.Publisher('resetRobot', Bool, queue_size=None)
		self.dvs_data = np.array([0,0])
		self.pos_data = []
		self.distance = 0
		self.steps = 0
		self.v_pre = v_pre
		self.turn_pre = turn_pre
		self.resize_factor = [dvs_resolution[0]//resolution[0], (dvs_resolution[1]-crop_bottom-crop_top)//resolution[1]]
		self.outer = False
		rospy.init_node('dvs_controller')
		self.rate = rospy.Rate(rate)
		
		# Some values for calculating the distance to the center of the lane
		self.v1 = 2.5
		self.v2 = 7.5
		self.scale = 1.0
		self.c1 = np.array([self.scale*self.v1,self.scale*self.v1])
		self.c2 = np.array([self.scale*self.v2,self.scale*self.v2])
		self.c3 = np.array([self.scale*self.v2,self.scale*self.v1])
		self.c4 = np.array([self.scale*self.v1,self.scale*self.v2])
		self.r1_outer = self.scale*(self.v1-0.25)
		self.r2_outer = self.scale*(self.v1+0.25)
		self.l1_outer = self.scale*(self.v1+self.v2-0.25)
		self.l2_outer = self.scale*(0.25)
		self.r1_inner = self.scale*(self.v1-0.75)
		self.r2_inner = self.scale*(self.v1+0.75)
		self.l1_inner = self.scale*(self.v1+self.v2-0.75)
		self.l2_inner = self.scale*(0.75)
		self.d1_outer = 5.0
		self.d2_outer = 2*math.pi*self.r1_outer*0.25
		self.d3_outer = 5.0
		self.d4_outer = 2*math.pi*self.r1_outer*0.5
		self.d5_outer = 2*math.pi*self.r2_outer*0.25
		self.d6_outer = 2*math.pi*self.r1_outer*0.5
		self.d1_inner = 5.0
		self.d2_inner = 2*math.pi*self.r1_inner*0.25
		self.d3_inner = 5.0
		self.d4_inner = 2*math.pi*self.r1_inner*0.5
		self.d5_inner = 2*math.pi*self.r2_inner*0.25
		self.d6_inner = 2*math.pi*self.r1_inner*0.5
		self.d_outer = self.d1_outer + self.d2_outer + self.d3_outer + self.d4_outer + self.d5_outer + self.d6_outer
		self.d_inner = self.d1_inner + self.d2_inner + self.d3_inner + self.d4_inner + self.d5_inner + self.d6_inner