

The other answer to this question does not support the requested use case of two nodes publishing (different) static transforms. I solved this problem by writing my own StaticTransformManager to replace tf2_ros.StaticTransformBroadcaster entirely. If all nodes use the StaticTransformManager below instead of tf2_ros.StaticTransformBroadcaster, all static transforms remain available rather than just the most recently published set. This works by having StaticTransformManager read all currently available transforms from /tf_static and then include those transforms whenever it publishes a new transform on the same topic.

Note that the preferred way to get an instance of StaticTransformManager is to call StaticTransformManager.getManager() rather than the constructor, since there is no reason to have multiple instances of the underlying rospy.Subscriber and rospy.Publisher in a single node (see the usage sketch after the code).

import rospy
from geometry_msgs.msg import TransformStamped
from tf2_msgs.msg import TFMessage
from threading import Lock, Event

mgr = None

class StaticTransformManager:
    @staticmethod
    def getManager():
        global mgr
        if mgr is None:
            mgr = StaticTransformManager()
        return mgr

    def __init__(self):
        # A cache of child_frame_id -> TransformStamped with that child_frame_id
        self.transforms = dict()
        self.transforms_lock = Lock()

        # Information about an expected incoming transform and a callback to indicate it was received
        self.expected_message = None
        self.expected_message_lock = Lock()

        # Subscribe to /tf_static to learn the static transforms already published by other
        # nodes, and publish latched so late subscribers receive this node's most recent full set
        self.subscriber = rospy.Subscriber("/tf_static", TFMessage, callback=self._onNewTransformSet, queue_size=10)
        self.broadcaster = rospy.Publisher("/tf_static", TFMessage, queue_size=1, latch=True)

    def _onNewTransformSet(self, msg):
        # See if the incoming message contains the transform we are expecting back from ourselves
        with self.expected_message_lock:
            e = self.expected_message
            # Check if we're expecting a transform
            if e is not None:
                # Check if the received transform matches the one we're expecting
                tr = e[1]
                if any(t.header.seq == tr.header.seq
                       and t.header.stamp.secs == tr.header.stamp.secs
                       and t.header.stamp.nsecs == tr.header.stamp.nsecs
                       for t in msg.transforms):
                    # This message contains the expected transform
                    e = e[0]
                    self.expected_message = None
                else:
                    # This message does not contain the expected transform
                    e = None

        # Overwrite the appropriate transforms in the local cache
        with self.transforms_lock:
            for transform in msg.transforms:
                self.transforms[transform.child_frame_id] = transform

        # Confirm that the expected message has been processed
        if e is not None:
            e.set()

    def updateTransform(self, t):
        assert isinstance(t, TransformStamped)

        # Update our local cache of transforms and create a new message containing the entire set of transforms
        msg = TFMessage()
        with self.transforms_lock:
            self.transforms[t.child_frame_id] = t
            msg.transforms = list(self.transforms.values())

        # Keep trying to register our outgoing transform as the expected incoming message
        while True:
            with self.expected_message_lock:
                if self.expected_message is None:
                    received_message_back = Event()
                    self.expected_message = (received_message_back, t)
                    break
            # Another update is still waiting for its echo; back off briefly instead of busy-spinning
            rospy.sleep(0.01)

        # Actually release the transform set update as a latched message
        self.broadcaster.publish(msg)

        # Wait until we receive our own message back
        if not received_message_back.wait(5):
            rospy.logerr("StaticTransformManager did not receive its own message after publishing it on /tf_static")
            raise rospy.ROSException("StaticTransformManager did not receive its own /tf_static message within 5 seconds")
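
For completeness, here is a minimal usage sketch. The node name, frame ids, and transform values below are hypothetical and only for illustration; the point is that each node obtains the shared instance via StaticTransformManager.getManager() and passes a fully populated TransformStamped to updateTransform().

# Example usage in a node (hypothetical node and frame names)
import rospy
from geometry_msgs.msg import TransformStamped

rospy.init_node("example_static_tf_publisher")

t = TransformStamped()
t.header.stamp = rospy.Time.now()
t.header.frame_id = "base_link"
t.child_frame_id = "camera_link"
t.transform.translation.x = 0.1
t.transform.translation.z = 0.2
t.transform.rotation.w = 1.0  # identity rotation

# Always use getManager() so the node shares a single Subscriber/Publisher pair
mgr = StaticTransformManager.getManager()
mgr.updateTransform(t)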

EDIT: This original answer is invalid because multiple publishers within a single node are treated differently from multiple publishers in separate nodes (https://github.com/ros/ros_comm/issues/146). When the StaticTransformBroadcasters are actually put in separate nodes, both TFMessages are sent to each new subscriber of the topic.
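
In other words, the stock broadcaster already handles this case as long as each static transform is published from its own node. A minimal sketch of one such node (node name, frame ids, and values are hypothetical) might look like:

# One of several separate nodes, each latching its own static transform with the standard broadcaster
import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped

rospy.init_node("lidar_static_tf_node")

t = TransformStamped()
t.header.stamp = rospy.Time.now()
t.header.frame_id = "base_link"
t.child_frame_id = "lidar_link"
t.transform.rotation.w = 1.0  # identity rotation, zero translation

broadcaster = tf2_ros.StaticTransformBroadcaster()
broadcaster.sendTransform(t)

# Keep the node alive so its latched /tf_static message remains available to late subscribers
rospy.spin()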
