Skip to content

Commit f3693bf

Browse files
Multiple Camera support for transform_sensor (#24)
* wip
* fix default param
* add base frame param init
* allow transform sensor to be used with base frame
* update documentation
* switch to vec of array
1 parent 1556ad6 commit f3693bf

6 files changed

Lines changed: 72 additions & 20 deletions

File tree

fuse_models/doc/sensor_models/transform_sensor.md

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,12 @@ This sensor is intended to be used to estimate the position of a body relative t
88
For example, this sensor is useful in the case of measuring AprilTags on some external body and using those measurements to estimate the full pose and twist of that body.
99
An example of this application can be seen [here](../../../fuse_tutorials/config/fuse_apriltag_tutorial.yaml) (or try `ros2 launch fuse_tutorials fuse_apriltag_tutorial.launch.py` to see the tutorial in action).
1010

11-
More generally, this sensor takes a transform between some set `estimation_frame` and one of a predefined set of `transforms`. When it receives such a transform, it will then apply a transform to this measurement to change it to be a measurement of the given `target_frame`. Then, it generates a 3D pose transaction for use by its parent optimizer.
11+
More generally, this sensor takes a transform between one frame in the set `estimation_frames` and one frame in the set `transforms`. When it receives such a transform, it will then apply a transform to this measurement to change it to be a measurement of the given `target_frame`. Then, it generates a 3D pose transaction for use by its parent optimizer.
12+
13+
If you want to use multiple `estimation_frames`, you should define a `base_frame`.
14+
If this frame is non-empty, then you should set the `map_frame_id` and `world_frame_id` in the publisher to the same value.
15+
When this is set, the sensor will transform its pose to be a measurement in `base_frame` of `target_frame`, and the odom it produces will be in `base_frame`. This can be used when e.g. you have multiple cameras, all positioned relative to `world`, and want them measuring the same object.
16+
This allows the transform sensor to use any number of cameras to estimate the pose of some other object.
1217

1318
A slightly confusing aspect of the sensor is the need for multiple definitions of the target frame. Every frame in `transforms` needs a corresponding `target_frame` to be published for it to be used. This is simply because `tf` uses a tree data structure, and the target frames are leaves. These should all end up being in the same global location (discounting measurement noise) but will have different names.
1419

fuse_models/include/fuse_models/parameters/odometry_3d_publisher_params.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@ struct Odometry3DPublisherParams : public ParameterBase
152152
rclcpp::Duration covariance_throttle_period{ 0, 0 }; //!< The throttle period duration in seconds
153153
//!< to compute the covariance
154154
rclcpp::Duration tf_cache_time{ 10, 0 };
155-
rclcpp::Duration tf_timeout{ 0, static_cast<uint32_t>(RCUTILS_S_TO_NS(0.1)) };
155+
rclcpp::Duration tf_timeout{ 0, 0 };
156156
int queue_size{ 1 };
157157
std::string map_frame_id{ "map" };
158158
std::string odom_frame_id{ "odom" };

fuse_models/include/fuse_models/parameters/transform_sensor_params.hpp

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -83,8 +83,10 @@ struct TransformSensorParams : public ParameterBase
8383

8484
target_frame = fuse_core::getParam(interfaces, fuse_core::joinParameterName(ns, "target_frame"), target_frame);
8585

86-
estimation_frame =
87-
fuse_core::getParam(interfaces, fuse_core::joinParameterName(ns, "estimation_frame"), estimation_frame);
86+
base_frame = fuse_core::getParam(interfaces, fuse_core::joinParameterName(ns, "base_frame"), base_frame);
87+
88+
estimation_frames =
89+
fuse_core::getParam(interfaces, fuse_core::joinParameterName(ns, "estimation_frames"), estimation_frames);
8890

8991
pose_loss = fuse_core::loadLossConfig(interfaces, fuse_core::joinParameterName(ns, "pose_loss"));
9092
pose_covariance = fuse_core::getParam(interfaces, fuse_core::joinParameterName(ns, "pose_covariance"),
@@ -101,7 +103,8 @@ struct TransformSensorParams : public ParameterBase
101103
int queue_size{ 10 };
102104
std::vector<std::string> transforms;
103105
std::string target_frame;
104-
std::string estimation_frame;
106+
std::string base_frame;
107+
std::vector<std::string> estimation_frames;
105108
std::vector<size_t> position_indices;
106109
std::vector<size_t> orientation_indices;
107110
fuse_core::Loss::SharedPtr pose_loss;

fuse_models/include/fuse_models/transform_sensor.hpp

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,9 @@ class TransformSensor : public fuse_core::AsyncSensorModel
142142

143143
std::unique_ptr<tf2_ros::Buffer> tf_buffer_;
144144
std::unique_ptr<tf2_ros::TransformListener> tf_listener_;
145-
std::set<std::string> transforms_of_interest_;
145+
std::set<std::string> fiducial_frames_;
146+
std::set<std::string> estimation_frames_;
147+
std::vector<std::array<double, 6>> pose_covariances_;
146148

147149
rclcpp::Subscription<MessageType>::SharedPtr sub_;
148150

fuse_models/src/odometry_3d_publisher.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -441,7 +441,7 @@ void Odometry3DPublisher::predict(tf2::Transform& pose, nav_msgs::msg::Odometry&
441441
geometry_msgs::msg::AccelWithCovarianceStamped acceleration_output,
442442
bool latest_covariance_valid) const
443443
{
444-
double const dt = to_predict_to.seconds() - rclcpp::Time(odom_output.header.stamp).seconds();
444+
double const dt = std::min(to_predict_to.seconds() - rclcpp::Time(odom_output.header.stamp).seconds(), 0.);
445445
// Convert pose in Eigen representation
446446
fuse_core::Vector3d position;
447447
fuse_core::Vector3d velocity_linear;

fuse_models/src/transform_sensor.cpp

Lines changed: 55 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,8 @@
4848
#include <pluginlib/class_list_macros.hpp>
4949
#include <rclcpp/rclcpp.hpp>
5050
#include <stdexcept>
51+
#include <string>
52+
#include <tf2/LinearMath/Transform.hpp>
5153
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
5254

5355
// Register this sensor model with ROS as a plugin.
@@ -102,12 +104,32 @@ void TransformSensor::onInit()
102104

103105
for (auto const& name : params_.transforms)
104106
{
105-
transforms_of_interest_.insert(name);
107+
fiducial_frames_.insert(name);
106108
}
107109

108-
if (params_.estimation_frame.empty())
110+
if (params_.estimation_frames.empty())
109111
{
110-
throw std::runtime_error("No estimation frame specified.");
112+
throw std::runtime_error("No estimation frames specified.");
113+
}
114+
115+
for (auto const& name : params_.estimation_frames)
116+
{
117+
estimation_frames_.insert(name);
118+
}
119+
120+
if (params_.pose_covariance.size() != 6 * estimation_frames_.size())
121+
{
122+
throw std::runtime_error("Must provide 6 `pose_covariance` values per estimation frame");
123+
}
124+
125+
for (std::size_t i = 0; i < estimation_frames_.size(); ++i)
126+
{
127+
pose_covariances_.emplace_back();
128+
auto& cur_entry = pose_covariances_.back();
129+
for (std::size_t j = 0; j < 6; ++j)
130+
{
131+
cur_entry[j] = params_.pose_covariance[(i * 6) + j];
132+
}
111133
}
112134

113135
tf_buffer_ = std::make_unique<tf2_ros::Buffer>(clock_);
@@ -137,7 +159,9 @@ void TransformSensor::process(MessageType const& msg)
137159
std::string const& parent_tf_name = transform.header.frame_id;
138160

139161
std::string const& child_tf_name = transform.child_frame_id;
140-
bool const child_of_interest = transforms_of_interest_.find(child_tf_name) != transforms_of_interest_.end();
162+
bool const child_of_interest = fiducial_frames_.find(child_tf_name) != fiducial_frames_.end();
163+
auto const parent_it = estimation_frames_.find(parent_tf_name);
164+
bool const parent_of_interest = parent_it != estimation_frames_.end();
141165

142166
if (!child_of_interest)
143167
{
@@ -149,14 +173,15 @@ void TransformSensor::process(MessageType const& msg)
149173
// we must either have april tag -> estimation frame or estimation frame -> april tag tf
150174
if (child_of_interest)
151175
{
152-
if (parent_tf_name != params_.estimation_frame)
176+
if (!parent_of_interest)
153177
{
154178
// we don't care about this transform , skip it
155179
RCLCPP_DEBUG(logger_, "Ignoring transform from %s to %s", transform.header.frame_id.c_str(),
156180
child_tf_name.c_str());
157181
continue;
158182
}
159183
}
184+
std::size_t estimation_index = std::distance(estimation_frames_.begin(), parent_it);
160185
RCLCPP_DEBUG(logger_, "Got transform of interest from %s to %s", transform.header.frame_id.c_str(),
161186
child_tf_name.c_str());
162187
// Create a transaction object
@@ -165,10 +190,9 @@ void TransformSensor::process(MessageType const& msg)
165190

166191
tf2::Transform net_transform;
167192
tf2::fromMsg(transform.transform, net_transform);
168-
std::string target_frame_name;
169193
if (!params_.target_frame.empty())
170194
{
171-
target_frame_name = params_.target_frame + "_" + child_tf_name;
195+
std::string target_frame_name = params_.target_frame + "_" + child_tf_name;
172196
tf2::Transform april_to_target;
173197
try
174198
{
@@ -187,17 +211,35 @@ void TransformSensor::process(MessageType const& msg)
187211
}
188212
net_transform = net_transform * april_to_target.inverse();
189213
}
190-
else
191-
{
192-
target_frame_name = "";
193-
}
194214

195215
// Create the pose from the transform
196216
// we want a measurement from the april tag (transform of interest) to some reference frame
197217
// if we have the opposite, invert it and use that
198218
geometry_msgs::msg::PoseWithCovarianceStamped pose;
199219
pose.header = transform.header;
200220
pose.header.frame_id = parent_tf_name;
221+
// transform to base frame if it is defined
222+
if (!params_.base_frame.empty())
223+
{
224+
tf2::Transform base_to_estimation;
225+
try
226+
{
227+
tf2::fromMsg((*tf_buffer_)
228+
.lookupTransform(params_.base_frame, parent_tf_name,
229+
rclcpp::Time(transform.header.stamp.sec - 1, transform.header.stamp.nanosec),
230+
params_.tf_timeout)
231+
.transform,
232+
base_to_estimation);
233+
}
234+
catch (...)
235+
{
236+
// tf2 throws a bunch of different exceptions that don't inherit from one base, just skip (this will happen for
237+
// at least 1 second on startup)
238+
continue;
239+
}
240+
net_transform = base_to_estimation * net_transform;
241+
pose.header.frame_id = params_.base_frame;
242+
}
201243
pose.pose.pose.orientation.w = net_transform.getRotation().w();
202244
pose.pose.pose.orientation.x = net_transform.getRotation().x();
203245
pose.pose.pose.orientation.y = net_transform.getRotation().y();
@@ -207,9 +249,9 @@ void TransformSensor::process(MessageType const& msg)
207249
pose.pose.pose.position.z = net_transform.getOrigin().z();
208250

209251
// TODO(henrygerardmoore): figure out better method to set the covariance
210-
for (std::size_t i = 0; i < params_.pose_covariance.size(); ++i)
252+
for (std::size_t i = 0; i < pose_covariances_[estimation_index].size(); ++i)
211253
{
212-
pose.pose.covariance[i * 7] = params_.pose_covariance[i];
254+
pose.pose.covariance[i * 7] = pose_covariances_[estimation_index][i];
213255
}
214256

215257
bool const validate = !params_.disable_checks;

0 commit comments

Comments
 (0)