WPILibC++ 2026.2.2
Loading...
Searching...
No Matches
PoseEstimator.h
Go to the documentation of this file.
1// Copyright (c) FIRST and other WPILib contributors.
2// Open Source Software; you can modify and/or share it under the terms of
3// the WPILib BSD license file in the root directory of this project.
4
5#pragma once
6
7#include <map>
8#include <optional>
9#include <vector>
10
11#include <Eigen/Core>
12#include <wpi/SymbolExports.h>
13#include <wpi/array.h>
14
15#include "frc/geometry/Pose2d.h"
22#include "units/time.h"
23#include "wpimath/MathShared.h"
24
25namespace frc {
26
27/**
28 * This class wraps odometry to fuse latency-compensated
29 * vision measurements with encoder measurements. Robot code should not use this
30 * directly- Instead, use the particular type for your drivetrain (e.g.,
31 * DifferentialDrivePoseEstimator). It will correct for noisy vision
32 * measurements and encoder drift. It is intended to be an easy drop-in for
33 * Odometry.
34 *
35 * Update() should be called every robot loop.
36 *
37 * AddVisionMeasurement() can be called as infrequently as you want; if you
38 * never call it, then this class will behave like regular encoder odometry.
39 *
40 * @tparam WheelSpeeds Wheel speeds type.
41 * @tparam WheelPositions Wheel positions type.
42 */
43template <typename WheelSpeeds, typename WheelPositions>
45 public:
46 /**
47 * Constructs a PoseEstimator.
48 *
49 * @warning The initial pose estimate will always be the default pose,
50 * regardless of the odometry's current pose.
51 *
52 * @param kinematics A correctly-configured kinematics object for your
53 * drivetrain.
54 * @param odometry A correctly-configured odometry object for your drivetrain.
55 * @param stateStdDevs Standard deviations of the pose estimate (x position in
56 * meters, y position in meters, and heading in radians). Increase these
57 * numbers to trust your state estimate less.
58 * @param visionMeasurementStdDevs Standard deviations of the vision pose
59 * measurement (x position in meters, y position in meters, and heading in
60 * radians). Increase these numbers to trust the vision pose measurement
61 * less.
62 */
65 const wpi::array<double, 3>& stateStdDevs,
66 const wpi::array<double, 3>& visionMeasurementStdDevs)
67 : m_odometry(odometry) {
68 for (size_t i = 0; i < 3; ++i) {
69 m_q[i] = stateStdDevs[i] * stateStdDevs[i];
70 }
71
72 SetVisionMeasurementStdDevs(visionMeasurementStdDevs);
75 }
76
77 /**
78 * Sets the pose estimator's trust in vision measurements. This might be used
79 * to change trust in vision measurements after the autonomous period, or to
80 * change trust as distance to a vision target increases.
81 *
82 * @param visionMeasurementStdDevs Standard deviations of the vision pose
83 * measurement (x position in meters, y position in meters, and heading in
84 * radians). Increase these numbers to trust the vision pose measurement
85 * less.
86 */
88 const wpi::array<double, 3>& visionMeasurementStdDevs) {
89 // Diagonal of measurement covariance matrix R
91 for (size_t i = 0; i < 3; ++i) {
92 r[i] = visionMeasurementStdDevs[i] * visionMeasurementStdDevs[i];
93 }
94
95 // Solve for closed form Kalman gain for continuous Kalman filter with A = 0
96 // and C = I. See wpimath/algorithms.md.
97 for (size_t row = 0; row < 3; ++row) {
98 if (m_q[row] == 0.0) {
99 m_vision_K.diagonal()[row] = 0.0;
100 } else {
101 m_vision_K.diagonal()[row] =
102 m_q[row] / (m_q[row] + std::sqrt(m_q[row] * r[row]));
103 }
104 }
105 }
106
107 /**
108 * Resets the robot's position on the field.
109 *
110 * The gyroscope angle does not need to be reset in the user's robot code.
111 * The library automatically takes care of offsetting the gyro angle.
112 *
113 * @param gyroAngle The current gyro angle.
114 * @param wheelPositions The distances traveled by the encoders.
115 * @param pose The estimated pose of the robot on the field.
116 */
117 void ResetPosition(const Rotation2d& gyroAngle,
118 const WheelPositions& wheelPositions, const Pose2d& pose) {
119 // Reset state estimate and error covariance
120 m_odometry.ResetPosition(gyroAngle, wheelPositions, pose);
121 m_odometryPoseBuffer.Clear();
122 m_visionUpdates.clear();
123 m_poseEstimate = m_odometry.GetPose();
124 }
125
126 /**
127 * Resets the robot's pose.
128 *
129 * @param pose The pose to reset to.
130 */
131 void ResetPose(const Pose2d& pose) {
132 m_odometry.ResetPose(pose);
133 m_odometryPoseBuffer.Clear();
134 m_visionUpdates.clear();
135 m_poseEstimate = m_odometry.GetPose();
136 }
137
138 /**
139 * Resets the robot's translation.
140 *
141 * @param translation The pose to translation to.
142 */
143#if defined(__GNUC__) && !defined(__clang__)
144#pragma GCC diagnostic push
145#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
146#endif // defined(__GNUC__) && !defined(__clang__)
147 void ResetTranslation(const Translation2d& translation) {
148 m_odometry.ResetTranslation(translation);
149
150 const std::optional<std::pair<units::second_t, VisionUpdate>>
151 latestVisionUpdate =
152 m_visionUpdates.empty() ? std::nullopt
153 : std::optional{*m_visionUpdates.crbegin()};
154 m_odometryPoseBuffer.Clear();
155 m_visionUpdates.clear();
156
157 if (latestVisionUpdate) {
158 // apply vision compensation to the pose rotation
159 const VisionUpdate visionUpdate{
160 Pose2d{translation, latestVisionUpdate->second.visionPose.Rotation()},
161 Pose2d{translation,
162 latestVisionUpdate->second.odometryPose.Rotation()}};
163 m_visionUpdates[latestVisionUpdate->first] = visionUpdate;
164 m_poseEstimate = visionUpdate.Compensate(m_odometry.GetPose());
165 } else {
166 m_poseEstimate = m_odometry.GetPose();
167 }
168 }
169#if defined(__GNUC__) && !defined(__clang__)
170#pragma GCC diagnostic pop
171#endif // defined(__GNUC__) && !defined(__clang__)
172
173 /**
174 * Resets the robot's rotation.
175 *
176 * @param rotation The rotation to reset to.
177 */
178#if defined(__GNUC__) && !defined(__clang__)
179#pragma GCC diagnostic push
180#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
181#endif // defined(__GNUC__) && !defined(__clang__)
182 void ResetRotation(const Rotation2d& rotation) {
183 m_odometry.ResetRotation(rotation);
184
185 const std::optional<std::pair<units::second_t, VisionUpdate>>
186 latestVisionUpdate =
187 m_visionUpdates.empty() ? std::nullopt
188 : std::optional{*m_visionUpdates.crbegin()};
189 m_odometryPoseBuffer.Clear();
190 m_visionUpdates.clear();
191
192 if (latestVisionUpdate) {
193 // apply vision compensation to the pose translation
194 const VisionUpdate visionUpdate{
195 Pose2d{latestVisionUpdate->second.visionPose.Translation(), rotation},
196 Pose2d{latestVisionUpdate->second.odometryPose.Translation(),
197 rotation}};
198 m_visionUpdates[latestVisionUpdate->first] = visionUpdate;
199 m_poseEstimate = visionUpdate.Compensate(m_odometry.GetPose());
200 } else {
201 m_poseEstimate = m_odometry.GetPose();
202 }
203 }
204#if defined(__GNUC__) && !defined(__clang__)
205#pragma GCC diagnostic pop
206#endif // defined(__GNUC__) && !defined(__clang__)
207
208 /**
209 * Gets the estimated robot pose.
210 *
211 * @return The estimated robot pose in meters.
212 */
213 Pose2d GetEstimatedPosition() const { return m_poseEstimate; }
214
215 /**
216 * Return the pose at a given timestamp, if the buffer is not empty.
217 *
218 * @param timestamp The pose's timestamp.
219 * @return The pose at the given timestamp (or std::nullopt if the buffer is
220 * empty).
221 */
222 std::optional<Pose2d> SampleAt(units::second_t timestamp) const {
223 // Step 0: If there are no odometry updates to sample, skip.
224 if (m_odometryPoseBuffer.GetInternalBuffer().empty()) {
225 return std::nullopt;
226 }
227
228 // Step 1: Make sure timestamp matches the sample from the odometry pose
229 // buffer. (When sampling, the buffer will always use a timestamp
230 // between the first and last timestamps)
231 units::second_t oldestOdometryTimestamp =
232 m_odometryPoseBuffer.GetInternalBuffer().front().first;
233 units::second_t newestOdometryTimestamp =
234 m_odometryPoseBuffer.GetInternalBuffer().back().first;
235 timestamp =
236 std::clamp(timestamp, oldestOdometryTimestamp, newestOdometryTimestamp);
237
238 // Step 2: If there are no applicable vision updates, use the odometry-only
239 // information.
240 if (m_visionUpdates.empty() || timestamp < m_visionUpdates.begin()->first) {
241 return m_odometryPoseBuffer.Sample(timestamp);
242 }
243
244 // Step 3: Get the latest vision update from before or at the timestamp to
245 // sample at.
246 // First, find the iterator past the sample timestamp, then go back one.
247 // Note that upper_bound() won't return begin() because we check begin()
248 // earlier.
249 auto floorIter = m_visionUpdates.upper_bound(timestamp);
250 --floorIter;
251 auto visionUpdate = floorIter->second;
252
253 // Step 4: Get the pose measured by odometry at the time of the sample.
254 auto odometryEstimate = m_odometryPoseBuffer.Sample(timestamp);
255
256 // Step 5: Apply the vision compensation to the odometry pose.
257 // TODO Replace with std::optional::transform() in C++23
258 if (odometryEstimate) {
259 return visionUpdate.Compensate(*odometryEstimate);
260 }
261 return std::nullopt;
262 }
263
264 /**
265 * Adds a vision measurement to the Kalman Filter. This will correct
266 * the odometry pose estimate while still accounting for measurement noise.
267 *
268 * This method can be called as infrequently as you want, as long as you are
269 * calling Update() every loop.
270 *
271 * To promote stability of the pose estimate and make it robust to bad vision
272 * data, we recommend only adding vision measurements that are already within
273 * one meter or so of the current pose estimate.
274 *
275 * @param visionRobotPose The pose of the robot as measured by the vision
276 * camera.
277 * @param timestamp The timestamp of the vision measurement in seconds. Note
278 * that if you don't use your own time source by calling UpdateWithTime(),
279 * then you must use a timestamp with an epoch since FPGA startup (i.e.,
280 * the epoch of this timestamp is the same epoch as
281 * frc::Timer::GetTimestamp(). This means that you should use
282 * frc::Timer::GetTimestamp() as your time source in this case.
283 */
284 void AddVisionMeasurement(const Pose2d& visionRobotPose,
285 units::second_t timestamp) {
286 // Step 0: If this measurement is old enough to be outside the pose buffer's
287 // timespan, skip.
288 if (m_odometryPoseBuffer.GetInternalBuffer().empty() ||
289 m_odometryPoseBuffer.GetInternalBuffer().front().first -
290 kBufferDuration >
291 timestamp) {
292 return;
293 }
294
295 // Step 1: Clean up any old entries
296 CleanUpVisionUpdates();
297
298 // Step 2: Get the pose measured by odometry at the moment the vision
299 // measurement was made.
300 auto odometrySample = m_odometryPoseBuffer.Sample(timestamp);
301
302 if (!odometrySample) {
303 return;
304 }
305
306 // Step 3: Get the vision-compensated pose estimate at the moment the vision
307 // measurement was made.
308 auto visionSample = SampleAt(timestamp);
309
310 if (!visionSample) {
311 return;
312 }
313
314 // Step 4: Measure the transform between the old pose estimate and the
315 // vision transform.
316 auto transform = visionRobotPose - visionSample.value();
317
318 // Step 5: We should not trust the transform entirely, so instead we scale
319 // this transform by a Kalman gain matrix representing how much we trust
320 // vision measurements compared to our current pose.
321 Eigen::Vector3d k_times_transform =
322 m_vision_K * Eigen::Vector3d{transform.X().value(),
323 transform.Y().value(),
324 transform.Rotation().Radians().value()};
325
326 // Step 6: Convert back to Transform2d.
327 Transform2d scaledTransform{
328 units::meter_t{k_times_transform(0)},
329 units::meter_t{k_times_transform(1)},
330 Rotation2d{units::radian_t{k_times_transform(2)}}};
331
332 // Step 7: Calculate and record the vision update.
333 VisionUpdate visionUpdate{*visionSample + scaledTransform, *odometrySample};
334 m_visionUpdates[timestamp] = visionUpdate;
335
336 // Step 8: Remove later vision measurements. (Matches previous behavior)
337 auto firstAfter = m_visionUpdates.upper_bound(timestamp);
338 m_visionUpdates.erase(firstAfter, m_visionUpdates.end());
339
340 // Step 9: Update latest pose estimate. Since we cleared all updates after
341 // this vision update, it's guaranteed to be the latest vision update.
342 m_poseEstimate = visionUpdate.Compensate(m_odometry.GetPose());
343 }
344
345 /**
346 * Adds a vision measurement to the Kalman Filter. This will correct
347 * the odometry pose estimate while still accounting for measurement noise.
348 *
349 * This method can be called as infrequently as you want, as long as you are
350 * calling Update() every loop.
351 *
352 * To promote stability of the pose estimate and make it robust to bad vision
353 * data, we recommend only adding vision measurements that are already within
354 * one meter or so of the current pose estimate.
355 *
356 * Note that the vision measurement standard deviations passed into this
357 * method will continue to apply to future measurements until a subsequent
358 * call to SetVisionMeasurementStdDevs() or this method.
359 *
360 * @param visionRobotPose The pose of the robot as measured by the vision
361 * camera.
362 * @param timestamp The timestamp of the vision measurement in seconds. Note
363 * that if you don't use your own time source by calling UpdateWithTime(),
364 * then you must use a timestamp with an epoch since FPGA startup (i.e.,
365 * the epoch of this timestamp is the same epoch as
 366 * frc::Timer::GetTimestamp()). This means that you should use
367 * frc::Timer::GetTimestamp() as your time source in this case.
368 * @param visionMeasurementStdDevs Standard deviations of the vision pose
369 * measurement (x position in meters, y position in meters, and heading in
370 * radians). Increase these numbers to trust the vision pose measurement
371 * less.
372 */
374 const Pose2d& visionRobotPose, units::second_t timestamp,
375 const wpi::array<double, 3>& visionMeasurementStdDevs) {
376 SetVisionMeasurementStdDevs(visionMeasurementStdDevs);
377 AddVisionMeasurement(visionRobotPose, timestamp);
378 }
379
380 /**
381 * Updates the pose estimator with wheel encoder and gyro information. This
382 * should be called every loop.
383 *
384 * @param gyroAngle The current gyro angle.
385 * @param wheelPositions The distances traveled by the encoders.
386 *
387 * @return The estimated pose of the robot in meters.
388 */
389 Pose2d Update(const Rotation2d& gyroAngle,
390 const WheelPositions& wheelPositions) {
391 return UpdateWithTime(wpi::math::MathSharedStore::GetTimestamp(), gyroAngle,
392 wheelPositions);
393 }
394
395 /**
396 * Updates the pose estimator with wheel encoder and gyro information. This
397 * should be called every loop.
398 *
399 * @param currentTime The time at which this method was called.
400 * @param gyroAngle The current gyro angle.
401 * @param wheelPositions The distances traveled by the encoders.
402 *
403 * @return The estimated pose of the robot in meters.
404 */
405 Pose2d UpdateWithTime(units::second_t currentTime,
406 const Rotation2d& gyroAngle,
407 const WheelPositions& wheelPositions) {
408 auto odometryEstimate = m_odometry.Update(gyroAngle, wheelPositions);
409
410 m_odometryPoseBuffer.AddSample(currentTime, odometryEstimate);
411
412 if (m_visionUpdates.empty()) {
413 m_poseEstimate = odometryEstimate;
414 } else {
415 auto visionUpdate = m_visionUpdates.rbegin()->second;
416 m_poseEstimate = visionUpdate.Compensate(odometryEstimate);
417 }
418
419 return GetEstimatedPosition();
420 }
421
422 private:
423 /**
424 * Removes stale vision updates that won't affect sampling.
425 */
426 void CleanUpVisionUpdates() {
427 // Step 0: If there are no odometry samples, skip.
428 if (m_odometryPoseBuffer.GetInternalBuffer().empty()) {
429 return;
430 }
431
432 // Step 1: Find the oldest timestamp that needs a vision update.
433 units::second_t oldestOdometryTimestamp =
434 m_odometryPoseBuffer.GetInternalBuffer().front().first;
435
436 // Step 2: If there are no vision updates before that timestamp, skip.
437 if (m_visionUpdates.empty() ||
438 oldestOdometryTimestamp < m_visionUpdates.begin()->first) {
439 return;
440 }
441
442 // Step 3: Find the newest vision update timestamp before or at the oldest
443 // timestamp.
444 // First, find the iterator past the oldest odometry timestamp, then go
445 // back one. Note that upper_bound() won't return begin() because we check
446 // begin() earlier.
447 auto newestNeededVisionUpdate =
448 m_visionUpdates.upper_bound(oldestOdometryTimestamp);
449 --newestNeededVisionUpdate;
450
451 // Step 4: Remove all entries strictly before the newest timestamp we need.
452 m_visionUpdates.erase(m_visionUpdates.begin(), newestNeededVisionUpdate);
453 }
454
455 struct VisionUpdate {
456 // The vision-compensated pose estimate
457 Pose2d visionPose;
458
459 // The pose estimated based solely on odometry
460 Pose2d odometryPose;
461
462 /**
463 * Returns the vision-compensated version of the pose. Specifically, changes
464 * the pose from being relative to this record's odometry pose to being
465 * relative to this record's vision pose.
466 *
467 * @param pose The pose to compensate.
468 * @return The compensated pose.
469 */
470 Pose2d Compensate(const Pose2d& pose) const {
471 auto delta = pose - odometryPose;
472 return visionPose + delta;
473 }
474 };
475
476 static constexpr units::second_t kBufferDuration = 1.5_s;
477
478 Odometry<WheelSpeeds, WheelPositions>& m_odometry;
479
480 // Diagonal of process noise covariance matrix Q
482
483 // Kalman gain matrix K
484 Eigen::DiagonalMatrix<double, 3> m_vision_K =
485 Eigen::DiagonalMatrix<double, 3>::Zero();
486
487 // Maps timestamps to odometry-only pose estimates
488 TimeInterpolatableBuffer<Pose2d> m_odometryPoseBuffer{kBufferDuration};
489 // Maps timestamps to vision updates
490 // Always contains one entry before the oldest entry in m_odometryPoseBuffer,
491 // unless there have been no vision measurements after the last reset. May
492 // contain one entry while m_odometryPoseBuffer is empty to correct for
493 // translation/rotation after a call to ResetRotation/ResetTranslation.
494 std::map<units::second_t, VisionUpdate> m_visionUpdates;
495
496 Pose2d m_poseEstimate;
497};
498
499} // namespace frc
#define WPILIB_DLLEXPORT
Definition SymbolExports.h:36
Helper class that converts a chassis velocity (dx, dy, and dtheta components) into individual wheel s...
Definition Kinematics.h:25
Class for odometry.
Definition Odometry.h:30
Represents a 2D pose containing translational and rotational elements.
Definition Pose2d.h:28
This class wraps odometry to fuse latency-compensated vision measurements with encoder measurements.
Definition PoseEstimator.h:44
void ResetRotation(const Rotation2d &rotation)
Resets the robot's rotation.
Definition PoseEstimator.h:182
void SetVisionMeasurementStdDevs(const wpi::array< double, 3 > &visionMeasurementStdDevs)
Sets the pose estimator's trust in vision measurements.
Definition PoseEstimator.h:87
Pose2d Update(const Rotation2d &gyroAngle, const WheelPositions &wheelPositions)
Updates the pose estimator with wheel encoder and gyro information.
Definition PoseEstimator.h:389
std::optional< Pose2d > SampleAt(units::second_t timestamp) const
Return the pose at a given timestamp, if the buffer is not empty.
Definition PoseEstimator.h:222
void AddVisionMeasurement(const Pose2d &visionRobotPose, units::second_t timestamp, const wpi::array< double, 3 > &visionMeasurementStdDevs)
Adds a vision measurement to the Kalman Filter.
Definition PoseEstimator.h:373
PoseEstimator(Kinematics< WheelSpeeds, WheelPositions > &kinematics, Odometry< WheelSpeeds, WheelPositions > &odometry, const wpi::array< double, 3 > &stateStdDevs, const wpi::array< double, 3 > &visionMeasurementStdDevs)
Constructs a PoseEstimator.
Definition PoseEstimator.h:63
void AddVisionMeasurement(const Pose2d &visionRobotPose, units::second_t timestamp)
Adds a vision measurement to the Kalman Filter.
Definition PoseEstimator.h:284
void ResetPose(const Pose2d &pose)
Resets the robot's pose.
Definition PoseEstimator.h:131
Pose2d UpdateWithTime(units::second_t currentTime, const Rotation2d &gyroAngle, const WheelPositions &wheelPositions)
Updates the pose estimator with wheel encoder and gyro information.
Definition PoseEstimator.h:405
void ResetPosition(const Rotation2d &gyroAngle, const WheelPositions &wheelPositions, const Pose2d &pose)
Resets the robot's position on the field.
Definition PoseEstimator.h:117
Pose2d GetEstimatedPosition() const
Gets the estimated robot pose.
Definition PoseEstimator.h:213
void ResetTranslation(const Translation2d &translation)
Resets the robot's translation.
Definition PoseEstimator.h:147
A rotation in a 2D coordinate frame represented by a point on the unit circle (cosine and sine).
Definition Rotation2d.h:31
Represents a transformation for a Pose2d in the pose's frame.
Definition Transform2d.h:21
Represents a translation in 2D space.
Definition Translation2d.h:30
This class is a wrapper around std::array that does compile time size checking.
Definition array.h:26
static units::second_t GetTimestamp()
Definition MathShared.h:84
static void ReportUsage(MathUsageId id, int count)
Definition MathShared.h:80
Definition CAN.h:11
constexpr empty_array_t empty_array
Definition array.h:16