Commit
Merge pull request #222 from maxikyuu/dev
Add Standard Deviations for YAGSL SwerveDrive Pose Estimator
thenetworkgrinch authored Sep 3, 2024
2 parents ea3d8f3 + 15055a5 commit 3b1f380
Showing 2 changed files with 69 additions and 22 deletions.
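
For readers tuning these numbers: the two vectors passed to each Cameras constant in the diff below are per-axis standard deviations, (x meters, y meters, heading radians), that tell WPILib's SwerveDrivePoseEstimator how much to distrust a camera's pose estimate; larger values give the vision measurement less weight against odometry. A minimal sketch of that interpretation follows, reusing the values from this diff; the class name and printed labels are illustrative only and not part of the commit.

import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;

public class VisionStdDevSketch
{
  public static void main(String[] args)
  {
    // Values from the diff: loose trust for a single-tag solve,
    // tighter trust when multiple tags are visible.
    Matrix<N3, N1> singleTagStdDevs = VecBuilder.fill(4, 4, 8);
    Matrix<N3, N1> multiTagStdDevs  = VecBuilder.fill(0.5, 0.5, 1);

    // Each entry is (x std dev in meters, y std dev in meters, heading std dev in radians).
    // SwerveDrive.addVisionMeasurement(pose, timestampSeconds, stdDevs) passes them on to
    // SwerveDrivePoseEstimator, which weights the vision pose against odometry accordingly.
    System.out.println("single tag x/y/theta: "
                       + singleTagStdDevs.get(0, 0) + " / "
                       + singleTagStdDevs.get(1, 0) + " / "
                       + singleTagStdDevs.get(2, 0));
    System.out.println("multi tag  x/y/theta: "
                       + multiTagStdDevs.get(0, 0) + " / "
                       + multiTagStdDevs.get(1, 0) + " / "
                       + multiTagStdDevs.get(2, 0));
  }
}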
1 change: 1 addition & 0 deletions src/main/java/frc/robot/Constants.java
@@ -5,6 +5,7 @@
package frc.robot;

import com.pathplanner.lib.util.PIDConstants;

import edu.wpi.first.math.geometry.Translation3d;
import edu.wpi.first.math.util.Units;
import swervelib.math.Matter;
90 changes: 68 additions & 22 deletions src/main/java/frc/robot/subsystems/swervedrive/Vision.java
@@ -2,6 +2,8 @@

import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.apriltag.AprilTagFields;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation2d;
@@ -10,6 +12,8 @@
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.math.geometry.Translation2d;
import edu.wpi.first.math.geometry.Translation3d;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.math.util.Units;
import edu.wpi.first.wpilibj.smartdashboard.Field2d;
import frc.robot.Robot;
@@ -60,23 +64,26 @@ enum Cameras
new Rotation3d(0, Math.toRadians(-24.094), Math.toRadians(30)),
new Translation3d(Units.inchesToMeters(12.056),
Units.inchesToMeters(10.981),
Units.inchesToMeters(8.44))),
Units.inchesToMeters(8.44)),
VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5,0.5,1)),
/**
* Right Camera
*/
RIGHT_CAM("right",
new Rotation3d(0, Math.toRadians(-24.094), Math.toRadians(-30)),
new Translation3d(Units.inchesToMeters(12.056),
Units.inchesToMeters(-10.981),
Units.inchesToMeters(8.44))),
Units.inchesToMeters(8.44)),
VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5,0.5,1)),
/**
* Center Camera
*/
CENTER_CAM("center",
new Rotation3d(0, Units.degreesToRadians(18), 0),
new Translation3d(Units.inchesToMeters(-4.628),
Units.inchesToMeters(-10.687),
Units.inchesToMeters(16.129)));
Units.inchesToMeters(16.129)),
VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5,0.5,1));

/**
* Transform of the camera rotation and translation relative to the center of the robot
@@ -101,15 +108,22 @@ enum Cameras
*/
public final PhotonPoseEstimator poseEstimator;

public final Matrix<N3, N1> singleTagStdDevs;

public final Matrix<N3, N1> multiTagStdDevs;

/**
* Construct a Photon Camera class with help.
* Construct a Photon Camera class with help. Standard deviations are fake values;
* experiment to determine the estimation noise on an actual robot.
*
* @param name Name of the PhotonVision camera found in the PV UI.
* @param robotToCamRotation {@link Rotation3d} of the camera.
* @param robotToCamTranslation {@link Translation3d} relative to the center of the robot.
* @param singleTagStdDevs Single AprilTag standard deviations of estimated poses from the camera.
* @param multiTagStdDevs Multi AprilTag standard deviations of estimated poses from the camera.
*/
Cameras(String name, Rotation3d robotToCamRotation, Translation3d robotToCamTranslation)
Cameras(String name, Rotation3d robotToCamRotation, Translation3d robotToCamTranslation,
Matrix<N3, N1> singleTagStdDevs, Matrix<N3, N1> multiTagStdDevsMatrix)
{
latencyAlert = new Alert("'" + name + "' Camera is experiencing high latency.", AlertType.WARNING);

@@ -123,6 +137,9 @@ enum Cameras
robotToCamTransform);
poseEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY);

this.singleTagStdDevs = singleTagStdDevs;
this.multiTagStdDevs = multiTagStdDevsMatrix;

if (Robot.isSimulation())
{
SimCameraProperties cameraProp = new SimCameraProperties();
@@ -211,42 +228,71 @@ public Vision(Supplier<Pose2d> currentPose, Field2d field)
*/
public void updatePoseEstimation(SwerveDrive swerveDrive)
{
ArrayList<EstimatedRobotPose> estimatedRobotPoses = getEstimatedGlobalPose();
if(Robot.isReal()) {
for (EstimatedRobotPose i : estimatedRobotPoses)
for (Cameras camera : Cameras.values())
{
swerveDrive.addVisionMeasurement(i.estimatedPose.toPose2d(), i.timestampSeconds);
Optional<EstimatedRobotPose> poseEst = getEstimatedGlobalPose(camera);
if (poseEst.isPresent()) {
var pose = poseEst.get();
swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(), pose.timestampSeconds, getEstimationStdDevs(camera));
}
}
} else visionSim.update(swerveDrive.getPose());
}

/**
* generates the estimated robot pose. Returns empty if:
* Generates the estimated robot pose. Returns empty if:
* <ul>
* <li> No Pose Estimates could be generated</li>
* <li> The generated pose estimate was considered not accurate</li>
* </ul>
*
* @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to create the estimate
*/
public ArrayList<EstimatedRobotPose> getEstimatedGlobalPose()
public Optional<EstimatedRobotPose> getEstimatedGlobalPose(Cameras camera)
{
ArrayList<EstimatedRobotPose> poses = new ArrayList<>();

for (Cameras c : Cameras.values())
// Alternative method if you want to use both a pose filter and standard deviations based on distance + tags seen.
// Optional<EstimatedRobotPose> poseEst = filterPose(camera.poseEstimator.update());
Optional<EstimatedRobotPose> poseEst = camera.poseEstimator.update();
if (poseEst.isPresent())
{
Optional<EstimatedRobotPose> poseEst = filterPose(c.poseEstimator.update());

if (poseEst.isPresent())
{
poses.add(poseEst.get());
field2d.getObject(c + " est pose").setPose(poseEst.get().estimatedPose.toPose2d());
}
field2d.getObject(camera + " est pose").setPose(poseEst.get().estimatedPose.toPose2d());
}

return poses;
return poseEst;
}

/**
* The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use
* with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}.
* This should only be used when there are targets visible.
* @param camera Desired camera to get the standard deviation of the estimated pose.
*/
public Matrix<N3, N1> getEstimationStdDevs(Cameras camera) {
var poseEst = getEstimatedGlobalPose(camera);
var estStdDevs = camera.singleTagStdDevs;
var targets = getLatestResult(camera).getTargets();
int numTags = 0;
double avgDist = 0;
for (var tgt : targets) {
var tagPose = camera.poseEstimator.getFieldTags().getTagPose(tgt.getFiducialId());
if (tagPose.isEmpty()) continue;
numTags++;
if (poseEst.isPresent()) {
avgDist += PhotonUtils.getDistanceToPose(poseEst.get().estimatedPose.toPose2d(), tagPose.get().toPose2d());
}
}
if (numTags == 0) return estStdDevs;
avgDist /= numTags;
// Decrease std devs if multiple targets are visible
if (numTags > 1) estStdDevs = camera.multiTagStdDevs;
// Increase std devs based on (average) distance
if (numTags == 1 && avgDist > 4)
estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30));

return estStdDevs;
}

/**
* Filter pose via the ambiguity and find the best estimate between all of the cameras, throwing out distances more than
* 10m for a short amount of time.
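
The distance scaling at the end of getEstimationStdDevs above can be sanity-checked by hand. The sketch below mirrors that arithmetic outside the subsystem; the 4 m cutoff, the 1 + d^2/30 factor, and the base vectors come from the diff, while the helper name and the sample distances are made up for illustration.

import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;

public class StdDevScalingSketch
{
  /** Mirrors the scaling logic in Vision.getEstimationStdDevs, for illustration only. */
  static Matrix<N3, N1> scale(int numTags, double avgDistMeters)
  {
    Matrix<N3, N1> stdDevs = (numTags > 1) ? VecBuilder.fill(0.5, 0.5, 1) // multi-tag base
                                           : VecBuilder.fill(4, 4, 8);    // single-tag base
    if (numTags == 1 && avgDistMeters > 4)
    {
      // A lone tag seen from far away is effectively rejected.
      return VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
    }
    // Trust falls off quadratically with average distance to the visible tags.
    return stdDevs.times(1 + (avgDistMeters * avgDistMeters / 30));
  }

  public static void main(String[] args)
  {
    // One tag at 3 m: 4 * (1 + 9/30) = 5.2 m on x and y.
    System.out.println(scale(1, 3.0).get(0, 0));
    // Two tags at 3 m: 0.5 * (1 + 9/30) = 0.65 m on x and y.
    System.out.println(scale(2, 3.0).get(0, 0));
    // One tag at 5 m: beyond the 4 m cutoff, the measurement is ignored.
    System.out.println(scale(1, 5.0).get(0, 0));
  }
}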
