Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import static edu.wpi.first.units.Units.Microseconds;
import static edu.wpi.first.units.Units.Seconds;

import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.apriltag.AprilTagFields;
import edu.wpi.first.math.Matrix;
Expand Down Expand Up @@ -121,38 +122,39 @@ public static Pose2d getAprilTagPose(int aprilTag, Transform2d robotOffset)

}

/**
 * Feed every available vision pose estimate into the {@link SwerveDrive} pose estimator.
 *
 * @param swerveDrive {@link SwerveDrive} instance.
 */
public void updatePoseEstimation(SwerveDrive swerveDrive)
{
  // In maple-sim, odometry is simulated from encoder values and accumulates
  // realistic error (skidding, drifting), so it may not match the true pose.
  // The vision simulation must therefore be driven from the actual simulated
  // drivetrain pose — this is exactly why teams add vision correction to odometry.
  if (SwerveDriveTelemetry.isSimulation)
  {
    swerveDrive.getSimulationDriveTrainPose().ifPresent(visionSim::update);
  }

  // Apply each camera's estimated global pose (when one is available) as a
  // vision measurement, using that camera's current standard deviations.
  for (Cameras camera : Cameras.values())
  {
    getEstimatedGlobalPose(camera).ifPresent(
        pose -> swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(),
                                                 pose.timestampSeconds,
                                                 camera.curStdDevs));
  }
}

/**
 * Update the pose estimation inside of {@link SwerveDrive} with all of the given poses.
 *
 * @param swerveDrive {@link SwerveDrive} instance.
 */
public void updatePoseEstimation(SwerveDrive swerveDrive)
{
  // Fetch once — used both by the guard and the update below.
  var simDriveTrainPose = swerveDrive.getSimulationDriveTrainPose();
  if (SwerveDriveTelemetry.isSimulation && simDriveTrainPose.isPresent())
  {
    /*
     * In the maple-sim, odometry is simulated using encoder values, accounting for factors like skidding and drifting.
     * As a result, the odometry may not always be 100% accurate.
     * However, the vision system should be able to provide a reasonably accurate pose estimation, even when odometry is incorrect.
     * (This is why teams implement vision system to correct odometry.)
     * Therefore, we must ensure that the actual robot pose is provided in the simulator when updating the vision simulation during the simulation.
     */
    visionSim.update(simDriveTrainPose.get());
  }
  for (Cameras camera : Cameras.values())
  {
    // Seed the camera's pose estimator with the current heading at the current
    // FPGA timestamp. NOTE(review): presumably this enables heading-dependent
    // single-tag strategies in PhotonVision — confirm against the estimator's
    // configured strategy.
    camera.poseEstimator.addHeadingData(Timer.getFPGATimestamp(), swerveDrive.getPose().getRotation());
    Optional<EstimatedRobotPose> poseEst = getEstimatedGlobalPose(camera);
    if (poseEst.isPresent())
    {
      var pose = poseEst.get();
      swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(),
                                       pose.timestampSeconds,
                                       camera.curStdDevs);
    }
  }
}

/**
* Generates the estimated robot pose. Returns empty if:
* <ul>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import edu.wpi.first.networktables.NetworkTablesJNI;
import edu.wpi.first.wpilibj.Alert;
import edu.wpi.first.wpilibj.Alert.AlertType;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.smartdashboard.Field2d;
import frc.robot.Robot;
import java.awt.Desktop;
Expand Down Expand Up @@ -121,38 +122,36 @@ public static Pose2d getAprilTagPose(int aprilTag, Transform2d robotOffset)

}

/**
* Update the pose estimation inside of {@link SwerveDrive} with all of the given poses.
*
* @param swerveDrive {@link SwerveDrive} instance.
*/
public void updatePoseEstimation(SwerveDrive swerveDrive)
{
if (SwerveDriveTelemetry.isSimulation && swerveDrive.getSimulationDriveTrainPose().isPresent())
{
/*
* In the maple-sim, odometry is simulated using encoder values, accounting for factors like skidding and drifting.
* As a result, the odometry may not always be 100% accurate.
* However, the vision system should be able to provide a reasonably accurate pose estimation, even when odometry is incorrect.
* (This is why teams implement vision system to correct odometry.)
* Therefore, we must ensure that the actual robot pose is provided in the simulator when updating the vision simulation during the simulation.
*/
visionSim.update(swerveDrive.getSimulationDriveTrainPose().get());
}
for (Cameras camera : Cameras.values())
/**
* Update the pose estimation inside of {@link SwerveDrive} with all of the given poses.
*
* @param swerveDrive {@link SwerveDrive} instance.
*/
public void updatePoseEstimation(SwerveDrive swerveDrive)
{
Optional<EstimatedRobotPose> poseEst = getEstimatedGlobalPose(camera);
if (poseEst.isPresent())
{
var pose = poseEst.get();
swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(),
pose.timestampSeconds,
camera.curStdDevs);
}
}

}

if (SwerveDriveTelemetry.isSimulation && swerveDrive.getSimulationDriveTrainPose().isPresent())
{
/*
* In the maple-sim, odometry is simulated using encoder values, accounting for factors like skidding and drifting.
* As a result, the odometry may not always be 100% accurate.
* However, the vision system should be able to provide a reasonably accurate pose estimation, even when odometry is incorrect.
* (This is why teams implement vision system to correct odometry.)
* Therefore, we must ensure that the actual robot pose is provided in the simulator when updating the vision simulation during the simulation.
*/
visionSim.update(swerveDrive.getSimulationDriveTrainPose().get());
}
for (Cameras camera : Cameras.values())
{
camera.poseEstimator.addHeadingData(Timer.getFPGATimestamp(), swerveDrive.getPose().getRotation());
Optional<EstimatedRobotPose> poseEst = getEstimatedGlobalPose(camera);
if (poseEst.isPresent()) {
var pose = poseEst.get();
swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(),
pose.timestampSeconds,
camera.curStdDevs);
}
}
}
/**
* Generates the estimated robot pose. Returns empty if:
* <ul>
Expand Down