From c148cf184ac3df2bf2f448228b486af49505d6ea Mon Sep 17 00:00:00 2001 From: Timothy Ellsworth Bowers Date: Tue, 3 Feb 2026 15:37:55 -0700 Subject: [PATCH 1/8] Add a pose buffer for vision measurements (#136) * Add vision examples from good teams * WIP: Getting 254's vision code to compile * FRC254's 2024 vision code builds! -- Next step is to mesh in the parts of the vision code I need for the circular vision buffer to the base vision subsystem. * WIP: Adding SPAM's vision code * SPAM / FRC180 vision now builds -- The vision subsystem from FRC180 / SPAM now builds within the 2486 code base. Next steps are to figure out which parts of the various vision combination schemes I want to use and hack together something! * Add 6328's 2025 drive-to-pose algorithm * Make cleaner diff against Az-RBSI * Update FRC254's vision to 2025 version * Fix rebase error * Clean non-ASCII characters * Compilable Vision with pose buffering -- Create a pose buffer and use timestamps to match vision values with those from odometry to reduce jitter. Also be able to define certain tags in whose location we trust more (e.g. HUB in 2026) in the event of field misshapenness. Weave the new Vision structure together with the drivetrain to allow for timestamped pose insertion into the estimator. 
--------- Co-authored-by: NutHouse coco-prog-3 --- src/main/java/frc/robot/Constants.java | 10 + src/main/java/frc/robot/Robot.java | 16 + src/main/java/frc/robot/RobotContainer.java | 89 +-- .../frc/robot/subsystems/drive/Drive.java | 267 +++++++-- .../frc/robot/subsystems/vision/Vision.java | 510 +++++++++++++----- .../frc/robot/subsystems/vision/VisionIO.java | 27 +- .../subsystems/vision/VisionIOLimelight.java | 10 +- .../vision/VisionIOPhotonVision.java | 131 +++-- .../vision/VisionIOPhotonVisionSim.java | 24 +- .../ConcurrentTimeInterpolatableBuffer.java | 164 ++++++ src/main/java/frc/robot/util/GeomUtil.java | 165 ------ 11 files changed, 939 insertions(+), 474 deletions(-) create mode 100644 src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java delete mode 100644 src/main/java/frc/robot/util/GeomUtil.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 26f25253..0b39dcc5 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -416,6 +416,16 @@ public static final class AutoConstants { /** Vision Constants (Assuming PhotonVision) ***************************** */ public static class VisionConstants { + public static final java.util.Set kTrustedTags = + java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags + + // Noise scaling factors (lower = more trusted) + public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight + public static final double kUntrustedTagStdDevScale = 1.3; // 30% less weight + + // Optional: if true, reject observations that contain no trusted tags + public static final boolean kRequireTrustedTag = false; + // AprilTag Identification Constants public static final double kAmbiguityThreshold = 0.4; public static final double kTargetLogTimeSecs = 0.1; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index 6d58c6d0..b9668ae3 100644 --- 
a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -19,6 +19,7 @@ import com.revrobotics.util.StatusLogger; import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.DriverStation.Alliance; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; @@ -47,6 +48,7 @@ public class Robot extends LoggedRobot { private Command m_autoCommandPathPlanner; private RobotContainer m_robotContainer; private Timer m_disabledTimer; + private static boolean isBlueAlliance = false; // Define simulation fields here private VisionSystemSim visionSim; @@ -135,9 +137,13 @@ public Robot() { /** TESTING VERSION OF ROBOTPERIODIC FOR OVERRUN SOURCES */ @Override public void robotPeriodic() { + + isBlueAlliance = DriverStation.getAlliance().orElse(Alliance.Blue) == Alliance.Blue; + final long t0 = System.nanoTime(); if (isReal()) { + // Switch thread to high priority to improve loop timing Threads.setCurrentThreadPriority(true, 99); } final long t1 = System.nanoTime(); @@ -186,6 +192,7 @@ public void autonomousInit() { CommandScheduler.getInstance().cancelAll(); m_robotContainer.getDrivebase().setMotorBrake(true); m_robotContainer.getDrivebase().resetHeadingController(); + m_robotContainer.getVision().resetPoseGate(Timer.getFPGATimestamp()); // TODO: Make sure Gyro inits here with whatever is in the path planning thingie switch (Constants.getAutoType()) { @@ -315,4 +322,13 @@ public void simulationPeriodic() { // Update sim each sim tick visionSim.update(m_robotContainer.getDrivebase().getPose()); } + + // Helper method to simplify checking if the robot is blue or red alliance + public static boolean isBlue() { + return isBlueAlliance; + } + + public static boolean isRed() { + return !isBlue(); + } } diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index c38f5781..9c5a167a 100644 --- a/src/main/java/frc/robot/RobotContainer.java 
+++ b/src/main/java/frc/robot/RobotContainer.java @@ -34,7 +34,6 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; -import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -75,6 +74,9 @@ /** This is the location for defining robot hardware, commands, and controller button bindings. */ public class RobotContainer { + private static final boolean USE_MAPLESIM = true; + public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); + /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -128,36 +130,7 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); - // Vision Factories - private VisionIO[] buildVisionIOsReal(Drive drive) { - return switch (Constants.getVisionType()) { - case PHOTON -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) - .toArray(VisionIO[]::new); - - case LIMELIGHT -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) - .toArray(VisionIO[]::new); - - case NONE -> new VisionIO[] {new VisionIO() {}}; - }; - } - - private static VisionIO[] buildVisionIOsSim(Drive drive) { - var cams = Constants.Cameras.ALL; - VisionIO[] ios = new VisionIO[cams.length]; - for (int i = 0; i < cams.length; i++) { - var cfg = cams[i]; - ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); - } - return ios; - } - - private VisionIO[] buildVisionIOsReplay() { - return new VisionIO[] {new VisionIO() {}}; - } + public static 
RobotContainer instance; /** * Constructor for the Robot Container. This container holds subsystems, opertator interface @@ -179,7 +152,9 @@ public RobotContainer() { m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -193,11 +168,9 @@ public RobotContainer() { // ---------------- Vision IOs (robot code) ---------------- var cams = frc.robot.Constants.Cameras.ALL; - - // If you keep Vision expecting exactly two cameras: - VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); - m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); - + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); m_accel = new Accelerometer(m_imu); // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- @@ -231,7 +204,8 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); + m_vision = + new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -493,6 +467,11 @@ public Drive getDrivebase() { return m_drivebase; } + /** Vision getter method for use with Robot.java */ + public Vision getVision() { + return m_vision; + } + /** * Set up the SysID routines from AdvantageKit * @@ -536,6 +515,40 @@ private void definesysIdRoutines() { } } + // Vision Factories + // Vision Factories (REAL) + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + 
java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {}; // recommended: no cameras + }; + } + + // Vision Factories (SIM) + private VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + // Vision Factories (REPLAY) + private VisionIO[] buildVisionIOsReplay() { + return new VisionIO[] {}; // simplest: Vision does nothing during replay + } + /** * Example Choreo auto command * diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index fada2245..46788c2b 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -48,10 +48,13 @@ import frc.robot.Constants.DrivebaseConstants; import frc.robot.Constants.RobotConstants; import frc.robot.subsystems.imu.Imu; +import frc.robot.util.ConcurrentTimeInterpolatableBuffer; import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; import frc.robot.util.RBSISubsystem; +import java.util.Optional; +import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.littletonrobotics.junction.AutoLogOutput; @@ -59,6 +62,16 @@ public class Drive extends RBSISubsystem { + // --- buffers for time-aligned pose + yaw + yawRate history --- + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + 
frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + static final Lock odometryLock = new ReentrantLock(); private final Imu imu; private final Module[] modules = new Module[4]; // FL, FR, BL, BR @@ -74,6 +87,7 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -218,64 +232,98 @@ public Drive(Imu imu) { @Override public void rbsiPeriodic() { odometryLock.lock(); + try { + // Get the IMU inputs + final var imuInputs = imu.getInputs(); + + // Stop modules & log empty setpoint states if disabled + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + } - // Get the IMU inputs - final var imuInputs = imu.getInputs(); // primitive inputs + // Module periodic updates, which drains queues this cycle + for (var module : modules) module.periodic(); - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } + // Feed historical samples into odometry if REAL robot + if (Constants.getMode() != Mode.SIM) { - // Module periodic updates, which drains queues this 
cycle - for (var module : modules) { - module.periodic(); - } + // ---- Gather per-module histories ---- + final double[][] perModuleTs = new double[4][]; + final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; + int unionCap = 0; + + for (int m = 0; m < 4; m++) { + perModuleTs[m] = modules[m].getOdometryTimestamps(); + perModulePos[m] = modules[m].getOdometryPositions(); + unionCap += perModuleTs[m].length; + } + + if (unionCap == 0) { + gyroDisconnectedAlert.set(!imuInputs.connected); + return; + } - // Feed historical samples into odometry if REAL robot - if (Constants.getMode() != Mode.SIM) { - final double[] sampleTimestamps = modules[0].getOdometryTimestamps(); - final int sampleCount = sampleTimestamps.length; - - // Reuse arrays to reduce GC (you likely already have lastModulePositions as a field) - final SwerveModulePosition[] modulePositions = new SwerveModulePosition[4]; - final SwerveModulePosition[] moduleDeltas = new SwerveModulePosition[4]; - - for (int i = 0; i < sampleCount; i++) { - for (int moduleIndex = 0; moduleIndex < 4; moduleIndex++) { - modulePositions[moduleIndex] = modules[moduleIndex].getOdometryPositions()[i]; - moduleDeltas[moduleIndex] = - new SwerveModulePosition( - modulePositions[moduleIndex].distanceMeters - - lastModulePositions[moduleIndex].distanceMeters, - modulePositions[moduleIndex].angle); - lastModulePositions[moduleIndex] = modulePositions[moduleIndex]; + // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- + // These timestamps are authoritative for yaw interpolation and yaw-rate gating. 
+ if (imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0) { + + final double[] yts = imuInputs.odometryYawTimestamps; + final double[] yaws = imuInputs.odometryYawPositionsRad; + + for (int i = 0; i < yts.length; i++) { + yawBuffer.addSample(yts[i], yaws[i]); + if (i > 0) { + double dt = yts[i] - yts[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + } + } + } + } else { + // fallback: buffer "now" yaw only + double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); } - // Pick yaw sample if available; otherwise fall back to current yaw - final double yawRad = - (imuInputs.connected - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawPositionsRad.length > i) - ? 
imuInputs.odometryYawPositionsRad[i] - : imuInputs.yawPositionRad; + // ---- Build union timeline ---- + final double[] unionTs = new double[Math.max(unionCap, 1)]; + final double EPS = 1e-4; // 0.1 ms + final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); + + // ---- Replay at union times ---- + for (int i = 0; i < unionN; i++) { + final double t = unionTs[i]; + + // Interpolate each module to union time + for (int m = 0; m < 4; m++) { + SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); + odomPositions[m] = p; + } - // Boundary conversion: PoseEstimator requires Rotation2d - final Rotation2d yaw = Rotation2d.fromRadians(yawRad); + // Interpolate yaw at union time + final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - // Apply to pose estimator - m_PoseEstimator.updateWithTime(sampleTimestamps[i], yaw, modulePositions); + m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // keep buffer in the same timebase as estimator + poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); + } + + Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); } - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - } - odometryLock.unlock(); + gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + } finally { + odometryLock.unlock(); + } } /** Simulation Periodic Method */ @@ -327,6 +375,8 @@ public void simulationPeriodic() { m_PoseEstimator.addVisionMeasurement(visionPose, visionTimestamp, visionStdDevs); } + poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); + // 7) Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); @@ -463,6 +513,37 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } + /** Returns 
interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } + } + + /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + if (t1 < t0) return OptionalDouble.empty(); + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; + } + return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + /** Returns the current odometry rotation. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -550,6 +631,7 @@ public double getMaxAngularAccelRadPerSecPerSec() { /** Resets the current odometry pose. */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the gyro based on alliance color */ @@ -559,21 +641,33 @@ public void zeroHeadingForAlliance() { ? 
Rotation2d.kZero : Rotation2d.k180deg); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the heading */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); + } + + // ---- Pose reset gate (vision + anything latency-sensitive) ---- + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + public long getPoseResetEpoch() { + return poseResetEpoch; } - /** Adds a new timestamped vision measurement. */ - public void addVisionMeasurement( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs) { - m_PoseEstimator.addVisionMeasurement( - visionRobotPoseMeters, timestampSeconds, visionMeasurementStdDevs); + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; + } + + private void markPoseReset(double fpgaNow) { + lastPoseResetTimestamp = fpgaNow; + poseResetEpoch++; + Logger.recordOutput("Drive/PoseResetEpoch", poseResetEpoch); + Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ @@ -670,4 +764,69 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } + + private static SwerveModulePosition interpolateModulePosition( + double[] ts, SwerveModulePosition[] samples, double t) { + + final int n = ts.length; + if (n == 0) return new SwerveModulePosition(); + + if (t <= ts[0]) return samples[0]; + if (t >= ts[n - 1]) return samples[n - 1]; + + int lo = 0, hi = n - 1; + while (lo < hi) { + int mid = (lo + hi) >>> 1; + if (ts[mid] < t) lo = mid + 1; + else hi = mid; + } + int i1 = lo; + int i0 = i1 - 1; + + double t0 = ts[i0]; + double t1 = ts[i1]; + if (t1 <= t0) return samples[i1]; + + double alpha = (t - t0) / (t1 - 
t0); + + double dist = + samples[i0].distanceMeters + + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; + + Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); + + return new SwerveModulePosition(dist, angle); + } + + private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { + int[] idx = new int[perModuleTs.length]; + int outN = 0; + + while (true) { + double minT = Double.POSITIVE_INFINITY; + boolean any = false; + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + if (idx[m] >= ts.length) continue; + any = true; + minT = Math.min(minT, ts[idx[m]]); + } + + if (!any) break; + + if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { + outUnion[outN++] = minT; + } + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { + idx[m]++; + } + } + } + + return outN; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 04e20417..908cf9c5 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,13 +1,19 @@ // Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First -// Copyright (c) 2024-2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage // -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.vision; @@ -18,183 +24,419 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.wpilibj.Alert; -import edu.wpi.first.wpilibj.Alert.AlertType; import frc.robot.Constants; import frc.robot.FieldConstants; +import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; import frc.robot.util.VirtualSubsystem; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Optional; +import java.util.OptionalDouble; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import org.littletonrobotics.junction.Logger; public class Vision extends VirtualSubsystem { + + @FunctionalInterface + public interface VisionConsumer { + void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + } + + private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} + private final VisionConsumer consumer; + private final Drive drive; + private long lastSeenPoseResetEpoch = -1; + private final VisionIO[] io; private final VisionIOInputsAutoLogged[] inputs; - private final Alert[] disconnectedAlerts; - // Camera configs (names, transforms, stddev multipliers, sim props) - private final Constants.Cameras.CameraConfig[] camConfigs; + private final 
Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; - // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- - // Summary buffers - private final ArrayList allTagPoses = new ArrayList<>(32); - private final ArrayList allRobotPoses = new ArrayList<>(64); - private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // --- Per-camera monotonic gate --- + private final double[] lastAcceptedTsPerCam; - // Per-camera buffers (reused each camera) - private final ArrayList tagPoses = new ArrayList<>(16); - private final ArrayList robotPoses = new ArrayList<>(32); - private final ArrayList robotPosesAccepted = new ArrayList<>(32); - private final ArrayList robotPosesRejected = new ArrayList<>(32); + // --- Pose reset gate --- + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - public Vision(VisionConsumer consumer, VisionIO... io) { + // --- Smoothing buffer (recent fused estimates) --- + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final double smoothWindowSec = 0.25; + private final int smoothMaxSize = 12; + + // --- Trusted tags configuration (swappable per event/field) --- + private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); + private volatile boolean requireTrustedTag = false; + + // Scale factors applied based on fraction of trusted tags in an observation + private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted + private volatile double untrustedTagStdDevScale = 1.40; // > 1 => less trusted + + // --- Optional 254-style yaw gate for single-tag --- + private volatile boolean enableSingleTagYawGate = true; + private volatile double yawGateLookbackSec = 0.30; + private volatile double yawGateLimitRadPerSec = 5.0; + + public Vision(Drive drive, VisionConsumer consumer, VisionIO... 
io) { + this.drive = drive; this.consumer = consumer; this.io = io; - this.camConfigs = Constants.Cameras.ALL; - - // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { inputs[i] = new VisionIOInputsAutoLogged(); } - // Initialize disconnected alerts - this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < io.length; i++) { - disconnectedAlerts[i] = - new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); - } + this.lastAcceptedTsPerCam = new double[io.length]; + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - // Log robot-to-camera transforms from the new camera config array - // (Only log as many as exist in BOTH configs and IOs) + // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); for (int i = 0; i < n; i++) { Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); } } - /** Returns the X angle to the best target, useful for simple servoing. */ - public Rotation2d getTargetX(int cameraIndex) { - return inputs[cameraIndex].latestTargetObservation.tx(); + // -------- Runtime configuration hooks -------- + + /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** Swap trusted tag set per event/field without redeploy. 
*/ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; } + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; + } + + // -------- Core periodic -------- + @Override public void rbsiPeriodic() { - // 1) Update inputs + process inputs first (keeps AK logs consistent) + + long epoch = drive.getPoseResetEpoch(); + if (epoch != lastSeenPoseResetEpoch) { + lastSeenPoseResetEpoch = epoch; + resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + Logger.recordOutput("Vision/PoseGateResetFromDrive", true); + } else { + Logger.recordOutput("Vision/PoseGateResetFromDrive", false); + } + + // 1) Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); - disconnectedAlerts[i].set(!inputs[i].connected); - } - - // 2) Clear summary buffers (reused) - allTagPoses.clear(); - allRobotPoses.clear(); - allRobotPosesAccepted.clear(); - allRobotPosesRejected.clear(); - - // 3) Process each camera - for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - // Clear per-camera buffers - tagPoses.clear(); - robotPoses.clear(); - robotPosesAccepted.clear(); - robotPosesRejected.clear(); - - // Add tag poses from ids - for (int tagId : inputs[cameraIndex].tagIds) { - var tagPose = FieldConstants.aprilTagLayout.getTagPose(tagId); - if (tagPose.isPresent()) { - tagPoses.add(tagPose.get()); - } - } + } - // Loop over pose observations - for (var observation : inputs[cameraIndex].poseObservations) { - // Reject rules - boolean 
rejectPose = - observation.tagCount() == 0 - || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) - || Math.abs(observation.pose().getZ()) > maxZError - || observation.pose().getX() < 0.0 - || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() - || observation.pose().getY() < 0.0 - || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - - // Log pose buckets - robotPoses.add(observation.pose()); - if (rejectPose) { - robotPosesRejected.add(observation.pose()); - } else { - robotPosesAccepted.add(observation.pose()); - } + // 2) Pick one best accepted estimate per camera for this loop + final ArrayList perCamAccepted = new ArrayList<>(io.length); + + for (int cam = 0; cam < io.length; cam++) { + int seen = 0; + int accepted = 0; + int rejected = 0; - if (rejectPose) { + TimedEstimate best = null; + double bestTrustScale = Double.NaN; + int bestTrustedCount = 0; + int bestTagCount = 0; + + for (var obs : inputs[cam].poseObservations) { + seen++; + + GateResult gate = passesHardGatesAndYawGate(cam, obs); + if (!gate.accepted) { + rejected++; continue; } - // Standard deviations - double stdDevFactor = - Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); - double linearStdDev = linearStdDevBaseline * stdDevFactor; - double angularStdDev = angularStdDevBaseline * stdDevFactor; - - if (observation.type() == PoseObservationType.MEGATAG_2) { - linearStdDev *= linearStdDevMegatag2Factor; - angularStdDev *= angularStdDevMegatag2Factor; + BuiltEstimate built = buildEstimate(cam, obs); + if (built == null) { + rejected++; + continue; } - // Apply per-camera multiplier from CameraConfig - if (cameraIndex < camConfigs.length) { - double k = camConfigs[cameraIndex].stdDevFactor(); - linearStdDev *= k; - angularStdDev *= k; + TimedEstimate est = built.estimate; + if (best == null || isBetter(est, best)) { + best = est; + bestTrustScale = built.trustScale; + bestTrustedCount = 
built.trustedCount; + bestTagCount = obs.tagCount(); } + } + + if (best != null) { + accepted++; + lastAcceptedTsPerCam[cam] = best.ts; + perCamAccepted.add(best); - consumer.accept( - observation.pose().toPose2d(), - observation.timestamp(), - VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } - // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPoses", robotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesAccepted", - robotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesRejected", - robotPosesRejected.toArray(new Pose3d[0])); - - // Aggregate summary - allTagPoses.addAll(tagPoses); - allRobotPoses.addAll(robotPoses); - allRobotPosesAccepted.addAll(robotPosesAccepted); - allRobotPosesRejected.addAll(robotPosesRejected); - } - - // 4) Summary logs - Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); - Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesAccepted", allRobotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesRejected", allRobotPosesRejected.toArray(new Pose3d[0])); + 
Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); + } + + if (perCamAccepted.isEmpty()) return; + + // 3) Fusion time is the newest timestamp among accepted per-camera samples + double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + if (!Double.isFinite(tF)) return; + + // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + if (fused == null) return; + + // 5) Smooth by fusing recent fused estimates (also aligned to tF) + pushFused(fused); + TimedEstimate smoothed = smoothAtTime(tF); + if (smoothed == null) return; + + // 6) Inject + consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + + Logger.recordOutput("Vision/FusedPose", fused.pose); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedTimestamp", tF); } - @FunctionalInterface - public static interface VisionConsumer { - void accept( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs); + // -------- Gating + scoring -------- + + private static final class GateResult { + final boolean accepted; + + GateResult(boolean accepted) { + this.accepted = accepted; + } + } + + private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { + final double ts = obs.timestamp(); + + // Monotonic per-camera time + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + + // Reject anything older than last pose reset + if (ts < lastPoseResetTimestamp) return new GateResult(false); + + // Must have tags + if (obs.tagCount() <= 0) return new GateResult(false); + + // Single-tag ambiguity gate + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + + // Z sanity + if 
(Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + + // Field bounds + Pose3d p = obs.pose(); + if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) + return new GateResult(false); + if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) + return new GateResult(false); + + // Optional 254-style yaw gate: only meaningful for single-tag + if (enableSingleTagYawGate && obs.tagCount() == 1) { + OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); + if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { + return new GateResult(false); + } + } + + return new GateResult(true); + } + + private static final class BuiltEstimate { + final TimedEstimate estimate; + final double trustScale; + final int trustedCount; + + BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + this.estimate = estimate; + this.trustScale = trustScale; + this.trustedCount = trustedCount; + } + } + + private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { + // Base uncertainty grows with distance^2 / tagCount (your 2486-style) + final double tagCount = Math.max(1, obs.tagCount()); + final double avgDist = Math.max(0.0, obs.averageTagDistance()); + final double distFactor = (avgDist * avgDist) / tagCount; + + final double camFactor = (cam < camConfigs.length) ? 
camConfigs[cam].stdDevFactor() : 1.0; + + double linearStdDev = linearStdDevBaseline * camFactor * distFactor; + double angularStdDev = angularStdDevBaseline * camFactor * distFactor; + + // MegaTag2 bonus if applicable + if (obs.type() == PoseObservationType.MEGATAG_2) { + linearStdDev *= linearStdDevMegatag2Factor; + angularStdDev *= angularStdDevMegatag2Factor; + } + + // Trusted tag blending + final Set kTrusted = trustedTags.get(); + int trustedCount = 0; + for (int id : obs.usedTagIds()) { + if (kTrusted.contains(id)) trustedCount++; + } + + if (requireTrustedTag && trustedCount == 0) { + return null; + } + + final int usedCount = obs.usedTagIds().size(); + final double fracTrusted = (usedCount == 0) ? 0.0 : ((double) trustedCount / usedCount); + + final double trustScale = + untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); + + linearStdDev *= trustScale; + angularStdDev *= trustScale; + + // Output logs for tuning + Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); + + return new BuiltEstimate( + new TimedEstimate( + obs.pose().toPose2d(), + obs.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), + trustScale, + trustedCount); + } + + private boolean isBetter(TimedEstimate a, TimedEstimate b) { + // Lower trace of stddev vector (x+y+theta) is better + double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); + double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + return ta < tb; + } + + // -------- Time alignment + fusion -------- + + private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); + for (var e : estimates) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) return null; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + return inverseVarianceFuse(aligned, tF); + } + + private Pose2d 
timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { + Optional odomAtTsOpt = drive.getPoseAtTime(ts); + Optional odomAtTFOpt = drive.getPoseAtTime(tF); + if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; + + Pose2d odomAtTs = odomAtTsOpt.get(); + Pose2d odomAtTF = odomAtTFOpt.get(); + + // Transform that takes odomAtTs -> odomAtTF + Transform2d ts_T_tf = odomAtTF.minus(odomAtTs); + + // Apply same motion to vision pose to bring it forward + return visionPoseAtTs.transformBy(ts_T_tf); + } + + private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF.isEmpty()) return null; + if (alignedAtTF.size() == 1) return alignedAtTF.get(0); + + double sumWx = 0, sumWy = 0, sumWth = 0; + double sumX = 0, sumY = 0; + double sumCos = 0, sumSin = 0; + + for (var e : alignedAtTF) { + double sx = e.stdDevs.get(0, 0); + double sy = e.stdDevs.get(1, 0); + double sth = e.stdDevs.get(2, 0); + + double vx = sx * sx; + double vy = sy * sy; + double vth = sth * sth; + + double wx = 1.0 / vx; + double wy = 1.0 / vy; + double wth = 1.0 / vth; + + sumWx += wx; + sumWy += wy; + sumWth += wth; + + sumX += e.pose.getX() * wx; + sumY += e.pose.getY() * wy; + + sumCos += e.pose.getRotation().getCos() * wth; + sumSin += e.pose.getRotation().getSin() * wth; + } + + Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + + Matrix fusedStd = + VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); + + return new TimedEstimate(fused, tF, fusedStd); + } + + // -------- Smoothing buffer -------- + + private void pushFused(TimedEstimate fused) { + fusedBuffer.addLast(fused); + + while (fusedBuffer.size() > smoothMaxSize) { + fusedBuffer.removeFirst(); + } + + // Trim by time window relative to newest + while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + fusedBuffer.removeFirst(); + } + } + + private TimedEstimate smoothAtTime(double tF) 
{ + if (fusedBuffer.isEmpty()) return null; + if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); + + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + for (var e : fusedBuffer) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) continue; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + + if (aligned.isEmpty()) return fusedBuffer.peekLast(); + return inverseVarianceFuse(aligned, tF); } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index c9ed8349..fad294d2 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -11,35 +11,44 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import java.util.Set; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { @AutoLog - public static class VisionIOInputs { + class VisionIOInputs { public boolean connected = false; + + /** Latest "servo to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + + /** Pose observations generated by the camera pipeline (each has its own timestamp) */ public PoseObservation[] poseObservations = new PoseObservation[0]; + + /** Union of tag IDs seen this loop (for logging/UI only) */ public int[] tagIds = new int[0]; } /** Represents the angle to a simple target, not used for pose estimation. */ - public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + record TargetObservation(Rotation2d tx, Rotation2d ty) {} /** Represents a robot pose sample used for pose estimation. 
*/ - public static record PoseObservation( - double timestamp, - Pose3d pose, - double ambiguity, + record PoseObservation( + double timestamp, // camera capture time (seconds, same timebase as FPGA) + Pose3d pose, // field->robot pose + double ambiguity, // single-tag ambiguity if available int tagCount, double averageTagDistance, - PoseObservationType type) {} + PoseObservationType type, + Set usedTagIds // immutable per observation + ) {} - public static enum PoseObservationType { + enum PoseObservationType { MEGATAG_1, MEGATAG_2, PHOTONVISION } - public default void updateInputs(VisionIOInputs inputs) {} + default void updateInputs(VisionIOInputs inputs) {} } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index a4b1eaf6..07c83c7b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -96,7 +96,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_1)); + PoseObservationType.MEGATAG_1, + + // Visible Tag IDs + tagIds)); } for (var rawSample : megatag2Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; @@ -121,7 +124,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_2)); + PoseObservationType.MEGATAG_2, + + // Visible Tag IDs + tagIds)); } // Save pose observations to inputs object diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index f8d9cdd9..14a2867b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -19,7 +19,7 @@ import java.util.Set; import org.photonvision.PhotonCamera; -/** IO implementation for 
real PhotonVision hardware. */ +/** IO implementation for real PhotonVision hardware (pose already solved by PV). */ public class VisionIOPhotonVision implements VisionIO { protected final PhotonCamera camera; protected final Transform3d robotToCamera; @@ -28,10 +28,10 @@ public class VisionIOPhotonVision implements VisionIO { * Creates a new VisionIOPhotonVision. * * @param name The configured name of the camera. - * @param rotationSupplier The 3D position of the camera relative to the robot. + * @param robotToCamera The 3D position of the camera relative to the robot. */ public VisionIOPhotonVision(String name, Transform3d robotToCamera) { - camera = new PhotonCamera(name); + this.camera = new PhotonCamera(name); this.robotToCamera = robotToCamera; } @@ -42,33 +42,30 @@ public void updateInputs(VisionIOInputs inputs) { // Cap the number of unread results processed per loop final int kMaxUnread = 5; - // Use HashSet/ArrayList to avoid LinkedList churn - Set tagIds = new HashSet<>(); - ArrayList poseObservations = new ArrayList<>(kMaxUnread); + final Set unionTagIds = new HashSet<>(); + final ArrayList poseObservations = new ArrayList<>(kMaxUnread); + + double newestTargetTs = Double.NEGATIVE_INFINITY; + Rotation2d bestYaw = Rotation2d.kZero; + Rotation2d bestPitch = Rotation2d.kZero; int processed = 0; for (var result : camera.getAllUnreadResults()) { // Hard cap - if (processed++ >= kMaxUnread) { - break; - } + if (processed++ >= kMaxUnread) break; + + final double ts = result.getTimestampSeconds(); - // Update latest target observation - if (result.hasTargets()) { - inputs.latestTargetObservation = - new TargetObservation( - Rotation2d.fromDegrees(result.getBestTarget().getYaw()), - Rotation2d.fromDegrees(result.getBestTarget().getPitch())); - } else { - inputs.latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + if (result.hasTargets() && ts >= newestTargetTs) { + newestTargetTs = ts; + bestYaw = 
Rotation2d.fromDegrees(result.getBestTarget().getYaw()); + bestPitch = Rotation2d.fromDegrees(result.getBestTarget().getPitch()); } - // Add pose observation - if (result.multitagResult.isPresent()) { // Multitag result - var multitagResult = result.multitagResult.get(); + if (result.multitagResult.isPresent()) { + var multitag = result.multitagResult.get(); - // Calculate robot pose - Transform3d fieldToCamera = multitagResult.estimatedPose.best; + Transform3d fieldToCamera = multitag.estimatedPose.best; Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); @@ -77,64 +74,62 @@ public void updateInputs(VisionIOInputs inputs) { for (var target : result.targets) { totalTagDistance += target.bestCameraToTarget.getTranslation().getNorm(); } - - // Add tag IDs - tagIds.addAll(multitagResult.fiducialIDsUsed); - - // Guard against divide-by-zero if targets is empty (defensive) double avgTagDistance = result.targets.isEmpty() ? 
0.0 : (totalTagDistance / result.targets.size()); - // Add observation + Set used = new HashSet<>(); + for (int id : multitag.fiducialIDsUsed) { + used.add(id); + unionTagIds.add(id); + } + poseObservations.add( new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - multitagResult.estimatedPose.ambiguity, // Ambiguity - multitagResult.fiducialIDsUsed.size(), // Tag count - avgTagDistance, // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - - } else if (!result.targets.isEmpty()) { // Single tag result + ts, + robotPose, + multitag.estimatedPose.ambiguity, + multitag.fiducialIDsUsed.size(), + avgTagDistance, + PoseObservationType.PHOTONVISION, + Set.copyOf(used))); + + } else if (!result.targets.isEmpty()) { var target = result.targets.get(0); - // Calculate robot pose var tagPose = aprilTagLayout.getTagPose(target.fiducialId); - if (tagPose.isPresent()) { - Transform3d fieldToTarget = - new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); - Transform3d cameraToTarget = target.bestCameraToTarget; - Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); - Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); - Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); - - // Add tag ID - tagIds.add((short) target.fiducialId); - - // Add observation - poseObservations.add( - new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - target.poseAmbiguity, // Ambiguity - 1, // Tag count - cameraToTarget.getTranslation().getNorm(), // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - } + if (tagPose.isEmpty()) continue; + + Transform3d fieldToTarget = + new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); + Transform3d cameraToTarget = target.bestCameraToTarget; + + Transform3d fieldToCamera = 
fieldToTarget.plus(cameraToTarget.inverse()); + Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); + Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); + + unionTagIds.add(target.fiducialId); + + poseObservations.add( + new PoseObservation( + ts, + robotPose, + target.poseAmbiguity, + 1, + cameraToTarget.getTranslation().getNorm(), + PoseObservationType.PHOTONVISION, + Set.of(target.fiducialId))); } } - // Save pose observations to inputs object - inputs.poseObservations = new PoseObservation[poseObservations.size()]; - for (int i = 0; i < poseObservations.size(); i++) { - inputs.poseObservations[i] = poseObservations.get(i); - } + inputs.latestTargetObservation = + (newestTargetTs > Double.NEGATIVE_INFINITY) + ? new TargetObservation(bestYaw, bestPitch) + : new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); - // Save tag IDs to inputs objects - inputs.tagIds = new int[tagIds.size()]; + inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + + inputs.tagIds = new int[unionTagIds.size()]; int i = 0; - for (int id : tagIds) { - inputs.tagIds[i++] = id; - } + for (int id : unionTagIds) inputs.tagIds[i++] = id; } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index fd4525b6..4e5761b9 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -22,34 +22,50 @@ public class VisionIOPhotonVisionSim extends VisionIOPhotonVision { private static VisionSystemSim visionSim; private final Supplier poseSupplier; + + @SuppressWarnings("unused") private final PhotonCameraSim cameraSim; /** * Creates a new VisionIOPhotonVisionSim. * - * @param name The name of the camera. - * @param poseSupplier Supplier for the robot pose to use in simulation. 
+ * @param name The name of the camera (PhotonVision camera name). + * @param robotToCamera Camera pose relative to robot frame. + * @param poseSupplier Supplier for the robot pose (field->robot) to use in simulation. */ public VisionIOPhotonVisionSim( String name, Transform3d robotToCamera, Supplier<Pose2d> poseSupplier) { super(name, robotToCamera); this.poseSupplier = poseSupplier; - // Initialize vision sim + // Initialize VisionSystemSim once for all cameras if (visionSim == null) { visionSim = new VisionSystemSim("main"); visionSim.addAprilTags(FieldConstants.aprilTagLayout); } - // Add sim camera + // Camera properties: + // - If you have per-camera SimCameraProperties in Constants, pass them here instead. + // - Otherwise keep the default and tune later. var cameraProperties = new SimCameraProperties(); + + // Recommended defaults (feel free to tune) + // cameraProperties.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); + // cameraProperties.setFPS(20); + // cameraProperties.setAvgLatencyMs(35); + // cameraProperties.setLatencyStdDevMs(5); + cameraSim = new PhotonCameraSim(camera, cameraProperties); visionSim.addCamera(cameraSim, robotToCamera); } @Override public void updateInputs(VisionIOInputs inputs) { + // NOTE: This updates the sim world every time a sim camera is polled. + // That's fine (fast enough); to update once per loop instead, move this call to a single shared per-loop update. 
visionSim.update(poseSupplier.get()); + + // Then pull results like normal (and emit PoseObservation + usedTagIds sets) + super.updateInputs(inputs); + } +} diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java new file mode 100644 index 00000000..df46efd2 --- /dev/null +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -0,0 +1,164 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.interpolation.Interpolatable; +import edu.wpi.first.math.interpolation.Interpolator; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; + +/** + * A concurrent version of WPILib's TimeInterpolatableBuffer class to avoid the need for explicit + * synchronization in our robot code. + * + * @param <T> The type stored in this buffer. 
+ */ +public final class ConcurrentTimeInterpolatableBuffer { + private final double m_historySize; + private final Interpolator m_interpolatingFunc; + private final ConcurrentNavigableMap m_pastSnapshots = new ConcurrentSkipListMap<>(); + + private ConcurrentTimeInterpolatableBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + this.m_historySize = historySizeSeconds; + this.m_interpolatingFunc = interpolateFunction; + } + + /** + * Create a new TimeInterpolatableBuffer. + * + * @param interpolateFunction The function used to interpolate between values. + * @param historySizeSeconds The history size of the buffer. + * @param The type of data to store in the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static ConcurrentTimeInterpolatableBuffer createBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(interpolateFunction, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer that stores a given subclass of {@link Interpolatable}. + * + * @param historySizeSeconds The history size of the buffer. + * @param The type of {@link Interpolatable} to store in the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static > ConcurrentTimeInterpolatableBuffer createBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>( + Interpolatable::interpolate, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer to store Double values. + * + * @param historySizeSeconds The history size of the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static ConcurrentTimeInterpolatableBuffer createDoubleBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(MathUtil::interpolate, historySizeSeconds); + } + + /** + * Add a sample to the buffer. + * + * @param timeSeconds The timestamp of the sample. + * @param sample The sample object. 
+ */ + public void addSample(double timeSeconds, T sample) { + m_pastSnapshots.put(timeSeconds, sample); + cleanUp(timeSeconds); + } + + /** + * Removes samples older than our current history size. + * + * @param time The current timestamp. + */ + private void cleanUp(double time) { + m_pastSnapshots.headMap(time - m_historySize, false).clear(); + } + + /** Clear all old samples. */ + public void clear() { + m_pastSnapshots.clear(); + } + + /** + * Sample the buffer at the given time. If the buffer is empty, an empty Optional is returned. + * + * @param timeSeconds The time at which to sample. + * @return The interpolated value at that timestamp or an empty Optional. + */ + public Optional getSample(double timeSeconds) { + if (m_pastSnapshots.isEmpty()) { + return Optional.empty(); + } + + // Special case for when the requested time is the same as a sample + var nowEntry = m_pastSnapshots.get(timeSeconds); + if (nowEntry != null) { + return Optional.of(nowEntry); + } + + var bottomBound = m_pastSnapshots.floorEntry(timeSeconds); + var topBound = m_pastSnapshots.ceilingEntry(timeSeconds); + + // Return null if neither sample exists, and the opposite bound if the other is + // null + if (topBound == null && bottomBound == null) { + return Optional.empty(); + } else if (topBound == null) { + return Optional.of(bottomBound.getValue()); + } else if (bottomBound == null) { + return Optional.of(topBound.getValue()); + } else { + // Otherwise, interpolate. Because T is between [0, 1], we want the ratio of + // (the difference + // between the current time and bottom bound) and (the difference between top + // and bottom + // bounds). + return Optional.of( + m_interpolatingFunc.interpolate( + bottomBound.getValue(), + topBound.getValue(), + (timeSeconds - bottomBound.getKey()) / (topBound.getKey() - bottomBound.getKey()))); + } + } + + public Entry getLatest() { + return m_pastSnapshots.lastEntry(); + } + + /** + * Grant access to the internal sample buffer. 
Used in Pose Estimation to replay odometry inputs + * stored within this buffer. + * + * @return The internal sample buffer. + */ + public ConcurrentNavigableMap getInternalBuffer() { + return m_pastSnapshots; + } +} diff --git a/src/main/java/frc/robot/util/GeomUtil.java b/src/main/java/frc/robot/util/GeomUtil.java deleted file mode 100644 index f77c8b2f..00000000 --- a/src/main/java/frc/robot/util/GeomUtil.java +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) 2024-2026 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Twist2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; - -/** Geometry utilities for working with translations, rotations, transforms, and poses. 
*/ -public class GeomUtil { - /** - * Creates a pure translating transform - * - * @param translation The translation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Translation2d translation) { - return new Transform2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure translating transform - * - * @param x The x coordinate of the translation - * @param y The y coordinate of the translation - * @return The resulting transform - */ - public static Transform2d toTransform2d(double x, double y) { - return new Transform2d(x, y, Rotation2d.kZero); - } - - /** - * Creates a pure rotating transform - * - * @param rotation The rotation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Rotation2d rotation) { - return new Transform2d(Translation2d.kZero, rotation); - } - - /** - * Converts a Pose2d to a Transform2d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform2d toTransform2d(Pose2d pose) { - return new Transform2d(pose.getTranslation(), pose.getRotation()); - } - - public static Pose2d inverse(Pose2d pose) { - Rotation2d rotationInverse = pose.getRotation().unaryMinus(); - return new Pose2d( - pose.getTranslation().unaryMinus().rotateBy(rotationInverse), rotationInverse); - } - - /** - * Converts a Transform2d to a Pose2d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose2d toPose2d(Transform2d transform) { - return new Pose2d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Creates a pure translated pose - * - * @param translation The translation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Translation2d translation) { 
- return new Pose2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure rotated pose - * - * @param rotation The rotation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Rotation2d rotation) { - return new Pose2d(Translation2d.kZero, rotation); - } - - /** - * Multiplies a twist by a scaling factor - * - * @param twist The twist to multiply - * @param factor The scaling factor for the twist components - * @return The new twist - */ - public static Twist2d multiply(Twist2d twist, double factor) { - return new Twist2d(twist.dx * factor, twist.dy * factor, twist.dtheta * factor); - } - - /** - * Converts a Pose3d to a Transform3d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform3d toTransform3d(Pose3d pose) { - return new Transform3d(pose.getTranslation(), pose.getRotation()); - } - - /** - * Converts a Transform3d to a Pose3d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose3d toPose3d(Transform3d transform) { - return new Pose3d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Converts a ChassisSpeeds to a Twist2d by extracting two dimensions (Y and Z). chain - * - * @param speeds The original translation - * @return The resulting translation - */ - public static Twist2d toTwist2d(ChassisSpeeds speeds) { - return new Twist2d( - speeds.vxMetersPerSecond, speeds.vyMetersPerSecond, speeds.omegaRadiansPerSecond); - } - - /** - * Creates a new pose from an existing one using a different translation value. 
- * - * @param pose The original pose - * @param translation The new translation to use - * @return The new pose with the new translation and original rotation - */ - public static Pose2d withTranslation(Pose2d pose, Translation2d translation) { - return new Pose2d(translation, pose.getRotation()); - } - - /** - * Creates a new pose from an existing one using a different rotation value. - * - * @param pose The original pose - * @param rotation The new rotation to use - * @return The new pose with the original translation and new rotation - */ - public static Pose2d withRotation(Pose2d pose, Rotation2d rotation) { - return new Pose2d(pose.getTranslation(), rotation); - } -} From 330b07d967196e7d1cd82b5ba112682add402def Mon Sep 17 00:00:00 2001 From: Timothy Ellsworth Bowers Date: Tue, 3 Feb 2026 18:03:44 -0700 Subject: [PATCH 2/8] Code Hygiene (#139) * Clean up the Accelerometer / IMU subsystems -- Clean up the code, but also specifically allow the RIO or IMU to be "on its side" (i.e., Z is not necessarily "up"). 
* Clean up Drive.java * Clean the Moudule modules a bit * Clean non-ASCII characters * Reduce drivebase periodic cycle time by using primitives * Clean up the Vision module --- src/main/java/frc/robot/Constants.java | 25 +- src/main/java/frc/robot/RobotContainer.java | 23 +- .../accelerometer/Accelerometer.java | 82 +--- .../frc/robot/subsystems/drive/Drive.java | 453 ++++++++++-------- .../subsystems/drive/DriveSimPhysics.java | 5 +- .../frc/robot/subsystems/drive/Module.java | 38 +- .../frc/robot/subsystems/drive/ModuleIO.java | 1 + .../subsystems/drive/ModuleIOTalonFX.java | 74 ++- .../subsystems/drive/SwerveConstants.java | 5 +- .../java/frc/robot/subsystems/imu/Imu.java | 12 +- .../java/frc/robot/subsystems/imu/ImuIO.java | 11 +- .../frc/robot/subsystems/imu/ImuIONavX.java | 53 +- .../robot/subsystems/imu/ImuIOPigeon2.java | 44 +- .../frc/robot/subsystems/imu/ImuIOSim.java | 26 +- .../vision/CameraSweepEvaluator.java | 6 +- .../frc/robot/subsystems/vision/Vision.java | 121 +++-- .../frc/robot/subsystems/vision/VisionIO.java | 2 +- .../vision/VisionIOPhotonVisionSim.java | 2 +- 18 files changed, 519 insertions(+), 464 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 0b39dcc5..7d30eaed 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -45,6 +45,7 @@ import frc.robot.util.RBSIEnum.SwerveType; import frc.robot.util.RBSIEnum.VisionType; import frc.robot.util.RobotDeviceId; +import java.util.Set; import org.photonvision.simulation.SimCameraProperties; import swervelib.math.Matter; @@ -126,19 +127,18 @@ public static final class RobotConstants { // Insert here the orientation (CCW == +) of the Rio and IMU from the robot // An angle of "0." means the x-y-z markings on the device match the robot's intrinsic reference // frame. 
- // NOTE: It is assumed that both the Rio and the IMU are mounted such that +Z is UP - public static final Rotation2d kRioOrientation = + public static final Rotation3d kRioOrientation = switch (getRobot()) { - case COMPBOT -> Rotation2d.fromDegrees(-90.); - case DEVBOT -> Rotation2d.fromDegrees(0.); - default -> Rotation2d.fromDegrees(0.); + case COMPBOT -> new Rotation3d(0, 0, -90); + case DEVBOT -> Rotation3d.kZero; + default -> Rotation3d.kZero; }; // IMU can be one of Pigeon2 or NavX - public static final Rotation2d kIMUOrientation = + public static final Rotation3d kIMUOrientation = switch (getRobot()) { - case COMPBOT -> Rotation2d.fromDegrees(0.); - case DEVBOT -> Rotation2d.fromDegrees(0.); - default -> Rotation2d.fromDegrees(0.); + case COMPBOT -> Rotation3d.kZero; + case DEVBOT -> Rotation3d.kZero; + default -> Rotation3d.kZero; }; } @@ -324,6 +324,9 @@ public static final class DrivebaseConstants { SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; public static final double kSteerP = 400.0; public static final double kSteerD = 20.0; + + // Odometry-related constants + public static final double kHistorySize = 1.5; // seconds } /************************************************************************* */ @@ -416,8 +419,8 @@ public static final class AutoConstants { /** Vision Constants (Assuming PhotonVision) ***************************** */ public static class VisionConstants { - public static final java.util.Set kTrustedTags = - java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags + public static final Set kTrustedTags = + Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags // Noise scaling factors (lower = more trusted) public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 9c5a167a..521e09d5 100644 --- 
a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -3,9 +3,15 @@ // Copyright (c) 2021-2026 Littleton Robotics // http://github.com/Mechanical-Advantage // -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. // // Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of @@ -34,6 +40,7 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; +import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -98,6 +105,7 @@ public class RobotContainer { // These are "Virtual Subsystems" that report information but have no motors private final Imu m_imu; + private final Vision m_vision; @SuppressWarnings("unused") private final Accelerometer m_accel; @@ -105,9 +113,6 @@ public class RobotContainer { @SuppressWarnings("unused") private final RBSIPowerMonitor m_power; - @SuppressWarnings("unused") - private final Vision m_vision; - @SuppressWarnings("unused") private List canHealth; @@ -167,7 +172,7 @@ public RobotContainer() { m_flywheel = new Flywheel(new FlywheelIOSim()); // ---------------- Vision IOs (robot code) ---------------- - var cams 
= frc.robot.Constants.Cameras.ALL; + var cams = Cameras.ALL; m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); @@ -520,12 +525,12 @@ private void definesysIdRoutines() { private VisionIO[] buildVisionIOsReal(Drive drive) { return switch (Constants.getVisionType()) { case PHOTON -> - java.util.Arrays.stream(Constants.Cameras.ALL) + Arrays.stream(Constants.Cameras.ALL) .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) .toArray(VisionIO[]::new); case LIMELIGHT -> - java.util.Arrays.stream(Constants.Cameras.ALL) + Arrays.stream(Constants.Cameras.ALL) .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) .toArray(VisionIO[]::new); diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index 3bfc5c1f..fcb7dd6c 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -13,6 +13,7 @@ package frc.robot.subsystems.accelerometer; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.wpilibj.Timer; import frc.robot.Constants; import frc.robot.Constants.RobotConstants; @@ -26,30 +27,20 @@ *

This virtual subsystem pulls the acceleration values from both the RoboRIO and the swerve's * IMU (either Pigeon2 or NavX) and logs them to both AdvantageKitd. In addition to the * accelerations, the jerk (a-dot or x-tripple-dot) is computed from the delta accelerations. - * - *

Primitive-only hot path: no WPILib geometry objects or Units objects. */ public class Accelerometer extends VirtualSubsystem { + + // Gravitational acceleration private static final double G_TO_MPS2 = 9.80665; + // Define hardware interfaces private final RioAccelIO rio; private final RioAccelIO.Inputs rioInputs = new RioAccelIO.Inputs(); private final Imu imu; - // Precompute yaw-only rotation terms - private static final double rioCos = Math.cos(RobotConstants.kRioOrientation.getRadians()); - private static final double rioSin = Math.sin(RobotConstants.kRioOrientation.getRadians()); - private static final double imuCos = Math.cos(RobotConstants.kIMUOrientation.getRadians()); - private static final double imuSin = Math.sin(RobotConstants.kIMUOrientation.getRadians()); - - // Previous Rio accel (m/s^2) - private double prevRioAx = 0.0, prevRioAy = 0.0, prevRioAz = 0.0; - - // Reusable arrays for logging - private final double[] rioAccelArr = new double[3]; - private final double[] rioJerkArr = new double[3]; - private final double[] imuAccelArr = new double[3]; - private final double[] imuJerkArr = new double[3]; + // Variables needed during the periodic + private Translation3d rawRio, rioAcc, rioJerk, imuAcc, imuJerk; + private Translation3d prevRioAcc = Translation3d.kZero; // Log decimation private int loopCount = 0; @@ -74,58 +65,27 @@ public void rbsiPeriodic() { rio.updateInputs(rioInputs); // Compute RIO accelerations and jerks - final double rawX = rioInputs.xG; - final double rawY = rioInputs.yG; - final double rawZ = rioInputs.zG; - - final double rioAx = (rioCos * rawX - rioSin * rawY) * G_TO_MPS2; - final double rioAy = (rioSin * rawX + rioCos * rawY) * G_TO_MPS2; - final double rioAz = rawZ * G_TO_MPS2; - - final double rioJx = (rioAx - prevRioAx) / Constants.loopPeriodSecs; - final double rioJy = (rioAy - prevRioAy) / Constants.loopPeriodSecs; - final double rioJz = (rioAz - prevRioAz) / Constants.loopPeriodSecs; + rawRio = new 
Translation3d(rioInputs.xG, rioInputs.yG, rioInputs.zG); + rioAcc = rawRio.rotateBy(RobotConstants.kRioOrientation).times(G_TO_MPS2); // Acceleration from previous loop - prevRioAx = rioAx; - prevRioAy = rioAy; - prevRioAz = rioAz; - - // IMU rotate is also compute-only (already primitives) - final double imuAx = (imuCos * imuInputs.linearAccelX - imuSin * imuInputs.linearAccelY); - final double imuAy = (imuSin * imuInputs.linearAccelX + imuCos * imuInputs.linearAccelY); - final double imuAz = imuInputs.linearAccelZ; - - final double imuJx = (imuCos * imuInputs.jerkX - imuSin * imuInputs.jerkY); - final double imuJy = (imuSin * imuInputs.jerkX + imuCos * imuInputs.jerkY); - final double imuJz = imuInputs.jerkZ; - - // Fill accel arrays (still math) - rioAccelArr[0] = rioAx; - rioAccelArr[1] = rioAy; - rioAccelArr[2] = rioAz; - imuAccelArr[0] = imuAx; - imuAccelArr[1] = imuAy; - imuAccelArr[2] = imuAz; + prevRioAcc = rioAcc; - final boolean doHeavyLogs = (++loopCount >= LOG_EVERY_N); - if (doHeavyLogs) { - loopCount = 0; - rioJerkArr[0] = rioJx; - rioJerkArr[1] = rioJy; - rioJerkArr[2] = rioJz; - imuJerkArr[0] = imuJx; - imuJerkArr[1] = imuJy; - imuJerkArr[2] = imuJz; - } + // IMU accelerations and jerks + imuAcc = imuInputs.linearAccel.rotateBy(RobotConstants.kIMUOrientation); // Logging - Logger.recordOutput("Accel/Rio/Accel_mps2", rioAccelArr); - Logger.recordOutput("Accel/IMU/Accel_mps2", imuAccelArr); + Logger.recordOutput("Accel/Rio/Accel_mps2", rioAcc); + Logger.recordOutput("Accel/IMU/Accel_mps2", imuAcc); + // Every N loops, compute and log the Jerk + final boolean doHeavyLogs = (++loopCount >= LOG_EVERY_N); if (doHeavyLogs) { - Logger.recordOutput("Accel/Rio/Jerk_mps3", rioJerkArr); - Logger.recordOutput("Accel/IMU/Jerk_mps3", imuJerkArr); + loopCount = 0; + rioJerk = rioAcc.minus(prevRioAcc).div(Constants.loopPeriodSecs); + imuJerk = imuInputs.linearJerk.rotateBy(RobotConstants.kIMUOrientation); + Logger.recordOutput("Accel/Rio/Jerk_mps3", rioJerk); + 
Logger.recordOutput("Accel/IMU/Jerk_mps3", imuJerk); final double[] ts = imuInputs.odometryYawTimestamps; if (ts.length > 0) { diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 46788c2b..aeb71e65 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -60,25 +60,33 @@ import org.littletonrobotics.junction.AutoLogOutput; import org.littletonrobotics.junction.Logger; +/** + * Drive subsystem (RBSISubsystem) + * + *

The Drive subsystem controls the individual swerve Modules and owns the odometry of the robot. + * The odometry is updated from both the swerve modules and (optionally) the vision subsystem. + */ public class Drive extends RBSISubsystem { - // --- buffers for time-aligned pose + yaw + yawRate history --- - private final ConcurrentTimeInterpolatableBuffer poseBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + // Declare Hardware + private final Imu imu; + private final Module[] modules = new Module[4]; // FL, FR, BL, BR + private final SysIdRoutine sysId; + // Buffers for necessary things + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + ConcurrentTimeInterpolatableBuffer.createBuffer(DrivebaseConstants.kHistorySize); private final ConcurrentTimeInterpolatableBuffer yawBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); - + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(DrivebaseConstants.kHistorySize); private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = - frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(DrivebaseConstants.kHistorySize); - static final Lock odometryLock = new ReentrantLock(); - private final Imu imu; - private final Module[] modules = new Module[4]; // FL, FR, BL, BR - private final SysIdRoutine sysId; + // Declare an alert private final Alert gyroDisconnectedAlert = new Alert("Disconnected gyro, using kinematics as fallback.", AlertType.kError); + // Declare odometry and pose-related variables + static final Lock odometryLock = new ReentrantLock(); private SwerveDriveKinematics kinematics = new SwerveDriveKinematics(getModuleTranslations()); private SwerveModulePosition[] lastModulePositions = // For delta tracking new SwerveModulePosition[] { @@ -87,15 +95,24 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; - 
private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + private final SwerveModulePosition[] odomPositions = { + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition(), + new SwerveModulePosition() + }; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); + // Declare PID controller and siumulation physics private ProfiledPIDController angleController; - private DriveSimPhysics simPhysics; - // Constructor + // Pose reset gate (vision + anything latency-sensitive) + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + /** Constructor */ public Drive(Imu imu) { this.imu = imu; @@ -151,8 +168,8 @@ public Drive(Imu imu) { default: throw new RuntimeException("Invalid Swerve Drive Type"); } - // Start odometry thread (for the real robot) + // Start odometry thread (for the real robot) PhoenixOdometryThread.getInstance().start(); } else { @@ -211,12 +228,16 @@ public Drive(Imu imu) { break; case CHOREO: - // TODO: Probably need to add something here for Choreo autonomous path building + // TODO: If your team is using Choreo, you'll know what to do here... 
+ break; + + case MANUAL: + // Nothing to be done for MANUAL; may just use AutoPilot break; default: } - // Configure SysId + // Configure SysId for drivebase characterization sysId = new SysIdRoutine( new SysIdRoutine.Config( @@ -228,126 +249,188 @@ public Drive(Imu imu) { (voltage) -> runCharacterization(voltage.in(Volts)), null, this)); } + /************************************************************************* */ /** Periodic function that is called each robot cycle by the command scheduler */ @Override public void rbsiPeriodic() { odometryLock.lock(); try { - // Get the IMU inputs + // Ensure IMU inputs are fresh for this cycle final var imuInputs = imu.getInputs(); // Stop modules & log empty setpoint states if disabled if (DriverStation.isDisabled()) { - for (var module : modules) module.stop(); + for (var module : modules) { + module.stop(); + } Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } - // Module periodic updates, which drains queues this cycle - for (var module : modules) module.periodic(); + // Drain per-module odometry queues for this cycle + for (var module : modules) { + module.periodic(); + } - // Feed historical samples into odometry if REAL robot + // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- if (Constants.getMode() != Mode.SIM) { + final double[] ts = modules[0].getOdometryTimestamps(); + final int n = (ts == null) ? 
0 : ts.length; - // ---- Gather per-module histories ---- - final double[][] perModuleTs = new double[4][]; - final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; - int unionCap = 0; - + // Cache per-module histories ONCE (avoid repeated getters in the loop) + final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; for (int m = 0; m < 4; m++) { - perModuleTs[m] = modules[m].getOdometryTimestamps(); - perModulePos[m] = modules[m].getOdometryPositions(); - unionCap += perModuleTs[m].length; + modHist[m] = modules[m].getOdometryPositions(); } - if (unionCap == 0) { + // Determine yaw queue availability + final boolean hasYawQueue = + imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length + == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0; + + final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // If we have no module samples, still keep yaw buffers “alive” for gating callers + if (n == 0) { + final double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + gyroDisconnectedAlert.set(!imuInputs.connected); return; } - // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- - // These timestamps are authoritative for yaw interpolation and yaw-rate gating. 
- if (imuInputs.connected - && imuInputs.odometryYawTimestamps != null - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length - && imuInputs.odometryYawTimestamps.length > 0) { - - final double[] yts = imuInputs.odometryYawTimestamps; - final double[] yaws = imuInputs.odometryYawPositionsRad; - - for (int i = 0; i < yts.length; i++) { - yawBuffer.addSample(yts[i], yaws[i]); - if (i > 0) { - double dt = yts[i] - yts[i - 1]; + // Decide whether yaw queue is index-aligned with module[0] timestamps. + // We only trust index alignment if BOTH: + // - yaw has at least n samples + // - yawTs[i] ~= ts[i] for i in range (tight epsilon) + boolean yawIndexAligned = false; + if (hasYawQueue && yawTs.length >= n) { + yawIndexAligned = true; + final double eps = 1e-3; // 1 ms + for (int i = 0; i < n; i++) { + if (Math.abs(yawTs[i] - ts[i]) > eps) { + yawIndexAligned = false; + break; + } + } + } + + // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. 
+ if (hasYawQueue && !yawIndexAligned) { + for (int k = 0; k < yawTs.length; k++) { + yawBuffer.addSample(yawTs[k], yawPos[k]); + if (k > 0) { + final double dt = yawTs[k] - yawTs[k - 1]; if (dt > 1e-6) { - yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); } } } - } else { - // fallback: buffer "now" yaw only - double now = Timer.getFPGATimestamp(); + } + + // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) + if (!hasYawQueue) { + final double now = Timer.getFPGATimestamp(); yawBuffer.addSample(now, imuInputs.yawPositionRad); yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); } - // ---- Build union timeline ---- - final double[] unionTs = new double[Math.max(unionCap, 1)]; - final double EPS = 1e-4; // 0.1 ms - final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); - - // ---- Replay at union times ---- - for (int i = 0; i < unionN; i++) { - final double t = unionTs[i]; + // Replay each odometry sample + for (int i = 0; i < n; i++) { + final double t = ts[i]; - // Interpolate each module to union time + // Build module positions at this sample index (clamp defensively) for (int m = 0; m < 4; m++) { - SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); - odomPositions[m] = p; + final SwerveModulePosition[] hist = modHist[m]; + if (hist == null || hist.length == 0) { + odomPositions[m] = modules[m].getPosition(); + } else if (i < hist.length) { + odomPositions[m] = hist[i]; + } else { + odomPositions[m] = hist[hist.length - 1]; + } } - // Interpolate yaw at union time - final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + // Determine yaw at this timestamp + double yawRad = imuInputs.yawPositionRad; // fallback + + if (hasYawQueue) { + if (yawIndexAligned) { + yawRad = yawPos[i]; + + // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) + 
yawBuffer.addSample(t, yawRad); + if (i > 0) { + final double dt = yawTs[i] - yawTs[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + } + } + } else { + // yawBuffer was pre-filled above; interpolate here + yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + } + } + // Feed estimator at this historical timestamp m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); - // keep buffer in the same timebase as estimator + // Maintain pose history in SAME timebase as estimator poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); } Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); + gyroDisconnectedAlert.set(!imuInputs.connected); + return; } - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + // SIMULATION: Keep sim pose buffer time-aligned, too + double now = Timer.getFPGATimestamp(); + poseBuffer.addSample(now, simPhysics.getPose()); + yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + + Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + gyroDisconnectedAlert.set(false); } finally { odometryLock.unlock(); } } - /** Simulation Periodic Method */ + /** + * Simulation Periodic Method + * + *

This function runs only for simulation, but does similar processing to the REAL periodic + * function. Instead of reading back what the modules actually say, use physics to predict where + * the module would have gone. + */ @Override public void simulationPeriodic() { final double dt = Constants.loopPeriodSecs; - // 1) Advance module wheel physics + // Advance module wheel physics for (int i = 0; i < modules.length; i++) { modules[i].simulationPeriodic(); } - // 2) Get module states from modules (authoritative) - NO STREAMS + // Get module states from modules final SwerveModuleState[] moduleStates = new SwerveModuleState[modules.length]; for (int i = 0; i < modules.length; i++) { moduleStates[i] = modules[i].getState(); } - // 3) Update SIM physics (linear + angular) + // Update SIM physics (linear & angular motion of the robot) simPhysics.update(moduleStates, dt); - // 4) Feed IMU from authoritative physics (primitive-only boundary) - final double yawRad = - simPhysics.getYaw().getRadians(); // or simPhysics.getYawRad() if you have it + // Feed the simulated IMU from authoritative physics + final double yawRad = simPhysics.getYaw().getRadians(); final double omegaRadPerSec = simPhysics.getOmegaRadPerSec(); final double ax = simPhysics.getLinearAccel().getX(); @@ -357,17 +440,15 @@ public void simulationPeriodic() { imu.simulationSetOmegaRadPerSec(omegaRadPerSec); imu.simulationSetLinearAccelMps2(ax, ay, 0.0); - // 5) Feed PoseEstimator with authoritative yaw and module positions - // (PoseEstimator still wants objects -> boundary conversion stays here) + // Feed PoseEstimator with authoritative yaw and module positions final SwerveModulePosition[] modulePositions = new SwerveModulePosition[modules.length]; for (int i = 0; i < modules.length; i++) { modulePositions[i] = modules[i].getPosition(); } - m_PoseEstimator.resetPosition( Rotation2d.fromRadians(yawRad), modulePositions, simPhysics.getPose()); - // 6) Optional: inject vision measurement in SIM + // If 
simulated vision available, inject vision measurement if (simulatedVisionAvailable) { final Pose2d visionPose = getSimulatedVisionPose(); final double visionTimestamp = Timer.getFPGATimestamp(); @@ -377,7 +458,7 @@ public void simulationPeriodic() { poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); - // 7) Logging + // Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); Logger.recordOutput("Sim/OmegaRadPerSec", simPhysics.getOmegaRadPerSec()); @@ -399,7 +480,7 @@ public void setMotorBrake(boolean brake) { } } - /** Stops the drive. */ + /** Stop the drive. */ public void stop() { runVelocity(new ChassisSpeeds()); } @@ -513,37 +594,6 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } - /** Returns interpolated odometry pose at a given timestamp. */ - public Optional getPoseAtTime(double timestampSeconds) { - return poseBuffer.getSample(timestampSeconds); - } - - /** Adds a vision measurement safely into the PoseEstimator. */ - public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { - odometryLock.lock(); - try { - m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); - } finally { - odometryLock.unlock(); - } - } - - /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). */ - public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { - if (t1 < t0) return OptionalDouble.empty(); - var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); - if (sub.isEmpty()) return OptionalDouble.empty(); - - double maxAbs = 0.0; - boolean any = false; - for (double v : sub.values()) { - any = true; - double a = Math.abs(v); - if (a > maxAbs) maxAbs = a; - } - return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); - } - /** Returns the current odometry rotation. 
*/ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -553,27 +603,8 @@ public Rotation2d getHeading() { return imu.getYaw(); } - /** Returns an array of module translations. */ - public static Translation2d[] getModuleTranslations() { - return new Translation2d[] { - new Translation2d(kFLXPosMeters, kFLYPosMeters), - new Translation2d(kFRXPosMeters, kFRYPosMeters), - new Translation2d(kBLXPosMeters, kBLYPosMeters), - new Translation2d(kBRXPosMeters, kBRYPosMeters) - }; - } - - /** Returns the position of each module in radians. */ - public double[] getWheelRadiusCharacterizationPositions() { - double[] values = new double[4]; - for (int i = 0; i < 4; i++) { - values[i] = modules[i].getWheelRadiusCharacterizationPosition(); - } - return values; - } - /** - * Returns the measured chassis speeds in FIELD coordinates. + * Returns the measured chassis speeds of the modules in FIELD coordinates. * *

+X = field forward +Y = field left CCW+ = counterclockwise */ @@ -581,7 +612,6 @@ public double[] getWheelRadiusCharacterizationPositions() { public ChassisSpeeds getFieldRelativeSpeeds() { // Robot-relative measured speeds from modules ChassisSpeeds robotRelative = getChassisSpeeds(); - // Convert to field-relative using authoritative yaw return ChassisSpeeds.fromRobotRelativeSpeeds(robotRelative, getHeading()); } @@ -597,13 +627,45 @@ public Translation2d getFieldLinearVelocity() { return new Translation2d(fieldSpeeds.vxMetersPerSecond, fieldSpeeds.vyMetersPerSecond); } - /** Returns the average velocity of the modules in rotations/sec (Phoenix native units). */ - public double getFFCharacterizationVelocity() { - double output = 0.0; - for (int i = 0; i < 4; i++) { - output += modules[i].getFFCharacterizationVelocity() / 4.0; + /** Returns interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** + * Max abs yaw rate over [t0, t1] using buffered yaw-rate history + * + * @param t0 Interval start + * @param t1 interval end + * @return Maximum yaw rate + */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + // If end before start, return empty + if (t1 < t0) return OptionalDouble.empty(); + + // Get the subset of entries from the buffer + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; } - return output; + // Return a value if there's anything to report, else empty + return any ? 
OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + + /** Get the last EPOCH of a pose reset */ + public long getPoseResetEpoch() { + return poseResetEpoch; + } + + /** Get the last TIMESTAMP of a pose reset */ + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; } /** Returns the maximum linear speed in meters per sec. */ @@ -626,6 +688,34 @@ public double getMaxAngularAccelRadPerSecPerSec() { return getMaxLinearAccelMetersPerSecPerSec() / kDriveBaseRadiusMeters; } + /** Returns an array of module translations. */ + public static Translation2d[] getModuleTranslations() { + return new Translation2d[] { + new Translation2d(kFLXPosMeters, kFLYPosMeters), + new Translation2d(kFRXPosMeters, kFRYPosMeters), + new Translation2d(kBLXPosMeters, kBLYPosMeters), + new Translation2d(kBRXPosMeters, kBRYPosMeters) + }; + } + + /** Returns the position of each module in radians. */ + public double[] getWheelRadiusCharacterizationPositions() { + double[] values = new double[4]; + for (int i = 0; i < 4; i++) { + values[i] = modules[i].getWheelRadiusCharacterizationPosition(); + } + return values; + } + + /** Returns the average velocity of the modules in rotations/sec (Phoenix native units). */ + public double getFFCharacterizationVelocity() { + double output = 0.0; + for (int i = 0; i < 4; i++) { + output += modules[i].getFFCharacterizationVelocity() / 4.0; + } + return output; + } + /* Setter Functions ****************************************************** */ /** Resets the current odometry pose. 
*/ @@ -651,18 +741,21 @@ public void zeroHeading() { markPoseReset(Timer.getFPGATimestamp()); } - // ---- Pose reset gate (vision + anything latency-sensitive) ---- - private volatile long poseResetEpoch = 0; // monotonic counter - private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - - public long getPoseResetEpoch() { - return poseResetEpoch; - } - - public double getLastPoseResetTimestamp() { - return lastPoseResetTimestamp; + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } } + /** + * Sets the EPOCH and TIMESTAMP for a pose reset + * + * @param fpgaNow The FPGA timestamp of the pose reset + */ private void markPoseReset(double fpgaNow) { lastPoseResetTimestamp = fpgaNow; poseResetEpoch++; @@ -670,7 +763,10 @@ private void markPoseReset(double fpgaNow) { Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } + /** UTILITY FUNCTION SECTION ********************************************* */ + /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ + /** Choreo: Reset odometry */ public Command resetOdometry(Pose2d orElseGet) { // TODO Auto-generated method stub @@ -725,7 +821,7 @@ public void followTrajectory(SwerveSample sample) { runVelocity(speeds); } - // ---------------- SIM VISION ---------------- + /** SIMULATION VISION FUNCTIONS ****************************************** */ // Vision measurement enabled in simulation private boolean simulatedVisionAvailable = true; @@ -764,69 +860,4 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } - - private static SwerveModulePosition interpolateModulePosition( - double[] ts, 
SwerveModulePosition[] samples, double t) { - - final int n = ts.length; - if (n == 0) return new SwerveModulePosition(); - - if (t <= ts[0]) return samples[0]; - if (t >= ts[n - 1]) return samples[n - 1]; - - int lo = 0, hi = n - 1; - while (lo < hi) { - int mid = (lo + hi) >>> 1; - if (ts[mid] < t) lo = mid + 1; - else hi = mid; - } - int i1 = lo; - int i0 = i1 - 1; - - double t0 = ts[i0]; - double t1 = ts[i1]; - if (t1 <= t0) return samples[i1]; - - double alpha = (t - t0) / (t1 - t0); - - double dist = - samples[i0].distanceMeters - + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; - - Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); - - return new SwerveModulePosition(dist, angle); - } - - private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { - int[] idx = new int[perModuleTs.length]; - int outN = 0; - - while (true) { - double minT = Double.POSITIVE_INFINITY; - boolean any = false; - - for (int m = 0; m < perModuleTs.length; m++) { - double[] ts = perModuleTs[m]; - if (idx[m] >= ts.length) continue; - any = true; - minT = Math.min(minT, ts[idx[m]]); - } - - if (!any) break; - - if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { - outUnion[outN++] = minT; - } - - for (int m = 0; m < perModuleTs.length; m++) { - double[] ts = perModuleTs[m]; - while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { - idx[m]++; - } - } - } - - return outN; - } } diff --git a/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java b/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java index e58a4b33..ff4402ce 100644 --- a/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java +++ b/src/main/java/frc/robot/subsystems/drive/DriveSimPhysics.java @@ -45,6 +45,7 @@ public class DriveSimPhysics { private final SwerveDriveKinematics kinematics; + /** Constructor */ public DriveSimPhysics( SwerveDriveKinematics kinematics, double moiKgMetersSq, 
double maxTorqueNm) { this.kinematics = kinematics; @@ -97,7 +98,7 @@ public void update(SwerveModuleState[] moduleStates, double dtSeconds) { pose = new Pose2d(newTranslation, newYaw); } - /* ================== Getters ================== */ + /** Getter Functions ***************************************************** */ public Pose2d getPose() { return pose; } @@ -114,7 +115,7 @@ public Translation2d getLinearAccel() { return linearAccel; } - /* ================== Reset ================== */ + /** Reset Functions ****************************************************** */ public void reset(Pose2d pose) { this.pose = pose; this.omegaRadPerSec = 0.0; diff --git a/src/main/java/frc/robot/subsystems/drive/Module.java b/src/main/java/frc/robot/subsystems/drive/Module.java index abc40ee2..b57653d8 100644 --- a/src/main/java/frc/robot/subsystems/drive/Module.java +++ b/src/main/java/frc/robot/subsystems/drive/Module.java @@ -18,16 +18,26 @@ import frc.robot.Constants.DrivebaseConstants; import org.littletonrobotics.junction.Logger; +/** + * Module API Class + * + *

The Module class is the API-level class for a single swerve module, of which there are four on + * the robot. This is not a true subsystem, but an abstraction layer. + */ public class Module { + + // Define IO private final ModuleIO io; private final ModuleIOInputsAutoLogged inputs = new ModuleIOInputsAutoLogged(); private final int index; + // Declare alerts here, and only set/unset during the periodic() loop. private final Alert driveDisconnectedAlert; private final Alert turnDisconnectedAlert; private final Alert turnEncoderDisconnectedAlert; private SwerveModulePosition[] odometryPositions = new SwerveModulePosition[] {}; + /** Constructor */ public Module(ModuleIO io, int index) { this.io = io; this.index = index; @@ -44,6 +54,8 @@ public Module(ModuleIO io, int index) { AlertType.kError); } + /************************************************************************* */ + /** Periodic function that is called each robot cycle by the Drive class */ public void periodic() { io.updateInputs(inputs); Logger.processInputs("Drive/Module" + Integer.toString(index), inputs); @@ -64,7 +76,19 @@ public void periodic() { turnEncoderDisconnectedAlert.set(!inputs.turnEncoderConnected); } - /** Runs the module with the specified setpoint state. Mutates the state to optimize it. */ + /** Forwards the simulation periodic call to the IO layer */ + public void simulationPeriodic() { + io.simulationPeriodic(); + } + + /** Module Action Functions ********************************************** */ + /** + * Runs the module with the specified setpoint state + * + *

Mutates the state to optimize it + * + * @param state The requested Swerve Module State + */ public void runSetpoint(SwerveModuleState state) { // Optimize velocity setpoint state.optimize(getAngle()); @@ -87,12 +111,17 @@ public void stop() { io.setTurnOpenLoop(0.0); } - /** Sets whether brake mode is enabled. */ + /** + * Sets whether brake mode is enabled + * + * @param enabled Is the brake enabled? + */ public void setBrakeMode(boolean enabled) { io.setDriveBrakeMode(enabled); io.setTurnBrakeMode(enabled); } + /** Getter functions ***************************************************** */ /** Returns the current turn angle of the module. */ public Rotation2d getAngle() { return inputs.turnPosition; @@ -128,11 +157,6 @@ public double[] getOdometryTimestamps() { return inputs.odometryTimestamps; } - /** Forwards the simulation periodic call to the IO layer */ - public void simulationPeriodic() { - io.simulationPeriodic(); - } - /** Returns the module position in radians. */ public double getWheelRadiusCharacterizationPosition() { return inputs.drivePositionRad; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java index 10055776..8e9c1132 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIO.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIO.java @@ -13,6 +13,7 @@ import org.littletonrobotics.junction.AutoLog; public interface ModuleIO { + @AutoLog public static class ModuleIOInputs { public boolean driveConnected = false; diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index 6aec53eb..65da2c71 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -48,6 +48,7 @@ import frc.robot.generated.TunerConstants; import frc.robot.util.PhoenixUtil; import frc.robot.util.RBSICANBusRegistry; +import 
java.util.Arrays; import java.util.Queue; import org.littletonrobotics.junction.Logger; @@ -129,7 +130,7 @@ public class ModuleIOTalonFX implements ModuleIO { private long lastTimestampNano = System.nanoTime(); /* - * TalonFX I/O + * TalonFX I/O Constructor */ public ModuleIOTalonFX(int module) { // Record the module number for logging purposes @@ -260,17 +261,18 @@ public ModuleIOTalonFX(int module) { ParentDevice.optimizeBusUtilizationForAll(driveTalon, turnTalon); } + /** Input Updating Loop ************************************************** */ @Override public void updateInputs(ModuleIOInputs inputs) { - // Refresh most signals + // -------------------- Refresh Phoenix signals -------------------- var driveStatus = BaseStatusSignal.refreshAll(drivePosition, driveVelocity, driveAppliedVolts, driveCurrent); var turnStatus = BaseStatusSignal.refreshAll(turnPosition, turnVelocity, turnAppliedVolts, turnCurrent); var encStatus = BaseStatusSignal.refreshAll(turnAbsolutePosition); - // Log *which* groups are failing and what the code is + // Log refresh failures explicitly (debug gold) if (!driveStatus.isOK()) { Logger.recordOutput("CAN/Module" + module + "/DriveRefreshStatus", driveStatus.toString()); } @@ -281,40 +283,66 @@ public void updateInputs(ModuleIOInputs inputs) { Logger.recordOutput("CAN/Module" + module + "/EncRefreshStatus", encStatus.toString()); } - // Update drive inputs + // -------------------- Connectivity flags -------------------- inputs.driveConnected = driveConnectedDebounce.calculate(driveStatus.isOK()); + inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); + inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); + + // -------------------- Instantaneous state -------------------- inputs.drivePositionRad = Units.rotationsToRadians(drivePosition.getValueAsDouble()); inputs.driveVelocityRadPerSec = Units.rotationsToRadians(driveVelocity.getValueAsDouble()); inputs.driveAppliedVolts = 
driveAppliedVolts.getValueAsDouble(); inputs.driveCurrentAmps = driveCurrent.getValueAsDouble(); - // Update turn inputs - inputs.turnConnected = turnConnectedDebounce.calculate(turnStatus.isOK()); - inputs.turnEncoderConnected = turnEncoderConnectedDebounce.calculate(encStatus.isOK()); inputs.turnAbsolutePosition = Rotation2d.fromRotations(turnAbsolutePosition.getValueAsDouble()); inputs.turnPosition = Rotation2d.fromRotations(turnPosition.getValueAsDouble()); inputs.turnVelocityRadPerSec = Units.rotationsToRadians(turnVelocity.getValueAsDouble()); inputs.turnAppliedVolts = turnAppliedVolts.getValueAsDouble(); inputs.turnCurrentAmps = turnCurrent.getValueAsDouble(); - // Update odometry inputs - inputs.odometryTimestamps = - timestampQueue.stream().mapToDouble((Double value) -> value).toArray(); - inputs.odometryDrivePositionsRad = - drivePositionQueue.stream() - .mapToDouble((Double value) -> Units.rotationsToRadians(value)) - .toArray(); - inputs.odometryTurnPositions = - turnPositionQueue.stream() - .map((Double value) -> Rotation2d.fromRotations(value)) - .toArray(Rotation2d[]::new); - - // Clear the queues - timestampQueue.clear(); - drivePositionQueue.clear(); - turnPositionQueue.clear(); + // -------------------- Odometry queue drain -------------------- + final int tsCount = timestampQueue.size(); + final int driveCount = drivePositionQueue.size(); + final int turnCount = turnPositionQueue.size(); + + // Only consume the common prefix — guarantees alignment + final int sampleCount = Math.min(tsCount, Math.min(driveCount, turnCount)); + + if (sampleCount <= 0) { + inputs.odometryTimestamps = new double[0]; + inputs.odometryDrivePositionsRad = new double[0]; + inputs.odometryTurnPositions = new Rotation2d[0]; + return; + } + + final double[] outTs = new double[sampleCount]; + final double[] outDriveRad = new double[sampleCount]; + final Rotation2d[] outTurn = new Rotation2d[sampleCount]; + + for (int i = 0; i < sampleCount; i++) { + final Double t = 
timestampQueue.poll(); + final Double driveRot = drivePositionQueue.poll(); + final Double turnRot = turnPositionQueue.poll(); + + // Defensive guard (should never trigger, but keeps us safe) + if (t == null || driveRot == null || turnRot == null) { + inputs.odometryTimestamps = Arrays.copyOf(outTs, i); + inputs.odometryDrivePositionsRad = Arrays.copyOf(outDriveRad, i); + inputs.odometryTurnPositions = Arrays.copyOf(outTurn, i); + return; + } + + outTs[i] = t.doubleValue(); + outDriveRad[i] = Units.rotationsToRadians(driveRot.doubleValue()); + outTurn[i] = Rotation2d.fromRotations(turnRot.doubleValue()); + } + + inputs.odometryTimestamps = outTs; + inputs.odometryDrivePositionsRad = outDriveRad; + inputs.odometryTurnPositions = outTurn; } + /** Module Action Functions ********************************************** */ /** * Set the drive motor to an open-loop voltage, scaled to battery voltage * diff --git a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java index a9332cec..ceaff2ba 100644 --- a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java +++ b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java @@ -22,6 +22,7 @@ import frc.robot.subsystems.imu.ImuIONavX; import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.util.YagslConstants; +import java.util.function.Supplier; /** * Holds the proper set of drive constants given the type of drive @@ -310,9 +311,9 @@ public enum ImuType { NAVX(new String[] {"navx", "navx_spi"}, ImuIONavX::new); private final String[] keys; - public final java.util.function.Supplier factory; + public final Supplier factory; - ImuType(String[] keys, java.util.function.Supplier factory) { + ImuType(String[] keys, Supplier factory) { this.keys = keys; this.factory = factory; } diff --git a/src/main/java/frc/robot/subsystems/imu/Imu.java b/src/main/java/frc/robot/subsystems/imu/Imu.java index 2a7a98f4..f5f63152 100644 --- 
a/src/main/java/frc/robot/subsystems/imu/Imu.java +++ b/src/main/java/frc/robot/subsystems/imu/Imu.java @@ -19,8 +19,9 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Translation3d; +import frc.robot.util.VirtualSubsystem; -public class Imu { +public class Imu extends VirtualSubsystem { private final ImuIO io; private final ImuIO.ImuIOInputs inputs = new ImuIO.ImuIOInputs(); @@ -30,11 +31,13 @@ public class Imu { private Translation3d cachedAccel = Translation3d.kZero; private Translation3d cachedJerk = Translation3d.kZero; + // Constructor public Imu(ImuIO io) { this.io = io; } - public void periodic() { + // Periodic function to read inputs + public void rbsiPeriodic() { io.updateInputs(inputs); } @@ -70,12 +73,11 @@ private void refreshCachesIfNeeded() { cacheStampNs = stamp; cachedYaw = Rotation2d.fromRadians(inputs.yawPositionRad); - cachedAccel = new Translation3d(inputs.linearAccelX, inputs.linearAccelY, inputs.linearAccelZ); - cachedJerk = new Translation3d(inputs.jerkX, inputs.jerkY, inputs.jerkZ); + cachedAccel = inputs.linearAccel; + cachedJerk = inputs.linearJerk; } // ---------------- SIM PUSH (primitive-only boundary) ---------------- - /** Simulation: push authoritative yaw (radians) into the IO layer */ public void simulationSetYawRad(double yawRad) { io.simulationSetYawRad(yawRad); diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIO.java b/src/main/java/frc/robot/subsystems/imu/ImuIO.java index 37aa695f..bf350c4f 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIO.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIO.java @@ -17,6 +17,7 @@ package frc.robot.subsystems.imu; +import edu.wpi.first.math.geometry.Translation3d; import frc.robot.util.RBSIIO; import org.littletonrobotics.junction.AutoLog; @@ -43,16 +44,10 @@ class ImuIOInputs { public double yawRateRadPerSec = 0.0; /** Linear acceleration in robot frame (m/s^2) */ - public double linearAccelX = 0.0; - - public double linearAccelY = 0.0; - 
public double linearAccelZ = 0.0; + public Translation3d linearAccel = Translation3d.kZero; /** Linear jerk in robot frame (m/s^3) */ - public double jerkX = 0.0; - - public double jerkY = 0.0; - public double jerkZ = 0.0; + public Translation3d linearJerk = Translation3d.kZero; /** Time spent in the IO update call (seconds) */ public double latencySeconds = 0.0; diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java index 4bf1edcd..18fa3c23 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIONavX.java @@ -19,6 +19,7 @@ import com.studica.frc.AHRS; import com.studica.frc.AHRS.NavXComType; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; import frc.robot.subsystems.drive.PhoenixOdometryThread; @@ -38,7 +39,7 @@ public class ImuIONavX implements ImuIO { private final Queue yawTimestampQueue; // Previous accel (m/s^2) for jerk - private double prevAx = 0.0, prevAy = 0.0, prevAz = 0.0; + private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; // Reusable buffers for queue drain @@ -82,29 +83,23 @@ public void updateInputs(ImuIOInputs inputs) { inputs.yawRateRadPerSec = yawRateDegPerSec * DEG_TO_RAD; // World linear accel (NavX returns "g" typically). Convert to m/s^2. 
- final double ax = navx.getWorldLinearAccelX() * G_TO_MPS2; - final double ay = navx.getWorldLinearAccelY() * G_TO_MPS2; - final double az = navx.getWorldLinearAccelZ() * G_TO_MPS2; - - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = + new Translation3d( + navx.getWorldLinearAccelX(), + navx.getWorldLinearAccelY(), + navx.getWorldLinearAccelZ()) + .times(G_TO_MPS2); // Jerk if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; if (dt > 1e-6) { - final double invDt = 1.0 / dt; - inputs.jerkX = (ax - prevAx) * invDt; - inputs.jerkY = (ay - prevAy) * invDt; - inputs.jerkZ = (az - prevAz) * invDt; + inputs.linearJerk = inputs.linearAccel.minus(prevAcc).div(dt); } } prevTimestampNs = start; - prevAx = ax; - prevAy = ay; - prevAz = az; + prevAcc = inputs.linearAccel; inputs.timestampNs = start; @@ -126,6 +121,11 @@ public void updateInputs(ImuIOInputs inputs) { inputs.latencySeconds = (end - start) * 1e-9; } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { navx.setAngleAdjustment(yawRad / DEG_TO_RAD); @@ -133,7 +133,7 @@ public void zeroYawRad(double yawRad) { // Reset jerk history so you don't spike on the next frame prevTimestampNs = 0L; - prevAx = prevAy = prevAz = 0.0; + prevAcc = Translation3d.kZero; } private int drainOdomQueues() { @@ -174,25 +174,4 @@ private void ensureOdomCapacity(int n) { odomTsBuf = new double[newCap]; odomYawRadBuf = new double[newCap]; } - - // /** - // * Zero the NavX - // * - // *

This method should always rezero the pigeon in ALWAYS-BLUE-ORIGIN orientation. Testing, - // * however, shows that it's not doing what I think it should be doing. There is likely - // * interference with something else in the odometry - // */ - // @Override - // public void zero() { - // // With the Pigeon facing forward, forward depends on the alliance selected. - // // Set Angle Adjustment based on alliance - // if (DriverStation.getAlliance().get() == Alliance.Blue) { - // navx.setAngleAdjustment(0.0); - // } else { - // navx.setAngleAdjustment(180.0); - // } - // System.out.println("Setting YAW to " + navx.getAngleAdjustment()); - // navx.zeroYaw(); - // } - } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java index 0e07f02f..0db40ef9 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOPigeon2.java @@ -22,6 +22,8 @@ import com.ctre.phoenix6.StatusSignal; import com.ctre.phoenix6.configs.Pigeon2Configuration; import com.ctre.phoenix6.hardware.Pigeon2; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; import edu.wpi.first.units.measure.Angle; import edu.wpi.first.units.measure.AngularVelocity; import edu.wpi.first.units.measure.LinearAcceleration; @@ -31,12 +33,13 @@ import java.util.Iterator; import java.util.Queue; -/** IMU IO for CTRE Pigeon2 (primitive-only hot path) */ +/** IMU IO for CTRE Pigeon2 */ public class ImuIOPigeon2 implements ImuIO { - private static final double DEG_TO_RAD = Math.PI / 180.0; + // Constants private static final double G_TO_MPS2 = 9.80665; + // Define the Pigeon2 private final Pigeon2 pigeon = new Pigeon2( SwerveConstants.kPigeonId, RBSICANBusRegistry.getBus(SwerveConstants.kCANbusName)); @@ -53,13 +56,14 @@ public class ImuIOPigeon2 implements ImuIO { private final Queue odomYawsDeg; // Previous accel for jerk (m/s^2) - private double prevAx = 0.0, 
prevAy = 0.0, prevAz = 0.0; + private Translation3d prevAcc = Translation3d.kZero; private long prevTimestampNs = 0L; // Reusable buffers for queue-drain (avoid streams) private double[] odomTsBuf = new double[8]; private double[] odomYawRadBuf = new double[8]; + // Constructor public ImuIOPigeon2() { pigeon.getConfigurator().apply(new Pigeon2Configuration()); pigeon.getConfigurator().setYaw(0.0); @@ -77,6 +81,7 @@ public ImuIOPigeon2() { odomYawsDeg = PhoenixOdometryThread.getInstance().registerSignal(yawSignal); } + /** Update the Inputs */ @Override public void updateInputs(ImuIOInputs inputs) { final long start = System.nanoTime(); @@ -88,33 +93,25 @@ public void updateInputs(ImuIOInputs inputs) { final double yawDeg = yawSignal.getValueAsDouble(); final double yawRateDegPerSec = yawRateSignal.getValueAsDouble(); - inputs.yawPositionRad = yawDeg * DEG_TO_RAD; - inputs.yawRateRadPerSec = yawRateDegPerSec * DEG_TO_RAD; + inputs.yawPositionRad = Units.degreesToRadians(yawDeg); + inputs.yawRateRadPerSec = Units.degreesToRadians(yawRateDegPerSec); // Accel: Phoenix returns "g" for these signals (common for Pigeon2). 
Convert to m/s^2 - final double ax = accelX.getValueAsDouble() * G_TO_MPS2; - final double ay = accelY.getValueAsDouble() * G_TO_MPS2; - final double az = accelZ.getValueAsDouble() * G_TO_MPS2; - - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = + new Translation3d( + accelX.getValueAsDouble(), accelY.getValueAsDouble(), accelZ.getValueAsDouble()) + .times(G_TO_MPS2); // Jerk from delta accel / dt if (prevTimestampNs != 0L) { final double dt = (start - prevTimestampNs) * 1e-9; if (dt > 1e-6) { - final double invDt = 1.0 / dt; - inputs.jerkX = (ax - prevAx) * invDt; - inputs.jerkY = (ay - prevAy) * invDt; - inputs.jerkZ = (az - prevAz) * invDt; + inputs.linearJerk = inputs.linearAccel.minus(prevAcc).div(dt); } } prevTimestampNs = start; - prevAx = ax; - prevAy = ay; - prevAz = az; + prevAcc = inputs.linearAccel; inputs.timestampNs = start; @@ -136,9 +133,14 @@ public void updateInputs(ImuIOInputs inputs) { inputs.latencySeconds = (end - start) * 1e-9; } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { - pigeon.setYaw(yawRad / DEG_TO_RAD); + pigeon.setYaw(Units.radiansToDegrees(yawRad)); } private int drainOdometryQueuesIntoBuffers() { @@ -160,7 +162,7 @@ private int drainOdometryQueuesIntoBuffers() { int i = 0; while (i < n && itT.hasNext() && itY.hasNext()) { odomTsBuf[i] = itT.next(); - odomYawRadBuf[i] = itY.next() * DEG_TO_RAD; + odomYawRadBuf[i] = Units.degreesToRadians(itY.next()); i++; } diff --git a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java index 080dec1c..a28212ce 100644 --- a/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java +++ b/src/main/java/frc/robot/subsystems/imu/ImuIOSim.java @@ -17,12 +17,13 @@ package frc.robot.subsystems.imu; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; 
import edu.wpi.first.wpilibj.Timer; import org.littletonrobotics.junction.Logger; -/** Simulated IMU for full robot simulation & replay logging (primitive-only) */ +/** Simulated IMU for full robot simulation & replay logging */ public class ImuIOSim implements ImuIO { - private static final double RAD_TO_DEG = 180.0 / Math.PI; // --- AUTHORITATIVE SIM STATE (PRIMITIVES) --- private double yawRad = 0.0; @@ -39,7 +40,6 @@ public class ImuIOSim implements ImuIO { public ImuIOSim() {} // ---------------- SIMULATION INPUTS (PUSH) ---------------- - @Override public void simulationSetYawRad(double yawRad) { this.yawRad = yawRad; @@ -58,7 +58,6 @@ public void simulationSetLinearAccelMps2(double ax, double ay, double az) { } // ---------------- IO UPDATE (PULL) ---------------- - @Override public void updateInputs(ImuIOInputs inputs) { inputs.connected = true; @@ -67,15 +66,11 @@ public void updateInputs(ImuIOInputs inputs) { // Authoritative sim state inputs.yawPositionRad = yawRad; inputs.yawRateRadPerSec = yawRateRadPerSec; - inputs.linearAccelX = ax; - inputs.linearAccelY = ay; - inputs.linearAccelZ = az; + inputs.linearAccel = new Translation3d(ax, ay, az); - // Jerk: SIM doesn’t have a prior accel here unless you want it; set to 0 by default. + // Jerk: SIM doesn't have a prior accel here unless you want it; set to 0 by default. // If you do want jerk, you can add prevAx/prevAy/prevAz + dt just like the real IO. 
- inputs.jerkX = 0.0; - inputs.jerkY = 0.0; - inputs.jerkZ = 0.0; + inputs.linearJerk = Translation3d.kZero; // Maintain odometry history pushOdomSample(Timer.getFPGATimestamp(), yawRad); @@ -101,10 +96,15 @@ public void updateInputs(ImuIOInputs inputs) { // Optional: SIM logging (primitive-friendly) Logger.recordOutput("IMU/YawRad", yawRad); - Logger.recordOutput("IMU/YawDeg", yawRad * RAD_TO_DEG); - Logger.recordOutput("IMU/YawRateDps", yawRateRadPerSec * RAD_TO_DEG); + Logger.recordOutput("IMU/YawDeg", Units.radiansToDegrees(yawRad)); + Logger.recordOutput("IMU/YawRateDps", Units.radiansToDegrees(yawRateRadPerSec)); } + /** + * Zero the YAW to this radian value + * + * @param yawRad The radian value to which to zero + */ @Override public void zeroYawRad(double yawRad) { this.yawRad = yawRad; diff --git a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java index 3cdcbb9d..18f56795 100644 --- a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java +++ b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java @@ -82,7 +82,7 @@ private static Quaternion quatFromAxisAngle(double ax, double ay, double az, dou return new Quaternion(c, ax * s, ay * s, az * s); } - // Hamilton product: q = a ⊗ b + // Hamilton product: q = a * b (quaternion multiplication, not a cross product) private static Quaternion quatMul(Quaternion a, Quaternion b) { double aw = a.getW(), ax = a.getX(), ay = a.getY(), az = a.getZ(); double bw = b.getW(), bx = b.getX(), by = b.getY(), bz = b.getZ(); @@ -103,8 +103,8 @@ private static Quaternion quatMul(Quaternion a, Quaternion b) { * *

Conventions: yawDeg = rotation about +Z (up), pitchDeg = rotation about +Y. * - *

Order: extra = yaw ⊗ pitch (yaw first, then pitch, both in the camera/base frame) combined = - * base ⊗ extra + *

Order: extra = yaw x pitch (yaw first, then pitch, both in the camera/base frame) combined = + * base x extra */ private static Rotation3d composeCameraExtra(Rotation3d base, double yawDeg, double pitchDeg) { double yaw = Units.degreesToRadians(yawDeg); diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 908cf9c5..74d318aa 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -34,6 +34,7 @@ import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Arrays; import java.util.Optional; import java.util.OptionalDouble; import java.util.Set; @@ -42,6 +43,7 @@ public class Vision extends VirtualSubsystem { + /** Vision Consumer definition */ @FunctionalInterface public interface VisionConsumer { void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); @@ -58,18 +60,16 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; - // --- Per-camera monotonic gate --- + // Per-camera monotonic and pose reset gates private final double[] lastAcceptedTsPerCam; - - // --- Pose reset gate --- private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - // --- Smoothing buffer (recent fused estimates) --- + // Smoothing buffer (recent fused estimates) private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; - // --- Trusted tags configuration (swappable per event/field) --- + // Trusted tags configuration (swappable per event/field) private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); private volatile boolean requireTrustedTag = false; @@ -77,11 +77,12 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} 
private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted private volatile double untrustedTagStdDevScale = 1.40; // > 1 => less trusted - // --- Optional 254-style yaw gate for single-tag --- + // Yaw-rate gate for single-tag measurements private volatile boolean enableSingleTagYawGate = true; private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; + /** Constructor */ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { this.drive = drive; this.consumer = consumer; @@ -93,7 +94,7 @@ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { } this.lastAcceptedTsPerCam = new double[io.length]; - java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); @@ -102,37 +103,7 @@ public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { } } - // -------- Runtime configuration hooks -------- - - /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ - public void resetPoseGate(double fpgaNowSeconds) { - lastPoseResetTimestamp = fpgaNowSeconds; - fusedBuffer.clear(); - java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - } - - /** Swap trusted tag set per event/field without redeploy. 
*/ - public void setTrustedTags(Set tags) { - trustedTags.set(Set.copyOf(tags)); - } - - public void setRequireTrustedTag(boolean require) { - requireTrustedTag = require; - } - - public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { - trustedTagStdDevScale = trustedScale; - untrustedTagStdDevScale = untrustedScale; - } - - public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { - enableSingleTagYawGate = enabled; - yawGateLookbackSec = lookbackSec; - yawGateLimitRadPerSec = limitRadPerSec; - } - - // -------- Core periodic -------- - + /** Periodic Function */ @Override public void rbsiPeriodic() { @@ -145,13 +116,13 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/PoseGateResetFromDrive", false); } - // 1) Update/log camera inputs + // Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); } - // 2) Pick one best accepted estimate per camera for this loop + // Pick one best accepted estimate per camera for this loop final ArrayList perCamAccepted = new ArrayList<>(io.length); for (int cam = 0; cam < io.length; cam++) { @@ -208,20 +179,20 @@ public void rbsiPeriodic() { if (perCamAccepted.isEmpty()) return; - // 3) Fusion time is the newest timestamp among accepted per-camera samples + // Fusion time is the newest timestamp among accepted per-camera samples double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); if (!Double.isFinite(tF)) return; - // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse TimedEstimate fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; - // 5) Smooth by fusing recent fused estimates (also aligned to tF) + // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); TimedEstimate 
smoothed = smoothAtTime(tF); if (smoothed == null) return; - // 6) Inject + // Inject the pose consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); Logger.recordOutput("Vision/FusedPose", fused.pose); @@ -229,8 +200,62 @@ public void rbsiPeriodic() { Logger.recordOutput("Vision/FusedTimestamp", tF); } - // -------- Gating + scoring -------- + /** Runtime configuration hooks ****************************************** */ + /** + * Call when you reset odometry (e.g. auto start, manual reset, etc). + * + * @param fpgaNowSeconds Timestamp for the pose gate reset + */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** + * Swap trusted tag set per event/field without redeploy + * + * @param tags Set of trusted tags to use + */ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + /** + * Set whether to require trusted tags + * + * @param require Boolean + */ + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + /** + * Set the (un)trusted standard deviation scales + * + * @param trustedScale The scale for trusted tags + * @param untrustedScale The scale for untrusted tags + */ + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; + } + + /** + * Set the yaw gate for single-tag measurements + * + * @param enabled Enable the gate?
+ * @param lookbackSec Lookback time + * @param limitRadPerSec Yaw rate above which single-tag measurements will be ignored + */ + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; + } + + /** Gating + Scoring ***************************************************** */ private static final class GateResult { final boolean accepted; @@ -343,8 +368,7 @@ private boolean isBetter(TimedEstimate a, TimedEstimate b) { return ta < tb; } - // -------- Time alignment + fusion -------- - + /** Time alignment & fusion ********************************************** */ private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { @@ -410,8 +434,7 @@ private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, return new TimedEstimate(fused, tF, fusedStd); } - // -------- Smoothing buffer -------- - + /** Smoothing buffer ***************************************************** */ private void pushFused(TimedEstimate fused) { fusedBuffer.addLast(fused); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index fad294d2..7bc61ed2 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -19,7 +19,7 @@ public interface VisionIO { class VisionIOInputs { public boolean connected = false; - /** Latest "servo to target" observation (optional) */ + /** Latest "camera to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index 4e5761b9..f554cc4c 
100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -62,7 +62,7 @@ public VisionIOPhotonVisionSim( @Override public void updateInputs(VisionIOInputs inputs) { // NOTE: This updates the sim world every time a sim camera is polled. - // That's fine (fast enough), but if you want “update once per loop,” see note below. + // That's fine (fast enough), but if you want "update once per loop," see note below. visionSim.update(poseSupplier.get()); // Then pull results like normal (and emit PoseObservation + usedTagIds sets) From f594d2e95970800c65b9c6ad6846b6c3958e0bd6 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 7 Feb 2026 13:50:54 -0700 Subject: [PATCH 3/8] Was double-adding pose estimates The SIM mode was not separately gated behind an `else`, so REAL mode was double-adding pose estimates. --- .../java/frc/robot/subsystems/drive/Drive.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index aeb71e65..a6045156 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -388,17 +388,17 @@ public void rbsiPeriodic() { Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); gyroDisconnectedAlert.set(!imuInputs.connected); return; - } - - // SIMULATION: Keep sim pose buffer time-aligned, too - double now = Timer.getFPGATimestamp(); - poseBuffer.addSample(now, simPhysics.getPose()); - yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); - yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + } else { - Logger.recordOutput("Drive/Pose", simPhysics.getPose()); - gyroDisconnectedAlert.set(false); + // SIMULATION: Keep sim pose buffer time-aligned, too + double now = Timer.getFPGATimestamp(); + 
poseBuffer.addSample(now, simPhysics.getPose()); + yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + gyroDisconnectedAlert.set(false); + } } finally { odometryLock.unlock(); } From e2d8fad903cde1d96a7ab96f433877a1fd1ecc72 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Sat, 7 Feb 2026 16:07:03 -0700 Subject: [PATCH 4/8] Fix double-count of pose estimation; add kS for swerve turn --- src/main/java/frc/robot/Constants.java | 39 ++++++++++--------- .../frc/robot/subsystems/drive/Drive.java | 1 + .../subsystems/drive/ModuleIOBlended.java | 3 +- .../robot/subsystems/drive/ModuleIOSpark.java | 3 +- .../drive/ModuleIOSparkCANcoder.java | 3 +- .../subsystems/drive/ModuleIOTalonFX.java | 2 +- .../subsystems/drive/ModuleIOTalonFXS.java | 2 +- 7 files changed, 29 insertions(+), 24 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 7d30eaed..1887d604 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -74,7 +74,7 @@ public final class Constants { private static SwerveType swerveType = SwerveType.PHOENIX6; // PHOENIX6, YAGSL private static CTREPro phoenixPro = CTREPro.UNLICENSED; // LICENSED, UNLICENSED private static AutoType autoType = AutoType.MANUAL; // MANUAL, PATHPLANNER, CHOREO - private static VisionType visionType = VisionType.NONE; // PHOTON, LIMELIGHT, NONE + private static VisionType visionType = VisionType.PHOTON; // PHOTON, LIMELIGHT, NONE /** Enumerate the robot types (name your robots here) */ public static enum RobotType { @@ -324,6 +324,7 @@ public static final class DrivebaseConstants { SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; public static final double kSteerP = 400.0; public static final double kSteerD = 20.0; + public static final double kSteerS = 2.0; // 
Odometry-related constants public static final double kHistorySize = 1.5; // seconds @@ -466,7 +467,7 @@ public record CameraConfig( // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways public static final CameraConfig[] ALL = { new CameraConfig( - "camera_0", + "Photon_BW7", new Transform3d( Inches.of(-13.0), Inches.of(13.0), @@ -483,23 +484,23 @@ public record CameraConfig( } }), // - new CameraConfig( - "camera_1", - new Transform3d( - Inches.of(-13.0), - Inches.of(-13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, -Math.PI / 2)), - 1.0, - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }), + // new CameraConfig( + // "camera_1", + // new Transform3d( + // Inches.of(-13.0), + // Inches.of(-13.0), + // Inches.of(12.0), + // new Rotation3d(0.0, 0.0, -Math.PI / 2)), + // 1.0, + // new SimCameraProperties() { + // { + // setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + // setCalibError(0.25, 0.08); + // setFPS(30); + // setAvgLatencyMs(20); + // setLatencyStdDevMs(5); + // } + // }), // ... 
And more, if needed }; diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index a6045156..2dcc9591 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -388,6 +388,7 @@ public void rbsiPeriodic() { Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); gyroDisconnectedAlert.set(!imuInputs.connected); return; + } else { // SIMULATION: Keep sim pose buffer time-aligned, too diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java index 5c68481c..01d0ffc2 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOBlended.java @@ -243,7 +243,8 @@ public ModuleIOBlended(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java index 75738618..1c92c68d 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSpark.java @@ -183,7 +183,8 @@ public ModuleIOSpark(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java index fc76b593..587f8ec8 100644 --- 
a/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOSparkCANcoder.java @@ -202,7 +202,8 @@ public ModuleIOSparkCANcoder(int module) { SwerveConstants.turnPIDMinInput, SwerveConstants.turnPIDMaxInput) .pid(DrivebaseConstants.kSteerP, 0.0, DrivebaseConstants.kSteerD) .feedForward - .kV(0.0); + .kV(0.0) + .kS(DrivebaseConstants.kSteerS); turnConfig .signals .absoluteEncoderPositionAlwaysOn(true) diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java index 65da2c71..df569b4b 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFX.java @@ -192,7 +192,7 @@ public ModuleIOTalonFX(int module) { .withKP(DrivebaseConstants.kSteerP) .withKI(0.0) .withKD(DrivebaseConstants.kSteerD) - .withKS(0.0) + .withKS(DrivebaseConstants.kSteerS) .withKV(0.0) .withKA(0.0) .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); diff --git a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java index 23f2885c..f3cc029a 100644 --- a/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java +++ b/src/main/java/frc/robot/subsystems/drive/ModuleIOTalonFXS.java @@ -180,7 +180,7 @@ public ModuleIOTalonFXS( .withKP(DrivebaseConstants.kSteerP) .withKI(0.0) .withKD(DrivebaseConstants.kSteerD) - .withKS(0.0) + .withKS(DrivebaseConstants.kSteerS) .withKV(0.0) .withKA(0.0) .withStaticFeedforwardSign(StaticFeedforwardSignValue.UseClosedLoopSign); From ecff3ea3f965eca3e2fd808705694bc66f3b2726 Mon Sep 17 00:00:00 2001 From: "Timothy P. 
Ellsworth Bowers" Date: Sat, 7 Feb 2026 16:50:25 -0700 Subject: [PATCH 5/8] Clean up the consumer syntax --- .../frc/robot/subsystems/drive/Drive.java | 7 +- .../frc/robot/subsystems/vision/Vision.java | 145 +++++++++++------- src/main/java/frc/robot/util/RBSIPose.java | 21 +++ 3 files changed, 111 insertions(+), 62 deletions(-) create mode 100644 src/main/java/frc/robot/util/RBSIPose.java diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 2dcc9591..041a0e61 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -22,7 +22,6 @@ import edu.wpi.first.hal.FRCNetComm.tInstances; import edu.wpi.first.hal.FRCNetComm.tResourceType; import edu.wpi.first.hal.HAL; -import edu.wpi.first.math.Matrix; import edu.wpi.first.math.controller.PIDController; import edu.wpi.first.math.controller.ProfiledPIDController; import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator; @@ -52,6 +51,7 @@ import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; +import frc.robot.util.RBSIPose; import frc.robot.util.RBSISubsystem; import java.util.Optional; import java.util.OptionalDouble; @@ -743,10 +743,11 @@ public void zeroHeading() { } /** Adds a vision measurement safely into the PoseEstimator. 
*/ - public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + public void addVisionMeasurement(RBSIPose rbsiPose) { odometryLock.lock(); try { - m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + m_PoseEstimator.addVisionMeasurement( + rbsiPose.pose(), rbsiPose.timestampSeconds(), rbsiPose.stdDevs()); } finally { odometryLock.unlock(); } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 74d318aa..e37b6a72 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -25,12 +25,14 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; import frc.robot.Constants; import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; +import frc.robot.util.RBSIPose; import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; @@ -45,13 +47,11 @@ public class Vision extends VirtualSubsystem { /** Vision Consumer definition */ @FunctionalInterface - public interface VisionConsumer { - void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + public interface PoseMeasurementConsumer { + void accept(RBSIPose measurement); } - private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} - - private final VisionConsumer consumer; + private final PoseMeasurementConsumer consumer; private final Drive drive; private long lastSeenPoseResetEpoch = -1; @@ -65,7 +65,7 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; // Smoothing 
buffer (recent fused estimates) - private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; @@ -82,8 +82,10 @@ private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; + private static final double kMinVariance = 1e-12; // prevents divide-by-zero explosions + /** Constructor */ - public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { + public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { this.drive = drive; this.consumer = consumer; this.io = io; @@ -123,14 +125,14 @@ public void rbsiPeriodic() { } // Pick one best accepted estimate per camera for this loop - final ArrayList perCamAccepted = new ArrayList<>(io.length); + final ArrayList perCamAccepted = new ArrayList<>(io.length); for (int cam = 0; cam < io.length; cam++) { int seen = 0; int accepted = 0; int rejected = 0; - TimedEstimate best = null; + RBSIPose best = null; double bestTrustScale = Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; @@ -150,7 +152,7 @@ public void rbsiPeriodic() { continue; } - TimedEstimate est = built.estimate; + RBSIPose est = built.estimate; if (best == null || isBetter(est, best)) { best = est; bestTrustScale = built.trustScale; @@ -161,12 +163,12 @@ public void rbsiPeriodic() { if (best != null) { accepted++; - lastAcceptedTsPerCam[cam] = best.ts; + lastAcceptedTsPerCam[cam] = best.timestampSeconds(); perCamAccepted.add(best); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); - Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose()); + 
Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.timestampSeconds()); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs()); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); @@ -180,23 +182,24 @@ public void rbsiPeriodic() { if (perCamAccepted.isEmpty()) return; // Fusion time is the newest timestamp among accepted per-camera samples - double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + double tF = + perCamAccepted.stream().mapToDouble(e -> e.timestampSeconds()).max().orElse(Double.NaN); if (!Double.isFinite(tF)) return; // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse - TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + RBSIPose fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); - TimedEstimate smoothed = smoothAtTime(tF); + RBSIPose smoothed = smoothAtTime(tF); if (smoothed == null) return; - // Inject the pose - consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + // Inject the pose
+ consumer.accept(smoothed); - Logger.recordOutput("Vision/FusedPose", fused.pose); - Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedPose", fused.pose()); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose()); Logger.recordOutput("Vision/FusedTimestamp", tF); } @@ -301,11 +304,11 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o } private static final class BuiltEstimate { - final TimedEstimate estimate; + final RBSIPose estimate; final double trustScale; final int trustedCount; - BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + BuiltEstimate(RBSIPose estimate, double trustScale, int trustedCount) { this.estimate = estimate; this.trustScale = trustScale; this.trustedCount = trustedCount; @@ -353,7 +356,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); return new BuiltEstimate( - new TimedEstimate( + new RBSIPose( obs.pose().toPose2d(), obs.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), @@ -361,20 +364,20 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { trustedCount); } - private boolean isBetter(TimedEstimate a, TimedEstimate b) { + private boolean isBetter(RBSIPose a, RBSIPose b) { // Lower trace of stddev vector (x+y+theta) is better - double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); - double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + double ta = a.stdDevs().get(0, 0) + a.stdDevs().get(1, 0) + a.stdDevs().get(2, 0); + double tb = b.stdDevs().get(0, 0) + b.stdDevs().get(1, 0) + b.stdDevs().get(2, 0); return ta < tb; } /** Time alignment & fusion ********************************************** */ - private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { - final ArrayList aligned = new 
ArrayList<>(estimates.size()); + private RBSIPose fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { - Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) return null; - aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); } return inverseVarianceFuse(aligned, tF); } @@ -394,48 +397,70 @@ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { return visionPoseAtTs.transformBy(ts_T_tf); } - private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { - if (alignedAtTF.isEmpty()) return null; + private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF == null || alignedAtTF.isEmpty()) return null; if (alignedAtTF.size() == 1) return alignedAtTF.get(0); - double sumWx = 0, sumWy = 0, sumWth = 0; - double sumX = 0, sumY = 0; - double sumCos = 0, sumSin = 0; + double sumWx = 0.0, sumWy = 0.0, sumWth = 0.0; + double sumX = 0.0, sumY = 0.0; + double sumCos = 0.0, sumSin = 0.0; + + for (int i = 0; i < alignedAtTF.size(); i++) { + final RBSIPose e = alignedAtTF.get(i); + final Pose2d p = e.pose(); + final Matrix s = e.stdDevs(); + + final double sx = s.get(0, 0); + final double sy = s.get(1, 0); + final double sth = s.get(2, 0); - for (var e : alignedAtTF) { - double sx = e.stdDevs.get(0, 0); - double sy = e.stdDevs.get(1, 0); - double sth = e.stdDevs.get(2, 0); + // variance = std^2, clamp away from 0 + final double vx = Math.max(sx * sx, kMinVariance); + final double vy = Math.max(sy * sy, kMinVariance); + final double vth = Math.max(sth * sth, kMinVariance); - double vx = sx * sx; - double vy = sy * sy; - double vth = sth * sth; + final double wx = 1.0 / vx; + final double wy = 1.0 / vy; + final double wth = 1.0 / vth; - double wx = 1.0 / vx; - double wy = 
1.0 / vy; - double wth = 1.0 / vth; + // If any weight goes NaN/Inf, skip this measurement rather than poisoning the fuse. + if (!Double.isFinite(wx) || !Double.isFinite(wy) || !Double.isFinite(wth)) { + continue; + } sumWx += wx; sumWy += wy; sumWth += wth; - sumX += e.pose.getX() * wx; - sumY += e.pose.getY() * wy; + sumX += p.getX() * wx; + sumY += p.getY() * wy; - sumCos += e.pose.getRotation().getCos() * wth; - sumSin += e.pose.getRotation().getSin() * wth; + final Rotation2d rot = p.getRotation(); + sumCos += rot.getCos() * wth; + sumSin += rot.getSin() * wth; } - Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + // If everything got skipped, fail closed. + if (sumWx <= 0.0 || sumWy <= 0.0 || sumWth <= 0.0) return null; + + final Translation2d fusedTranslation = new Translation2d(sumX / sumWx, sumY / sumWy); + + // Rotation2d(cos, sin) will normalize internally; if both are ~0, fall back to zero. + final Rotation2d fusedRotation = + (Math.abs(sumCos) < 1e-12 && Math.abs(sumSin) < 1e-12) + ? 
Rotation2d.kZero + : new Rotation2d(sumCos, sumSin); + + final Pose2d fusedPose = new Pose2d(fusedTranslation, fusedRotation); - Matrix fusedStd = + final Matrix fusedStdDevs = VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); - return new TimedEstimate(fused, tF, fusedStd); + return new RBSIPose(fusedPose, tF, fusedStdDevs); } /** Smoothing buffer ***************************************************** */ - private void pushFused(TimedEstimate fused) { + private void pushFused(RBSIPose fused) { fusedBuffer.addLast(fused); while (fusedBuffer.size() > smoothMaxSize) { @@ -443,20 +468,22 @@ private void pushFused(TimedEstimate fused) { } // Trim by time window relative to newest - while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + while (!fusedBuffer.isEmpty() + && fused.timestampSeconds() - fusedBuffer.peekFirst().timestampSeconds() + > smoothWindowSec) { fusedBuffer.removeFirst(); } } - private TimedEstimate smoothAtTime(double tF) { + private RBSIPose smoothAtTime(double tF) { if (fusedBuffer.isEmpty()) return null; if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); - final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { - Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) continue; - aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); diff --git a/src/main/java/frc/robot/util/RBSIPose.java b/src/main/java/frc/robot/util/RBSIPose.java new file mode 100644 index 00000000..4a4df2fc --- /dev/null +++ b/src/main/java/frc/robot/util/RBSIPose.java @@ -0,0 +1,21 @@ +// Copyright (c) 2024-2026 Az-FIRST +// http://github.com/AZ-First +// +// This program is free 
software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.util; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; + +public record RBSIPose(Pose2d pose, double timestampSeconds, Matrix stdDevs) {} From e4b0e3e1f8f4dfaa17cb7bdbf165651eaeeb392b Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 9 Feb 2026 10:08:40 -0700 Subject: [PATCH 6/8] Add extra logging for when the vision pose doesn't work --- .../frc/robot/subsystems/vision/Vision.java | 45 ++++++++++++------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index e37b6a72..c7849e31 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -82,7 +82,8 @@ public interface PoseMeasurementConsumer { private volatile double yawGateLookbackSec = 0.30; private volatile double yawGateLimitRadPerSec = 5.0; - private static final double kMinVariance = 1e-12; // prevents divide-by-zero explosions + // Variance minimum for fusing poses to prevent divide-by-zero explosions + private static final double kMinVariance = 1e-12; /** Constructor */ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { @@ -109,43 +110,52 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... 
io) { @Override public void rbsiPeriodic() { + // Pose reset logic and logging long epoch = drive.getPoseResetEpoch(); if (epoch != lastSeenPoseResetEpoch) { lastSeenPoseResetEpoch = epoch; - resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + resetPoseGate(drive.getLastPoseResetTimestamp()); Logger.recordOutput("Vision/PoseGateResetFromDrive", true); } else { Logger.recordOutput("Vision/PoseGateResetFromDrive", false); } - // Update/log camera inputs + // Update & log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); } - // Pick one best accepted estimate per camera for this loop + // Pick the one best accepted estimate per camera for this loop final ArrayList perCamAccepted = new ArrayList<>(io.length); + // Loop over cameras for (int cam = 0; cam < io.length; cam++) { + + // Instantiate variables for this camera int seen = 0; int accepted = 0; int rejected = 0; - RBSIPose best = null; double bestTrustScale = Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; + // Loop over observations for this camera this loop for (var obs : inputs[cam].poseObservations) { + + // Increment seen++; + // Check the gating criteria; move on if bad GateResult gate = passesHardGatesAndYawGate(cam, obs); + Logger.recordOutput("Vision/Camera" + cam + "/GateFail", gate.reason); if (!gate.accepted) { rejected++; continue; } + // Build a pose estimate; move on if bad BuiltEstimate built = buildEstimate(cam, obs); if (built == null) { rejected++; @@ -261,9 +271,11 @@ public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limi /** Gating + Scoring ***************************************************** */ private static final class GateResult { final boolean accepted; + final String reason; - GateResult(boolean accepted) { + GateResult(boolean accepted, String reason) { this.accepted = accepted; + this.reason = reason; } } @@ -271,36 +283,37 @@ private 
GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o final double ts = obs.timestamp(); // Monotonic per-camera time - if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false, "monotonic time"); // Reject anything older than last pose reset - if (ts < lastPoseResetTimestamp) return new GateResult(false); + if (ts < lastPoseResetTimestamp) return new GateResult(false, "older than pose reset"); // Must have tags - if (obs.tagCount() <= 0) return new GateResult(false); + if (obs.tagCount() <= 0) return new GateResult(false, "no tags"); // Single-tag ambiguity gate - if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) + return new GateResult(false, "highly ambiguous"); // Z sanity - if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false, "z not sane"); // Field bounds Pose3d p = obs.pose(); if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) - return new GateResult(false); + return new GateResult(false, "out of bounds field X"); if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) - return new GateResult(false); + return new GateResult(false, "out of bounds field Y"); - // Optional 254-style yaw gate: only meaningful for single-tag + // Optional yaw gate: only meaningful for single-tag if (enableSingleTagYawGate && obs.tagCount() == 1) { OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { - return new GateResult(false); + return new GateResult(false, "YAW gate failed"); } } - return new GateResult(true); + return new GateResult(true, ""); } private static final class BuiltEstimate { From 
2b8c50e2979b173ebe7c316b05731ff8d03044b4 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 9 Feb 2026 16:33:50 -0700 Subject: [PATCH 7/8] Call the pose estimator "TimedPose" --- .../frc/robot/subsystems/drive/Drive.java | 4 +- .../frc/robot/subsystems/vision/Vision.java | 44 +++++++++---------- .../util/{RBSIPose.java => TimedPose.java} | 2 +- 3 files changed, 25 insertions(+), 25 deletions(-) rename src/main/java/frc/robot/util/{RBSIPose.java => TimedPose.java} (89%) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 041a0e61..71b9e378 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -51,8 +51,8 @@ import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; -import frc.robot.util.RBSIPose; import frc.robot.util.RBSISubsystem; +import frc.robot.util.TimedPose; import java.util.Optional; import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; @@ -743,7 +743,7 @@ public void zeroHeading() { } /** Adds a vision measurement safely into the PoseEstimator. 
*/ - public void addVisionMeasurement(RBSIPose rbsiPose) { + public void addVisionMeasurement(TimedPose rbsiPose) { odometryLock.lock(); try { m_PoseEstimator.addVisionMeasurement( diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index c7849e31..86c10558 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -32,7 +32,7 @@ import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; -import frc.robot.util.RBSIPose; +import frc.robot.util.TimedPose; import frc.robot.util.VirtualSubsystem; import java.util.ArrayDeque; import java.util.ArrayList; @@ -48,7 +48,7 @@ public class Vision extends VirtualSubsystem { /** Vision Consumer definition */ @FunctionalInterface public interface PoseMeasurementConsumer { - void accept(RBSIPose measurement); + void accept(TimedPose measurement); } private final PoseMeasurementConsumer consumer; @@ -65,7 +65,7 @@ public interface PoseMeasurementConsumer { private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; // Smoothing buffer (recent fused estimates) - private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); private final double smoothWindowSec = 0.25; private final int smoothMaxSize = 12; @@ -127,7 +127,7 @@ public void rbsiPeriodic() { } // Pick the one best accepted estimate per camera for this loop - final ArrayList perCamAccepted = new ArrayList<>(io.length); + final ArrayList perCamAccepted = new ArrayList<>(io.length); // Loop over cameras for (int cam = 0; cam < io.length; cam++) { @@ -136,7 +136,7 @@ public void rbsiPeriodic() { int seen = 0; int accepted = 0; int rejected = 0; - RBSIPose best = null; + TimedPose best = null; double bestTrustScale = Double.NaN; int bestTrustedCount = 0; int bestTagCount = 0; @@ -162,7 
+162,7 @@ public void rbsiPeriodic() { continue; } - RBSIPose est = built.estimate; + TimedPose est = built.estimate; if (best == null || isBetter(est, best)) { best = est; bestTrustScale = built.trustScale; @@ -197,12 +197,12 @@ public void rbsiPeriodic() { if (!Double.isFinite(tF)) return; // Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse - RBSIPose fused = fuseAtTime(perCamAccepted, tF); + TimedPose fused = fuseAtTime(perCamAccepted, tF); if (fused == null) return; // Smooth by fusing recent fused estimates (also aligned to tF) pushFused(fused); - RBSIPose smoothed = smoothAtTime(tF); + TimedPose smoothed = smoothAtTime(tF); if (smoothed == null) return; // Inject the pose -- commented out for now... @@ -317,11 +317,11 @@ private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation o } private static final class BuiltEstimate { - final RBSIPose estimate; + final TimedPose estimate; final double trustScale; final int trustedCount; - BuiltEstimate(RBSIPose estimate, double trustScale, int trustedCount) { + BuiltEstimate(TimedPose estimate, double trustScale, int trustedCount) { this.estimate = estimate; this.trustScale = trustScale; this.trustedCount = trustedCount; @@ -369,7 +369,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); return new BuiltEstimate( - new RBSIPose( + new TimedPose( obs.pose().toPose2d(), obs.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), @@ -377,7 +377,7 @@ private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { trustedCount); } - private boolean isBetter(RBSIPose a, RBSIPose b) { + private boolean isBetter(TimedPose a, TimedPose b) { // Lower trace of stddev vector (x+y+theta) is better double ta = a.stdDevs().get(0, 0) + a.stdDevs().get(1, 0) + a.stdDevs().get(2, 0); double tb = b.stdDevs().get(0, 0) + b.stdDevs().get(1, 0) + 
b.stdDevs().get(2, 0); @@ -385,12 +385,12 @@ private boolean isBetter(RBSIPose a, RBSIPose b) { } /** Time alignment & fusion ********************************************** */ - private RBSIPose fuseAtTime(ArrayList estimates, double tF) { - final ArrayList aligned = new ArrayList<>(estimates.size()); + private TimedPose fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); for (var e : estimates) { Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) return null; - aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); } return inverseVarianceFuse(aligned, tF); } @@ -410,7 +410,7 @@ private Pose2d timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { return visionPoseAtTs.transformBy(ts_T_tf); } - private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + private TimedPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) { if (alignedAtTF == null || alignedAtTF.isEmpty()) return null; if (alignedAtTF.size() == 1) return alignedAtTF.get(0); @@ -419,7 +419,7 @@ private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) double sumCos = 0.0, sumSin = 0.0; for (int i = 0; i < alignedAtTF.size(); i++) { - final RBSIPose e = alignedAtTF.get(i); + final TimedPose e = alignedAtTF.get(i); final Pose2d p = e.pose(); final Matrix s = e.stdDevs(); @@ -469,11 +469,11 @@ private RBSIPose inverseVarianceFuse(ArrayList alignedAtTF, double tF) final Matrix fusedStdDevs = VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); - return new RBSIPose(fusedPose, tF, fusedStdDevs); + return new TimedPose(fusedPose, tF, fusedStdDevs); } /** Smoothing buffer ***************************************************** */ - private void pushFused(RBSIPose fused) { + private void pushFused(TimedPose fused) { fusedBuffer.addLast(fused); while (fusedBuffer.size() > 
smoothMaxSize) { @@ -488,15 +488,15 @@ private void pushFused(RBSIPose fused) { } } - private RBSIPose smoothAtTime(double tF) { + private TimedPose smoothAtTime(double tF) { if (fusedBuffer.isEmpty()) return null; if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); - final ArrayList<RBSIPose> aligned = new ArrayList<>(fusedBuffer.size()); + final ArrayList<TimedPose> aligned = new ArrayList<>(fusedBuffer.size()); for (var e : fusedBuffer) { Pose2d alignedPose = timeAlignPose(e.pose(), e.timestampSeconds(), tF); if (alignedPose == null) continue; - aligned.add(new RBSIPose(alignedPose, tF, e.stdDevs())); + aligned.add(new TimedPose(alignedPose, tF, e.stdDevs())); } if (aligned.isEmpty()) return fusedBuffer.peekLast(); diff --git a/src/main/java/frc/robot/util/RBSIPose.java b/src/main/java/frc/robot/util/TimedPose.java similarity index 89% rename from src/main/java/frc/robot/util/RBSIPose.java rename to src/main/java/frc/robot/util/TimedPose.java index 4a4df2fc..440cd315 100644 --- a/src/main/java/frc/robot/util/RBSIPose.java +++ b/src/main/java/frc/robot/util/TimedPose.java @@ -18,4 +18,4 @@ import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -public record RBSIPose(Pose2d pose, double timestampSeconds, Matrix<N3, N1> stdDevs) {} +public record TimedPose(Pose2d pose, double timestampSeconds, Matrix<N3, N1> stdDevs) {} From 3bf64b5b63d0e47979c647107add4bdc0267cccf Mon Sep 17 00:00:00 2001 From: "Timothy P.
Ellsworth Bowers" Date: Tue, 10 Feb 2026 18:07:07 -0700 Subject: [PATCH 8/8] Move the odometry queue draining into a VirtualSubsystem --- src/main/java/frc/robot/RobotContainer.java | 14 +- .../accelerometer/Accelerometer.java | 6 + .../frc/robot/subsystems/drive/Drive.java | 226 +++++-------- .../robot/subsystems/drive/DriveOdometry.java | 297 ++++++++++++++++++ .../java/frc/robot/subsystems/imu/Imu.java | 7 + .../frc/robot/subsystems/vision/Vision.java | 21 +- .../java/frc/robot/util/VirtualSubsystem.java | 22 ++ 7 files changed, 438 insertions(+), 155 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/drive/DriveOdometry.java diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 521e09d5..8e4fdd9a 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -47,6 +47,7 @@ import frc.robot.commands.DriveCommands; import frc.robot.subsystems.accelerometer.Accelerometer; import frc.robot.subsystems.drive.Drive; +import frc.robot.subsystems.drive.DriveOdometry; import frc.robot.subsystems.drive.SwerveConstants; import frc.robot.subsystems.flywheel_example.Flywheel; import frc.robot.subsystems.flywheel_example.FlywheelIO; @@ -107,6 +108,9 @@ public class RobotContainer { private final Imu m_imu; private final Vision m_vision; + @SuppressWarnings("unused") + private final DriveOdometry m_driveOdometry; + @SuppressWarnings("unused") private final Accelerometer m_accel; @@ -156,10 +160,12 @@ public RobotContainer() { m_imu = new Imu(SwerveConstants.kImu.factory.get()); m_drivebase = new Drive(m_imu); - m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note m_vision = new Vision( m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_flywheel = new Flywheel(new FlywheelIOSim()); // new 
Flywheel(new FlywheelIOTalonFX()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -169,6 +175,9 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim()); m_drivebase = new Drive(m_imu); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note + m_flywheel = new Flywheel(new FlywheelIOSim()); // ---------------- Vision IOs (robot code) ---------------- @@ -208,6 +217,9 @@ public RobotContainer() { RBSICANBusRegistry.initSim(CANBuses.RIO, CANBuses.DRIVE); m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); + m_driveOdometry = + new DriveOdometry(m_drivebase, m_imu, m_drivebase.getModules()); // see note + m_flywheel = new Flywheel(new FlywheelIO() {}); m_vision = new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); diff --git a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java index fcb7dd6c..0b5ff652 100644 --- a/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java +++ b/src/main/java/frc/robot/subsystems/accelerometer/Accelerometer.java @@ -55,6 +55,12 @@ public Accelerometer(Imu imu) { this.rio = new RioAccelIORoboRIO(200.0); // 200 Hz is a good start } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return +10; + } + @Override public void rbsiPeriodic() { final boolean doProfile = (++profileCount >= PROFILE_EVERY_N); diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 71b9e378..fda4cde7 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -253,156 +253,20 @@ public Drive(Imu imu) { /** Periodic function that is called each robot cycle by the command scheduler */ @Override public void rbsiPeriodic() { - odometryLock.lock(); - try { - // Ensure IMU inputs are fresh 
for this cycle - final var imuInputs = imu.getInputs(); - - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } - - // Drain per-module odometry queues for this cycle - for (var module : modules) { - module.periodic(); - } - - // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- - if (Constants.getMode() != Mode.SIM) { - final double[] ts = modules[0].getOdometryTimestamps(); - final int n = (ts == null) ? 0 : ts.length; - - // Cache per-module histories ONCE (avoid repeated getters in the loop) - final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; - for (int m = 0; m < 4; m++) { - modHist[m] = modules[m].getOdometryPositions(); - } - - // Determine yaw queue availability - final boolean hasYawQueue = - imuInputs.connected - && imuInputs.odometryYawTimestamps != null - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawTimestamps.length - == imuInputs.odometryYawPositionsRad.length - && imuInputs.odometryYawTimestamps.length > 0; - - final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; - final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; - - // If we have no module samples, still keep yaw buffers “alive” for gating callers - if (n == 0) { - final double now = Timer.getFPGATimestamp(); - yawBuffer.addSample(now, imuInputs.yawPositionRad); - yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - - gyroDisconnectedAlert.set(!imuInputs.connected); - return; - } - - // Decide whether yaw queue is index-aligned with module[0] timestamps. 
- // We only trust index alignment if BOTH: - // - yaw has at least n samples - // - yawTs[i] ~= ts[i] for i in range (tight epsilon) - boolean yawIndexAligned = false; - if (hasYawQueue && yawTs.length >= n) { - yawIndexAligned = true; - final double eps = 1e-3; // 1 ms - for (int i = 0; i < n; i++) { - if (Math.abs(yawTs[i] - ts[i]) > eps) { - yawIndexAligned = false; - break; - } - } - } - - // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. - if (hasYawQueue && !yawIndexAligned) { - for (int k = 0; k < yawTs.length; k++) { - yawBuffer.addSample(yawTs[k], yawPos[k]); - if (k > 0) { - final double dt = yawTs[k] - yawTs[k - 1]; - if (dt > 1e-6) { - yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); - } - } - } - } - - // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) - if (!hasYawQueue) { - final double now = Timer.getFPGATimestamp(); - yawBuffer.addSample(now, imuInputs.yawPositionRad); - yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); - } - - // Replay each odometry sample - for (int i = 0; i < n; i++) { - final double t = ts[i]; - - // Build module positions at this sample index (clamp defensively) - for (int m = 0; m < 4; m++) { - final SwerveModulePosition[] hist = modHist[m]; - if (hist == null || hist.length == 0) { - odomPositions[m] = modules[m].getPosition(); - } else if (i < hist.length) { - odomPositions[m] = hist[i]; - } else { - odomPositions[m] = hist[hist.length - 1]; - } - } - - // Determine yaw at this timestamp - double yawRad = imuInputs.yawPositionRad; // fallback - - if (hasYawQueue) { - if (yawIndexAligned) { - yawRad = yawPos[i]; - - // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) - yawBuffer.addSample(t, yawRad); - if (i > 0) { - final double dt = yawTs[i] - yawTs[i - 1]; - if (dt > 1e-6) { - yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); - } - } - } else { - // yawBuffer was pre-filled above; 
interpolate here - yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - } - } - - // Feed estimator at this historical timestamp - m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); - - // Maintain pose history in SAME timebase as estimator - poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); - } - - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - gyroDisconnectedAlert.set(!imuInputs.connected); - return; - - } else { + // NO odometryLock needed unless you touch poseEstimator directly here. + // Keep this focused on control / setpoints / characterization. - // SIMULATION: Keep sim pose buffer time-aligned, too - double now = Timer.getFPGATimestamp(); - poseBuffer.addSample(now, simPhysics.getPose()); - yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); - yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); - - Logger.recordOutput("Drive/Pose", simPhysics.getPose()); - gyroDisconnectedAlert.set(false); - } - } finally { - odometryLock.unlock(); + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } + + // If you need module refresh for control, you can either: + // A) leave it in DriveOdometry (recommended) and here just use cached inputs, OR + // B) keep a lightweight "module.controlPeriodic()" that doesn't drain queues. + // + // For now: do nothing here re: odometry. } /** @@ -559,6 +423,9 @@ public Command sysIdDynamic(SysIdRoutine.Direction direction) { } /** Getter Functions ***************************************************** */ + public Module[] getModules() { + return modules; + } /** Returns the module states (turn angles and drive velocities) for all of the modules. 
*/ @AutoLogOutput(key = "SwerveStates/Measured") @@ -862,4 +729,67 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } + + // --- Helpers used by DriveOdometry (package-private) --- + + Pose2d poseEstimatorGetPose() { + return m_PoseEstimator.getEstimatedPosition(); + } + + void poseEstimatorUpdateWithTime(double t, Rotation2d yaw, SwerveModulePosition[] positions) { + m_PoseEstimator.updateWithTime(t, yaw, positions); + } + + void poseBufferAddSample(double t, Pose2d pose) { + poseBuffer.addSample(t, pose); + } + + // Yaw buffer helpers (assuming you already have yawBuffer + yawRateBuffer) + double yawBufferSampleOr(double t, double fallbackYawRad) { + return yawBuffer.getSample(t).orElse(fallbackYawRad); + } + + void yawBuffersAddSample(double t, double yawRad, double yawRateRadPerSec) { + yawBuffer.addSample(t, yawRad); + yawRateBuffer.addSample(t, yawRateRadPerSec); + } + + void yawBuffersFillFromQueue(double[] yawTs, double[] yawPosRad) { + for (int k = 0; k < yawTs.length; k++) { + yawBuffer.addSample(yawTs[k], yawPosRad[k]); + if (k > 0) { + double dt = yawTs[k] - yawTs[k - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yawTs[k], (yawPosRad[k] - yawPosRad[k - 1]) / dt); + } + } + } + } + + void yawBuffersAddSampleIndexAligned(double t, double[] yawTs, double[] yawPos, int i) { + yawBuffer.addSample(t, yawPos[i]); + if (i > 0) { + double dt = yawTs[i] - yawTs[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + } + } + } + + void setGyroDisconnectedAlert(boolean disconnected) { + gyroDisconnectedAlert.set(disconnected); + } + + // Drive.java + public Pose2d getSimPose() { + return simPhysics.getPose(); + } + + public double getSimYawRad() { + return simPhysics.getYaw().getRadians(); + } + + public double getSimYawRateRadPerSec() { + return simPhysics.getOmegaRadPerSec(); + } } diff --git 
a/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java new file mode 100644 index 00000000..494e0ab2 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/drive/DriveOdometry.java @@ -0,0 +1,297 @@ +package frc.robot.subsystems.drive; + +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.kinematics.SwerveModulePosition; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.Constants; +import frc.robot.subsystems.imu.Imu; +import frc.robot.util.RBSIEnum.Mode; +import frc.robot.util.VirtualSubsystem; +import org.littletonrobotics.junction.Logger; + +public final class DriveOdometry extends VirtualSubsystem { + private final Drive drive; + private final Imu imu; + private final Module[] modules; + + // Scratch (no per-loop allocations) + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; + + public DriveOdometry(Drive drive, Imu imu, Module[] modules) { + this.drive = drive; + this.imu = imu; + this.modules = modules; + } + + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -20; + } + + @Override + public void rbsiPeriodic() { + Drive.odometryLock.lock(); + try { + final var imuInputs = imu.getInputs(); + + // Drain per-module odometry queues ONCE per loop (this also refreshes motor signals) + for (var module : modules) { + module.periodic(); // if you later split, this becomes module.updateInputs() + } + + if (Constants.getMode() == Mode.SIM) { + final double now = Timer.getFPGATimestamp(); + drive.poseBufferAddSample(now, drive.getSimPose()); + drive.yawBuffersAddSample(now, drive.getSimYawRad(), drive.getSimYawRateRadPerSec()); + Logger.recordOutput("Drive/Pose", drive.getSimPose()); + return; + } + + // Canonical timestamp queue from module[0] + final double[] ts = modules[0].getOdometryTimestamps(); + final int n = (ts == null) ? 
0 : ts.length; + + // Always keep yaw buffers “alive” even if no samples + if (n == 0) { + final double now = Timer.getFPGATimestamp(); + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + drive.setGyroDisconnectedAlert(!imuInputs.connected); + return; + } + + // Cache module histories once + final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; + for (int m = 0; m < 4; m++) { + modHist[m] = modules[m].getOdometryPositions(); + } + + // Yaw queue availability + final boolean hasYawQueue = + imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0; + + final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // Determine index alignment (cheap + deterministic) + boolean yawIndexAligned = false; + if (hasYawQueue && yawTs.length >= n) { + yawIndexAligned = true; + final double eps = 1e-3; // 1ms + for (int i = 0; i < n; i++) { + if (Math.abs(yawTs[i] - ts[i]) > eps) { + yawIndexAligned = false; + break; + } + } + } + + // If yaw not aligned, pre-fill yaw buffers once and interpolate later + if (hasYawQueue && !yawIndexAligned) { + drive.yawBuffersFillFromQueue(yawTs, yawPos); + } else if (!hasYawQueue) { + // Single “now” sample once (not per replay) + final double now = Timer.getFPGATimestamp(); + drive.yawBuffersAddSample(now, imuInputs.yawPositionRad, imuInputs.yawRateRadPerSec); + } + + // Replay + for (int i = 0; i < n; i++) { + final double t = ts[i]; + + // Build module positions at sample i (clamp defensively) + for (int m = 0; m < 4; m++) { + final SwerveModulePosition[] hist = modHist[m]; + if (hist == null || hist.length == 0) { + odomPositions[m] = modules[m].getPosition(); + } else if (i < hist.length) { + 
odomPositions[m] = hist[i]; + } else { + odomPositions[m] = hist[hist.length - 1]; + } + } + + double yawRad = imuInputs.yawPositionRad; + if (hasYawQueue) { + if (yawIndexAligned) { + yawRad = yawPos[i]; + // Keep yaw buffers aligned to replay timeline (good for yaw-rate gate) + drive.yawBuffersAddSampleIndexAligned(t, yawTs, yawPos, i); + } else { + yawRad = drive.yawBufferSampleOr(t, imuInputs.yawPositionRad); + } + } + + drive.poseEstimatorUpdateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + drive.poseBufferAddSample(t, drive.poseEstimatorGetPose()); + } + + Logger.recordOutput("Drive/Pose", drive.poseEstimatorGetPose()); + drive.setGyroDisconnectedAlert(!imuInputs.connected); + + } finally { + Drive.odometryLock.unlock(); + } + } + + // /************************************************************************* */ + // /** Periodic function that is called each robot cycle by the command scheduler */ + // @Override + // public void rbsiPeriodic() { + // odometryLock.lock(); + // try { + // // Ensure IMU inputs are fresh for this cycle + // final var imuInputs = imu.getInputs(); + + // // Stop modules & log empty setpoint states if disabled + // if (DriverStation.isDisabled()) { + // for (var module : modules) { + // module.stop(); + // } + // Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + // Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + // } + + // // Drain per-module odometry queues for this cycle + // for (var module : modules) { + // module.periodic(); + // } + + // // -------- REAL ODOMETRY REPLAY (canonical timestamps from module[0]) -------- + // if (Constants.getMode() != Mode.SIM) { + // final double[] ts = modules[0].getOdometryTimestamps(); + // final int n = (ts == null) ? 
0 : ts.length; + + // // Cache per-module histories ONCE (avoid repeated getters in the loop) + // final SwerveModulePosition[][] modHist = new SwerveModulePosition[4][]; + // for (int m = 0; m < 4; m++) { + // modHist[m] = modules[m].getOdometryPositions(); + // } + + // // Determine yaw queue availability + // final boolean hasYawQueue = + // imuInputs.connected + // && imuInputs.odometryYawTimestamps != null + // && imuInputs.odometryYawPositionsRad != null + // && imuInputs.odometryYawTimestamps.length + // == imuInputs.odometryYawPositionsRad.length + // && imuInputs.odometryYawTimestamps.length > 0; + + // final double[] yawTs = hasYawQueue ? imuInputs.odometryYawTimestamps : null; + // final double[] yawPos = hasYawQueue ? imuInputs.odometryYawPositionsRad : null; + + // // If we have no module samples, still keep yaw buffers “alive” for gating callers + // if (n == 0) { + // final double now = Timer.getFPGATimestamp(); + // yawBuffer.addSample(now, imuInputs.yawPositionRad); + // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + + // gyroDisconnectedAlert.set(!imuInputs.connected); + // return; + // } + + // // Decide whether yaw queue is index-aligned with module[0] timestamps. + // // We only trust index alignment if BOTH: + // // - yaw has at least n samples + // // - yawTs[i] ~= ts[i] for i in range (tight epsilon) + // boolean yawIndexAligned = false; + // if (hasYawQueue && yawTs.length >= n) { + // yawIndexAligned = true; + // final double eps = 1e-3; // 1 ms + // for (int i = 0; i < n; i++) { + // if (Math.abs(yawTs[i] - ts[i]) > eps) { + // yawIndexAligned = false; + // break; + // } + // } + // } + + // // If yaw is NOT index-aligned but we have a yaw queue, build yaw/yawRate buffers ONCE. 
+ // if (hasYawQueue && !yawIndexAligned) { + // for (int k = 0; k < yawTs.length; k++) { + // yawBuffer.addSample(yawTs[k], yawPos[k]); + // if (k > 0) { + // final double dt = yawTs[k] - yawTs[k - 1]; + // if (dt > 1e-6) { + // yawRateBuffer.addSample(yawTs[k], (yawPos[k] - yawPos[k - 1]) / dt); + // } + // } + // } + // } + + // // If NO yaw queue, add a single “now” sample once (don’t do this per replay sample) + // if (!hasYawQueue) { + // final double now = Timer.getFPGATimestamp(); + // yawBuffer.addSample(now, imuInputs.yawPositionRad); + // yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + // } + + // // Replay each odometry sample + // for (int i = 0; i < n; i++) { + // final double t = ts[i]; + + // // Build module positions at this sample index (clamp defensively) + // for (int m = 0; m < 4; m++) { + // final SwerveModulePosition[] hist = modHist[m]; + // if (hist == null || hist.length == 0) { + // odomPositions[m] = modules[m].getPosition(); + // } else if (i < hist.length) { + // odomPositions[m] = hist[i]; + // } else { + // odomPositions[m] = hist[hist.length - 1]; + // } + // } + + // // Determine yaw at this timestamp + // double yawRad = imuInputs.yawPositionRad; // fallback + + // if (hasYawQueue) { + // if (yawIndexAligned) { + // yawRad = yawPos[i]; + + // // Keep yaw/yawRate buffers updated in odometry timebase (good for yaw-gate) + // yawBuffer.addSample(t, yawRad); + // if (i > 0) { + // final double dt = yawTs[i] - yawTs[i - 1]; + // if (dt > 1e-6) { + // yawRateBuffer.addSample(t, (yawPos[i] - yawPos[i - 1]) / dt); + // } + // } + // } else { + // // yawBuffer was pre-filled above; interpolate here + // yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); + // } + // } + + // // Feed estimator at this historical timestamp + // m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // // Maintain pose history in SAME timebase as estimator + // poseBuffer.addSample(t, 
m_PoseEstimator.getEstimatedPosition()); + // } + + // Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); + // gyroDisconnectedAlert.set(!imuInputs.connected); + // return; + + // } else { + + // // SIMULATION: Keep sim pose buffer time-aligned, too + // double now = Timer.getFPGATimestamp(); + // poseBuffer.addSample(now, simPhysics.getPose()); + // yawBuffer.addSample(now, simPhysics.getYaw().getRadians()); + // yawRateBuffer.addSample(now, simPhysics.getOmegaRadPerSec()); + + // Logger.recordOutput("Drive/Pose", simPhysics.getPose()); + // gyroDisconnectedAlert.set(false); + // } + // } finally { + // odometryLock.unlock(); + // } + // } + +} diff --git a/src/main/java/frc/robot/subsystems/imu/Imu.java b/src/main/java/frc/robot/subsystems/imu/Imu.java index f5f63152..c0a1ba6e 100644 --- a/src/main/java/frc/robot/subsystems/imu/Imu.java +++ b/src/main/java/frc/robot/subsystems/imu/Imu.java @@ -36,7 +36,14 @@ public Imu(ImuIO io) { this.io = io; } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -30; + } + // Periodic function to read inputs + @Override public void rbsiPeriodic() { io.updateInputs(inputs); } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 86c10558..0928d659 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -28,7 +28,7 @@ import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import frc.robot.Constants; +import frc.robot.Constants.Cameras; import frc.robot.FieldConstants; import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; @@ -45,20 +45,23 @@ public class Vision extends VirtualSubsystem { + // Declare the Vision IO + private final VisionIO[] io; + private final VisionIOInputsAutoLogged[] 
inputs; + /** Vision Consumer definition */ @FunctionalInterface public interface PoseMeasurementConsumer { void accept(TimedPose measurement); } + // Declare pose consumer, drivebase, and epoch reset private final PoseMeasurementConsumer consumer; private final Drive drive; private long lastSeenPoseResetEpoch = -1; - private final VisionIO[] io; - private final VisionIOInputsAutoLogged[] inputs; - - private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; + // Declare the camera configurations + private final Cameras.CameraConfig[] camConfigs = Cameras.ALL; // Per-camera monotonic and pose reset gates private final double[] lastAcceptedTsPerCam; @@ -106,6 +109,12 @@ public Vision(Drive drive, PoseMeasurementConsumer consumer, VisionIO... io) { } } + // Priority value for this virtual subsystem + @Override + protected int getPeriodPriority() { + return -10; + } + /** Periodic Function */ @Override public void rbsiPeriodic() { @@ -205,7 +214,7 @@ public void rbsiPeriodic() { TimedPose smoothed = smoothAtTime(tF); if (smoothed == null) return; - // Inject the pose -- commented out for now... 
+ // Inject the pose consumer.accept(smoothed); Logger.recordOutput("Vision/FusedPose", fused.pose()); diff --git a/src/main/java/frc/robot/util/VirtualSubsystem.java b/src/main/java/frc/robot/util/VirtualSubsystem.java index 2996aa33..4b1c57e0 100644 --- a/src/main/java/frc/robot/util/VirtualSubsystem.java +++ b/src/main/java/frc/robot/util/VirtualSubsystem.java @@ -10,6 +10,7 @@ package frc.robot.util; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import org.littletonrobotics.junction.Logger; @@ -18,14 +19,35 @@ */ public abstract class VirtualSubsystem { private static final List subsystems = new ArrayList<>(); + private static boolean needsSort = false; + private final String name = getClass().getSimpleName(); // Load all defined virtual subsystems into a list public VirtualSubsystem() { subsystems.add(this); + needsSort = true; // a new subsystem changed ordering + } + + /** + * Override to control ordering. Lower runs earlier. + * + *

Example: IMU inputs -30, Drive odometry -20, Vision -10, Coordinator 0. + */ + protected int getPeriodPriority() { + return 0; } public static void periodicAll() { + // Sort once (and again only if new subsystems are constructed) + if (needsSort) { + subsystems.sort( + Comparator.comparingInt(VirtualSubsystem::getPeriodPriority) + // deterministic tie-break to avoid "random" order when priorities match + .thenComparingInt(System::identityHashCode)); + needsSort = false; + } + // Call each virtual subsystem during robotPeriodic() for (VirtualSubsystem subsystem : subsystems) { subsystem.periodic();