diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 26f25253..0b39dcc5 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -416,6 +416,16 @@ public static final class AutoConstants { /** Vision Constants (Assuming PhotonVision) ***************************** */ public static class VisionConstants { + public static final java.util.Set kTrustedTags = + java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags + + // Noise scaling factors (lower = more trusted) + public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight + public static final double kUntrustedTagStdDevScale = 1.3; // 30% less weight + + // Optional: if true, reject observations that contain no trusted tags + public static final boolean kRequireTrustedTag = false; + // AprilTag Identification Constants public static final double kAmbiguityThreshold = 0.4; public static final double kTargetLogTimeSecs = 0.1; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index 6d58c6d0..b9668ae3 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -19,6 +19,7 @@ import com.revrobotics.util.StatusLogger; import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.DriverStation.Alliance; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; @@ -47,6 +48,7 @@ public class Robot extends LoggedRobot { private Command m_autoCommandPathPlanner; private RobotContainer m_robotContainer; private Timer m_disabledTimer; + private static boolean isBlueAlliance = false; // Define simulation fields here private VisionSystemSim visionSim; @@ -135,9 +137,13 @@ public Robot() { /** TESTING VERSION OF ROBOTPERIODIC FOR OVERRUN SOURCES */ @Override public void robotPeriodic() { + + isBlueAlliance = 
DriverStation.getAlliance().orElse(Alliance.Blue) == Alliance.Blue; + final long t0 = System.nanoTime(); if (isReal()) { + // Switch thread to high priority to improve loop timing Threads.setCurrentThreadPriority(true, 99); } final long t1 = System.nanoTime(); @@ -186,6 +192,7 @@ public void autonomousInit() { CommandScheduler.getInstance().cancelAll(); m_robotContainer.getDrivebase().setMotorBrake(true); m_robotContainer.getDrivebase().resetHeadingController(); + m_robotContainer.getVision().resetPoseGate(Timer.getFPGATimestamp()); // TODO: Make sure Gyro inits here with whatever is in the path planning thingie switch (Constants.getAutoType()) { @@ -315,4 +322,13 @@ public void simulationPeriodic() { // Update sim each sim tick visionSim.update(m_robotContainer.getDrivebase().getPose()); } + + // Helper method to simplify checking if the robot is blue or red alliance + public static boolean isBlue() { + return isBlueAlliance; + } + + public static boolean isRed() { + return !isBlue(); + } } diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index c38f5781..9c5a167a 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -34,7 +34,6 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; -import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -75,6 +74,9 @@ /** This is the location for defining robot hardware, commands, and controller button bindings. 
*/ public class RobotContainer { + private static final boolean USE_MAPLESIM = true; + public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); + /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -128,36 +130,7 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); - // Vision Factories - private VisionIO[] buildVisionIOsReal(Drive drive) { - return switch (Constants.getVisionType()) { - case PHOTON -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) - .toArray(VisionIO[]::new); - - case LIMELIGHT -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) - .toArray(VisionIO[]::new); - - case NONE -> new VisionIO[] {new VisionIO() {}}; - }; - } - - private static VisionIO[] buildVisionIOsSim(Drive drive) { - var cams = Constants.Cameras.ALL; - VisionIO[] ios = new VisionIO[cams.length]; - for (int i = 0; i < cams.length; i++) { - var cfg = cams[i]; - ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); - } - return ios; - } - - private VisionIO[] buildVisionIOsReplay() { - return new VisionIO[] {new VisionIO() {}}; - } + public static RobotContainer instance; /** * Constructor for the Robot Container. 
This container holds subsystems, opertator interface @@ -179,7 +152,9 @@ public RobotContainer() { m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -193,11 +168,9 @@ public RobotContainer() { // ---------------- Vision IOs (robot code) ---------------- var cams = frc.robot.Constants.Cameras.ALL; - - // If you keep Vision expecting exactly two cameras: - VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); - m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); - + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); m_accel = new Accelerometer(m_imu); // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- @@ -231,7 +204,8 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); + m_vision = + new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -493,6 +467,11 @@ public Drive getDrivebase() { return m_drivebase; } + /** Vision getter method for use with Robot.java */ + public Vision getVision() { + return m_vision; + } + /** * Set up the SysID routines from AdvantageKit * @@ -536,6 +515,40 @@ private void definesysIdRoutines() { } } + // Vision Factories + // Vision Factories (REAL) + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new 
VisionIOPhotonVision(c.name(), c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {}; // recommended: no cameras + }; + } + + // Vision Factories (SIM) + private VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + // Vision Factories (REPLAY) + private VisionIO[] buildVisionIOsReplay() { + return new VisionIO[] {}; // simplest: Vision does nothing during replay + } + /** * Example Choreo auto command * diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index fada2245..46788c2b 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -48,10 +48,13 @@ import frc.robot.Constants.DrivebaseConstants; import frc.robot.Constants.RobotConstants; import frc.robot.subsystems.imu.Imu; +import frc.robot.util.ConcurrentTimeInterpolatableBuffer; import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; import frc.robot.util.RBSISubsystem; +import java.util.Optional; +import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.littletonrobotics.junction.AutoLogOutput; @@ -59,6 +62,16 @@ public class Drive extends RBSISubsystem { + // --- buffers for time-aligned pose + yaw + yawRate history --- + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + + private final 
ConcurrentTimeInterpolatableBuffer yawBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + static final Lock odometryLock = new ReentrantLock(); private final Imu imu; private final Module[] modules = new Module[4]; // FL, FR, BL, BR @@ -74,6 +87,7 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -218,64 +232,98 @@ public Drive(Imu imu) { @Override public void rbsiPeriodic() { odometryLock.lock(); + try { + // Get the IMU inputs + final var imuInputs = imu.getInputs(); + + // Stop modules & log empty setpoint states if disabled + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + } - // Get the IMU inputs - final var imuInputs = imu.getInputs(); // primitive inputs + // Module periodic updates, which drains queues this cycle + for (var module : modules) module.periodic(); - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } + // Feed historical samples into odometry if REAL robot + if (Constants.getMode() != Mode.SIM) { - // Module periodic updates, which drains queues this cycle - for (var module : modules) { - module.periodic(); - } + // ---- Gather per-module 
histories ---- + final double[][] perModuleTs = new double[4][]; + final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; + int unionCap = 0; + + for (int m = 0; m < 4; m++) { + perModuleTs[m] = modules[m].getOdometryTimestamps(); + perModulePos[m] = modules[m].getOdometryPositions(); + unionCap += perModuleTs[m].length; + } + + if (unionCap == 0) { + gyroDisconnectedAlert.set(!imuInputs.connected); + return; + } - // Feed historical samples into odometry if REAL robot - if (Constants.getMode() != Mode.SIM) { - final double[] sampleTimestamps = modules[0].getOdometryTimestamps(); - final int sampleCount = sampleTimestamps.length; - - // Reuse arrays to reduce GC (you likely already have lastModulePositions as a field) - final SwerveModulePosition[] modulePositions = new SwerveModulePosition[4]; - final SwerveModulePosition[] moduleDeltas = new SwerveModulePosition[4]; - - for (int i = 0; i < sampleCount; i++) { - for (int moduleIndex = 0; moduleIndex < 4; moduleIndex++) { - modulePositions[moduleIndex] = modules[moduleIndex].getOdometryPositions()[i]; - moduleDeltas[moduleIndex] = - new SwerveModulePosition( - modulePositions[moduleIndex].distanceMeters - - lastModulePositions[moduleIndex].distanceMeters, - modulePositions[moduleIndex].angle); - lastModulePositions[moduleIndex] = modulePositions[moduleIndex]; + // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- + // These timestamps are authoritative for yaw interpolation and yaw-rate gating. 
+ if (imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0) { + + final double[] yts = imuInputs.odometryYawTimestamps; + final double[] yaws = imuInputs.odometryYawPositionsRad; + + for (int i = 0; i < yts.length; i++) { + yawBuffer.addSample(yts[i], yaws[i]); + if (i > 0) { + double dt = yts[i] - yts[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + } + } + } + } else { + // fallback: buffer "now" yaw only + double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); } - // Pick yaw sample if available; otherwise fall back to current yaw - final double yawRad = - (imuInputs.connected - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawPositionsRad.length > i) - ? 
imuInputs.odometryYawPositionsRad[i] - : imuInputs.yawPositionRad; + // ---- Build union timeline ---- + final double[] unionTs = new double[Math.max(unionCap, 1)]; + final double EPS = 1e-4; // 0.1 ms + final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); + + // ---- Replay at union times ---- + for (int i = 0; i < unionN; i++) { + final double t = unionTs[i]; + + // Interpolate each module to union time + for (int m = 0; m < 4; m++) { + SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); + odomPositions[m] = p; + } - // Boundary conversion: PoseEstimator requires Rotation2d - final Rotation2d yaw = Rotation2d.fromRadians(yawRad); + // Interpolate yaw at union time + final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - // Apply to pose estimator - m_PoseEstimator.updateWithTime(sampleTimestamps[i], yaw, modulePositions); + m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // keep buffer in the same timebase as estimator + poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); + } + + Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); } - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - } - odometryLock.unlock(); + gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + } finally { + odometryLock.unlock(); + } } /** Simulation Periodic Method */ @@ -327,6 +375,8 @@ public void simulationPeriodic() { m_PoseEstimator.addVisionMeasurement(visionPose, visionTimestamp, visionStdDevs); } + poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); + // 7) Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); @@ -463,6 +513,37 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } + /** Returns 
interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } + } + + /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + if (t1 < t0) return OptionalDouble.empty(); + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; + } + return any ? OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + /** Returns the current odometry rotation. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -550,6 +631,7 @@ public double getMaxAngularAccelRadPerSecPerSec() { /** Resets the current odometry pose. */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the gyro based on alliance color */ @@ -559,21 +641,33 @@ public void zeroHeadingForAlliance() { ? 
Rotation2d.kZero : Rotation2d.k180deg); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the heading */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); + } + + // ---- Pose reset gate (vision + anything latency-sensitive) ---- + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + public long getPoseResetEpoch() { + return poseResetEpoch; } - /** Adds a new timestamped vision measurement. */ - public void addVisionMeasurement( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs) { - m_PoseEstimator.addVisionMeasurement( - visionRobotPoseMeters, timestampSeconds, visionMeasurementStdDevs); + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; + } + + private void markPoseReset(double fpgaNow) { + lastPoseResetTimestamp = fpgaNow; + poseResetEpoch++; + Logger.recordOutput("Drive/PoseResetEpoch", poseResetEpoch); + Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ @@ -670,4 +764,69 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { stdDevs.set(2, 0, Math.toRadians(2)); // rotation standard deviation (radians) return stdDevs; } + + private static SwerveModulePosition interpolateModulePosition( + double[] ts, SwerveModulePosition[] samples, double t) { + + final int n = ts.length; + if (n == 0) return new SwerveModulePosition(); + + if (t <= ts[0]) return samples[0]; + if (t >= ts[n - 1]) return samples[n - 1]; + + int lo = 0, hi = n - 1; + while (lo < hi) { + int mid = (lo + hi) >>> 1; + if (ts[mid] < t) lo = mid + 1; + else hi = mid; + } + int i1 = lo; + int i0 = i1 - 1; + + double t0 = ts[i0]; + double t1 = ts[i1]; + if (t1 <= t0) return samples[i1]; + + double alpha = (t - t0) / (t1 - 
t0); + + double dist = + samples[i0].distanceMeters + + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; + + Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); + + return new SwerveModulePosition(dist, angle); + } + + private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { + int[] idx = new int[perModuleTs.length]; + int outN = 0; + + while (true) { + double minT = Double.POSITIVE_INFINITY; + boolean any = false; + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + if (idx[m] >= ts.length) continue; + any = true; + minT = Math.min(minT, ts[idx[m]]); + } + + if (!any) break; + + if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { + outUnion[outN++] = minT; + } + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { + idx[m]++; + } + } + } + + return outN; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 04e20417..908cf9c5 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,13 +1,19 @@ // Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First -// Copyright (c) 2024-2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage // -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.vision; @@ -18,183 +24,419 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.wpilibj.Alert; -import edu.wpi.first.wpilibj.Alert.AlertType; import frc.robot.Constants; import frc.robot.FieldConstants; +import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; import frc.robot.util.VirtualSubsystem; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Optional; +import java.util.OptionalDouble; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import org.littletonrobotics.junction.Logger; public class Vision extends VirtualSubsystem { + + @FunctionalInterface + public interface VisionConsumer { + void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + } + + private record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} + private final VisionConsumer consumer; + private final Drive drive; + private long lastSeenPoseResetEpoch = -1; + private final VisionIO[] io; private final VisionIOInputsAutoLogged[] inputs; - private final Alert[] disconnectedAlerts; - // Camera configs (names, transforms, stddev multipliers, sim props) - private final Constants.Cameras.CameraConfig[] camConfigs; + private final 
Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; - // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- - // Summary buffers - private final ArrayList allTagPoses = new ArrayList<>(32); - private final ArrayList allRobotPoses = new ArrayList<>(64); - private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // --- Per-camera monotonic gate --- + private final double[] lastAcceptedTsPerCam; - // Per-camera buffers (reused each camera) - private final ArrayList tagPoses = new ArrayList<>(16); - private final ArrayList robotPoses = new ArrayList<>(32); - private final ArrayList robotPosesAccepted = new ArrayList<>(32); - private final ArrayList robotPosesRejected = new ArrayList<>(32); + // --- Pose reset gate --- + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; - public Vision(VisionConsumer consumer, VisionIO... io) { + // --- Smoothing buffer (recent fused estimates) --- + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final double smoothWindowSec = 0.25; + private final int smoothMaxSize = 12; + + // --- Trusted tags configuration (swappable per event/field) --- + private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); + private volatile boolean requireTrustedTag = false; + + // Scale factors applied based on fraction of trusted tags in an observation + private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted + private volatile double untrustedTagStdDevScale = 1.40; // > 1 => less trusted + + // --- Optional 254-style yaw gate for single-tag --- + private volatile boolean enableSingleTagYawGate = true; + private volatile double yawGateLookbackSec = 0.30; + private volatile double yawGateLimitRadPerSec = 5.0; + + public Vision(Drive drive, VisionConsumer consumer, VisionIO... 
io) { + this.drive = drive; this.consumer = consumer; this.io = io; - this.camConfigs = Constants.Cameras.ALL; - - // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { inputs[i] = new VisionIOInputsAutoLogged(); } - // Initialize disconnected alerts - this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < io.length; i++) { - disconnectedAlerts[i] = - new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); - } + this.lastAcceptedTsPerCam = new double[io.length]; + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - // Log robot-to-camera transforms from the new camera config array - // (Only log as many as exist in BOTH configs and IOs) + // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); for (int i = 0; i < n; i++) { Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); } } - /** Returns the X angle to the best target, useful for simple servoing. */ - public Rotation2d getTargetX(int cameraIndex) { - return inputs[cameraIndex].latestTargetObservation.tx(); + // -------- Runtime configuration hooks -------- + + /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** Swap trusted tag set per event/field without redeploy. 
*/ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; } + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; + } + + // -------- Core periodic -------- + @Override public void rbsiPeriodic() { - // 1) Update inputs + process inputs first (keeps AK logs consistent) + + long epoch = drive.getPoseResetEpoch(); + if (epoch != lastSeenPoseResetEpoch) { + lastSeenPoseResetEpoch = epoch; + resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + Logger.recordOutput("Vision/PoseGateResetFromDrive", true); + } else { + Logger.recordOutput("Vision/PoseGateResetFromDrive", false); + } + + // 1) Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); - disconnectedAlerts[i].set(!inputs[i].connected); - } - - // 2) Clear summary buffers (reused) - allTagPoses.clear(); - allRobotPoses.clear(); - allRobotPosesAccepted.clear(); - allRobotPosesRejected.clear(); - - // 3) Process each camera - for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - // Clear per-camera buffers - tagPoses.clear(); - robotPoses.clear(); - robotPosesAccepted.clear(); - robotPosesRejected.clear(); - - // Add tag poses from ids - for (int tagId : inputs[cameraIndex].tagIds) { - var tagPose = FieldConstants.aprilTagLayout.getTagPose(tagId); - if (tagPose.isPresent()) { - tagPoses.add(tagPose.get()); - } - } + } - // Loop over pose observations - for (var observation : inputs[cameraIndex].poseObservations) { - // Reject rules - boolean 
rejectPose = - observation.tagCount() == 0 - || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) - || Math.abs(observation.pose().getZ()) > maxZError - || observation.pose().getX() < 0.0 - || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() - || observation.pose().getY() < 0.0 - || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - - // Log pose buckets - robotPoses.add(observation.pose()); - if (rejectPose) { - robotPosesRejected.add(observation.pose()); - } else { - robotPosesAccepted.add(observation.pose()); - } + // 2) Pick one best accepted estimate per camera for this loop + final ArrayList perCamAccepted = new ArrayList<>(io.length); + + for (int cam = 0; cam < io.length; cam++) { + int seen = 0; + int accepted = 0; + int rejected = 0; - if (rejectPose) { + TimedEstimate best = null; + double bestTrustScale = Double.NaN; + int bestTrustedCount = 0; + int bestTagCount = 0; + + for (var obs : inputs[cam].poseObservations) { + seen++; + + GateResult gate = passesHardGatesAndYawGate(cam, obs); + if (!gate.accepted) { + rejected++; continue; } - // Standard deviations - double stdDevFactor = - Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); - double linearStdDev = linearStdDevBaseline * stdDevFactor; - double angularStdDev = angularStdDevBaseline * stdDevFactor; - - if (observation.type() == PoseObservationType.MEGATAG_2) { - linearStdDev *= linearStdDevMegatag2Factor; - angularStdDev *= angularStdDevMegatag2Factor; + BuiltEstimate built = buildEstimate(cam, obs); + if (built == null) { + rejected++; + continue; } - // Apply per-camera multiplier from CameraConfig - if (cameraIndex < camConfigs.length) { - double k = camConfigs[cameraIndex].stdDevFactor(); - linearStdDev *= k; - angularStdDev *= k; + TimedEstimate est = built.estimate; + if (best == null || isBetter(est, best)) { + best = est; + bestTrustScale = built.trustScale; + bestTrustedCount = 
built.trustedCount; + bestTagCount = obs.tagCount(); } + } + + if (best != null) { + accepted++; + lastAcceptedTsPerCam[cam] = best.ts; + perCamAccepted.add(best); - consumer.accept( - observation.pose().toPose2d(), - observation.timestamp(), - VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } - // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPoses", robotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesAccepted", - robotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesRejected", - robotPosesRejected.toArray(new Pose3d[0])); - - // Aggregate summary - allTagPoses.addAll(tagPoses); - allRobotPoses.addAll(robotPoses); - allRobotPosesAccepted.addAll(robotPosesAccepted); - allRobotPosesRejected.addAll(robotPosesRejected); - } - - // 4) Summary logs - Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); - Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesAccepted", allRobotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesRejected", allRobotPosesRejected.toArray(new Pose3d[0])); + 
Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); + } + + if (perCamAccepted.isEmpty()) return; + + // 3) Fusion time is the newest timestamp among accepted per-camera samples + double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + if (!Double.isFinite(tF)) return; + + // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + if (fused == null) return; + + // 5) Smooth by fusing recent fused estimates (also aligned to tF) + pushFused(fused); + TimedEstimate smoothed = smoothAtTime(tF); + if (smoothed == null) return; + + // 6) Inject + consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + + Logger.recordOutput("Vision/FusedPose", fused.pose); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedTimestamp", tF); } - @FunctionalInterface - public static interface VisionConsumer { - void accept( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs); + // -------- Gating + scoring -------- + + private static final class GateResult { + final boolean accepted; + + GateResult(boolean accepted) { + this.accepted = accepted; + } + } + + private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { + final double ts = obs.timestamp(); + + // Monotonic per-camera time + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + + // Reject anything older than last pose reset + if (ts < lastPoseResetTimestamp) return new GateResult(false); + + // Must have tags + if (obs.tagCount() <= 0) return new GateResult(false); + + // Single-tag ambiguity gate + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + + // Z sanity + if 
(Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + + // Field bounds + Pose3d p = obs.pose(); + if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) + return new GateResult(false); + if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) + return new GateResult(false); + + // Optional 254-style yaw gate: only meaningful for single-tag + if (enableSingleTagYawGate && obs.tagCount() == 1) { + OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); + if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { + return new GateResult(false); + } + } + + return new GateResult(true); + } + + private static final class BuiltEstimate { + final TimedEstimate estimate; + final double trustScale; + final int trustedCount; + + BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + this.estimate = estimate; + this.trustScale = trustScale; + this.trustedCount = trustedCount; + } + } + + private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { + // Base uncertainty grows with distance^2 / tagCount (your 2486-style) + final double tagCount = Math.max(1, obs.tagCount()); + final double avgDist = Math.max(0.0, obs.averageTagDistance()); + final double distFactor = (avgDist * avgDist) / tagCount; + + final double camFactor = (cam < camConfigs.length) ? 
camConfigs[cam].stdDevFactor() : 1.0; + + double linearStdDev = linearStdDevBaseline * camFactor * distFactor; + double angularStdDev = angularStdDevBaseline * camFactor * distFactor; + + // MegaTag2 bonus if applicable + if (obs.type() == PoseObservationType.MEGATAG_2) { + linearStdDev *= linearStdDevMegatag2Factor; + angularStdDev *= angularStdDevMegatag2Factor; + } + + // Trusted tag blending + final Set kTrusted = trustedTags.get(); + int trustedCount = 0; + for (int id : obs.usedTagIds()) { + if (kTrusted.contains(id)) trustedCount++; + } + + if (requireTrustedTag && trustedCount == 0) { + return null; + } + + final int usedCount = obs.usedTagIds().size(); + final double fracTrusted = (usedCount == 0) ? 0.0 : ((double) trustedCount / usedCount); + + final double trustScale = + untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); + + linearStdDev *= trustScale; + angularStdDev *= trustScale; + + // Output logs for tuning + Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); + + return new BuiltEstimate( + new TimedEstimate( + obs.pose().toPose2d(), + obs.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), + trustScale, + trustedCount); + } + + private boolean isBetter(TimedEstimate a, TimedEstimate b) { + // Lower trace of stddev vector (x+y+theta) is better + double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); + double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + return ta < tb; + } + + // -------- Time alignment + fusion -------- + + private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); + for (var e : estimates) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) return null; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + return inverseVarianceFuse(aligned, tF); + } + + private Pose2d 
timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { + Optional odomAtTsOpt = drive.getPoseAtTime(ts); + Optional odomAtTFOpt = drive.getPoseAtTime(tF); + if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; + + Pose2d odomAtTs = odomAtTsOpt.get(); + Pose2d odomAtTF = odomAtTFOpt.get(); + + // Transform that takes odomAtTs -> odomAtTF + Transform2d ts_T_tf = odomAtTF.minus(odomAtTs); + + // Apply same motion to vision pose to bring it forward + return visionPoseAtTs.transformBy(ts_T_tf); + } + + private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF.isEmpty()) return null; + if (alignedAtTF.size() == 1) return alignedAtTF.get(0); + + double sumWx = 0, sumWy = 0, sumWth = 0; + double sumX = 0, sumY = 0; + double sumCos = 0, sumSin = 0; + + for (var e : alignedAtTF) { + double sx = e.stdDevs.get(0, 0); + double sy = e.stdDevs.get(1, 0); + double sth = e.stdDevs.get(2, 0); + + double vx = sx * sx; + double vy = sy * sy; + double vth = sth * sth; + + double wx = 1.0 / vx; + double wy = 1.0 / vy; + double wth = 1.0 / vth; + + sumWx += wx; + sumWy += wy; + sumWth += wth; + + sumX += e.pose.getX() * wx; + sumY += e.pose.getY() * wy; + + sumCos += e.pose.getRotation().getCos() * wth; + sumSin += e.pose.getRotation().getSin() * wth; + } + + Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + + Matrix fusedStd = + VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); + + return new TimedEstimate(fused, tF, fusedStd); + } + + // -------- Smoothing buffer -------- + + private void pushFused(TimedEstimate fused) { + fusedBuffer.addLast(fused); + + while (fusedBuffer.size() > smoothMaxSize) { + fusedBuffer.removeFirst(); + } + + // Trim by time window relative to newest + while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + fusedBuffer.removeFirst(); + } + } + + private TimedEstimate smoothAtTime(double tF) 
{ + if (fusedBuffer.isEmpty()) return null; + if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); + + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + for (var e : fusedBuffer) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) continue; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + + if (aligned.isEmpty()) return fusedBuffer.peekLast(); + return inverseVarianceFuse(aligned, tF); } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index c9ed8349..fad294d2 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -11,35 +11,44 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import java.util.Set; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { @AutoLog - public static class VisionIOInputs { + class VisionIOInputs { public boolean connected = false; + + /** Latest "servo to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + + /** Pose observations generated by the camera pipeline (each has its own timestamp) */ public PoseObservation[] poseObservations = new PoseObservation[0]; + + /** Union of tag IDs seen this loop (for logging/UI only) */ public int[] tagIds = new int[0]; } /** Represents the angle to a simple target, not used for pose estimation. */ - public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + record TargetObservation(Rotation2d tx, Rotation2d ty) {} /** Represents a robot pose sample used for pose estimation. 
*/
-  public static record PoseObservation(
-      double timestamp,
-      Pose3d pose,
-      double ambiguity,
+  record PoseObservation(
+      double timestamp, // camera capture time (seconds, same timebase as FPGA)
+      Pose3d pose, // field->robot pose
+      double ambiguity, // single-tag ambiguity if available
       int tagCount,
       double averageTagDistance,
-      PoseObservationType type) {}
+      PoseObservationType type,
+      Set<Integer> usedTagIds // immutable per observation
+      ) {}

-  public static enum PoseObservationType {
+  enum PoseObservationType {
     MEGATAG_1,
     MEGATAG_2,
     PHOTONVISION
   }

-  public default void updateInputs(VisionIOInputs inputs) {}
+  default void updateInputs(VisionIOInputs inputs) {}
 }
diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java
index a4b1eaf6..07c83c7b 100644
--- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java
+++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java
@@ -96,7 +96,10 @@ public void updateInputs(VisionIOInputs inputs) {
           rawSample.value[9],

           // Observation type
-          PoseObservationType.MEGATAG_1));
+          PoseObservationType.MEGATAG_1,
+
+          // Visible Tag IDs
+          tagIds));
     }
     for (var rawSample : megatag2Subscriber.readQueue()) {
       if (rawSample.value.length == 0) continue;
@@ -121,7 +124,10 @@ public void updateInputs(VisionIOInputs inputs) {
           rawSample.value[9],

           // Observation type
-          PoseObservationType.MEGATAG_2));
+          PoseObservationType.MEGATAG_2,
+
+          // Visible Tag IDs
+          tagIds));
     }

     // Save pose observations to inputs object
diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java
index f8d9cdd9..14a2867b 100644
--- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java
+++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java
@@ -19,7 +19,7 @@
 import java.util.Set;
 import org.photonvision.PhotonCamera;

-/** IO implementation for 
real PhotonVision hardware. */ +/** IO implementation for real PhotonVision hardware (pose already solved by PV). */ public class VisionIOPhotonVision implements VisionIO { protected final PhotonCamera camera; protected final Transform3d robotToCamera; @@ -28,10 +28,10 @@ public class VisionIOPhotonVision implements VisionIO { * Creates a new VisionIOPhotonVision. * * @param name The configured name of the camera. - * @param rotationSupplier The 3D position of the camera relative to the robot. + * @param robotToCamera The 3D position of the camera relative to the robot. */ public VisionIOPhotonVision(String name, Transform3d robotToCamera) { - camera = new PhotonCamera(name); + this.camera = new PhotonCamera(name); this.robotToCamera = robotToCamera; } @@ -42,33 +42,30 @@ public void updateInputs(VisionIOInputs inputs) { // Cap the number of unread results processed per loop final int kMaxUnread = 5; - // Use HashSet/ArrayList to avoid LinkedList churn - Set tagIds = new HashSet<>(); - ArrayList poseObservations = new ArrayList<>(kMaxUnread); + final Set unionTagIds = new HashSet<>(); + final ArrayList poseObservations = new ArrayList<>(kMaxUnread); + + double newestTargetTs = Double.NEGATIVE_INFINITY; + Rotation2d bestYaw = Rotation2d.kZero; + Rotation2d bestPitch = Rotation2d.kZero; int processed = 0; for (var result : camera.getAllUnreadResults()) { // Hard cap - if (processed++ >= kMaxUnread) { - break; - } + if (processed++ >= kMaxUnread) break; + + final double ts = result.getTimestampSeconds(); - // Update latest target observation - if (result.hasTargets()) { - inputs.latestTargetObservation = - new TargetObservation( - Rotation2d.fromDegrees(result.getBestTarget().getYaw()), - Rotation2d.fromDegrees(result.getBestTarget().getPitch())); - } else { - inputs.latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + if (result.hasTargets() && ts >= newestTargetTs) { + newestTargetTs = ts; + bestYaw = 
Rotation2d.fromDegrees(result.getBestTarget().getYaw()); + bestPitch = Rotation2d.fromDegrees(result.getBestTarget().getPitch()); } - // Add pose observation - if (result.multitagResult.isPresent()) { // Multitag result - var multitagResult = result.multitagResult.get(); + if (result.multitagResult.isPresent()) { + var multitag = result.multitagResult.get(); - // Calculate robot pose - Transform3d fieldToCamera = multitagResult.estimatedPose.best; + Transform3d fieldToCamera = multitag.estimatedPose.best; Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); @@ -77,64 +74,62 @@ public void updateInputs(VisionIOInputs inputs) { for (var target : result.targets) { totalTagDistance += target.bestCameraToTarget.getTranslation().getNorm(); } - - // Add tag IDs - tagIds.addAll(multitagResult.fiducialIDsUsed); - - // Guard against divide-by-zero if targets is empty (defensive) double avgTagDistance = result.targets.isEmpty() ? 
0.0 : (totalTagDistance / result.targets.size()); - // Add observation + Set used = new HashSet<>(); + for (int id : multitag.fiducialIDsUsed) { + used.add(id); + unionTagIds.add(id); + } + poseObservations.add( new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - multitagResult.estimatedPose.ambiguity, // Ambiguity - multitagResult.fiducialIDsUsed.size(), // Tag count - avgTagDistance, // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - - } else if (!result.targets.isEmpty()) { // Single tag result + ts, + robotPose, + multitag.estimatedPose.ambiguity, + multitag.fiducialIDsUsed.size(), + avgTagDistance, + PoseObservationType.PHOTONVISION, + Set.copyOf(used))); + + } else if (!result.targets.isEmpty()) { var target = result.targets.get(0); - // Calculate robot pose var tagPose = aprilTagLayout.getTagPose(target.fiducialId); - if (tagPose.isPresent()) { - Transform3d fieldToTarget = - new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); - Transform3d cameraToTarget = target.bestCameraToTarget; - Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); - Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); - Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); - - // Add tag ID - tagIds.add((short) target.fiducialId); - - // Add observation - poseObservations.add( - new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - target.poseAmbiguity, // Ambiguity - 1, // Tag count - cameraToTarget.getTranslation().getNorm(), // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - } + if (tagPose.isEmpty()) continue; + + Transform3d fieldToTarget = + new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); + Transform3d cameraToTarget = target.bestCameraToTarget; + + Transform3d fieldToCamera = 
fieldToTarget.plus(cameraToTarget.inverse()); + Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); + Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); + + unionTagIds.add(target.fiducialId); + + poseObservations.add( + new PoseObservation( + ts, + robotPose, + target.poseAmbiguity, + 1, + cameraToTarget.getTranslation().getNorm(), + PoseObservationType.PHOTONVISION, + Set.of(target.fiducialId))); } } - // Save pose observations to inputs object - inputs.poseObservations = new PoseObservation[poseObservations.size()]; - for (int i = 0; i < poseObservations.size(); i++) { - inputs.poseObservations[i] = poseObservations.get(i); - } + inputs.latestTargetObservation = + (newestTargetTs > Double.NEGATIVE_INFINITY) + ? new TargetObservation(bestYaw, bestPitch) + : new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); - // Save tag IDs to inputs objects - inputs.tagIds = new int[tagIds.size()]; + inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + + inputs.tagIds = new int[unionTagIds.size()]; int i = 0; - for (int id : tagIds) { - inputs.tagIds[i++] = id; - } + for (int id : unionTagIds) inputs.tagIds[i++] = id; } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index fd4525b6..4e5761b9 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -22,34 +22,50 @@ public class VisionIOPhotonVisionSim extends VisionIOPhotonVision { private static VisionSystemSim visionSim; private final Supplier poseSupplier; + + @SuppressWarnings("unused") private final PhotonCameraSim cameraSim; /** * Creates a new VisionIOPhotonVisionSim. * - * @param name The name of the camera. - * @param poseSupplier Supplier for the robot pose to use in simulation. 
+ * @param name The name of the camera (PhotonVision camera name). + * @param robotToCamera Camera pose relative to robot frame. + * @param poseSupplier Supplier for the robot pose (field->robot) to use in simulation. */ public VisionIOPhotonVisionSim( String name, Transform3d robotToCamera, Supplier poseSupplier) { super(name, robotToCamera); this.poseSupplier = poseSupplier; - // Initialize vision sim + // Initialize VisionSystemSim once for all cameras if (visionSim == null) { visionSim = new VisionSystemSim("main"); visionSim.addAprilTags(FieldConstants.aprilTagLayout); } - // Add sim camera + // Camera properties: + // - If you have per-camera SimCameraProperties in Constants, pass them here instead. + // - Otherwise keep the default and tune later. var cameraProperties = new SimCameraProperties(); + + // Recommended defaults (feel free to tune) + // cameraProperties.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); + // cameraProperties.setFPS(20); + // cameraProperties.setAvgLatencyMs(35); + // cameraProperties.setLatencyStdDevMs(5); + cameraSim = new PhotonCameraSim(camera, cameraProperties); visionSim.addCamera(cameraSim, robotToCamera); } @Override public void updateInputs(VisionIOInputs inputs) { + // NOTE: This updates the sim world every time a sim camera is polled. + // That's fine (fast enough), but if you want “update once per loop,” see note below. 
visionSim.update(poseSupplier.get()); + + // Then pull results like normal (and emit PoseObservation + usedTagIds sets) super.updateInputs(inputs); } } diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java new file mode 100644 index 00000000..df46efd2 --- /dev/null +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -0,0 +1,164 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.interpolation.Interpolatable; +import edu.wpi.first.math.interpolation.Interpolator; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; + +/** + * A concurrent version of WPIlib's TimeInterpolatableBuffer class to avoid the need for explicit + * synchronization in our robot code. + * + * @param The type stored in this buffer. 
+ */
+public final class ConcurrentTimeInterpolatableBuffer<T> {
+  private final double m_historySize;
+  private final Interpolator<T> m_interpolatingFunc;
+  private final ConcurrentNavigableMap<Double, T> m_pastSnapshots = new ConcurrentSkipListMap<>();
+
+  private ConcurrentTimeInterpolatableBuffer(
+      Interpolator<T> interpolateFunction, double historySizeSeconds) {
+    this.m_historySize = historySizeSeconds;
+    this.m_interpolatingFunc = interpolateFunction;
+  }
+
+  /**
+   * Create a new TimeInterpolatableBuffer.
+   *
+   * @param interpolateFunction The function used to interpolate between values.
+   * @param historySizeSeconds The history size of the buffer.
+   * @param <T> The type of data to store in the buffer.
+   * @return The new TimeInterpolatableBuffer.
+   */
+  public static <T> ConcurrentTimeInterpolatableBuffer<T> createBuffer(
+      Interpolator<T> interpolateFunction, double historySizeSeconds) {
+    return new ConcurrentTimeInterpolatableBuffer<>(interpolateFunction, historySizeSeconds);
+  }
+
+  /**
+   * Create a new TimeInterpolatableBuffer that stores a given subclass of {@link Interpolatable}.
+   *
+   * @param historySizeSeconds The history size of the buffer.
+   * @param <T> The type of {@link Interpolatable} to store in the buffer.
+   * @return The new TimeInterpolatableBuffer.
+   */
+  public static <T extends Interpolatable<T>> ConcurrentTimeInterpolatableBuffer<T> createBuffer(
+      double historySizeSeconds) {
+    return new ConcurrentTimeInterpolatableBuffer<>(
+        Interpolatable::interpolate, historySizeSeconds);
+  }
+
+  /**
+   * Create a new TimeInterpolatableBuffer to store Double values.
+   *
+   * @param historySizeSeconds The history size of the buffer.
+   * @return The new TimeInterpolatableBuffer.
+   */
+  public static ConcurrentTimeInterpolatableBuffer<Double> createDoubleBuffer(
+      double historySizeSeconds) {
+    return new ConcurrentTimeInterpolatableBuffer<>(MathUtil::interpolate, historySizeSeconds);
+  }
+
+  /**
+   * Add a sample to the buffer.
+   *
+   * @param timeSeconds The timestamp of the sample.
+   * @param sample The sample object. 
+ */
+  public void addSample(double timeSeconds, T sample) {
+    m_pastSnapshots.put(timeSeconds, sample);
+    cleanUp(timeSeconds);
+  }
+
+  /**
+   * Removes samples older than our current history size.
+   *
+   * @param time The current timestamp.
+   */
+  private void cleanUp(double time) {
+    m_pastSnapshots.headMap(time - m_historySize, false).clear();
+  }
+
+  /** Clear all old samples. */
+  public void clear() {
+    m_pastSnapshots.clear();
+  }
+
+  /**
+   * Sample the buffer at the given time. If the buffer is empty, an empty Optional is returned.
+   *
+   * @param timeSeconds The time at which to sample.
+   * @return The interpolated value at that timestamp or an empty Optional.
+   */
+  public Optional<T> getSample(double timeSeconds) {
+    if (m_pastSnapshots.isEmpty()) {
+      return Optional.empty();
+    }
+
+    // Special case for when the requested time is the same as a sample
+    var nowEntry = m_pastSnapshots.get(timeSeconds);
+    if (nowEntry != null) {
+      return Optional.of(nowEntry);
+    }
+
+    var bottomBound = m_pastSnapshots.floorEntry(timeSeconds);
+    var topBound = m_pastSnapshots.ceilingEntry(timeSeconds);
+
+    // Return an empty Optional if neither bound exists, and the opposite bound if only one is
+    // missing
+    if (topBound == null && bottomBound == null) {
+      return Optional.empty();
+    } else if (topBound == null) {
+      return Optional.of(bottomBound.getValue());
+    } else if (bottomBound == null) {
+      return Optional.of(topBound.getValue());
+    } else {
+      // Otherwise, interpolate. Because t is between [0, 1], we want the ratio of
+      // (the difference between the current time and bottom bound) and (the difference between top
+      // and bottom bounds).
+      return Optional.of(
+          m_interpolatingFunc.interpolate(
+              bottomBound.getValue(),
+              topBound.getValue(),
+              (timeSeconds - bottomBound.getKey()) / (topBound.getKey() - bottomBound.getKey())));
+    }
+  }
+
+  public Entry<Double, T> getLatest() {
+    return m_pastSnapshots.lastEntry();
+  }
+
+  /**
+   * Grant access to the internal sample buffer. 
Used in Pose Estimation to replay odometry inputs + * stored within this buffer. + * + * @return The internal sample buffer. + */ + public ConcurrentNavigableMap getInternalBuffer() { + return m_pastSnapshots; + } +} diff --git a/src/main/java/frc/robot/util/GeomUtil.java b/src/main/java/frc/robot/util/GeomUtil.java deleted file mode 100644 index f77c8b2f..00000000 --- a/src/main/java/frc/robot/util/GeomUtil.java +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) 2024-2026 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Twist2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; - -/** Geometry utilities for working with translations, rotations, transforms, and poses. 
*/ -public class GeomUtil { - /** - * Creates a pure translating transform - * - * @param translation The translation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Translation2d translation) { - return new Transform2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure translating transform - * - * @param x The x coordinate of the translation - * @param y The y coordinate of the translation - * @return The resulting transform - */ - public static Transform2d toTransform2d(double x, double y) { - return new Transform2d(x, y, Rotation2d.kZero); - } - - /** - * Creates a pure rotating transform - * - * @param rotation The rotation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Rotation2d rotation) { - return new Transform2d(Translation2d.kZero, rotation); - } - - /** - * Converts a Pose2d to a Transform2d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform2d toTransform2d(Pose2d pose) { - return new Transform2d(pose.getTranslation(), pose.getRotation()); - } - - public static Pose2d inverse(Pose2d pose) { - Rotation2d rotationInverse = pose.getRotation().unaryMinus(); - return new Pose2d( - pose.getTranslation().unaryMinus().rotateBy(rotationInverse), rotationInverse); - } - - /** - * Converts a Transform2d to a Pose2d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose2d toPose2d(Transform2d transform) { - return new Pose2d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Creates a pure translated pose - * - * @param translation The translation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Translation2d translation) { 
- return new Pose2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure rotated pose - * - * @param rotation The rotation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Rotation2d rotation) { - return new Pose2d(Translation2d.kZero, rotation); - } - - /** - * Multiplies a twist by a scaling factor - * - * @param twist The twist to multiply - * @param factor The scaling factor for the twist components - * @return The new twist - */ - public static Twist2d multiply(Twist2d twist, double factor) { - return new Twist2d(twist.dx * factor, twist.dy * factor, twist.dtheta * factor); - } - - /** - * Converts a Pose3d to a Transform3d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform3d toTransform3d(Pose3d pose) { - return new Transform3d(pose.getTranslation(), pose.getRotation()); - } - - /** - * Converts a Transform3d to a Pose3d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose3d toPose3d(Transform3d transform) { - return new Pose3d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Converts a ChassisSpeeds to a Twist2d by extracting two dimensions (Y and Z). chain - * - * @param speeds The original translation - * @return The resulting translation - */ - public static Twist2d toTwist2d(ChassisSpeeds speeds) { - return new Twist2d( - speeds.vxMetersPerSecond, speeds.vyMetersPerSecond, speeds.omegaRadiansPerSecond); - } - - /** - * Creates a new pose from an existing one using a different translation value. 
- * - * @param pose The original pose - * @param translation The new translation to use - * @return The new pose with the new translation and original rotation - */ - public static Pose2d withTranslation(Pose2d pose, Translation2d translation) { - return new Pose2d(translation, pose.getRotation()); - } - - /** - * Creates a new pose from an existing one using a different rotation value. - * - * @param pose The original pose - * @param rotation The new rotation to use - * @return The new pose with the original translation and new rotation - */ - public static Pose2d withRotation(Pose2d pose, Rotation2d rotation) { - return new Pose2d(pose.getTranslation(), rotation); - } -}