diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f35c15b..082bc7f 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -27,3 +27,4 @@ updates: interval: "weekly" ignore: - dependency-name: "gradle" + - dependency-name: "gradle-wrapper" diff --git a/doc/INSTALL.md b/doc/INSTALL.md index c3c2372..af3c52e 100644 --- a/doc/INSTALL.md +++ b/doc/INSTALL.md @@ -118,7 +118,10 @@ steps you need to complete: https://github.com/CrossTheRoadElec/Phoenix6-Examples/blob/1db713d75b08a4315c9273cebf5b5e6a130ed3f7/java/SwerveWithPathPlanner/src/main/java/frc/robot/generated/TunerConstants.java#L171-L175). Before removing them, both lines will be marked as errors in VSCode. -5. In `TunerConstants.java`, change `kSteerInertia` to `0.004` and +5. In `TunerConstants.java`, change `kSlipCurrent` to `60` amps. This will + keep your robot from tearing holes in the carpet at competition! + +6. In `TunerConstants.java`, change `kSteerInertia` to `0.004` and `kDriveInertia` to `0.025` to allow the AdvantageKit simulation code to operate as expected. diff --git a/doc/RBSI-GSG.md b/doc/RBSI-GSG.md index da7774e..78b25b7 100644 --- a/doc/RBSI-GSG.md +++ b/doc/RBSI-GSG.md @@ -41,6 +41,10 @@ modifications to extant RBSI code will be done to files within the ### Tuning constants for optimal performance +**The importance of tuning your drivetrain for smooth and consistent +performance, battery longevity, and not tearing up the field cannot be +overemphasized.** + 4. Over the course of your robot project, you will need to tune PID parameters for both your drivebase and any mechanisms you build to play the game. AdvantageKit includes detailed instructions for how to tune the various @@ -132,3 +136,18 @@ section of [each release](https://github.com/AZ-First/Az-RBSI/releases). Mounting the case to the robot requires 4x #10-32 nylock nuts (placed in the hex-shaped mounts inside the case) and 4x #10-32 bolts. + + Order of assembly of the Orange Pi Double Case matters, given the tight clearances: + 1. Super-glue the nylock nuts into the hex mounting holes. + 2. Install the fans and grates into the case side. + 3. Assemble the Pis onto the standoffs outside the box. + 4. Solder / mount the Voltage Regulator solution of your choice. + 5. Connect the USB-C power cables to the Pis. + 6. Connect the fan power to the 5V (red) and GND (black) pins on the Pis. + 7. Install the Pi/standoff assembly into the case using screws at the bottom, + being careful of the tight clearance between the USB sockets and the case opening. + 8. Tie a knot in the incoming power line _to be placed inside the box + for strain relief_, and pass the incoming power line through the notch + in the lower case. + 9. Install the cover on the box using screws. + 10. Mount the case to your robot using the #10-32 screws.
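For reference, a minimal sketch of what the INSTALL.md steps above (change `kSlipCurrent` to `60` amps, `kSteerInertia` to `0.004`, and `kDriveInertia` to `0.025`) might look like once applied to the generated `TunerConstants.java`. The field names and unit-based types shown here follow recent Phoenix Tuner X swerve-generator output and are assumptions; your generated file may use different names or plain `double` values.

```java
// Sketch only: edited constants in frc/robot/generated/TunerConstants.java.
// Field names/types follow CTRE's Tuner X generator and may differ in your generated file.
import static edu.wpi.first.units.Units.Amps;
import static edu.wpi.first.units.Units.KilogramSquareMeters;

import edu.wpi.first.units.measure.Current;
import edu.wpi.first.units.measure.MomentOfInertia;

public class TunerConstants {
  // INSTALL.md step 5: cap the stator current at which the wheels are allowed to slip
  private static final Current kSlipCurrent = Amps.of(60.0);

  // INSTALL.md step 6: inertias that let the AdvantageKit simulation behave as expected
  private static final MomentOfInertia kSteerInertia = KilogramSquareMeters.of(0.004);
  private static final MomentOfInertia kDriveInertia = KilogramSquareMeters.of(0.025);
}
```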
diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index cfb7214..26f2525 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -164,8 +164,10 @@ public static final class PowerConstants { /************************************************************************* */ /** List of Robot CAN Busses ********************************************* */ public static final class CANBuses { - public static final String DRIVE = "DriveTrain"; public static final String RIO = ""; + public static final String DRIVE = "DriveTrain"; + + public static final String[] ALL = {RIO, DRIVE}; } /************************************************************************* */ @@ -178,30 +180,30 @@ public static class RobotDevices { // Front Left public static final RobotDeviceId FL_DRIVE = - new RobotDeviceId(SwerveConstants.kFLDriveMotorId, SwerveConstants.kFLDriveCanbus, 18); + new RobotDeviceId(SwerveConstants.kFLDriveMotorId, SwerveConstants.kFLDriveCanbus, 16); public static final RobotDeviceId FL_ROTATION = - new RobotDeviceId(SwerveConstants.kFLSteerMotorId, SwerveConstants.kFLSteerCanbus, 19); + new RobotDeviceId(SwerveConstants.kFLSteerMotorId, SwerveConstants.kFLSteerCanbus, 17); public static final RobotDeviceId FL_CANCODER = new RobotDeviceId(SwerveConstants.kFLEncoderId, SwerveConstants.kFLEncoderCanbus, null); // Front Right public static final RobotDeviceId FR_DRIVE = - new RobotDeviceId(SwerveConstants.kFRDriveMotorId, SwerveConstants.kFRDriveCanbus, 17); + new RobotDeviceId(SwerveConstants.kFRDriveMotorId, SwerveConstants.kFRDriveCanbus, 13); public static final RobotDeviceId FR_ROTATION = - new RobotDeviceId(SwerveConstants.kFRSteerMotorId, SwerveConstants.kFRSteerCanbus, 16); + new RobotDeviceId(SwerveConstants.kFRSteerMotorId, SwerveConstants.kFRSteerCanbus, 12); public static final RobotDeviceId FR_CANCODER = new RobotDeviceId(SwerveConstants.kFREncoderId, SwerveConstants.kFREncoderCanbus, null); // Back Left public static final RobotDeviceId BL_DRIVE = - new RobotDeviceId(SwerveConstants.kBLDriveMotorId, SwerveConstants.kBLDriveCanbus, 1); + new RobotDeviceId(SwerveConstants.kBLDriveMotorId, SwerveConstants.kBLDriveCanbus, 2); public static final RobotDeviceId BL_ROTATION = - new RobotDeviceId(SwerveConstants.kBLSteerMotorId, SwerveConstants.kBLSteerCanbus, 0); + new RobotDeviceId(SwerveConstants.kBLSteerMotorId, SwerveConstants.kBLSteerCanbus, 3); public static final RobotDeviceId BL_CANCODER = new RobotDeviceId(SwerveConstants.kBLEncoderId, SwerveConstants.kBLEncoderCanbus, null); // Back Right public static final RobotDeviceId BR_DRIVE = - new RobotDeviceId(SwerveConstants.kBRDriveMotorId, SwerveConstants.kBRSteerCanbus, 2); + new RobotDeviceId(SwerveConstants.kBRDriveMotorId, SwerveConstants.kBRSteerCanbus, 7); public static final RobotDeviceId BR_ROTATION = - new RobotDeviceId(SwerveConstants.kBRSteerMotorId, SwerveConstants.kBRSteerCanbus, 3); + new RobotDeviceId(SwerveConstants.kBRSteerMotorId, SwerveConstants.kBRSteerCanbus, 6); public static final RobotDeviceId BR_CANCODER = new RobotDeviceId(SwerveConstants.kBREncoderId, SwerveConstants.kBREncoderCanbus, null); // Pigeon @@ -317,7 +319,7 @@ public static final class DrivebaseConstants { public static final double kDriveD = 0.03; public static final double kDriveV = 0.83; public static final double kDriveA = 0.0; - public static final double kDriveS = 0.21; + public static final double kDriveS = 2.00; public static final double kDriveT = 
SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; public static final double kSteerP = 400.0; @@ -439,61 +441,55 @@ public static class VisionConstants { /************************************************************************* */ /** Vision Camera Posses ************************************************* */ - public static class Cameras { - // Camera names, must match names configured on coprocessor - public static String camera0Name = "camera_0"; - public static String camera1Name = "camera_1"; - // ... And more, if needed - - // Robot to camera transforms + public static final class Cameras { + public record CameraConfig( + String name, + Transform3d robotToCamera, + double stdDevFactor, + SimCameraProperties simProps) {} + + // Camera Configuration Records // (ONLY USED FOR PHOTONVISION -- Limelight: configure in web UI instead) // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways - public static Transform3d robotToCamera0 = - new Transform3d( - Inches.of(-13.0), - Inches.of(13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, Math.PI / 2)); - public static Transform3d robotToCamera1 = - new Transform3d( - Inches.of(-13.0), - Inches.of(-13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, -Math.PI / 2)); - - // Standard deviation multipliers for each camera - // (Adjust to trust some cameras more than others) - public static double[] cameraStdDevFactors = - new double[] { - 1.0, // Camera 0 - 1.0 // Camera 1 - }; - } - - /************************************************************************* */ - /** Simulation Camera Properties ***************************************** */ - public static class SimCameras { - public static final SimCameraProperties kSimCamera1Props = - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }; - - public static final SimCameraProperties kSimCamera2Props = - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }; + public static final CameraConfig[] ALL = { + new CameraConfig( + "camera_0", + new Transform3d( + Inches.of(-13.0), + Inches.of(13.0), + Inches.of(12.0), + new Rotation3d(0.0, 0.0, Math.PI / 2)), + 1.0, + new SimCameraProperties() { + { + setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + setCalibError(0.25, 0.08); + setFPS(30); + setAvgLatencyMs(20); + setLatencyStdDevMs(5); + } + }), + // + new CameraConfig( + "camera_1", + new Transform3d( + Inches.of(-13.0), + Inches.of(-13.0), + Inches.of(12.0), + new Rotation3d(0.0, 0.0, -Math.PI / 2)), + 1.0, + new SimCameraProperties() { + { + setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + setCalibError(0.25, 0.08); + setFPS(30); + setAvgLatencyMs(20); + setLatencyStdDevMs(5); + } + }), + + // ... 
And more, if needed + }; } /************************************************************************* */ diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index e0ef5c6..6d58c6d 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -18,9 +18,6 @@ package frc.robot; import com.revrobotics.util.StatusLogger; -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; @@ -38,7 +35,6 @@ import org.littletonrobotics.urcl.URCL; import org.photonvision.PhotonCamera; import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; import org.photonvision.simulation.VisionSystemSim; /** @@ -285,31 +281,32 @@ public void testPeriodic() {} /** This function is called once when the robot is first started up. */ @Override public void simulationInit() { - visionSim = new VisionSystemSim("main"); - - // Load AprilTag field layout - AprilTagFieldLayout fieldLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); - - // Register AprilTags with vision simulation - visionSim.addAprilTags(fieldLayout); - - // Simulated Camera Properties - SimCameraProperties cameraProp = new SimCameraProperties(); - // A 1280 x 800 camera with a 100 degree diagonal FOV. - cameraProp.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); - // Approximate detection noise with average and standard deviation error in pixels. - cameraProp.setCalibError(0.25, 0.08); - // Set the camera image capture framerate (Note: this is limited by robot loop rate). - cameraProp.setFPS(20); - // The average and standard deviation in milliseconds of image data latency. - cameraProp.setAvgLatencyMs(35); - cameraProp.setLatencyStdDevMs(5); - - // Define Cameras and add to simulation - PhotonCamera camera0 = new PhotonCamera("frontCam"); - PhotonCamera camera1 = new PhotonCamera("backCam"); - visionSim.addCamera(new PhotonCameraSim(camera0, cameraProp), Constants.Cameras.robotToCamera0); - visionSim.addCamera(new PhotonCameraSim(camera1, cameraProp), Constants.Cameras.robotToCamera1); + // ---------------- SIM-ONLY: vision simulation world + camera sims ---------------- + // 1) Create the vision simulation world + visionSim = new VisionSystemSim("CameraSweepWorld"); + + // 2) Add AprilTags (field layout) + visionSim.addAprilTags(FieldConstants.aprilTagLayout); + + // 3) Build PhotonCameraSim objects from Constants camera configs + final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; + + PhotonCameraSim[] simCams = new PhotonCameraSim[camConfigs.length]; + + for (int i = 0; i < camConfigs.length; i++) { + final var cfg = camConfigs[i]; + + // Name must match the VisionIOPhotonVisionSim name + PhotonCamera photonCam = new PhotonCamera(cfg.name()); + + // 2026 API: wrap camera with sim properties from Constants + PhotonCameraSim camSim = new PhotonCameraSim(photonCam, cfg.simProps()); + + // Register camera with the sim using the robot-to-camera transform + visionSim.addCamera(camSim, cfg.robotToCamera()); + + simCams[i] = camSim; + } } /** This function is called periodically whilst in simulation. 
*/ diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 10bcd05..c38f578 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -13,8 +13,6 @@ package frc.robot; -import static frc.robot.Constants.Cameras.*; - import choreo.auto.AutoChooser; import choreo.auto.AutoFactory; import choreo.auto.AutoRoutine; @@ -23,7 +21,6 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.Filesystem; @@ -37,8 +34,8 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; +import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; -import frc.robot.Constants.SimCameras; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; import frc.robot.commands.DriveCommands; @@ -49,9 +46,6 @@ import frc.robot.subsystems.flywheel_example.FlywheelIO; import frc.robot.subsystems.flywheel_example.FlywheelIOSim; import frc.robot.subsystems.imu.Imu; -import frc.robot.subsystems.imu.ImuIO; -import frc.robot.subsystems.imu.ImuIONavX; -import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.subsystems.imu.ImuIOSim; import frc.robot.subsystems.vision.CameraSweepEvaluator; import frc.robot.subsystems.vision.Vision; @@ -70,6 +64,8 @@ import frc.robot.util.RBSIEnum.DriveStyle; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIPowerMonitor; +import java.util.Arrays; +import java.util.List; import java.util.Set; import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; import org.photonvision.PhotonCamera; @@ -111,7 +107,7 @@ public class RobotContainer { private final Vision m_vision; @SuppressWarnings("unused") - private RBSICANHealth m_can0, m_can1; + private List canHealth; /** Dashboard inputs ***************************************************** */ // AutoChoosers for both supported path planning types @@ -132,6 +128,37 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); + // Vision Factories + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + Arrays.stream(Cameras.ALL) + .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + Arrays.stream(Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {new VisionIO() {}}; + }; + } + + private static VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + private VisionIO[] buildVisionIOsReplay() { + return new VisionIO[] {new VisionIO() {}}; + } + /** * Constructor for the Robot Container. This container holds subsystems, opertator interface * devices, and commands. 
@@ -148,74 +175,54 @@ public RobotContainer() { // YAGSL drivebase, get config from deploy directory // Get the IMU instance - ImuIO imuIO = - switch (SwerveConstants.kImuType) { - case "pigeon2" -> new ImuIOPigeon2(); - case "navx", "navx_spi" -> new ImuIONavX(); - default -> throw new RuntimeException("Invalid IMU type"); - }; - m_imu = new Imu(imuIO); + m_imu = new Imu(SwerveConstants.kImu.factory.get()); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = - switch (Constants.getVisionType()) { - case PHOTON -> - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOPhotonVision(camera0Name, robotToCamera0), - new VisionIOPhotonVision(camera1Name, robotToCamera1)); - case LIMELIGHT -> - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOLimelight(camera0Name, m_drivebase::getHeading), - new VisionIOLimelight(camera1Name, m_drivebase::getHeading)); - case NONE -> - new Vision( - m_drivebase::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); - default -> null; - }; + m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); - break; case SIM: - // Sim robot, instantiate physics sim IO implementations RBSICANBusRegistry.initSim(CANBuses.RIO, CANBuses.DRIVE); - m_imu = new Imu(new ImuIOSim() {}); + + m_imu = new Imu(new ImuIOSim()); m_drivebase = new Drive(m_imu); - m_flywheel = new Flywheel(new FlywheelIOSim() {}); - m_vision = - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOPhotonVisionSim(camera0Name, robotToCamera0, m_drivebase::getPose), - new VisionIOPhotonVisionSim(camera1Name, robotToCamera1, m_drivebase::getPose)); + m_flywheel = new Flywheel(new FlywheelIOSim()); + + // ---------------- Vision IOs (robot code) ---------------- + var cams = frc.robot.Constants.Cameras.ALL; + + // If you keep Vision expecting exactly two cameras: + VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); + m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); + m_accel = new Accelerometer(m_imu); - // CameraSweepEvaluator Construct - // 1) Create the vision simulation world + // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- VisionSystemSim visionSim = new VisionSystemSim("CameraSweepWorld"); - // 2) Add AprilTags (field layout) visionSim.addAprilTags(FieldConstants.aprilTagLayout); - // 3) Create two simulated cameras - // Create PhotonCamera objects (names must match VisionIOPhotonVisionSim) - PhotonCamera camera1 = new PhotonCamera(camera0Name); - PhotonCamera camera2 = new PhotonCamera(camera1Name); - - // Wrap them with simulation + properties (2026 API) - PhotonCameraSim cam1 = new PhotonCameraSim(camera1, SimCameras.kSimCamera1Props); - PhotonCameraSim cam2 = new PhotonCameraSim(camera2, SimCameras.kSimCamera2Props); - - // 4) Register cameras with the sim - visionSim.addCamera(cam1, Transform3d.kZero); - visionSim.addCamera(cam2, Transform3d.kZero); - // 5) Create the sweep evaluator - sweep = new CameraSweepEvaluator(visionSim, cam1, cam2); - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); + + PhotonCameraSim[] simCams = new PhotonCameraSim[cams.length]; + + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + + PhotonCamera photonCam = new PhotonCamera(cfg.name()); + PhotonCameraSim camSim = new 
PhotonCameraSim(photonCam, cfg.simProps()); + + visionSim.addCamera(camSim, cfg.robotToCamera()); + simCams[i] = camSim; + } + + // Create the sweep evaluator (expects two cameras; adapt if you add more later) + if (simCams.length >= 2) { + sweep = new CameraSweepEvaluator(visionSim, simCams[0], simCams[1]); + } else { + sweep = null; // or throw if you require exactly 2 cameras + } + break; default: @@ -224,15 +231,16 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = - new Vision(m_drivebase::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); + m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); break; } + // Init all CAN busses specified in the `Constants.CANBuses` class + RBSICANBusRegistry.initReal(Constants.CANBuses.ALL); + canHealth = Arrays.stream(Constants.CANBuses.ALL).map(RBSICANHealth::new).toList(); + // In addition to the initial battery capacity from the Dashboard, ``RBSIPowerMonitor`` takes // all the non-drivebase subsystems for which you wish to have power monitoring; DO NOT // include ``m_drivebase``, as that is automatically monitored. diff --git a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java index 98f775f..a9332ce 100644 --- a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java +++ b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java @@ -18,6 +18,9 @@ import frc.robot.Constants; import frc.robot.Constants.CANBuses; import frc.robot.generated.TunerConstants; +import frc.robot.subsystems.imu.ImuIO; +import frc.robot.subsystems.imu.ImuIONavX; +import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.util.YagslConstants; /** @@ -270,6 +273,9 @@ public class SwerveConstants { } } + // Derived constant AFTER the static block + public static final ImuType kImu = ImuType.fromString(kImuType); + // Computed quantities public static final double kDriveBaseRadiusMeters = Math.max( @@ -297,4 +303,30 @@ public class SwerveConstants { // Turn PID configuration0 public static final double turnPIDMinInput = 0; // Radians public static final double turnPIDMaxInput = 2 * Math.PI; // Radians + + /** IMU Type enum */ + public enum ImuType { + PIGEON(new String[] {"pigeon", "pigeon2"}, ImuIOPigeon2::new), + NAVX(new String[] {"navx", "navx_spi"}, ImuIONavX::new); + + private final String[] keys; + public final java.util.function.Supplier<ImuIO> factory; + + ImuType(String[] keys, java.util.function.Supplier<ImuIO> factory) { + this.keys = keys; + this.factory = factory; + } + + public static ImuType fromString(String s) { + if (s == null) throw new IllegalArgumentException("IMU type string is null"); + String norm = s.trim().toLowerCase(); + + for (ImuType t : values()) { + for (String k : t.keys) { + if (norm.equals(k)) return t; + } + } + throw new IllegalArgumentException("Unknown IMU type: '" + s + "'"); + } + } } diff --git a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java index 05a0eb1..3cdcbb9 100644 --- a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java +++ b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java @@ -17,15 +17,13 @@ package frc.robot.subsystems.vision; -import static 
frc.robot.Constants.Cameras.robotToCamera0; -import static frc.robot.Constants.Cameras.robotToCamera1; - import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Quaternion; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.util.Units; +import frc.robot.Constants; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; @@ -44,11 +42,27 @@ public class CameraSweepEvaluator { private final PhotonCameraSim camSim1; private final PhotonCameraSim camSim2; + /** Indexes into Constants.Cameras.ALL for each camera being swept. */ + private final int cam1Index; + + private final int cam2Index; + public CameraSweepEvaluator( VisionSystemSim visionSim, PhotonCameraSim camSim1, PhotonCameraSim camSim2) { + this(visionSim, camSim1, camSim2, 0, 1); + } + + public CameraSweepEvaluator( + VisionSystemSim visionSim, + PhotonCameraSim camSim1, + PhotonCameraSim camSim2, + int cam1Index, + int cam2Index) { this.visionSim = visionSim; this.camSim1 = camSim1; this.camSim2 = camSim2; + this.cam1Index = cam1Index; + this.cam2Index = cam2Index; } private static Quaternion quatFromAxisAngle(double ax, double ay, double az, double angleRad) { @@ -87,7 +101,7 @@ private static Quaternion quatMul(Quaternion a, Quaternion b) { /** * Compose base rotation with an "extra" camera yaw/pitch. * - *

<p>Conventions: yawDeg = rotation about +Z (up) pitchDeg = rotation about +Y + * <p>Conventions: yawDeg = rotation about +Z (up), pitchDeg = rotation about +Y. * * <p>
Order: extra = yaw ⊗ pitch (yaw first, then pitch, both in the camera/base frame) combined = * base ⊗ extra @@ -107,98 +121,101 @@ private static Rotation3d composeCameraExtra(Rotation3d base, double yawDeg, dou return new Rotation3d(qCombined); } - /** - * Run a full sweep of candidate camera placements. - * - * @param outputCsvPath Path to write the CSV results - */ + private Transform3d getMountTransformOrThrow(int idx) { + var all = Constants.Cameras.ALL; + if (idx < 0 || idx >= all.length) { + throw new IllegalArgumentException( + "CameraSweepEvaluator camera index out of range: " + + idx + + " (ALL.length=" + + all.length + + ")"); + } + return all[idx].robotToCamera(); + } + + /** Run a full sweep of candidate camera placements and write a CSV of results. */ public void runFullSweep(String outputCsvPath) throws IOException { // Example field bounds (meters) -- tune these for your field size double[] fieldX = { 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0 }; double[] fieldY = {0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5, 7.0, 7.5}; - double[] robotZ = {0.0}; // Usually floor height + double[] robotZ = {0.0}; - // NEW: Sweep robot yaw at each location (degrees) + // Sweep robot yaw at each location (degrees) double[] robotYawDeg = {-180, -135, -90, -45, 0, 45, 90, 135}; // Camera “extra” rotation sweep (applied on top of the mount rotation) double[] camYawDeg = {-15, 0, 15}; double[] camPitchDeg = {-10, 0, 10}; - // Pull fixed mount transforms from Constants once (and log/write them) - final Transform3d rToC0 = robotToCamera0; - final Transform3d rToC1 = robotToCamera1; + // Pull fixed mount transforms from Constants (new regime) + final Transform3d rToC0 = getMountTransformOrThrow(cam1Index); + final Transform3d rToC1 = getMountTransformOrThrow(cam2Index); + // Extract mount transform translation + quaternion once for CSV (self-contained rows) final Translation3d c0T = rToC0.getTranslation(); final Translation3d c1T = rToC1.getTranslation(); final Quaternion c0Q = rToC0.getRotation().getQuaternion(); final Quaternion c1Q = rToC1.getRotation().getQuaternion(); + // Build the target list ONCE (huge savings vs doing it inside the deepest loop) + final Set simTargets = visionSim.getVisionTargets(); + final List targetList = new ArrayList<>(simTargets); + try (FileWriter writer = new FileWriter(outputCsvPath)) { - // Header includes robot yaw and BOTH robot->camera transforms (translation + quaternion) writer.write( "robotX,robotY,robotZ,robotYawDeg," + "cam1YawDeg,cam1PitchDeg,cam2YawDeg,cam2PitchDeg,score," - + "rToC0_tx,rToC0_ty,rToC0_tz,rToC0_qw,rToC0_qx,rToC0_qy,rToC0_qz," - + "rToC1_tx,rToC1_ty,rToC1_tz,rToC1_qw,rToC1_qx,rToC1_qy,rToC1_qz\n"); + + "mount0_idx,mount0_tx,mount0_ty,mount0_tz,mount0_qw,mount0_qx,mount0_qy,mount0_qz," + + "mount1_idx,mount1_tx,mount1_ty,mount1_tz,mount1_qw,mount1_qx,mount1_qy,mount1_qz\n"); - // Sweep robot over the field for (double rx : fieldX) { for (double ry : fieldY) { for (double rz : robotZ) { - - // NEW: Sweep robot yaw at each location for (double rYaw : robotYawDeg) { Pose3d robotPose = new Pose3d( new Translation3d(rx, ry, rz), new Rotation3d(0.0, 0.0, Units.degreesToRadians(rYaw))); - // Base camera poses from mount transforms (these rotate with robot yaw) + // Base camera poses from mount transforms (rotate with robot yaw) Pose3d cam1BasePose = robotPose.transformBy(rToC0); Pose3d cam2BasePose = robotPose.transformBy(rToC1); - // Sweep camera "extra" rotations for (double c1Yaw : 
camYawDeg) { for (double c1Pitch : camPitchDeg) { - Rotation3d cam1Rot = composeCameraExtra(cam1BasePose.getRotation(), c1Yaw, c1Pitch); Pose3d cam1Pose = new Pose3d(cam1BasePose.getTranslation(), cam1Rot); for (double c2Yaw : camYawDeg) { for (double c2Pitch : camPitchDeg) { - Rotation3d cam2Rot = composeCameraExtra(cam2BasePose.getRotation(), c2Yaw, c2Pitch); Pose3d cam2Pose = new Pose3d(cam2BasePose.getTranslation(), cam2Rot); - // Get all vision targets - Set simTargets = visionSim.getVisionTargets(); - List targetList = new ArrayList<>(simTargets); - - // Simulate camera processing + // Simulate camera processing (timestampSeconds can be 0 for this offline + // sweep) PhotonPipelineResult res1 = camSim1.process(0, cam1Pose, targetList); PhotonPipelineResult res2 = camSim2.process(0, cam2Pose, targetList); - // Score - double score = res1.getTargets().size() + res2.getTargets().size(); - if (res1.getTargets().size() >= 2) score += 2.0; - if (res2.getTargets().size() >= 2) score += 2.0; + // Score: count visible targets + bonus for multi-tag + int n1 = res1.getTargets().size(); + int n2 = res2.getTargets().size(); + double score = n1 + n2; + if (n1 >= 2) score += 2.0; + if (n2 >= 2) score += 2.0; - // Penalize ambiguity safely with loops for (var t : res1.getTargets()) score -= t.getPoseAmbiguity() * 2.0; for (var t : res2.getTargets()) score -= t.getPoseAmbiguity() * 2.0; - // Write CSV row (note: we repeat mount transforms each row for - // “self-contained” CSV) writer.write( String.format( "%.2f,%.2f,%.2f,%.1f,%.1f,%.1f,%.1f,%.1f,%.3f," - + "%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g," - + "%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g\n", + + "%d,%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g," + + "%d,%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g\n", rx, ry, rz, @@ -208,6 +225,7 @@ public void runFullSweep(String outputCsvPath) throws IOException { c2Yaw, c2Pitch, score, + cam1Index, c0T.getX(), c0T.getY(), c0T.getZ(), @@ -215,6 +233,7 @@ public void runFullSweep(String outputCsvPath) throws IOException { c0Q.getX(), c0Q.getY(), c0Q.getZ(), + cam2Index, c1T.getX(), c1T.getY(), c1T.getZ(), @@ -223,10 +242,9 @@ public void runFullSweep(String outputCsvPath) throws IOException { c1Q.getY(), c1Q.getZ())); + // Optional logging: keep it light (or decimate) so it doesn’t dominate + // runtime Logger.recordOutput("CameraSweep/Score", score); - Logger.recordOutput("CameraSweep/RobotPose", robotPose); - Logger.recordOutput("CameraSweep/Cam1Pose", cam1Pose); - Logger.recordOutput("CameraSweep/Cam2Pose", cam2Pose); } } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 263ed34..04e2041 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -11,7 +11,6 @@ package frc.robot.subsystems.vision; -import static frc.robot.Constants.Cameras.*; import static frc.robot.Constants.VisionConstants.*; import edu.wpi.first.math.Matrix; @@ -23,7 +22,7 @@ import edu.wpi.first.math.numbers.N3; import edu.wpi.first.wpilibj.Alert; import edu.wpi.first.wpilibj.Alert.AlertType; -import frc.robot.Constants.Cameras; +import frc.robot.Constants; import frc.robot.FieldConstants; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; import frc.robot.util.VirtualSubsystem; @@ -36,10 +35,28 @@ public class Vision extends VirtualSubsystem { private final VisionIOInputsAutoLogged[] inputs; private final Alert[] disconnectedAlerts; + // Camera configs (names, transforms, stddev 
multipliers, sim props) + private final Constants.Cameras.CameraConfig[] camConfigs; + + // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- + // Summary buffers + private final ArrayList allTagPoses = new ArrayList<>(32); + private final ArrayList allRobotPoses = new ArrayList<>(64); + private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); + private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + + // Per-camera buffers (reused each camera) + private final ArrayList tagPoses = new ArrayList<>(16); + private final ArrayList robotPoses = new ArrayList<>(32); + private final ArrayList robotPosesAccepted = new ArrayList<>(32); + private final ArrayList robotPosesRejected = new ArrayList<>(32); + public Vision(VisionConsumer consumer, VisionIO... io) { this.consumer = consumer; this.io = io; + this.camConfigs = Constants.Cameras.ALL; + // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; for (int i = 0; i < inputs.length; i++) { @@ -48,53 +65,46 @@ public Vision(VisionConsumer consumer, VisionIO... io) { // Initialize disconnected alerts this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { disconnectedAlerts[i] = - new Alert( - "Vision camera " + Integer.toString(i) + " is disconnected.", AlertType.kWarning); + new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); } - // Log the robot-to-camera transformations - Logger.recordOutput("Vision/RobotToCamera0", Cameras.robotToCamera0); - Logger.recordOutput("Vision/RobotToCamera1", Cameras.robotToCamera1); + // Log robot-to-camera transforms from the new camera config array + // (Only log as many as exist in BOTH configs and IOs) + int n = Math.min(camConfigs.length, io.length); + for (int i = 0; i < n; i++) { + Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); + } } - /** - * Returns the X angle to the best target, which can be used for simple servoing with vision. - * - * @param cameraIndex The index of the camera to use. - */ + /** Returns the X angle to the best target, useful for simple servoing. 
*/ public Rotation2d getTargetX(int cameraIndex) { return inputs[cameraIndex].latestTargetObservation.tx(); } @Override - public void rbsiPeriodic() {} - - public void somethingElse() { - - // Update inputs + process inputs first (cheap, and keeps AK logs consistent) + public void rbsiPeriodic() { + // 1) Update inputs + process inputs first (keeps AK logs consistent) for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); disconnectedAlerts[i].set(!inputs[i].connected); } - // Reusable scratch buffers (ArrayList avoids LinkedList churn) - // Tune these capacities if you know typical sizes - final ArrayList allTagPoses = new ArrayList<>(32); - final ArrayList allRobotPoses = new ArrayList<>(64); - final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // 2) Clear summary buffers (reused) + allTagPoses.clear(); + allRobotPoses.clear(); + allRobotPosesAccepted.clear(); + allRobotPosesRejected.clear(); - // Loop over cameras + // 3) Process each camera for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - - // Per-camera scratch buffers - final ArrayList tagPoses = new ArrayList<>(16); - final ArrayList robotPoses = new ArrayList<>(32); - final ArrayList robotPosesAccepted = new ArrayList<>(32); - final ArrayList robotPosesRejected = new ArrayList<>(32); + // Clear per-camera buffers + tagPoses.clear(); + robotPoses.clear(); + robotPosesAccepted.clear(); + robotPosesRejected.clear(); // Add tag poses from ids for (int tagId : inputs[cameraIndex].tagIds) { @@ -106,21 +116,17 @@ public void somethingElse() { // Loop over pose observations for (var observation : inputs[cameraIndex].poseObservations) { - // Check whether to reject pose + // Reject rules boolean rejectPose = - observation.tagCount() == 0 // Must have at least one tag - || (observation.tagCount() == 1 - && observation.ambiguity() > maxAmbiguity) // Cannot be high ambiguity - || Math.abs(observation.pose().getZ()) - > maxZError // Must have realistic Z coordinate - - // Must be within the field boundaries + observation.tagCount() == 0 + || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) + || Math.abs(observation.pose().getZ()) > maxZError || observation.pose().getX() < 0.0 || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() || observation.pose().getY() < 0.0 || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - // Add pose to log + // Log pose buckets robotPoses.add(observation.pose()); if (rejectPose) { robotPosesRejected.add(observation.pose()); @@ -128,33 +134,35 @@ public void somethingElse() { robotPosesAccepted.add(observation.pose()); } - // Skip if rejected if (rejectPose) { continue; } - // Calculate standard deviations + // Standard deviations double stdDevFactor = Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); double linearStdDev = linearStdDevBaseline * stdDevFactor; double angularStdDev = angularStdDevBaseline * stdDevFactor; + if (observation.type() == PoseObservationType.MEGATAG_2) { linearStdDev *= linearStdDevMegatag2Factor; angularStdDev *= angularStdDevMegatag2Factor; } - if (cameraIndex < cameraStdDevFactors.length) { - linearStdDev *= cameraStdDevFactors[cameraIndex]; - angularStdDev *= cameraStdDevFactors[cameraIndex]; + + // Apply per-camera multiplier from CameraConfig + if (cameraIndex < camConfigs.length) { + double k = 
camConfigs[cameraIndex].stdDevFactor(); + linearStdDev *= k; + angularStdDev *= k; } - // Send vision observation consumer.accept( observation.pose().toPose2d(), observation.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); } - // Log camera data + // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) Logger.recordOutput( "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); Logger.recordOutput( @@ -166,14 +174,14 @@ public void somethingElse() { "Vision/Camera" + cameraIndex + "/RobotPosesRejected", robotPosesRejected.toArray(new Pose3d[0])); - // Summary aggregation + // Aggregate summary allTagPoses.addAll(tagPoses); allRobotPoses.addAll(robotPoses); allRobotPosesAccepted.addAll(robotPosesAccepted); allRobotPosesRejected.addAll(robotPosesRejected); } - // Log summary data + // 4) Summary logs Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); Logger.recordOutput( @@ -184,7 +192,7 @@ public void somethingElse() { @FunctionalInterface public static interface VisionConsumer { - public void accept( + void accept( Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix visionMeasurementStdDevs);
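With the camera constants consolidated into `Constants.Cameras.ALL`, adding a camera no longer touches `RobotContainer`, `Robot.simulationInit`, or `Vision`; they all iterate over the array. Below is a sketch of a hypothetical third entry appended inside the existing `ALL` initializer (the name `camera_2`, its mounting transform, and the std-dev factor are illustrative only; the imports already present in `Constants.java` are assumed).

```java
// Hypothetical third entry for Constants.Cameras.ALL; name and transform are examples only.
new CameraConfig(
    "camera_2", // must match the camera name configured on the coprocessor
    new Transform3d(
        Inches.of(13.0),
        Inches.of(0.0),
        Inches.of(12.0),
        new Rotation3d(0.0, 0.0, 0.0)), // e.g. front-center, 12" up, facing forward
    1.0, // std-dev multiplier: trust this camera the same as the other two
    new SimCameraProperties() {
      {
        setCalibration(1280, 800, Rotation2d.fromDegrees(120));
        setCalibError(0.25, 0.08);
        setFPS(30);
        setAvgLatencyMs(20);
        setLatencyStdDevMs(5);
      }
    })
```

Note that `CameraSweepEvaluator` still assumes exactly two cameras (the SIM branch in `RobotContainer` guards on `simCams.length >= 2`), so an added camera participates in pose estimation but not in the placement sweep unless the evaluator is extended.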