From 2da4c0888e4a412963d3b4fbb59786f8259d9d9b Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 10 Nov 2025 16:51:52 -0700 Subject: [PATCH 01/12] New template-syncing Action modified: .github/workflows/create-dependabot-labels.yaml --- .github/workflows/create-dependabot-labels.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/create-dependabot-labels.yaml b/.github/workflows/create-dependabot-labels.yaml index d2795e5..64f7e3b 100644 --- a/.github/workflows/create-dependabot-labels.yaml +++ b/.github/workflows/create-dependabot-labels.yaml @@ -14,7 +14,11 @@ jobs: issues: write # Needed to create labels via API steps: - name: Create labels for Dependabot PRs +<<<<<<< HEAD uses: actions/github-script@v8 +======= + uses: actions/github-script@v7 +>>>>>>> ec71cef (New template-syncing Action) with: script: | const labels = [ From 92dc3d8b986985b084082760be47e54da431e616 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 12 May 2025 13:19:59 -0700 Subject: [PATCH 02/12] Add vision examples from good teams new file: src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java new file: src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java new file: src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java new file: src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java --- .../subsystems/vision/FRC254/RobotState.java | 416 ++++++++++++ .../FRC254/VisionFieldPoseEstimate.java | 33 + .../vision/FRC254/VisionSubsystem.java | 594 ++++++++++++++++++ .../subsystems/vision/FRC6328/RobotState.java | 448 +++++++++++++ .../subsystems/vision/FRC6328/Vision.java | 401 ++++++++++++ .../vision/FRC6328/VisionConstants.java | 175 ++++++ 6 files changed, 2067 insertions(+) create mode 100644 
src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java new file mode 100644 index 0000000..9c938df --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java @@ -0,0 +1,416 @@ +// package com.team254.frc2024; + +// import com.team254.frc2024.controlboard.ModalControls; +// import com.team254.frc2024.subsystems.vision.VisionFieldPoseEstimate; +// import com.team254.lib.time.RobotTime; +// import com.team254.lib.util.ConcurrentTimeInterpolatableBuffer; +// import com.team254.lib.util.MathHelpers; +// import com.team254.lib.util.PoopTargetFactory.NearTarget; + +// import edu.wpi.first.math.MathUtil; +// import edu.wpi.first.math.geometry.*; +// import edu.wpi.first.math.kinematics.ChassisSpeeds; +// import edu.wpi.first.wpilibj.DriverStation; +// import edu.wpi.first.wpilibj.DriverStation.Alliance; +// import edu.wpi.first.wpilibj.Timer; +// import java.util.*; +// import java.util.concurrent.atomic.AtomicReference; +// import java.util.concurrent.atomic.AtomicBoolean; +// import java.util.concurrent.atomic.AtomicInteger; +// import java.util.function.Consumer; +// import java.util.function.IntSupplier; + +// import org.littletonrobotics.junction.Logger; + +// public class RobotState { + +// public final static double LOOKBACK_TIME = 1.0; + +// private final Consumer visionEstimateConsumer; + +// public RobotState(Consumer visionEstimateConsumer) { +// 
this.visionEstimateConsumer = visionEstimateConsumer; +// // Make sure to add one sample to these methods to protect callers against null. +// fieldToRobot.addSample(0.0, MathHelpers.kPose2dZero); +// robotToTurret.addSample(0.0, MathHelpers.kRotation2dZero); +// turretAngularVelocity.addSample(0.0, 0.0); +// driveYawAngularVelocity.addSample(0.0, 0.0); +// turretPositionRadians.addSample(0.0, 0.0); +// } + +// // State of robot. + +// // Kinematic Frames +// private final ConcurrentTimeInterpolatableBuffer fieldToRobot = +// ConcurrentTimeInterpolatableBuffer +// .createBuffer(LOOKBACK_TIME); +// private final ConcurrentTimeInterpolatableBuffer robotToTurret = +// ConcurrentTimeInterpolatableBuffer +// .createBuffer(LOOKBACK_TIME); +// private static final Transform2d TURRET_TO_CAMERA = new +// Transform2d(Constants.kTurretToCameraX, +// Constants.kTurretToCameraY, +// MathHelpers.kRotation2dZero); +// private static final Transform2d ROBOT_TO_CAMERA_B = new +// Transform2d(Constants.kTurretToCameraBX, +// Constants.kTurretToCameraBY, +// MathHelpers.kRotation2dZero); +// private final AtomicReference measuredRobotRelativeChassisSpeeds = new +// AtomicReference<>( +// new ChassisSpeeds()); +// private final AtomicReference measuredFieldRelativeChassisSpeeds = new +// AtomicReference<>( +// new ChassisSpeeds()); +// private final AtomicReference desiredFieldRelativeChassisSpeeds = new +// AtomicReference<>( +// new ChassisSpeeds()); +// private final AtomicReference fusedFieldRelativeChassisSpeeds = new +// AtomicReference<>( +// new ChassisSpeeds()); + +// private final AtomicInteger iteration = new AtomicInteger(0); + +// private double lastUsedMegatagTimestamp = 0; +// private double lastTriggeredIntakeSensorTimestamp = 0; +// private ConcurrentTimeInterpolatableBuffer turretAngularVelocity = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer turretPositionRadians = +// 
ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); + +// private ConcurrentTimeInterpolatableBuffer drivePitchRads = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer driveRollRads = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer accelX = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); +// private ConcurrentTimeInterpolatableBuffer accelY = +// ConcurrentTimeInterpolatableBuffer +// .createDoubleBuffer(LOOKBACK_TIME); + +// private final AtomicBoolean enablePathCancel = new AtomicBoolean(false); + +// private double autoStartTime; + +// public void setAutoStartTime(double timestamp) { +// autoStartTime = timestamp; +// } + +// public double getAutoStartTime() { +// return autoStartTime; +// } + +// public void enablePathCancel() { +// enablePathCancel.set(true); +// } + +// public void disablePathCancel() { +// enablePathCancel.set(false); +// } + +// public boolean getPathCancel() { +// return enablePathCancel.get(); +// } + +// public void addOdometryMeasurement(double timestamp, Pose2d pose) { +// fieldToRobot.addSample(timestamp, pose); +// } + +// public void incrementIterationCount() { +// iteration.incrementAndGet(); +// } + +// public int getIteration() { +// return iteration.get(); +// } + +// public IntSupplier getIterationSupplier() { +// return () -> getIteration(); +// } + +// public void 
addDriveMotionMeasurements(double timestamp, +// double angularRollRadsPerS, +// double angularPitchRadsPerS, +// double angularYawRadsPerS, +// double pitchRads, +// double rollRads, +// double accelX, +// double accelY, +// ChassisSpeeds desiredFieldRelativeSpeeds, +// ChassisSpeeds measuredSpeeds, +// ChassisSpeeds measuredFieldRelativeSpeeds, +// ChassisSpeeds fusedFieldRelativeSpeeds) { +// this.driveRollAngularVelocity.addSample(timestamp, angularRollRadsPerS); +// this.drivePitchAngularVelocity.addSample(timestamp, angularPitchRadsPerS); +// this.driveYawAngularVelocity.addSample(timestamp, angularYawRadsPerS); +// this.drivePitchRads.addSample(timestamp, pitchRads); +// this.driveRollRads.addSample(timestamp, rollRads); +// this.accelY.addSample(timestamp, accelY); +// this.accelX.addSample(timestamp, accelX); +// this.desiredFieldRelativeChassisSpeeds.set(desiredFieldRelativeSpeeds); +// this.measuredRobotRelativeChassisSpeeds.set(measuredSpeeds); +// this.measuredFieldRelativeChassisSpeeds.set(measuredFieldRelativeSpeeds); +// this.fusedFieldRelativeChassisSpeeds.set(fusedFieldRelativeSpeeds); +// } + +// public Map.Entry getLatestFieldToRobot() { +// return fieldToRobot.getLatest(); +// } + +// public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { +// var maybeFieldToRobot = getLatestFieldToRobot(); +// Pose2d fieldToRobot = maybeFieldToRobot == null ? MathHelpers.kPose2dZero : +// maybeFieldToRobot.getValue(); +// var delta = getLatestRobotRelativeChassisSpeed(); +// delta = delta.times(lookaheadTimeS); +// return fieldToRobot +// .exp(new Twist2d(delta.vxMetersPerSecond, delta.vyMetersPerSecond, +// delta.omegaRadiansPerSecond)); +// } + +// public Pose2d getLatestFieldToRobotCenter() { +// return fieldToRobot.getLatest().getValue().transformBy(Constants.kTurretToRobotCenter); +// } + +// // This has rotation and radians to allow for wrapping tracking. 
+// public void addTurretUpdates(double timestamp, +// Rotation2d turretRotation, +// double turretRadians, +// double angularYawRadsPerS) { +// // turret frame 180 degrees off from robot frame +// robotToTurret.addSample(timestamp, turretRotation.rotateBy(MathHelpers.kRotation2dPi)); +// this.turretAngularVelocity.addSample(timestamp, angularYawRadsPerS); +// this.turretPositionRadians.addSample(timestamp, turretRadians); +// } + +// public double getLatestTurretPositionRadians() { +// return this.turretPositionRadians.getInternalBuffer().lastEntry().getValue(); +// } + +// public double getLatestTurretAngularVelocity() { +// return this.turretAngularVelocity.getInternalBuffer().lastEntry().getValue(); +// } + +// private Rotation2d hoodRotation = new Rotation2d(); + +// public void addHoodRotation(Rotation2d rotationFromZero) { +// hoodRotation = rotationFromZero; +// } + +// public Rotation2d getHoodRotation() { +// return hoodRotation; +// } + +// private double elevatorHeightM = 0.0; + +// public void setElevatorHeight(double heightM) { +// this.elevatorHeightM = heightM; +// } + +// public double getElevatorHeight() { +// return elevatorHeightM; +// } + +// private double climberRotations = 0.0; + +// public void setClimberRotations(double rotations) { +// this.climberRotations = rotations; +// } + +// private NearTarget poopNearTarget = NearTarget.DEEP_AMP; + +// public void setPoopNearTarget(NearTarget nearTarget) { +// poopNearTarget = nearTarget; +// } + +// public NearTarget getPoopNearTarget() { +// return poopNearTarget; +// } + +// public Optional getRobotToTurret(double timestamp) { +// return robotToTurret.getSample(timestamp); +// } + +// public Optional getFieldToRobot(double timestamp) { +// return fieldToRobot.getSample(timestamp); +// } + +// public Transform2d getTurretToCamera(boolean isTurretCamera) { +// return isTurretCamera ? 
TURRET_TO_CAMERA : ROBOT_TO_CAMERA_B; +// } + +// public Map.Entry getLatestRobotToTurret() { +// return robotToTurret.getLatest(); +// } + +// public ChassisSpeeds getLatestMeasuredFieldRelativeChassisSpeeds() { +// return measuredFieldRelativeChassisSpeeds.get(); +// } + +// public ChassisSpeeds getLatestRobotRelativeChassisSpeed() { +// return measuredRobotRelativeChassisSpeeds.get(); +// } + +// public ChassisSpeeds getLatestDesiredFieldRelativeChassisSpeed() { +// return desiredFieldRelativeChassisSpeeds.get(); +// } + +// public ChassisSpeeds getLatestFusedFieldRelativeChassisSpeed() { +// return fusedFieldRelativeChassisSpeeds.get(); +// } + +// public ChassisSpeeds getLatestFusedRobotRelativeChassisSpeed() { +// var speeds = getLatestRobotRelativeChassisSpeed(); +// speeds.omegaRadiansPerSecond = +// getLatestFusedFieldRelativeChassisSpeed().omegaRadiansPerSecond; +// return speeds; +// } + +// public Optional getTurretAngularVelocity(double timestamp) { +// return turretAngularVelocity.getSample(timestamp); +// } + +// private Optional getMaxAbsValueInRange(ConcurrentTimeInterpolatableBuffer +// buffer, double minTime, +// double maxTime) { +// var submap = buffer.getInternalBuffer().subMap(minTime, maxTime).values(); +// var max = submap.stream().max(Double::compare); +// var min = submap.stream().min(Double::compare); +// if (max.isEmpty() || min.isEmpty()) +// return Optional.empty(); +// if (Math.abs(max.get()) >= Math.abs(min.get())) +// return max; +// else +// return min; +// } + +// public Optional getMaxAbsTurretYawAngularVelocityInRange(double minTime, double +// maxTime) { +// return getMaxAbsValueInRange(turretAngularVelocity, minTime, maxTime); +// } + +// public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double +// maxTime) { +// // Gyro yaw rate not set in sim. 
+// if (Robot.isReal()) +// return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); +// return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); +// } + +// public Optional getMaxAbsDrivePitchAngularVelocityInRange(double minTime, double +// maxTime) { +// return getMaxAbsValueInRange(drivePitchAngularVelocity, minTime, maxTime); +// } + +// public Optional getMaxAbsDriveRollAngularVelocityInRange(double minTime, double +// maxTime) { +// return getMaxAbsValueInRange(driveRollAngularVelocity, minTime, maxTime); +// } + +// public void updateMegatagEstimate(VisionFieldPoseEstimate megatagEstimate) { +// lastUsedMegatagTimestamp = Timer.getFPGATimestamp(); +// visionEstimateConsumer.accept(megatagEstimate); +// } + +// public void updatePinholeEstimate(VisionFieldPoseEstimate pinholeEstimate) { +// visionEstimateConsumer.accept(pinholeEstimate); +// } + +// public void updateLastTriggeredIntakeSensorTimestamp(boolean triggered) { +// if (triggered) +// lastTriggeredIntakeSensorTimestamp = RobotTime.getTimestampSeconds(); +// } + +// public double lastUsedMegatagTimestamp() { +// return lastUsedMegatagTimestamp; +// } + +// public double lastTriggeredIntakeSensorTimestamp() { +// return lastTriggeredIntakeSensorTimestamp; +// } + +// public boolean isRedAlliance() { +// return DriverStation.getAlliance().isPresent() && +// DriverStation.getAlliance().equals(Optional.of(Alliance.Red)); +// } + +// public void updateLogger() { +// if (this.driveYawAngularVelocity.getInternalBuffer().lastEntry() != null) { +// Logger.recordOutput("RobotState/YawAngularVelocity", +// this.driveYawAngularVelocity.getInternalBuffer().lastEntry().getValue()); +// } +// if (this.driveRollAngularVelocity.getInternalBuffer().lastEntry() != null) { +// Logger.recordOutput("RobotState/RollAngularVelocity", +// this.driveRollAngularVelocity.getInternalBuffer().lastEntry().getValue()); +// } +// if 
(this.drivePitchAngularVelocity.getInternalBuffer().lastEntry() != null) { +// Logger.recordOutput("RobotState/PitchAngularVelocity", +// this.drivePitchAngularVelocity.getInternalBuffer().lastEntry().getValue()); +// } +// if (this.accelX.getInternalBuffer().lastEntry() != null) { +// Logger.recordOutput("RobotState/AccelX", +// this.accelX.getInternalBuffer().lastEntry().getValue()); +// } +// if (this.accelY.getInternalBuffer().lastEntry() != null) { +// Logger.recordOutput("RobotState/AccelY", +// this.accelY.getInternalBuffer().lastEntry().getValue()); +// } +// Logger.recordOutput("RobotState/DesiredChassisSpeedFieldFrame", +// getLatestDesiredFieldRelativeChassisSpeed()); +// Logger.recordOutput("RobotState/MeasuredChassisSpeedFieldFrame", +// getLatestMeasuredFieldRelativeChassisSpeeds()); +// Logger.recordOutput("RobotState/FusedChassisSpeedFieldFrame", +// getLatestFusedFieldRelativeChassisSpeed()); +// } + +// Pose3d ampPose3d = new Pose3d(); +// Pose3d climberPose3d = new Pose3d(); +// Pose3d hoodPose3d = new Pose3d(); +// Pose3d shooterPose3d = new Pose3d(); +// Pose3d turretPose3d = new Pose3d(); + +// public void updateViz() { +// if (getLatestRobotToTurret().getValue() != null) { +// shooterPose3d = new Pose3d(new Translation3d(), +// new Rotation3d(0, 0, +// getLatestRobotToTurret().getValue().getRadians())); +// hoodPose3d = new Pose3d(new Translation3d(), +// new Rotation3d(0, 0, +// getLatestRobotToTurret().getValue().getRadians())); +// } +// ampPose3d = new Pose3d( +// new Translation3d(0.0, 0.0, this.elevatorHeightM), +// new Rotation3d()); +// var climberRot = MathUtil.interpolate(0.0, -Math.PI / 2.0, +// this.climberRotations / +// (Constants.ClimberConstants.kForwardMaxPositionRotations +// * Constants.kClimberConfig.unitToRotorRatio)); +// climberPose3d = new Pose3d( +// new Translation3d(0, 0.0, 0.15), new Rotation3d(0.0, climberRot, 0.0)); +// // model_0 is elevator +// // model_1 is climber +// // model_2 is hood plates +// // model_3 
is shooter +// // model_4 is turret +// Logger.recordOutput("ComponentsPoseArray", +// new Pose3d[] { ampPose3d, climberPose3d, hoodPose3d, shooterPose3d, turretPose3d +// }); +// } + +// public void logControllerMode() { +// Logger.recordOutput("Controller Mode", ModalControls.getInstance().getMode().toString()); +// } +// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java new file mode 100644 index 0000000..57174d3 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java @@ -0,0 +1,33 @@ +// package com.team254.frc2024.subsystems.vision; + +// import edu.wpi.first.math.Matrix; +// import edu.wpi.first.math.geometry.Pose2d; +// import edu.wpi.first.math.numbers.N1; +// import edu.wpi.first.math.numbers.N3; + +// public class VisionFieldPoseEstimate { + +// private final Pose2d visionRobotPoseMeters; +// private final double timestampSeconds; +// private final Matrix visionMeasurementStdDevs; + +// public VisionFieldPoseEstimate(Pose2d visionRobotPoseMeters, +// double timestampSeconds, +// Matrix visionMeasurementStdDevs) { +// this.visionRobotPoseMeters = visionRobotPoseMeters; +// this.timestampSeconds = timestampSeconds; +// this.visionMeasurementStdDevs = visionMeasurementStdDevs; +// } + +// public Pose2d getVisionRobotPoseMeters() { +// return visionRobotPoseMeters; +// } + +// public double getTimestampSeconds() { +// return timestampSeconds; +// } + +// public Matrix getVisionMeasurementStdDevs() { +// return visionMeasurementStdDevs; +// } +// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java new file mode 100644 index 0000000..f2d0489 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java @@ -0,0 +1,594 @@ +// package 
com.team254.frc2024.subsystems.vision; + +// import java.util.Arrays; +// import java.util.HashSet; +// import java.util.List; +// import java.util.Objects; +// import java.util.Optional; +// import java.util.Set; +// import java.util.stream.Collectors; + +// import org.littletonrobotics.junction.Logger; + +// import com.team254.frc2024.Constants; +// import com.team254.frc2024.RobotState; +// import com.team254.lib.time.RobotTime; +// import com.team254.lib.util.MathHelpers; +// import com.team254.lib.util.Util; + +// import edu.wpi.first.math.Matrix; +// import edu.wpi.first.math.VecBuilder; +// import edu.wpi.first.math.geometry.Pose2d; +// import edu.wpi.first.math.geometry.Pose3d; +// import edu.wpi.first.math.geometry.Rotation2d; +// import edu.wpi.first.math.geometry.Rotation3d; +// import edu.wpi.first.math.geometry.Transform2d; +// import edu.wpi.first.math.geometry.Transform3d; +// import edu.wpi.first.math.geometry.Translation2d; +// import edu.wpi.first.math.geometry.Translation3d; +// import edu.wpi.first.math.numbers.N1; +// import edu.wpi.first.math.numbers.N3; +// import edu.wpi.first.math.util.Units; +// import edu.wpi.first.wpilibj.DriverStation; +// import edu.wpi.first.wpilibj2.command.SubsystemBase; + +// public class VisionSubsystem extends SubsystemBase { +// private final VisionIO io; + +// private final RobotState state; + +// private final VisionIOInputsAutoLogged inputs = new VisionIOInputsAutoLogged(); + +// private static class PinholeObservation { +// public Translation2d cameraToTag; +// public Pose3d tagPose; +// } + +// private double lastProcessedTurretTimestamp = 0.0; +// private double lastProcessedElevatorTimestamp = 0.0; + +// public VisionSubsystem(VisionIO io, RobotState state) { +// this.io = io; +// this.state = state; +// } + +// @Override +// public void periodic() { +// double timestamp = RobotTime.getTimestampSeconds(); +// // Read inputs from IO +// io.readInputs(inputs); +// Logger.processInputs("Vision", inputs); + 
+// // Optionally update RobotState +// if (inputs.turretCameraSeesTarget) { +// updateVision(inputs.turretCameraSeesTarget, inputs.turretCameraFiducialObservations, +// inputs.turretCameraMegatagPoseEstimate, +// inputs.turretCameraMegatag2PoseEstimate, true); +// } else if (inputs.elevatorCameraSeesTarget) { +// updateVision(inputs.elevatorCameraSeesTarget, +// inputs.elevatorCameraFiducialObservations, +// inputs.elevatorCameraMegatagPoseEstimate, +// inputs.elevatorCameraMegatag2PoseEstimate, false); +// } + +// Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - +// timestamp); +// } + +// private void updateVision(boolean cameraSeesTarget, FiducialObservation[] +// cameraFiducialObservations, +// MegatagPoseEstimate cameraMegatagPoseEstimate, MegatagPoseEstimate +// cameraMegatag2PoseEstimate, +// boolean isTurretCamera) { +// if (cameraMegatagPoseEstimate != null) { + +// String logPreface = "Vision/" + (isTurretCamera ? "Turret/" : "Elevator/"); +// var updateTimestamp = cameraMegatagPoseEstimate.timestampSeconds; +// boolean alreadyProcessedTimestamp = (isTurretCamera ? 
lastProcessedTurretTimestamp +// : lastProcessedElevatorTimestamp) == updateTimestamp; +// if (!alreadyProcessedTimestamp && cameraSeesTarget) { +// if (!isTurretCamera && !Util.epsilonEquals(state.getElevatorHeight(), 0.0, 0.05)) +// return; +// Optional pinholeEstimate = Optional.empty();// +// processPinholeVisionEstimate(pinholeObservations, +// // +// updateTimestamp, +// // +// isTurretCamera); + +// Optional megatagEstimate = processMegatagPoseEstimate( +// cameraMegatagPoseEstimate, +// isTurretCamera); +// Optional megatag2Estimate = processMegatag2PoseEstimate( +// cameraMegatag2PoseEstimate, isTurretCamera, logPreface); + +// boolean used_megatag = false; +// if (megatagEstimate.isPresent()) { +// if (shouldUseMegatag(cameraMegatagPoseEstimate, cameraFiducialObservations, +// isTurretCamera, +// logPreface)) { +// Logger.recordOutput(logPreface + "MegatagEstimate", +// megatagEstimate.get().getVisionRobotPoseMeters()); +// state.updateMegatagEstimate(megatagEstimate.get()); +// used_megatag = true; +// } else { +// if (megatagEstimate.isPresent()) { +// Logger.recordOutput(logPreface + "MegatagEstimateRejected", +// megatagEstimate.get().getVisionRobotPoseMeters()); +// } +// } +// } + +// if (megatag2Estimate.isPresent() && !used_megatag) { +// if (shouldUseMegatag2(cameraMegatag2PoseEstimate, isTurretCamera, +// logPreface)) { +// Logger.recordOutput(logPreface + "Megatag2Estimate", +// megatag2Estimate.get().getVisionRobotPoseMeters()); +// state.updateMegatagEstimate(megatag2Estimate.get()); +// } else { +// if (megatagEstimate.isPresent()) { +// Logger.recordOutput(logPreface + "Megatag2EstimateRejected", +// megatag2Estimate.get().getVisionRobotPoseMeters()); +// } +// } +// } +// if (pinholeEstimate.isPresent()) { +// if (shouldUsePinhole(updateTimestamp, isTurretCamera, logPreface)) { +// Logger.recordOutput(logPreface + "PinholeEstimate", +// pinholeEstimate.get().getVisionRobotPoseMeters()); +// 
state.updatePinholeEstimate(pinholeEstimate.get()); +// } else { +// Logger.recordOutput(logPreface + "PinholeEstimateRejected", +// pinholeEstimate.get().getVisionRobotPoseMeters()); +// } +// } + +// if (isTurretCamera) +// lastProcessedTurretTimestamp = updateTimestamp; +// else +// lastProcessedElevatorTimestamp = updateTimestamp; +// } +// } +// } + +// @SuppressWarnings("unused") +// private List getPinholeObservations(FiducialObservation[] fiducials, +// boolean isTurretCamera) { +// // Iterate over the fiducials to make VisionUpdates +// return Arrays.stream(fiducials).map(fiducial -> { +// Optional tagPoseOptional = Constants.kAprilTagLayout.getTagPose(fiducial.id); +// if (tagPoseOptional.isEmpty()) { +// return null; +// } +// Pose3d tagPose = tagPoseOptional.get(); +// Optional cameraToTarget = getCameraToTargetTranslation(fiducial, +// tagPose, isTurretCamera); + +// if (cameraToTarget.isEmpty()) { +// return null; +// } + +// var observation = new PinholeObservation(); +// observation.cameraToTag = cameraToTarget.get(); +// observation.tagPose = tagPose; +// return observation; +// }).filter(Objects::nonNull).toList(); +// } + +// private Optional getCameraToTargetTranslation(FiducialObservation fiducial, +// Pose3d tagLocation, +// boolean isTurretCamera) { +// // Get the yaw and pitch angles from to target from the camera POV +// double yawRadians = Math.toRadians(fiducial.txnc); +// double pitchRadians = Math.toRadians(fiducial.tync); + +// Transform3d cameraToTarget = new Transform3d(new Translation3d(), new Rotation3d(0.0, +// pitchRadians, 0.0)); +// cameraToTarget = cameraToTarget +// .plus(new Transform3d(new Translation3d(), new Rotation3d(0.0, 0.0, +// yawRadians))); +// Transform3d cameraGroundPlaneToCamera = new Transform3d(new Translation3d(), +// new Rotation3d(0.0, isTurretCamera ? 
Constants.kCameraPitchRads : +// Constants.kCameraBPitchRads, 0)); +// Rotation3d cameraGroundPlaneToTarget = new +// Pose3d().plus(cameraGroundPlaneToCamera.plus(cameraToTarget)) +// .getRotation().unaryMinus(); + +// // Built a unit vector from adjusted rotation. +// // Raw vector: x = 1, y = tan(yaw), z = tan(pitch) +// // Make it a unit vector by dividing each component by magnitude +// // sqrt(x^2+y^2+z^2). +// double tan_ty = Math.tan(cameraGroundPlaneToTarget.getZ()); // y and z switch intentional +// double tan_tz = -Math.tan(cameraGroundPlaneToTarget.getY()); // y and z switch +// intentional + +// if (tan_tz == 0.0) { +// // Protect against divide by zero (i.e. target is at same height as camera). +// return Optional.empty(); +// } + +// // Find the fixed height difference between the center of the tag and the camera +// // lens +// double differential_height = tagLocation.getZ() +// - (isTurretCamera ? Constants.kCameraHeightOffGroundMeters : +// Constants.kCameraBHeightOffGroundMeters); + +// // We now obtain 3d distance by dividing differential_height by our normalized z +// // component z / (Math.sqrt(x^2+y^2+z^2)) +// double distance = differential_height * Math.sqrt(1.0 + tan_tz * tan_tz + tan_ty * +// tan_ty) / tan_tz; +// // Build a 3d vector from distance (which we now know) and orientation (which we +// // already computed above). +// Translation3d cameraToTargetTranslation = new Translation3d(distance, +// cameraGroundPlaneToTarget); + +// // Grab the x and y components. 
+// return Optional.of(new Translation2d(cameraToTargetTranslation.getX(), +// cameraToTargetTranslation.getY())); +// } + +// final static Set kTagsBlueSpeaker = new HashSet<>(List.of(7, 8)); +// final static Set kTagsRedSpeaker = new HashSet<>(List.of(3, 4)); + +// private boolean shouldUseMegatag(MegatagPoseEstimate poseEstimate, FiducialObservation[] +// fiducials, +// boolean isTurretCamera, String logPreface) { +// final double kMinAreaForTurretMegatagEnabled = 0.4; +// final double kMinAreaForTurretMegatagDisabled = 0.05; + +// final double kMinAreaForElevatorMegatagEnabled = 0.4; +// final double kMinAreaForElevatorMegatagDisabled = 0.05; + +// double kMinAreaForMegatag = 0.0; +// if (DriverStation.isDisabled()) { +// kMinAreaForMegatag = isTurretCamera ? kMinAreaForTurretMegatagDisabled +// : kMinAreaForElevatorMegatagDisabled; +// } else { +// kMinAreaForMegatag = isTurretCamera ? kMinAreaForTurretMegatagEnabled +// : kMinAreaForElevatorMegatagEnabled; +// } + +// final int kExpectedTagCount = 2; + +// final double kLargeYawThreshold = Units.degreesToRadians(200.0); +// final double kLargeYawEventTimeWindowS = 0.05; + +// if (!isTurretCamera) { +// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( +// poseEstimate.timestampSeconds - kLargeYawEventTimeWindowS, +// poseEstimate.timestampSeconds); +// if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { +// Logger.recordOutput("Vision/Elevator/MegatagYawAngular", false); +// return false; +// } +// Logger.recordOutput("Vision/Elevator/MegatagYawAngular", true); +// } + +// if (poseEstimate.avgTagArea < kMinAreaForMegatag) { +// Logger.recordOutput(logPreface + "megaTagAvgTagArea", false); +// return false; +// } +// Logger.recordOutput(logPreface + "megaTagAvgTagArea", true); + +// if (poseEstimate.fiducialIds.length != kExpectedTagCount) { +// Logger.recordOutput(logPreface + "fiducialLength", false); +// return false; +// } +// Logger.recordOutput(logPreface + 
"fiducialLength", true); + +// if (poseEstimate.fiducialIds.length < 1) { +// Logger.recordOutput(logPreface + "fiducialLengthLess1", false); +// return false; +// } +// Logger.recordOutput(logPreface + "fiducialLengthLess1", true); + +// if (poseEstimate.fieldToCamera.getTranslation().getNorm() < 1.0) { +// Logger.recordOutput(logPreface + "NormCheck", false); +// return false; +// } +// Logger.recordOutput(logPreface + "NormCheck", true); + +// for (var fiducial : fiducials) { +// if (fiducial.ambiguity > .9) { +// Logger.recordOutput(logPreface + "Ambiguity", false); +// return false; +// } +// } +// Logger.recordOutput(logPreface + "Ambiguity", true); + +// Set seenTags = Arrays.stream(poseEstimate.fiducialIds).boxed() +// .collect(Collectors.toCollection(HashSet::new)); +// Set expectedTags = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; +// var result = expectedTags.equals(seenTags); +// Logger.recordOutput(logPreface + "SeenTags", result); +// return result; +// } + +// private boolean shouldUseMegatag2(MegatagPoseEstimate poseEstimate, boolean isTurretCamera, +// String logPreface) { +// return shouldUsePinhole(poseEstimate.timestampSeconds, isTurretCamera, logPreface); +// } + +// private boolean shouldUsePinhole(double timestamp, boolean isTurretCamera, String preface) { +// final double kLargePitchRollYawEventTimeWindowS = 0.1; +// final double kLargePitchRollThreshold = Units.degreesToRadians(10.0); +// final double kLargeYawThreshold = Units.degreesToRadians(100.0); +// if (isTurretCamera) { +// var maxTurretVel = state.getMaxAbsTurretYawAngularVelocityInRange( +// timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); +// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( +// timestamp - kLargePitchRollYawEventTimeWindowS, +// timestamp); + +// if (maxTurretVel.isPresent() && maxYawVel.isPresent() && +// Math.abs(maxTurretVel.get() + maxYawVel.get()) > kLargeYawThreshold) { +// Logger.recordOutput(preface + 
"PinholeTurretAngular", false); +// return false; +// } +// Logger.recordOutput(preface + "PinholeTurretAngular", true); +// } else { +// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( +// timestamp - kLargePitchRollYawEventTimeWindowS, +// timestamp); +// if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { +// Logger.recordOutput(preface + "PinholeYawAngular", false); +// return false; +// } +// Logger.recordOutput(preface + "PinholeYawAngular", true); +// } + +// var maxPitchVel = state.getMaxAbsDrivePitchAngularVelocityInRange( +// timestamp - kLargePitchRollYawEventTimeWindowS, +// timestamp); +// if (maxPitchVel.isPresent() && Math.abs(maxPitchVel.get()) > kLargePitchRollThreshold) { +// Logger.recordOutput(preface + "PinholePitchAngular", false); +// return false; +// } +// Logger.recordOutput(preface + "PinholePitchAngular", true); + +// var maxRollVel = state.getMaxAbsDriveRollAngularVelocityInRange(timestamp - +// kLargePitchRollYawEventTimeWindowS, +// timestamp); +// if (maxRollVel.isPresent() && Math.abs(maxRollVel.get()) > kLargePitchRollThreshold) { +// Logger.recordOutput(preface + "PinholeRollAngular", false); +// return false; +// } +// Logger.recordOutput(preface + "PinholeRollAngular", true); + +// return true; +// } + +// private Optional getFieldToRobotEstimate(MegatagPoseEstimate poseEstimate, boolean +// isTurretCamera) { +// var fieldToCamera = poseEstimate.fieldToCamera; +// if (fieldToCamera.getX() == 0.0) { +// return Optional.empty(); +// } +// var turretToCameraTransform = state.getTurretToCamera(isTurretCamera); +// var cameraToTurretTransform = turretToCameraTransform.inverse(); +// var fieldToTurretPose = fieldToCamera.plus(cameraToTurretTransform); +// var fieldToRobotEstimate = MathHelpers.kPose2dZero; +// if (isTurretCamera) { +// var robotToTurretObservation = state.getRobotToTurret(poseEstimate.timestampSeconds); +// if (robotToTurretObservation.isEmpty()) { +// return Optional.empty(); 
+// } +// var turretToRobot = +// MathHelpers.transform2dFromRotation(robotToTurretObservation.get().unaryMinus()); +// fieldToRobotEstimate = fieldToTurretPose.plus(turretToRobot); +// } else { +// fieldToRobotEstimate = fieldToCamera.plus(turretToCameraTransform.inverse()); +// } + +// return Optional.of(fieldToRobotEstimate); +// } + +// private Optional processMegatag2PoseEstimate(MegatagPoseEstimate +// poseEstimate, +// boolean isTurretCamera, String logPreface) { +// var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); +// if (loggedFieldToRobot.isEmpty()) { +// return Optional.empty(); +// } + +// var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); +// if (maybeFieldToRobotEstimate.isEmpty()) +// return Optional.empty(); +// var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); + +// // distance from current pose to vision estimated pose +// double poseDifference = fieldToRobotEstimate.getTranslation() +// .getDistance(loggedFieldToRobot.get().getTranslation()); + +// var defaultSet = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; +// Set speakerTags = new HashSet<>(defaultSet); +// speakerTags.removeAll( +// +// Arrays.stream(poseEstimate.fiducialIds).boxed().collect(Collectors.toCollection(HashSet::new))); +// boolean seesSpeakerTags = speakerTags.size() < 2; + +// double xyStds; +// if (poseEstimate.fiducialIds.length > 0) { +// // multiple targets detected +// if (poseEstimate.fiducialIds.length >= 2 && poseEstimate.avgTagArea > 0.1) { +// xyStds = 0.2; +// } +// // we detect at least one of our speaker tags and we're close to it. 
+// else if (seesSpeakerTags && poseEstimate.avgTagArea > 0.2) { +// xyStds = 0.5; +// } +// // 1 target with large area and close to estimated pose +// else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { +// xyStds = 0.5; +// } +// // 1 target farther away and estimated pose is close +// else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { +// xyStds = 1.0; +// } else if (poseEstimate.fiducialIds.length > 1) { +// xyStds = 1.2; +// } else { +// xyStds = 2.0; +// } + +// Logger.recordOutput(logPreface + "Megatag2StdDev", xyStds); +// Logger.recordOutput(logPreface + "Megatag2AvgTagArea", poseEstimate.avgTagArea); +// Logger.recordOutput(logPreface + "Megatag2PoseDifference", poseDifference); + +// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, +// Units.degreesToRadians(50.0)); +// fieldToRobotEstimate = new Pose2d(fieldToRobotEstimate.getTranslation(), +// loggedFieldToRobot.get().getRotation()); +// return Optional.of( +// new VisionFieldPoseEstimate(fieldToRobotEstimate, +// poseEstimate.timestampSeconds, +// visionMeasurementStdDevs)); +// } +// return Optional.empty(); +// } + +// private Optional processMegatagPoseEstimate(MegatagPoseEstimate +// poseEstimate, +// boolean isTurretCamera) { +// var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); +// if (loggedFieldToRobot.isEmpty()) { +// return Optional.empty(); +// } + +// var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); +// if (maybeFieldToRobotEstimate.isEmpty()) +// return Optional.empty(); +// var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); + +// // distance from current pose to vision estimated pose +// double poseDifference = fieldToRobotEstimate.getTranslation() +// .getDistance(loggedFieldToRobot.get().getTranslation()); + +// if (poseEstimate.fiducialIds.length > 0) { +// double xyStds = 1.0; +// double degStds = 12; +// // multiple targets detected +// if 
(poseEstimate.fiducialIds.length >= 2) { +// xyStds = 0.5; +// degStds = 6; +// } +// // 1 target with large area and close to estimated pose +// else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { +// xyStds = 1.0; +// degStds = 12; +// } +// // 1 target farther away and estimated pose is close +// else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { +// xyStds = 2.0; +// degStds = 30; +// } + +// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, +// Units.degreesToRadians(degStds)); +// return Optional.of( +// new VisionFieldPoseEstimate(fieldToRobotEstimate, +// poseEstimate.timestampSeconds, +// visionMeasurementStdDevs)); +// } +// return Optional.empty(); +// } + +// @SuppressWarnings("unused") +// private Optional +// processPinholeVisionEstimate(List observations, +// double timestamp, boolean isTurretCamera) { +// observations = observations.stream().filter((observation) -> { +// if (observation.cameraToTag.getNorm() < 10.0) { +// return true; +// } +// Logger.recordOutput("Vision/RejectOnNormTimestamp", RobotTime.getTimestampSeconds()); +// return false; +// }).toList(); + +// if (observations.isEmpty()) +// return Optional.empty(); + +// int num_updates = 0; +// double x = 0.0, y = 0.0; +// // All timestamps and rotations are the same. If that changes, need to revisit. 
+// Rotation2d rotation = MathHelpers.kRotation2dZero; +// double avgRange = 0.0; +// var poseTurret = state.getRobotToTurret(timestamp); +// var poseRobot = state.getFieldToRobot(timestamp); +// if (poseRobot.isEmpty() || poseTurret.isEmpty()) { +// return Optional.empty(); +// } +// for (var observation : observations) { +// Pose2d fieldToRobotEstimate = estimateFieldToRobot( +// observation.cameraToTag, +// observation.tagPose, +// poseTurret.get(), +// poseRobot.get().getRotation(), +// MathHelpers.kRotation2dZero, isTurretCamera); +// x += fieldToRobotEstimate.getX(); +// y += fieldToRobotEstimate.getY(); +// rotation = fieldToRobotEstimate.getRotation(); +// avgRange += observation.cameraToTag.getNorm(); +// num_updates++; +// } + +// if (num_updates == 0) +// return Optional.empty(); + +// avgRange /= num_updates; + +// double xyStds = 100.0; +// var fieldToRobotEstimate = new Pose2d(x / num_updates, y / num_updates, rotation); +// var poseDifference = +// fieldToRobotEstimate.getTranslation().getDistance(poseRobot.get().getTranslation()); +// // multiple targets detected +// if (observations.size() >= 2 && avgRange < 3.0) { +// xyStds = 0.2; +// } else if (avgRange < 5.0 && poseDifference < 0.5) { +// xyStds = 0.5; +// } +// // 1 target farther away and estimated pose is close +// else if (avgRange < 3.0 && poseDifference < 0.3) { +// xyStds = 1.0; +// } else if (observations.size() > 1) { +// xyStds = 1.2; +// } else { +// xyStds = 2.0; +// } +// ; + +// final double rotStdDev = Units.degreesToRadians(50); +// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, rotStdDev); +// return Optional.of(new VisionFieldPoseEstimate(fieldToRobotEstimate, timestamp, +// visionMeasurementStdDevs)); +// } + +// private Pose2d estimateFieldToRobot(Translation2d cameraToTarget, Pose3d fieldToTarget, +// Rotation2d robotToTurret, +// Rotation2d gyroAngle, Rotation2d cameraYawOffset, boolean isTurretCamera) { +// Transform2d cameraToTargetFixed = MathHelpers 
+// .transform2dFromTranslation(cameraToTarget.rotateBy(cameraYawOffset)); +// Transform2d turretToTarget = +// state.getTurretToCamera(isTurretCamera).plus(cameraToTargetFixed); +// // In robot frame +// Transform2d robotToTarget = turretToTarget; +// if (isTurretCamera) { +// robotToTarget = +// MathHelpers.transform2dFromRotation(robotToTurret).plus(turretToTarget); +// } + +// // In field frame +// Transform2d robotToTargetField = MathHelpers +// .transform2dFromTranslation(robotToTarget.getTranslation().rotateBy(gyroAngle)); + +// // In field frame +// Pose2d fieldToTarget2d = +// MathHelpers.pose2dFromTranslation(fieldToTarget.toPose2d().getTranslation()); + +// Pose2d fieldToRobot = fieldToTarget2d.transformBy(MathHelpers.transform2dFromTranslation( +// robotToTargetField.getTranslation().unaryMinus())); + +// Pose2d fieldToRobotYawAdjusted = new Pose2d(fieldToRobot.getTranslation(), gyroAngle); +// return fieldToRobotYawAdjusted; +// } + +// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java new file mode 100644 index 0000000..7d0e53c --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java @@ -0,0 +1,448 @@ +// // Copyright (c) 2025 FRC 6328 +// // http://github.com/Mechanical-Advantage +// // +// // Use of this source code is governed by an MIT-style +// // license that can be found in the LICENSE file at +// // the root directory of this project. 
+ +// package org.littletonrobotics.frc2025; + +// import edu.wpi.first.math.MathUtil; +// import edu.wpi.first.math.Matrix; +// import edu.wpi.first.math.Nat; +// import edu.wpi.first.math.VecBuilder; +// import edu.wpi.first.math.geometry.*; +// import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer; +// import edu.wpi.first.math.kinematics.ChassisSpeeds; +// import edu.wpi.first.math.kinematics.SwerveDriveKinematics; +// import edu.wpi.first.math.kinematics.SwerveModulePosition; +// import edu.wpi.first.math.numbers.N1; +// import edu.wpi.first.math.numbers.N3; +// import edu.wpi.first.math.util.Units; +// import edu.wpi.first.wpilibj.DriverStation; +// import edu.wpi.first.wpilibj.Timer; +// import java.util.*; +// import java.util.stream.Collectors; +// import lombok.Getter; +// import lombok.Setter; +// import lombok.experimental.ExtensionMethod; +// import org.littletonrobotics.frc2025.subsystems.drive.DriveConstants; +// import org.littletonrobotics.frc2025.subsystems.vision.VisionConstants; +// import org.littletonrobotics.frc2025.util.*; +// import org.littletonrobotics.junction.AutoLogOutput; +// import org.littletonrobotics.junction.Logger; + +// @ExtensionMethod({GeomUtil.class}) +// public class RobotState { +// // Must be less than 2.0 +// private static final LoggedTunableNumber txTyObservationStaleSecs = +// new LoggedTunableNumber("RobotState/TxTyObservationStaleSeconds", 0.2); +// private static final LoggedTunableNumber minDistanceTagPoseBlend = +// new LoggedTunableNumber("RobotState/MinDistanceTagPoseBlend", Units.inchesToMeters(24.0)); +// private static final LoggedTunableNumber maxDistanceTagPoseBlend = +// new LoggedTunableNumber("RobotState/MaxDistanceTagPoseBlend", Units.inchesToMeters(36.0)); +// private static final LoggedTunableNumber coralOverlap = +// new LoggedTunableNumber("RobotState/CoralOverlap", .5); +// private static final LoggedTunableNumber coralPersistanceTime = +// new 
LoggedTunableNumber("RobotState/CoralPersistanceTime", 0.75); +// private static final LoggedTunableNumber algaePersistanceTime = +// new LoggedTunableNumber("RobotState/AlgaePersistanceTime", 0.1); + +// private static final double poseBufferSizeSec = 2.0; +// private static final Matrix odometryStateStdDevs = +// new Matrix<>(VecBuilder.fill(0.003, 0.003, 0.002)); +// private static final Map tagPoses2d = new HashMap<>(); + +// static { +// for (int i = 1; i <= FieldConstants.aprilTagCount; i++) { +// tagPoses2d.put( +// i, +// FieldConstants.defaultAprilTagType +// .getLayout() +// .getTagPose(i) +// .map(Pose3d::toPose2d) +// .orElse(Pose2d.kZero)); +// } +// } + +// private static RobotState instance; + +// public static RobotState getInstance() { +// if (instance == null) instance = new RobotState(); +// return instance; +// } + +// // Pose Estimation Members +// @Getter @AutoLogOutput private Pose2d odometryPose = Pose2d.kZero; +// @Getter @AutoLogOutput private Pose2d estimatedPose = Pose2d.kZero; + +// private final TimeInterpolatableBuffer poseBuffer = +// TimeInterpolatableBuffer.createBuffer(poseBufferSizeSec); +// private final Matrix qStdDevs = new Matrix<>(Nat.N3(), Nat.N1()); +// // Odometry +// private final SwerveDriveKinematics kinematics; +// private SwerveModulePosition[] lastWheelPositions = +// new SwerveModulePosition[] { +// new SwerveModulePosition(), +// new SwerveModulePosition(), +// new SwerveModulePosition(), +// new SwerveModulePosition() +// }; +// // Assume gyro starts at zero +// private Rotation2d gyroOffset = Rotation2d.kZero; + +// private final Map txTyPoses = new HashMap<>(); +// private Set coralPoses = new HashSet<>(); + +// @AutoLogOutput private Rotation2d leftAlgaeObservation = Rotation2d.kZero; +// @AutoLogOutput private Rotation2d rightAlgaeObservation = Rotation2d.kZero; +// private double leftAlgaeObservationTimestamp = 0.0; +// private double rightAlgaeObservationTimestamp = 0.0; + +// @Getter +// @AutoLogOutput(key 
= "RobotState/RobotVelocity") +// private ChassisSpeeds robotVelocity = new ChassisSpeeds(); + +// @Getter +// @Setter +// @AutoLogOutput(key = "RobotState/ElevatorExtensionPercent") +// private double elevatorExtensionPercent; + +// @Getter @Setter private OptionalDouble distanceToBranch = OptionalDouble.empty(); +// @Getter @Setter private OptionalDouble distanceToReefAlgae = OptionalDouble.empty(); +// @Getter @Setter private Rotation2d pitch = Rotation2d.kZero; +// @Getter @Setter private Rotation2d roll = Rotation2d.kZero; + +// private RobotState() { +// for (int i = 0; i < 3; ++i) { +// qStdDevs.set(i, 0, Math.pow(odometryStateStdDevs.get(i, 0), 2)); +// } +// kinematics = new SwerveDriveKinematics(DriveConstants.moduleTranslations); + +// for (int i = 1; i <= FieldConstants.aprilTagCount; i++) { +// txTyPoses.put(i, new TxTyPoseRecord(Pose2d.kZero, Double.POSITIVE_INFINITY, -1.0)); +// } +// } + +// public void resetPose(Pose2d pose) { +// // Gyro offset is the rotation that maps the old gyro rotation (estimated - offset) to the +// new +// // frame of rotation +// gyroOffset = pose.getRotation().minus(odometryPose.getRotation().minus(gyroOffset)); +// estimatedPose = pose; +// odometryPose = pose; +// poseBuffer.clear(); +// } + +// public void addOdometryObservation(OdometryObservation observation) { +// Twist2d twist = kinematics.toTwist2d(lastWheelPositions, observation.wheelPositions()); +// lastWheelPositions = observation.wheelPositions(); +// Pose2d lastOdometryPose = odometryPose; +// odometryPose = odometryPose.exp(twist); +// // Use gyro if connected +// observation.gyroAngle.ifPresent( +// gyroAngle -> { +// // Add offset to measured angle +// Rotation2d angle = gyroAngle.plus(gyroOffset); +// odometryPose = new Pose2d(odometryPose.getTranslation(), angle); +// }); +// // Add pose to buffer at timestamp +// poseBuffer.addSample(observation.timestamp(), odometryPose); +// // Calculate diff from last odometry pose and add onto pose estimate +// 
Twist2d finalTwist = lastOdometryPose.log(odometryPose); +// estimatedPose = estimatedPose.exp(finalTwist); +// } + +// public void addVisionObservation(VisionObservation observation) { +// // If measurement is old enough to be outside the pose buffer's timespan, skip. +// try { +// if (poseBuffer.getInternalBuffer().lastKey() - poseBufferSizeSec > observation.timestamp()) +// { +// return; +// } +// } catch (NoSuchElementException ex) { +// return; +// } +// // Get odometry based pose at timestamp +// var sample = poseBuffer.getSample(observation.timestamp()); +// if (sample.isEmpty()) { +// // exit if not there +// return; +// } + +// // sample --> odometryPose transform and backwards of that +// var sampleToOdometryTransform = new Transform2d(sample.get(), odometryPose); +// var odometryToSampleTransform = new Transform2d(odometryPose, sample.get()); +// // get old estimate by applying odometryToSample Transform +// Pose2d estimateAtTime = estimatedPose.plus(odometryToSampleTransform); + +// // Calculate 3 x 3 vision matrix +// var r = new double[3]; +// for (int i = 0; i < 3; ++i) { +// r[i] = observation.stdDevs().get(i, 0) * observation.stdDevs().get(i, 0); +// } +// // Solve for closed form Kalman gain for continuous Kalman filter with A = 0 +// // and C = I. See wpimath/algorithms.md. 
+// Matrix visionK = new Matrix<>(Nat.N3(), Nat.N3()); +// for (int row = 0; row < 3; ++row) { +// double stdDev = qStdDevs.get(row, 0); +// if (stdDev == 0.0) { +// visionK.set(row, row, 0.0); +// } else { +// visionK.set(row, row, stdDev / (stdDev + Math.sqrt(stdDev * r[row]))); +// } +// } +// // difference between estimate and vision pose +// Transform2d transform = new Transform2d(estimateAtTime, observation.visionPose()); +// // scale transform by visionK +// var kTimesTransform = +// visionK.times( +// VecBuilder.fill( +// transform.getX(), transform.getY(), transform.getRotation().getRadians())); +// Transform2d scaledTransform = +// new Transform2d( +// kTimesTransform.get(0, 0), +// kTimesTransform.get(1, 0), +// Rotation2d.fromRadians(kTimesTransform.get(2, 0))); + +// // Recalculate current estimate by applying scaled transform to old estimate +// // then replaying odometry data +// estimatedPose = estimateAtTime.plus(scaledTransform).plus(sampleToOdometryTransform); +// } + +// public void addTxTyObservation(TxTyObservation observation) { +// // Skip if current data for tag is newer +// if (txTyPoses.containsKey(observation.tagId()) +// && txTyPoses.get(observation.tagId()).timestamp() >= observation.timestamp()) { +// return; +// } + +// // Get rotation at timestamp +// var sample = poseBuffer.getSample(observation.timestamp()); +// if (sample.isEmpty()) { +// // exit if not there +// return; +// } +// Rotation2d robotRotation = +// estimatedPose.transformBy(new Transform2d(odometryPose, sample.get())).getRotation(); + +// // Average tx's and ty's +// double tx = 0.0; +// double ty = 0.0; +// for (int i = 0; i < 4; i++) { +// tx += observation.tx()[i]; +// ty += observation.ty()[i]; +// } +// tx /= 4.0; +// ty /= 4.0; + +// Pose3d cameraPose = VisionConstants.cameras[observation.camera()].pose().get(); + +// // Use 3D distance and tag angles to find robot pose +// Translation2d camToTagTranslation = +// new Pose3d(Translation3d.kZero, new 
Rotation3d(0, ty, -tx)) +// .transformBy( +// new Transform3d(new Translation3d(observation.distance(), 0, 0), +// Rotation3d.kZero)) +// .getTranslation() +// .rotateBy(new Rotation3d(0, cameraPose.getRotation().getY(), 0)) +// .toTranslation2d(); +// Rotation2d camToTagRotation = +// robotRotation.plus( +// cameraPose.toPose2d().getRotation().plus(camToTagTranslation.getAngle())); +// var tagPose2d = tagPoses2d.get(observation.tagId()); +// if (tagPose2d == null) return; +// Translation2d fieldToCameraTranslation = +// new Pose2d(tagPose2d.getTranslation(), camToTagRotation.plus(Rotation2d.kPi)) +// .transformBy(GeomUtil.toTransform2d(camToTagTranslation.getNorm(), 0.0)) +// .getTranslation(); +// Pose2d robotPose = +// new Pose2d( +// fieldToCameraTranslation, +// robotRotation.plus(cameraPose.toPose2d().getRotation())) +// .transformBy(new Transform2d(cameraPose.toPose2d(), Pose2d.kZero)); +// // Use gyro angle at time for robot rotation +// robotPose = new Pose2d(robotPose.getTranslation(), robotRotation); + +// // Add transform to current odometry based pose for latency correction +// txTyPoses.put( +// observation.tagId(), +// new TxTyPoseRecord(robotPose, camToTagTranslation.getNorm(), observation.timestamp())); +// } + +// public void addDriveSpeeds(ChassisSpeeds speeds) { +// robotVelocity = speeds; +// } + +// @AutoLogOutput(key = "RobotState/FieldVelocity") +// public ChassisSpeeds getFieldVelocity() { +// return ChassisSpeeds.fromRobotRelativeSpeeds(robotVelocity, getRotation()); +// } + +// /** Get 2d pose estimate of robot if not stale. 
*/ +// public Optional getTxTyPose(int tagId) { +// if (!txTyPoses.containsKey(tagId)) { +// return Optional.empty(); +// } +// var data = txTyPoses.get(tagId); +// // Check if stale +// if (Timer.getTimestamp() - data.timestamp() >= txTyObservationStaleSecs.get()) { +// return Optional.empty(); +// } +// // Get odometry based pose at timestamp +// var sample = poseBuffer.getSample(data.timestamp()); +// // Latency compensate +// return sample.map(pose2d -> data.pose().plus(new Transform2d(pose2d, odometryPose))); +// } + +// /** +// * Get estimated pose using txty data given tagId on reef and aligned pose on reef. Used for +// algae +// * intaking and coral scoring. +// */ +// public ReefPoseEstimate getReefPose(int face, Pose2d finalPose) { +// final boolean isRed = AllianceFlipUtil.shouldFlip(); +// var tagPose = +// getTxTyPose( +// switch (face) { +// case 1 -> isRed ? 6 : 19; +// case 2 -> isRed ? 11 : 20; +// case 3 -> isRed ? 10 : 21; +// case 4 -> isRed ? 9 : 22; +// case 5 -> isRed ? 8 : 17; +// // 0 +// default -> isRed ? 
7 : 18; +// }); +// // Use estimated pose if tag pose is not present +// if (tagPose.isEmpty()) return new ReefPoseEstimate(getEstimatedPose(), 0.0); +// // Use distance from estimated pose to final pose to get t value +// final double t = +// MathUtil.clamp( +// (getEstimatedPose().getTranslation().getDistance(finalPose.getTranslation()) +// - minDistanceTagPoseBlend.get()) +// / (maxDistanceTagPoseBlend.get() - minDistanceTagPoseBlend.get()), +// 0.0, +// 1.0); +// return new ReefPoseEstimate(getEstimatedPose().interpolate(tagPose.get(), 1.0 - t), 1.0 - t); +// } + +// public void addCoralTxTyObservation(CoralTxTyObservation observation) { +// var oldOdometryPose = poseBuffer.getSample(observation.timestamp()); +// if (oldOdometryPose.isEmpty()) { +// return; +// } +// Pose2d fieldToRobot = +// estimatedPose.transformBy(new Transform2d(odometryPose, oldOdometryPose.get())); +// Pose3d robotToCamera = VisionConstants.cameras[observation.camera()].pose().get(); + +// // Assume coral height of zero and find midpoint of width of bottom tx ty +// double tx = (observation.tx()[2] + observation.tx()[3]) / 2; +// double ty = (observation.ty()[2] + observation.ty()[3]) / 2; + +// double cameraToCoralAngle = -robotToCamera.getRotation().getY() - ty; +// if (cameraToCoralAngle >= 0) { +// return; +// } + +// double cameraToCoralNorm = +// (-robotToCamera.getZ()) +// / Math.tan(-robotToCamera.getRotation().getY() - ty) +// / Math.cos(-tx); +// Pose2d fieldToCamera = fieldToRobot.transformBy(robotToCamera.toPose2d().toTransform2d()); +// Pose2d fieldToCoral = +// fieldToCamera +// .transformBy(new Transform2d(Translation2d.kZero, new Rotation2d(-tx))) +// .transformBy( +// new Transform2d(new Translation2d(cameraToCoralNorm, 0), Rotation2d.kZero)); +// Translation2d fieldToCoralTranslation2d = fieldToCoral.getTranslation(); +// CoralPoseRecord coralPoseRecord = +// new CoralPoseRecord(fieldToCoralTranslation2d, observation.timestamp()); + +// coralPoses = +// 
coralPoses.stream() +// .filter( +// (x) -> x.translation.getDistance(fieldToCoralTranslation2d) > coralOverlap.get()) +// .collect(Collectors.toSet()); +// coralPoses.add(coralPoseRecord); +// } + +// public Set getCoralTranslations() { +// if (Constants.getRobot() == Constants.RobotType.SIMBOT && +// DriverStation.isAutonomousEnabled()) { +// return AutoCoralSim.getCorals(); +// } +// return coralPoses.stream() +// .filter((x) -> Timer.getTimestamp() - x.timestamp() < coralPersistanceTime.get()) +// .map(CoralPoseRecord::translation) +// .collect(Collectors.toSet()); +// } + +// public Rotation2d getRotation() { +// return estimatedPose.getRotation(); +// } + +// public void addLeftAlgaeObservation(Rotation2d angle) { +// leftAlgaeObservation = angle; +// leftAlgaeObservationTimestamp = Timer.getTimestamp(); +// } + +// public void addRightAlgaeObservation(Rotation2d angle) { +// rightAlgaeObservation = angle; +// rightAlgaeObservationTimestamp = Timer.getTimestamp(); +// } + +// public Optional getLeftAlgaeObservation() { +// if (Timer.getTimestamp() - leftAlgaeObservationTimestamp < algaePersistanceTime.get()) { +// return Optional.of(leftAlgaeObservation); +// } else { +// return Optional.empty(); +// } +// } + +// public Optional getRightAlgaeObservation() { +// if (Timer.getTimestamp() - rightAlgaeObservationTimestamp < algaePersistanceTime.get()) { +// return Optional.of(rightAlgaeObservation); +// } else { +// return Optional.empty(); +// } +// } + +// public void periodicLog() { +// // Log tx/ty poses +// Pose2d[] tagPoses = new Pose2d[FieldConstants.aprilTagCount + 1]; +// for (int i = 0; i < FieldConstants.aprilTagCount + 1; i++) { +// tagPoses[i] = getTxTyPose(i).orElse(Pose2d.kZero); +// } +// Logger.recordOutput("RobotState/TxTyPoses", tagPoses); + +// // Log coral poses +// Logger.recordOutput( +// "RobotState/CoralPoses", +// getCoralTranslations().stream() +// .map( +// (translation) -> +// new Pose3d( +// new Translation3d( +// 
translation.getX(), +// translation.getY(), +// FieldConstants.coralDiameter / 2.0), +// new Rotation3d(new Rotation2d(Timer.getTimestamp() * 5.0)))) +// .toArray(Pose3d[]::new)); +// } + +// public record OdometryObservation( +// SwerveModulePosition[] wheelPositions, Optional gyroAngle, double timestamp) {} + +// public record VisionObservation(Pose2d visionPose, double timestamp, Matrix stdDevs) {} + +// public record TxTyObservation( +// int tagId, int camera, double[] tx, double[] ty, double distance, double timestamp) {} + +// public record TxTyPoseRecord(Pose2d pose, double distance, double timestamp) {} + +// public record CoralTxTyObservation(int camera, double[] tx, double[] ty, double timestamp) {} + +// public record CoralPoseRecord(Translation2d translation, double timestamp) {} + +// public record ReefPoseEstimate(Pose2d pose, double blend) {} +// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java new file mode 100644 index 0000000..2b21394 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java @@ -0,0 +1,401 @@ +// // Copyright (c) 2025 FRC 6328 +// // http://github.com/Mechanical-Advantage +// // +// // Use of this source code is governed by an MIT-style +// // license that can be found in the LICENSE file at +// // the root directory of this project. 
+ +// package org.littletonrobotics.frc2025.subsystems.vision; + +// import static org.littletonrobotics.frc2025.subsystems.vision.VisionConstants.*; + +// import edu.wpi.first.math.VecBuilder; +// import edu.wpi.first.math.geometry.*; +// import edu.wpi.first.wpilibj.Alert; +// import edu.wpi.first.wpilibj.DriverStation; +// import edu.wpi.first.wpilibj.Timer; +// import java.util.*; +// import java.util.function.Supplier; +// import lombok.experimental.ExtensionMethod; +// import org.littletonrobotics.frc2025.FieldConstants; +// import org.littletonrobotics.frc2025.FieldConstants.AprilTagLayoutType; +// import org.littletonrobotics.frc2025.RobotState; +// import org.littletonrobotics.frc2025.RobotState.CoralTxTyObservation; +// import org.littletonrobotics.frc2025.RobotState.TxTyObservation; +// import org.littletonrobotics.frc2025.RobotState.VisionObservation; +// import org.littletonrobotics.frc2025.subsystems.leds.Leds; +// import org.littletonrobotics.frc2025.util.GeomUtil; +// import org.littletonrobotics.frc2025.util.LoggedTracer; +// import org.littletonrobotics.frc2025.util.VirtualSubsystem; +// import org.littletonrobotics.junction.Logger; +// import org.littletonrobotics.junction.networktables.LoggedNetworkBoolean; + +// /** Vision subsystem for vision. 
*/ +// @ExtensionMethod({GeomUtil.class}) +// public class Vision extends VirtualSubsystem { +// private final Supplier aprilTagLayoutSupplier; +// private final VisionIO[] io; +// private final VisionIOInputsAutoLogged[] inputs; +// private final AprilTagVisionIOInputsAutoLogged[] aprilTagInputs; +// private final ObjDetectVisionIOInputsAutoLogged[] objDetectInputs; +// private final LoggedNetworkBoolean recordingRequest = +// new LoggedNetworkBoolean("/SmartDashboard/Enable Recording", false); + +// private final Map lastFrameTimes = new HashMap<>(); +// private final Map lastTagDetectionTimes = new HashMap<>(); + +// private final double disconnectedTimeout = 0.5; +// private final Timer[] disconnectedTimers; +// private final Alert[] disconnectedAlerts; + +// public Vision(Supplier aprilTagLayoutSupplier, VisionIO... io) { +// this.aprilTagLayoutSupplier = aprilTagLayoutSupplier; +// this.io = io; +// inputs = new VisionIOInputsAutoLogged[io.length]; +// aprilTagInputs = new AprilTagVisionIOInputsAutoLogged[io.length]; +// objDetectInputs = new ObjDetectVisionIOInputsAutoLogged[io.length]; +// disconnectedTimers = new Timer[io.length]; +// disconnectedAlerts = new Alert[io.length]; +// for (int i = 0; i < io.length; i++) { +// inputs[i] = new VisionIOInputsAutoLogged(); +// aprilTagInputs[i] = new AprilTagVisionIOInputsAutoLogged(); +// objDetectInputs[i] = new ObjDetectVisionIOInputsAutoLogged(); +// disconnectedAlerts[i] = new Alert("", Alert.AlertType.kError); +// } + +// // Create map of last frame times for instances +// for (int i = 0; i < io.length; i++) { +// lastFrameTimes.put(i, 0.0); +// } + +// for (int i = 0; i < io.length; i++) { +// disconnectedTimers[i] = new Timer(); +// disconnectedTimers[i].start(); +// } +// } + +// public void periodic() { +// for (int i = 0; i < io.length; i++) { +// io[i].updateInputs(inputs[i], aprilTagInputs[i], objDetectInputs[i]); +// Logger.processInputs("Vision/Inst" + i, aprilTagInputs[i]); +// 
Logger.processInputs("AprilTagVision/Inst" + i, aprilTagInputs[i]); +// Logger.processInputs("ObjDetectVision/Inst" + i, objDetectInputs[i]); +// } + +// // Update recording state +// boolean shouldRecord = DriverStation.isFMSAttached() || recordingRequest.get(); +// for (var ioInst : io) { +// ioInst.setRecording(shouldRecord); +// } + +// // Update disconnected alerts & LEDs +// boolean anyNTDisconnected = false; +// for (int i = 0; i < io.length; i++) { +// if (aprilTagInputs[i].timestamps.length > 0 || objDetectInputs[i].timestamps.length > 0) { +// disconnectedTimers[i].reset(); +// } +// boolean disconnected = +// disconnectedTimers[i].hasElapsed(disconnectedTimeout) || !inputs[i].ntConnected; +// if (disconnected) { +// disconnectedAlerts[i].setText( +// inputs[i].ntConnected +// ? "Northstar " + i + " connected to NT but not publishing frames" +// : "Northstar " + i + " disconnected from NT"); +// } +// disconnectedAlerts[i].set(disconnected); +// anyNTDisconnected = anyNTDisconnected || !inputs[i].ntConnected; +// } +// Leds.getInstance().visionDisconnected = anyNTDisconnected; + +// // Loop over instances +// List allRobotPoses = new ArrayList<>(); +// List allVisionObservations = new ArrayList<>(); +// Map allTxTyObservations = new HashMap<>(); +// List allCoralTxTyObservations = new ArrayList<>(); +// for (int instanceIndex = 0; instanceIndex < io.length; instanceIndex++) { +// // Loop over frames +// for (int frameIndex = 0; +// frameIndex < aprilTagInputs[instanceIndex].timestamps.length; +// frameIndex++) { +// lastFrameTimes.put(instanceIndex, Timer.getTimestamp()); +// var timestamp = +// aprilTagInputs[instanceIndex].timestamps[frameIndex] + timestampOffset.get(); +// var values = aprilTagInputs[instanceIndex].frames[frameIndex]; + +// // Exit if blank frame +// if (values.length == 0 || values[0] == 0) { +// continue; +// } + +// // Switch based on number of poses +// Pose3d cameraPose = null; +// Pose2d robotPose = null; +// boolean 
useVisionRotation = false; +// switch ((int) values[0]) { +// case 1: +// // One pose (multi-tag), use directly +// cameraPose = +// new Pose3d( +// values[2], +// values[3], +// values[4], +// new Rotation3d(new Quaternion(values[5], values[6], values[7], values[8]))); +// robotPose = +// cameraPose +// .toPose2d() +// .transformBy( +// +// cameras[instanceIndex].pose().get().toPose2d().toTransform2d().inverse()); +// useVisionRotation = true; +// break; +// case 2: +// // Two poses (one tag), disambiguate +// double error0 = values[1]; +// double error1 = values[9]; +// Pose3d cameraPose0 = +// new Pose3d( +// values[2], +// values[3], +// values[4], +// new Rotation3d(new Quaternion(values[5], values[6], values[7], values[8]))); +// Pose3d cameraPose1 = +// new Pose3d( +// values[10], +// values[11], +// values[12], +// new Rotation3d(new Quaternion(values[13], values[14], values[15], +// values[16]))); +// Transform2d cameraToRobot = +// cameras[instanceIndex].pose().get().toPose2d().toTransform2d().inverse(); +// Pose2d robotPose0 = cameraPose0.toPose2d().transformBy(cameraToRobot); +// Pose2d robotPose1 = cameraPose1.toPose2d().transformBy(cameraToRobot); + +// // Check for ambiguity and select based on estimated rotation +// if (error0 < error1 * ambiguityThreshold || error1 < error0 * ambiguityThreshold) { +// Rotation2d currentRotation = RobotState.getInstance().getRotation(); +// Rotation2d visionRotation0 = robotPose0.getRotation(); +// Rotation2d visionRotation1 = robotPose1.getRotation(); +// if (Math.abs(currentRotation.minus(visionRotation0).getRadians()) +// < Math.abs(currentRotation.minus(visionRotation1).getRadians())) { +// cameraPose = cameraPose0; +// robotPose = robotPose0; +// } else { +// cameraPose = cameraPose1; +// robotPose = robotPose1; +// } +// } +// break; +// } + +// // Exit if no data +// if (cameraPose == null || robotPose == null) { +// continue; +// } + +// // Exit if robot pose is off the field +// if (robotPose.getX() < 
-fieldBorderMargin +// || robotPose.getX() > FieldConstants.fieldLength + fieldBorderMargin +// || robotPose.getY() < -fieldBorderMargin +// || robotPose.getY() > FieldConstants.fieldWidth + fieldBorderMargin) { +// continue; +// } + +// // Get tag poses and update last detection times +// List tagPoses = new ArrayList<>(); +// for (int i = (values[0] == 1 ? 9 : 17); i < values.length; i += 10) { +// int tagId = (int) values[i]; +// lastTagDetectionTimes.put(tagId, Timer.getTimestamp()); +// Optional tagPose = +// aprilTagLayoutSupplier.get().getLayout().getTagPose((int) values[i]); +// tagPose.ifPresent(tagPoses::add); +// } +// if (tagPoses.isEmpty()) continue; + +// // Calculate average distance to tag +// double totalDistance = 0.0; +// for (Pose3d tagPose : tagPoses) { +// totalDistance += tagPose.getTranslation().getDistance(cameraPose.getTranslation()); +// } +// double avgDistance = totalDistance / tagPoses.size(); + +// // Add observation to list +// double xyStdDev = +// xyStdDevCoefficient +// * Math.pow(avgDistance, 1.2) +// / Math.pow(tagPoses.size(), 2.0) +// * cameras[instanceIndex].stdDevFactor(); +// double thetaStdDev = +// useVisionRotation +// ? 
thetaStdDevCoefficient +// * Math.pow(avgDistance, 1.2) +// / Math.pow(tagPoses.size(), 2.0) +// * cameras[instanceIndex].stdDevFactor() +// : Double.POSITIVE_INFINITY; +// allVisionObservations.add( +// new VisionObservation( +// robotPose, timestamp, VecBuilder.fill(xyStdDev, xyStdDev, thetaStdDev))); +// allRobotPoses.add(robotPose); + +// // Log data from instance +// if (enableInstanceLogging) { +// Logger.recordOutput( +// "AprilTagVision/Inst" + instanceIndex + "/LatencySecs", +// Timer.getTimestamp() - timestamp); +// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/RobotPose", robotPose); +// Logger.recordOutput( +// "AprilTagVision/Inst" + instanceIndex + "/TagPoses", +// tagPoses.toArray(Pose3d[]::new)); +// } +// } + +// // Get tag tx ty observation data +// Map txTyObservations = new HashMap<>(); +// for (int frameIndex = 0; +// frameIndex < aprilTagInputs[instanceIndex].timestamps.length; +// frameIndex++) { +// var timestamp = aprilTagInputs[instanceIndex].timestamps[frameIndex]; +// var values = aprilTagInputs[instanceIndex].frames[frameIndex]; +// int tagEstimationDataEndIndex = +// switch ((int) values[0]) { +// default -> 0; +// case 1 -> 8; +// case 2 -> 16; +// }; + +// for (int index = tagEstimationDataEndIndex + 1; index < values.length; index += 10) { +// double[] tx = new double[4]; +// double[] ty = new double[4]; +// for (int i = 0; i < 4; i++) { +// tx[i] = values[index + 1 + (2 * i)]; +// ty[i] = values[index + 1 + (2 * i) + 1]; +// } +// int tagId = (int) values[index]; +// double distance = values[index + 9]; + +// txTyObservations.put( +// tagId, new TxTyObservation(tagId, instanceIndex, tx, ty, distance, timestamp)); +// } +// } + +// // Save tx ty observation data +// for (var observation : txTyObservations.values()) { +// if (!allTxTyObservations.containsKey(observation.tagId()) +// || observation.distance() < allTxTyObservations.get(observation.tagId()).distance()) +// { +// 
allTxTyObservations.put(observation.tagId(), observation); +// } +// } + +// // Record object detection observations +// switch (instanceIndex) { +// case 2: +// // Record coral observations +// for (int frameIndex = 0; +// frameIndex < objDetectInputs[instanceIndex].timestamps.length; +// frameIndex++) { +// double[] frame = objDetectInputs[instanceIndex].frames[frameIndex]; +// for (int i = 0; i < frame.length; i += 10) { +// if (frame[i + 1] > coralDetectConfidenceThreshold) { +// double[] tx = new double[4]; +// double[] ty = new double[4]; +// for (int z = 0; z < 4; z++) { +// tx[z] = frame[i + 2 + (2 * z)]; +// ty[z] = frame[i + 2 + (2 * z) + 1]; +// } +// allCoralTxTyObservations.add( +// new CoralTxTyObservation( +// instanceIndex, +// tx, +// ty, +// objDetectInputs[instanceIndex].timestamps[frameIndex])); +// } +// } +// } +// break; + +// case 0: +// case 1: +// // Record algae observations +// if (objDetectInputs[instanceIndex].timestamps.length > 0) { +// double[] frame = +// objDetectInputs[instanceIndex] +// .frames[objDetectInputs[instanceIndex].timestamps.length - 1]; +// double largestSize = 0.0; +// Rotation2d angle = null; +// for (int i = 0; i < frame.length; i += 10) { +// if (frame[i + 1] > algaeDetectConfidenceThreshold) { +// // Read data from frame +// double[] tx = new double[4]; +// double[] ty = new double[4]; +// for (int z = 0; z < 4; z++) { +// tx[z] = frame[i + 2 + (2 * z)]; +// ty[z] = frame[i + 2 + (2 * z) + 1]; +// } + +// // Check if at bottom of frame +// double maxTy = Math.max(ty[2], ty[3]); +// if (maxTy < 0.4) { +// continue; +// } + +// // Check if largest algae +// double size = Math.abs(tx[0] - tx[1]); +// if (size > largestSize) { +// largestSize = size; +// angle = Rotation2d.fromRadians(-(tx[0] + tx[1]) / 2.0); +// } +// } +// } +// if (angle != null) { +// if (instanceIndex == 0) { +// RobotState.getInstance().addLeftAlgaeObservation(angle); +// } else { +// RobotState.getInstance().addRightAlgaeObservation(angle); 
+// } +// } +// } +// break; +// } + +// // If no frames from instances, clear robot pose +// if (enableInstanceLogging && aprilTagInputs[instanceIndex].timestamps.length == 0) { +// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/RobotPose", Pose2d.kZero); +// } + +// // If no recent frames from instance, clear tag poses +// if (enableInstanceLogging +// && Timer.getTimestamp() - lastFrameTimes.get(instanceIndex) > targetLogTimeSecs) { +// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/TagPoses", new Pose3d[] +// {}); +// } +// } + +// // Log robot poses +// Logger.recordOutput("AprilTagVision/RobotPoses", allRobotPoses.toArray(Pose2d[]::new)); + +// // Log tag poses +// List allTagPoses = new ArrayList<>(); +// for (Map.Entry detectionEntry : lastTagDetectionTimes.entrySet()) { +// if (Timer.getTimestamp() - detectionEntry.getValue() < targetLogTimeSecs) { +// aprilTagLayoutSupplier +// .get() +// .getLayout() +// .getTagPose(detectionEntry.getKey()) +// .ifPresent(allTagPoses::add); +// } +// } +// Logger.recordOutput("AprilTagVision/TagPoses", allTagPoses.toArray(Pose3d[]::new)); + +// // Send results to robot state +// allVisionObservations.stream() +// .sorted(Comparator.comparingDouble(VisionObservation::timestamp)) +// .forEach(RobotState.getInstance()::addVisionObservation); +// allTxTyObservations.values().stream().forEach(RobotState.getInstance()::addTxTyObservation); +// allCoralTxTyObservations.stream() +// .sorted(Comparator.comparingDouble(CoralTxTyObservation::timestamp)) +// .forEach(RobotState.getInstance()::addCoralTxTyObservation); + +// // Record cycle time +// LoggedTracer.record("Vision"); +// } +// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java new file mode 100644 index 0000000..3f84f9d --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java @@ -0,0 +1,175 @@ +// // 
Copyright (c) 2025 FRC 6328 +// // http://github.com/Mechanical-Advantage +// // +// // Use of this source code is governed by an MIT-style +// // license that can be found in the LICENSE file at +// // the root directory of this project. + +// package org.littletonrobotics.frc2025.subsystems.vision; + +// import edu.wpi.first.math.geometry.Pose3d; +// import edu.wpi.first.math.geometry.Rotation3d; +// import edu.wpi.first.math.util.Units; +// import java.util.function.Supplier; +// import lombok.Builder; +// import org.littletonrobotics.frc2025.Constants; +// import org.littletonrobotics.frc2025.Constants.Mode; +// import org.littletonrobotics.frc2025.util.LoggedTunableNumber; + +// public class VisionConstants { +// private static final boolean forceEnableInstanceLogging = false; +// public static final boolean enableInstanceLogging = +// forceEnableInstanceLogging || Constants.getMode() == Mode.REPLAY; + +// public static final double ambiguityThreshold = 0.4; +// public static final double targetLogTimeSecs = 0.1; +// public static final double fieldBorderMargin = 0.5; +// public static final double xyStdDevCoefficient = 0.01; +// public static final double thetaStdDevCoefficient = 0.03; +// public static final double demoTagPosePersistenceSecs = 0.5; +// public static final double coralDetectConfidenceThreshold = 0.35; +// public static final double algaeDetectConfidenceThreshold = 0.35; +// public static final LoggedTunableNumber timestampOffset = +// new LoggedTunableNumber("AprilTagVision/TimestampOffset", 0.0); + +// private static int monoExposure = 2200; +// private static double monoGain = 17.5; +// private static double monoDenoise = 1.0; +// private static int colorExposure = 4500; +// private static double colorGain = 5.0; + +// public static LoggedTunableNumber[] pitchAdjustments = +// switch (Constants.getRobot()) { +// case DEVBOT -> +// new LoggedTunableNumber[] { +// new LoggedTunableNumber("Vision/PitchAdjust0", 0.0), +// new 
LoggedTunableNumber("Vision/PitchAdjust1", 0.0) +// }; +// case COMPBOT -> +// new LoggedTunableNumber[] { +// new LoggedTunableNumber("Vision/PitchAdjust0", 0.0), +// new LoggedTunableNumber("Vision/PitchAdjust1", 0.0), +// new LoggedTunableNumber("Vision/PitchAdjust2", 0.0), +// }; +// default -> new LoggedTunableNumber[] {}; +// }; +// public static CameraConfig[] cameras = +// switch (Constants.getRobot()) { +// case DEVBOT -> +// new CameraConfig[] { +// CameraConfig.builder() +// .pose( +// () -> +// new Pose3d( +// 0.254, +// 0.2032, +// 0.21113, +// new Rotation3d( +// 0.0, +// Units.degreesToRadians(-25.0 + pitchAdjustments[0].get()), +// Units.degreesToRadians(-20.0)))) +// .id("40265450") +// .width(1600) +// .height(1200) +// .exposure(monoExposure) +// .gain(monoGain) +// .stdDevFactor(1.0) +// .build(), +// CameraConfig.builder() +// .pose( +// () -> +// new Pose3d( +// 0.254, +// -0.2032, +// 0.21113, +// new Rotation3d( +// 0.0, +// Units.degreesToRadians(-25.0 + pitchAdjustments[1].get()), +// Units.degreesToRadians(20.0)))) +// .id("40265453") +// .width(1600) +// .height(1200) +// .exposure(monoExposure) +// .gain(monoGain) +// .stdDevFactor(1.0) +// .build() +// }; +// case COMPBOT -> +// new CameraConfig[] { +// // Front Left +// CameraConfig.builder() +// .pose( +// () -> +// new Pose3d( +// 0.2794, +// 0.2286, +// 0.21113, +// new Rotation3d( +// 0.0, +// Units.degreesToRadians(-25.0 + pitchAdjustments[0].get()), +// Units.degreesToRadians(-20.0)))) +// .id("40530395") +// .width(1600) +// .height(1200) +// .exposure(monoExposure) +// .gain(monoGain) +// .denoise(monoDenoise) +// .stdDevFactor(1.0) +// .build(), +// // Front Right +// CameraConfig.builder() +// .pose( +// () -> +// new Pose3d( +// 0.2794, +// -0.2286, +// 0.21113, +// new Rotation3d( +// 0.0, +// Units.degreesToRadians(-25.0 + pitchAdjustments[1].get()), +// Units.degreesToRadians(20.0)))) +// .id("40552081") +// .width(1600) +// .height(1200) +// .exposure(monoExposure) +// 
.gain(monoGain) +// .denoise(monoDenoise) +// .stdDevFactor(1.0) +// .build(), +// // Back Up +// CameraConfig.builder() +// .pose( +// () -> +// new Pose3d( +// Units.inchesToMeters(3), +// Units.inchesToMeters(-10), +// Units.inchesToMeters(25), +// new Rotation3d( +// 0.0, +// Units.degreesToRadians(25.0 + pitchAdjustments[2].get()), +// Units.degreesToRadians(170.0)))) +// .id("24737133") +// .width(1280) +// .height(960) +// .exposure(colorExposure) +// .gain(colorGain) +// .stdDevFactor(1.25) +// .build(), +// }; +// default -> new CameraConfig[] {}; +// }; + +// @Builder +// public record CameraConfig( +// Supplier pose, +// String id, +// int width, +// int height, +// int autoExposure, +// int exposure, +// double gain, +// double denoise, +// double stdDevFactor) {} + +// private VisionConstants() {} +// } From de45b4076c674fc1c09bb4822faa0761b58a17d9 Mon Sep 17 00:00:00 2001 From: NutHouse coco-prog-3 Date: Fri, 16 May 2025 13:50:08 -0700 Subject: [PATCH 03/12] WIP: Getting 254's vision code to compile modified: src/main/java/frc/robot/Constants.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java modified: src/main/java/frc/robot/subsystems/vision/Vision.java modified: src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java modified: src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java new file: src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java new file: src/main/java/frc/robot/util/LimelightHelpers.java new file: src/main/java/frc/robot/util/MathHelpers.java new file: 
src/main/java/frc/robot/util/RobotTime.java new file: src/main/java/frc/robot/util/Util.java --- src/main/java/frc/robot/Constants.java | 8 +- .../vision/FRC254/FiducialObservation.java | 93 ++ .../vision/FRC254/MegatagPoseEstimate.java | 99 ++ .../subsystems/vision/FRC254/RobotState.java | 813 ++++++----- .../FRC254/VisionFieldPoseEstimate.java | 80 +- .../subsystems/vision/FRC254/VisionIO.java | 46 + .../vision/FRC254/VisionSubsystem.java | 1219 +++++++++-------- .../frc/robot/subsystems/vision/Vision.java | 6 +- .../vision/VisionIOPhotonVision.java | 2 +- .../vision/VisionIOPhotonVisionSim.java | 2 +- .../ConcurrentTimeInterpolatableBuffer.java | 164 +++ .../java/frc/robot/util/LimelightHelpers.java | 1130 +++++++++++++++ src/main/java/frc/robot/util/MathHelpers.java | 48 + src/main/java/frc/robot/util/RobotTime.java | 25 + src/main/java/frc/robot/util/Util.java | 123 ++ 15 files changed, 2810 insertions(+), 1048 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java create mode 100644 src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java create mode 100644 src/main/java/frc/robot/util/LimelightHelpers.java create mode 100644 src/main/java/frc/robot/util/MathHelpers.java create mode 100644 src/main/java/frc/robot/util/RobotTime.java create mode 100644 src/main/java/frc/robot/util/Util.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 89b556f..d236484 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -381,9 +381,15 @@ public static class AprilTagConstants { public static final String aprilTagFamily = "36h11"; public static final AprilTagLayoutType defaultAprilTagType = AprilTagLayoutType.OFFICIAL; - public static final 
AprilTagFieldLayout aprilTagLayout = + public static final AprilTagFieldLayout kAprilTagLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); + public static final double kFieldWidthMeters = + kAprilTagLayout.getFieldWidth(); // distance between field walls, + // 8.211m + public static final double kFieldLengthMeters = + kAprilTagLayout.getFieldLength(); // distance between driver station + // walls, 16.541m @Getter public enum AprilTagLayoutType { OFFICIAL("2025-official"); diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java new file mode 100644 index 0000000..40bd60e --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java @@ -0,0 +1,93 @@ +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC254; + +import edu.wpi.first.util.struct.Struct; +import edu.wpi.first.util.struct.StructSerializable; +import frc.robot.util.LimelightHelpers; +import java.nio.ByteBuffer; + +public class FiducialObservation implements StructSerializable { + public static class FiducialObservationStruct implements Struct { + public int id; + public double txnc; + public double tync; + public double ambiguity; + + @Override + public Class getTypeClass() { + return FiducialObservation.class; + } + + @Override + public String getTypeString() { + return "struct:FiducialObservation"; + } + + @Override + public int getSize() { + return kSizeInt32 + 3 * kSizeDouble; + } + + @Override + public String getSchema() { + return "int id;double txnc;double tync;double ambiguity"; + } + + @Override + public FiducialObservation unpack(ByteBuffer bb) { + FiducialObservation rv = new FiducialObservation(); + rv.id = bb.getInt(); + rv.txnc = bb.getDouble(); + rv.tync = bb.getDouble(); + rv.ambiguity = bb.getDouble(); + return rv; + } + + @Override + public void pack(ByteBuffer bb, FiducialObservation value) { + bb.putInt(value.id); + bb.putDouble(value.txnc); + bb.putDouble(value.tync); + bb.putDouble(value.ambiguity); + } + } + + public int id; + public double txnc; + public double tync; + public double ambiguity; + + public FiducialObservation() {} + + public static FiducialObservation fromLimelight(LimelightHelpers.RawFiducial fiducial) { + FiducialObservation rv = new FiducialObservation(); + rv.id = fiducial.id; + rv.txnc = fiducial.txnc; + rv.tync = fiducial.tync; + rv.ambiguity = fiducial.ambiguity; + + return rv; + } + + public static FiducialObservation[] fromLimelight(LimelightHelpers.RawFiducial[] fiducials) { + FiducialObservation[] rv = new FiducialObservation[fiducials.length]; + for (int i = 0; i < fiducials.length; ++i) { + rv[i] = fromLimelight(fiducials[i]); + } + return rv; + } + + public static final FiducialObservationStruct struct = new 
FiducialObservationStruct(); +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java new file mode 100644 index 0000000..924324f --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java @@ -0,0 +1,99 @@ +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC254; + +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.util.struct.Struct; +import edu.wpi.first.util.struct.StructSerializable; +import frc.robot.util.LimelightHelpers; +import frc.robot.util.MathHelpers; +import java.nio.ByteBuffer; + +public class MegatagPoseEstimate implements StructSerializable { + public static class MegatagPoseEstimateStruct implements Struct { + public Pose2d fieldToCamera = MathHelpers.kPose2dZero; + public double timestampSeconds; + public double latency; + public double avgTagArea; + + @Override + public Class getTypeClass() { + return MegatagPoseEstimate.class; + } + + @Override + public String getTypeString() { + return "struct:MegatagPoseEstimate"; + } + + @Override + public int getSize() { + return Pose2d.struct.getSize() + kSizeDouble * 3; + } + + @Override + public String getSchema() { + return "Pose2d fieldToCamera;double timestampSeconds;double latency;double avgTagArea"; + } + + @Override + public Struct[] getNested() { + return new Struct[] {Pose2d.struct}; + } + + 
@Override + public MegatagPoseEstimate unpack(ByteBuffer bb) { + MegatagPoseEstimate rv = new MegatagPoseEstimate(); + rv.fieldToCamera = Pose2d.struct.unpack(bb); + rv.timestampSeconds = bb.getDouble(); + rv.latency = bb.getDouble(); + rv.avgTagArea = bb.getDouble(); + rv.fiducialIds = new int[0]; + return rv; + } + + @Override + public void pack(ByteBuffer bb, MegatagPoseEstimate value) { + Pose2d.struct.pack(bb, value.fieldToCamera); + bb.putDouble(value.timestampSeconds); + bb.putDouble(value.latency); + bb.putDouble(value.avgTagArea); + } + } + + public Pose2d fieldToCamera = MathHelpers.kPose2dZero; + public double timestampSeconds; + public double latency; + public double avgTagArea; + public int[] fiducialIds; + + public MegatagPoseEstimate() {} + + public static MegatagPoseEstimate fromLimelight(LimelightHelpers.PoseEstimate poseEstimate) { + MegatagPoseEstimate rv = new MegatagPoseEstimate(); + rv.fieldToCamera = poseEstimate.pose; + if (rv.fieldToCamera == null) rv.fieldToCamera = MathHelpers.kPose2dZero; + rv.timestampSeconds = poseEstimate.timestampSeconds; + rv.latency = poseEstimate.latency; + rv.avgTagArea = poseEstimate.avgTagArea; + rv.fiducialIds = new int[poseEstimate.rawFiducials.length]; + for (int i = 0; i < rv.fiducialIds.length; ++i) { + rv.fiducialIds[i] = poseEstimate.rawFiducials[i].id; + } + + return rv; + } + + public static final MegatagPoseEstimateStruct struct = new MegatagPoseEstimateStruct(); +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java index 9c938df..0aeb482 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java @@ -1,416 +1,397 @@ -// package com.team254.frc2024; - -// import com.team254.frc2024.controlboard.ModalControls; -// import com.team254.frc2024.subsystems.vision.VisionFieldPoseEstimate; -// import 
com.team254.lib.time.RobotTime; -// import com.team254.lib.util.ConcurrentTimeInterpolatableBuffer; -// import com.team254.lib.util.MathHelpers; -// import com.team254.lib.util.PoopTargetFactory.NearTarget; - -// import edu.wpi.first.math.MathUtil; -// import edu.wpi.first.math.geometry.*; -// import edu.wpi.first.math.kinematics.ChassisSpeeds; -// import edu.wpi.first.wpilibj.DriverStation; -// import edu.wpi.first.wpilibj.DriverStation.Alliance; -// import edu.wpi.first.wpilibj.Timer; -// import java.util.*; -// import java.util.concurrent.atomic.AtomicReference; -// import java.util.concurrent.atomic.AtomicBoolean; -// import java.util.concurrent.atomic.AtomicInteger; -// import java.util.function.Consumer; -// import java.util.function.IntSupplier; - -// import org.littletonrobotics.junction.Logger; - -// public class RobotState { - -// public final static double LOOKBACK_TIME = 1.0; - -// private final Consumer visionEstimateConsumer; - -// public RobotState(Consumer visionEstimateConsumer) { -// this.visionEstimateConsumer = visionEstimateConsumer; -// // Make sure to add one sample to these methods to protect callers against null. -// fieldToRobot.addSample(0.0, MathHelpers.kPose2dZero); -// robotToTurret.addSample(0.0, MathHelpers.kRotation2dZero); -// turretAngularVelocity.addSample(0.0, 0.0); -// driveYawAngularVelocity.addSample(0.0, 0.0); -// turretPositionRadians.addSample(0.0, 0.0); -// } - -// // State of robot. 
- -// // Kinematic Frames -// private final ConcurrentTimeInterpolatableBuffer fieldToRobot = -// ConcurrentTimeInterpolatableBuffer -// .createBuffer(LOOKBACK_TIME); -// private final ConcurrentTimeInterpolatableBuffer robotToTurret = -// ConcurrentTimeInterpolatableBuffer -// .createBuffer(LOOKBACK_TIME); -// private static final Transform2d TURRET_TO_CAMERA = new -// Transform2d(Constants.kTurretToCameraX, -// Constants.kTurretToCameraY, -// MathHelpers.kRotation2dZero); -// private static final Transform2d ROBOT_TO_CAMERA_B = new -// Transform2d(Constants.kTurretToCameraBX, -// Constants.kTurretToCameraBY, -// MathHelpers.kRotation2dZero); -// private final AtomicReference measuredRobotRelativeChassisSpeeds = new -// AtomicReference<>( -// new ChassisSpeeds()); -// private final AtomicReference measuredFieldRelativeChassisSpeeds = new -// AtomicReference<>( -// new ChassisSpeeds()); -// private final AtomicReference desiredFieldRelativeChassisSpeeds = new -// AtomicReference<>( -// new ChassisSpeeds()); -// private final AtomicReference fusedFieldRelativeChassisSpeeds = new -// AtomicReference<>( -// new ChassisSpeeds()); - -// private final AtomicInteger iteration = new AtomicInteger(0); - -// private double lastUsedMegatagTimestamp = 0; -// private double lastTriggeredIntakeSensorTimestamp = 0; -// private ConcurrentTimeInterpolatableBuffer turretAngularVelocity = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer turretPositionRadians = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity 
= -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); - -// private ConcurrentTimeInterpolatableBuffer drivePitchRads = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer driveRollRads = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer accelX = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); -// private ConcurrentTimeInterpolatableBuffer accelY = -// ConcurrentTimeInterpolatableBuffer -// .createDoubleBuffer(LOOKBACK_TIME); - -// private final AtomicBoolean enablePathCancel = new AtomicBoolean(false); - -// private double autoStartTime; - -// public void setAutoStartTime(double timestamp) { -// autoStartTime = timestamp; -// } - -// public double getAutoStartTime() { -// return autoStartTime; -// } - -// public void enablePathCancel() { -// enablePathCancel.set(true); -// } - -// public void disablePathCancel() { -// enablePathCancel.set(false); -// } - -// public boolean getPathCancel() { -// return enablePathCancel.get(); -// } - -// public void addOdometryMeasurement(double timestamp, Pose2d pose) { -// fieldToRobot.addSample(timestamp, pose); -// } - -// public void incrementIterationCount() { -// iteration.incrementAndGet(); -// } - -// public int getIteration() { -// return iteration.get(); -// } - -// public IntSupplier getIterationSupplier() { -// return () -> getIteration(); -// } - -// public void addDriveMotionMeasurements(double timestamp, -// double angularRollRadsPerS, -// double angularPitchRadsPerS, -// double angularYawRadsPerS, -// double pitchRads, -// double rollRads, -// double accelX, -// double accelY, -// ChassisSpeeds desiredFieldRelativeSpeeds, -// ChassisSpeeds measuredSpeeds, -// ChassisSpeeds measuredFieldRelativeSpeeds, -// ChassisSpeeds fusedFieldRelativeSpeeds) { -// this.driveRollAngularVelocity.addSample(timestamp, 
angularRollRadsPerS); -// this.drivePitchAngularVelocity.addSample(timestamp, angularPitchRadsPerS); -// this.driveYawAngularVelocity.addSample(timestamp, angularYawRadsPerS); -// this.drivePitchRads.addSample(timestamp, pitchRads); -// this.driveRollRads.addSample(timestamp, rollRads); -// this.accelY.addSample(timestamp, accelY); -// this.accelX.addSample(timestamp, accelX); -// this.desiredFieldRelativeChassisSpeeds.set(desiredFieldRelativeSpeeds); -// this.measuredRobotRelativeChassisSpeeds.set(measuredSpeeds); -// this.measuredFieldRelativeChassisSpeeds.set(measuredFieldRelativeSpeeds); -// this.fusedFieldRelativeChassisSpeeds.set(fusedFieldRelativeSpeeds); -// } - -// public Map.Entry getLatestFieldToRobot() { -// return fieldToRobot.getLatest(); -// } - -// public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { -// var maybeFieldToRobot = getLatestFieldToRobot(); -// Pose2d fieldToRobot = maybeFieldToRobot == null ? MathHelpers.kPose2dZero : -// maybeFieldToRobot.getValue(); -// var delta = getLatestRobotRelativeChassisSpeed(); -// delta = delta.times(lookaheadTimeS); -// return fieldToRobot -// .exp(new Twist2d(delta.vxMetersPerSecond, delta.vyMetersPerSecond, -// delta.omegaRadiansPerSecond)); -// } - -// public Pose2d getLatestFieldToRobotCenter() { -// return fieldToRobot.getLatest().getValue().transformBy(Constants.kTurretToRobotCenter); -// } - -// // This has rotation and radians to allow for wrapping tracking. 
-// public void addTurretUpdates(double timestamp, -// Rotation2d turretRotation, -// double turretRadians, -// double angularYawRadsPerS) { -// // turret frame 180 degrees off from robot frame -// robotToTurret.addSample(timestamp, turretRotation.rotateBy(MathHelpers.kRotation2dPi)); -// this.turretAngularVelocity.addSample(timestamp, angularYawRadsPerS); -// this.turretPositionRadians.addSample(timestamp, turretRadians); -// } - -// public double getLatestTurretPositionRadians() { -// return this.turretPositionRadians.getInternalBuffer().lastEntry().getValue(); -// } - -// public double getLatestTurretAngularVelocity() { -// return this.turretAngularVelocity.getInternalBuffer().lastEntry().getValue(); -// } - -// private Rotation2d hoodRotation = new Rotation2d(); - -// public void addHoodRotation(Rotation2d rotationFromZero) { -// hoodRotation = rotationFromZero; -// } - -// public Rotation2d getHoodRotation() { -// return hoodRotation; -// } - -// private double elevatorHeightM = 0.0; - -// public void setElevatorHeight(double heightM) { -// this.elevatorHeightM = heightM; -// } - -// public double getElevatorHeight() { -// return elevatorHeightM; -// } - -// private double climberRotations = 0.0; - -// public void setClimberRotations(double rotations) { -// this.climberRotations = rotations; -// } - -// private NearTarget poopNearTarget = NearTarget.DEEP_AMP; - -// public void setPoopNearTarget(NearTarget nearTarget) { -// poopNearTarget = nearTarget; -// } - -// public NearTarget getPoopNearTarget() { -// return poopNearTarget; -// } - -// public Optional getRobotToTurret(double timestamp) { -// return robotToTurret.getSample(timestamp); -// } - -// public Optional getFieldToRobot(double timestamp) { -// return fieldToRobot.getSample(timestamp); -// } - -// public Transform2d getTurretToCamera(boolean isTurretCamera) { -// return isTurretCamera ? 
TURRET_TO_CAMERA : ROBOT_TO_CAMERA_B; -// } - -// public Map.Entry getLatestRobotToTurret() { -// return robotToTurret.getLatest(); -// } - -// public ChassisSpeeds getLatestMeasuredFieldRelativeChassisSpeeds() { -// return measuredFieldRelativeChassisSpeeds.get(); -// } - -// public ChassisSpeeds getLatestRobotRelativeChassisSpeed() { -// return measuredRobotRelativeChassisSpeeds.get(); -// } - -// public ChassisSpeeds getLatestDesiredFieldRelativeChassisSpeed() { -// return desiredFieldRelativeChassisSpeeds.get(); -// } - -// public ChassisSpeeds getLatestFusedFieldRelativeChassisSpeed() { -// return fusedFieldRelativeChassisSpeeds.get(); -// } - -// public ChassisSpeeds getLatestFusedRobotRelativeChassisSpeed() { -// var speeds = getLatestRobotRelativeChassisSpeed(); -// speeds.omegaRadiansPerSecond = -// getLatestFusedFieldRelativeChassisSpeed().omegaRadiansPerSecond; -// return speeds; -// } - -// public Optional getTurretAngularVelocity(double timestamp) { -// return turretAngularVelocity.getSample(timestamp); -// } - -// private Optional getMaxAbsValueInRange(ConcurrentTimeInterpolatableBuffer -// buffer, double minTime, -// double maxTime) { -// var submap = buffer.getInternalBuffer().subMap(minTime, maxTime).values(); -// var max = submap.stream().max(Double::compare); -// var min = submap.stream().min(Double::compare); -// if (max.isEmpty() || min.isEmpty()) -// return Optional.empty(); -// if (Math.abs(max.get()) >= Math.abs(min.get())) -// return max; -// else -// return min; -// } - -// public Optional getMaxAbsTurretYawAngularVelocityInRange(double minTime, double -// maxTime) { -// return getMaxAbsValueInRange(turretAngularVelocity, minTime, maxTime); -// } - -// public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double -// maxTime) { -// // Gyro yaw rate not set in sim. 
-// if (Robot.isReal()) -// return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); -// return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); -// } - -// public Optional getMaxAbsDrivePitchAngularVelocityInRange(double minTime, double -// maxTime) { -// return getMaxAbsValueInRange(drivePitchAngularVelocity, minTime, maxTime); -// } - -// public Optional getMaxAbsDriveRollAngularVelocityInRange(double minTime, double -// maxTime) { -// return getMaxAbsValueInRange(driveRollAngularVelocity, minTime, maxTime); -// } - -// public void updateMegatagEstimate(VisionFieldPoseEstimate megatagEstimate) { -// lastUsedMegatagTimestamp = Timer.getFPGATimestamp(); -// visionEstimateConsumer.accept(megatagEstimate); -// } - -// public void updatePinholeEstimate(VisionFieldPoseEstimate pinholeEstimate) { -// visionEstimateConsumer.accept(pinholeEstimate); -// } - -// public void updateLastTriggeredIntakeSensorTimestamp(boolean triggered) { -// if (triggered) -// lastTriggeredIntakeSensorTimestamp = RobotTime.getTimestampSeconds(); -// } - -// public double lastUsedMegatagTimestamp() { -// return lastUsedMegatagTimestamp; -// } - -// public double lastTriggeredIntakeSensorTimestamp() { -// return lastTriggeredIntakeSensorTimestamp; -// } - -// public boolean isRedAlliance() { -// return DriverStation.getAlliance().isPresent() && -// DriverStation.getAlliance().equals(Optional.of(Alliance.Red)); -// } - -// public void updateLogger() { -// if (this.driveYawAngularVelocity.getInternalBuffer().lastEntry() != null) { -// Logger.recordOutput("RobotState/YawAngularVelocity", -// this.driveYawAngularVelocity.getInternalBuffer().lastEntry().getValue()); -// } -// if (this.driveRollAngularVelocity.getInternalBuffer().lastEntry() != null) { -// Logger.recordOutput("RobotState/RollAngularVelocity", -// this.driveRollAngularVelocity.getInternalBuffer().lastEntry().getValue()); -// } -// if 
(this.drivePitchAngularVelocity.getInternalBuffer().lastEntry() != null) { -// Logger.recordOutput("RobotState/PitchAngularVelocity", -// this.drivePitchAngularVelocity.getInternalBuffer().lastEntry().getValue()); -// } -// if (this.accelX.getInternalBuffer().lastEntry() != null) { -// Logger.recordOutput("RobotState/AccelX", -// this.accelX.getInternalBuffer().lastEntry().getValue()); -// } -// if (this.accelY.getInternalBuffer().lastEntry() != null) { -// Logger.recordOutput("RobotState/AccelY", -// this.accelY.getInternalBuffer().lastEntry().getValue()); -// } -// Logger.recordOutput("RobotState/DesiredChassisSpeedFieldFrame", -// getLatestDesiredFieldRelativeChassisSpeed()); -// Logger.recordOutput("RobotState/MeasuredChassisSpeedFieldFrame", -// getLatestMeasuredFieldRelativeChassisSpeeds()); -// Logger.recordOutput("RobotState/FusedChassisSpeedFieldFrame", -// getLatestFusedFieldRelativeChassisSpeed()); -// } - -// Pose3d ampPose3d = new Pose3d(); -// Pose3d climberPose3d = new Pose3d(); -// Pose3d hoodPose3d = new Pose3d(); -// Pose3d shooterPose3d = new Pose3d(); -// Pose3d turretPose3d = new Pose3d(); - -// public void updateViz() { -// if (getLatestRobotToTurret().getValue() != null) { -// shooterPose3d = new Pose3d(new Translation3d(), -// new Rotation3d(0, 0, -// getLatestRobotToTurret().getValue().getRadians())); -// hoodPose3d = new Pose3d(new Translation3d(), -// new Rotation3d(0, 0, -// getLatestRobotToTurret().getValue().getRadians())); -// } -// ampPose3d = new Pose3d( -// new Translation3d(0.0, 0.0, this.elevatorHeightM), -// new Rotation3d()); -// var climberRot = MathUtil.interpolate(0.0, -Math.PI / 2.0, -// this.climberRotations / -// (Constants.ClimberConstants.kForwardMaxPositionRotations -// * Constants.kClimberConfig.unitToRotorRatio)); -// climberPose3d = new Pose3d( -// new Translation3d(0, 0.0, 0.15), new Rotation3d(0.0, climberRot, 0.0)); -// // model_0 is elevator -// // model_1 is climber -// // model_2 is hood plates -// // model_3 
is shooter -// // model_4 is turret -// Logger.recordOutput("ComponentsPoseArray", -// new Pose3d[] { ampPose3d, climberPose3d, hoodPose3d, shooterPose3d, turretPose3d -// }); -// } - -// public void logControllerMode() { -// Logger.recordOutput("Controller Mode", ModalControls.getInstance().getMode().toString()); -// } -// } +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC254; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.geometry.Twist2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; +import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.DriverStation.Alliance; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.util.ConcurrentTimeInterpolatableBuffer; +import frc.robot.util.MathHelpers; +import frc.robot.util.RobotTime; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.IntSupplier; +import org.littletonrobotics.junction.Logger; + +public class RobotState { + + public 
static final double LOOKBACK_TIME = 1.0; + + private final Consumer visionEstimateConsumer; + + public RobotState(Consumer visionEstimateConsumer) { + this.visionEstimateConsumer = visionEstimateConsumer; + // Make sure to add one sample to these methods to protect callers against null. + fieldToRobot.addSample(0.0, MathHelpers.kPose2dZero); + robotToTurret.addSample(0.0, MathHelpers.kRotation2dZero); + turretAngularVelocity.addSample(0.0, 0.0); + driveYawAngularVelocity.addSample(0.0, 0.0); + turretPositionRadians.addSample(0.0, 0.0); + } + + // State of robot. + + // Kinematic Frames + private final ConcurrentTimeInterpolatableBuffer fieldToRobot = + ConcurrentTimeInterpolatableBuffer.createBuffer(LOOKBACK_TIME); + private final ConcurrentTimeInterpolatableBuffer robotToTurret = + ConcurrentTimeInterpolatableBuffer.createBuffer(LOOKBACK_TIME); + private static final Transform2d TURRET_TO_CAMERA = + new Transform2d( + Constants.kTurretToCameraX, Constants.kTurretToCameraY, MathHelpers.kRotation2dZero); + private static final Transform2d ROBOT_TO_CAMERA_B = + new Transform2d( + Constants.kTurretToCameraBX, Constants.kTurretToCameraBY, MathHelpers.kRotation2dZero); + private final AtomicReference measuredRobotRelativeChassisSpeeds = + new AtomicReference<>(new ChassisSpeeds()); + private final AtomicReference measuredFieldRelativeChassisSpeeds = + new AtomicReference<>(new ChassisSpeeds()); + private final AtomicReference desiredFieldRelativeChassisSpeeds = + new AtomicReference<>(new ChassisSpeeds()); + private final AtomicReference fusedFieldRelativeChassisSpeeds = + new AtomicReference<>(new ChassisSpeeds()); + + private final AtomicInteger iteration = new AtomicInteger(0); + + private double lastUsedMegatagTimestamp = 0; + private double lastTriggeredIntakeSensorTimestamp = 0; + private ConcurrentTimeInterpolatableBuffer turretAngularVelocity = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer 
turretPositionRadians = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + + private ConcurrentTimeInterpolatableBuffer drivePitchRads = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer driveRollRads = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer accelX = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + private ConcurrentTimeInterpolatableBuffer accelY = + ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); + + private final AtomicBoolean enablePathCancel = new AtomicBoolean(false); + + private double autoStartTime; + + public void setAutoStartTime(double timestamp) { + autoStartTime = timestamp; + } + + public double getAutoStartTime() { + return autoStartTime; + } + + public void enablePathCancel() { + enablePathCancel.set(true); + } + + public void disablePathCancel() { + enablePathCancel.set(false); + } + + public boolean getPathCancel() { + return enablePathCancel.get(); + } + + public void addOdometryMeasurement(double timestamp, Pose2d pose) { + fieldToRobot.addSample(timestamp, pose); + } + + public void incrementIterationCount() { + iteration.incrementAndGet(); + } + + public int getIteration() { + return iteration.get(); + } + + public IntSupplier getIterationSupplier() { + return () -> getIteration(); + } + + public void addDriveMotionMeasurements( + double timestamp, + double angularRollRadsPerS, + double angularPitchRadsPerS, 
+ double angularYawRadsPerS, + double pitchRads, + double rollRads, + double accelX, + double accelY, + ChassisSpeeds desiredFieldRelativeSpeeds, + ChassisSpeeds measuredSpeeds, + ChassisSpeeds measuredFieldRelativeSpeeds, + ChassisSpeeds fusedFieldRelativeSpeeds) { + this.driveRollAngularVelocity.addSample(timestamp, angularRollRadsPerS); + this.drivePitchAngularVelocity.addSample(timestamp, angularPitchRadsPerS); + this.driveYawAngularVelocity.addSample(timestamp, angularYawRadsPerS); + this.drivePitchRads.addSample(timestamp, pitchRads); + this.driveRollRads.addSample(timestamp, rollRads); + this.accelY.addSample(timestamp, accelY); + this.accelX.addSample(timestamp, accelX); + this.desiredFieldRelativeChassisSpeeds.set(desiredFieldRelativeSpeeds); + this.measuredRobotRelativeChassisSpeeds.set(measuredSpeeds); + this.measuredFieldRelativeChassisSpeeds.set(measuredFieldRelativeSpeeds); + this.fusedFieldRelativeChassisSpeeds.set(fusedFieldRelativeSpeeds); + } + + public Map.Entry getLatestFieldToRobot() { + return fieldToRobot.getLatest(); + } + + public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { + var maybeFieldToRobot = getLatestFieldToRobot(); + Pose2d fieldToRobot = + maybeFieldToRobot == null ? MathHelpers.kPose2dZero : maybeFieldToRobot.getValue(); + var delta = getLatestRobotRelativeChassisSpeed(); + delta = delta.times(lookaheadTimeS); + return fieldToRobot.exp( + new Twist2d(delta.vxMetersPerSecond, delta.vyMetersPerSecond, delta.omegaRadiansPerSecond)); + } + + public Pose2d getLatestFieldToRobotCenter() { + return fieldToRobot.getLatest().getValue().transformBy(Constants.kTurretToRobotCenter); + } + + // This has rotation and radians to allow for wrapping tracking. 
+ public void addTurretUpdates( + double timestamp, + Rotation2d turretRotation, + double turretRadians, + double angularYawRadsPerS) { + // turret frame 180 degrees off from robot frame + robotToTurret.addSample(timestamp, turretRotation.rotateBy(MathHelpers.kRotation2dPi)); + this.turretAngularVelocity.addSample(timestamp, angularYawRadsPerS); + this.turretPositionRadians.addSample(timestamp, turretRadians); + } + + public double getLatestTurretPositionRadians() { + return this.turretPositionRadians.getInternalBuffer().lastEntry().getValue(); + } + + public double getLatestTurretAngularVelocity() { + return this.turretAngularVelocity.getInternalBuffer().lastEntry().getValue(); + } + + private Rotation2d hoodRotation = new Rotation2d(); + + public void addHoodRotation(Rotation2d rotationFromZero) { + hoodRotation = rotationFromZero; + } + + public Rotation2d getHoodRotation() { + return hoodRotation; + } + + private double elevatorHeightM = 0.0; + + public void setElevatorHeight(double heightM) { + this.elevatorHeightM = heightM; + } + + public double getElevatorHeight() { + return elevatorHeightM; + } + + private double climberRotations = 0.0; + + public void setClimberRotations(double rotations) { + this.climberRotations = rotations; + } + + public Optional getRobotToTurret(double timestamp) { + return robotToTurret.getSample(timestamp); + } + + public Optional getFieldToRobot(double timestamp) { + return fieldToRobot.getSample(timestamp); + } + + public Transform2d getTurretToCamera(boolean isTurretCamera) { + return isTurretCamera ? 
TURRET_TO_CAMERA : ROBOT_TO_CAMERA_B; + } + + public Map.Entry getLatestRobotToTurret() { + return robotToTurret.getLatest(); + } + + public ChassisSpeeds getLatestMeasuredFieldRelativeChassisSpeeds() { + return measuredFieldRelativeChassisSpeeds.get(); + } + + public ChassisSpeeds getLatestRobotRelativeChassisSpeed() { + return measuredRobotRelativeChassisSpeeds.get(); + } + + public ChassisSpeeds getLatestDesiredFieldRelativeChassisSpeed() { + return desiredFieldRelativeChassisSpeeds.get(); + } + + public ChassisSpeeds getLatestFusedFieldRelativeChassisSpeed() { + return fusedFieldRelativeChassisSpeeds.get(); + } + + public ChassisSpeeds getLatestFusedRobotRelativeChassisSpeed() { + var speeds = getLatestRobotRelativeChassisSpeed(); + speeds.omegaRadiansPerSecond = getLatestFusedFieldRelativeChassisSpeed().omegaRadiansPerSecond; + return speeds; + } + + public Optional getTurretAngularVelocity(double timestamp) { + return turretAngularVelocity.getSample(timestamp); + } + + private Optional getMaxAbsValueInRange( + ConcurrentTimeInterpolatableBuffer buffer, double minTime, double maxTime) { + var submap = buffer.getInternalBuffer().subMap(minTime, maxTime).values(); + var max = submap.stream().max(Double::compare); + var min = submap.stream().min(Double::compare); + if (max.isEmpty() || min.isEmpty()) return Optional.empty(); + if (Math.abs(max.get()) >= Math.abs(min.get())) return max; + else return min; + } + + public Optional getMaxAbsTurretYawAngularVelocityInRange(double minTime, double maxTime) { + return getMaxAbsValueInRange(turretAngularVelocity, minTime, maxTime); + } + + public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double maxTime) { + // Gyro yaw rate not set in sim. 
+ if (Robot.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); + return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); + } + + public Optional getMaxAbsDrivePitchAngularVelocityInRange( + double minTime, double maxTime) { + return getMaxAbsValueInRange(drivePitchAngularVelocity, minTime, maxTime); + } + + public Optional getMaxAbsDriveRollAngularVelocityInRange(double minTime, double maxTime) { + return getMaxAbsValueInRange(driveRollAngularVelocity, minTime, maxTime); + } + + public void updateMegatagEstimate(VisionFieldPoseEstimate megatagEstimate) { + lastUsedMegatagTimestamp = Timer.getFPGATimestamp(); + visionEstimateConsumer.accept(megatagEstimate); + } + + public void updatePinholeEstimate(VisionFieldPoseEstimate pinholeEstimate) { + visionEstimateConsumer.accept(pinholeEstimate); + } + + public void updateLastTriggeredIntakeSensorTimestamp(boolean triggered) { + if (triggered) lastTriggeredIntakeSensorTimestamp = RobotTime.getTimestampSeconds(); + } + + public double lastUsedMegatagTimestamp() { + return lastUsedMegatagTimestamp; + } + + public double lastTriggeredIntakeSensorTimestamp() { + return lastTriggeredIntakeSensorTimestamp; + } + + public boolean isRedAlliance() { + return DriverStation.getAlliance().isPresent() + && DriverStation.getAlliance().equals(Optional.of(Alliance.Red)); + } + + public void updateLogger() { + if (this.driveYawAngularVelocity.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/YawAngularVelocity", + this.driveYawAngularVelocity.getInternalBuffer().lastEntry().getValue()); + } + if (this.driveRollAngularVelocity.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/RollAngularVelocity", + this.driveRollAngularVelocity.getInternalBuffer().lastEntry().getValue()); + } + if (this.drivePitchAngularVelocity.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/PitchAngularVelocity", + 
this.drivePitchAngularVelocity.getInternalBuffer().lastEntry().getValue()); + } + if (this.accelX.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/AccelX", this.accelX.getInternalBuffer().lastEntry().getValue()); + } + if (this.accelY.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/AccelY", this.accelY.getInternalBuffer().lastEntry().getValue()); + } + Logger.recordOutput( + "RobotState/DesiredChassisSpeedFieldFrame", getLatestDesiredFieldRelativeChassisSpeed()); + Logger.recordOutput( + "RobotState/MeasuredChassisSpeedFieldFrame", getLatestMeasuredFieldRelativeChassisSpeeds()); + Logger.recordOutput( + "RobotState/FusedChassisSpeedFieldFrame", getLatestFusedFieldRelativeChassisSpeed()); + } + + Pose3d ampPose3d = new Pose3d(); + Pose3d climberPose3d = new Pose3d(); + Pose3d hoodPose3d = new Pose3d(); + Pose3d shooterPose3d = new Pose3d(); + Pose3d turretPose3d = new Pose3d(); + + public void updateViz() { + if (getLatestRobotToTurret().getValue() != null) { + shooterPose3d = + new Pose3d( + new Translation3d(), + new Rotation3d(0, 0, getLatestRobotToTurret().getValue().getRadians())); + hoodPose3d = + new Pose3d( + new Translation3d(), + new Rotation3d(0, 0, getLatestRobotToTurret().getValue().getRadians())); + } + ampPose3d = new Pose3d(new Translation3d(0.0, 0.0, this.elevatorHeightM), new Rotation3d()); + var climberRot = + MathUtil.interpolate( + 0.0, + -Math.PI / 2.0, + this.climberRotations + / (Constants.ClimberConstants.kForwardMaxPositionRotations + * Constants.kClimberConfig.unitToRotorRatio)); + climberPose3d = + new Pose3d(new Translation3d(0, 0.0, 0.15), new Rotation3d(0.0, (double) climberRot, 0.0)); + // model_0 is elevator + // model_1 is climber + // model_2 is hood plates + // model_3 is shooter + // model_4 is turret + Logger.recordOutput( + "ComponentsPoseArray", + new Pose3d[] {ampPose3d, climberPose3d, hoodPose3d, shooterPose3d, turretPose3d}); + } +} diff --git 
a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java index 57174d3..8a95f20 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java @@ -1,33 +1,47 @@ -// package com.team254.frc2024.subsystems.vision; - -// import edu.wpi.first.math.Matrix; -// import edu.wpi.first.math.geometry.Pose2d; -// import edu.wpi.first.math.numbers.N1; -// import edu.wpi.first.math.numbers.N3; - -// public class VisionFieldPoseEstimate { - -// private final Pose2d visionRobotPoseMeters; -// private final double timestampSeconds; -// private final Matrix visionMeasurementStdDevs; - -// public VisionFieldPoseEstimate(Pose2d visionRobotPoseMeters, -// double timestampSeconds, -// Matrix visionMeasurementStdDevs) { -// this.visionRobotPoseMeters = visionRobotPoseMeters; -// this.timestampSeconds = timestampSeconds; -// this.visionMeasurementStdDevs = visionMeasurementStdDevs; -// } - -// public Pose2d getVisionRobotPoseMeters() { -// return visionRobotPoseMeters; -// } - -// public double getTimestampSeconds() { -// return timestampSeconds; -// } - -// public Matrix getVisionMeasurementStdDevs() { -// return visionMeasurementStdDevs; -// } -// } +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC254; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; + +public class VisionFieldPoseEstimate { + + private final Pose2d visionRobotPoseMeters; + private final double timestampSeconds; + private final Matrix visionMeasurementStdDevs; + + public VisionFieldPoseEstimate( + Pose2d visionRobotPoseMeters, + double timestampSeconds, + Matrix visionMeasurementStdDevs) { + this.visionRobotPoseMeters = visionRobotPoseMeters; + this.timestampSeconds = timestampSeconds; + this.visionMeasurementStdDevs = visionMeasurementStdDevs; + } + + public Pose2d getVisionRobotPoseMeters() { + return visionRobotPoseMeters; + } + + public double getTimestampSeconds() { + return timestampSeconds; + } + + public Matrix getVisionMeasurementStdDevs() { + return visionMeasurementStdDevs; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java new file mode 100644 index 0000000..0bc5ada --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java @@ -0,0 +1,46 @@ +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC254; + +import org.littletonrobotics.junction.AutoLog; + +public interface VisionIO { + + @AutoLog + class VisionIOInputs { + public boolean turretCameraSeesTarget; + + public boolean elevatorCameraSeesTarget; + + public FiducialObservation[] turretCameraFiducialObservations; + + public FiducialObservation[] elevatorCameraFiducialObservations; + + public MegatagPoseEstimate turretCameraMegatagPoseEstimate; + + public int turretCameraMegatagCount; + + public MegatagPoseEstimate elevatorCameraMegatagPoseEstimate; + + public int elevatorCameraMegatagCount; + + public MegatagPoseEstimate turretCameraMegatag2PoseEstimate; + + public MegatagPoseEstimate elevatorCameraMegatag2PoseEstimate; + } + + void readInputs(VisionIOInputs inputs); + + void pollNetworkTables(); +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java index f2d0489..cb90753 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java @@ -1,594 +1,627 @@ -// package com.team254.frc2024.subsystems.vision; - -// import java.util.Arrays; -// import java.util.HashSet; -// import java.util.List; -// import java.util.Objects; -// import java.util.Optional; -// import java.util.Set; -// import java.util.stream.Collectors; - -// import org.littletonrobotics.junction.Logger; - -// import com.team254.frc2024.Constants; -// import com.team254.frc2024.RobotState; -// import com.team254.lib.time.RobotTime; -// import com.team254.lib.util.MathHelpers; -// import com.team254.lib.util.Util; - -// import edu.wpi.first.math.Matrix; -// import edu.wpi.first.math.VecBuilder; -// import edu.wpi.first.math.geometry.Pose2d; -// import edu.wpi.first.math.geometry.Pose3d; -// import edu.wpi.first.math.geometry.Rotation2d; -// import edu.wpi.first.math.geometry.Rotation3d; -// import 
edu.wpi.first.math.geometry.Transform2d; -// import edu.wpi.first.math.geometry.Transform3d; -// import edu.wpi.first.math.geometry.Translation2d; -// import edu.wpi.first.math.geometry.Translation3d; -// import edu.wpi.first.math.numbers.N1; -// import edu.wpi.first.math.numbers.N3; -// import edu.wpi.first.math.util.Units; -// import edu.wpi.first.wpilibj.DriverStation; -// import edu.wpi.first.wpilibj2.command.SubsystemBase; - -// public class VisionSubsystem extends SubsystemBase { -// private final VisionIO io; - -// private final RobotState state; - -// private final VisionIOInputsAutoLogged inputs = new VisionIOInputsAutoLogged(); - -// private static class PinholeObservation { -// public Translation2d cameraToTag; -// public Pose3d tagPose; -// } - -// private double lastProcessedTurretTimestamp = 0.0; -// private double lastProcessedElevatorTimestamp = 0.0; - -// public VisionSubsystem(VisionIO io, RobotState state) { -// this.io = io; -// this.state = state; -// } - -// @Override -// public void periodic() { -// double timestamp = RobotTime.getTimestampSeconds(); -// // Read inputs from IO -// io.readInputs(inputs); -// Logger.processInputs("Vision", inputs); - -// // Optionally update RobotState -// if (inputs.turretCameraSeesTarget) { -// updateVision(inputs.turretCameraSeesTarget, inputs.turretCameraFiducialObservations, -// inputs.turretCameraMegatagPoseEstimate, -// inputs.turretCameraMegatag2PoseEstimate, true); -// } else if (inputs.elevatorCameraSeesTarget) { -// updateVision(inputs.elevatorCameraSeesTarget, -// inputs.elevatorCameraFiducialObservations, -// inputs.elevatorCameraMegatagPoseEstimate, -// inputs.elevatorCameraMegatag2PoseEstimate, false); -// } - -// Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - -// timestamp); -// } - -// private void updateVision(boolean cameraSeesTarget, FiducialObservation[] -// cameraFiducialObservations, -// MegatagPoseEstimate cameraMegatagPoseEstimate, MegatagPoseEstimate 
-// cameraMegatag2PoseEstimate, -// boolean isTurretCamera) { -// if (cameraMegatagPoseEstimate != null) { - -// String logPreface = "Vision/" + (isTurretCamera ? "Turret/" : "Elevator/"); -// var updateTimestamp = cameraMegatagPoseEstimate.timestampSeconds; -// boolean alreadyProcessedTimestamp = (isTurretCamera ? lastProcessedTurretTimestamp -// : lastProcessedElevatorTimestamp) == updateTimestamp; -// if (!alreadyProcessedTimestamp && cameraSeesTarget) { -// if (!isTurretCamera && !Util.epsilonEquals(state.getElevatorHeight(), 0.0, 0.05)) -// return; -// Optional pinholeEstimate = Optional.empty();// -// processPinholeVisionEstimate(pinholeObservations, -// // -// updateTimestamp, -// // -// isTurretCamera); - -// Optional megatagEstimate = processMegatagPoseEstimate( -// cameraMegatagPoseEstimate, -// isTurretCamera); -// Optional megatag2Estimate = processMegatag2PoseEstimate( -// cameraMegatag2PoseEstimate, isTurretCamera, logPreface); - -// boolean used_megatag = false; -// if (megatagEstimate.isPresent()) { -// if (shouldUseMegatag(cameraMegatagPoseEstimate, cameraFiducialObservations, -// isTurretCamera, -// logPreface)) { -// Logger.recordOutput(logPreface + "MegatagEstimate", -// megatagEstimate.get().getVisionRobotPoseMeters()); -// state.updateMegatagEstimate(megatagEstimate.get()); -// used_megatag = true; -// } else { -// if (megatagEstimate.isPresent()) { -// Logger.recordOutput(logPreface + "MegatagEstimateRejected", -// megatagEstimate.get().getVisionRobotPoseMeters()); -// } -// } -// } - -// if (megatag2Estimate.isPresent() && !used_megatag) { -// if (shouldUseMegatag2(cameraMegatag2PoseEstimate, isTurretCamera, -// logPreface)) { -// Logger.recordOutput(logPreface + "Megatag2Estimate", -// megatag2Estimate.get().getVisionRobotPoseMeters()); -// state.updateMegatagEstimate(megatag2Estimate.get()); -// } else { -// if (megatagEstimate.isPresent()) { -// Logger.recordOutput(logPreface + "Megatag2EstimateRejected", -// 
megatag2Estimate.get().getVisionRobotPoseMeters()); -// } -// } -// } -// if (pinholeEstimate.isPresent()) { -// if (shouldUsePinhole(updateTimestamp, isTurretCamera, logPreface)) { -// Logger.recordOutput(logPreface + "PinholeEstimate", -// pinholeEstimate.get().getVisionRobotPoseMeters()); -// state.updatePinholeEstimate(pinholeEstimate.get()); -// } else { -// Logger.recordOutput(logPreface + "PinholeEstimateRejected", -// pinholeEstimate.get().getVisionRobotPoseMeters()); -// } -// } - -// if (isTurretCamera) -// lastProcessedTurretTimestamp = updateTimestamp; -// else -// lastProcessedElevatorTimestamp = updateTimestamp; -// } -// } -// } - -// @SuppressWarnings("unused") -// private List getPinholeObservations(FiducialObservation[] fiducials, -// boolean isTurretCamera) { -// // Iterate over the fiducials to make VisionUpdates -// return Arrays.stream(fiducials).map(fiducial -> { -// Optional tagPoseOptional = Constants.kAprilTagLayout.getTagPose(fiducial.id); -// if (tagPoseOptional.isEmpty()) { -// return null; -// } -// Pose3d tagPose = tagPoseOptional.get(); -// Optional cameraToTarget = getCameraToTargetTranslation(fiducial, -// tagPose, isTurretCamera); - -// if (cameraToTarget.isEmpty()) { -// return null; -// } - -// var observation = new PinholeObservation(); -// observation.cameraToTag = cameraToTarget.get(); -// observation.tagPose = tagPose; -// return observation; -// }).filter(Objects::nonNull).toList(); -// } - -// private Optional getCameraToTargetTranslation(FiducialObservation fiducial, -// Pose3d tagLocation, -// boolean isTurretCamera) { -// // Get the yaw and pitch angles from to target from the camera POV -// double yawRadians = Math.toRadians(fiducial.txnc); -// double pitchRadians = Math.toRadians(fiducial.tync); - -// Transform3d cameraToTarget = new Transform3d(new Translation3d(), new Rotation3d(0.0, -// pitchRadians, 0.0)); -// cameraToTarget = cameraToTarget -// .plus(new Transform3d(new Translation3d(), new Rotation3d(0.0, 0.0, 
-// yawRadians))); -// Transform3d cameraGroundPlaneToCamera = new Transform3d(new Translation3d(), -// new Rotation3d(0.0, isTurretCamera ? Constants.kCameraPitchRads : -// Constants.kCameraBPitchRads, 0)); -// Rotation3d cameraGroundPlaneToTarget = new -// Pose3d().plus(cameraGroundPlaneToCamera.plus(cameraToTarget)) -// .getRotation().unaryMinus(); - -// // Built a unit vector from adjusted rotation. -// // Raw vector: x = 1, y = tan(yaw), z = tan(pitch) -// // Make it a unit vector by dividing each component by magnitude -// // sqrt(x^2+y^2+z^2). -// double tan_ty = Math.tan(cameraGroundPlaneToTarget.getZ()); // y and z switch intentional -// double tan_tz = -Math.tan(cameraGroundPlaneToTarget.getY()); // y and z switch -// intentional - -// if (tan_tz == 0.0) { -// // Protect against divide by zero (i.e. target is at same height as camera). -// return Optional.empty(); -// } - -// // Find the fixed height difference between the center of the tag and the camera -// // lens -// double differential_height = tagLocation.getZ() -// - (isTurretCamera ? Constants.kCameraHeightOffGroundMeters : -// Constants.kCameraBHeightOffGroundMeters); - -// // We now obtain 3d distance by dividing differential_height by our normalized z -// // component z / (Math.sqrt(x^2+y^2+z^2)) -// double distance = differential_height * Math.sqrt(1.0 + tan_tz * tan_tz + tan_ty * -// tan_ty) / tan_tz; -// // Build a 3d vector from distance (which we now know) and orientation (which we -// // already computed above). -// Translation3d cameraToTargetTranslation = new Translation3d(distance, -// cameraGroundPlaneToTarget); - -// // Grab the x and y components. 
-// return Optional.of(new Translation2d(cameraToTargetTranslation.getX(), -// cameraToTargetTranslation.getY())); -// } - -// final static Set kTagsBlueSpeaker = new HashSet<>(List.of(7, 8)); -// final static Set kTagsRedSpeaker = new HashSet<>(List.of(3, 4)); - -// private boolean shouldUseMegatag(MegatagPoseEstimate poseEstimate, FiducialObservation[] -// fiducials, -// boolean isTurretCamera, String logPreface) { -// final double kMinAreaForTurretMegatagEnabled = 0.4; -// final double kMinAreaForTurretMegatagDisabled = 0.05; - -// final double kMinAreaForElevatorMegatagEnabled = 0.4; -// final double kMinAreaForElevatorMegatagDisabled = 0.05; - -// double kMinAreaForMegatag = 0.0; -// if (DriverStation.isDisabled()) { -// kMinAreaForMegatag = isTurretCamera ? kMinAreaForTurretMegatagDisabled -// : kMinAreaForElevatorMegatagDisabled; -// } else { -// kMinAreaForMegatag = isTurretCamera ? kMinAreaForTurretMegatagEnabled -// : kMinAreaForElevatorMegatagEnabled; -// } - -// final int kExpectedTagCount = 2; - -// final double kLargeYawThreshold = Units.degreesToRadians(200.0); -// final double kLargeYawEventTimeWindowS = 0.05; - -// if (!isTurretCamera) { -// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( -// poseEstimate.timestampSeconds - kLargeYawEventTimeWindowS, -// poseEstimate.timestampSeconds); -// if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { -// Logger.recordOutput("Vision/Elevator/MegatagYawAngular", false); -// return false; -// } -// Logger.recordOutput("Vision/Elevator/MegatagYawAngular", true); -// } - -// if (poseEstimate.avgTagArea < kMinAreaForMegatag) { -// Logger.recordOutput(logPreface + "megaTagAvgTagArea", false); -// return false; -// } -// Logger.recordOutput(logPreface + "megaTagAvgTagArea", true); - -// if (poseEstimate.fiducialIds.length != kExpectedTagCount) { -// Logger.recordOutput(logPreface + "fiducialLength", false); -// return false; -// } -// Logger.recordOutput(logPreface + 
"fiducialLength", true); - -// if (poseEstimate.fiducialIds.length < 1) { -// Logger.recordOutput(logPreface + "fiducialLengthLess1", false); -// return false; -// } -// Logger.recordOutput(logPreface + "fiducialLengthLess1", true); - -// if (poseEstimate.fieldToCamera.getTranslation().getNorm() < 1.0) { -// Logger.recordOutput(logPreface + "NormCheck", false); -// return false; -// } -// Logger.recordOutput(logPreface + "NormCheck", true); - -// for (var fiducial : fiducials) { -// if (fiducial.ambiguity > .9) { -// Logger.recordOutput(logPreface + "Ambiguity", false); -// return false; -// } -// } -// Logger.recordOutput(logPreface + "Ambiguity", true); - -// Set seenTags = Arrays.stream(poseEstimate.fiducialIds).boxed() -// .collect(Collectors.toCollection(HashSet::new)); -// Set expectedTags = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; -// var result = expectedTags.equals(seenTags); -// Logger.recordOutput(logPreface + "SeenTags", result); -// return result; -// } - -// private boolean shouldUseMegatag2(MegatagPoseEstimate poseEstimate, boolean isTurretCamera, -// String logPreface) { -// return shouldUsePinhole(poseEstimate.timestampSeconds, isTurretCamera, logPreface); -// } - -// private boolean shouldUsePinhole(double timestamp, boolean isTurretCamera, String preface) { -// final double kLargePitchRollYawEventTimeWindowS = 0.1; -// final double kLargePitchRollThreshold = Units.degreesToRadians(10.0); -// final double kLargeYawThreshold = Units.degreesToRadians(100.0); -// if (isTurretCamera) { -// var maxTurretVel = state.getMaxAbsTurretYawAngularVelocityInRange( -// timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); -// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( -// timestamp - kLargePitchRollYawEventTimeWindowS, -// timestamp); - -// if (maxTurretVel.isPresent() && maxYawVel.isPresent() && -// Math.abs(maxTurretVel.get() + maxYawVel.get()) > kLargeYawThreshold) { -// Logger.recordOutput(preface + 
"PinholeTurretAngular", false); -// return false; -// } -// Logger.recordOutput(preface + "PinholeTurretAngular", true); -// } else { -// var maxYawVel = state.getMaxAbsDriveYawAngularVelocityInRange( -// timestamp - kLargePitchRollYawEventTimeWindowS, -// timestamp); -// if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { -// Logger.recordOutput(preface + "PinholeYawAngular", false); -// return false; -// } -// Logger.recordOutput(preface + "PinholeYawAngular", true); -// } - -// var maxPitchVel = state.getMaxAbsDrivePitchAngularVelocityInRange( -// timestamp - kLargePitchRollYawEventTimeWindowS, -// timestamp); -// if (maxPitchVel.isPresent() && Math.abs(maxPitchVel.get()) > kLargePitchRollThreshold) { -// Logger.recordOutput(preface + "PinholePitchAngular", false); -// return false; -// } -// Logger.recordOutput(preface + "PinholePitchAngular", true); - -// var maxRollVel = state.getMaxAbsDriveRollAngularVelocityInRange(timestamp - -// kLargePitchRollYawEventTimeWindowS, -// timestamp); -// if (maxRollVel.isPresent() && Math.abs(maxRollVel.get()) > kLargePitchRollThreshold) { -// Logger.recordOutput(preface + "PinholeRollAngular", false); -// return false; -// } -// Logger.recordOutput(preface + "PinholeRollAngular", true); - -// return true; -// } - -// private Optional getFieldToRobotEstimate(MegatagPoseEstimate poseEstimate, boolean -// isTurretCamera) { -// var fieldToCamera = poseEstimate.fieldToCamera; -// if (fieldToCamera.getX() == 0.0) { -// return Optional.empty(); -// } -// var turretToCameraTransform = state.getTurretToCamera(isTurretCamera); -// var cameraToTurretTransform = turretToCameraTransform.inverse(); -// var fieldToTurretPose = fieldToCamera.plus(cameraToTurretTransform); -// var fieldToRobotEstimate = MathHelpers.kPose2dZero; -// if (isTurretCamera) { -// var robotToTurretObservation = state.getRobotToTurret(poseEstimate.timestampSeconds); -// if (robotToTurretObservation.isEmpty()) { -// return Optional.empty(); 
-// } -// var turretToRobot = -// MathHelpers.transform2dFromRotation(robotToTurretObservation.get().unaryMinus()); -// fieldToRobotEstimate = fieldToTurretPose.plus(turretToRobot); -// } else { -// fieldToRobotEstimate = fieldToCamera.plus(turretToCameraTransform.inverse()); -// } - -// return Optional.of(fieldToRobotEstimate); -// } - -// private Optional processMegatag2PoseEstimate(MegatagPoseEstimate -// poseEstimate, -// boolean isTurretCamera, String logPreface) { -// var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); -// if (loggedFieldToRobot.isEmpty()) { -// return Optional.empty(); -// } - -// var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); -// if (maybeFieldToRobotEstimate.isEmpty()) -// return Optional.empty(); -// var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); - -// // distance from current pose to vision estimated pose -// double poseDifference = fieldToRobotEstimate.getTranslation() -// .getDistance(loggedFieldToRobot.get().getTranslation()); - -// var defaultSet = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; -// Set speakerTags = new HashSet<>(defaultSet); -// speakerTags.removeAll( +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 // -// Arrays.stream(poseEstimate.fiducialIds).boxed().collect(Collectors.toCollection(HashSet::new))); -// boolean seesSpeakerTags = speakerTags.size() < 2; - -// double xyStds; -// if (poseEstimate.fiducialIds.length > 0) { -// // multiple targets detected -// if (poseEstimate.fiducialIds.length >= 2 && poseEstimate.avgTagArea > 0.1) { -// xyStds = 0.2; -// } -// // we detect at least one of our speaker tags and we're close to it. 
-// else if (seesSpeakerTags && poseEstimate.avgTagArea > 0.2) { -// xyStds = 0.5; -// } -// // 1 target with large area and close to estimated pose -// else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { -// xyStds = 0.5; -// } -// // 1 target farther away and estimated pose is close -// else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { -// xyStds = 1.0; -// } else if (poseEstimate.fiducialIds.length > 1) { -// xyStds = 1.2; -// } else { -// xyStds = 2.0; -// } - -// Logger.recordOutput(logPreface + "Megatag2StdDev", xyStds); -// Logger.recordOutput(logPreface + "Megatag2AvgTagArea", poseEstimate.avgTagArea); -// Logger.recordOutput(logPreface + "Megatag2PoseDifference", poseDifference); - -// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, -// Units.degreesToRadians(50.0)); -// fieldToRobotEstimate = new Pose2d(fieldToRobotEstimate.getTranslation(), -// loggedFieldToRobot.get().getRotation()); -// return Optional.of( -// new VisionFieldPoseEstimate(fieldToRobotEstimate, -// poseEstimate.timestampSeconds, -// visionMeasurementStdDevs)); -// } -// return Optional.empty(); -// } - -// private Optional processMegatagPoseEstimate(MegatagPoseEstimate -// poseEstimate, -// boolean isTurretCamera) { -// var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); -// if (loggedFieldToRobot.isEmpty()) { -// return Optional.empty(); -// } - -// var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); -// if (maybeFieldToRobotEstimate.isEmpty()) -// return Optional.empty(); -// var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); - -// // distance from current pose to vision estimated pose -// double poseDifference = fieldToRobotEstimate.getTranslation() -// .getDistance(loggedFieldToRobot.get().getTranslation()); - -// if (poseEstimate.fiducialIds.length > 0) { -// double xyStds = 1.0; -// double degStds = 12; -// // multiple targets detected -// if 
(poseEstimate.fiducialIds.length >= 2) { -// xyStds = 0.5; -// degStds = 6; -// } -// // 1 target with large area and close to estimated pose -// else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { -// xyStds = 1.0; -// degStds = 12; -// } -// // 1 target farther away and estimated pose is close -// else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { -// xyStds = 2.0; -// degStds = 30; -// } - -// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, -// Units.degreesToRadians(degStds)); -// return Optional.of( -// new VisionFieldPoseEstimate(fieldToRobotEstimate, -// poseEstimate.timestampSeconds, -// visionMeasurementStdDevs)); -// } -// return Optional.empty(); -// } - -// @SuppressWarnings("unused") -// private Optional -// processPinholeVisionEstimate(List observations, -// double timestamp, boolean isTurretCamera) { -// observations = observations.stream().filter((observation) -> { -// if (observation.cameraToTag.getNorm() < 10.0) { -// return true; -// } -// Logger.recordOutput("Vision/RejectOnNormTimestamp", RobotTime.getTimestampSeconds()); -// return false; -// }).toList(); - -// if (observations.isEmpty()) -// return Optional.empty(); - -// int num_updates = 0; -// double x = 0.0, y = 0.0; -// // All timestamps and rotations are the same. If that changes, need to revisit. 
-// Rotation2d rotation = MathHelpers.kRotation2dZero; -// double avgRange = 0.0; -// var poseTurret = state.getRobotToTurret(timestamp); -// var poseRobot = state.getFieldToRobot(timestamp); -// if (poseRobot.isEmpty() || poseTurret.isEmpty()) { -// return Optional.empty(); -// } -// for (var observation : observations) { -// Pose2d fieldToRobotEstimate = estimateFieldToRobot( -// observation.cameraToTag, -// observation.tagPose, -// poseTurret.get(), -// poseRobot.get().getRotation(), -// MathHelpers.kRotation2dZero, isTurretCamera); -// x += fieldToRobotEstimate.getX(); -// y += fieldToRobotEstimate.getY(); -// rotation = fieldToRobotEstimate.getRotation(); -// avgRange += observation.cameraToTag.getNorm(); -// num_updates++; -// } - -// if (num_updates == 0) -// return Optional.empty(); - -// avgRange /= num_updates; - -// double xyStds = 100.0; -// var fieldToRobotEstimate = new Pose2d(x / num_updates, y / num_updates, rotation); -// var poseDifference = -// fieldToRobotEstimate.getTranslation().getDistance(poseRobot.get().getTranslation()); -// // multiple targets detected -// if (observations.size() >= 2 && avgRange < 3.0) { -// xyStds = 0.2; -// } else if (avgRange < 5.0 && poseDifference < 0.5) { -// xyStds = 0.5; -// } -// // 1 target farther away and estimated pose is close -// else if (avgRange < 3.0 && poseDifference < 0.3) { -// xyStds = 1.0; -// } else if (observations.size() > 1) { -// xyStds = 1.2; -// } else { -// xyStds = 2.0; -// } -// ; - -// final double rotStdDev = Units.degreesToRadians(50); -// Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, rotStdDev); -// return Optional.of(new VisionFieldPoseEstimate(fieldToRobotEstimate, timestamp, -// visionMeasurementStdDevs)); -// } - -// private Pose2d estimateFieldToRobot(Translation2d cameraToTarget, Pose3d fieldToTarget, -// Rotation2d robotToTurret, -// Rotation2d gyroAngle, Rotation2d cameraYawOffset, boolean isTurretCamera) { -// Transform2d cameraToTargetFixed = MathHelpers 
-// .transform2dFromTranslation(cameraToTarget.rotateBy(cameraYawOffset)); -// Transform2d turretToTarget = -// state.getTurretToCamera(isTurretCamera).plus(cameraToTargetFixed); -// // In robot frame -// Transform2d robotToTarget = turretToTarget; -// if (isTurretCamera) { -// robotToTarget = -// MathHelpers.transform2dFromRotation(robotToTurret).plus(turretToTarget); -// } - -// // In field frame -// Transform2d robotToTargetField = MathHelpers -// .transform2dFromTranslation(robotToTarget.getTranslation().rotateBy(gyroAngle)); - -// // In field frame -// Pose2d fieldToTarget2d = -// MathHelpers.pose2dFromTranslation(fieldToTarget.toPose2d().getTranslation()); - -// Pose2d fieldToRobot = fieldToTarget2d.transformBy(MathHelpers.transform2dFromTranslation( -// robotToTargetField.getTranslation().unaryMinus())); - -// Pose2d fieldToRobotYawAdjusted = new Pose2d(fieldToRobot.getTranslation(), gyroAngle); -// return fieldToRobotYawAdjusted; -// } - -// } +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC254; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj2.command.SubsystemBase; +import frc.robot.util.MathHelpers; +import frc.robot.util.RobotTime; +import frc.robot.util.Util; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import org.littletonrobotics.junction.Logger; + +public class VisionSubsystem extends SubsystemBase { + private final VisionIO io; + + private final RobotState state; + + private final VisionIOInputsAutoLogged inputs = new VisionIOInputsAutoLogged(); + + private static class PinholeObservation { + public Translation2d cameraToTag; + public Pose3d tagPose; + } + + private double lastProcessedTurretTimestamp = 0.0; + private double lastProcessedElevatorTimestamp = 0.0; + + public VisionSubsystem(VisionIO io, RobotState state) { + this.io = io; + this.state = state; + } + + @Override + public void periodic() { + double timestamp = RobotTime.getTimestampSeconds(); + // Read inputs from IO + io.readInputs(inputs); + Logger.processInputs("Vision", inputs); + + // Optionally update RobotState + if (inputs.turretCameraSeesTarget) { + updateVision( + inputs.turretCameraSeesTarget, + inputs.turretCameraFiducialObservations, + inputs.turretCameraMegatagPoseEstimate, + 
inputs.turretCameraMegatag2PoseEstimate, + true); + } else if (inputs.elevatorCameraSeesTarget) { + updateVision( + inputs.elevatorCameraSeesTarget, + inputs.elevatorCameraFiducialObservations, + inputs.elevatorCameraMegatagPoseEstimate, + inputs.elevatorCameraMegatag2PoseEstimate, + false); + } + + Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - timestamp); + } + + private void updateVision( + boolean cameraSeesTarget, + FiducialObservation[] cameraFiducialObservations, + MegatagPoseEstimate cameraMegatagPoseEstimate, + MegatagPoseEstimate cameraMegatag2PoseEstimate, + boolean isTurretCamera) { + if (cameraMegatagPoseEstimate != null) { + + String logPreface = "Vision/" + (isTurretCamera ? "Turret/" : "Elevator/"); + var updateTimestamp = cameraMegatagPoseEstimate.timestampSeconds; + boolean alreadyProcessedTimestamp = + (isTurretCamera ? lastProcessedTurretTimestamp : lastProcessedElevatorTimestamp) + == updateTimestamp; + if (!alreadyProcessedTimestamp && cameraSeesTarget) { + if (!isTurretCamera && !Util.epsilonEquals(state.getElevatorHeight(), 0.0, 0.05)) return; + Optional pinholeEstimate = Optional.empty(); // + processPinholeVisionEstimate( + pinholeObservations, + // + updateTimestamp, + // + isTurretCamera); + + Optional megatagEstimate = + processMegatagPoseEstimate(cameraMegatagPoseEstimate, isTurretCamera); + Optional megatag2Estimate = + processMegatag2PoseEstimate(cameraMegatag2PoseEstimate, isTurretCamera, logPreface); + + boolean used_megatag = false; + if (megatagEstimate.isPresent()) { + if (shouldUseMegatag( + cameraMegatagPoseEstimate, cameraFiducialObservations, isTurretCamera, logPreface)) { + Logger.recordOutput( + logPreface + "MegatagEstimate", megatagEstimate.get().getVisionRobotPoseMeters()); + state.updateMegatagEstimate(megatagEstimate.get()); + used_megatag = true; + } else { + if (megatagEstimate.isPresent()) { + Logger.recordOutput( + logPreface + "MegatagEstimateRejected", + 
megatagEstimate.get().getVisionRobotPoseMeters()); + } + } + } + + if (megatag2Estimate.isPresent() && !used_megatag) { + if (shouldUseMegatag2(cameraMegatag2PoseEstimate, isTurretCamera, logPreface)) { + Logger.recordOutput( + logPreface + "Megatag2Estimate", megatag2Estimate.get().getVisionRobotPoseMeters()); + state.updateMegatagEstimate(megatag2Estimate.get()); + } else { + if (megatagEstimate.isPresent()) { + Logger.recordOutput( + logPreface + "Megatag2EstimateRejected", + megatag2Estimate.get().getVisionRobotPoseMeters()); + } + } + } + if (pinholeEstimate.isPresent()) { + if (shouldUsePinhole(updateTimestamp, isTurretCamera, logPreface)) { + Logger.recordOutput( + logPreface + "PinholeEstimate", pinholeEstimate.get().getVisionRobotPoseMeters()); + state.updatePinholeEstimate(pinholeEstimate.get()); + } else { + Logger.recordOutput( + logPreface + "PinholeEstimateRejected", + pinholeEstimate.get().getVisionRobotPoseMeters()); + } + } + + if (isTurretCamera) lastProcessedTurretTimestamp = updateTimestamp; + else lastProcessedElevatorTimestamp = updateTimestamp; + } + } + } + + @SuppressWarnings("unused") + private List getPinholeObservations( + FiducialObservation[] fiducials, boolean isTurretCamera) { + // Iterate over the fiducials to make VisionUpdates + return Arrays.stream(fiducials) + .map( + fiducial -> { + Optional tagPoseOptional = Constants.kAprilTagLayout.getTagPose(fiducial.id); + if (tagPoseOptional.isEmpty()) { + return null; + } + Pose3d tagPose = tagPoseOptional.get(); + Optional cameraToTarget = + getCameraToTargetTranslation(fiducial, tagPose, isTurretCamera); + + if (cameraToTarget.isEmpty()) { + return null; + } + + var observation = new PinholeObservation(); + observation.cameraToTag = cameraToTarget.get(); + observation.tagPose = tagPose; + return observation; + }) + .filter(Objects::nonNull) + .toList(); + } + + private Optional getCameraToTargetTranslation( + FiducialObservation fiducial, Pose3d tagLocation, boolean isTurretCamera) { + 
// Get the yaw and pitch angles from to target from the camera POV + double yawRadians = Math.toRadians(fiducial.txnc); + double pitchRadians = Math.toRadians(fiducial.tync); + + Transform3d cameraToTarget = + new Transform3d(new Translation3d(), new Rotation3d(0.0, pitchRadians, 0.0)); + cameraToTarget = + cameraToTarget.plus( + new Transform3d(new Translation3d(), new Rotation3d(0.0, 0.0, yawRadians))); + Transform3d cameraGroundPlaneToCamera = + new Transform3d( + new Translation3d(), + new Rotation3d( + 0.0, isTurretCamera ? Constants.kCameraPitchRads : Constants.kCameraBPitchRads, 0)); + Rotation3d cameraGroundPlaneToTarget = + new Pose3d() + .plus(cameraGroundPlaneToCamera.plus(cameraToTarget)) + .getRotation() + .unaryMinus(); + + // Built a unit vector from adjusted rotation. + // Raw vector: x = 1, y = tan(yaw), z = tan(pitch) + // Make it a unit vector by dividing each component by magnitude + // sqrt(x^2+y^2+z^2). + double tan_ty = Math.tan(cameraGroundPlaneToTarget.getZ()); // y and z switch intentional + double tan_tz = -Math.tan(cameraGroundPlaneToTarget.getY()); // y and z switch intentional + + if (tan_tz == 0.0) { + // Protect against divide by zero (i.e. target is at same height as camera). + return Optional.empty(); + } + + // Find the fixed height difference between the center of the tag and the camera + // lens + double differential_height = + tagLocation.getZ() + - (isTurretCamera + ? Constants.kCameraHeightOffGroundMeters + : Constants.kCameraBHeightOffGroundMeters); + + // We now obtain 3d distance by dividing differential_height by our normalized z + // component z / (Math.sqrt(x^2+y^2+z^2)) + double distance = + differential_height * Math.sqrt(1.0 + tan_tz * tan_tz + tan_ty * tan_ty) / tan_tz; + // Build a 3d vector from distance (which we now know) and orientation (which we + // already computed above). + Translation3d cameraToTargetTranslation = + new Translation3d(distance, cameraGroundPlaneToTarget); + + // Grab the x and y components. 
+ return Optional.of( + new Translation2d(cameraToTargetTranslation.getX(), cameraToTargetTranslation.getY())); + } + + static final Set kTagsBlueSpeaker = new HashSet<>(List.of(7, 8)); + static final Set kTagsRedSpeaker = new HashSet<>(List.of(3, 4)); + + private boolean shouldUseMegatag( + MegatagPoseEstimate poseEstimate, + FiducialObservation[] fiducials, + boolean isTurretCamera, + String logPreface) { + final double kMinAreaForTurretMegatagEnabled = 0.4; + final double kMinAreaForTurretMegatagDisabled = 0.05; + + final double kMinAreaForElevatorMegatagEnabled = 0.4; + final double kMinAreaForElevatorMegatagDisabled = 0.05; + + double kMinAreaForMegatag = 0.0; + if (DriverStation.isDisabled()) { + kMinAreaForMegatag = + isTurretCamera ? kMinAreaForTurretMegatagDisabled : kMinAreaForElevatorMegatagDisabled; + } else { + kMinAreaForMegatag = + isTurretCamera ? kMinAreaForTurretMegatagEnabled : kMinAreaForElevatorMegatagEnabled; + } + + final int kExpectedTagCount = 2; + + final double kLargeYawThreshold = Units.degreesToRadians(200.0); + final double kLargeYawEventTimeWindowS = 0.05; + + if (!isTurretCamera) { + var maxYawVel = + state.getMaxAbsDriveYawAngularVelocityInRange( + poseEstimate.timestampSeconds - kLargeYawEventTimeWindowS, + poseEstimate.timestampSeconds); + if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { + Logger.recordOutput("Vision/Elevator/MegatagYawAngular", false); + return false; + } + Logger.recordOutput("Vision/Elevator/MegatagYawAngular", true); + } + + if (poseEstimate.avgTagArea < kMinAreaForMegatag) { + Logger.recordOutput(logPreface + "megaTagAvgTagArea", false); + return false; + } + Logger.recordOutput(logPreface + "megaTagAvgTagArea", true); + + if (poseEstimate.fiducialIds.length != kExpectedTagCount) { + Logger.recordOutput(logPreface + "fiducialLength", false); + return false; + } + Logger.recordOutput(logPreface + "fiducialLength", true); + + if (poseEstimate.fiducialIds.length < 1) { + 
Logger.recordOutput(logPreface + "fiducialLengthLess1", false); + return false; + } + Logger.recordOutput(logPreface + "fiducialLengthLess1", true); + + if (poseEstimate.fieldToCamera.getTranslation().getNorm() < 1.0) { + Logger.recordOutput(logPreface + "NormCheck", false); + return false; + } + Logger.recordOutput(logPreface + "NormCheck", true); + + for (var fiducial : fiducials) { + if (fiducial.ambiguity > .9) { + Logger.recordOutput(logPreface + "Ambiguity", false); + return false; + } + } + Logger.recordOutput(logPreface + "Ambiguity", true); + + Set seenTags = + Arrays.stream(poseEstimate.fiducialIds) + .boxed() + .collect(Collectors.toCollection(HashSet::new)); + Set expectedTags = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; + var result = expectedTags.equals(seenTags); + Logger.recordOutput(logPreface + "SeenTags", result); + return result; + } + + private boolean shouldUseMegatag2( + MegatagPoseEstimate poseEstimate, boolean isTurretCamera, String logPreface) { + return shouldUsePinhole(poseEstimate.timestampSeconds, isTurretCamera, logPreface); + } + + private boolean shouldUsePinhole(double timestamp, boolean isTurretCamera, String preface) { + final double kLargePitchRollYawEventTimeWindowS = 0.1; + final double kLargePitchRollThreshold = Units.degreesToRadians(10.0); + final double kLargeYawThreshold = Units.degreesToRadians(100.0); + if (isTurretCamera) { + var maxTurretVel = + state.getMaxAbsTurretYawAngularVelocityInRange( + timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); + var maxYawVel = + state.getMaxAbsDriveYawAngularVelocityInRange( + timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); + + if (maxTurretVel.isPresent() + && maxYawVel.isPresent() + && Math.abs(maxTurretVel.get() + maxYawVel.get()) > kLargeYawThreshold) { + Logger.recordOutput(preface + "PinholeTurretAngular", false); + return false; + } + Logger.recordOutput(preface + "PinholeTurretAngular", true); + } else { + var maxYawVel = + 
state.getMaxAbsDriveYawAngularVelocityInRange( + timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); + if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { + Logger.recordOutput(preface + "PinholeYawAngular", false); + return false; + } + Logger.recordOutput(preface + "PinholeYawAngular", true); + } + + var maxPitchVel = + state.getMaxAbsDrivePitchAngularVelocityInRange( + timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); + if (maxPitchVel.isPresent() && Math.abs(maxPitchVel.get()) > kLargePitchRollThreshold) { + Logger.recordOutput(preface + "PinholePitchAngular", false); + return false; + } + Logger.recordOutput(preface + "PinholePitchAngular", true); + + var maxRollVel = + state.getMaxAbsDriveRollAngularVelocityInRange( + timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); + if (maxRollVel.isPresent() && Math.abs(maxRollVel.get()) > kLargePitchRollThreshold) { + Logger.recordOutput(preface + "PinholeRollAngular", false); + return false; + } + Logger.recordOutput(preface + "PinholeRollAngular", true); + + return true; + } + + private Optional getFieldToRobotEstimate( + MegatagPoseEstimate poseEstimate, boolean isTurretCamera) { + var fieldToCamera = poseEstimate.fieldToCamera; + if (fieldToCamera.getX() == 0.0) { + return Optional.empty(); + } + var turretToCameraTransform = state.getTurretToCamera(isTurretCamera); + var cameraToTurretTransform = turretToCameraTransform.inverse(); + var fieldToTurretPose = fieldToCamera.plus(cameraToTurretTransform); + var fieldToRobotEstimate = MathHelpers.kPose2dZero; + if (isTurretCamera) { + var robotToTurretObservation = state.getRobotToTurret(poseEstimate.timestampSeconds); + if (robotToTurretObservation.isEmpty()) { + return Optional.empty(); + } + var turretToRobot = + MathHelpers.transform2dFromRotation(robotToTurretObservation.get().unaryMinus()); + fieldToRobotEstimate = fieldToTurretPose.plus(turretToRobot); + } else { + fieldToRobotEstimate = 
fieldToCamera.plus(turretToCameraTransform.inverse()); + } + + return Optional.of(fieldToRobotEstimate); + } + + private Optional processMegatag2PoseEstimate( + MegatagPoseEstimate poseEstimate, boolean isTurretCamera, String logPreface) { + var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); + if (loggedFieldToRobot.isEmpty()) { + return Optional.empty(); + } + + var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); + if (maybeFieldToRobotEstimate.isEmpty()) return Optional.empty(); + var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); + + // distance from current pose to vision estimated pose + double poseDifference = + fieldToRobotEstimate + .getTranslation() + .getDistance(loggedFieldToRobot.get().getTranslation()); + + var defaultSet = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; + Set speakerTags = new HashSet<>(defaultSet); + speakerTags.removeAll( + Arrays.stream(poseEstimate.fiducialIds) + .boxed() + .collect(Collectors.toCollection(HashSet::new))); + boolean seesSpeakerTags = speakerTags.size() < 2; + + double xyStds; + if (poseEstimate.fiducialIds.length > 0) { + // multiple targets detected + if (poseEstimate.fiducialIds.length >= 2 && poseEstimate.avgTagArea > 0.1) { + xyStds = 0.2; + } + // we detect at least one of our speaker tags and we're close to it. 
+ else if (seesSpeakerTags && poseEstimate.avgTagArea > 0.2) { + xyStds = 0.5; + } + // 1 target with large area and close to estimated pose + else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { + xyStds = 0.5; + } + // 1 target farther away and estimated pose is close + else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { + xyStds = 1.0; + } else if (poseEstimate.fiducialIds.length > 1) { + xyStds = 1.2; + } else { + xyStds = 2.0; + } + + Logger.recordOutput(logPreface + "Megatag2StdDev", xyStds); + Logger.recordOutput(logPreface + "Megatag2AvgTagArea", poseEstimate.avgTagArea); + Logger.recordOutput(logPreface + "Megatag2PoseDifference", poseDifference); + + Matrix visionMeasurementStdDevs = + VecBuilder.fill(xyStds, xyStds, Units.degreesToRadians(50.0)); + fieldToRobotEstimate = + new Pose2d(fieldToRobotEstimate.getTranslation(), loggedFieldToRobot.get().getRotation()); + return Optional.of( + new VisionFieldPoseEstimate( + fieldToRobotEstimate, poseEstimate.timestampSeconds, visionMeasurementStdDevs)); + } + return Optional.empty(); + } + + private Optional processMegatagPoseEstimate( + MegatagPoseEstimate poseEstimate, boolean isTurretCamera) { + var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); + if (loggedFieldToRobot.isEmpty()) { + return Optional.empty(); + } + + var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); + if (maybeFieldToRobotEstimate.isEmpty()) return Optional.empty(); + var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); + + // distance from current pose to vision estimated pose + double poseDifference = + fieldToRobotEstimate + .getTranslation() + .getDistance(loggedFieldToRobot.get().getTranslation()); + + if (poseEstimate.fiducialIds.length > 0) { + double xyStds = 1.0; + double degStds = 12; + // multiple targets detected + if (poseEstimate.fiducialIds.length >= 2) { + xyStds = 0.5; + degStds = 6; + } + // 1 target with large area and close 
to estimated pose + else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { + xyStds = 1.0; + degStds = 12; + } + // 1 target farther away and estimated pose is close + else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { + xyStds = 2.0; + degStds = 30; + } + + Matrix visionMeasurementStdDevs = + VecBuilder.fill(xyStds, xyStds, Units.degreesToRadians(degStds)); + return Optional.of( + new VisionFieldPoseEstimate( + fieldToRobotEstimate, poseEstimate.timestampSeconds, visionMeasurementStdDevs)); + } + return Optional.empty(); + } + + @SuppressWarnings("unused") + private Optional processPinholeVisionEstimate( + List observations, double timestamp, boolean isTurretCamera) { + observations = + observations.stream() + .filter( + (observation) -> { + if (observation.cameraToTag.getNorm() < 10.0) { + return true; + } + Logger.recordOutput( + "Vision/RejectOnNormTimestamp", RobotTime.getTimestampSeconds()); + return false; + }) + .toList(); + + if (observations.isEmpty()) return Optional.empty(); + + int num_updates = 0; + double x = 0.0, y = 0.0; + // All timestamps and rotations are the same. If that changes, need to revisit. 
+ Rotation2d rotation = MathHelpers.kRotation2dZero; + double avgRange = 0.0; + var poseTurret = state.getRobotToTurret(timestamp); + var poseRobot = state.getFieldToRobot(timestamp); + if (poseRobot.isEmpty() || poseTurret.isEmpty()) { + return Optional.empty(); + } + for (var observation : observations) { + Pose2d fieldToRobotEstimate = + estimateFieldToRobot( + observation.cameraToTag, + observation.tagPose, + poseTurret.get(), + poseRobot.get().getRotation(), + MathHelpers.kRotation2dZero, + isTurretCamera); + x += fieldToRobotEstimate.getX(); + y += fieldToRobotEstimate.getY(); + rotation = fieldToRobotEstimate.getRotation(); + avgRange += observation.cameraToTag.getNorm(); + num_updates++; + } + + if (num_updates == 0) return Optional.empty(); + + avgRange /= num_updates; + + double xyStds = 100.0; + var fieldToRobotEstimate = new Pose2d(x / num_updates, y / num_updates, rotation); + var poseDifference = + fieldToRobotEstimate.getTranslation().getDistance(poseRobot.get().getTranslation()); + // multiple targets detected + if (observations.size() >= 2 && avgRange < 3.0) { + xyStds = 0.2; + } else if (avgRange < 5.0 && poseDifference < 0.5) { + xyStds = 0.5; + } + // 1 target farther away and estimated pose is close + else if (avgRange < 3.0 && poseDifference < 0.3) { + xyStds = 1.0; + } else if (observations.size() > 1) { + xyStds = 1.2; + } else { + xyStds = 2.0; + } + ; + + final double rotStdDev = Units.degreesToRadians(50); + Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, rotStdDev); + return Optional.of( + new VisionFieldPoseEstimate(fieldToRobotEstimate, timestamp, visionMeasurementStdDevs)); + } + + private Pose2d estimateFieldToRobot( + Translation2d cameraToTarget, + Pose3d fieldToTarget, + Rotation2d robotToTurret, + Rotation2d gyroAngle, + Rotation2d cameraYawOffset, + boolean isTurretCamera) { + Transform2d cameraToTargetFixed = + MathHelpers.transform2dFromTranslation(cameraToTarget.rotateBy(cameraYawOffset)); + Transform2d 
turretToTarget = state.getTurretToCamera(isTurretCamera).plus(cameraToTargetFixed); + // In robot frame + Transform2d robotToTarget = turretToTarget; + if (isTurretCamera) { + robotToTarget = MathHelpers.transform2dFromRotation(robotToTurret).plus(turretToTarget); + } + + // In field frame + Transform2d robotToTargetField = + MathHelpers.transform2dFromTranslation(robotToTarget.getTranslation().rotateBy(gyroAngle)); + + // In field frame + Pose2d fieldToTarget2d = + MathHelpers.pose2dFromTranslation(fieldToTarget.toPose2d().getTranslation()); + + Pose2d fieldToRobot = + fieldToTarget2d.transformBy( + MathHelpers.transform2dFromTranslation( + robotToTargetField.getTranslation().unaryMinus())); + + Pose2d fieldToRobotYawAdjusted = new Pose2d(fieldToRobot.getTranslation(), gyroAngle); + return fieldToRobotYawAdjusted; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 34c676a..f812f86 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -96,7 +96,7 @@ public void periodic() { // Add tag poses for (int tagId : inputs[cameraIndex].tagIds) { - var tagPose = AprilTagConstants.aprilTagLayout.getTagPose(tagId); + var tagPose = AprilTagConstants.kAprilTagLayout.getTagPose(tagId); if (tagPose.isPresent()) { tagPoses.add(tagPose.get()); } @@ -114,9 +114,9 @@ public void periodic() { // Must be within the field boundaries || observation.pose().getX() < 0.0 - || observation.pose().getX() > AprilTagConstants.aprilTagLayout.getFieldLength() + || observation.pose().getX() > AprilTagConstants.kAprilTagLayout.getFieldLength() || observation.pose().getY() < 0.0 - || observation.pose().getY() > AprilTagConstants.aprilTagLayout.getFieldWidth(); + || observation.pose().getY() > AprilTagConstants.kAprilTagLayout.getFieldWidth(); // Add pose to log robotPoses.add(observation.pose()); diff --git 
a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index df70163..9e3609e 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -92,7 +92,7 @@ public void updateInputs(VisionIOInputs inputs) { var target = result.targets.get(0); // Calculate robot pose - var tagPose = aprilTagLayout.getTagPose(target.fiducialId); + var tagPose = kAprilTagLayout.getTagPose(target.fiducialId); if (tagPose.isPresent()) { Transform3d fieldToTarget = new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index e657459..8a26705 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -44,7 +44,7 @@ public VisionIOPhotonVisionSim( // Initialize vision sim if (visionSim == null) { visionSim = new VisionSystemSim("main"); - visionSim.addAprilTags(AprilTagConstants.aprilTagLayout); + visionSim.addAprilTags(AprilTagConstants.kAprilTagLayout); } // Add sim camera diff --git a/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java new file mode 100644 index 0000000..df46efd --- /dev/null +++ b/src/main/java/frc/robot/util/ConcurrentTimeInterpolatableBuffer.java @@ -0,0 +1,164 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this 
project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.interpolation.Interpolatable; +import edu.wpi.first.math.interpolation.Interpolator; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; + +/** + * A concurrent version of WPILib's TimeInterpolatableBuffer class to avoid the need for explicit + * synchronization in our robot code. + * + * @param The type stored in this buffer. + */ +public final class ConcurrentTimeInterpolatableBuffer { + private final double m_historySize; + private final Interpolator m_interpolatingFunc; + private final ConcurrentNavigableMap m_pastSnapshots = new ConcurrentSkipListMap<>(); + + private ConcurrentTimeInterpolatableBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + this.m_historySize = historySizeSeconds; + this.m_interpolatingFunc = interpolateFunction; + } + + /** + * Create a new TimeInterpolatableBuffer. + * + * @param interpolateFunction The function used to interpolate between values. + * @param historySizeSeconds The history size of the buffer. + * @param The type of data to store in the buffer. + * @return The new TimeInterpolatableBuffer. 
+ */ + public static ConcurrentTimeInterpolatableBuffer createBuffer( + Interpolator interpolateFunction, double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(interpolateFunction, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer that stores a given subclass of {@link Interpolatable}. + * + * @param historySizeSeconds The history size of the buffer. + * @param The type of {@link Interpolatable} to store in the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static > ConcurrentTimeInterpolatableBuffer createBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>( + Interpolatable::interpolate, historySizeSeconds); + } + + /** + * Create a new TimeInterpolatableBuffer to store Double values. + * + * @param historySizeSeconds The history size of the buffer. + * @return The new TimeInterpolatableBuffer. + */ + public static ConcurrentTimeInterpolatableBuffer createDoubleBuffer( + double historySizeSeconds) { + return new ConcurrentTimeInterpolatableBuffer<>(MathUtil::interpolate, historySizeSeconds); + } + + /** + * Add a sample to the buffer. + * + * @param timeSeconds The timestamp of the sample. + * @param sample The sample object. + */ + public void addSample(double timeSeconds, T sample) { + m_pastSnapshots.put(timeSeconds, sample); + cleanUp(timeSeconds); + } + + /** + * Removes samples older than our current history size. + * + * @param time The current timestamp. + */ + private void cleanUp(double time) { + m_pastSnapshots.headMap(time - m_historySize, false).clear(); + } + + /** Clear all old samples. */ + public void clear() { + m_pastSnapshots.clear(); + } + + /** + * Sample the buffer at the given time. If the buffer is empty, an empty Optional is returned. + * + * @param timeSeconds The time at which to sample. + * @return The interpolated value at that timestamp or an empty Optional. 
+ */ + public Optional getSample(double timeSeconds) { + if (m_pastSnapshots.isEmpty()) { + return Optional.empty(); + } + + // Special case for when the requested time is the same as a sample + var nowEntry = m_pastSnapshots.get(timeSeconds); + if (nowEntry != null) { + return Optional.of(nowEntry); + } + + var bottomBound = m_pastSnapshots.floorEntry(timeSeconds); + var topBound = m_pastSnapshots.ceilingEntry(timeSeconds); + + // Return null if neither sample exists, and the opposite bound if the other is + // null + if (topBound == null && bottomBound == null) { + return Optional.empty(); + } else if (topBound == null) { + return Optional.of(bottomBound.getValue()); + } else if (bottomBound == null) { + return Optional.of(topBound.getValue()); + } else { + // Otherwise, interpolate. Because T is between [0, 1], we want the ratio of + // (the difference + // between the current time and bottom bound) and (the difference between top + // and bottom + // bounds). + return Optional.of( + m_interpolatingFunc.interpolate( + bottomBound.getValue(), + topBound.getValue(), + (timeSeconds - bottomBound.getKey()) / (topBound.getKey() - bottomBound.getKey()))); + } + } + + public Entry getLatest() { + return m_pastSnapshots.lastEntry(); + } + + /** + * Grant access to the internal sample buffer. Used in Pose Estimation to replay odometry inputs + * stored within this buffer. + * + * @return The internal sample buffer. 
+ */ + public ConcurrentNavigableMap getInternalBuffer() { + return m_pastSnapshots; + } +} diff --git a/src/main/java/frc/robot/util/LimelightHelpers.java b/src/main/java/frc/robot/util/LimelightHelpers.java new file mode 100644 index 0000000..9b7b20d --- /dev/null +++ b/src/main/java/frc/robot/util/LimelightHelpers.java @@ -0,0 +1,1130 @@ +// LimelightHelpers v1.6 (April 9, 2024) + +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.util; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonFormat.Shape; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.networktables.NetworkTable; +import edu.wpi.first.networktables.NetworkTableEntry; +import edu.wpi.first.networktables.NetworkTableInstance; +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.concurrent.CompletableFuture; + +public class LimelightHelpers { + + public static class LimelightTarget_Retro { + + @JsonProperty("t6c_ts") + private double[] cameraPose_TargetSpace; + + @JsonProperty("t6r_fs") + private double[] robotPose_FieldSpace; + + @JsonProperty("t6r_ts") + private double[] robotPose_TargetSpace; + + @JsonProperty("t6t_cs") + private double[] targetPose_CameraSpace; + + @JsonProperty("t6t_rs") + private double[] targetPose_RobotSpace; + + public Pose3d getCameraPose_TargetSpace() { + return toPose3D(cameraPose_TargetSpace); + } + + public Pose3d getRobotPose_FieldSpace() { + return toPose3D(robotPose_FieldSpace); + } + + public Pose3d getRobotPose_TargetSpace() { + return toPose3D(robotPose_TargetSpace); + } + + public Pose3d getTargetPose_CameraSpace() { + return toPose3D(targetPose_CameraSpace); + } + + public Pose3d getTargetPose_RobotSpace() { + return toPose3D(targetPose_RobotSpace); + } + + public Pose2d getCameraPose_TargetSpace2D() { + return 
toPose2D(cameraPose_TargetSpace); + } + + public Pose2d getRobotPose_FieldSpace2D() { + return toPose2D(robotPose_FieldSpace); + } + + public Pose2d getRobotPose_TargetSpace2D() { + return toPose2D(robotPose_TargetSpace); + } + + public Pose2d getTargetPose_CameraSpace2D() { + return toPose2D(targetPose_CameraSpace); + } + + public Pose2d getTargetPose_RobotSpace2D() { + return toPose2D(targetPose_RobotSpace); + } + + @JsonProperty("ta") + public double ta; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("typ") + public double ty_pixels; + + @JsonProperty("ts") + public double ts; + + public LimelightTarget_Retro() { + cameraPose_TargetSpace = new double[6]; + robotPose_FieldSpace = new double[6]; + robotPose_TargetSpace = new double[6]; + targetPose_CameraSpace = new double[6]; + targetPose_RobotSpace = new double[6]; + } + } + + public static class LimelightTarget_Fiducial { + + @JsonProperty("fID") + public double fiducialID; + + @JsonProperty("fam") + public String fiducialFamily; + + @JsonProperty("t6c_ts") + private double[] cameraPose_TargetSpace; + + @JsonProperty("t6r_fs") + private double[] robotPose_FieldSpace; + + @JsonProperty("t6r_ts") + private double[] robotPose_TargetSpace; + + @JsonProperty("t6t_cs") + private double[] targetPose_CameraSpace; + + @JsonProperty("t6t_rs") + private double[] targetPose_RobotSpace; + + public Pose3d getCameraPose_TargetSpace() { + return toPose3D(cameraPose_TargetSpace); + } + + public Pose3d getRobotPose_FieldSpace() { + return toPose3D(robotPose_FieldSpace); + } + + public Pose3d getRobotPose_TargetSpace() { + return toPose3D(robotPose_TargetSpace); + } + + public Pose3d getTargetPose_CameraSpace() { + return toPose3D(targetPose_CameraSpace); + } + + public Pose3d getTargetPose_RobotSpace() { + return toPose3D(targetPose_RobotSpace); + } + + public Pose2d getCameraPose_TargetSpace2D() { + return 
toPose2D(cameraPose_TargetSpace); + } + + public Pose2d getRobotPose_FieldSpace2D() { + return toPose2D(robotPose_FieldSpace); + } + + public Pose2d getRobotPose_TargetSpace2D() { + return toPose2D(robotPose_TargetSpace); + } + + public Pose2d getTargetPose_CameraSpace2D() { + return toPose2D(targetPose_CameraSpace); + } + + public Pose2d getTargetPose_RobotSpace2D() { + return toPose2D(targetPose_RobotSpace); + } + + @JsonProperty("ta") + public double ta; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("typ") + public double ty_pixels; + + @JsonProperty("ts") + public double ts; + + public LimelightTarget_Fiducial() { + cameraPose_TargetSpace = new double[6]; + robotPose_FieldSpace = new double[6]; + robotPose_TargetSpace = new double[6]; + targetPose_CameraSpace = new double[6]; + targetPose_RobotSpace = new double[6]; + } + } + + public static class LimelightTarget_Barcode {} + + public static class LimelightTarget_Classifier { + + @JsonProperty("class") + public String className; + + @JsonProperty("classID") + public double classID; + + @JsonProperty("conf") + public double confidence; + + @JsonProperty("zone") + public double zone; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("typ") + public double ty_pixels; + + public LimelightTarget_Classifier() {} + } + + public static class LimelightTarget_Detector { + + @JsonProperty("class") + public String className; + + @JsonProperty("classID") + public double classID; + + @JsonProperty("conf") + public double confidence; + + @JsonProperty("ta") + public double ta; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("typ") + public double ty_pixels; + + public LimelightTarget_Detector() {} + } 
+ + public static class Results { + + @JsonProperty("pID") + public double pipelineID; + + @JsonProperty("tl") + public double latency_pipeline; + + @JsonProperty("cl") + public double latency_capture; + + public double latency_jsonParse; + + @JsonProperty("ts") + public double timestamp_LIMELIGHT_publish; + + @JsonProperty("ts_rio") + public double timestamp_RIOFPGA_capture; + + @JsonProperty("v") + @JsonFormat(shape = Shape.NUMBER) + public boolean valid; + + @JsonProperty("botpose") + public double[] botpose; + + @JsonProperty("botpose_wpired") + public double[] botpose_wpired; + + @JsonProperty("botpose_wpiblue") + public double[] botpose_wpiblue; + + @JsonProperty("botpose_tagcount") + public double botpose_tagcount; + + @JsonProperty("botpose_span") + public double botpose_span; + + @JsonProperty("botpose_avgdist") + public double botpose_avgdist; + + @JsonProperty("botpose_avgarea") + public double botpose_avgarea; + + @JsonProperty("t6c_rs") + public double[] camerapose_robotspace; + + public Pose3d getBotPose3d() { + return toPose3D(botpose); + } + + public Pose3d getBotPose3d_wpiRed() { + return toPose3D(botpose_wpired); + } + + public Pose3d getBotPose3d_wpiBlue() { + return toPose3D(botpose_wpiblue); + } + + public Pose2d getBotPose2d() { + return toPose2D(botpose); + } + + public Pose2d getBotPose2d_wpiRed() { + return toPose2D(botpose_wpired); + } + + public Pose2d getBotPose2d_wpiBlue() { + return toPose2D(botpose_wpiblue); + } + + @JsonProperty("Retro") + public LimelightTarget_Retro[] targets_Retro; + + @JsonProperty("Fiducial") + public LimelightTarget_Fiducial[] targets_Fiducials; + + @JsonProperty("Classifier") + public LimelightTarget_Classifier[] targets_Classifier; + + @JsonProperty("Detector") + public LimelightTarget_Detector[] targets_Detector; + + @JsonProperty("Barcode") + public LimelightTarget_Barcode[] targets_Barcode; + + public Results() { + botpose = new double[6]; + botpose_wpired = new double[6]; + botpose_wpiblue = new 
double[6]; + camerapose_robotspace = new double[6]; + targets_Retro = new LimelightTarget_Retro[0]; + targets_Fiducials = new LimelightTarget_Fiducial[0]; + targets_Classifier = new LimelightTarget_Classifier[0]; + targets_Detector = new LimelightTarget_Detector[0]; + targets_Barcode = new LimelightTarget_Barcode[0]; + } + } + + public static class LimelightResults { + @JsonProperty("Results") + public Results targetingResults; + + public String error; + + public LimelightResults() { + targetingResults = new Results(); + error = ""; + } + } + + public static class RawFiducial { + public int id = 0; + public double txnc = 0; + public double tync = 0; + public double ta = 0; + public double distToCamera = 0; + public double distToRobot = 0; + public double ambiguity = 0; + + public RawFiducial( + int id, + double txnc, + double tync, + double ta, + double distToCamera, + double distToRobot, + double ambiguity) { + this.id = id; + this.txnc = txnc; + this.tync = tync; + this.ta = ta; + this.distToCamera = distToCamera; + this.distToRobot = distToRobot; + this.ambiguity = ambiguity; + } + } + + public static class RawDetection { + public int classId = 0; + public double txnc = 0; + public double tync = 0; + public double ta = 0; + public double corner0_X = 0; + public double corner0_Y = 0; + public double corner1_X = 0; + public double corner1_Y = 0; + public double corner2_X = 0; + public double corner2_Y = 0; + public double corner3_X = 0; + public double corner3_Y = 0; + + public RawDetection( + int classId, + double txnc, + double tync, + double ta, + double corner0_X, + double corner0_Y, + double corner1_X, + double corner1_Y, + double corner2_X, + double corner2_Y, + double corner3_X, + double corner3_Y) { + this.classId = classId; + this.txnc = txnc; + this.tync = tync; + this.ta = ta; + this.corner0_X = corner0_X; + this.corner0_Y = corner0_Y; + this.corner1_X = corner1_X; + this.corner1_Y = corner1_Y; + this.corner2_X = corner2_X; + this.corner2_Y = corner2_Y; 
+ this.corner3_X = corner3_X; + this.corner3_Y = corner3_Y; + } + } + + public static class PoseEstimate { + public Pose2d pose; + public double timestampSeconds; + public double latency; + public int tagCount; + public double tagSpan; + public double avgTagDist; + public double avgTagArea; + public RawFiducial[] rawFiducials; + + public PoseEstimate( + Pose2d pose, + double timestampSeconds, + double latency, + int tagCount, + double tagSpan, + double avgTagDist, + double avgTagArea, + RawFiducial[] rawFiducials) { + + this.pose = pose; + this.timestampSeconds = timestampSeconds; + this.latency = latency; + this.tagCount = tagCount; + this.tagSpan = tagSpan; + this.avgTagDist = avgTagDist; + this.avgTagArea = avgTagArea; + this.rawFiducials = rawFiducials; + } + } + + private static ObjectMapper mapper; + + /** Print JSON Parse time to the console in milliseconds */ + static boolean profileJSON = false; + + static final String sanitizeName(String name) { + if (name == "" || name == null) { + return "limelight"; + } + return name; + } + + private static Pose3d toPose3D(double[] inData) { + if (inData.length < 6) { + return new Pose3d(); + } + return new Pose3d( + new Translation3d(inData[0], inData[1], inData[2]), + new Rotation3d( + Units.degreesToRadians(inData[3]), + Units.degreesToRadians(inData[4]), + Units.degreesToRadians(inData[5]))); + } + + private static Pose2d toPose2D(double[] inData) { + if (inData.length < 6) { + return new Pose2d(); + } + Translation2d tran2d = new Translation2d(inData[0], inData[1]); + Rotation2d r2d = new Rotation2d(Units.degreesToRadians(inData[5])); + return new Pose2d(tran2d, r2d); + } + + private static double extractArrayEntry(double[] inData, int position) { + if (inData.length < position + 1) { + return 0; + } + return inData[position]; + } + + private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName) { + var poseEntry = LimelightHelpers.getLimelightNTTableEntry(limelightName, entryName); + var 
poseArray = poseEntry.getDoubleArray(new double[0]); + var pose = toPose2D(poseArray); + double latency = extractArrayEntry(poseArray, 6); + int tagCount = (int) extractArrayEntry(poseArray, 7); + double tagSpan = extractArrayEntry(poseArray, 8); + double tagDist = extractArrayEntry(poseArray, 9); + double tagArea = extractArrayEntry(poseArray, 10); + // getlastchange() in microseconds, ll latency in milliseconds + var timestamp = (poseEntry.getLastChange() / 1000000.0) - (latency / 1000.0); + + RawFiducial[] rawFiducials = new RawFiducial[tagCount]; + int valsPerFiducial = 7; + int expectedTotalVals = 11 + valsPerFiducial * tagCount; + + if (poseArray.length != expectedTotalVals) { + // Don't populate fiducials + } else { + for (int i = 0; i < tagCount; i++) { + int baseIndex = 11 + (i * valsPerFiducial); + int id = (int) poseArray[baseIndex]; + double txnc = poseArray[baseIndex + 1]; + double tync = poseArray[baseIndex + 2]; + double ta = poseArray[baseIndex + 3]; + double distToCamera = poseArray[baseIndex + 4]; + double distToRobot = poseArray[baseIndex + 5]; + double ambiguity = poseArray[baseIndex + 6]; + rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); + } + } + + return new PoseEstimate( + pose, timestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials); + } + + @SuppressWarnings("unused") + private static RawFiducial[] getRawFiducials(String limelightName) { + var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawfiducials"); + var rawFiducialArray = entry.getDoubleArray(new double[0]); + int valsPerEntry = 7; + if (rawFiducialArray.length % valsPerEntry != 0) { + return new RawFiducial[0]; + } + + int numFiducials = rawFiducialArray.length / valsPerEntry; + RawFiducial[] rawFiducials = new RawFiducial[numFiducials]; + + for (int i = 0; i < numFiducials; i++) { + int baseIndex = i * valsPerEntry; + int id = (int) extractArrayEntry(rawFiducialArray, baseIndex); + double txnc = 
extractArrayEntry(rawFiducialArray, baseIndex + 1); + double tync = extractArrayEntry(rawFiducialArray, baseIndex + 2); + double ta = extractArrayEntry(rawFiducialArray, baseIndex + 3); + double distToCamera = extractArrayEntry(rawFiducialArray, baseIndex + 4); + double distToRobot = extractArrayEntry(rawFiducialArray, baseIndex + 5); + double ambiguity = extractArrayEntry(rawFiducialArray, baseIndex + 6); + + rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); + } + + return rawFiducials; + } + + public static RawDetection[] getRawDetections(String limelightName) { + var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawdetections"); + var rawDetectionArray = entry.getDoubleArray(new double[0]); + int valsPerEntry = 11; + if (rawDetectionArray.length % valsPerEntry != 0) { + return new RawDetection[0]; + } + + int numDetections = rawDetectionArray.length / valsPerEntry; + RawDetection[] rawDetections = new RawDetection[numDetections]; + + for (int i = 0; i < numDetections; i++) { + int baseIndex = i * valsPerEntry; // Starting index for this detection's data + int classId = (int) extractArrayEntry(rawDetectionArray, baseIndex); + double txnc = extractArrayEntry(rawDetectionArray, baseIndex + 1); + double tync = extractArrayEntry(rawDetectionArray, baseIndex + 2); + double ta = extractArrayEntry(rawDetectionArray, baseIndex + 3); + double corner0_X = extractArrayEntry(rawDetectionArray, baseIndex + 4); + double corner0_Y = extractArrayEntry(rawDetectionArray, baseIndex + 5); + double corner1_X = extractArrayEntry(rawDetectionArray, baseIndex + 6); + double corner1_Y = extractArrayEntry(rawDetectionArray, baseIndex + 7); + double corner2_X = extractArrayEntry(rawDetectionArray, baseIndex + 8); + double corner2_Y = extractArrayEntry(rawDetectionArray, baseIndex + 9); + double corner3_X = extractArrayEntry(rawDetectionArray, baseIndex + 10); + double corner3_Y = extractArrayEntry(rawDetectionArray, baseIndex 
+ 11); + + rawDetections[i] = + new RawDetection( + classId, txnc, tync, ta, corner0_X, corner0_Y, corner1_X, corner1_Y, corner2_X, + corner2_Y, corner3_X, corner3_Y); + } + + return rawDetections; + } + + @SuppressWarnings("unused") + private static void printPoseEstimate(PoseEstimate pose) { + if (pose == null) { + System.out.println("No PoseEstimate available."); + return; + } + + System.out.printf("Pose Estimate Information:%n"); + System.out.printf("Timestamp (Seconds): %.3f%n", pose.timestampSeconds); + System.out.printf("Latency: %.3f ms%n", pose.latency); + System.out.printf("Tag Count: %d%n", pose.tagCount); + System.out.printf("Tag Span: %.2f meters%n", pose.tagSpan); + System.out.printf("Average Tag Distance: %.2f meters%n", pose.avgTagDist); + System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); + System.out.println(); + + if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { + System.out.println("No RawFiducials data available."); + return; + } + + System.out.println("Raw Fiducials Details:"); + for (int i = 0; i < pose.rawFiducials.length; i++) { + RawFiducial fiducial = pose.rawFiducials[i]; + System.out.printf(" Fiducial #%d:%n", i + 1); + System.out.printf(" ID: %d%n", fiducial.id); + System.out.printf(" TXNC: %.2f%n", fiducial.txnc); + System.out.printf(" TYNC: %.2f%n", fiducial.tync); + System.out.printf(" TA: %.2f%n", fiducial.ta); + System.out.printf(" Distance to Camera: %.2f meters%n", fiducial.distToCamera); + System.out.printf(" Distance to Robot: %.2f meters%n", fiducial.distToRobot); + System.out.printf(" Ambiguity: %.2f%n", fiducial.ambiguity); + System.out.println(); + } + } + + public static NetworkTable getLimelightNTTable(String tableName) { + return NetworkTableInstance.getDefault().getTable(sanitizeName(tableName)); + } + + public static NetworkTableEntry getLimelightNTTableEntry(String tableName, String entryName) { + return getLimelightNTTable(tableName).getEntry(entryName); + } + + public static 
double getLimelightNTDouble(String tableName, String entryName) { + return getLimelightNTTableEntry(tableName, entryName).getDouble(0.0); + } + + public static void setLimelightNTDouble(String tableName, String entryName, double val) { + getLimelightNTTableEntry(tableName, entryName).setDouble(val); + } + + public static void setLimelightNTDoubleArray(String tableName, String entryName, double[] val) { + getLimelightNTTableEntry(tableName, entryName).setDoubleArray(val); + } + + public static double[] getLimelightNTDoubleArray(String tableName, String entryName) { + return getLimelightNTTableEntry(tableName, entryName).getDoubleArray(new double[0]); + } + + public static String getLimelightNTString(String tableName, String entryName) { + return getLimelightNTTableEntry(tableName, entryName).getString(""); + } + + public static URL getLimelightURLString(String tableName, String request) { + String urlString = "http://" + sanitizeName(tableName) + ".local:5807/" + request; + URL url; + try { + url = new URL(urlString); + return url; + } catch (MalformedURLException e) { + System.err.println("bad LL URL"); + } + return null; + } + + ///// + ///// + + public static double getTX(String limelightName) { + return getLimelightNTDouble(limelightName, "tx"); + } + + public static double getTY(String limelightName) { + return getLimelightNTDouble(limelightName, "ty"); + } + + public static double getTA(String limelightName) { + return getLimelightNTDouble(limelightName, "ta"); + } + + public static double getLatency_Pipeline(String limelightName) { + return getLimelightNTDouble(limelightName, "tl"); + } + + public static double getLatency_Capture(String limelightName) { + return getLimelightNTDouble(limelightName, "cl"); + } + + public static double getCurrentPipelineIndex(String limelightName) { + return getLimelightNTDouble(limelightName, "getpipe"); + } + + public static String getJSONDump(String limelightName) { + return getLimelightNTString(limelightName, "json"); + } + 
+ /** + * Switch to getBotPose + * + * @param limelightName + * @return + */ + @Deprecated + public static double[] getBotpose(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose"); + } + + /** + * Switch to getBotPose_wpiRed + * + * @param limelightName + * @return + */ + @Deprecated + public static double[] getBotpose_wpiRed(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose_wpired"); + } + + /** + * Switch to getBotPose_wpiBlue + * + * @param limelightName + * @return + */ + @Deprecated + public static double[] getBotpose_wpiBlue(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); + } + + public static double[] getBotPose(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose"); + } + + public static double[] getBotPose_wpiRed(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose_wpired"); + } + + public static double[] getBotPose_wpiBlue(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); + } + + public static double[] getBotPose_TargetSpace(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); + } + + public static double[] getCameraPose_TargetSpace(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); + } + + public static double[] getTargetPose_CameraSpace(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); + } + + public static double[] getTargetPose_RobotSpace(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); + } + + public static double[] getTargetColor(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "tc"); + } + + public static double getFiducialID(String limelightName) { + return getLimelightNTDouble(limelightName, "tid"); + } + + public 
static String getNeuralClassID(String limelightName) { + return getLimelightNTString(limelightName, "tclass"); + } + + ///// + ///// + + public static Pose3d getBotPose3d(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose"); + return toPose3D(poseArray); + } + + public static Pose3d getBotPose3d_wpiRed(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); + return toPose3D(poseArray); + } + + public static Pose3d getBotPose3d_wpiBlue(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); + return toPose3D(poseArray); + } + + public static Pose3d getBotPose3d_TargetSpace(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); + return toPose3D(poseArray); + } + + public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); + return toPose3D(poseArray); + } + + public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); + return toPose3D(poseArray); + } + + public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); + return toPose3D(poseArray); + } + + public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { + double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); + return toPose3D(poseArray); + } + + /** + * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) + * + * @param limelightName + * @return + */ + public static Pose2d getBotPose2d_wpiBlue(String limelightName) { + + double[] result = getBotPose_wpiBlue(limelightName); + return toPose2D(result); + } + + /** + * Gets 
the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when + * you are on the BLUE alliance + * + * @param limelightName + * @return + */ + public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { + return getBotPoseEstimate(limelightName, "botpose_wpiblue"); + } + + /** + * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when + * you are on the BLUE alliance + * + * @param limelightName + * @return + */ + public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { + return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue"); + } + + /** + * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) + * + * @param limelightName + * @return + */ + public static Pose2d getBotPose2d_wpiRed(String limelightName) { + + double[] result = getBotPose_wpiRed(limelightName); + return toPose2D(result); + } + + /** + * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when + * you are on the RED alliance + * + * @param limelightName + * @return + */ + public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { + return getBotPoseEstimate(limelightName, "botpose_wpired"); + } + + /** + * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when + * you are on the RED alliance + * + * @param limelightName + * @return + */ + public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightName) { + return getBotPoseEstimate(limelightName, "botpose_orb_wpired"); + } + + /** + * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) + * + * @param limelightName + * @return + */ + public static Pose2d getBotPose2d(String limelightName) { + + double[] result = getBotPose(limelightName); + return toPose2D(result); + } + + public static boolean getTV(String limelightName) { + return 1.0 == 
getLimelightNTDouble(limelightName, "tv"); + } + + ///// + ///// + + public static void setPipelineIndex(String limelightName, int pipelineIndex) { + setLimelightNTDouble(limelightName, "pipeline", pipelineIndex); + } + + public static void setPriorityTagID(String limelightName, int ID) { + setLimelightNTDouble(limelightName, "priorityid", ID); + } + + /** The LEDs will be controlled by Limelight pipeline settings, and not by robot code. */ + public static void setLEDMode_PipelineControl(String limelightName) { + setLimelightNTDouble(limelightName, "ledMode", 0); + } + + public static void setLEDMode_ForceOff(String limelightName) { + setLimelightNTDouble(limelightName, "ledMode", 1); + } + + public static void setLEDMode_ForceBlink(String limelightName) { + setLimelightNTDouble(limelightName, "ledMode", 2); + } + + public static void setLEDMode_ForceOn(String limelightName) { + setLimelightNTDouble(limelightName, "ledMode", 3); + } + + public static void setStreamMode_Standard(String limelightName) { + setLimelightNTDouble(limelightName, "stream", 0); + } + + public static void setStreamMode_PiPMain(String limelightName) { + setLimelightNTDouble(limelightName, "stream", 1); + } + + public static void setStreamMode_PiPSecondary(String limelightName) { + setLimelightNTDouble(limelightName, "stream", 2); + } + + public static void setCameraMode_Processor(String limelightName) { + setLimelightNTDouble(limelightName, "camMode", 0); + } + + public static void setCameraMode_Driver(String limelightName) { + setLimelightNTDouble(limelightName, "camMode", 1); + } + + /** + * Sets the crop window. The crop window in the UI must be completely open for dynamic cropping to + * work. 
+ */ + public static void setCropWindow( + String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { + double[] entries = new double[4]; + entries[0] = cropXMin; + entries[1] = cropXMax; + entries[2] = cropYMin; + entries[3] = cropYMax; + setLimelightNTDoubleArray(limelightName, "crop", entries); + } + + public static void SetRobotOrientation( + String limelightName, + double yaw, + double yawRate, + double pitch, + double pitchRate, + double roll, + double rollRate) { + + double[] entries = new double[6]; + entries[0] = yaw; + entries[1] = yawRate; + entries[2] = pitch; + entries[3] = pitchRate; + entries[4] = roll; + entries[5] = rollRate; + setLimelightNTDoubleArray(limelightName, "robot_orientation_set", entries); + } + + public static void SetFiducialIDFiltersOverride(String limelightName, int[] validIDs) { + double[] validIDsDouble = new double[validIDs.length]; + for (int i = 0; i < validIDs.length; i++) { + validIDsDouble[i] = validIDs[i]; + } + setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); + } + + public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { + int d = 0; // pipeline + if (downscale == 1.0) { + d = 1; + } + if (downscale == 1.5) { + d = 2; + } + if (downscale == 2) { + d = 3; + } + if (downscale == 3) { + d = 4; + } + if (downscale == 4) { + d = 5; + } + setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); + } + + public static void setCameraPose_RobotSpace( + String limelightName, + double forward, + double side, + double up, + double roll, + double pitch, + double yaw) { + double[] entries = new double[6]; + entries[0] = forward; + entries[1] = side; + entries[2] = up; + entries[3] = roll; + entries[4] = pitch; + entries[5] = yaw; + setLimelightNTDoubleArray(limelightName, "camerapose_robotspace_set", entries); + } + + ///// + ///// + + public static void setPythonScriptData(String limelightName, double[] outgoingPythonData) 
{ + setLimelightNTDoubleArray(limelightName, "llrobot", outgoingPythonData); + } + + public static double[] getPythonScriptData(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "llpython"); + } + + ///// + ///// + + /** Asynchronously take snapshot. */ + public static CompletableFuture takeSnapshot(String tableName, String snapshotName) { + return CompletableFuture.supplyAsync( + () -> { + return SYNCH_TAKESNAPSHOT(tableName, snapshotName); + }); + } + + private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) { + URL url = getLimelightURLString(tableName, "capturesnapshot"); + try { + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod("GET"); + if (snapshotName != null && snapshotName != "") { + connection.setRequestProperty("snapname", snapshotName); + } + + int responseCode = connection.getResponseCode(); + if (responseCode == 200) { + return true; + } else { + System.err.println("Bad LL Request"); + } + } catch (IOException e) { + System.err.println(e.getMessage()); + } + return false; + } + + /** Parses Limelight's JSON results dump into a LimelightResults Object */ + public static LimelightResults getLatestResults(String limelightName) { + + long start = System.nanoTime(); + LimelightHelpers.LimelightResults results = new LimelightHelpers.LimelightResults(); + if (mapper == null) { + mapper = + new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + + try { + results = mapper.readValue(getJSONDump(limelightName), LimelightResults.class); + } catch (JsonProcessingException e) { + results.error = "lljson error: " + e.getMessage(); + } + + long end = System.nanoTime(); + double millis = (end - start) * .000001; + results.targetingResults.latency_jsonParse = millis; + if (profileJSON) { + System.out.printf("lljson: %.2f\r\n", millis); + } + + return results; + } +} diff --git a/src/main/java/frc/robot/util/MathHelpers.java 
b/src/main/java/frc/robot/util/MathHelpers.java new file mode 100644 index 0000000..3b6a97e --- /dev/null +++ b/src/main/java/frc/robot/util/MathHelpers.java @@ -0,0 +1,48 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.util; + +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation2d; + +public class MathHelpers { + public static final Pose2d kPose2dZero = new Pose2d(); + + public static final Pose2d pose2dFromRotation(Rotation2d rotation) { + return new Pose2d(kTranslation2dZero, rotation); + } + + public static final Pose2d pose2dFromTranslation(Translation2d translation) { + return new Pose2d(translation, kRotation2dZero); + } + + public static final Rotation2d kRotation2dZero = new Rotation2d(); + public static final Rotation2d kRotation2dPi = Rotation2d.fromDegrees(180.0); + + public static final Translation2d kTranslation2dZero = new Translation2d(); + + public static final Transform2d kTransform2dZero = new Transform2d(); + + public static final Transform2d transform2dFromRotation(Rotation2d rotation) { + return new Transform2d(kTranslation2dZero, rotation); + } + + public static final Transform2d transform2dFromTranslation(Translation2d translation) { + return new Transform2d(translation, kRotation2dZero); + } +} diff --git 
a/src/main/java/frc/robot/util/RobotTime.java b/src/main/java/frc/robot/util/RobotTime.java new file mode 100644 index 0000000..1f4b48d --- /dev/null +++ b/src/main/java/frc/robot/util/RobotTime.java @@ -0,0 +1,25 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.util; + +import org.littletonrobotics.junction.Logger; + +public class RobotTime { + public static double getTimestampSeconds() { + long micros = Logger.getTimestamp(); + return (double) micros * 1.0E-6; + } +} diff --git a/src/main/java/frc/robot/util/Util.java b/src/main/java/frc/robot/util/Util.java new file mode 100644 index 0000000..9b3c181 --- /dev/null +++ b/src/main/java/frc/robot/util/Util.java @@ -0,0 +1,123 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.util; + +import static frc.robot.Constants.AprilTagConstants.*; + +import edu.wpi.first.math.geometry.Translation3d; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.IntSupplier; +import java.util.function.Supplier; + +/** Contains basic functions that are used often. */ +public class Util { + public static final double kEpsilon = 1e-12; + + /** Prevent this class from being instantiated. */ + private Util() {} + + /** Limits the given input to the given magnitude. */ + public static double limit(double v, double maxMagnitude) { + return limit(v, -maxMagnitude, maxMagnitude); + } + + public static double limit(double v, double min, double max) { + return Math.min(max, Math.max(min, v)); + } + + public static int limit(int v, int min, int max) { + return Math.min(max, Math.max(min, v)); + } + + public static boolean inRange(double v, double maxMagnitude) { + return inRange(v, -maxMagnitude, maxMagnitude); + } + + /** Checks if the given input is within the range (min, max), both exclusive. 
*/ + public static boolean inRange(double v, double min, double max) { + return v > min && v < max; + } + + public static double interpolate(double a, double b, double x) { + x = limit(x, 0.0, 1.0); + return a + (b - a) * x; + } + + public static String joinStrings(final String delim, final List strings) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < strings.size(); ++i) { + sb.append(strings.get(i).toString()); + if (i < strings.size() - 1) { + sb.append(delim); + } + } + return sb.toString(); + } + + public static boolean epsilonEquals(double a, double b, double epsilon) { + return (a - epsilon <= b) && (a + epsilon >= b); + } + + public static boolean epsilonEquals(double a, double b) { + return epsilonEquals(a, b, kEpsilon); + } + + public static boolean epsilonEquals(int a, int b, int epsilon) { + return (a - epsilon <= b) && (a + epsilon >= b); + } + + public static boolean allCloseTo(final List list, double value, double epsilon) { + boolean result = true; + for (Double value_in : list) { + result &= epsilonEquals(value_in, value, epsilon); + } + return result; + } + + public static double handleDeadband(double value, double deadband) { + deadband = Math.abs(deadband); + if (deadband == 1) { + return 0; + } + double scaledValue = (value + (value < 0 ? deadband : -deadband)) / (1 - deadband); + return (Math.abs(value) > Math.abs(deadband)) ? 
scaledValue : 0; + } + + public static Translation3d flipRedBlue(Translation3d original) { + return new Translation3d( + kFieldLengthMeters - original.getX(), original.getY(), original.getZ()); + } + + public static Supplier memoizeByIteration(IntSupplier iteration, Supplier delegate) { + AtomicReference value = new AtomicReference<>(); + AtomicInteger last_iteration = new AtomicInteger(-1); + return () -> { + int last = last_iteration.get(); + int now = iteration.getAsInt(); + if (last != now) { + value.updateAndGet((cur) -> null); + } + T val = value.get(); + if (val == null) { + val = value.updateAndGet(cur -> cur == null ? delegate.get() : cur); + last_iteration.set(now); + } + return val; + }; + } +} From 3071c2b1e8cfbfaffbfb71c23669d3d5bf746a57 Mon Sep 17 00:00:00 2001 From: NutHouse coco-prog-3 Date: Fri, 16 May 2025 14:14:16 -0700 Subject: [PATCH 04/12] FRC254's 2024 vision code builds! Next step is to mesh in the parts of the vision code I need for the circular vision buffer to the base vision subsystem. 
new file: src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java new file: src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java --- .../subsystems/vision/FRC254/Constants.java | 408 ++++++++++++++++++ .../vision/FRC254/FiducialObservation.java | 5 + .../vision/FRC254/MegatagPoseEstimate.java | 5 + .../subsystems/vision/FRC254/RobotState.java | 37 +- .../vision/FRC254/VisionSubsystem.java | 6 - .../robot/util/ServoMotorSubsystemConfig.java | 34 ++ 6 files changed, 454 insertions(+), 41 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java create mode 100644 src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java new file mode 100644 index 0000000..cd02f1d --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java @@ -0,0 +1,408 @@ +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC254; + +import com.ctre.phoenix6.configs.ClosedLoopRampsConfigs; +import com.ctre.phoenix6.configs.OpenLoopRampsConfigs; +import com.ctre.phoenix6.signals.InvertedValue; +import com.pathplanner.lib.path.PathConstraints; +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.apriltag.AprilTagFields; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.trajectory.TrapezoidProfile; +import edu.wpi.first.math.util.Units; +import frc.robot.util.RobotDeviceId; +import frc.robot.util.ServoMotorSubsystemConfig; +import java.net.NetworkInterface; +import java.net.SocketException; +import java.util.Enumeration; + +/** Class to store all constants for robot code. 
*/ +public class Constants { + public static final String kCanBusCanivore = "canivore"; + public static boolean kIsReplay = false; + public static final String kPracticeBotMacAddress = "00:80:2F:33:D1:4B"; + public static boolean kIsPracticeBot = hasMacAddress(kPracticeBotMacAddress); + + public static final double kSteerJoystickDeadband = 0.05; + + public static final ClosedLoopRampsConfigs makeDefaultClosedLoopRampConfig() { + return new ClosedLoopRampsConfigs() + .withDutyCycleClosedLoopRampPeriod(0.02) + .withTorqueClosedLoopRampPeriod(0.02) + .withVoltageClosedLoopRampPeriod(0.02); + } + + public static final OpenLoopRampsConfigs makeDefaultOpenLoopRampConfig() { + return new OpenLoopRampsConfigs() + .withDutyCycleOpenLoopRampPeriod(0.02) + .withTorqueOpenLoopRampPeriod(0.02) + .withVoltageOpenLoopRampPeriod(0.02); + } + + public static final class DriveConstants { + public static final double kDriveMaxSpeed = 4.43676260556; + public static final double kDriveMaxAngularRate = Units.degreesToRadians(360 * 1.15); + public static final double kHeadingControllerP = 3.5; + public static final double kHeadingControllerI = 0; + public static final double kHeadingControllerD = 0; + } + + public static final class SensorConstants { + public static final int kAmpPostChopstickSensorPort = 4; + public static final int kShooterStage1BannerSensorPort = 3; + public static final int kAmpBannerSensorPort = 2; + public static final int kFeederBannerSensorPort = 1; + public static final int kIntakeBannerSensorPort = 0; + public static final double kShooterDebounceTime = 0.01; + public static final double kAmpDebounceTime = 0.01; + public static final double kFeederDebounceTime = 0.01; + public static final double kIntakeDebounceTime = 0.01; + } + + public static final class ClimberConstants { + public static final RobotDeviceId kClimberTalonCanID = + new RobotDeviceId(24, kCanBusCanivore, 0); + public static final double kClimberP = kIsPracticeBot ? 
1.0 : 1.0; + public static final double kForwardMaxPositionRotations = kIsPracticeBot ? 119.0 : 132.0; + public static final double kHooksUpPositionRotations = kForwardMaxPositionRotations * 0.9; + public static final double kStageHooksRotations = kForwardMaxPositionRotations * 0.4; + public static final double kClimbClimbedPositionToleranceRotations = + kForwardMaxPositionRotations * 0.1; + public static final double kPositionToleranceRotations = 2.0; + public static final double kClimberGearRatio = 1.0 / (10.0); + public static double kReverseMinPositionRotations = 0.0; + } + + public static final class ElevatorConstants { + public static final double ElevatorMinPositionRotations = 0.0; + public static final double ElevatorMaxPositionRotations = 15.356933; + public static final double ElevatorMaxHeightInches = 16.5; + public static final double kElevatorGearRatio = (11.0 / 36.0) * (18. / 15.); + public static final double kElevatorPositionToleranceRotations = 0.1; + public static final double kAmpScoringHeightInches = 16.0; + public static final double kElevatorHomeHeightInches = 0.0; + public static final double kIntakeFromSourceHeightInches = 14.5; + public static final double kElevatorPositioningToleranceInches = 0.5; + public static final double kClimbHeightInches = 16.0; + public static final double kSpoolDiameter = Units.inchesToMeters(0.940); + } + + // Intake Constants + public static final class IntakeConstants { + public static final double kIntakeDutyCycleIntake = 1.; + public static final double kIntakeDutyCycleIntakeFromSource = 1.0; + public static final double kIntakeDutyCycleExhuast = -1.0; + } + + // Feeder Constants + public static final class FeederConstants { + public static final double kFeederRPSIntake = 70.0; + public static final boolean kRunClockwise = true; + } + + public static class AmpConstants { + public static final double kAmpIntakeFromSourceDutyCycle = -1.0; + public static final double kAmpExhaustToStageDutyCycle = 0.85; + public 
static final double kAmpSlowlyStageDutyCycle = 0.1; + + public static final double kAmpScoreDutyCycle = -1.0; + + // Number of rotations to turn after beam break is tripped (rising edge) to stow + // the note prior to scoring + public static final double kAmpChopsticksStageRotations = 6.0; + public static final double kTrapChopsticksStageRotations = 10.0; + public static final double kAmpChopsticksGoBackRotations = -1.75; + public static final double kAmpChopsticksGoBackRotationsTolerance = 0.25; + public static final double kAmpChopsticksStageRotationsTolerance = 0.1; + } + + public static final ServoMotorSubsystemConfig kAmpConfig = new ServoMotorSubsystemConfig(); + + static { + kAmpConfig.name = "Amp"; + kAmpConfig.talonCANID = new RobotDeviceId(25, kCanBusCanivore, 0); + kAmpConfig.fxConfig.MotorOutput.Inverted = InvertedValue.Clockwise_Positive; + + kAmpConfig.fxConfig.Audio.BeepOnBoot = false; + kAmpConfig.fxConfig.Audio.BeepOnConfig = false; + + kAmpConfig.fxConfig.Slot0.kS = 0.015 * 12.0; + kAmpConfig.fxConfig.Slot0.kP = 0.3 * 12.0; + kAmpConfig.fxConfig.Slot0.kV = 0.00925 * 12.0; + kAmpConfig.fxConfig.Slot0.kA = 0.0001 * 12.0; + kAmpConfig.fxConfig.MotionMagic.MotionMagicAcceleration = 500.0; + kAmpConfig.fxConfig.MotionMagic.MotionMagicCruiseVelocity = 50.0; + + kAmpConfig.fxConfig.CurrentLimits.SupplyCurrentLimitEnable = true; + kAmpConfig.fxConfig.CurrentLimits.SupplyCurrentLimit = 60.0; + + kAmpConfig.fxConfig.CurrentLimits.StatorCurrentLimit = 80.0; + kAmpConfig.fxConfig.CurrentLimits.StatorCurrentLimitEnable = true; + kAmpConfig.fxConfig.ClosedLoopRamps = makeDefaultClosedLoopRampConfig(); + kAmpConfig.fxConfig.OpenLoopRamps = makeDefaultOpenLoopRampConfig(); + } + + public static final class ShooterConstants { + public static final Rotation2d kTurretToShotCorrection = + new Rotation2d(Units.degreesToRadians(1.5)); + + public static final double kPoopMaxApexHeight = Units.inchesToMeters(160.0); + + public static final double 
kStage2ShooterWheelDiameter = Units.inchesToMeters(3.0); // in + public static final double kStage1ShooterWheelDiameter = Units.inchesToMeters(2.0); // in + + public static final double kRingLaunchVelMetersPerSecPerRotPerSec = 0.141; + public static final double kRingLaunchLiftCoeff = 0.013; // Multiply by v^2 to get lift accel + public static final double kShooterStage2RPSShortRange = 120.0; // rot/s + public static final double kShooterStage2MaxShortRangeDistance = 2.0; + public static final double kShooterStage2MinLongRangeDistance = 3.0; + public static final double kShooterStage2RPSLongRange = 120.0; // rot/s + public static final double kShooterStage2RPSCap = 130.0; // rot/s + public static final double kShooterStage1RPS = 70.0; // rot/s + public static final double kShooterStage2Epsilon = 3.0; + public static final double kShooterSpinupStage1RPS = 0.0; + + public static final double kShooterStage1IntakeRPS = 4.0; + public static final double kShooterStage1ExhaustRPS = -10.0; + + public static final double kFenderShotRPS = 100.0; + public static final double kPreloadShotRPS = 90.0; + + public static final double kBottomRollerSpeedupFactor = + 1.0; // multiplied to all setpoints to determine how + // much + // extra power to give the bottom roller. >1.0 = + // faster + // bottom roller + public static final double kTopRollerSpeedupFactor = 1.0; + } + + public static final double kJoystickThreshold = 0.1; + public static final int kDriveGamepadPort = 0; + + // Controls + public static final boolean kForceDriveGamepad = true; + public static final int kGamepadAdditionalControllerPort = 1; + public static final int kOperatorControllerPort = 2; + public static final int kMainThrottleJoystickPort = 0; + public static final int kMainTurnJoystickPort = 1; + public static final double kDriveJoystickThreshold = 0.03; + + // Limelight constants + + // TURRET LIMELIGHT + // Pitch angle: How many radians the camera is pitched up around Y axis. 
0 is + // looking straight ahead, +is nodding up. + public static final double kCameraPitchDegrees = kIsPracticeBot ? 28.0 : 27.5; + public static final double kCameraPitchRads = Units.degreesToRadians(kCameraPitchDegrees); + public static final double kCameraHeightOffGroundMeters = + kIsPracticeBot ? Units.inchesToMeters(11.181) : Units.inchesToMeters(11.181); + public static final double kImageCaptureLatency = 11.0; // milliseconds + public static final double kLimelightTransmissionTimeLatency = 0.0; // milliseconds + public static final String kLimelightTableName = "limelight-turret"; + // Distance from turret center to camera lens in X axis (straight into lens) + public static final double kTurretToCameraX = + kIsPracticeBot ? Units.inchesToMeters(5.834) : Units.inchesToMeters(5.834); + // Distance from turret center to camera lens in Y + public static final double kTurretToCameraY = 0; + + // ELEVATOR LIMELIGHT + public static final String kLimelightBTableName = "limelight-eleva"; + public static final double kCameraBPitchDegrees = kIsPracticeBot ? 15.0 : 16.0; + public static final double kCameraBPitchRads = Units.degreesToRadians(kCameraBPitchDegrees); + public static final double kCameraBRollDegrees = kIsPracticeBot ? 0.0 : 0.0; + public static final double kCameraBRollRads = Units.degreesToRadians(kCameraBRollDegrees); + public static final double kCameraBHeightOffGroundMeters = + kIsPracticeBot + ? Units.inchesToMeters(19.477) + : Units.inchesToMeters(19.477); // verify for practice + // Distance from turret center to camera lens in X axis (straight into lens) + public static final double kTurretToCameraBX = + kIsPracticeBot + ? 
Units.inchesToMeters(14.882) + : Units.inchesToMeters(14.882); // verify for practice + // Distance from turret center to camera lens in Y + public static final double kTurretToCameraBY = 0; + + public static final double kTurretToRobotCenterX = Units.inchesToMeters(2.3115); + public static final double kTurretToRobotCenterY = 0; + public static final Transform2d kTurretToRobotCenter = + new Transform2d( + new Translation2d(Constants.kTurretToRobotCenterX, Constants.kTurretToRobotCenterY), + new Rotation2d()); + public static final Rotation2d kCameraYawOffset = new Rotation2d(0); + + // April Tag Layout + public static final AprilTagFieldLayout kAprilTagLayout = + AprilTagFields.k2024Crescendo.loadAprilTagLayoutField(); + + public static final double kFieldWidthMeters = + kAprilTagLayout.getFieldWidth(); // distance between field walls, + // 8.211m + public static final double kFieldLengthMeters = + kAprilTagLayout.getFieldLength(); // distance between driver station + // walls, 16.541m + + public static final Pose2d kBlueAmpPose = new Pose2d(1.820, 7.680, Rotation2d.fromDegrees(90.0)); + public static final Pose2d kRedAmpPose = + new Pose2d( + kFieldWidthMeters - kBlueAmpPose.getX(), + kBlueAmpPose.getY(), + Rotation2d.fromDegrees(180 - kBlueAmpPose.getRotation().getDegrees())); // X 14.7345 + + public static final Pose2d kBlueClimbPoseFeed = + new Pose2d(4.4, 3.3, Rotation2d.fromDegrees(60.0)); + public static final Pose2d kBlueClimbPoseAmp = + new Pose2d(4.43, 4.95, Rotation2d.fromDegrees(-60.0)); + public static final Pose2d kBlueClimbPoseMidline = + new Pose2d(5.8, 4.1, Rotation2d.fromDegrees(180.0)); + public static final Pose2d kRedClimbPoseFeed = + new Pose2d( + kFieldWidthMeters - kBlueClimbPoseFeed.getX(), + kBlueClimbPoseFeed.getY(), + Rotation2d.fromDegrees(180 - kBlueClimbPoseFeed.getRotation().getDegrees())); + public static final Pose2d kRedClimbPoseAmp = + new Pose2d( + kFieldWidthMeters - kBlueClimbPoseAmp.getX(), + kBlueClimbPoseAmp.getY(), + 
Rotation2d.fromDegrees(180 - kBlueClimbPoseAmp.getRotation().getDegrees())); + public static final Pose2d kRedClimbPoseMidline = + new Pose2d( + kFieldWidthMeters - kBlueClimbPoseMidline.getX(), + kBlueClimbPoseMidline.getY(), + Rotation2d.fromDegrees(180 - kBlueClimbPoseMidline.getRotation().getDegrees())); + public static final Translation3d kRedSpeakerPose = + new Translation3d( + Constants.kAprilTagLayout.getTagPose(4).get().getX(), + Constants.kAprilTagLayout.getTagPose(4).get().getY(), + 2.045); + public static final Translation3d kBlueSpeakerPose = + new Translation3d( + Constants.kAprilTagLayout.getTagPose(7).get().getX(), + Constants.kAprilTagLayout.getTagPose(7).get().getY(), + 2.045); + public static final Translation3d kRedSpeakerTopPose = + new Translation3d( + Constants.kAprilTagLayout.getTagPose(4).get().getX() - Units.inchesToMeters(20.057), + Constants.kAprilTagLayout.getTagPose(4).get().getY(), + Constants.kAprilTagLayout.getTagPose(4).get().getZ() + Units.inchesToMeters(32.563)); + public static final Translation3d kBlueSpeakerTopPose = + new Translation3d( + Constants.kAprilTagLayout.getTagPose(7).get().getX() + Units.inchesToMeters(20.057), + Constants.kAprilTagLayout.getTagPose(7).get().getY(), + Constants.kAprilTagLayout.getTagPose(7).get().getZ() + Units.inchesToMeters(32.563)); + + public static final Translation2d kBlueStageCenterPose = + new Translation2d(4.83, kFieldWidthMeters / 2.0); + public static final Translation2d kRedStageCenterPose = + new Translation2d( + kFieldLengthMeters - kBlueStageCenterPose.getX(), kBlueStageCenterPose.getY()); + public static final double kBlueSpeakerToStageAutoSwitchX = 7.0; + public static final double kRedSpeakerToStageAutoSwitchX = + kFieldLengthMeters - kBlueSpeakerToStageAutoSwitchX; + + public static final double kNoteReleaseHeight = Units.inchesToMeters(22.183); + public static final Pose3d kLeftRedSpeakerPose = + new Pose3d( + Constants.kAprilTagLayout.getTagPose(4).get().getX(), 5.875, 2.04, 
new Rotation3d()); + public static final Pose3d kRightRedSpeakerPose = + new Pose3d( + Constants.kAprilTagLayout.getTagPose(4).get().getX(), 5.29, 2.04, new Rotation3d()); + public static final Pose3d kLeftBlueSpeakerPose = + new Pose3d( + Constants.kAprilTagLayout.getTagPose(7).get().getX(), 5.875, 2.04, new Rotation3d()); + public static final Pose3d kRightBlueSpeakerPose = + new Pose3d( + Constants.kAprilTagLayout.getTagPose(7).get().getX(), 5.29, 2.04, new Rotation3d()); + public static final double kSpeakerLengthMeters = + kLeftRedSpeakerPose.getY() - kRightRedSpeakerPose.getY(); + + public static final class AutoConstants { + public static final double kMaxSpeedMetersPerSecond = 4.75; + public static final double kMaxAccelerationMetersPerSecondSquared = 4.85; + public static final double kMaxAngularSpeedRadiansPerSecond = Math.PI; + public static final double kMaxAngularSpeedRadiansPerSecondSquared = Math.PI; + + public static final double kPXController = 6.0; + public static final double kPYController = 2.0; + public static final double kPThetaController = 4.0; + + public static final double kTranslationKa = 0.0; + + // Constraint for the motion profiled robot angle controller + public static final TrapezoidProfile.Constraints kThetaControllerConstraints = + new TrapezoidProfile.Constraints( + kMaxAngularSpeedRadiansPerSecond, kMaxAngularSpeedRadiansPerSecondSquared); + + public static final PathConstraints kAutoAlignPathConstraints = + new PathConstraints( + kMaxSpeedMetersPerSecond, + kMaxAccelerationMetersPerSecondSquared, + kMaxAngularSpeedRadiansPerSecond, + kMaxAngularSpeedRadiansPerSecondSquared); + } + + public static final class LEDConstants { + public static final RobotDeviceId kCANdleId = new RobotDeviceId(30, kCanBusCanivore, 0); + public static final int kNonCandleLEDCount = 30; + public static final int kCandleLEDCount = 8; + public static final int kMaxLEDCount = kNonCandleLEDCount + kCandleLEDCount; + } + + /** + * Check if this system has a 
certain mac address in any network device. + * + * @param mac_address Mac address to check. + * @return true if some device with this mac address exists on this system. + */ + public static boolean hasMacAddress(final String mac_address) { + try { + Enumeration nwInterface = NetworkInterface.getNetworkInterfaces(); + while (nwInterface.hasMoreElements()) { + NetworkInterface nis = nwInterface.nextElement(); + if (nis == null) { + continue; + } + StringBuilder device_mac_sb = new StringBuilder(); + System.out.println("hasMacAddress: NIS: " + nis.getDisplayName()); + byte[] mac = nis.getHardwareAddress(); + if (mac != null) { + for (int i = 0; i < mac.length; i++) { + device_mac_sb.append(String.format("%02X%s", mac[i], (i < mac.length - 1) ? ":" : "")); + } + String device_mac = device_mac_sb.toString(); + System.out.println( + "hasMacAddress: NIS " + nis.getDisplayName() + " device_mac: " + device_mac); + if (mac_address.equals(device_mac)) { + System.out.println("hasMacAddress: ** Mac address match! 
" + device_mac); + return true; + } + } else { + System.out.println("hasMacAddress: Address doesn't exist or is not accessible"); + } + } + + } catch (SocketException e) { + e.printStackTrace(); + } + return false; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java index 40bd60e..c69e735 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java @@ -30,6 +30,11 @@ public Class getTypeClass() { return FiducialObservation.class; } + @Override + public String getTypeName() { + return "FiducialObservation"; + } + @Override public String getTypeString() { return "struct:FiducialObservation"; diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java index 924324f..a65c0b4 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java @@ -32,6 +32,11 @@ public Class getTypeClass() { return MegatagPoseEstimate.class; } + @Override + public String getTypeName() { + return "MegatagPoseEstimate"; + } + @Override public String getTypeString() { return "struct:MegatagPoseEstimate"; diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java index 0aeb482..b7d0014 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java @@ -13,17 +13,15 @@ package frc.robot.subsystems.vision.FRC254; -import edu.wpi.first.math.MathUtil; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import 
edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.geometry.Twist2d; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; +import edu.wpi.first.wpilibj.RobotBase; import edu.wpi.first.wpilibj.Timer; import frc.robot.util.ConcurrentTimeInterpolatableBuffer; import frc.robot.util.MathHelpers; @@ -287,7 +285,7 @@ public Optional getMaxAbsTurretYawAngularVelocityInRange(double minTime, public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double maxTime) { // Gyro yaw rate not set in sim. - if (Robot.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); + if (RobotBase.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); } @@ -363,35 +361,4 @@ public void updateLogger() { Pose3d hoodPose3d = new Pose3d(); Pose3d shooterPose3d = new Pose3d(); Pose3d turretPose3d = new Pose3d(); - - public void updateViz() { - if (getLatestRobotToTurret().getValue() != null) { - shooterPose3d = - new Pose3d( - new Translation3d(), - new Rotation3d(0, 0, getLatestRobotToTurret().getValue().getRadians())); - hoodPose3d = - new Pose3d( - new Translation3d(), - new Rotation3d(0, 0, getLatestRobotToTurret().getValue().getRadians())); - } - ampPose3d = new Pose3d(new Translation3d(0.0, 0.0, this.elevatorHeightM), new Rotation3d()); - var climberRot = - MathUtil.interpolate( - 0.0, - -Math.PI / 2.0, - this.climberRotations - / (Constants.ClimberConstants.kForwardMaxPositionRotations - * Constants.kClimberConfig.unitToRotorRatio)); - climberPose3d = - new Pose3d(new Translation3d(0, 0.0, 0.15), new Rotation3d(0.0, (double) climberRot, 0.0)); - // model_0 is elevator - // model_1 is climber - // model_2 is hood plates - // model_3 
is shooter - // model_4 is turret - Logger.recordOutput( - "ComponentsPoseArray", - new Pose3d[] {ampPose3d, climberPose3d, hoodPose3d, shooterPose3d, turretPose3d}); - } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java index cb90753..8feac65 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java @@ -103,12 +103,6 @@ private void updateVision( if (!alreadyProcessedTimestamp && cameraSeesTarget) { if (!isTurretCamera && !Util.epsilonEquals(state.getElevatorHeight(), 0.0, 0.05)) return; Optional pinholeEstimate = Optional.empty(); // - processPinholeVisionEstimate( - pinholeObservations, - // - updateTimestamp, - // - isTurretCamera); Optional megatagEstimate = processMegatagPoseEstimate(cameraMegatagPoseEstimate, isTurretCamera); diff --git a/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java b/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java new file mode 100644 index 0000000..2647a0f --- /dev/null +++ b/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java @@ -0,0 +1,34 @@ +// Copyright (c) 2025 Az-FIRST +// http://github.com/AZ-First +// Copyright (c) 2024 FRC 254 +// https://github.com/team254 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.util; + +import com.ctre.phoenix6.configs.TalonFXConfiguration; + +public class ServoMotorSubsystemConfig { + public String name = "UNNAMED"; + public RobotDeviceId talonCANID; + public TalonFXConfiguration fxConfig = new TalonFXConfiguration(); + + // Ratio of rotor to units for this talon. rotor * by this ratio should + // be the units. + // <1 is reduction + public double unitToRotorRatio = 1.0; + public double kMinPositionUnits = Double.NEGATIVE_INFINITY; + public double kMaxPositionUnits = Double.POSITIVE_INFINITY; + + // Moment of Inertia (KgMetersSquared) for sim + public double momentOfInertia = 0.5; +} From d1e64fe07fe2f974d569f8660ec4252d88a58c97 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 19 May 2025 15:22:35 -0700 Subject: [PATCH 05/12] WIP: Adding SPAM's vision code new file: src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/Field.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java new file: src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java new file: src/main/java/frc/robot/util/TriConsumer.java --- .../vision/FRC180/CoralDetector.java | 56 ++ .../vision/FRC180/CoralDetectorReal.java | 200 +++++ .../vision/FRC180/CoralDetectorSim.java | 149 ++++ .../robot/subsystems/vision/FRC180/Field.java | 149 ++++ .../vision/FRC180/LEDSubsystem.java | 230 
++++++ .../vision/FRC180/ReefProximity.java | 61 ++ .../subsystems/vision/FRC180/VisionIO.java | 50 ++ .../vision/FRC180/VisionIOLimelight.java | 140 ++++ .../vision/FRC180/VisionIOPhoton.java | 49 ++ .../vision/FRC180/VisionSubsystem.java | 692 ++++++++++++++++++ .../subsystems/vision/FRC6995/Vision.java | 291 ++++++++ src/main/java/frc/robot/util/TriConsumer.java | 19 + 12 files changed, 2086 insertions(+) create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/Field.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java create mode 100644 src/main/java/frc/robot/util/TriConsumer.java diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java new file mode 100644 index 0000000..e494f1d --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java @@ -0,0 +1,56 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available 
in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC180; + +import com.pathplanner.lib.util.FlippingUtil; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.wpilibj.RobotState; +import frc.robot.Robot; +import frc.robot.util.LimelightHelpers.RawDetection; + +public interface CoralDetector { + + public static final double MARGIN = 0.1; + + public static final double AUTO_BLUE_X = 4.8; + public static final double AUTO_RED_X = 12.9; + + public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] detections); + + public default void reset() {} + ; + + public static boolean isValid(Pose2d coralPose) { + // Reject coral detections that are outside the field + boolean auto = RobotState.isAutonomous(); + double currMargin = auto ? 
MARGIN : 0; + if (coralPose.getX() < currMargin + || coralPose.getX() > FlippingUtil.fieldSizeX - currMargin + || coralPose.getY() < currMargin + || coralPose.getY() > FlippingUtil.fieldSizeY - currMargin) { + return false; + } + + if (auto) { + boolean blue = Robot.isBlue(); + if (blue && coralPose.getX() > AUTO_BLUE_X) { + return false; + } else if (!blue && coralPose.getX() < AUTO_RED_X) { + return false; + } + } + + return true; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java new file mode 100644 index 0000000..7fa1214 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java @@ -0,0 +1,200 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import static edu.wpi.first.units.Units.*; + +import edu.wpi.first.epilogue.Logged; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.interpolation.InterpolatingDoubleTreeMap; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.wpilibj.RobotState; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.util.LimelightHelpers.RawDetection; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; + +@Logged +public class CoralDetectorReal implements CoralDetector { + + private final InterpolatingDoubleTreeMap distanceMap; + private final List sortedDetections; + private final List algaeDetections; + private final Comparator detectionTYComparator; + private final Comparator detectionTXYComparator; + + // How close to the robot a detected coral has to be to be considered "close" (i.e. intakeable) + private static final double CLOSE_CORAL_DISTANCE = 0.6; + private static final double CLOSE_CORAL_TX = 10; + // How close two detected coral have to be to each other to be considered the same/close enough + // to allow switching without a timeout + private static final double SIMILAR_CORAL_THRESHOLD = 0.75; + + private static final double ALGAE_AVOID_THRESHOLD_DEGREES = 2; // 4.5; + + private double lastDetectionTime = 0; + private double lastDetectionDistance = 0; + private double lastDetectionTX = 0; + private double lastDetectionWidth = 0; + private double lastDetectionHeight = 0; + private double lastDetectionRatio = 0; + private Pose2d lastDetection = null; + + // Additional flags for viewing in logs + private boolean newCoralValue = false; + private boolean returningCloseDetection = false; + private boolean rejectionAlgae = false; + private boolean rejectionOutsideField = false; + + public CoralDetectorReal() { + distanceMap = new InterpolatingDoubleTreeMap(); + 
addDistance(-22.2, 18.5); + addDistance(-14.79, 24); + addDistance(-6.05, 32); + addDistance(5.44, 48); + addDistance(14.89, 72); + addDistance(20.82, 100); + addDistance(24.7, 132); + + sortedDetections = new ArrayList<>(); + algaeDetections = new ArrayList<>(); + detectionTYComparator = (a, b) -> Double.compare(a.tync, b.tync); + detectionTXYComparator = (a, b) -> Double.compare(tXYCombined(a), tXYCombined(b)); + } + + private void addDistance(double ty, double inches) { + distanceMap.put(ty, Inches.of(inches).in(Meters)); + } + + @Override + public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] detections) { + newCoralValue = false; + returningCloseDetection = false; + rejectionAlgae = false; + rejectionOutsideField = false; + sortedDetections.clear(); + algaeDetections.clear(); + + Pose2d recentLastDetection = getRecentLastDetection(); + if (robotPose == null || detections == null || detections.length == 0) { + return recentLastDetection; + } + + boolean auto = RobotState.isAutonomous(); + + // If the last detected coral was very close to the robot, wait a bit in case + // we're trying to intake it + if (recentLastDetection != null && auto && lastDetectionClose()) { + returningCloseDetection = true; + return recentLastDetection; + } + + for (RawDetection detection : detections) { + if (detection.classId == 1) { + sortedDetections.add(detection); + } else { + algaeDetections.add(detection); + } + } + if (auto) { + sortedDetections.sort(detectionTYComparator); + } else { + sortedDetections.sort(detectionTXYComparator); + } + + Pose2d basePose = robotPose.transformBy(VisionSubsystem.ROBOT_TO_INTAKE_CAMERA_2D); + + for (RawDetection detection : sortedDetections) { + double degrees = detection.txnc; + + if (auto) { + // Skip any coral that are close to an algae on the X axis - these are likely lollipops + for (RawDetection algae : algaeDetections) { + if (Math.abs(degrees - algae.txnc) + < ALGAE_AVOID_THRESHOLD_DEGREES) { // && detection.tync < algae.tync) { + 
rejectionAlgae = true; + break; + } + } + if (rejectionAlgae) continue; + } + + double distanceMeters = distanceMap.get(detection.tync); + double radians = Units.degreesToRadians(degrees); + double yComponent = distanceMeters * Math.tan(radians); + Transform2d coralTransform = new Transform2d(distanceMeters, -yComponent, Rotation2d.kZero); + Pose2d coralPose = basePose.transformBy(coralTransform); + + if (!CoralDetector.isValid(coralPose)) { + rejectionOutsideField = true; + continue; + } + + double robotDist = coralPose.getTranslation().getDistance(robotPose.getTranslation()); + + lastDetection = coralPose; + lastDetectionTime = Timer.getFPGATimestamp(); + lastDetectionDistance = robotDist; + lastDetectionTX = detection.txnc; + lastDetectionWidth = width(detection); + lastDetectionHeight = height(detection); + lastDetectionRatio = lastDetectionWidth / lastDetectionHeight; + newCoralValue = true; + return coralPose; + } + + // If we didn't find any coral, return the last detection if it was very recent + return recentLastDetection; + } + + @Override + public void reset() { + lastDetection = null; + lastDetectionDistance = 0; + lastDetectionTime = 0; + } + + private Pose2d getRecentLastDetection() { + boolean lastClose = lastDetectionClose(); + boolean auto = RobotState.isAutonomous(); + + double timeoutSeconds = lastClose ? 
3 : 0.5; + if (auto && lastClose) timeoutSeconds = 1; + if (Timer.getFPGATimestamp() - lastDetectionTime < timeoutSeconds) { + return lastDetection; + } + return null; + } + + public boolean lastDetectionClose() { + if (lastDetection == null) return false; + + return lastDetectionDistance < CLOSE_CORAL_DISTANCE; + } + + private double tXYCombined(RawDetection detection) { + return detection.tync + Math.abs(detection.txnc * 0.75); + } + + private double width(RawDetection detection) { + return Math.abs(detection.corner0_X - detection.corner1_X); + } + + private double height(RawDetection detection) { + return Math.abs(detection.corner0_Y - detection.corner2_Y); + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java new file mode 100644 index 0000000..f26fadc --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java @@ -0,0 +1,149 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
package frc.robot.subsystems.vision.FRC180;

import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Transform2d;
import edu.wpi.first.math.geometry.Translation2d;
import frc.robot.Robot;
import frc.robot.RobotContainer;
import frc.robot.util.LimelightHelpers.RawDetection;
import frc.robot.util.simulation.SimLogic;
import org.ironmaple.simulation.SimulatedArena;

/**
 * A coral detector that returns poses based off simulation, ignoring any detections passed to it.
 *
 * <p>Two modes are supported: a MapleSim-backed mode that searches the simulated arena for the
 * closest coral game piece in view, and a basic mode that only reports the alliance's
 * human-player-station coral pose from {@link SimLogic}.
 */
public class CoralDetectorSim implements CoralDetector {

  /** Maximum distance (meters) at which a coral is considered detectable. */
  private final double detectionDistance;

  /** When true, corals must also fall inside the simulated camera's field of view. */
  private final boolean useFOV;

  private double fovDegrees = 82; // Limelight 4 FOV

  /**
   * Create a new CoralDetectorSim with the given detection distance.
   *
   * @param detectionMeters The maximum distance in meters the robot can be from the coral and still
   *     detect it.
   * @param useFOV Whether to use the robot's field of view to determine if the coral is in sight.
   */
  public CoralDetectorSim(double detectionMeters, boolean useFOV) {
    detectionDistance = detectionMeters;
    this.useFOV = useFOV;
  }

  /**
   * Returns the pose of a detectable coral, or {@code null} if none is in range/view.
   *
   * @param robotPose current robot pose; {@code null} yields {@code null}
   * @param _detections ignored — simulation does not use real camera detections
   */
  @Override
  public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] _detections) {
    if (robotPose == null) return null;

    if (RobotContainer.MAPLESIM) {
      return getCoralPoseMapleSim(robotPose);
    } else {
      return getCoralPoseBasic(robotPose);
    }
  }

  /** Reports the alliance's human-player-station coral if in view (or in range when FOV is off). */
  private Pose2d getCoralPoseBasic(Pose2d robotPose) {
    Pose2d coralPose = Robot.isBlue() ? SimLogic.blueHPCoralPose : SimLogic.redHPCoralPose;

    if (useFOV && poseWithinPOV(robotPose, coralPose)) {
      return coralPose;
    } else if (!useFOV
        && robotPose.getTranslation().getDistance(coralPose.getTranslation())
            <= detectionDistance) {
      return coralPose;
    }

    return null;
  }

  /** Selects the closest coral to the robot within its field of view. */
  private Pose2d getCoralPoseMapleSim(Pose2d robotPose) {
    Pose3d[] corals = SimulatedArena.getInstance().getGamePiecesArrayByType("Coral");
    Pose2d bestCoral = null;
    double bestDistance = Double.MAX_VALUE;
    for (int i = 0; i < corals.length; i++) {
      // Filter out upright coral (large pitch means the piece is standing on end)
      if (corals[i].getRotation().getY() > 0.2) continue;

      Pose2d coral = corals[i].toPose2d();
      if (poseWithinPOV(robotPose, coral)) {
        double distance = robotPose.getTranslation().getDistance(coral.getTranslation());
        if (distance < bestDistance) {
          if (!CoralDetector.isValid(coral)) {
            continue;
          }

          bestCoral = coral;
          bestDistance = distance;
        }
      }
    }

    if (bestCoral == null) return null;
    // Add up to +/-10 cm of translational noise to mimic real detection jitter
    double noise = 0.1;
    return bestCoral.transformBy(
        new Transform2d(randomRange(-noise, noise), randomRange(-noise, noise), Rotation2d.kZero));
  }

  /** Returns a uniformly distributed random value in [min, max). */
  private double randomRange(double min, double max) {
    return Math.random() * (max - min) + min;
  }

  // =========== Helper methods to calculate robot POV ===========

  /**
   * Returns whether {@code coralPose} lies inside a triangular approximation of the camera's
   * field of view. NOTE(review): the robot heading is rotated 180 degrees here — presumably the
   * detection camera faces backward; confirm against the camera mounting.
   */
  private boolean poseWithinPOV(Pose2d robotPose, Pose2d coralPose) {
    // Define the triangle vertices based on the robot's pose
    Translation2d robotTranslation = robotPose.getTranslation();
    Rotation2d robotRotation = robotPose.getRotation().rotateBy(Rotation2d.k180deg);

    // Define the field of view (FOV) angle and distance
    double fovAngle = Math.toRadians(fovDegrees);
    double fovDistance = detectionDistance;

    // Calculate the vertices of the triangle: camera origin plus the two far corners of the FOV
    Translation2d vertex1 = robotTranslation;
    Translation2d vertex2 =
        robotTranslation.plus(
            new Translation2d(fovDistance, fovDistance * Math.tan(fovAngle / 2))
                .rotateBy(robotRotation));
    Translation2d vertex3 =
        robotTranslation.plus(
            new Translation2d(fovDistance, -fovDistance * Math.tan(fovAngle / 2))
                .rotateBy(robotRotation));

    // Check if the coralPose is within the triangle
    return isPointInTriangle(coralPose.getTranslation(), vertex1, vertex2, vertex3);
  }

  /** Standard same-side (sign-of-cross-product) point-in-triangle test. */
  private boolean isPointInTriangle(
      Translation2d pt, Translation2d v1, Translation2d v2, Translation2d v3) {
    double d1 = sign(pt, v1, v2);
    double d2 = sign(pt, v2, v3);
    double d3 = sign(pt, v3, v1);

    boolean hasNeg = (d1 < 0) || (d2 < 0) || (d3 < 0);
    boolean hasPos = (d1 > 0) || (d2 > 0) || (d3 > 0);

    // Inside (or on an edge) iff all cross products share a sign
    return !(hasNeg && hasPos);
  }

  /** 2D cross product of (p1 - p3) x (p2 - p3); its sign tells which side of edge p2->p3 p1 is on. */
  private double sign(Translation2d p1, Translation2d p2, Translation2d p3) {
    return (p1.getX() - p3.getX()) * (p2.getY() - p3.getY())
        - (p2.getX() - p3.getX()) * (p1.getY() - p3.getY());
  }
}

// ==================== File: frc/robot/subsystems/vision/FRC180/Field.java ====================

// Copyright (c) 2025 FRC 180
// https://github.com/frc180
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// version 3 as published by the Free Software Foundation or
// available in the root directory of this project.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

package frc.robot.subsystems.vision.FRC180;

import static edu.wpi.first.units.Units.*;

import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.units.measure.Distance;
import frc.robot.RobotContainer;
import java.util.HashMap;
import java.util.Optional;

/**
 * Static field-geometry helpers: which reef AprilTags currently hold algae, at what level, and
 * where those algae sit in 3D field space.
 *
 * <p>{@link #init()} must be called before any of the algae queries are used; the maps are not
 * initialized at class-load time.
 */
public abstract class Field {

  // Map of reef AprilTag ID -> algae level (2 or 3). ALGAE_HEIGHTS is the immutable-by-convention
  // starting layout; CURRENT_ALGAE_HEIGHTS tracks algae removed during a match.
  private static HashMap<Integer, Integer> ALGAE_HEIGHTS, CURRENT_ALGAE_HEIGHTS;

  // Lazily computed cache of algae poses; invalidated whenever CURRENT_ALGAE_HEIGHTS changes.
  private static Pose3d[] reefAlgaePoses = null;

  // Game Manual Page 24 - "...pipes on the same face are 1 ft. 1 in. (~33 cm) apart (center to
  // center)."
  public static final Distance REEF_BRANCH_SEPARATION = Inches.of(13);

  // Game Manual Page 33 - "A CORAL is a 11 ⅞ in. long (~30 cm) piece of..."
  public static final Distance CORAL_LENGTH = Centimeters.of(30);

  /** Populates the algae-height maps for both alliances. Call once before using the queries. */
  public static void init() {
    ALGAE_HEIGHTS = new HashMap<>();
    // Red reef
    ALGAE_HEIGHTS.put(6, 2);
    ALGAE_HEIGHTS.put(7, 3);
    ALGAE_HEIGHTS.put(8, 2);
    ALGAE_HEIGHTS.put(9, 3);
    ALGAE_HEIGHTS.put(10, 2);
    ALGAE_HEIGHTS.put(11, 3);

    // Blue reef mirrors the red layout tag-for-tag
    for (int i = 6; i <= 11; i++) {
      ALGAE_HEIGHTS.put(redReefTagToBlue(i), ALGAE_HEIGHTS.get(i));
    }

    CURRENT_ALGAE_HEIGHTS = new HashMap<>(ALGAE_HEIGHTS);
  }

  /**
   * Returns the field-space poses of all algae still on the reef, computed from the AprilTag
   * layout and cached until the algae set changes.
   *
   * <p>NOTE(review): iteration order of CURRENT_ALGAE_HEIGHTS (a HashMap) defines the array
   * index order; {@link #getReefAlgaePose(int)} relies on iterating the same map in the same
   * order, which holds only while the map is unmodified between the two calls.
   */
  public static Pose3d[] getReefAlgaePoses() {
    if (reefAlgaePoses == null) {
      AprilTagFieldLayout layout = RobotContainer.instance.vision.aprilTagFieldLayout;
      reefAlgaePoses = new Pose3d[CURRENT_ALGAE_HEIGHTS.size()];
      int index = 0;
      for (var entry : CURRENT_ALGAE_HEIGHTS.entrySet()) {
        int tag = entry.getKey();
        int level = entry.getValue();
        Optional<Pose3d> optionalTagPose = layout.getTagPose(tag);
        if (optionalTagPose.isPresent()) {
          Pose3d tagPose = optionalTagPose.get();
          // Offset 15 cm into the reef from the tag face; height depends on algae level
          reefAlgaePoses[index] =
              tagPose.transformBy(
                  new Transform3d(-0.15, 0, level == 3 ? 1 : 0.6, Rotation3d.kZero));
        } else {
          reefAlgaePoses[index] = Pose3d.kZero;
        }
        index += 1;
      }
    }
    return reefAlgaePoses;
  }

  /** Returns whether the given reef tag still has an algae on it. */
  public static boolean hasReefAlgae(int tag) {
    return CURRENT_ALGAE_HEIGHTS.containsKey(tag);
  }

  /** Returns the pose of the algae at the given reef tag, or {@code null} if none remains. */
  public static Pose3d getReefAlgaePose(int tag) {
    Pose3d[] poses = getReefAlgaePoses();
    int index = 0;
    // Walk the map in the same iteration order used to build the pose array
    for (var entry : CURRENT_ALGAE_HEIGHTS.entrySet()) {
      int algaeTag = entry.getKey();
      if (tag == algaeTag) {
        return poses[index];
      }
      index++;
    }
    return null;
  }

  /** Marks the algae at the given tag as removed from the reef. */
  public static void removeReefAlgae(int tag) {
    CURRENT_ALGAE_HEIGHTS.remove(tag);
    // Reset reefAlgaePoses so it will be recalculated next time it is requested
    reefAlgaePoses = null;
  }

  /** Restores the starting algae layout (e.g. between simulation runs). */
  public static void resetReefAlgae() {
    CURRENT_ALGAE_HEIGHTS = new HashMap<>(ALGAE_HEIGHTS);
    reefAlgaePoses = null;
  }

  /** Returns the starting algae level (2 or 3) for the tag, or -1 if the tag holds no algae. */
  public static int getAlgaeLevel(int tag) {
    Integer level = ALGAE_HEIGHTS.get(tag);
    return level != null ? level : -1;
  }

  /** Maps a blue-alliance reef tag ID to its red-alliance mirror, or -1 if not a reef tag. */
  public static int blueReefTagToRed(int blueTag) {
    switch (blueTag) {
      case 17:
        return 8;
      case 18:
        return 7;
      case 19:
        return 6;
      case 20:
        return 11;
      case 21:
        return 10;
      case 22:
        return 9;
      default:
        return -1;
    }
  }

  /** Maps a red-alliance reef tag ID to its blue-alliance mirror, or -1 if not a reef tag. */
  public static int redReefTagToBlue(int redTag) {
    switch (redTag) {
      case 6:
        return 19;
      case 7:
        return 18;
      case 8:
        return 17;
      case 9:
        return 22;
      case 10:
        return 21;
      case 11:
        return 20;
      default:
        return -1;
    }
  }
}

// ==================== File: frc/robot/subsystems/vision/FRC180/LEDSubsystem.java ====================

// Copyright (c) 2025 FRC 180
// https://github.com/frc180
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// version 3 as published by the
Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC180; + +import com.ctre.phoenix.ErrorCode; +import com.ctre.phoenix.led.Animation; +import com.ctre.phoenix.led.CANdle; +import com.ctre.phoenix.led.CANdle.LEDStripType; +import com.ctre.phoenix.led.CANdle.VBatOutputMode; +import com.ctre.phoenix.led.CANdleConfiguration; +import com.ctre.phoenix.led.ColorFlowAnimation; +import com.ctre.phoenix.led.ColorFlowAnimation.Direction; +import com.ctre.phoenix.led.LarsonAnimation; +import com.ctre.phoenix.led.LarsonAnimation.BounceMode; +import com.ctre.phoenix.led.RainbowAnimation; +import com.ctre.phoenix.led.SingleFadeAnimation; +import com.ctre.phoenix.led.StrobeAnimation; +import com.ctre.phoenix.led.TwinkleAnimation; +import com.ctre.phoenix.led.TwinkleAnimation.TwinklePercent; +import com.ctre.phoenix.led.TwinkleOffAnimation; +import com.ctre.phoenix.led.TwinkleOffAnimation.TwinkleOffPercent; +import edu.wpi.first.wpilibj2.command.Command; +import edu.wpi.first.wpilibj2.command.SubsystemBase; +import frc.robot.Constants; + +public class LEDSubsystem extends SubsystemBase { + + public final LEDColor RED = new LEDColor(50, 0, 0, 255); + public final LEDColor BLUE = new LEDColor(21, 46, 150, 255); // navy blue + public final LEDColor GREEN = new LEDColor(0, 255, 0, 255); + public final LEDColor YELLOW = new LEDColor(255, 255, 0, 255); + public final LEDColor WHITE = new LEDColor(255, 255, 255, 255); + public final LEDColor ALGAE = new LEDColor(0, 255, 30, 255); + public final LEDColor PURPLE = new LEDColor(163, 49, 196, 255); + + public final RainbowAnimation rainbow; + public final SingleFadeAnimation blueFade, + redFade, + yellowFade, + whiteFade, + 
yellowFadeFast, + purpleFade; + public final TwinkleAnimation blueTwinkle, redTwinkle; + public final ColorFlowAnimation blueFlow, redFlow, yellowFlow; + public final ColorFlowAnimation blueLeftFlow, blueRightFlow; + public final LarsonAnimation yellowLarson; + public final StrobeAnimation greenStrobe; + + private final int STRIP_LENGTH = 49; + private final int STRIP_2_LENGTH = 47; + private final int NUM_LEDS = 8 + STRIP_LENGTH + STRIP_2_LENGTH; + private final int STRIP_OFFSET = 0; + private final int NO_CANDLE_OFFSET = 8; + + private final CANdle candle; + + private Animation currentAnimation = null; + private Animation currentAnimation2 = null; + private LEDColor currentColor = null; + private LEDColor currentSplitColor = null; + + public LEDSubsystem() { + CANdleConfiguration config = new CANdleConfiguration(); + config.disableWhenLOS = false; + config.statusLedOffWhenActive = true; + config.brightnessScalar = 0.2; + config.v5Enabled = true; + config.stripType = LEDStripType.GRB; + config.vBatOutputMode = VBatOutputMode.Off; + candle = new CANdle(Constants.CANDLE, Constants.CANIVORE); + candle.configAllSettings(config); + + rainbow = new RainbowAnimation(1, 1, NUM_LEDS, false, STRIP_OFFSET); + + blueFade = fade(BLUE, 0.5); + redFade = fade(RED, 0.5); + yellowFade = fade(YELLOW, 0.5); + purpleFade = fade(PURPLE, 1); + whiteFade = fade(WHITE, 1); + yellowFadeFast = fade(YELLOW, 1); + + blueTwinkle = twinkle(BLUE, 0.25, TwinklePercent.Percent64); + redTwinkle = twinkle(RED, 0.25, TwinklePercent.Percent64); + + blueFlow = colorFlow(BLUE, 0.7, Direction.Forward); + redFlow = colorFlow(RED, 0.7, Direction.Forward); + yellowFlow = colorFlow(YELLOW, 0.5, Direction.Forward); + yellowLarson = larson(YELLOW, 0.5, 5, BounceMode.Front); + + blueLeftFlow = colorFlow(BLUE, 0.7, Direction.Forward, STRIP_LENGTH, NO_CANDLE_OFFSET); + blueRightFlow = + colorFlow(BLUE, 0.7, Direction.Backward, STRIP_2_LENGTH, NO_CANDLE_OFFSET + STRIP_LENGTH); + + greenStrobe = strobe(GREEN, 
0.25); + } + + public Command animate(Animation animation) { + return run(() -> setAnimation(animation)); + } + + public Command color(LEDColor color) { + return run(() -> setColor(color)); + } + + public void setAnimation(Animation animation) { + setAnimation(animation, 0); + } + + public void setAnimation(Animation animation, int slot) { + if (animation != currentAnimation) { + // clearAnimations(); + ErrorCode code = candle.animate(animation, slot); + if (code == ErrorCode.OK) { + clearState(); + currentAnimation = animation; + } + } + } + + public void setDualAnimation(Animation animation1, Animation animation2) { + if (currentAnimation != animation1 || currentAnimation2 != animation2) { + clearAnimations(); + ErrorCode code1 = candle.animate(animation1, 0); + ErrorCode code2 = candle.animate(animation2, 1); + + if (code1 == ErrorCode.OK && code2 == ErrorCode.OK) { + clearState(); + currentAnimation = animation1; + currentAnimation2 = animation2; + } + } + } + + public void setColor(LEDColor color) { + if (color != currentColor) { + clearAnimations(); + ErrorCode code = candle.setLEDs(color.r, color.g, color.b, color.w, STRIP_OFFSET, NUM_LEDS); + if (code == ErrorCode.OK) { + clearState(); + currentColor = color; + } + } + } + + public void setSplitColor(LEDColor top, LEDColor bottom) { + if (top != currentColor || bottom != currentSplitColor) { + clearAnimations(); + ErrorCode code1 = candle.setLEDs(bottom.r, bottom.g, bottom.b, bottom.w, 0, 24); + ErrorCode code2 = candle.setLEDs(top.r, top.g, top.b, top.w, 24, 25); + + ErrorCode code3 = candle.setLEDs(top.r, top.g, top.b, top.w, 49, 23); + ErrorCode code4 = candle.setLEDs(bottom.r, bottom.g, bottom.b, bottom.w, 72, 24); + + if (code1 == ErrorCode.OK + && code2 == ErrorCode.OK + && code3 == ErrorCode.OK + && code4 == ErrorCode.OK) { + clearState(); + currentColor = top; + currentSplitColor = bottom; + } + } + } + + private void clearAnimations() { + candle.clearAnimation(0); + } + + private void clearState() 
{ + currentAnimation = null; + currentAnimation2 = null; + currentColor = null; + currentSplitColor = null; + } + + public SingleFadeAnimation fade(LEDColor color, double speed) { + return new SingleFadeAnimation( + color.r, color.g, color.b, color.w, speed, NUM_LEDS, STRIP_OFFSET); + } + + public TwinkleAnimation twinkle(LEDColor color, double speed, TwinklePercent percent) { + return new TwinkleAnimation( + color.r, color.g, color.b, color.w, speed, NUM_LEDS, percent, STRIP_OFFSET); + } + + public TwinkleOffAnimation twinkleOff(LEDColor color, double speed, TwinkleOffPercent percent) { + return new TwinkleOffAnimation( + color.r, color.g, color.b, color.w, speed, NUM_LEDS, percent, STRIP_OFFSET); + } + + public ColorFlowAnimation colorFlow(LEDColor color, double speed, Direction direction) { + return colorFlow(color, speed, direction, NUM_LEDS, STRIP_OFFSET); + } + + public ColorFlowAnimation colorFlow( + LEDColor color, double speed, Direction direction, int numLeds, int ledOffset) { + return new ColorFlowAnimation( + color.r, color.g, color.b, color.w, speed, numLeds, direction, ledOffset); + } + + public LarsonAnimation larson(LEDColor color, double speed, int size, BounceMode mode) { + return new LarsonAnimation( + color.r, + color.g, + color.b, + color.w, + speed, + NUM_LEDS - NO_CANDLE_OFFSET, + mode, + size, + NO_CANDLE_OFFSET); + } + + public StrobeAnimation strobe(LEDColor color, double speed) { + return new StrobeAnimation(color.r, color.g, color.b, color.w, speed, NUM_LEDS, STRIP_OFFSET); + } + + private record LEDColor(int r, int g, int b, int w) {} +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java b/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java new file mode 100644 index 0000000..1c9f5fd --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java @@ -0,0 +1,61 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you 
// can redistribute it and/or
// modify it under the terms of the GNU General Public License
// version 3 as published by the Free Software Foundation or
// available in the root directory of this project.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

package frc.robot.subsystems.vision.FRC180;

import edu.wpi.first.math.geometry.Pose2d;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;

/**
 * This class is used to determine the closest reef scoring position to the robot's current
 * position.
 */
public class ReefProximity {

  // All reef scoring positions, keyed by AprilTag ID, left and right branches combined.
  final List<Entry<Integer, Pose2d>> reefList;

  /**
   * @param leftReefHashMap scoring poses for the left branch of each reef face, keyed by tag ID
   * @param rightReefHashMap scoring poses for the right branch of each reef face, keyed by tag ID
   */
  public ReefProximity(
      HashMap<Integer, Pose2d> leftReefHashMap, HashMap<Integer, Pose2d> rightReefHashMap) {
    reefList = new ArrayList<Entry<Integer, Pose2d>>();
    leftReefHashMap.entrySet().forEach(reefList::add);
    rightReefHashMap.entrySet().forEach(reefList::add);
  }

  /**
   * Returns the (tag ID, pose) entry of the closest reef scoring position on the given alliance's
   * reef, or {@code null} if none matches.
   */
  public Entry<Integer, Pose2d> closestReefPose(Pose2d position, boolean blueAlliance) {
    return closestReefPose(
        position, blueAlliance ? VisionSubsystem.blueReefTags : VisionSubsystem.redReefTags);
  }

  /**
   * Returns the (tag ID, pose) entry of the closest reef scoring position whose tag is in
   * {@code tagOptions}, or {@code null} if none matches.
   */
  public Entry<Integer, Pose2d> closestReefPose(Pose2d position, List<Integer> tagOptions) {
    double currentDistance = Double.MAX_VALUE;
    Entry<Integer, Pose2d> closest = null;

    for (var entry : reefList) {
      // Ensure the reef tag is in the list of tags we are looking for
      if (!tagOptions.contains(entry.getKey())) {
        continue;
      }

      double distance = position.getTranslation().getDistance(entry.getValue().getTranslation());
      if (distance < currentDistance) {
        currentDistance = distance;
        closest = entry;
      }
    }

    return closest;
  }
}

// ==================== File: frc/robot/subsystems/vision/FRC180/VisionIO.java ====================

// Copyright (c) 2025 FRC 180
// https://github.com/frc180
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// version 3 as published by the Free Software Foundation or
// available in the root directory of this project.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
+ +package frc.robot.subsystems.vision.FRC180; + +import edu.wpi.first.epilogue.Logged; +import frc.robot.util.LimelightHelpers.PoseEstimate; +import frc.robot.util.LimelightHelpers.RawDetection; +import frc.robot.util.LimelightHelpers.RawFiducial; + +@Logged +public interface VisionIO { + + @Logged + public class VisionIOInputs { + final RawFiducial[] emptyFiducials = new RawFiducial[0]; + final RawDetection[] emptyDetections = new RawDetection[0]; + + boolean scoringCameraConnected = false; + PoseEstimate scoringPoseEstimate = null; + RawFiducial[] scoringFiducials = emptyFiducials; + double scoringTimestamp = 0.0; + double scoringCPUTemp = 0; + double scoringTemp = 0; + + boolean frontCameraConnected = false; + PoseEstimate frontPoseEstimate = null; + PoseEstimate frontPoseEstimateMT2 = null; + RawFiducial[] frontFiducials = emptyFiducials; + double frontTemp = 0; + + boolean backCameraConnected = false; + RawDetection[] backDetections = emptyDetections; + double backTimestamp = 0.0; + } + + public void update(VisionIOInputs inputs); + + public default void simulationPeriodic() {} +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java new file mode 100644 index 0000000..fc1a6f2 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java @@ -0,0 +1,140 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import com.spamrobotics.vision.LimelightStatus; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.Robot; +import frc.robot.RobotContainer; +import frc.robot.util.LimelightHelpers; +import frc.robot.util.LimelightHelpers.PoseEstimate; + +public class VisionIOLimelight implements VisionIO { + + private static final String SCORING_LIMELIGHT = "limelight"; + private static final String FRONT_LIMEIGHT = "limelight-front"; + private static final String BACK_LIMEIGHT = "limelight-back"; + + private final LimelightStatus scoringLimelightStatus; + private final LimelightStatus frontLimelightStatus; + private final LimelightStatus backLimelightStatus; + private final PoseEstimate simPoseEstimate = new PoseEstimate(); + + private final double[] blueReefTags; + private final double[] redReefTags; + private final double[] allReefTags; + + private double lastSettingsUpdate = -1; + + public VisionIOLimelight() { + scoringLimelightStatus = new LimelightStatus(SCORING_LIMELIGHT); + frontLimelightStatus = new LimelightStatus(FRONT_LIMEIGHT); + backLimelightStatus = new LimelightStatus(BACK_LIMEIGHT); + + blueReefTags = new double[VisionSubsystem.blueReefTags.size()]; + for (int i = 0; i < blueReefTags.length; i++) { + blueReefTags[i] = VisionSubsystem.blueReefTags.get(i); + } + + redReefTags = new double[VisionSubsystem.redReefTags.size()]; + for (int i = 0; i < redReefTags.length; i++) { + redReefTags[i] = VisionSubsystem.redReefTags.get(i); + } + + allReefTags = new double[VisionSubsystem.allReefTags.size()]; + for (int i = 0; i < allReefTags.length; i++) { + allReefTags[i] = VisionSubsystem.allReefTags.get(i); + } + } + + @Override + public void update(VisionIOInputs inputs) { + scoringLimelightStatus.update(); + frontLimelightStatus.update(); + backLimelightStatus.update(); + + inputs.scoringCameraConnected = 
scoringLimelightStatus.isConnected(); + inputs.frontCameraConnected = frontLimelightStatus.isConnected(); + inputs.backCameraConnected = backLimelightStatus.isConnected(); + + double time = Timer.getFPGATimestamp(); + if (lastSettingsUpdate == -1 || time - lastSettingsUpdate > 5) { + setLimelightPosition(SCORING_LIMELIGHT, VisionSubsystem.ROBOT_TO_SCORING_CAMERA); + setLimelightPosition(FRONT_LIMEIGHT, VisionSubsystem.ROBOT_TO_FRONT_CAMERA); + + double[] validTags = allReefTags; + setValidTags(SCORING_LIMELIGHT, validTags); + setValidTags(FRONT_LIMEIGHT, validTags); + + lastSettingsUpdate = time; + } + + inputs.scoringFiducials = LimelightHelpers.getRawFiducials(SCORING_LIMELIGHT); + inputs.frontFiducials = LimelightHelpers.getRawFiducials(FRONT_LIMEIGHT); + inputs.backDetections = LimelightHelpers.getRawDetections(BACK_LIMEIGHT); + if (inputs.backCameraConnected) { + inputs.backTimestamp = Timer.getFPGATimestamp() - getLatencySeconds(BACK_LIMEIGHT); + } + + if (Robot.isSimulation() && RobotContainer.MAPLESIM) { + inputs.scoringPoseEstimate = simPoseEstimate; + inputs.frontPoseEstimate = simPoseEstimate; + } else { + inputs.scoringPoseEstimate = LimelightHelpers.getBotPoseEstimate_wpiBlue(SCORING_LIMELIGHT); + inputs.frontPoseEstimate = LimelightHelpers.getBotPoseEstimate_wpiBlue(FRONT_LIMEIGHT); + } + + if (inputs.scoringPoseEstimate != null) { + inputs.scoringTimestamp = inputs.scoringPoseEstimate.timestampSeconds; + } + } + + @Override + public void simulationPeriodic() { + // If we're running in simulation, we can feed constant perfect vision data to the robot + // to stop the odometry from diverging from the real robot position if we're using a physics + // simulation (like MapleSim). + // To simulate realistic vision input using Apriltags, use {@link VisionIOPhoton} instead. 
+ simPoseEstimate.pose = RobotContainer.instance.drivetrain.getSimPose(); + simPoseEstimate.timestampSeconds = Timer.getFPGATimestamp(); + simPoseEstimate.tagCount = 1; + simPoseEstimate.avgTagDist = 2; + simPoseEstimate.avgTagArea = 99; + } + + private double getLatencySeconds(String limelightName) { + double latency = + LimelightHelpers.getLatency_Capture(limelightName) + + LimelightHelpers.getLatency_Pipeline(limelightName); + return latency * 0.001; + } + + private void setLimelightPosition(String limelightName, Transform3d transform) { + LimelightHelpers.setCameraPose_RobotSpace( + limelightName, + transform.getX(), // forward + -transform.getY(), // side + transform.getZ(), // up + -Units.radiansToDegrees(transform.getRotation().getX()), // roll + -Units.radiansToDegrees(transform.getRotation().getY()), // pitch + -Units.radiansToDegrees(transform.getRotation().getZ()) // yaw + ); + } + + private void setValidTags(String limelightName, double[] validTags) { + LimelightHelpers.setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validTags); + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java new file mode 100644 index 0000000..e75dc03 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java @@ -0,0 +1,49 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import com.spamrobotics.vision.SimCamera; +import edu.wpi.first.apriltag.AprilTagFieldLayout; + +public class VisionIOPhoton implements VisionIO { + final SimCamera scoringCamera, frontCamera; + + public VisionIOPhoton(AprilTagFieldLayout apriltagLayout) { + scoringCamera = + new SimCamera("scoring", VisionSubsystem.ROBOT_TO_SCORING_CAMERA, apriltagLayout); + frontCamera = new SimCamera("front", VisionSubsystem.ROBOT_TO_FRONT_CAMERA, apriltagLayout); + } + + @Override + public void update(VisionIOInputs inputs) { + inputs.scoringCameraConnected = true; + inputs.frontCameraConnected = true; + inputs.backCameraConnected = true; + + inputs.scoringPoseEstimate = scoringCamera.getPoseEstimate(); + if (inputs.scoringPoseEstimate != null) { + inputs.scoringTimestamp = inputs.scoringPoseEstimate.timestampSeconds; + // We're not fully simulating the other camera, but we can use the scoring camera's timestamp + inputs.backTimestamp = inputs.scoringTimestamp; + } + inputs.scoringFiducials = scoringCamera.getRawFiducials(); + inputs.frontPoseEstimate = frontCamera.getPoseEstimate(); + } + + @Override + public void simulationPeriodic() { + scoringCamera.update(); + frontCamera.update(); + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java new file mode 100644 index 0000000..da554f4 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java @@ -0,0 +1,692 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC180; + +import static edu.wpi.first.units.Units.*; + +import com.ctre.phoenix6.Utils; +import com.pathplanner.lib.util.FlippingUtil; +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.apriltag.AprilTagFields; +import edu.wpi.first.epilogue.Logged; +import edu.wpi.first.epilogue.NotLogged; +import edu.wpi.first.math.MatBuilder; +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.Nat; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.units.measure.Distance; +import edu.wpi.first.wpilibj.Alert; +import edu.wpi.first.wpilibj.Alert.AlertType; +import edu.wpi.first.wpilibj.Timer; +import edu.wpi.first.wpilibj2.command.Command; +import edu.wpi.first.wpilibj2.command.Commands; +import edu.wpi.first.wpilibj2.command.SubsystemBase; +import edu.wpi.first.wpilibj2.command.button.Trigger; +import frc.robot.Robot; +import frc.robot.RobotContainer; +import frc.robot.subsystems.vision.VisionIO.VisionIOInputs; +import frc.robot.util.LimelightHelpers.PoseEstimate; +import frc.robot.util.LimelightHelpers.RawFiducial; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; + +@Logged +public 
class VisionSubsystem extends SubsystemBase { + + /** + * The source of a pose estimate, used to determine the standard deviation of the pose estimate + * (i.e. how much we trust the pose estimate). The lower the number, the more we trust the pose + * estimate, and the more it'll affect the robot's position. + */ + enum PoseEstimateSource { + SCORING_CAMERA(0.025), + FRONT_CAMERA(0.15), + BACK_CAMERA(0.3), + NONE(99); + + private Matrix stdDev; + + PoseEstimateSource(double dev) { + this(dev, dev, dev); + } + + PoseEstimateSource(double xDev, double yDev, double thetaDev) { + stdDev = MatBuilder.fill(Nat.N3(), Nat.N1(), xDev, yDev, thetaDev); + } + } + + private static final List redTags = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11); + private static final List blueTags = List.of(12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22); + public static final List redReefTags = List.of(6, 7, 8, 9, 10, 11); + public static final List blueReefTags = List.of(17, 18, 19, 20, 21, 22); + public static final List allReefTags = new ArrayList<>(); + + // Used to correct for horizontal offset of tags on the field + private static final Map TAG_OFFSETS = + Map.of( + // 6, Inches.of(24) + ); + + public static final Transform3d ROBOT_TO_SCORING_CAMERA = + new Transform3d( + Inches.of(9.757).in(Meters), // forward + Inches.of(-7.2).in(Meters), // right + Inches.of(31.296).in(Meters), + new Rotation3d(0, Units.degreesToRadians(50), 0) // downward tilt + ); + + // ORIGINAL FRONT CAMERA MOUNT (Orlando & South Florida) + public static final Transform3d ROBOT_TO_FRONT_CAMERA = + new Transform3d( + Inches.of(13.998 + 3.5 - 1).in(Meters), // forward + Inches.of(-7 + 1 - 1).in(Meters), // right + Inches.of(6.767).in(Meters), + new Rotation3d(0, Units.degreesToRadians(-15), 0) // upward tilt + ); + + public static final Transform3d ROBOT_TO_INTAKE_CAMERA = + new Transform3d( + Inches.of(-8.7).in(Meters), // forward + Inches.of(-5.22).in(Meters), // left + Inches.of(30.049).in(Meters), + new Rotation3d( 
+ 0, Units.degreesToRadians(50), Units.degreesToRadians(180)) // downward tilt + ); + + public static final Transform2d ROBOT_TO_INTAKE_CAMERA_2D = + new Transform2d( + ROBOT_TO_INTAKE_CAMERA.getTranslation().getX(), + ROBOT_TO_INTAKE_CAMERA.getTranslation().getY(), + Rotation2d.k180deg); + public static final Transform2d INTAKE_CAMERA_TO_ROBOT_2D = ROBOT_TO_INTAKE_CAMERA_2D.inverse(); + + private static final int RED_PROCESSOR_TAG = 3; + private static final int BLUE_PROCESSOR_TAG = 16; + + private static final double BAD_CAMERA_TEMP = 55; + + private final VisionIO io; + private final VisionIOInputs inputs; + + private final ReefProximity reefProximity; + private final CoralDetector coralDetector; + private final CoralDetectorReal coralDetectorReal; + + private final Distance reefBackDistance = Meters.of(0.55).plus(Inches.of(0.5 - 1)); + private final Distance reefSideDistance = + Field.REEF_BRANCH_SEPARATION.div(2); // field measurement based + + private final Transform2d leftReefTransform = + new Transform2d( + reefBackDistance.in(Meters), -reefSideDistance.in(Meters), Rotation2d.k180deg); + private final Transform2d rightReefTransform = + new Transform2d(reefBackDistance.in(Meters), reefSideDistance.in(Meters), Rotation2d.k180deg); + private final Transform2d processorTransform = + new Transform2d(0.55, 0.0, Rotation2d.fromDegrees(90)); + + // The "front lay down" + private final double l1BackDistance = 0.62 - Inches.of(1.5).in(Meters); + + private final Transform2d leftL1ReefTransform = + new Transform2d(l1BackDistance, -Inches.of(12).in(Meters), Rotation2d.k180deg); + private final Transform2d rightL1ReefTransform = + new Transform2d(l1BackDistance, 0, Rotation2d.k180deg); + + private final Transform2d leftL1ReefRotation = new Transform2d(0, 0, Rotation2d.fromDegrees(0)); + private final Transform2d rightL1ReefRotation = new Transform2d(0, 0, Rotation2d.fromDegrees(0)); + + // 1.25 inches closer (forward) than standard, applied on top of left/right reef 
transforms + private final Transform2d algaeReefTransform = + new Transform2d(Inches.of(0.75 + 0.5 - 1).in(Meters), 0, Rotation2d.kZero); + + private final Transform2d algaeReefBackup = new Transform2d(-0.3, 0, Rotation2d.kZero); + + private final Pose2d redProcessorPose; + private final Pose2d blueProcessorPose; + + public final HashMap tagPoses2d = new HashMap<>(); + public final HashMap leftReefHashMap = new HashMap<>(); + public final HashMap rightReefHashMap = new HashMap<>(); + private final HashMap leftL1ReefHashMap = new HashMap<>(); + private final HashMap rightL1ReefHashMap = new HashMap<>(); + private final HashMap leftReefAlgaePoses = new HashMap<>(); + private final HashMap rightReefAlgaePoses = new HashMap<>(); + private final HashMap leftReefAlgaeBackupPoses = new HashMap<>(); + private final HashMap rightReefAlgaeBackupPoses = new HashMap<>(); + + public int bestReefID = -1; + public int lastReefID = -1; + + private Pose2d coralPose = Pose2d.kZero; + private boolean coralPoseValid = false; + private Pose2d coralPickupPose = null; + + private boolean closestReefPoseValid = false; + private Pose2d closestReefPose = Pose2d.kZero; + + private Pose2d singleTagPose = Pose2d.kZero; + + private PoseEstimate poseEstimate = null; + private PoseEstimateSource poseEstimateSource = PoseEstimateSource.NONE; + private boolean allowPoseEstimates = true; + private Pose3d scoringCameraPosition = Pose3d.kZero; + private Pose3d frontCameraPosition = Pose3d.kZero; + private Pose3d backCameraPosition = Pose3d.kZero; + private double poseEstimateDiffX, poseEstimateDiffY, poseEstimateDiffTheta; + private double lastPoseEstimateTime = 0; + + private Alert scoringCameraDisconnectedAlert = + new Alert("Scoring Camera disconnected!", AlertType.kError); + private Alert frontCameraDisconnectedAlert = + new Alert("Front Camera disconnected!", AlertType.kError); + private Alert backCameraDisconnectedAlert = + new Alert("Back Camera disconnected!", AlertType.kError); + + private 
Alert scoringCameraTempAlert = new Alert("", AlertType.kWarning); + private Alert frontCameraTempAlert = new Alert("", AlertType.kWarning); + + public AprilTagFieldLayout aprilTagFieldLayout; + public final Trigger poseEstimateDiffLow; + @NotLogged public final Trigger scoringCameraConnected; + public final Trigger hasPoseEstimates = new Trigger(() -> poseEstimate != null).debounce(0.5); + + @SuppressWarnings("unused") + public VisionSubsystem() { + try { + aprilTagFieldLayout = + AprilTagFieldLayout.loadFromResource(AprilTagFields.k2025ReefscapeWelded.m_resourceFile); + } catch (Exception e) { + aprilTagFieldLayout = null; + } + + inputs = new VisionIOInputs(); + if (Robot.isReal()) { + io = new VisionIOLimelight(); + } else { + io = new VisionIOPhoton(aprilTagFieldLayout); + } + + allReefTags.addAll(redReefTags); + allReefTags.addAll(blueReefTags); + + for (var allianceTags : List.of(redTags, blueTags)) { + for (int tagID : allianceTags) { + Optional pose3d = aprilTagFieldLayout.getTagPose(tagID); + if (pose3d.isPresent()) { + tagPoses2d.put(tagID, pose3d.get().toPose2d()); + } + } + } + + // Pre-calculate reef poses for all reef tags + for (int i : redReefTags) { + leftReefHashMap.put(i, calculateReefPose(i, true)); + rightReefHashMap.put(i, calculateReefPose(i, false)); + + leftL1ReefHashMap.put(i, calculateL1ReefPose(i, true)); + rightL1ReefHashMap.put(i, calculateL1ReefPose(i, false)); + } + for (int i : blueReefTags) { + leftReefHashMap.put(i, calculateReefPose(i, true)); + rightReefHashMap.put(i, calculateReefPose(i, false)); + + leftL1ReefHashMap.put(i, calculateL1ReefPose(i, true)); + rightL1ReefHashMap.put(i, calculateL1ReefPose(i, false)); + } + + // Pre-calculate reef algae poses + leftReefHashMap + .keySet() + .forEach( + i -> { + leftReefAlgaePoses.put(i, calculateReefAlgaePose(i, true)); + leftReefAlgaeBackupPoses.put(i, calculateReefAlgaeBackupPose(i, true)); + }); + rightReefHashMap + .keySet() + .forEach( + i -> { + rightReefAlgaePoses.put(i, 
calculateReefAlgaePose(i, false)); + rightReefAlgaeBackupPoses.put(i, calculateReefAlgaeBackupPose(i, false)); + }); + + // Pre-calculate processor poses + redProcessorPose = calculateProcessorPose(false); + blueProcessorPose = calculateProcessorPose(true); + + reefProximity = new ReefProximity(leftReefHashMap, rightReefHashMap); + coralDetectorReal = new CoralDetectorReal(); + coralDetector = Robot.isReal() ? coralDetectorReal : new CoralDetectorSim(4.0, true); + + double diffMeters = Inches.of(1.5).in(Meters); + poseEstimateDiffLow = + new Trigger( + () -> { + return Math.abs(poseEstimateDiffX) <= diffMeters + && Math.abs(poseEstimateDiffY) <= diffMeters + && Math.abs(poseEstimateDiffTheta) < 5; + }); + scoringCameraConnected = new Trigger(() -> inputs.scoringCameraConnected); + } + + boolean wasEnabled = false; + + Pose2d futureRobotPose; + + @Override + public void periodic() { + io.update(inputs); + + scoringCameraDisconnectedAlert.set(!inputs.scoringCameraConnected); + frontCameraDisconnectedAlert.set(!inputs.frontCameraConnected); + backCameraDisconnectedAlert.set(!inputs.backCameraConnected); + + // cameraTemperatureAlert(scoringCameraTempAlert, "Scoring", inputs.scoringTemp); + // cameraTemperatureAlert(frontCameraTempAlert, "Front", inputs.frontTemp); + + // If the scoring camera is connected, use its pose estimate + if (inputs.scoringCameraConnected) { + poseEstimate = validatePoseEstimate(inputs.scoringPoseEstimate); + poseEstimateSource = PoseEstimateSource.SCORING_CAMERA; + } + + boolean invalidScoring = + inputs.scoringPoseEstimate == null || inputs.scoringPoseEstimate.tagCount == 0; + + // If we didn't get a pose estimate from the scoring camera, use the front camera's pose + // estimate + if (poseEstimate == null && invalidScoring && inputs.frontCameraConnected) { + poseEstimateSource = PoseEstimateSource.FRONT_CAMERA; + + if (Robot.isReal()) { + poseEstimate = validatePoseEstimate(inputs.frontPoseEstimate); + } + // if (RobotState.isEnabled() && 
Robot.isSimulation()) { + // poseEstimate = validateMT2PoseEstimate(inputs.frontPoseEstimateMT2); + // } else { + // poseEstimate = validatePoseEstimate(inputs.frontPoseEstimate); + // } + } + + Pose2d robotPose = null; + if (poseEstimate != null) { + RobotContainer.instance.drivetrain.addVisionMeasurement( + poseEstimate.pose, + Utils.fpgaToCurrentTime(poseEstimate.timestampSeconds), + poseEstimateSource.stdDev); + // Calculate the difference between the updated robot pose and the scoring pose estimate, to + // get an idea + // of how closely we are tracking the robot's actual position + robotPose = RobotContainer.instance.drivetrain.getPose(); + // Note - the goal of this if statement is to stop "bad" data from non-scoring cameras from + // allowing + // a coral to be scored. Unknown if this is working as intended + if (poseEstimateSource == PoseEstimateSource.SCORING_CAMERA) { + poseEstimateDiffX = robotPose.getX() - poseEstimate.pose.getX(); + poseEstimateDiffY = robotPose.getY() - poseEstimate.pose.getY(); + poseEstimateDiffTheta = + robotPose.getRotation().getDegrees() - poseEstimate.pose.getRotation().getDegrees(); + lastPoseEstimateTime = Timer.getFPGATimestamp(); + } else { + poseEstimateDiffX = 99; + poseEstimateDiffY = 99; + poseEstimateDiffTheta = 99; + } + } else { + poseEstimateSource = PoseEstimateSource.NONE; + } + + if (robotPose == null) robotPose = RobotContainer.instance.drivetrain.getPose(); + + // calculate scoring camera in 3D space, for previewing in AdvantageScope + if (Robot.isSimulation()) { + Pose3d robotPose3d = new Pose3d(RobotContainer.instance.drivetrain.getSimPose()); + scoringCameraPosition = robotPose3d.transformBy(ROBOT_TO_SCORING_CAMERA); + frontCameraPosition = robotPose3d.transformBy(ROBOT_TO_FRONT_CAMERA); + backCameraPosition = robotPose3d.transformBy(ROBOT_TO_INTAKE_CAMERA); + } + + ChassisSpeeds speeds = RobotContainer.instance.drivetrain.getCachedState().Speeds; + futureRobotPose = + robotPose.plus( + new Transform2d( + 
speeds.vxMetersPerSecond * 0.3, speeds.vyMetersPerSecond * 0.3, Rotation2d.kZero)); + + // Allow targeting opponent's reef tags, which is needed for stealing algae + // Entry closestTagAndPose = reefProximity.closestReefPose(futureRobotPose, + // Robot.isBlue()); + Entry closestTagAndPose = + reefProximity.closestReefPose(futureRobotPose, allReefTags); + if (closestTagAndPose == null) { + closestReefPose = Pose2d.kZero; + closestReefPoseValid = false; + } else { + closestReefPose = closestTagAndPose.getValue(); + closestReefPoseValid = true; + } + + Pose2d latencyCompensatedRobotPose; + if (Robot.isReal()) { + latencyCompensatedRobotPose = + RobotContainer.instance.drivetrain.getBufferPose(inputs.backTimestamp); + } else { + latencyCompensatedRobotPose = robotPose; + } + coralPose = coralDetector.getCoralPose(latencyCompensatedRobotPose, inputs.backDetections); + if (coralPose == null) { + coralPose = Pose2d.kZero; + coralPoseValid = false; + } else { + coralPoseValid = true; + } + // Invalidate any previously stored coralPickUpPose - this will be recalculated if + // getCoralPickupPose() is called + coralPickupPose = null; + + if (closestTagAndPose == null) { + bestReefID = -1; + } else { + bestReefID = closestTagAndPose.getKey(); + } + + if (bestReefID != -1) lastReefID = bestReefID; + } + + @Override + public void simulationPeriodic() { + io.simulationPeriodic(); + } + + public void resetCoralDetector() { + coralDetector.reset(); + } + + private Pose2d getReefTagPose(int tagID) { + Pose2d pose = tagPoses2d.get(tagID); + if (pose == null) return null; + + if (TAG_OFFSETS.containsKey(tagID)) { + pose = + pose.transformBy(new Transform2d(0, TAG_OFFSETS.get(tagID).in(Meters), Rotation2d.kZero)); + } + return pose; + } + + /** + * Generates the scoring pose of the robot relative to a reef AprilTag. This is used to + * pre-calculate and store all positions to prevent duplicate object creation. 
To access these + * pre-calculated poses, use {@link #getReefPose(int, boolean)}. + */ + private Pose2d calculateReefPose(int tagID, boolean left) { + Pose2d pose = getReefTagPose(tagID); + if (pose == null) return null; + + return pose.transformBy(left ? leftReefTransform : rightReefTransform); + } + + /** + * Generates the L1 scoring pose of the robot relative to a reef AprilTag. This is used to + * pre-calculate and store all positions to prevent duplicate object creation. To access these + * pre-calculated poses, use {@link #getL1ReefPose(int, boolean)}. + */ + private Pose2d calculateL1ReefPose(int tagID, boolean left) { + Pose2d pose = getReefTagPose(tagID); + if (pose == null) return null; + + return pose.transformBy(left ? leftL1ReefTransform : rightL1ReefTransform) + .transformBy(left ? leftL1ReefRotation : rightL1ReefRotation); + } + + /** + * Generates the right branch scoring pose of the robot relative to a reef AprilTag, closer than + * the standard reef pose in order to faciliate grabbing an algae. This is used to pre-calculate + * and store all positions to prevent duplicate object creation. To access these pre-calculated + * poses, use {@link #getReefAlgaePose(int)}. + */ + private Pose2d calculateReefAlgaePose(int tagID, boolean left) { + return getReefPose(tagID, left).transformBy(algaeReefTransform); + } + + private Pose2d calculateReefAlgaeBackupPose(int tagID, boolean left) { + return calculateReefAlgaePose(tagID, left).transformBy(algaeReefBackup); + } + + /** + * Generates the scoring pose of the robot relative to the processor AprilTag. This is used to + * pre-calculate and store all positions to prevent duplicate object creation. To access these + * pre-calculated poses, use {@link #getProcessorPose(boolean)}. + */ + private Pose2d calculateProcessorPose(boolean blue) { + int tagID = blue ? 
BLUE_PROCESSOR_TAG : RED_PROCESSOR_TAG; + + Pose2d pose = getReefTagPose(tagID); + if (pose == null) return null; + + return pose.transformBy(processorTransform); + } + + /** + * Returns the scoring pose of the robot relative to the reef AprilTag that was last seen. + * + * @param left whether to return the left branch or the right branch scoring pose + */ + public Pose2d getReefPose(boolean left) { + return getReefPose(lastReefID, left); + } + + /** + * Returns a scoring pose of the robot relative to a reef AprilTag. + * + * @param tagID the ID of the reef AprilTag + * @param left whether to return the left branch or the right branch scoring pose + */ + public Pose2d getReefPose(int tagID, boolean left) { + if (left) { + return leftReefHashMap.get(tagID); + } else { + return rightReefHashMap.get(tagID); + } + } + + /** + * Returns the L1 scoring pose of the robot relative to a reef AprilTag. + * + * @param tagID the ID of the reef AprilTag + * @param left whether to return the left or the right L1 scoring pose + */ + public Pose2d getL1ReefPose(int tagID, boolean left) { + if (left) { + return leftL1ReefHashMap.get(tagID); + } else { + return rightL1ReefHashMap.get(tagID); + } + } + + /** + * Returns a right branch scoring pose of the robot relative to a reef AprilTag, closer than the + * standard reef pose in order to faciliate grabbing an algae. + * + * @param tagID the ID of the reef AprilTag + */ + public Pose2d getReefAlgaePose(int tagID, boolean left) { + return (left ? leftReefAlgaePoses : rightReefAlgaePoses).get(tagID); + } + + public Pose2d getReefAlgaeBackupPose(int tagID, boolean left) { + return (left ? leftReefAlgaeBackupPoses : rightReefAlgaeBackupPoses).get(tagID); + } + + /** + * Returns the scoring pose of the robot relative to the processor. + * + * @param blue whether to return the blue processor pose or the red processor pose + */ + public Pose2d getProcessorPose(boolean blue) { + return blue ? 
blueProcessorPose : redProcessorPose; + } + + /** Returns the closest reef scoring pose to the robot (accounting for alliance), or null */ + @NotLogged + public Pose2d getClosestReefPose() { + return closestReefPoseValid ? closestReefPose : null; + } + + private static final double BARGE_BLUE_X = 7.28; + private static final double BARGE_RED_X = FlippingUtil.fieldSizeX - BARGE_BLUE_X; + + public Pose2d getBargePose() { + Pose2d robotPose = RobotContainer.instance.drivetrain.getPose(); + + return new Pose2d( + Robot.isBlue() ? BARGE_BLUE_X : BARGE_RED_X, + robotPose.getY(), + Robot.isBlue() ? Rotation2d.kZero : Rotation2d.k180deg); + } + + public int getReefTagFromPose(Pose2d pose) { + for (Entry entry : leftReefHashMap.entrySet()) { + if (entry.getValue().equals(pose)) { + return entry.getKey(); + } + } + for (Entry entry : rightReefHashMap.entrySet()) { + if (entry.getValue().equals(pose)) { + return entry.getKey(); + } + } + return -1; + } + + public boolean isScoringCameraConnected() { + return inputs.scoringCameraConnected; + } + + public boolean isFrontCameraConnected() { + return inputs.frontCameraConnected; + } + + public boolean isBackCameraConnected() { + return inputs.backCameraConnected; + } + + /** Returns the position of the closest detected coral, or null */ + @NotLogged + public Pose2d getCoralPose() { + return coralPoseValid ? coralPose : null; + } + + public Pose3d getCoralPose3D() { + if (!coralPoseValid) return Pose3d.kZero; + + return new Pose3d(coralPose); + } + + @NotLogged + public Pose2d getCoralPickupPose() { + if (coralPickupPose == null && coralPoseValid) { + Translation2d robotPosition = RobotContainer.instance.drivetrain.getPose().getTranslation(); + Translation2d coralPosition = coralPose.getTranslation(); + double pickupOffset = RobotContainer.instance.coralIntakeReady.getAsBoolean() ? 
0.6 : 0.9; + double centerDistance = robotPosition.getDistance(coralPosition); + double pickupDistance = centerDistance - pickupOffset; + + Translation2d pickupPosition = + robotPosition.interpolate(coralPosition, pickupDistance / centerDistance); + double radiansToCoral = + Math.atan2( + coralPosition.getY() - robotPosition.getY(), + coralPosition.getX() - robotPosition.getX()); + + coralPickupPose = new Pose2d(pickupPosition, new Rotation2d(radiansToCoral + Math.PI)); + } + return coralPickupPose; + } + + public PoseEstimate validatePoseEstimate(PoseEstimate poseEstimate) { + if (poseEstimate == null) return null; + + double tagMin = 1; + double tagMax = 99; + double maxDist = poseEstimate.tagCount == 1 ? 3.7 : 6; + double minArea = poseEstimate.tagCount == 1 ? 0.18 : 0.08; + if (poseEstimate.tagCount > tagMax || poseEstimate.tagCount < tagMin) return null; + if (poseEstimate.avgTagArea < minArea) return null; + if (poseEstimate.avgTagDist > maxDist) return null; + + // Rejected if the pose estimate is too far from the last one + // if (lastPoseEstimate != null && deltaSeconds <= 0.25) { + // double maxReasonableDistance = deltaSeconds * DrivetrainSubsystem.MAX_SPEED; + // Translation2d diff = + // poseEstimate.pose.getTranslation().minus(lastPoseEstimate.pose.getTranslation()); + // if (!Helpers.withinTolerance(diff, maxReasonableDistance)) return null; + // } + + return poseEstimate; + } + + public PoseEstimate validateMT2PoseEstimate(PoseEstimate poseEstimate) { + if (poseEstimate == null) return null; + if (poseEstimate.tagCount == 0) return null; + // if (Math.abs(RobotContainer.instance.drivetrain.getGyroscopeRate()) > 720) return null; + + return poseEstimate; + } + + @NotLogged + public boolean reefVisible() { + boolean isReefVisible = false; + for (int i = 0; i < inputs.scoringFiducials.length; i++) { + RawFiducial fiducial = inputs.scoringFiducials[i]; + if (redReefTags.contains(fiducial.id) || blueReefTags.contains(fiducial.id)) { + isReefVisible = 
true; + } else { + isReefVisible = false; + } + } + return isReefVisible; + } + + /** Blocks pose estimates from updating the robot's position while this command is running. */ + public Command blockPoseEstimates() { + return Commands.runEnd(() -> allowPoseEstimates = false, () -> allowPoseEstimates = true); + } + + public void setAllowPoseEstimates(boolean allow) { + allowPoseEstimates = allow; + } + + private void cameraTemperatureAlert(Alert alert, String cameraName, double temperature) { + if (temperature >= BAD_CAMERA_TEMP) { + int roundedtemp = (int) Math.round(temperature); + alert.setText(cameraName + " Camera temp high (" + roundedtemp + "°F)"); + alert.set(true); + } else { + alert.set(false); + } + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java b/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java new file mode 100644 index 0000000..7d2a7dc --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java @@ -0,0 +1,291 @@ +// Copyright (c) 2024 FRC 6995 +// https://github.com/frc6995 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC6995; + +import com.ctre.phoenix6.Utils; +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.apriltag.AprilTagFields; +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.Vector; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.networktables.StructPublisher; +import edu.wpi.first.wpilibj.RobotBase; +import edu.wpi.first.wpilibj.Timer; +import frc.robot.util.TriConsumer; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonPoseEstimator; +import org.photonvision.PhotonPoseEstimator.PoseStrategy; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.targeting.PhotonPipelineResult; + +public class Vision { + public interface VisionConsumer extends TriConsumer> {} + + private class Camera { + String name; + PhotonCamera camera; + PhotonPoseEstimator estimator; + PhotonCameraSim simCamera; + Pose3d estimatedPose = new Pose3d(); + StructPublisher rawPosePublisher; + + public Camera( + String name, + PhotonCamera camera, + PhotonPoseEstimator estimator, + PhotonCameraSim simCamera) { + this.name = name; + this.camera = camera; + this.estimator = estimator; + this.simCamera = simCamera; + var table = NetworkTableInstance.getDefault().getTable("Cameras").getSubTable(name); + rawPosePublisher = table.getStructTopic("rawPose", Pose3d.struct).publish(); + } + + public 
void setRawPose(Pose3d pose) { + estimatedPose = pose; + rawPosePublisher.accept(estimatedPose); + } + } + ; + + public class VisionConstants { + private static SimCameraProperties cameraProp = new SimCameraProperties(); + + static { + // A 640 x 480 camera with a 100 degree diagonal FOV. + cameraProp.setCalibration(1280, 900, Rotation2d.fromDegrees(100)); + // Approximate detection noise with average and standard deviation error in pixels. + cameraProp.setCalibError(0.12, 0.04); + // Set the camera image capture framerate (Note: this is limited by robot loop rate). + cameraProp.setFPS(20); + // The average and standard deviation in milliseconds of image data latency. + cameraProp.setAvgLatencyMs(35); + cameraProp.setLatencyStdDevMs(5); + } + + public static final SimCameraProperties SIM_CAMERA_PROPERTIES = cameraProp; + + public static final Map CAMERAS = + Map.of( + "OV9281-BL", + new Transform3d( + -(Units.inchesToMeters(14.25) - 0.102), + (Units.inchesToMeters(14.25) - 0.066), + Units.inchesToMeters(8.5), + new Rotation3d( + Units.degreesToRadians(0), + Units.degreesToRadians(-13.65), + Units.degreesToRadians(-170))), + "OV9281-BR", + new Transform3d( + -(Units.inchesToMeters(14.25) - 0.102), + -(Units.inchesToMeters(14.25) - 0.072), + Units.inchesToMeters(8.5), + new Rotation3d( + Units.degreesToRadians(0), + Units.degreesToRadians(-13.65), + Units.degreesToRadians(170))), + "OV9281-FR", + new Transform3d( + Units.inchesToMeters(14.25) - 0.102, + -(Units.inchesToMeters(14.25) - 0.112), + Units.inchesToMeters(8.5), + new Rotation3d( + Units.degreesToRadians(0), + Units.degreesToRadians(-13.65), + Units.degreesToRadians(10))), + "OV9281-FL", + new Transform3d( + (Units.inchesToMeters(14.25) - 0.102), + (Units.inchesToMeters(14.25) - 0.066), + Units.inchesToMeters(8.5), + new Rotation3d( + Units.degreesToRadians(0), + Units.degreesToRadians(-13.65), + Units.degreesToRadians(-10)))); + public static final AprilTagFieldLayout FIELD_LAYOUT = + 
AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); + } + + VisionSystemSim visionSim = new VisionSystemSim("main"); + private List m_cameras; + private List m_actualCameras; + private List m_simCameras; + private VisionConsumer addVisionMeasurement; + private Supplier getPose; + private double lastPoseResetTimestamp = 0; + + public void resetPose() { + lastPoseResetTimestamp = Timer.getFPGATimestamp(); + } + + public Vision(VisionConsumer addVisionMeasurement, Supplier getPose) { + if (RobotBase.isSimulation()) { + visionSim.addAprilTags(AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField)); + } + + this.addVisionMeasurement = addVisionMeasurement; + this.getPose = getPose; + m_cameras = new ArrayList<>(); + VisionConstants.CAMERAS + .entrySet() + .iterator() + .forEachRemaining( + (entry) -> { + var translation = entry.getValue(); + var cam = new PhotonCamera(entry.getKey()); + var estimator = + new PhotonPoseEstimator( + VisionConstants.FIELD_LAYOUT, + PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, + translation); + estimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); + PhotonCameraSim simCam = null; + if (RobotBase.isSimulation()) { + simCam = new PhotonCameraSim(cam, VisionConstants.SIM_CAMERA_PROPERTIES); + visionSim.addCamera(simCam, translation); + } + m_cameras.add(new Camera(entry.getKey(), cam, estimator, simCam)); + }); + } + + public boolean filterPose(Pose3d robotPose) { + if (Math.abs(robotPose.getZ()) > 0.25) { + return false; + } + return true; + } + + private boolean isReef(int id) { + return (id >= 6 && id <= 11) || (id >= 17 && id <= 22); + } + + private void handleResult(Camera camera, PhotonPipelineResult result) { + var estimator = camera.estimator; + estimator.setReferencePose(getPose.get()); + + var robotPoseOpt = estimator.update(result); + if (robotPoseOpt.isEmpty()) { + return; + } + camera.setRawPose(robotPoseOpt.get().estimatedPose); + var pose = robotPoseOpt.get(); + if (pose.timestampSeconds < 
lastPoseResetTimestamp) { + return; + } + if (!filterPose(pose.estimatedPose)) { + return; + } + double xConfidence; + double yConfidence; + double angleConfidence; + if (pose.targetsUsed.size() == 0) { + return; // should never happen but measurement shouldn't be trusted + } + double closestDistance = 1000; + double avgDistance = 0; + double closeEnoughTgts = 0; + boolean ignore = false; + + final double border = 15; + for (var tgt : pose.targetsUsed) { + + // rule out + for (var corner : tgt.detectedCorners) { + if (MathUtil.isNear(0, corner.x, border) + || MathUtil.isNear( + VisionConstants.SIM_CAMERA_PROPERTIES.getResWidth(), corner.x, border) + || MathUtil.isNear(0, corner.y, border) + || MathUtil.isNear( + VisionConstants.SIM_CAMERA_PROPERTIES.getResHeight(), corner.y, border)) { + return; + } + } + double tdist = tgt.getBestCameraToTarget().getTranslation().getNorm(); + if (pose.targetsUsed.size() < 2) { + var trustedDistance = + isReef(pose.targetsUsed.get(0).fiducialId) + ? Units.feetToMeters(6) + : Units.feetToMeters(6); + if (tdist > trustedDistance) { + return; + + } else { + closeEnoughTgts = 1; + } + } + avgDistance += tdist; + if (tdist < closestDistance) { + closestDistance = tdist; + } + if (pose.targetsUsed.size() >= 2) { + var trustedDistance = + isReef(pose.targetsUsed.get(0).fiducialId) + ? 
Units.feetToMeters(10) + : Units.feetToMeters(8); + + if (tdist <= trustedDistance) { + closeEnoughTgts++; + } + } + // ignore |= (tgt.getFiducialId() == 13); + // ignore |= (tgt.getFiducialId() == 14); + } + if (ignore) { + return; + } + double distance = avgDistance / pose.targetsUsed.size(); + // SmartDashboard.putNumber(camera.name + "/distance", distance); + if (closeEnoughTgts == 0) { + return; + } + if (pose.targetsUsed.size() < 2) { + xConfidence = 0.5 * distance / 4.0; + yConfidence = 0.5 * distance / 4.0; + angleConfidence = 1; + } else { + xConfidence = 0.02 * distance * distance; + yConfidence = 0.02 * distance * distance; + angleConfidence = 0.3 * distance * distance; + } + this.addVisionMeasurement.accept( + pose.estimatedPose.toPose2d(), + Utils.fpgaToCurrentTime(pose.timestampSeconds), + VecBuilder.fill(xConfidence, yConfidence, angleConfidence)); + } + + public void update() { + visionSim.update(getPose.get()); + for (Camera camera : m_cameras) { + for (PhotonPipelineResult result : camera.camera.getAllUnreadResults()) { + handleResult(camera, result); + } + } + } +} diff --git a/src/main/java/frc/robot/util/TriConsumer.java b/src/main/java/frc/robot/util/TriConsumer.java new file mode 100644 index 0000000..67972db --- /dev/null +++ b/src/main/java/frc/robot/util/TriConsumer.java @@ -0,0 +1,19 @@ +// Copyright (c) 2024 FRC 6995 +// https://github.com/frc6995 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.util; + +@FunctionalInterface +public interface TriConsumer { + public void accept(T arg0, U arg1, V arg2); +} From 63776985ffd6a9ccd11f32ea62e5c0369a760c36 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Fri, 30 May 2025 16:46:50 -0700 Subject: [PATCH 06/12] SPAM / FRC180 vision now builds The vision subsystem from FRC180 / SPAM now builds within the 2486 code base. Next steps are to figure out which parts of the various vision combination schemes I want to use and hack together something! modified: src/main/java/frc/robot/Constants.java modified: src/main/java/frc/robot/Robot.java modified: src/main/java/frc/robot/RobotContainer.java modified: src/main/java/frc/robot/subsystems/drive/Drive.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/Field.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java new file: src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java modified: src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java modified: src/main/java/frc/robot/util/LimelightHelpers.java --- src/main/java/frc/robot/Constants.java | 1 + src/main/java/frc/robot/Robot.java | 15 + src/main/java/frc/robot/RobotContainer.java | 48 +- .../frc/robot/subsystems/drive/Drive.java | 22 +- .../vision/FRC180/CameraStatus.java | 21 + .../vision/FRC180/CoralDetectorSim.java | 1 - .../robot/subsystems/vision/FRC180/Field.java | 3 +- 
.../vision/FRC180/LEDSubsystem.java | 4 +- .../vision/FRC180/LimelightStatus.java | 47 ++ .../subsystems/vision/FRC180/SimCamera.java | 147 ++++ .../subsystems/vision/FRC180/SimLogic.java | 153 ++++ .../vision/FRC180/VisionIOLimelight.java | 3 +- .../vision/FRC180/VisionIOPhoton.java | 1 - .../vision/FRC180/VisionSubsystem.java | 43 +- .../subsystems/vision/FRC254/Constants.java | 2 +- .../java/frc/robot/util/LimelightHelpers.java | 706 ++++++++++++++++-- 16 files changed, 1127 insertions(+), 90 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index d236484..2b36b15 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -360,6 +360,7 @@ public static class CANandPowerPorts { // Example: public static final RobotDeviceId FLYWHEEL_LEADER = new RobotDeviceId(3, "", 8); public static final RobotDeviceId FLYWHEEL_FOLLOWER = new RobotDeviceId(4, "", 9); + public static final RobotDeviceId LED = new RobotDeviceId(36, "", null); /* BEAM BREAK and/or LIMIT SWITCH DIO CHANNELS */ // This is where digital I/O feedback devices are defined diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index e800a3c..15a1cc7 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -13,6 +13,8 @@ package frc.robot; +import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.DriverStation.Alliance; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; @@ -35,6 +37,7 @@ public class Robot extends LoggedRobot { 
private Command m_autoCommandPathPlanner; private RobotContainer m_robotContainer; private Timer m_disabledTimer; + private static boolean isBlueAlliance = false; /** * This function is run when the robot is first started up and should be used for any @@ -99,6 +102,9 @@ public Robot() { /** This function is called periodically during all modes. */ @Override public void robotPeriodic() { + + isBlueAlliance = DriverStation.getAlliance().orElse(Alliance.Blue) == Alliance.Blue; + // Switch thread to high priority to improve loop timing Threads.setCurrentThreadPriority(true, 99); @@ -202,4 +208,13 @@ public void simulationInit() {} /** This function is called periodically whilst in simulation. */ @Override public void simulationPeriodic() {} + + // Helper method to simplify checking if the robot is blue or red alliance + public static boolean isBlue() { + return isBlueAlliance; + } + + public static boolean isRed() { + return !isBlue(); + } } diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index beb2eb1..269a4d5 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -26,6 +26,10 @@ import choreo.auto.AutoRoutine; import choreo.auto.AutoTrajectory; import com.pathplanner.lib.auto.AutoBuilder; +import com.pathplanner.lib.path.GoalEndState; +import com.pathplanner.lib.path.PathConstraints; +import com.pathplanner.lib.path.PathPlannerPath; +import com.pathplanner.lib.path.Waypoint; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.wpilibj.GenericHID; @@ -56,11 +60,48 @@ import frc.robot.util.OverrideSwitches; import frc.robot.util.PowerMonitoring; import frc.robot.util.RBSIEnum; +import java.util.List; import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; /** This is the location for defining robot hardware, commands, and controller button bindings. 
*/ public class RobotContainer { + private static final boolean USE_MAPLESIM = true; + public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); + + // **** This is a Pathplanner On-the-Fly Command ****/ + // Create a list of waypoints from poses. Each pose represents one waypoint. + // The rotation component of the pose should be the direction of travel. Do not use + // holonomic rotation. + List woahpoints = + PathPlannerPath.waypointsFromPoses( + new Pose2d(8.180, 6.184, Rotation2d.fromDegrees(0)), + new Pose2d(9.4, 6.184, Rotation2d.fromDegrees(0))); + + PathConstraints constraints = + new PathConstraints(1.0, 1.0, 2 * Math.PI, 4 * Math.PI); // The constraints for this path. + // PathConstraints constraints = PathConstraints.unlimitedConstraints(12.0); // You can + // also use unlimited constraints, only limited by motor torque and nominal battery + // voltage + + // Create the path using the waypoints created above + PathPlannerPath woah = + new PathPlannerPath( + woahpoints, + constraints, + null, // The ideal starting state, this is only relevant for pre-planned paths, + // so + // can be null for on-the-fly paths. + new GoalEndState( + 0.0, + Rotation2d.fromDegrees( + 180)) // Goal end state. You can set a holonomic rotation here. If + // using a + // differential drivetrain, the rotation will have no effect. 
+ ); + + // Prevent the path from being flipped if the coordinates are already correct + /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -70,7 +111,7 @@ public class RobotContainer { /** Declare the robot subsystems here ************************************ */ // These are the "Active Subsystems" that the robot controlls - private final Drive m_drivebase; + public final Drive m_drivebase; private final Flywheel m_flywheel; // These are "Virtual Subsystems" that report information but have no motors @@ -94,12 +135,15 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); + public static RobotContainer instance; + /** * Constructor for the Robot Container. This container holds subsystems, opertator interface * devices, and commands. */ public RobotContainer() { - + instance = this; + woah.preventFlipping = true; // Instantiate Robot Subsystems based on RobotType switch (Constants.getMode()) { case REAL: diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 63664b6..96b064f 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -19,6 +19,7 @@ import static frc.robot.subsystems.drive.SwerveConstants.*; import choreo.trajectory.SwerveSample; +import com.ctre.phoenix6.swerve.SwerveDrivetrain.SwerveDriveState; import com.ctre.phoenix6.swerve.SwerveRequest; import com.pathplanner.lib.auto.AutoBuilder; import com.pathplanner.lib.controllers.PPHolonomicDriveController; @@ -34,6 +35,7 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.geometry.Twist2d; +import 
edu.wpi.first.math.interpolation.TimeInterpolatableBuffer; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.math.kinematics.SwerveDriveKinematics; import edu.wpi.first.math.kinematics.SwerveModulePosition; @@ -44,6 +46,7 @@ import edu.wpi.first.wpilibj.Alert.AlertType; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; +import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.SubsystemBase; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; @@ -53,6 +56,7 @@ import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; +import java.util.Optional; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.littletonrobotics.junction.AutoLogOutput; @@ -60,6 +64,10 @@ public class Drive extends SubsystemBase { + private SwerveDriveState cachedState = null; + private final TimeInterpolatableBuffer poseBuffer = + TimeInterpolatableBuffer.createBuffer(2); + static final Lock odometryLock = new ReentrantLock(); private final GyroIO gyroIO; private final GyroIOInputsAutoLogged gyroInputs = new GyroIOInputsAutoLogged(); @@ -211,6 +219,8 @@ public void periodic() { Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); } + poseBuffer.addSample(Timer.getFPGATimestamp(), getPose()); + // Update odometry double[] sampleTimestamps = modules[0].getOdometryTimestamps(); // All signals are sampled together @@ -348,7 +358,7 @@ private SwerveModulePosition[] getModulePositions() { /** Returns the measured chassis speeds of the robot. */ @AutoLogOutput(key = "SwerveChassisSpeeds/Measured") - private ChassisSpeeds getChassisSpeeds() { + public ChassisSpeeds getChassisSpeeds() { return kinematics.toChassisSpeeds(getModuleStates()); } @@ -410,6 +420,7 @@ public Object getGyro() { /** Resets the current odometry pose. 
*/ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(rawGyroRotation, getModulePositions(), pose); + poseBuffer.clear(); } /** Adds a new timestamped vision measurement. */ @@ -474,4 +485,13 @@ public void followTrajectory(SwerveSample sample) { // Apply the generated speeds runVelocity(speeds); } + + // Thing from FRC180 + public Pose2d getBufferPose(double timestamp) { + Optional pose = poseBuffer.getSample(timestamp); + if (pose.isPresent()) { + return pose.get(); + } + return null; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java new file mode 100644 index 0000000..4e5d72f --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java @@ -0,0 +1,21 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +public interface CameraStatus { + + public void update(); + + public boolean isConnected(); +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java index f26fadc..c303fab 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java @@ -21,7 +21,6 @@ import frc.robot.Robot; import frc.robot.RobotContainer; import frc.robot.util.LimelightHelpers.RawDetection; -import frc.robot.util.simulation.SimLogic; import org.ironmaple.simulation.SimulatedArena; /** diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java b/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java index f4861d0..98e7589 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java @@ -20,6 +20,7 @@ import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.units.measure.Distance; +import frc.robot.Constants.AprilTagConstants; import java.util.HashMap; import java.util.Optional; @@ -55,7 +56,7 @@ public static void init() { public static Pose3d[] getReefAlgaePoses() { if (reefAlgaePoses == null) { - AprilTagFieldLayout layout = RobotContainer.instance.vision.aprilTagFieldLayout; + AprilTagFieldLayout layout = AprilTagConstants.kAprilTagLayout; reefAlgaePoses = new Pose3d[CURRENT_ALGAE_HEIGHTS.size()]; int index = 0; for (var entry : CURRENT_ALGAE_HEIGHTS.entrySet()) { diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java index feb3c67..498139f 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java @@ -32,7 
+32,7 @@ import com.ctre.phoenix.led.TwinkleOffAnimation.TwinkleOffPercent; import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.SubsystemBase; -import frc.robot.Constants; +import frc.robot.Constants.CANandPowerPorts; public class LEDSubsystem extends SubsystemBase { @@ -78,7 +78,7 @@ public LEDSubsystem() { config.v5Enabled = true; config.stripType = LEDStripType.GRB; config.vBatOutputMode = VBatOutputMode.Off; - candle = new CANdle(Constants.CANDLE, Constants.CANIVORE); + candle = new CANdle(CANandPowerPorts.LED.getDeviceNumber(), CANandPowerPorts.LED.getBus()); candle.configAllSettings(config); rainbow = new RainbowAnimation(1, 1, NUM_LEDS, false, STRIP_OFFSET); diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java b/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java new file mode 100644 index 0000000..84f84bd --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java @@ -0,0 +1,47 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import edu.wpi.first.wpilibj.Timer; +import frc.robot.util.LimelightHelpers; + +public class LimelightStatus implements CameraStatus { + + private final String limelightName; + + private double limelightHeartbeat = 0; + private double lastHeartbeatTime = 0; + private boolean limelightConnected = false; + + public LimelightStatus(String limelightName) { + this.limelightName = limelightName; + } + + @Override + public void update() { + double newHeartbeat = LimelightHelpers.getLimelightNTDouble(limelightName, "hb"); + if (newHeartbeat > limelightHeartbeat) { + limelightConnected = true; + limelightHeartbeat = newHeartbeat; + lastHeartbeatTime = Timer.getFPGATimestamp(); + } else if (Timer.getFPGATimestamp() - lastHeartbeatTime >= 1) { + limelightConnected = false; + } + } + + @Override + public boolean isConnected() { + return limelightConnected; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java b/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java new file mode 100644 index 0000000..a2def21 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java @@ -0,0 +1,147 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation3d; +import frc.robot.Robot; +import frc.robot.util.LimelightHelpers.PoseEstimate; +import frc.robot.util.LimelightHelpers.RawFiducial; +import java.util.List; +import java.util.Optional; +import org.photonvision.EstimatedRobotPose; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonPoseEstimator; +import org.photonvision.PhotonPoseEstimator.PoseStrategy; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.targeting.PhotonPipelineResult; +import org.photonvision.targeting.PhotonTrackedTarget; + +/** Simulates a Limelight 4 using PhotonVision. */ +public class SimCamera { + + static final RawFiducial[] EMPTY_FIDUCIALS = new RawFiducial[0]; + + final PhotonCamera camera; + final PhotonPoseEstimator photonPoseEstimator; + + VisionSystemSim visionSim = null; + PhotonCameraSim cameraSim = null; + + public SimCamera(String name, Transform3d cameraPosition, AprilTagFieldLayout apriltagLayout) { + camera = new PhotonCamera(name); + photonPoseEstimator = + new PhotonPoseEstimator(apriltagLayout, PoseStrategy.LOWEST_AMBIGUITY, cameraPosition); + + // Everything past this point is simulation only + if (Robot.isReal()) return; + + visionSim = new VisionSystemSim("main"); + visionSim.addAprilTags(apriltagLayout); + + SimCameraProperties cameraProp = new SimCameraProperties(); + // TODO: set LL4 diagonal FOV instead of horizontal (82) + cameraProp.setCalibration(1280, 960, Rotation2d.fromDegrees(100)); + cameraProp.setCalibError(0.35, 0.10); + cameraProp.setFPS(45); + cameraProp.setAvgLatencyMs(50); + cameraProp.setLatencyStdDevMs(15); + // Create a PhotonCameraSim which will update the 
linked PhotonCamera's values with visible + // targets. + cameraSim = new PhotonCameraSim(camera, cameraProp); + // Add the simulated camera to view the targets on this simulated field. + visionSim.addCamera(cameraSim, cameraPosition); + + cameraSim.enableDrawWireframe(true); + } + + PoseEstimate poseEstimate = null; + RawFiducial[] rawFiducials = null; + double timestamp = 0; + + public void update() { + + PhotonPipelineResult latestResult = null; + EstimatedRobotPose latestEstimate = null; + for (PhotonPipelineResult result : camera.getAllUnreadResults()) { + Optional poseEst = photonPoseEstimator.update(result); + if (poseEst.isPresent()) { + latestEstimate = poseEst.get(); + latestResult = result; + } + } + List targets = latestResult != null ? latestResult.getTargets() : null; + + poseEstimate = toPoseEstimate(latestEstimate, targets); + if (poseEstimate != null) timestamp = poseEstimate.timestampSeconds; + + if (targets != null) { + rawFiducials = toRawFiducials(targets); + } else { + rawFiducials = EMPTY_FIDUCIALS; + } + } + + public PoseEstimate getPoseEstimate() { + return poseEstimate; + } + + public RawFiducial[] getRawFiducials() { + return rawFiducials; + } + + /** Converts a PhotonVision EstimatedRobotPose to a Limelight PoseEstimate */ + private PoseEstimate toPoseEstimate( + EstimatedRobotPose estimate, List targets) { + if (estimate == null) return null; + + PoseEstimate est = new PoseEstimate(); + est.pose = estimate.estimatedPose.toPose2d(); + est.timestampSeconds = estimate.timestampSeconds; + est.tagCount = targets.size(); + est.avgTagDist = + targets.stream() + .mapToDouble( + t -> t.getBestCameraToTarget().getTranslation().getDistance(Translation3d.kZero)) + .average() + .orElse(0); + est.avgTagArea = 99; // targets.stream().mapToDouble(t -> t.area).average().orElse(0); + return est; + } + + /** Converts a PhotonTrackedTarget list to a Limelight RawFiducials array */ + private RawFiducial[] toRawFiducials(List targets) { + RawFiducial[] 
fiducials = new RawFiducial[targets.size()]; + for (int i = 0; i < targets.size(); i++) { + PhotonTrackedTarget target = targets.get(i); + double dist = + target.getBestCameraToTarget().getTranslation().getDistance(Translation3d.kZero); + + fiducials[i] = + new RawFiducial( + target.getFiducialId(), + target.getYaw(), + target.getPitch(), + target.area, + dist, + dist, + target.poseAmbiguity); + } + return fiducials; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java b/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java new file mode 100644 index 0000000..9c9c462 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java @@ -0,0 +1,153 @@ +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +package frc.robot.subsystems.vision.FRC180; + +import static edu.wpi.first.units.Units.*; + +import com.pathplanner.lib.util.FlippingUtil; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.units.measure.Distance; +import frc.robot.Robot; +import frc.robot.RobotContainer; +import org.ironmaple.simulation.SimulatedArena; +import org.ironmaple.simulation.seasonspecific.reefscape2025.ReefscapeCoralOnField; + +public abstract class SimLogic { + + public static final Pose2d redHPCoralPose = new Pose2d(16.17, 1.33, new Rotation2d()); + public static final Pose2d blueHPCoralPose = FlippingUtil.flipFieldPose(redHPCoralPose); + public static final double CORAL_LENGTH = Field.CORAL_LENGTH.in(Meters); + + public static boolean intakeHasCoral = false; + public static boolean armHasCoral = false; + public static boolean intakeHasAlgae = false; + public static boolean armHasAlgae = false; + public static int coralScored = 0; + + public static double armCoralPosition = -1; + + public static boolean robotHasCoral() { + return intakeHasCoral || armHasCoral; + } + + public static boolean robotHasAlgae() { + return intakeHasAlgae || armHasAlgae; + } + + public static void spawnHumanPlayerCoral() { + spawnHumanPlayerCoral(Robot.isBlue()); + } + + public static void spawnHumanPlayerCoral(boolean blue) { + if (!RobotContainer.MAPLESIM) { + return; + } + + for (int i = 0; i < 2; i++) { + Pose2d coralPose = blue ? 
blueHPCoralPose : redHPCoralPose; + + if (i == 1) { + coralPose = coralPose.transformBy(new Transform2d(0, 5, new Rotation2d())); + } + + // generate a random physical offset between -0.3 and 0.3 meters and a random rotation + double xOffset = randomNumberPlusMinus(0.3); + double yOffset = randomNumberPlusMinus(0.3); + double rotationOffset = Math.random() * 360; + Transform2d randomTransform = + new Transform2d(xOffset, yOffset, Rotation2d.fromDegrees(rotationOffset)); + + spawnCoral(coralPose.transformBy(randomTransform)); + } + } + + public static void spawnCoral(Pose2d pose) { + SimulatedArena.getInstance().addGamePiece(new ReefscapeCoralOnField(pose)); + } + + public static void scoreCoral() { + if (!RobotContainer.MAPLESIM) { + return; + } + + RobotContainer rc = RobotContainer.instance; + // SwerveDriveSimulation swerveSim = rc.drivetrain.getDriveSim(); + // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); + double coralAngle; + double heightOffset = 0.6; + double xOffset; + Distance coralHeight = Meters.of(0. 
+ heightOffset); + + // SimulatedArena.getInstance() + // .addGamePieceProjectile( + // new ReefscapeCoralOnFly( + // simRobotPose.getTranslation(), + // // The scoring mechanism position on the robot + // new Translation2d(xOffset, 0), + // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), + // simRobotPose.getRotation(), + // coralHeight, + // // The initial speed of the coral + // MetersPerSecond.of(2), + // Degrees.of(coralAngle))); + } + + public static void outtakeAlgae() { + if (!RobotContainer.MAPLESIM) { + return; + } + + // SwerveDriveSimulation swerveSim = RobotContainer.instance.drivetrain.getDriveSim(); + // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); + + // SimulatedArena.getInstance() + // .addGamePieceProjectile( + // new ReefscapeAlgaeOnFly( + // simRobotPose.getTranslation(), + // new Translation2d(0.6, 0), // scoring mechanism position on the robot + // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), + // simRobotPose.getRotation().rotateBy(Rotation2d.kCCW_90deg), + // Inches.of(16), // outtake height + // MetersPerSecond.of(2), // outtake speed + // Degrees.of(0))); + } + + public static void netAlgae(boolean forwards) { + if (!RobotContainer.MAPLESIM) { + return; + } + + // SwerveDriveSimulation swerveSim = RobotContainer.instance.drivetrain.getDriveSim(); + // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); + + // SimulatedArena.getInstance() + // .addGamePieceProjectile( + // new ReefscapeAlgaeOnFly( + // simRobotPose.getTranslation(), + // new Translation2d(-0.1, 0), // scoring mechanism position on the robot + // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), + // simRobotPose + // .getRotation() + // .rotateBy(forwards ? 
Rotation2d.kZero : Rotation2d.k180deg), + // Meters.of(10.), // outtake height + // MetersPerSecond.of(6), // outtake speed + // Degrees.of(75))); + } + + private static double randomNumberPlusMinus(double range) { + return Math.random() * (range * 2) - range; + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java index fc1a6f2..049d2e6 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java @@ -13,7 +13,6 @@ package frc.robot.subsystems.vision.FRC180; -import com.spamrobotics.vision.LimelightStatus; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.Timer; @@ -108,7 +107,7 @@ public void simulationPeriodic() { // to stop the odometry from diverging from the real robot position if we're using a physics // simulation (like MapleSim). // To simulate realistic vision input using Apriltags, use {@link VisionIOPhoton} instead. 
- simPoseEstimate.pose = RobotContainer.instance.drivetrain.getSimPose(); + // simPoseEstimate.pose = RobotContainer.instance.drivetrain.getSimPose(); simPoseEstimate.timestampSeconds = Timer.getFPGATimestamp(); simPoseEstimate.tagCount = 1; simPoseEstimate.avgTagDist = 2; diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java index e75dc03..04c0ed5 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java @@ -13,7 +13,6 @@ package frc.robot.subsystems.vision.FRC180; -import com.spamrobotics.vision.SimCamera; import edu.wpi.first.apriltag.AprilTagFieldLayout; public class VisionIOPhoton implements VisionIO { diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java index da554f4..903c91b 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java @@ -44,8 +44,8 @@ import edu.wpi.first.wpilibj2.command.SubsystemBase; import edu.wpi.first.wpilibj2.command.button.Trigger; import frc.robot.Robot; -import frc.robot.RobotContainer; -import frc.robot.subsystems.vision.VisionIO.VisionIOInputs; +import frc.robot.subsystems.drive.Drive; +import frc.robot.subsystems.vision.FRC180.VisionIO.VisionIOInputs; import frc.robot.util.LimelightHelpers.PoseEstimate; import frc.robot.util.LimelightHelpers.RawFiducial; import java.util.ArrayList; @@ -54,9 +54,13 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.function.Supplier; @Logged public class VisionSubsystem extends SubsystemBase { + private final VisionConsumer consumer; + private final Supplier poseSupplier; + private final Drive m_drivebase; /** * The source of a pose estimate, used to 
determine the standard deviation of the pose estimate @@ -216,7 +220,12 @@ enum PoseEstimateSource { public final Trigger hasPoseEstimates = new Trigger(() -> poseEstimate != null).debounce(0.5); @SuppressWarnings("unused") - public VisionSubsystem() { + public VisionSubsystem( + VisionConsumer consumer, Supplier poseSupplier, Drive m_drivebase) { + this.consumer = consumer; + this.poseSupplier = poseSupplier; + this.m_drivebase = m_drivebase; + try { aprilTagFieldLayout = AprilTagFieldLayout.loadFromResource(AprilTagFields.k2025ReefscapeWelded.m_resourceFile); @@ -335,14 +344,14 @@ public void periodic() { Pose2d robotPose = null; if (poseEstimate != null) { - RobotContainer.instance.drivetrain.addVisionMeasurement( + consumer.accept( poseEstimate.pose, Utils.fpgaToCurrentTime(poseEstimate.timestampSeconds), poseEstimateSource.stdDev); // Calculate the difference between the updated robot pose and the scoring pose estimate, to // get an idea // of how closely we are tracking the robot's actual position - robotPose = RobotContainer.instance.drivetrain.getPose(); + robotPose = poseSupplier.get(); // Note - the goal of this if statement is to stop "bad" data from non-scoring cameras from // allowing // a coral to be scored. 
Unknown if this is working as intended @@ -361,17 +370,17 @@ public void periodic() { poseEstimateSource = PoseEstimateSource.NONE; } - if (robotPose == null) robotPose = RobotContainer.instance.drivetrain.getPose(); + if (robotPose == null) robotPose = poseSupplier.get(); // calculate scoring camera in 3D space, for previewing in AdvantageScope if (Robot.isSimulation()) { - Pose3d robotPose3d = new Pose3d(RobotContainer.instance.drivetrain.getSimPose()); + Pose3d robotPose3d = new Pose3d(poseSupplier.get()); scoringCameraPosition = robotPose3d.transformBy(ROBOT_TO_SCORING_CAMERA); frontCameraPosition = robotPose3d.transformBy(ROBOT_TO_FRONT_CAMERA); backCameraPosition = robotPose3d.transformBy(ROBOT_TO_INTAKE_CAMERA); } - ChassisSpeeds speeds = RobotContainer.instance.drivetrain.getCachedState().Speeds; + ChassisSpeeds speeds = m_drivebase.getChassisSpeeds(); futureRobotPose = robotPose.plus( new Transform2d( @@ -392,8 +401,7 @@ public void periodic() { Pose2d latencyCompensatedRobotPose; if (Robot.isReal()) { - latencyCompensatedRobotPose = - RobotContainer.instance.drivetrain.getBufferPose(inputs.backTimestamp); + latencyCompensatedRobotPose = m_drivebase.getBufferPose(inputs.backTimestamp); } else { latencyCompensatedRobotPose = robotPose; } @@ -560,7 +568,7 @@ public Pose2d getClosestReefPose() { private static final double BARGE_RED_X = FlippingUtil.fieldSizeX - BARGE_BLUE_X; public Pose2d getBargePose() { - Pose2d robotPose = RobotContainer.instance.drivetrain.getPose(); + Pose2d robotPose = poseSupplier.get(); return new Pose2d( Robot.isBlue() ? 
BARGE_BLUE_X : BARGE_RED_X, @@ -609,9 +617,9 @@ public Pose3d getCoralPose3D() { @NotLogged public Pose2d getCoralPickupPose() { if (coralPickupPose == null && coralPoseValid) { - Translation2d robotPosition = RobotContainer.instance.drivetrain.getPose().getTranslation(); + Translation2d robotPosition = poseSupplier.get().getTranslation(); Translation2d coralPosition = coralPose.getTranslation(); - double pickupOffset = RobotContainer.instance.coralIntakeReady.getAsBoolean() ? 0.6 : 0.9; + double pickupOffset = true ? 0.6 : 0.9; double centerDistance = robotPosition.getDistance(coralPosition); double pickupDistance = centerDistance - pickupOffset; @@ -689,4 +697,13 @@ private void cameraTemperatureAlert(Alert alert, String cameraName, double tempe alert.set(false); } } + + /** Vision Consumer */ + @FunctionalInterface + public static interface VisionConsumer { + public void accept( + Pose2d visionRobotPoseMeters, + double timestampSeconds, + Matrix visionMeasurementStdDevs); + } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java index cd02f1d..bffec5a 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java @@ -252,7 +252,7 @@ public static final class ShooterConstants { // April Tag Layout public static final AprilTagFieldLayout kAprilTagLayout = - AprilTagFields.k2024Crescendo.loadAprilTagLayoutField(); + AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); public static final double kFieldWidthMeters = kAprilTagLayout.getFieldWidth(); // distance between field walls, diff --git a/src/main/java/frc/robot/util/LimelightHelpers.java b/src/main/java/frc/robot/util/LimelightHelpers.java index 9b7b20d..c00dcc9 100644 --- a/src/main/java/frc/robot/util/LimelightHelpers.java +++ b/src/main/java/frc/robot/util/LimelightHelpers.java @@ -1,9 +1,7 @@ -// LimelightHelpers v1.6 (April 9, 
2024) +// LimelightHelpers v1.11 (REQUIRES LLOS 2025.0 OR LATER) -// Copyright (c) 2025 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2024 FRC 254 -// https://github.com/team254 +// Copyright (c) 2025 FRC 180 +// https://github.com/frc180 // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License @@ -30,17 +28,29 @@ import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.util.Units; +import edu.wpi.first.networktables.DoubleArrayEntry; import edu.wpi.first.networktables.NetworkTable; import edu.wpi.first.networktables.NetworkTableEntry; import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.networktables.TimestampedDoubleArray; import java.io.IOException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; +import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +/** + * LimelightHelpers provides static methods and classes for interfacing with Limelight vision + * cameras in FRC. This library supports all Limelight features including AprilTag tracking, Neural + * Networks, and standard color/retroreflective tracking. 
+ */ public class LimelightHelpers { + private static final Map doubleArrayEntries = new ConcurrentHashMap<>(); + + /** Represents a Color/Retroreflective Target Result extracted from JSON Output */ public static class LimelightTarget_Retro { @JsonProperty("t6c_ts") @@ -104,15 +114,21 @@ public Pose2d getTargetPose_RobotSpace2D() { @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -125,6 +141,7 @@ public LimelightTarget_Retro() { } } + /** Represents an AprilTag/Fiducial Target Result extracted from JSON Output */ public static class LimelightTarget_Fiducial { @JsonProperty("fID") @@ -194,15 +211,21 @@ public Pose2d getTargetPose_RobotSpace2D() { @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -215,8 +238,49 @@ public LimelightTarget_Fiducial() { } } - public static class LimelightTarget_Barcode {} + /** Represents a Barcode Target Result extracted from JSON Output */ + public static class LimelightTarget_Barcode { + + /** Barcode family type (e.g. "QR", "DataMatrix", etc.) 
*/ + @JsonProperty("fam") + public String family; + + /** Gets the decoded data content of the barcode */ + @JsonProperty("data") + public String data; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("typ") + public double ty_pixels; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + + @JsonProperty("ta") + public double ta; + + @JsonProperty("pts") + public double[][] corners; + public LimelightTarget_Barcode() {} + + public String getFamily() { + return family; + } + } + + /** Represents a Neural Classifier Pipeline Result extracted from JSON Output */ public static class LimelightTarget_Classifier { @JsonProperty("class") @@ -246,6 +310,7 @@ public static class LimelightTarget_Classifier { public LimelightTarget_Classifier() {} } + /** Represents a Neural Detector Pipeline Result extracted from JSON Output */ public static class LimelightTarget_Detector { @JsonProperty("class") @@ -263,19 +328,28 @@ public static class LimelightTarget_Detector { @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + public LimelightTarget_Detector() {} } - public static class Results { + /** Limelight Results object, parsed from a Limelight's JSON results output. 
*/ + public static class LimelightResults { + + public String error; @JsonProperty("pID") public double pipelineID; @@ -361,7 +435,7 @@ public Pose2d getBotPose2d_wpiBlue() { @JsonProperty("Barcode") public LimelightTarget_Barcode[] targets_Barcode; - public Results() { + public LimelightResults() { botpose = new double[6]; botpose_wpired = new double[6]; botpose_wpiblue = new double[6]; @@ -374,18 +448,7 @@ public Results() { } } - public static class LimelightResults { - @JsonProperty("Results") - public Results targetingResults; - - public String error; - - public LimelightResults() { - targetingResults = new Results(); - error = ""; - } - } - + /** Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. */ public static class RawFiducial { public int id = 0; public double txnc = 0; @@ -413,6 +476,7 @@ public RawFiducial( } } + /** Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. */ public static class RawDetection { public int classId = 0; public double txnc = 0; @@ -455,6 +519,7 @@ public RawDetection( } } + /** Represents a 3D Pose Estimate. 
*/ public static class PoseEstimate { public Pose2d pose; public double timestampSeconds; @@ -463,7 +528,22 @@ public static class PoseEstimate { public double tagSpan; public double avgTagDist; public double avgTagArea; + public RawFiducial[] rawFiducials; + public boolean isMegaTag2; + + /** Instantiates a PoseEstimate object with default values */ + public PoseEstimate() { + this.pose = new Pose2d(); + this.timestampSeconds = 0; + this.latency = 0; + this.tagCount = 0; + this.tagSpan = 0; + this.avgTagDist = 0; + this.avgTagArea = 0; + this.rawFiducials = new RawFiducial[] {}; + this.isMegaTag2 = false; + } public PoseEstimate( Pose2d pose, @@ -473,7 +553,8 @@ public PoseEstimate( double tagSpan, double avgTagDist, double avgTagArea, - RawFiducial[] rawFiducials) { + RawFiducial[] rawFiducials, + boolean isMegaTag2) { this.pose = pose; this.timestampSeconds = timestampSeconds; @@ -483,6 +564,38 @@ public PoseEstimate( this.avgTagDist = avgTagDist; this.avgTagArea = avgTagArea; this.rawFiducials = rawFiducials; + this.isMegaTag2 = isMegaTag2; + } + } + + /** Encapsulates the state of an internal Limelight IMU. 
*/ + public static class IMUData { + public double robotYaw = 0.0; + public double Roll = 0.0; + public double Pitch = 0.0; + public double Yaw = 0.0; + public double gyroX = 0.0; + public double gyroY = 0.0; + public double gyroZ = 0.0; + public double accelX = 0.0; + public double accelY = 0.0; + public double accelZ = 0.0; + + public IMUData() {} + + public IMUData(double[] imuData) { + if (imuData != null && imuData.length >= 10) { + this.robotYaw = imuData[0]; + this.Roll = imuData[1]; + this.Pitch = imuData[2]; + this.Yaw = imuData[3]; + this.gyroX = imuData[4]; + this.gyroY = imuData[5]; + this.gyroZ = imuData[6]; + this.accelX = imuData[7]; + this.accelY = imuData[8]; + this.accelZ = imuData[9]; + } } } @@ -498,8 +611,16 @@ static final String sanitizeName(String name) { return name; } - private static Pose3d toPose3D(double[] inData) { + /** + * Takes a 6-length array of pose data and converts it to a Pose3d object. Array format: [x, y, z, + * roll, pitch, yaw] where angles are in degrees. + * + * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose3d object representing the pose, or empty Pose3d if invalid data + */ + public static Pose3d toPose3D(double[] inData) { if (inData.length < 6) { + // System.err.println("Bad LL 3D Pose Data!"); return new Pose3d(); } return new Pose3d( @@ -510,8 +631,17 @@ private static Pose3d toPose3D(double[] inData) { Units.degreesToRadians(inData[5]))); } - private static Pose2d toPose2D(double[] inData) { + /** + * Takes a 6-length array of pose data and converts it to a Pose2d object. Uses only x, y, and yaw + * components, ignoring z, roll, and pitch. Array format: [x, y, z, roll, pitch, yaw] where angles + * are in degrees. 
+ * + * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose2d object representing the pose, or empty Pose2d if invalid data + */ + public static Pose2d toPose2D(double[] inData) { if (inData.length < 6) { + // System.err.println("Bad LL 2D Pose Data!"); return new Pose2d(); } Translation2d tran2d = new Translation2d(inData[0], inData[1]); @@ -519,6 +649,43 @@ private static Pose2d toPose2D(double[] inData) { return new Pose2d(tran2d, r2d); } + /** + * Converts a Pose3d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. + * + * @param pose The Pose3d object to convert + * @return A 6-element array containing [x, y, z, roll, pitch, yaw] + */ + public static double[] pose3dToArray(Pose3d pose) { + double[] result = new double[6]; + result[0] = pose.getTranslation().getX(); + result[1] = pose.getTranslation().getY(); + result[2] = pose.getTranslation().getZ(); + result[3] = Units.radiansToDegrees(pose.getRotation().getX()); + result[4] = Units.radiansToDegrees(pose.getRotation().getY()); + result[5] = Units.radiansToDegrees(pose.getRotation().getZ()); + return result; + } + + /** + * Converts a Pose2d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. Note: z, roll, and + * pitch will be 0 since Pose2d only contains x, y, and yaw. 
+ * + * @param pose The Pose2d object to convert + * @return A 6-element array containing [x, y, 0, 0, 0, yaw] + */ + public static double[] pose2dToArray(Pose2d pose) { + double[] result = new double[6]; + result[0] = pose.getTranslation().getX(); + result[1] = pose.getTranslation().getY(); + result[2] = 0; + result[3] = Units.radiansToDegrees(0); + result[4] = Units.radiansToDegrees(0); + result[5] = Units.radiansToDegrees(pose.getRotation().getRadians()); + return result; + } + private static double extractArrayEntry(double[] inData, int position) { if (inData.length < position + 1) { return 0; @@ -526,17 +693,29 @@ private static double extractArrayEntry(double[] inData, int position) { return inData[position]; } - private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName) { - var poseEntry = LimelightHelpers.getLimelightNTTableEntry(limelightName, entryName); - var poseArray = poseEntry.getDoubleArray(new double[0]); + private static PoseEstimate getBotPoseEstimate( + String limelightName, String entryName, boolean isMegaTag2) { + DoubleArrayEntry poseEntry = + LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); + + TimestampedDoubleArray tsValue = poseEntry.getAtomic(); + double[] poseArray = tsValue.value; + long timestamp = tsValue.timestamp; + + if (poseArray.length == 0) { + // Handle the case where no data is available + return null; // or some default PoseEstimate + } + var pose = toPose2D(poseArray); double latency = extractArrayEntry(poseArray, 6); int tagCount = (int) extractArrayEntry(poseArray, 7); double tagSpan = extractArrayEntry(poseArray, 8); double tagDist = extractArrayEntry(poseArray, 9); double tagArea = extractArrayEntry(poseArray, 10); - // getlastchange() in microseconds, ll latency in milliseconds - var timestamp = (poseEntry.getLastChange() / 1000000.0) - (latency / 1000.0); + + // Convert server timestamp from microseconds to seconds and adjust for latency + double adjustedTimestamp = 
(timestamp / 1000000.0) - (latency / 1000.0); RawFiducial[] rawFiducials = new RawFiducial[tagCount]; int valsPerFiducial = 7; @@ -559,11 +738,24 @@ private static PoseEstimate getBotPoseEstimate(String limelightName, String entr } return new PoseEstimate( - pose, timestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials); + pose, + adjustedTimestamp, + latency, + tagCount, + tagSpan, + tagDist, + tagArea, + rawFiducials, + isMegaTag2); } - @SuppressWarnings("unused") - private static RawFiducial[] getRawFiducials(String limelightName) { + /** + * Gets the latest raw fiducial/AprilTag detection results from NetworkTables. + * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawFiducial objects containing detection details + */ + public static RawFiducial[] getRawFiducials(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawfiducials"); var rawFiducialArray = entry.getDoubleArray(new double[0]); int valsPerEntry = 7; @@ -590,10 +782,16 @@ private static RawFiducial[] getRawFiducials(String limelightName) { return rawFiducials; } + /** + * Gets the latest raw neural detector results from NetworkTables + * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawDetection objects containing detection details + */ public static RawDetection[] getRawDetections(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawdetections"); var rawDetectionArray = entry.getDoubleArray(new double[0]); - int valsPerEntry = 11; + int valsPerEntry = 12; if (rawDetectionArray.length % valsPerEntry != 0) { return new RawDetection[0]; } @@ -625,8 +823,14 @@ public static RawDetection[] getRawDetections(String limelightName) { return rawDetections; } - @SuppressWarnings("unused") - private static void printPoseEstimate(PoseEstimate pose) { + /** + * Prints detailed information about a PoseEstimate to standard output. 
Includes timestamp, + * latency, tag count, tag span, average tag distance, average tag area, and detailed information + * about each detected fiducial. + * + * @param pose The PoseEstimate object to print. If null, prints "No PoseEstimate available." + */ + public static void printPoseEstimate(PoseEstimate pose) { if (pose == null) { System.out.println("No PoseEstimate available."); return; @@ -639,6 +843,7 @@ private static void printPoseEstimate(PoseEstimate pose) { System.out.printf("Tag Span: %.2f meters%n", pose.tagSpan); System.out.printf("Average Tag Distance: %.2f meters%n", pose.avgTagDist); System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); + System.out.printf("Is MegaTag2: %b%n", pose.isMegaTag2); System.out.println(); if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { @@ -661,14 +866,32 @@ private static void printPoseEstimate(PoseEstimate pose) { } } + public static Boolean validPoseEstimate(PoseEstimate pose) { + return pose != null && pose.rawFiducials != null && pose.rawFiducials.length != 0; + } + public static NetworkTable getLimelightNTTable(String tableName) { return NetworkTableInstance.getDefault().getTable(sanitizeName(tableName)); } + public static void Flush() { + NetworkTableInstance.getDefault().flush(); + } + public static NetworkTableEntry getLimelightNTTableEntry(String tableName, String entryName) { return getLimelightNTTable(tableName).getEntry(entryName); } + public static DoubleArrayEntry getLimelightDoubleArrayEntry(String tableName, String entryName) { + String key = tableName + "/" + entryName; + return doubleArrayEntries.computeIfAbsent( + key, + k -> { + NetworkTable table = getLimelightNTTable(tableName); + return table.getDoubleArrayTopic(entryName).getEntry(new double[0]); + }); + } + public static double getLimelightNTDouble(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getDouble(0.0); } @@ -689,6 +912,10 @@ public static String 
getLimelightNTString(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getString(""); } + public static String[] getLimelightNTStringArray(String tableName, String entryName) { + return getLimelightNTTableEntry(tableName, entryName).getStringArray(new String[0]); + } + public static URL getLimelightURLString(String tableName, String request) { String urlString = "http://" + sanitizeName(tableName) + ".local:5807/" + request; URL url; @@ -704,30 +931,192 @@ public static URL getLimelightURLString(String tableName, String request) { ///// ///// + /** + * Does the Limelight have a valid target? + * + * @param limelightName Name of the Limelight camera ("" for default) + * @return True if a valid target is present, false otherwise + */ + public static boolean getTV(String limelightName) { + return 1.0 == getLimelightNTDouble(limelightName, "tv"); + } + + /** + * Gets the horizontal offset from the crosshair to the target in degrees. + * + * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ public static double getTX(String limelightName) { return getLimelightNTDouble(limelightName, "tx"); } + /** + * Gets the vertical offset from the crosshair to the target in degrees. + * + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ public static double getTY(String limelightName) { return getLimelightNTDouble(limelightName, "ty"); } + /** + * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is the + * most accurate 2d metric if you are using a calibrated camera and you don't need adjustable + * crosshair functionality. 
+ * + * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ + public static double getTXNC(String limelightName) { + return getLimelightNTDouble(limelightName, "txnc"); + } + + /** + * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the + * most accurate 2d metric if you are using a calibrated camera and you don't need adjustable + * crosshair functionality. + * + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ + public static double getTYNC(String limelightName) { + return getLimelightNTDouble(limelightName, "tync"); + } + + /** + * Gets the target area as a percentage of the image (0-100%). + * + * @param limelightName Name of the Limelight camera ("" for default) + * @return Target area percentage (0-100) + */ public static double getTA(String limelightName) { return getLimelightNTDouble(limelightName, "ta"); } + /** + * T2D is an array that contains several targeting metrcis + * + * @param limelightName Name of the Limelight camera + * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, + * txnc, tync, ta, tid, targetClassIndexDetector, targetClassIndexClassifier, + * targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, + * targetVerticalExtentPixels, targetSkewDegrees] + */ + public static double[] getT2DArray(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "t2d"); + } + + /** + * Gets the number of targets currently detected. 
+ * + * @param limelightName Name of the Limelight camera + * @return Number of detected targets + */ + public static int getTargetCount(String limelightName) { + double[] t2d = getT2DArray(limelightName); + if (t2d.length == 17) { + return (int) t2d[1]; + } + return 0; + } + + /** + * Gets the classifier class index from the currently running neural classifier pipeline + * + * @param limelightName Name of the Limelight camera + * @return Class index from classifier pipeline + */ + public static int getClassifierClassIndex(String limelightName) { + double[] t2d = getT2DArray(limelightName); + if (t2d.length == 17) { + return (int) t2d[10]; + } + return 0; + } + + /** + * Gets the detector class index from the primary result of the currently running neural detector + * pipeline. + * + * @param limelightName Name of the Limelight camera + * @return Class index from detector pipeline + */ + public static int getDetectorClassIndex(String limelightName) { + double[] t2d = getT2DArray(limelightName); + if (t2d.length == 17) { + return (int) t2d[11]; + } + return 0; + } + + /** + * Gets the current neural classifier result class name. + * + * @param limelightName Name of the Limelight camera + * @return Class name string from classifier pipeline + */ + public static String getClassifierClass(String limelightName) { + return getLimelightNTString(limelightName, "tcclass"); + } + + /** + * Gets the primary neural detector result class name. + * + * @param limelightName Name of the Limelight camera + * @return Class name string from detector pipeline + */ + public static String getDetectorClass(String limelightName) { + return getLimelightNTString(limelightName, "tdclass"); + } + + /** + * Gets the pipeline's processing latency contribution. 
+ * + * @param limelightName Name of the Limelight camera + * @return Pipeline latency in milliseconds + */ public static double getLatency_Pipeline(String limelightName) { return getLimelightNTDouble(limelightName, "tl"); } + /** + * Gets the capture latency. + * + * @param limelightName Name of the Limelight camera + * @return Capture latency in milliseconds + */ public static double getLatency_Capture(String limelightName) { return getLimelightNTDouble(limelightName, "cl"); } + /** + * Gets the active pipeline index. + * + * @param limelightName Name of the Limelight camera + * @return Current pipeline index (0-9) + */ public static double getCurrentPipelineIndex(String limelightName) { return getLimelightNTDouble(limelightName, "getpipe"); } + /** + * Gets the current pipeline type. + * + * @param limelightName Name of the Limelight camera + * @return Pipeline type string (e.g. "retro", "apriltag", etc) + */ + public static String getCurrentPipelineType(String limelightName) { + return getLimelightNTString(limelightName, "getpipetype"); + } + + /** + * Gets the full JSON results dump. + * + * @param limelightName Name of the Limelight camera + * @return JSON string containing all current results + */ public static String getJSONDump(String limelightName) { return getLimelightNTString(limelightName, "json"); } @@ -805,6 +1194,10 @@ public static String getNeuralClassID(String limelightName) { return getLimelightNTString(limelightName, "tclass"); } + public static String[] getRawBarcodeData(String limelightName) { + return getLimelightNTStringArray(limelightName, "rawbarcodes"); + } + ///// ///// @@ -813,36 +1206,80 @@ public static Pose3d getBotPose3d(String limelightName) { return toPose3D(poseArray); } + /** + * (Not Recommended) Gets the robot's 3D pose in the WPILib Red Alliance Coordinate System. 
+ * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Red Alliance field + * space + */ public static Pose3d getBotPose3d_wpiRed(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); return toPose3D(poseArray); } + /** + * (Recommended) Gets the robot's 3D pose in the WPILib Blue Alliance Coordinate System. + * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Blue Alliance field + * space + */ public static Pose3d getBotPose3d_wpiBlue(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); return toPose3D(poseArray); } + /** + * Gets the robot's 3D pose with respect to the currently tracked target's coordinate system. + * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation relative to the target + */ public static Pose3d getBotPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the currently tracked target's coordinate system. + * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the target + */ public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the camera's coordinate system. 
+ * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the camera + */ public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the robot's coordinate system. + * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the robot + */ public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the robot's coordinate system. + * + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the robot + */ public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); return toPose3D(poseArray); @@ -861,25 +1298,26 @@ public static Pose2d getBotPose2d_wpiBlue(String limelightName) { } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the BLUE alliance + * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator + * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. 
* * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_wpiblue", false); } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the BLUE alliance + * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator + * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. Make sure you are calling + * setRobotOrientation() before calling this method. * * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue", true); } /** @@ -902,7 +1340,7 @@ public static Pose2d getBotPose2d_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpired"); + return getBotPoseEstimate(limelightName, "botpose_wpired", false); } /** @@ -913,7 +1351,7 @@ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpired"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpired", true); } /** @@ -928,8 +1366,20 @@ public static Pose2d getBotPose2d(String limelightName) { return toPose2D(result); } - public static boolean getTV(String limelightName) { - return 1.0 == getLimelightNTDouble(limelightName, "tv"); + /** + * Gets the current IMU data from NetworkTables. IMU data is formatted as [robotYaw, Roll, Pitch, + * Yaw, gyroX, gyroY, gyroZ, accelX, accelY, accelZ]. Returns all zeros if data is invalid or + * unavailable. 
+ * + * @param limelightName Name/identifier of the Limelight + * @return IMUData object containing all current IMU data + */ + public static IMUData getIMUData(String limelightName) { + double[] imuData = getLimelightNTDoubleArray(limelightName, "imu"); + if (imuData == null || imuData.length < 10) { + return new IMUData(); // Returns object with all zeros + } + return new IMUData(imuData); } ///// @@ -943,7 +1393,11 @@ public static void setPriorityTagID(String limelightName, int ID) { setLimelightNTDouble(limelightName, "priorityid", ID); } - /** The LEDs will be controlled by Limelight pipeline settings, and not by robot code. */ + /** + * Sets LED mode to be controlled by the current pipeline. + * + * @param limelightName Name of the Limelight camera + */ public static void setLEDMode_PipelineControl(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 0); } @@ -960,29 +1414,41 @@ public static void setLEDMode_ForceOn(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 3); } + /** + * Enables standard side-by-side stream mode. + * + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_Standard(String limelightName) { setLimelightNTDouble(limelightName, "stream", 0); } + /** + * Enables Picture-in-Picture mode with secondary stream in the corner. + * + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPMain(String limelightName) { setLimelightNTDouble(limelightName, "stream", 1); } + /** + * Enables Picture-in-Picture mode with primary stream in the corner. 
+ * + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPSecondary(String limelightName) { setLimelightNTDouble(limelightName, "stream", 2); } - public static void setCameraMode_Processor(String limelightName) { - setLimelightNTDouble(limelightName, "camMode", 0); - } - - public static void setCameraMode_Driver(String limelightName) { - setLimelightNTDouble(limelightName, "camMode", 1); - } - /** - * Sets the crop window. The crop window in the UI must be completely open for dynamic cropping to - * work. + * Sets the crop window for the camera. The crop window in the UI must be completely open. + * + * @param limelightName Name of the Limelight camera + * @param cropXMin Minimum X value (-1 to 1) + * @param cropXMax Maximum X value (-1 to 1) + * @param cropYMin Minimum Y value (-1 to 1) + * @param cropYMax Maximum Y value (-1 to 1) */ public static void setCropWindow( String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { @@ -994,6 +1460,27 @@ public static void setCropWindow( setLimelightNTDoubleArray(limelightName, "crop", entries); } + /** Sets 3D offset point for easy 3D targeting. */ + public static void setFiducial3DOffset( + String limelightName, double offsetX, double offsetY, double offsetZ) { + double[] entries = new double[3]; + entries[0] = offsetX; + entries[1] = offsetY; + entries[2] = offsetZ; + setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); + } + + /** + * Sets robot orientation values used by MegaTag2 localization algorithm. + * + * @param limelightName Name/identifier of the Limelight + * @param yaw Robot yaw in degrees. 
0 = robot facing red alliance wall in FRC + * @param yawRate (Unnecessary) Angular velocity of robot yaw in degrees per second + * @param pitch (Unnecessary) Robot pitch in degrees + * @param pitchRate (Unnecessary) Angular velocity of robot pitch in degrees per second + * @param roll (Unnecessary) Robot roll in degrees + * @param rollRate (Unnecessary) Angular velocity of robot roll in degrees per second + */ public static void SetRobotOrientation( String limelightName, double yaw, @@ -1002,6 +1489,31 @@ public static void SetRobotOrientation( double pitchRate, double roll, double rollRate) { + SetRobotOrientation_INTERNAL( + limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, true); + } + + public static void SetRobotOrientation_NoFlush( + String limelightName, + double yaw, + double yawRate, + double pitch, + double pitchRate, + double roll, + double rollRate) { + SetRobotOrientation_INTERNAL( + limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, false); + } + + private static void SetRobotOrientation_INTERNAL( + String limelightName, + double yaw, + double yawRate, + double pitch, + double pitchRate, + double roll, + double rollRate, + boolean flush) { double[] entries = new double[6]; entries[0] = yaw; @@ -1011,8 +1523,46 @@ public static void SetRobotOrientation( entries[4] = roll; entries[5] = rollRate; setLimelightNTDoubleArray(limelightName, "robot_orientation_set", entries); + if (flush) { + Flush(); + } + } + + /** + * Configures the IMU mode for MegaTag2 Localization + * + * @param limelightName Name/identifier of the Limelight + * @param mode IMU mode. + */ + public static void SetIMUMode(String limelightName, int mode) { + setLimelightNTDouble(limelightName, "imumode_set", mode); + } + + /** + * Sets the 3D point-of-interest offset for the current fiducial pipeline. 
+ * https://docs.limelightvision.io/docs/docs-limelight/pipeline-apriltag/apriltag-3d#point-of-interest-tracking + * + * @param limelightName Name/identifier of the Limelight + * @param x X offset in meters + * @param y Y offset in meters + * @param z Z offset in meters + */ + public static void SetFidcuial3DOffset(String limelightName, double x, double y, double z) { + + double[] entries = new double[3]; + entries[0] = x; + entries[1] = y; + entries[2] = z; + setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); } + /** + * Overrides the valid AprilTag IDs that will be used for localization. Tags not in this list will + * be ignored for robot pose estimation. + * + * @param limelightName Name/identifier of the Limelight + * @param validIDs Array of valid AprilTag IDs to track + */ public static void SetFiducialIDFiltersOverride(String limelightName, int[] validIDs) { double[] validIDsDouble = new double[validIDs.length]; for (int i = 0; i < validIDs.length; i++) { @@ -1021,6 +1571,14 @@ public static void SetFiducialIDFiltersOverride(String limelightName, int[] vali setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); } + /** + * Sets the downscaling factor for AprilTag detection. Increasing downscale can improve + * performance at the cost of potentially reduced detection range. + * + * @param limelightName Name/identifier of the Limelight + * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set to + * 0 for pipeline control. + */ public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { int d = 0; // pipeline if (downscale == 1.0) { @@ -1041,6 +1599,17 @@ public static void SetFiducialDownscalingOverride(String limelightName, float do setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); } + /** + * Sets the camera pose relative to the robot. 
+ * + * @param limelightName Name of the Limelight camera + * @param forward Forward offset in meters + * @param side Side offset in meters + * @param up Up offset in meters + * @param roll Roll angle in degrees + * @param pitch Pitch angle in degrees + * @param yaw Yaw angle in degrees + */ public static void setCameraPose_RobotSpace( String limelightName, double forward, @@ -1102,7 +1671,12 @@ private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) return false; } - /** Parses Limelight's JSON results dump into a LimelightResults Object */ + /** + * Gets the latest JSON results output and returns a LimelightResults object. + * + * @param limelightName Name of the Limelight camera + * @return LimelightResults object containing all current target data + */ public static LimelightResults getLatestResults(String limelightName) { long start = System.nanoTime(); @@ -1120,7 +1694,7 @@ public static LimelightResults getLatestResults(String limelightName) { long end = System.nanoTime(); double millis = (end - start) * .000001; - results.targetingResults.latency_jsonParse = millis; + results.latency_jsonParse = millis; if (profileJSON) { System.out.printf("lljson: %.2f\r\n", millis); } From 48f0de286f1d365cbf0106127d6c1b9fed659b2d Mon Sep 17 00:00:00 2001 From: "Timothy P. 
Ellsworth Bowers" Date: Tue, 11 Nov 2025 08:08:12 -0700 Subject: [PATCH 07/12] Add 6328's 2025 drive-to-pose algorithm modified: src/main/java/frc/robot/Constants.java new file: src/main/java/frc/robot/commands/DriveToPose.java new file: src/main/java/frc/robot/commands/DriveToPoseFast.java --- src/main/java/frc/robot/Constants.java | 41 +++ .../java/frc/robot/commands/DriveToPose.java | 209 +++++++++++++++ .../frc/robot/commands/DriveToPoseFast.java | 241 ++++++++++++++++++ 3 files changed, 491 insertions(+) create mode 100644 src/main/java/frc/robot/commands/DriveToPose.java create mode 100644 src/main/java/frc/robot/commands/DriveToPoseFast.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index 2b36b15..fb0a7cf 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -27,6 +27,7 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.system.plant.DCMotor; import edu.wpi.first.math.util.Units; @@ -374,6 +375,46 @@ public static class CANandPowerPorts { // public static final int INTAKE_SERVO = 0; } + public static class DriveToPositionConstatnts { + + // The robot is facing AWAY from the tag, so its pose angle matches that of the tag. + // Scoring position has the bumpers 3" from the tag. Bumper-to-center distance is 18", ergo the + // robot pose is 21" from the tag. 
+ public static Translation2d kLeftReefPost = + new Translation2d(Units.inchesToMeters(17.5), Units.inchesToMeters(-5.9)); + // public static Translation2d kLeftReefPostClose = + // new Translation2d(Units.inchesToMeters(16.75), Units.inchesToMeters(-6.75)); + public static Translation2d kRightReefPost = + new Translation2d(Units.inchesToMeters(17.5), Units.inchesToMeters(+6.5)); + public static Translation2d kRightReefPostClose = + new Translation2d(Units.inchesToMeters(16.75), Units.inchesToMeters(+6)); + public static Translation2d kAlgaeGrab = + new Translation2d(Units.inchesToMeters(26.0), Units.inchesToMeters(0.0)); + + public static Translation2d kProcessor = + new Translation2d(Units.inchesToMeters(26.0), Units.inchesToMeters(+6.0)); + + public static Translation2d kStation = + new Translation2d(Units.inchesToMeters(18.0), Units.inchesToMeters(0.0)); + + // Constants used by the DriveToPose Command + public static final double drivekP = AutoConstants.kPPdrivePID.kP; // 0.8; + public static final double drivekD = AutoConstants.kPPdrivePID.kD; // 0.0; + public static final double thetakP = AutoConstants.kPPsteerPID.kP; // 4.0; + public static final double thetakD = AutoConstants.kPPsteerPID.kD; // 0.0; + + // Values in m/s + public static final double driveMaxVelocity = 3.8; + public static final double driveMaxAcceleration = 3.5; + public static final double thetaMaxVelocity = Units.degreesToRadians(360.0); + public static final double thetaMaxAcceleration = Units.degreesToRadians(480.0); + + public static final double driveTolerance = 0.01; + public static final double thetaTolerance = Units.degreesToRadians(1.0); + public static final double ffMinRadius = 0.05; // 0.2; + public static final double ffMaxRadius = 0.1; // 0.8; + } + /** AprilTag Field Layout ************************************************ */ /* SEASON SPECIFIC! 
-- This section is for 2025 (Reefscape) */ public static class AprilTagConstants { diff --git a/src/main/java/frc/robot/commands/DriveToPose.java b/src/main/java/frc/robot/commands/DriveToPose.java new file mode 100644 index 0000000..3cfb335 --- /dev/null +++ b/src/main/java/frc/robot/commands/DriveToPose.java @@ -0,0 +1,209 @@ +// Copyright (c) 2025 FRC 2486 +// http://github.com/Coconuts2486-FRC +// Copyright (c) 2025 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file at +// the root directory of this project. + +package frc.robot.commands; + +import static frc.robot.Constants.DriveToPositionConstatnts.*; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.controller.ProfiledPIDController; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; +import edu.wpi.first.math.trajectory.TrapezoidProfile; +import edu.wpi.first.wpilibj.Timer; +import edu.wpi.first.wpilibj2.command.Command; +import frc.robot.Constants; +import frc.robot.Constants.DrivebaseConstants; +import frc.robot.subsystems.drive.Drive; +import frc.robot.util.GeomUtil; +import java.util.function.DoubleSupplier; +import java.util.function.Supplier; +import lombok.Getter; +import org.littletonrobotics.junction.Logger; + +/** + * Drive to Pose Command + * + *

This command is used in both teleop and auto periods for autonomously driving the robot to a + * desired pose. + */ +public class DriveToPose extends Command { + + private final Drive drive; + private final Supplier target; + + // Profiled PID controllers to run trapezoidal motion from current to desired pose + private final ProfiledPIDController driveController = + new ProfiledPIDController( + 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); + private final ProfiledPIDController thetaController = + new ProfiledPIDController( + 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); + + // Various other values + private Translation2d lastSetpointTranslation = Translation2d.kZero; + private Rotation2d lastSetpointRotation = Rotation2d.kZero; + private double lastTime = 0.0; + private double driveErrorAbs = 0.0; + private double thetaErrorAbs = 0.0; + @Getter private boolean running = false; + + private Supplier linearFF = () -> Translation2d.kZero; + private DoubleSupplier omegaFF = () -> 0.0; + + /** Constructor */ + public DriveToPose(Drive drive, Supplier target) { + this.drive = drive; + this.target = target; + + // Enable continuous input for theta controller + thetaController.enableContinuousInput(-Math.PI, Math.PI); + + // Lock-out Drive from other commands + addRequirements(drive); + } + + /** Initialize the Command */ + @Override + public void initialize() { + Pose2d currentPose = drive.getPose(); + ChassisSpeeds fieldVelocity = + ChassisSpeeds.fromRobotRelativeSpeeds( + drive.getChassisSpeeds(), drive.getPose().getRotation()); + Translation2d linearFieldVelocity = + new Translation2d(fieldVelocity.vxMetersPerSecond, fieldVelocity.vyMetersPerSecond); + driveController.reset( + currentPose.getTranslation().getDistance(target.get().getTranslation()), + Math.min( + 0.0, + -linearFieldVelocity + .rotateBy( + target + .get() + .getTranslation() + .minus(currentPose.getTranslation()) + .getAngle() + 
.unaryMinus()) + .getX())); + thetaController.reset( + currentPose.getRotation().getRadians(), fieldVelocity.omegaRadiansPerSecond); + lastSetpointTranslation = currentPose.getTranslation(); + lastSetpointRotation = target.get().getRotation(); + lastTime = Timer.getTimestamp(); + } + + /** Execute the command */ + @Override + public void execute() { + running = true; + + // Get current pose and target pose + Pose2d currentPose = drive.getPose(); + Pose2d targetPose = target.get(); + + // Calculate drive speed + double currentDistance = currentPose.getTranslation().getDistance(targetPose.getTranslation()); + double ffScaler = + MathUtil.clamp((currentDistance - ffMinRadius) / (ffMaxRadius - ffMinRadius), 0.0, 1.0); + driveErrorAbs = currentDistance; + driveController.reset( + lastSetpointTranslation.getDistance(targetPose.getTranslation()), + driveController.getSetpoint().velocity); + double driveVelocityScalar = + driveController.calculate(driveErrorAbs, 0.0) + + driveController.getSetpoint().velocity * ffScaler; + if (currentDistance < driveController.getPositionTolerance()) driveVelocityScalar = 0.0; + lastSetpointTranslation = + new Pose2d( + targetPose.getTranslation(), + new Rotation2d( + Math.atan2( + currentPose.getTranslation().getY() - targetPose.getTranslation().getY(), + currentPose.getTranslation().getX() - targetPose.getTranslation().getX()))) + .transformBy(GeomUtil.toTransform2d(driveController.getSetpoint().position, 0.0)) + .getTranslation(); + + // Calculate theta speed + double thetaVelocity = + thetaController.calculate( + currentPose.getRotation().getRadians(), + new TrapezoidProfile.State( + targetPose.getRotation().getRadians(), + (targetPose.getRotation().minus(lastSetpointRotation)).getRadians() + / (Timer.getTimestamp() - lastTime))) + + thetaController.getSetpoint().velocity * ffScaler; + thetaErrorAbs = + Math.abs(currentPose.getRotation().minus(targetPose.getRotation()).getRadians()); + if (thetaErrorAbs < 
thetaController.getPositionTolerance()) thetaVelocity = 0.0; + lastSetpointRotation = targetPose.getRotation(); + Translation2d driveVelocity = + new Pose2d( + Translation2d.kZero, + new Rotation2d( + Math.atan2( + currentPose.getTranslation().getY() - targetPose.getTranslation().getY(), + currentPose.getTranslation().getX() - targetPose.getTranslation().getX()))) + .transformBy(GeomUtil.toTransform2d(driveVelocityScalar, 0.0)) + .getTranslation(); + lastTime = Timer.getTimestamp(); + + // Scale feedback velocities by input ff + final double linearS = linearFF.get().getNorm() * 3.0; + final double thetaS = Math.abs(omegaFF.getAsDouble()) * 3.0; + driveVelocity = + driveVelocity.interpolate( + linearFF.get().times(DrivebaseConstants.kMaxLinearSpeed), linearS); + thetaVelocity = + MathUtil.interpolate( + thetaVelocity, omegaFF.getAsDouble() * DrivebaseConstants.kMaxAngularSpeed, thetaS); + + // Command speeds + drive.runVelocity( + ChassisSpeeds.fromFieldRelativeSpeeds( + driveVelocity.getX(), driveVelocity.getY(), thetaVelocity, currentPose.getRotation())); + + // Log data + Logger.recordOutput("DriveToPose/DistanceMeasured", currentDistance); + Logger.recordOutput("DriveToPose/DistanceSetpoint", driveController.getSetpoint().position); + Logger.recordOutput("DriveToPose/ThetaMeasured", currentPose.getRotation().getRadians()); + Logger.recordOutput("DriveToPose/ThetaSetpoint", thetaController.getSetpoint().position); + Logger.recordOutput( + "DriveToPose/Setpoint", + new Pose2d[] { + new Pose2d( + lastSetpointTranslation, + Rotation2d.fromRadians(thetaController.getSetpoint().position)) + }); + Logger.recordOutput("DriveToPose/Goal", new Pose2d[] {targetPose}); + } + + /** End the command */ + @Override + public void end(boolean interrupted) { + drive.stop(); + running = false; + // Clear logs + Logger.recordOutput("DriveToPose/Setpoint", new Pose2d[] {}); + Logger.recordOutput("DriveToPose/Goal", new Pose2d[] {}); + } + + /** Checks if the robot is stopped at the 
final pose. */ + public boolean atGoal() { + return running && driveController.atGoal(); // && thetaController.atGoal(); + } + + /** Checks if the robot pose is within the allowed drive and theta tolerances. */ + public boolean withinTolerance(double driveTolerance, Rotation2d thetaTolerance) { + return running + && Math.abs(driveErrorAbs) < driveTolerance + && Math.abs(thetaErrorAbs) < thetaTolerance.getRadians(); + } +} diff --git a/src/main/java/frc/robot/commands/DriveToPoseFast.java b/src/main/java/frc/robot/commands/DriveToPoseFast.java new file mode 100644 index 0000000..1096c92 --- /dev/null +++ b/src/main/java/frc/robot/commands/DriveToPoseFast.java @@ -0,0 +1,241 @@ +// Copyright (c) 2025 FRC 2486 +// http://github.com/Coconuts2486-FRC +// Copyright (c) 2025 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file at +// the root directory of this project. + +package frc.robot.commands; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.controller.ProfiledPIDController; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; +import edu.wpi.first.math.trajectory.TrapezoidProfile; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.wpilibj2.command.Command; +import frc.robot.Constants; +import frc.robot.subsystems.drive.Drive; +import frc.robot.util.GeomUtil; +import frc.robot.util.LoggedTunableNumber; +import java.util.function.Supplier; +import lombok.Getter; +import org.littletonrobotics.junction.Logger; + +public class DriveToPoseFast extends Command { + private static final LoggedTunableNumber drivekP = new LoggedTunableNumber("DriveToPose/DrivekP"); + private static final LoggedTunableNumber drivekD = new LoggedTunableNumber("DriveToPose/DrivekD"); + private static final 
LoggedTunableNumber thetakP = new LoggedTunableNumber("DriveToPose/ThetakP"); + private static final LoggedTunableNumber thetakD = new LoggedTunableNumber("DriveToPose/ThetakD"); + private static final LoggedTunableNumber driveMaxVelocity = + new LoggedTunableNumber("DriveToPose/DriveMaxVelocity"); + private static final LoggedTunableNumber driveMaxVelocitySlow = + new LoggedTunableNumber("DriveToPose/DriveMaxVelocitySlow"); + private static final LoggedTunableNumber driveMaxAcceleration = + new LoggedTunableNumber("DriveToPose/DriveMaxAcceleration"); + private static final LoggedTunableNumber thetaMaxVelocity = + new LoggedTunableNumber("DriveToPose/ThetaMaxVelocity"); + private static final LoggedTunableNumber thetaMaxVelocitySlow = + new LoggedTunableNumber("DriveToPose/ThetaMaxVelocitySlow"); + private static final LoggedTunableNumber thetaMaxAcceleration = + new LoggedTunableNumber("DriveToPose/ThetaMaxAcceleration"); + private static final LoggedTunableNumber driveTolerance = + new LoggedTunableNumber("DriveToPose/DriveTolerance"); + private static final LoggedTunableNumber driveToleranceSlow = + new LoggedTunableNumber("DriveToPose/DriveToleranceSlow"); + private static final LoggedTunableNumber thetaTolerance = + new LoggedTunableNumber("DriveToPose/ThetaTolerance"); + private static final LoggedTunableNumber thetaToleranceSlow = + new LoggedTunableNumber("DriveToPose/ThetaToleranceSlow"); + private static final LoggedTunableNumber ffMinRadius = + new LoggedTunableNumber("DriveToPose/FFMinRadius"); + private static final LoggedTunableNumber ffMaxRadius = + new LoggedTunableNumber("DriveToPose/FFMaxRadius"); + + static { + drivekP.initDefault(9.0); + drivekD.initDefault(0.0); + thetakP.initDefault(4.0); + thetakD.initDefault(0.0); + // Values in m/s + driveMaxVelocity.initDefault(5); + driveMaxVelocitySlow.initDefault(1.5); + driveMaxAcceleration.initDefault(3.5); + thetaMaxVelocity.initDefault(Units.degreesToRadians(360.0)); + 
thetaMaxVelocitySlow.initDefault(Units.degreesToRadians(90.0)); + thetaMaxAcceleration.initDefault(Units.degreesToRadians(720.0)); + driveTolerance.initDefault(0.01); + driveToleranceSlow.initDefault(0.06); + thetaTolerance.initDefault(Units.degreesToRadians(1.0)); + thetaToleranceSlow.initDefault(Units.degreesToRadians(3.0)); + ffMinRadius.initDefault(0.2); + ffMaxRadius.initDefault(0.8); + } + + private final Drive drive; + private final Supplier target; + + private final ProfiledPIDController driveController = + new ProfiledPIDController( + 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); + private final ProfiledPIDController thetaController = + new ProfiledPIDController( + 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); + + private Translation2d lastSetpointTranslation = new Translation2d(); + private double driveErrorAbs = 0.0; + private double thetaErrorAbs = 0.0; + @Getter private boolean running = false; + + public DriveToPoseFast(Drive drive, Supplier target) { + this.drive = drive; + this.target = target; + + // Enable continuous input for theta controller + thetaController.enableContinuousInput(-Math.PI, Math.PI); + + addRequirements(drive); + } + + @Override + public void initialize() { + Pose2d currentPose = drive.getPose(); + ChassisSpeeds fieldVelocity = + ChassisSpeeds.fromRobotRelativeSpeeds( + drive.getChassisSpeeds(), drive.getPose().getRotation()); + Translation2d linearFieldVelocity = + new Translation2d(fieldVelocity.vxMetersPerSecond, fieldVelocity.vyMetersPerSecond); + driveController.reset( + currentPose.getTranslation().getDistance(target.get().getTranslation()), + Math.min( + 0.0, + -linearFieldVelocity + .rotateBy( + target + .get() + .getTranslation() + .minus(currentPose.getTranslation()) + .getAngle() + .unaryMinus()) + .getX())); + thetaController.reset( + currentPose.getRotation().getRadians(), fieldVelocity.omegaRadiansPerSecond); + lastSetpointTranslation = 
currentPose.getTranslation(); + } + + @Override + public void execute() { + running = true; + + // Update from tunable numbers + if (driveMaxVelocity.hasChanged(hashCode()) + || driveMaxVelocitySlow.hasChanged(hashCode()) + || driveMaxAcceleration.hasChanged(hashCode()) + || driveTolerance.hasChanged(hashCode()) + || driveToleranceSlow.hasChanged(hashCode()) + || thetaMaxVelocity.hasChanged(hashCode()) + || thetaMaxVelocitySlow.hasChanged(hashCode()) + || thetaMaxAcceleration.hasChanged(hashCode()) + || thetaTolerance.hasChanged(hashCode()) + || thetaToleranceSlow.hasChanged(hashCode()) + || drivekP.hasChanged(hashCode()) + || drivekD.hasChanged(hashCode()) + || thetakP.hasChanged(hashCode()) + || thetakD.hasChanged(hashCode())) { + driveController.setP(drivekP.get()); + driveController.setD(drivekD.get()); + driveController.setConstraints( + new TrapezoidProfile.Constraints(driveMaxVelocity.get(), driveMaxAcceleration.get())); + driveController.setTolerance(driveTolerance.get()); + thetaController.setP(thetakP.get()); + thetaController.setD(thetakD.get()); + thetaController.setConstraints( + new TrapezoidProfile.Constraints(thetaMaxVelocity.get(), thetaMaxAcceleration.get())); + thetaController.setTolerance(thetaTolerance.get()); + } + + // Get current pose and target pose + Pose2d currentPose = drive.getPose(); + Pose2d targetPose = target.get(); + + // Calculate drive speed + double currentDistance = currentPose.getTranslation().getDistance(targetPose.getTranslation()); + double ffScaler = + MathUtil.clamp( + (currentDistance - ffMinRadius.get()) / (ffMaxRadius.get() - ffMinRadius.get()), + 0.0, + 1.0); + driveErrorAbs = currentDistance; + driveController.reset( + lastSetpointTranslation.getDistance(targetPose.getTranslation()), + driveController.getSetpoint().velocity); + double driveVelocityScalar = + driveController.getSetpoint().velocity * ffScaler + + driveController.calculate(driveErrorAbs, 0.0); + if (currentDistance < 
driveController.getPositionTolerance()) driveVelocityScalar = 0.0; + lastSetpointTranslation = + new Pose2d( + targetPose.getTranslation(), + currentPose.getTranslation().minus(targetPose.getTranslation()).getAngle()) + .transformBy(GeomUtil.toTransform2d(driveController.getSetpoint().position, 0.0)) + .getTranslation(); + + // Calculate theta speed + double thetaVelocity = + thetaController.getSetpoint().velocity * ffScaler + + thetaController.calculate( + currentPose.getRotation().getRadians(), targetPose.getRotation().getRadians()); + thetaErrorAbs = + Math.abs(currentPose.getRotation().minus(targetPose.getRotation()).getRadians()); + if (thetaErrorAbs < thetaController.getPositionTolerance()) thetaVelocity = 0.0; + + // Command speeds + var driveVelocity = + new Pose2d( + new Translation2d(), + currentPose.getTranslation().minus(targetPose.getTranslation()).getAngle()) + .transformBy(GeomUtil.toTransform2d(driveVelocityScalar, 0.0)) + .getTranslation(); + drive.runVelocity( + ChassisSpeeds.fromFieldRelativeSpeeds( + driveVelocity.getX(), driveVelocity.getY(), thetaVelocity, currentPose.getRotation())); + + // Log data + Logger.recordOutput("DriveToPose/DistanceMeasured", currentDistance); + Logger.recordOutput("DriveToPose/DistanceSetpoint", driveController.getSetpoint().position); + Logger.recordOutput("DriveToPose/ThetaMeasured", currentPose.getRotation().getRadians()); + Logger.recordOutput("DriveToPose/ThetaSetpoint", thetaController.getSetpoint().position); + Logger.recordOutput( + "DriveToPose/Setpoint", + new Pose2d( + lastSetpointTranslation, + Rotation2d.fromRadians(thetaController.getSetpoint().position))); + Logger.recordOutput("DriveToPose/Goal", targetPose); + Logger.recordOutput("DriveToPose/Velocity", driveVelocityScalar); + } + + @Override + public void end(boolean interrupted) { + drive.stop(); + running = false; + // Clear logs + Logger.recordOutput("DriveToPose/Setpoint", new Pose2d[] {}); + Logger.recordOutput("DriveToPose/Goal", new 
Pose2d[] {}); + } + + /** Checks if the robot is stopped at the final pose. */ + public boolean atGoal() { + return running && driveController.atGoal() && thetaController.atGoal(); + } + + /** Checks if the robot pose is within the allowed drive and theta tolerances. */ + public boolean withinTolerance(double driveTolerance, Rotation2d thetaTolerance) { + return running + && Math.abs(driveErrorAbs) < driveTolerance + && Math.abs(thetaErrorAbs) < thetaTolerance.getRadians(); + } +} From 2396361d186f7de078817bc23230bbd8007afd60 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 22 Dec 2025 11:04:57 -0700 Subject: [PATCH 08/12] Make cleaner diff against Az-RBSI modified: src/main/java/frc/robot/Constants.java modified: src/main/java/frc/robot/RobotContainer.java --- src/main/java/frc/robot/Constants.java | 6 +-- src/main/java/frc/robot/RobotContainer.java | 43 +-------------------- 2 files changed, 4 insertions(+), 45 deletions(-) diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index fb0a7cf..e01f9f4 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -426,12 +426,10 @@ public static class AprilTagConstants { public static final AprilTagFieldLayout kAprilTagLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); public static final double kFieldWidthMeters = - kAprilTagLayout.getFieldWidth(); // distance between field walls, - // 8.211m + kAprilTagLayout.getFieldWidth(); // distance between field walls, 8.211m public static final double kFieldLengthMeters = - kAprilTagLayout.getFieldLength(); // distance between driver station + kAprilTagLayout.getFieldLength(); // distance between driver station walls, 16.541m - // walls, 16.541m @Getter public enum AprilTagLayoutType { OFFICIAL("2025-official"); diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 269a4d5..ac36e37 100644 --- 
a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -26,10 +26,6 @@ import choreo.auto.AutoRoutine; import choreo.auto.AutoTrajectory; import com.pathplanner.lib.auto.AutoBuilder; -import com.pathplanner.lib.path.GoalEndState; -import com.pathplanner.lib.path.PathConstraints; -import com.pathplanner.lib.path.PathPlannerPath; -import com.pathplanner.lib.path.Waypoint; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.wpilibj.GenericHID; @@ -60,7 +56,6 @@ import frc.robot.util.OverrideSwitches; import frc.robot.util.PowerMonitoring; import frc.robot.util.RBSIEnum; -import java.util.List; import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; /** This is the location for defining robot hardware, commands, and controller button bindings. */ @@ -69,39 +64,6 @@ public class RobotContainer { private static final boolean USE_MAPLESIM = true; public static final boolean MAPLESIM = USE_MAPLESIM && Robot.isSimulation(); - // **** This is a Pathplanner On-the-Fly Command ****/ - // Create a list of waypoints from poses. Each pose represents one waypoint. - // The rotation component of the pose should be the direction of travel. Do not use - // holonomic rotation. - List woahpoints = - PathPlannerPath.waypointsFromPoses( - new Pose2d(8.180, 6.184, Rotation2d.fromDegrees(0)), - new Pose2d(9.4, 6.184, Rotation2d.fromDegrees(0))); - - PathConstraints constraints = - new PathConstraints(1.0, 1.0, 2 * Math.PI, 4 * Math.PI); // The constraints for this path. 
- // PathConstraints constraints = PathConstraints.unlimitedConstraints(12.0); // You can - // also use unlimited constraints, only limited by motor torque and nominal battery - // voltage - - // Create the path using the waypoints created above - PathPlannerPath woah = - new PathPlannerPath( - woahpoints, - constraints, - null, // The ideal starting state, this is only relevant for pre-planned paths, - // so - // can be null for on-the-fly paths. - new GoalEndState( - 0.0, - Rotation2d.fromDegrees( - 180)) // Goal end state. You can set a holonomic rotation here. If - // using a - // differential drivetrain, the rotation will have no effect. - ); - - // Prevent the path from being flipped if the coordinates are already correct - /** Define the Driver and, optionally, the Operator/Co-Driver Controllers */ // Replace with ``CommandPS4Controller`` or ``CommandJoystick`` if needed final CommandXboxController driverController = new CommandXboxController(0); // Main Driver @@ -111,7 +73,7 @@ public class RobotContainer { /** Declare the robot subsystems here ************************************ */ // These are the "Active Subsystems" that the robot controlls - public final Drive m_drivebase; + private final Drive m_drivebase; private final Flywheel m_flywheel; // These are "Virtual Subsystems" that report information but have no motors @@ -142,8 +104,7 @@ public class RobotContainer { * devices, and commands. */ public RobotContainer() { - instance = this; - woah.preventFlipping = true; + // Instantiate Robot Subsystems based on RobotType switch (Constants.getMode()) { case REAL: From 7a5a7b05b48b0377040d77e18779fd14c6446f80 Mon Sep 17 00:00:00 2001 From: "Timothy P. 
Ellsworth Bowers" Date: Mon, 22 Dec 2025 12:09:31 -0700 Subject: [PATCH 09/12] Update FRC254's vision to 2025 version modified: src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java new file: src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java modified: src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java modified: src/main/java/frc/robot/subsystems/vision/Vision.java new file: src/main/java/frc/robot/util/FieldConstants.java --- .../subsystems/vision/FRC254/Constants.java | 422 +++------- .../vision/FRC254/FiducialObservation.java | 147 ++-- .../subsystems/vision/FRC254/LedState.java | 203 +++++ .../vision/FRC254/MegatagPoseEstimate.java | 119 +-- .../subsystems/vision/FRC254/RobotState.java | 371 ++++++--- .../FRC254/VisionFieldPoseEstimate.java | 22 +- .../subsystems/vision/FRC254/VisionIO.java | 44 +- .../vision/FRC254/VisionSubsystem.java | 771 ++++++------------ .../frc/robot/subsystems/vision/Vision.java | 23 +- .../java/frc/robot/util/FieldConstants.java | 265 ++++++ 10 files changed, 1288 insertions(+), 1099 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java create mode 100644 src/main/java/frc/robot/util/FieldConstants.java diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java index bffec5a..349c64e 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 
+// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -15,33 +15,51 @@ import com.ctre.phoenix6.configs.ClosedLoopRampsConfigs; import com.ctre.phoenix6.configs.OpenLoopRampsConfigs; -import com.ctre.phoenix6.signals.InvertedValue; -import com.pathplanner.lib.path.PathConstraints; import edu.wpi.first.apriltag.AprilTagFieldLayout; import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform2d; import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.trajectory.TrapezoidProfile; import edu.wpi.first.math.util.Units; -import frc.robot.util.RobotDeviceId; -import frc.robot.util.ServoMotorSubsystemConfig; +import edu.wpi.first.wpilibj.RobotBase; import java.net.NetworkInterface; import java.net.SocketException; +import java.util.Arrays; import java.util.Enumeration; -/** Class to store all constants for robot code. */ public class Constants { - public static final String kCanBusCanivore = "canivore"; + public static final Mode simMode = Mode.SIM; + public static final Mode currentMode = RobotBase.isReal() ? Mode.REAL : simMode; + public static final SimControllerType kSimControllerType = SimControllerType.XBOX; + + public enum SimControllerType { + XBOX, + DUAL_SENSE + } + + public enum Mode { + /** Running on a real robot. */ + REAL, + + /** Running a physics simulator. */ + SIM, + + /** Replaying from a log file. 
*/ + REPLAY + } + + public static final String kCanBusDrivebaseClimberCanivore = "drivebase-climber"; + public static final String kCanBusSuperstructureCanivore = "superstructure"; public static boolean kIsReplay = false; - public static final String kPracticeBotMacAddress = "00:80:2F:33:D1:4B"; + public static final String kPracticeBotMacAddress = "00:80:2F:33:BF:BB"; public static boolean kIsPracticeBot = hasMacAddress(kPracticeBotMacAddress); public static final double kSteerJoystickDeadband = 0.05; + public static final double kRobotWidth = Units.inchesToMeters(35.625); + public static final double kRobotDiagonal = Math.sqrt(2.0) * kRobotWidth; + public static final double kRobotMassKg = Units.lbsToKilograms(147.92); + public static final double kRobotMomentOfInertia = 2 * 9.38; // kg * m^2 + public static final double kCOGHeightMeters = Units.inchesToMeters(0.0); public static final ClosedLoopRampsConfigs makeDefaultClosedLoopRampConfig() { return new ClosedLoopRampsConfigs() @@ -57,314 +75,88 @@ public static final OpenLoopRampsConfigs makeDefaultOpenLoopRampConfig() { .withVoltageOpenLoopRampPeriod(0.02); } - public static final class DriveConstants { - public static final double kDriveMaxSpeed = 4.43676260556; - public static final double kDriveMaxAngularRate = Units.degreesToRadians(360 * 1.15); - public static final double kHeadingControllerP = 3.5; - public static final double kHeadingControllerI = 0; - public static final double kHeadingControllerD = 0; - } - - public static final class SensorConstants { - public static final int kAmpPostChopstickSensorPort = 4; - public static final int kShooterStage1BannerSensorPort = 3; - public static final int kAmpBannerSensorPort = 2; - public static final int kFeederBannerSensorPort = 1; - public static final int kIntakeBannerSensorPort = 0; - public static final double kShooterDebounceTime = 0.01; - public static final double kAmpDebounceTime = 0.01; - public static final double kFeederDebounceTime = 0.01; - 
public static final double kIntakeDebounceTime = 0.01; - } - - public static final class ClimberConstants { - public static final RobotDeviceId kClimberTalonCanID = - new RobotDeviceId(24, kCanBusCanivore, 0); - public static final double kClimberP = kIsPracticeBot ? 1.0 : 1.0; - public static final double kForwardMaxPositionRotations = kIsPracticeBot ? 119.0 : 132.0; - public static final double kHooksUpPositionRotations = kForwardMaxPositionRotations * 0.9; - public static final double kStageHooksRotations = kForwardMaxPositionRotations * 0.4; - public static final double kClimbClimbedPositionToleranceRotations = - kForwardMaxPositionRotations * 0.1; - public static final double kPositionToleranceRotations = 2.0; - public static final double kClimberGearRatio = 1.0 / (10.0); - public static double kReverseMinPositionRotations = 0.0; - } - - public static final class ElevatorConstants { - public static final double ElevatorMinPositionRotations = 0.0; - public static final double ElevatorMaxPositionRotations = 15.356933; - public static final double ElevatorMaxHeightInches = 16.5; - public static final double kElevatorGearRatio = (11.0 / 36.0) * (18. 
/ 15.); - public static final double kElevatorPositionToleranceRotations = 0.1; - public static final double kAmpScoringHeightInches = 16.0; - public static final double kElevatorHomeHeightInches = 0.0; - public static final double kIntakeFromSourceHeightInches = 14.5; - public static final double kElevatorPositioningToleranceInches = 0.5; - public static final double kClimbHeightInches = 16.0; - public static final double kSpoolDiameter = Units.inchesToMeters(0.940); - } - - // Intake Constants - public static final class IntakeConstants { - public static final double kIntakeDutyCycleIntake = 1.; - public static final double kIntakeDutyCycleIntakeFromSource = 1.0; - public static final double kIntakeDutyCycleExhuast = -1.0; - } - - // Feeder Constants - public static final class FeederConstants { - public static final double kFeederRPSIntake = 70.0; - public static final boolean kRunClockwise = true; - } - - public static class AmpConstants { - public static final double kAmpIntakeFromSourceDutyCycle = -1.0; - public static final double kAmpExhaustToStageDutyCycle = 0.85; - public static final double kAmpSlowlyStageDutyCycle = 0.1; - - public static final double kAmpScoreDutyCycle = -1.0; - - // Number of rotations to turn after beam break is tripped (rising edge) to stow - // the note prior to scoring - public static final double kAmpChopsticksStageRotations = 6.0; - public static final double kTrapChopsticksStageRotations = 10.0; - public static final double kAmpChopsticksGoBackRotations = -1.75; - public static final double kAmpChopsticksGoBackRotationsTolerance = 0.25; - public static final double kAmpChopsticksStageRotationsTolerance = 0.1; - } - - public static final ServoMotorSubsystemConfig kAmpConfig = new ServoMotorSubsystemConfig(); - - static { - kAmpConfig.name = "Amp"; - kAmpConfig.talonCANID = new RobotDeviceId(25, kCanBusCanivore, 0); - kAmpConfig.fxConfig.MotorOutput.Inverted = InvertedValue.Clockwise_Positive; - - 
kAmpConfig.fxConfig.Audio.BeepOnBoot = false; - kAmpConfig.fxConfig.Audio.BeepOnConfig = false; - - kAmpConfig.fxConfig.Slot0.kS = 0.015 * 12.0; - kAmpConfig.fxConfig.Slot0.kP = 0.3 * 12.0; - kAmpConfig.fxConfig.Slot0.kV = 0.00925 * 12.0; - kAmpConfig.fxConfig.Slot0.kA = 0.0001 * 12.0; - kAmpConfig.fxConfig.MotionMagic.MotionMagicAcceleration = 500.0; - kAmpConfig.fxConfig.MotionMagic.MotionMagicCruiseVelocity = 50.0; - - kAmpConfig.fxConfig.CurrentLimits.SupplyCurrentLimitEnable = true; - kAmpConfig.fxConfig.CurrentLimits.SupplyCurrentLimit = 60.0; - - kAmpConfig.fxConfig.CurrentLimits.StatorCurrentLimit = 80.0; - kAmpConfig.fxConfig.CurrentLimits.StatorCurrentLimitEnable = true; - kAmpConfig.fxConfig.ClosedLoopRamps = makeDefaultClosedLoopRampConfig(); - kAmpConfig.fxConfig.OpenLoopRamps = makeDefaultOpenLoopRampConfig(); - } - - public static final class ShooterConstants { - public static final Rotation2d kTurretToShotCorrection = - new Rotation2d(Units.degreesToRadians(1.5)); - - public static final double kPoopMaxApexHeight = Units.inchesToMeters(160.0); - - public static final double kStage2ShooterWheelDiameter = Units.inchesToMeters(3.0); // in - public static final double kStage1ShooterWheelDiameter = Units.inchesToMeters(2.0); // in - - public static final double kRingLaunchVelMetersPerSecPerRotPerSec = 0.141; - public static final double kRingLaunchLiftCoeff = 0.013; // Multiply by v^2 to get lift accel - public static final double kShooterStage2RPSShortRange = 120.0; // rot/s - public static final double kShooterStage2MaxShortRangeDistance = 2.0; - public static final double kShooterStage2MinLongRangeDistance = 3.0; - public static final double kShooterStage2RPSLongRange = 120.0; // rot/s - public static final double kShooterStage2RPSCap = 130.0; // rot/s - public static final double kShooterStage1RPS = 70.0; // rot/s - public static final double kShooterStage2Epsilon = 3.0; - public static final double kShooterSpinupStage1RPS = 0.0; - - public static 
final double kShooterStage1IntakeRPS = 4.0; - public static final double kShooterStage1ExhaustRPS = -10.0; - - public static final double kFenderShotRPS = 100.0; - public static final double kPreloadShotRPS = 90.0; - - public static final double kBottomRollerSpeedupFactor = - 1.0; // multiplied to all setpoints to determine how - // much - // extra power to give the bottom roller. >1.0 = - // faster - // bottom roller - public static final double kTopRollerSpeedupFactor = 1.0; - } - - public static final double kJoystickThreshold = 0.1; - public static final int kDriveGamepadPort = 0; - - // Controls - public static final boolean kForceDriveGamepad = true; - public static final int kGamepadAdditionalControllerPort = 1; - public static final int kOperatorControllerPort = 2; - public static final int kMainThrottleJoystickPort = 0; - public static final int kMainTurnJoystickPort = 1; - public static final double kDriveJoystickThreshold = 0.03; - - // Limelight constants - - // TURRET LIMELIGHT - // Pitch angle: How many radians the camera is pitched up around Y axis. 0 is - // looking straight ahead, +is nodding up. - public static final double kCameraPitchDegrees = kIsPracticeBot ? 28.0 : 27.5; - public static final double kCameraPitchRads = Units.degreesToRadians(kCameraPitchDegrees); - public static final double kCameraHeightOffGroundMeters = - kIsPracticeBot ? Units.inchesToMeters(11.181) : Units.inchesToMeters(11.181); - public static final double kImageCaptureLatency = 11.0; // milliseconds - public static final double kLimelightTransmissionTimeLatency = 0.0; // milliseconds - public static final String kLimelightTableName = "limelight-turret"; - // Distance from turret center to camera lens in X axis (straight into lens) - public static final double kTurretToCameraX = - kIsPracticeBot ? 
Units.inchesToMeters(5.834) : Units.inchesToMeters(5.834); - // Distance from turret center to camera lens in Y - public static final double kTurretToCameraY = 0; - - // ELEVATOR LIMELIGHT - public static final String kLimelightBTableName = "limelight-eleva"; - public static final double kCameraBPitchDegrees = kIsPracticeBot ? 15.0 : 16.0; - public static final double kCameraBPitchRads = Units.degreesToRadians(kCameraBPitchDegrees); - public static final double kCameraBRollDegrees = kIsPracticeBot ? 0.0 : 0.0; - public static final double kCameraBRollRads = Units.degreesToRadians(kCameraBRollDegrees); - public static final double kCameraBHeightOffGroundMeters = - kIsPracticeBot - ? Units.inchesToMeters(19.477) - : Units.inchesToMeters(19.477); // verify for practice - // Distance from turret center to camera lens in X axis (straight into lens) - public static final double kTurretToCameraBX = - kIsPracticeBot - ? Units.inchesToMeters(14.882) - : Units.inchesToMeters(14.882); // verify for practice - // Distance from turret center to camera lens in Y - public static final double kTurretToCameraBY = 0; - - public static final double kTurretToRobotCenterX = Units.inchesToMeters(2.3115); - public static final double kTurretToRobotCenterY = 0; - public static final Transform2d kTurretToRobotCenter = - new Transform2d( - new Translation2d(Constants.kTurretToRobotCenterX, Constants.kTurretToRobotCenterY), - new Rotation2d()); - public static final Rotation2d kCameraYawOffset = new Rotation2d(0); - // April Tag Layout public static final AprilTagFieldLayout kAprilTagLayout = - AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); - - public static final double kFieldWidthMeters = - kAprilTagLayout.getFieldWidth(); // distance between field walls, - // 8.211m - public static final double kFieldLengthMeters = - kAprilTagLayout.getFieldLength(); // distance between driver station - // walls, 16.541m - - public static final Pose2d kBlueAmpPose = new Pose2d(1.820, 7.680, 
Rotation2d.fromDegrees(90.0)); - public static final Pose2d kRedAmpPose = - new Pose2d( - kFieldWidthMeters - kBlueAmpPose.getX(), - kBlueAmpPose.getY(), - Rotation2d.fromDegrees(180 - kBlueAmpPose.getRotation().getDegrees())); // X 14.7345 - - public static final Pose2d kBlueClimbPoseFeed = - new Pose2d(4.4, 3.3, Rotation2d.fromDegrees(60.0)); - public static final Pose2d kBlueClimbPoseAmp = - new Pose2d(4.43, 4.95, Rotation2d.fromDegrees(-60.0)); - public static final Pose2d kBlueClimbPoseMidline = - new Pose2d(5.8, 4.1, Rotation2d.fromDegrees(180.0)); - public static final Pose2d kRedClimbPoseFeed = - new Pose2d( - kFieldWidthMeters - kBlueClimbPoseFeed.getX(), - kBlueClimbPoseFeed.getY(), - Rotation2d.fromDegrees(180 - kBlueClimbPoseFeed.getRotation().getDegrees())); - public static final Pose2d kRedClimbPoseAmp = - new Pose2d( - kFieldWidthMeters - kBlueClimbPoseAmp.getX(), - kBlueClimbPoseAmp.getY(), - Rotation2d.fromDegrees(180 - kBlueClimbPoseAmp.getRotation().getDegrees())); - public static final Pose2d kRedClimbPoseMidline = - new Pose2d( - kFieldWidthMeters - kBlueClimbPoseMidline.getX(), - kBlueClimbPoseMidline.getY(), - Rotation2d.fromDegrees(180 - kBlueClimbPoseMidline.getRotation().getDegrees())); - public static final Translation3d kRedSpeakerPose = - new Translation3d( - Constants.kAprilTagLayout.getTagPose(4).get().getX(), - Constants.kAprilTagLayout.getTagPose(4).get().getY(), - 2.045); - public static final Translation3d kBlueSpeakerPose = - new Translation3d( - Constants.kAprilTagLayout.getTagPose(7).get().getX(), - Constants.kAprilTagLayout.getTagPose(7).get().getY(), - 2.045); - public static final Translation3d kRedSpeakerTopPose = - new Translation3d( - Constants.kAprilTagLayout.getTagPose(4).get().getX() - Units.inchesToMeters(20.057), - Constants.kAprilTagLayout.getTagPose(4).get().getY(), - Constants.kAprilTagLayout.getTagPose(4).get().getZ() + Units.inchesToMeters(32.563)); - public static final Translation3d kBlueSpeakerTopPose = - new 
Translation3d( - Constants.kAprilTagLayout.getTagPose(7).get().getX() + Units.inchesToMeters(20.057), - Constants.kAprilTagLayout.getTagPose(7).get().getY(), - Constants.kAprilTagLayout.getTagPose(7).get().getZ() + Units.inchesToMeters(32.563)); + AprilTagFieldLayout.loadField(AprilTagFields.k2025ReefscapeWelded); + public static final int[] kAllowedTagIDs = {17, 18, 19, 20, 21, 22, 6, 7, 8, 9, 10, 11}; + public static final AprilTagFieldLayout kAprilTagLayoutReefsOnly = + new AprilTagFieldLayout( + kAprilTagLayout.getTags().stream() + .filter(tag -> Arrays.stream(kAllowedTagIDs).anyMatch(element -> element == tag.ID)) + .toList(), + kAprilTagLayout.getFieldLength(), + kAprilTagLayout.getFieldWidth()); - public static final Translation2d kBlueStageCenterPose = - new Translation2d(4.83, kFieldWidthMeters / 2.0); - public static final Translation2d kRedStageCenterPose = - new Translation2d( - kFieldLengthMeters - kBlueStageCenterPose.getX(), kBlueStageCenterPose.getY()); - public static final double kBlueSpeakerToStageAutoSwitchX = 7.0; - public static final double kRedSpeakerToStageAutoSwitchX = - kFieldLengthMeters - kBlueSpeakerToStageAutoSwitchX; + public static final double kFieldWidthMeters = kAprilTagLayout.getFieldWidth(); + public static final double kFieldLengthMeters = kAprilTagLayout.getFieldLength(); - public static final double kNoteReleaseHeight = Units.inchesToMeters(22.183); - public static final Pose3d kLeftRedSpeakerPose = - new Pose3d( - Constants.kAprilTagLayout.getTagPose(4).get().getX(), 5.875, 2.04, new Rotation3d()); - public static final Pose3d kRightRedSpeakerPose = - new Pose3d( - Constants.kAprilTagLayout.getTagPose(4).get().getX(), 5.29, 2.04, new Rotation3d()); - public static final Pose3d kLeftBlueSpeakerPose = - new Pose3d( - Constants.kAprilTagLayout.getTagPose(7).get().getX(), 5.875, 2.04, new Rotation3d()); - public static final Pose3d kRightBlueSpeakerPose = - new Pose3d( - Constants.kAprilTagLayout.getTagPose(7).get().getX(), 
5.29, 2.04, new Rotation3d()); - public static final double kSpeakerLengthMeters = - kLeftRedSpeakerPose.getY() - kRightRedSpeakerPose.getY(); + public static final double kReefRadius = 0.9604; // m - public static final class AutoConstants { - public static final double kMaxSpeedMetersPerSecond = 4.75; - public static final double kMaxAccelerationMetersPerSecondSquared = 4.85; - public static final double kMaxAngularSpeedRadiansPerSecond = Math.PI; - public static final double kMaxAngularSpeedRadiansPerSecondSquared = Math.PI; + public static final double kMidlineBuffer = 1.0; - public static final double kPXController = 6.0; - public static final double kPYController = 2.0; - public static final double kPThetaController = 4.0; - - public static final double kTranslationKa = 0.0; - - // Constraint for the motion profiled robot angle controller - public static final TrapezoidProfile.Constraints kThetaControllerConstraints = - new TrapezoidProfile.Constraints( - kMaxAngularSpeedRadiansPerSecond, kMaxAngularSpeedRadiansPerSecondSquared); - - public static final PathConstraints kAutoAlignPathConstraints = - new PathConstraints( - kMaxSpeedMetersPerSecond, - kMaxAccelerationMetersPerSecondSquared, - kMaxAngularSpeedRadiansPerSecond, - kMaxAngularSpeedRadiansPerSecondSquared); - } - - public static final class LEDConstants { - public static final RobotDeviceId kCANdleId = new RobotDeviceId(30, kCanBusCanivore, 0); - public static final int kNonCandleLEDCount = 30; - public static final int kCandleLEDCount = 8; - public static final int kMaxLEDCount = kNonCandleLEDCount + kCandleLEDCount; + // Limelight constants + public static final class VisionConstants { + + // Large variance used to downweight unreliable vision measurements + public static final double kLargeVariance = 1e6; + + // Standard deviation constants + public static final int kMegatag1XStdDevIndex = 0; + public static final int kMegatag1YStdDevIndex = 1; + public static final int kMegatag1YawStdDevIndex = 
5; + + // Standard deviation array indices for Megatag2 + public static final int kMegatag2XStdDevIndex = 6; + public static final int kMegatag2YStdDevIndex = 7; + public static final int kMegatag2YawStdDevIndex = 11; + + // Validation constants + public static final int kExpectedStdDevArrayLength = 12; + + public static final int kMinFiducialCount = 1; + + // Camera A (Left-side Camera) + public static final double kCameraAPitchDegrees = 20.0; + public static final double kCameraAPitchRads = Units.degreesToRadians(kCameraAPitchDegrees); + public static final double kCameraAHeightOffGroundMeters = Units.inchesToMeters(8.3787); + public static final String kLimelightATableName = "limelight-left"; + public static final double kRobotToCameraAForward = Units.inchesToMeters(7.8757); + public static final double kRobotToCameraASide = Units.inchesToMeters(-11.9269); + public static final Rotation2d kCameraAYawOffset = Rotation2d.fromDegrees(0.0); + public static final Transform2d kRobotToCameraA = + new Transform2d( + new Translation2d(kRobotToCameraAForward, kRobotToCameraASide), kCameraAYawOffset); + + // Camera B (Right-side camera) + public static final double kCameraBPitchDegrees = 20.0; + public static final double kCameraBPitchRads = Units.degreesToRadians(kCameraBPitchDegrees); + public static final double kCameraBHeightOffGroundMeters = Units.inchesToMeters(8.3787); + public static final String kLimelightBTableName = "limelight-right"; + public static final double kRobotToCameraBForward = Units.inchesToMeters(7.8757); + public static final double kRobotToCameraBSide = Units.inchesToMeters(11.9269); + public static final Rotation2d kCameraBYawOffset = Rotation2d.fromDegrees(0.0); + public static final Transform2d kRobotToCameraB = + new Transform2d( + new Translation2d(kRobotToCameraBForward, kRobotToCameraBSide), kCameraBYawOffset); + + // Vision processing constants + public static final double kDefaultAmbiguityThreshold = 0.19; + public static final double 
kDefaultYawDiffThreshold = 5.0; + public static final double kTagAreaThresholdForYawCheck = 2.0; + public static final double kTagMinAreaForSingleTagMegatag = 1.0; + public static final double kDefaultZThreshold = 0.2; + public static final double kDefaultNormThreshold = 1.0; + public static final double kMinAmbiguityToFlip = 0.08; + + public static final double kCameraHorizontalFOVDegrees = 81.0; + public static final double kCameraVerticalFOVDegrees = 55.0; + public static final int kCameraImageWidth = 1280; + public static final int kCameraImageHeight = 800; + + public static final double kScoringConfidenceThreshold = 0.7; + + // NetworkTables constants + public static final String kBoundingBoxTableName = "BoundingBoxes"; } /** diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java index c69e735..0203329 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -17,82 +17,85 @@ import edu.wpi.first.util.struct.StructSerializable; import frc.robot.util.LimelightHelpers; import java.nio.ByteBuffer; - -public class FiducialObservation implements StructSerializable { - public static class FiducialObservationStruct implements Struct { - public int id; - public double txnc; - public double tync; - public double ambiguity; - - @Override - public Class getTypeClass() { - return FiducialObservation.class; - } - - @Override - public String getTypeName() { - return "FiducialObservation"; - } - - @Override - public String getTypeString() { - return "struct:FiducialObservation"; - } - - @Override - public int getSize() { - return kSizeInt32 + 3 * kSizeDouble; - } - - @Override - 
public String getSchema() { - return "int id;double txnc;double tync;double ambiguity"; - } - - @Override - public FiducialObservation unpack(ByteBuffer bb) { - FiducialObservation rv = new FiducialObservation(); - rv.id = bb.getInt(); - rv.txnc = bb.getDouble(); - rv.tync = bb.getDouble(); - rv.ambiguity = bb.getDouble(); - return rv; - } - - @Override - public void pack(ByteBuffer bb, FiducialObservation value) { - bb.putInt(value.id); - bb.putDouble(value.txnc); - bb.putDouble(value.tync); - bb.putDouble(value.ambiguity); - } - } - - public int id; - public double txnc; - public double tync; - public double ambiguity; - - public FiducialObservation() {} - +import java.util.Arrays; +import java.util.Objects; + +/** + * Represents an observation of a fiducial marker (AprilTag) with position and quality data. + * + * @param id The fiducial marker ID + * @param txnc Normalized horizontal offset (-1 to 1) + * @param tync Normalized vertical offset (-1 to 1) + * @param ambiguity Pose ambiguity score (0 = confident, 1 = ambiguous) + * @param area Target area as percentage of image + */ +public record FiducialObservation(int id, double txnc, double tync, double ambiguity, double area) + implements StructSerializable { + + /** Converts a Limelight raw fiducial to a FiducialObservation. */ public static FiducialObservation fromLimelight(LimelightHelpers.RawFiducial fiducial) { - FiducialObservation rv = new FiducialObservation(); - rv.id = fiducial.id; - rv.txnc = fiducial.txnc; - rv.tync = fiducial.tync; - rv.ambiguity = fiducial.ambiguity; - - return rv; + if (fiducial == null) { + return null; + } + return new FiducialObservation( + fiducial.id, fiducial.txnc, fiducial.tync, fiducial.ambiguity, fiducial.ta); } + /** Converts an array of Limelight raw fiducials to FiducialObservation array. 
*/ public static FiducialObservation[] fromLimelight(LimelightHelpers.RawFiducial[] fiducials) { - FiducialObservation[] rv = new FiducialObservation[fiducials.length]; - for (int i = 0; i < fiducials.length; ++i) { - rv[i] = fromLimelight(fiducials[i]); + if (fiducials == null) { + return new FiducialObservation[0]; } - return rv; + return Arrays.stream(fiducials) + .map(FiducialObservation::fromLimelight) + .filter(Objects::nonNull) + .toArray(FiducialObservation[]::new); } - public static final FiducialObservationStruct struct = new FiducialObservationStruct(); + public static final Struct struct = + new Struct() { + @Override + public Class getTypeClass() { + return FiducialObservation.class; + } + + @Override + public String getTypeString() { + return "record:FiducialObservation"; + } + + @Override + public int getSize() { + return Integer.BYTES + 4 * Double.BYTES; + } + + @Override + public String getSchema() { + return "int id;double txnc;double tync;double ambiguity"; + } + + @Override + public FiducialObservation unpack(ByteBuffer bb) { + int id = bb.getInt(); + double txnc = bb.getDouble(); + double tync = bb.getDouble(); + double ambiguity = bb.getDouble(); + double area = bb.getDouble(); + return new FiducialObservation(id, txnc, tync, ambiguity, area); + } + + @Override + public void pack(ByteBuffer bb, FiducialObservation value) { + bb.putInt(value.id()); + bb.putDouble(value.txnc()); + bb.putDouble(value.tync()); + bb.putDouble(value.ambiguity()); + bb.putDouble(value.area()); + } + + @Override + public String getTypeName() { + return "FiducialObservation"; + } + }; } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java new file mode 100644 index 0000000..c45c64e --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java @@ -0,0 +1,203 @@ +// Copyright (c) 2025 FRC 254 +// https://github.com/team254 +// +// This program is free software; 
you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision.FRC254; + +public class LedState { + public static final LedState kRed = new LedState(255, 0, 0); + public static final LedState kBlue = new LedState(0, 0, 255); + public static final LedState kCyan = new LedState(0, 255, 255); + public static final LedState kGreen = new LedState(0, 255, 0); + public static final LedState kYellow = new LedState(255, 215, 0); + public static final LedState kOrange = new LedState(255, 80, 0); + public static final LedState kPurple = new LedState(255, 0, 255); + public static final LedState kWhite = new LedState(255, 255, 255); + public static final LedState kPink = new LedState(255, 0, 100); + + public static final LedState kOff = new LedState(0, 0, 0); // No Color + public static final LedState kLowBattery = kRed; + public static final LedState kGoodBattery = kGreen; + public static final LedState[] kRainbow = { + kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kOff, kRed, kOrange, kYellow, + kGreen, kCyan, kBlue, kPurple, kOff, kOff + }; + + public static final LedState kCoralMode = kWhite; + public static final LedState kAlgaeMode = kGreen; + public static final LedState kCoralManual = kCyan; + + // 3 leds for l2, 7 leds for l3, full for l4 + public static final LedState[] kL2StagingLeds = { + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kOff, + kOff, + kOff, + kOff, + kOff, + kOff + }; + public static 
final LedState[] kL3StagingLeds = { + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kCoralMode, + kOff, + kOff, + kOff + }; + + public static final LedState[] kL2StagingManualLeds = { + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kOff, + kOff, + kOff, + kOff, + kOff, + kOff + }; + public static final LedState[] kL3StagingManualLeds = { + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kCoralManual, + kOff, + kOff, + kOff + }; + + public static final LedState[] kAlgaeL2Leds = { + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kOff, + kOff, + kOff, + kOff, + kOff, + kOff + }; + public static final LedState[] kAlgaeL3Leds = { + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kAlgaeMode, + kOff, + kOff, + kOff + }; + + public int blue; + public int green; + public int red; + + public LedState() { + blue = 0; + green = 0; + red = 0; + } + + public LedState(int r, int g, int b) { + blue = b; + green = g; + red = r; + } + + public void copyFrom(LedState other) { + this.blue = other.blue; + this.green = other.green; + this.red = other.red; + } + + @Override + public boolean equals(Object other) { + if (other == null) { + return false; + } + + if (other.getClass() != this.getClass()) { + return false; + } + + LedState s = (LedState) other; + 
return this.blue == s.blue && this.red == s.red && this.green == s.green; + } + + @Override + public String toString() { + return "#" + Integer.toHexString(red) + Integer.toHexString(green) + Integer.toHexString(blue); + } +} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java index a65c0b4..32e4f3d 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -20,36 +20,77 @@ import frc.robot.util.MathHelpers; import java.nio.ByteBuffer; -public class MegatagPoseEstimate implements StructSerializable { +/** + * Represents a robot pose estimate using multiple AprilTags (Megatag). + * + * @param fieldToRobot The estimated robot pose on the field + * @param timestampSeconds The timestamp when this estimate was captured + * @param latency Processing latency in seconds + * @param avgTagArea Average area of detected tags + * @param quality Quality score of the pose estimate (0-1) + * @param fiducialIds IDs of fiducials used for this estimate + */ +public record MegatagPoseEstimate( + Pose2d fieldToRobot, + double timestampSeconds, + double latency, + double avgTagArea, + double quality, + int[] fiducialIds) + implements StructSerializable { + + public MegatagPoseEstimate { + if (fieldToRobot == null) { + fieldToRobot = MathHelpers.kPose2dZero; + } + if (fiducialIds == null) { + fiducialIds = new int[0]; + } + } + + /** Converts a Limelight pose estimate to a MegatagPoseEstimate. 
*/ + public static MegatagPoseEstimate fromLimelight(LimelightHelpers.PoseEstimate poseEstimate) { + Pose2d fieldToRobot = poseEstimate.pose; + if (fieldToRobot == null) { + fieldToRobot = MathHelpers.kPose2dZero; + } + int[] fiducialIds = new int[poseEstimate.rawFiducials.length]; + for (int i = 0; i < poseEstimate.rawFiducials.length; i++) { + if (poseEstimate.rawFiducials[i] != null) { + fiducialIds[i] = poseEstimate.rawFiducials[i].id; + } + } + return new MegatagPoseEstimate( + fieldToRobot, + poseEstimate.timestampSeconds, + poseEstimate.latency, + poseEstimate.avgTagArea, + fiducialIds.length > 1 ? 1.0 : 1.0 - poseEstimate.rawFiducials[0].ambiguity, + fiducialIds); + } + + public static final MegatagPoseEstimateStruct struct = new MegatagPoseEstimateStruct(); + public static class MegatagPoseEstimateStruct implements Struct { - public Pose2d fieldToCamera = MathHelpers.kPose2dZero; - public double timestampSeconds; - public double latency; - public double avgTagArea; @Override public Class getTypeClass() { return MegatagPoseEstimate.class; } - @Override - public String getTypeName() { - return "MegatagPoseEstimate"; - } - @Override public String getTypeString() { - return "struct:MegatagPoseEstimate"; + return "record:MegatagPoseEstimate"; } @Override public int getSize() { - return Pose2d.struct.getSize() + kSizeDouble * 3; + return Pose2d.struct.getSize() + 3 * Double.BYTES; } @Override public String getSchema() { - return "Pose2d fieldToCamera;double timestampSeconds;double latency;double avgTagArea"; + return "Pose2d fieldToRobot; double timestampSeconds; double latency; double avgTagArea"; } @Override @@ -59,46 +100,28 @@ public Struct[] getNested() { @Override public MegatagPoseEstimate unpack(ByteBuffer bb) { - MegatagPoseEstimate rv = new MegatagPoseEstimate(); - rv.fieldToCamera = Pose2d.struct.unpack(bb); - rv.timestampSeconds = bb.getDouble(); - rv.latency = bb.getDouble(); - rv.avgTagArea = bb.getDouble(); - rv.fiducialIds = new int[0]; - return 
rv; + Pose2d fieldToRobot = Pose2d.struct.unpack(bb); + double timestampSeconds = bb.getDouble(); + double latency = bb.getDouble(); + double avgTagArea = bb.getDouble(); + double quality = bb.getDouble(); + int[] fiducialIds = new int[0]; + return new MegatagPoseEstimate( + fieldToRobot, timestampSeconds, latency, avgTagArea, quality, fiducialIds); } @Override public void pack(ByteBuffer bb, MegatagPoseEstimate value) { - Pose2d.struct.pack(bb, value.fieldToCamera); - bb.putDouble(value.timestampSeconds); - bb.putDouble(value.latency); - bb.putDouble(value.avgTagArea); + Pose2d.struct.pack(bb, value.fieldToRobot()); + bb.putDouble(value.timestampSeconds()); + bb.putDouble(value.latency()); + bb.putDouble(value.avgTagArea()); + bb.putDouble(value.quality()); } - } - public Pose2d fieldToCamera = MathHelpers.kPose2dZero; - public double timestampSeconds; - public double latency; - public double avgTagArea; - public int[] fiducialIds; - - public MegatagPoseEstimate() {} - - public static MegatagPoseEstimate fromLimelight(LimelightHelpers.PoseEstimate poseEstimate) { - MegatagPoseEstimate rv = new MegatagPoseEstimate(); - rv.fieldToCamera = poseEstimate.pose; - if (rv.fieldToCamera == null) rv.fieldToCamera = MathHelpers.kPose2dZero; - rv.timestampSeconds = poseEstimate.timestampSeconds; - rv.latency = poseEstimate.latency; - rv.avgTagArea = poseEstimate.avgTagArea; - rv.fiducialIds = new int[poseEstimate.rawFiducials.length]; - for (int i = 0; i < rv.fiducialIds.length; ++i) { - rv.fiducialIds[i] = poseEstimate.rawFiducials[i].id; + @Override + public String getTypeName() { + return "MegatagPoseEstimate"; } - - return rv; } - - public static final MegatagPoseEstimateStruct struct = new MegatagPoseEstimateStruct(); } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java index b7d0014..4d3f59d 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java +++ 
b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -14,18 +14,14 @@ package frc.robot.subsystems.vision.FRC254; import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; import edu.wpi.first.math.geometry.Twist2d; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; -import edu.wpi.first.wpilibj.RobotBase; -import edu.wpi.first.wpilibj.Timer; +import frc.robot.Robot; import frc.robot.util.ConcurrentTimeInterpolatableBuffer; +import frc.robot.util.FieldConstants; import frc.robot.util.MathHelpers; -import frc.robot.util.RobotTime; import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; @@ -35,6 +31,7 @@ import java.util.function.IntSupplier; import org.littletonrobotics.junction.Logger; +/** Tracks robot state including pose, velocities, and mechanism positions. */ public class RobotState { public static final double LOOKBACK_TIME = 1.0; @@ -43,31 +40,32 @@ public class RobotState { public RobotState(Consumer visionEstimateConsumer) { this.visionEstimateConsumer = visionEstimateConsumer; - // Make sure to add one sample to these methods to protect callers against null. fieldToRobot.addSample(0.0, MathHelpers.kPose2dZero); - robotToTurret.addSample(0.0, MathHelpers.kRotation2dZero); - turretAngularVelocity.addSample(0.0, 0.0); driveYawAngularVelocity.addSample(0.0, 0.0); - turretPositionRadians.addSample(0.0, 0.0); + + // Initialize mechanism positions + elevatorHeightMeters.set(0.0); + wristRadians.set(0.0); + intakeRollerRotations.set(0.0); + clawRollerRotations.set(0.0); } // State of robot. 
// Kinematic Frames + // Robot's pose in field coordinates over time private final ConcurrentTimeInterpolatableBuffer fieldToRobot = ConcurrentTimeInterpolatableBuffer.createBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer robotToTurret = - ConcurrentTimeInterpolatableBuffer.createBuffer(LOOKBACK_TIME); - private static final Transform2d TURRET_TO_CAMERA = - new Transform2d( - Constants.kTurretToCameraX, Constants.kTurretToCameraY, MathHelpers.kRotation2dZero); - private static final Transform2d ROBOT_TO_CAMERA_B = - new Transform2d( - Constants.kTurretToCameraBX, Constants.kTurretToCameraBY, MathHelpers.kRotation2dZero); + // Current robot-relative chassis speeds (measured from encoders) private final AtomicReference measuredRobotRelativeChassisSpeeds = new AtomicReference<>(new ChassisSpeeds()); + // Current field-relative chassis speeds (measured from encoders) private final AtomicReference measuredFieldRelativeChassisSpeeds = new AtomicReference<>(new ChassisSpeeds()); + // Desired robot-relative chassis speeds (set by control systems) + private final AtomicReference desiredRobotRelativeChassisSpeeds = + new AtomicReference<>(new ChassisSpeeds()); + // Desired field-relative chassis speeds (set by control systems) private final AtomicReference desiredFieldRelativeChassisSpeeds = new AtomicReference<>(new ChassisSpeeds()); private final AtomicReference fusedFieldRelativeChassisSpeeds = @@ -76,31 +74,30 @@ public RobotState(Consumer visionEstimateConsumer) { private final AtomicInteger iteration = new AtomicInteger(0); private double lastUsedMegatagTimestamp = 0; - private double lastTriggeredIntakeSensorTimestamp = 0; - private ConcurrentTimeInterpolatableBuffer turretAngularVelocity = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer turretPositionRadians = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private 
ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = + private Pose2d lastUsedMegatagPose = Pose2d.kZero; + private final ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = + private final ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity = + private final ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer drivePitchRads = + private final ConcurrentTimeInterpolatableBuffer drivePitchRads = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer driveRollRads = + private final ConcurrentTimeInterpolatableBuffer driveRollRads = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer accelX = + private final ConcurrentTimeInterpolatableBuffer accelX = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private ConcurrentTimeInterpolatableBuffer accelY = + private final ConcurrentTimeInterpolatableBuffer accelY = ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); private final AtomicBoolean enablePathCancel = new AtomicBoolean(false); private double autoStartTime; + private Optional trajectoryTargetPose = Optional.empty(); + private Optional trajectoryCurrentPose = Optional.empty(); + public void setAutoStartTime(double timestamp) { autoStartTime = timestamp; } @@ -146,6 +143,7 @@ public void addDriveMotionMeasurements( double rollRads, double accelX, double accelY, + ChassisSpeeds desiredRobotRelativeChassisSpeeds, ChassisSpeeds desiredFieldRelativeSpeeds, ChassisSpeeds 
measuredSpeeds, ChassisSpeeds measuredFieldRelativeSpeeds, @@ -157,6 +155,7 @@ public void addDriveMotionMeasurements( this.driveRollRads.addSample(timestamp, rollRads); this.accelY.addSample(timestamp, accelY); this.accelX.addSample(timestamp, accelX); + this.desiredRobotRelativeChassisSpeeds.set(desiredRobotRelativeChassisSpeeds); this.desiredFieldRelativeChassisSpeeds.set(desiredFieldRelativeSpeeds); this.measuredRobotRelativeChassisSpeeds.set(measuredSpeeds); this.measuredFieldRelativeChassisSpeeds.set(measuredFieldRelativeSpeeds); @@ -167,6 +166,12 @@ public Map.Entry getLatestFieldToRobot() { return fieldToRobot.getLatest(); } + /** + * Predicts robot's future pose based on current velocity. + * + * @param lookaheadTimeS How far ahead to predict (seconds) + * @return Predicted pose + */ public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { var maybeFieldToRobot = getLatestFieldToRobot(); Pose2d fieldToRobot = @@ -177,72 +182,27 @@ public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { new Twist2d(delta.vxMetersPerSecond, delta.vyMetersPerSecond, delta.omegaRadiansPerSecond)); } - public Pose2d getLatestFieldToRobotCenter() { - return fieldToRobot.getLatest().getValue().transformBy(Constants.kTurretToRobotCenter); - } - - // This has rotation and radians to allow for wrapping tracking. 
- public void addTurretUpdates( - double timestamp, - Rotation2d turretRotation, - double turretRadians, - double angularYawRadsPerS) { - // turret frame 180 degrees off from robot frame - robotToTurret.addSample(timestamp, turretRotation.rotateBy(MathHelpers.kRotation2dPi)); - this.turretAngularVelocity.addSample(timestamp, angularYawRadsPerS); - this.turretPositionRadians.addSample(timestamp, turretRadians); - } - - public double getLatestTurretPositionRadians() { - return this.turretPositionRadians.getInternalBuffer().lastEntry().getValue(); - } - - public double getLatestTurretAngularVelocity() { - return this.turretAngularVelocity.getInternalBuffer().lastEntry().getValue(); - } - - private Rotation2d hoodRotation = new Rotation2d(); - - public void addHoodRotation(Rotation2d rotationFromZero) { - hoodRotation = rotationFromZero; - } - - public Rotation2d getHoodRotation() { - return hoodRotation; - } - - private double elevatorHeightM = 0.0; - - public void setElevatorHeight(double heightM) { - this.elevatorHeightM = heightM; - } - - public double getElevatorHeight() { - return elevatorHeightM; - } - - private double climberRotations = 0.0; - - public void setClimberRotations(double rotations) { - this.climberRotations = rotations; - } - - public Optional getRobotToTurret(double timestamp) { - return robotToTurret.getSample(timestamp); + /** + * Like getPredictedFieldToRobot but caps negative velocities to zero. Used for non-holonomic path + * planning. + */ + public Pose2d getPredictedCappedFieldToRobot(double lookaheadTimeS) { + var maybeFieldToRobot = getLatestFieldToRobot(); + Pose2d fieldToRobot = + maybeFieldToRobot == null ? 
MathHelpers.kPose2dZero : maybeFieldToRobot.getValue(); + var delta = getLatestRobotRelativeChassisSpeed(); + delta = delta.times(lookaheadTimeS); + return fieldToRobot.exp( + new Twist2d( + Math.max(0.0, delta.vxMetersPerSecond), + Math.max(0.0, delta.vyMetersPerSecond), + delta.omegaRadiansPerSecond)); } public Optional getFieldToRobot(double timestamp) { return fieldToRobot.getSample(timestamp); } - public Transform2d getTurretToCamera(boolean isTurretCamera) { - return isTurretCamera ? TURRET_TO_CAMERA : ROBOT_TO_CAMERA_B; - } - - public Map.Entry getLatestRobotToTurret() { - return robotToTurret.getLatest(); - } - public ChassisSpeeds getLatestMeasuredFieldRelativeChassisSpeeds() { return measuredFieldRelativeChassisSpeeds.get(); } @@ -251,6 +211,10 @@ public ChassisSpeeds getLatestRobotRelativeChassisSpeed() { return measuredRobotRelativeChassisSpeeds.get(); } + public ChassisSpeeds getLatestDesiredRobotRelativeChassisSpeeds() { + return desiredRobotRelativeChassisSpeeds.get(); + } + public ChassisSpeeds getLatestDesiredFieldRelativeChassisSpeed() { return desiredFieldRelativeChassisSpeeds.get(); } @@ -265,8 +229,12 @@ public ChassisSpeeds getLatestFusedRobotRelativeChassisSpeed() { return speeds; } - public Optional getTurretAngularVelocity(double timestamp) { - return turretAngularVelocity.getSample(timestamp); + public void setLedState(LedState state) { + ledState.set(state); + } + + public LedState getLedState() { + return ledState.get(); } private Optional getMaxAbsValueInRange( @@ -279,13 +247,9 @@ private Optional getMaxAbsValueInRange( else return min; } - public Optional getMaxAbsTurretYawAngularVelocityInRange(double minTime, double maxTime) { - return getMaxAbsValueInRange(turretAngularVelocity, minTime, maxTime); - } - public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double maxTime) { // Gyro yaw rate not set in sim. 
- if (RobotBase.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); + if (Robot.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); } @@ -299,24 +263,17 @@ public Optional getMaxAbsDriveRollAngularVelocityInRange(double minTime, } public void updateMegatagEstimate(VisionFieldPoseEstimate megatagEstimate) { - lastUsedMegatagTimestamp = Timer.getFPGATimestamp(); + lastUsedMegatagTimestamp = megatagEstimate.getTimestampSeconds(); + lastUsedMegatagPose = megatagEstimate.getVisionRobotPoseMeters(); visionEstimateConsumer.accept(megatagEstimate); } - public void updatePinholeEstimate(VisionFieldPoseEstimate pinholeEstimate) { - visionEstimateConsumer.accept(pinholeEstimate); - } - - public void updateLastTriggeredIntakeSensorTimestamp(boolean triggered) { - if (triggered) lastTriggeredIntakeSensorTimestamp = RobotTime.getTimestampSeconds(); - } - public double lastUsedMegatagTimestamp() { return lastUsedMegatagTimestamp; } - public double lastTriggeredIntakeSensorTimestamp() { - return lastTriggeredIntakeSensorTimestamp; + public Pose2d lastUsedMegatagPose() { + return lastUsedMegatagPose; } public boolean isRedAlliance() { @@ -340,6 +297,14 @@ public void updateLogger() { "RobotState/PitchAngularVelocity", this.drivePitchAngularVelocity.getInternalBuffer().lastEntry().getValue()); } + if (this.drivePitchRads.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/PitchRads", this.drivePitchRads.getInternalBuffer().lastEntry().getValue()); + } + if (this.driveRollRads.getInternalBuffer().lastEntry() != null) { + Logger.recordOutput( + "RobotState/RollRads", this.driveRollRads.getInternalBuffer().lastEntry().getValue()); + } if (this.accelX.getInternalBuffer().lastEntry() != null) { Logger.recordOutput( "RobotState/AccelX", this.accelX.getInternalBuffer().lastEntry().getValue()); @@ -350,15 +315,193 @@ 
public void updateLogger() { } Logger.recordOutput( "RobotState/DesiredChassisSpeedFieldFrame", getLatestDesiredFieldRelativeChassisSpeed()); + Logger.recordOutput( + "RobotState/DesiredChassisSpeedRobotFrame", getLatestDesiredRobotRelativeChassisSpeeds()); Logger.recordOutput( "RobotState/MeasuredChassisSpeedFieldFrame", getLatestMeasuredFieldRelativeChassisSpeeds()); Logger.recordOutput( "RobotState/FusedChassisSpeedFieldFrame", getLatestFusedFieldRelativeChassisSpeed()); + + // Add mechanism logging + Logger.recordOutput("RobotState/ElevatorHeightMeters", getElevatorHeightMeters()); + Logger.recordOutput("RobotState/WristRadians", getWristRadians()); + Logger.recordOutput("RobotState/IntakeRollerRotations", getIntakeRollerRotations()); + Logger.recordOutput("RobotState/CoralRollerRotations", getClawRollerRotations()); + + // Add LED state logging + LedState currentLEDState = getLedState(); + Logger.recordOutput( + "RobotState/LEDState", + String.format( + "R:%d G:%d B:%d", currentLEDState.red, currentLEDState.green, currentLEDState.blue)); + } + + private final AtomicReference> exclusiveTag = + new AtomicReference<>(Optional.empty()); + + private final AtomicReference elevatorHeightMeters = new AtomicReference<>(0.0); + private final AtomicReference wristRadians = new AtomicReference<>(0.0); + private final AtomicReference clawRollerRotations = new AtomicReference<>(0.0); + + private final AtomicReference intakeRollerRotations = new AtomicReference<>(0.0); + private final AtomicReference intakeRollerRPS = new AtomicReference<>(0.0); + private final AtomicReference intakePivotRadians = new AtomicReference<>(0.0); + + private final AtomicReference indexerRotations = new AtomicReference<>(0.0); + private final AtomicReference indexerRPS = new AtomicReference<>(0.0); + + private final AtomicReference climberPivotRadians = new AtomicReference<>(0.0); + + private final AtomicReference climberRollerRotations = new AtomicReference<>(0.0); + + private final 
AtomicReference clawRollerRPS = new AtomicReference<>(0.0); + + private final AtomicReference ledState = new AtomicReference<>(LedState.kBlue); + + public void setClimberRollerRotations(double rotations) { + climberRollerRotations.set(rotations); } - Pose3d ampPose3d = new Pose3d(); - Pose3d climberPose3d = new Pose3d(); - Pose3d hoodPose3d = new Pose3d(); - Pose3d shooterPose3d = new Pose3d(); - Pose3d turretPose3d = new Pose3d(); + public void setClimberPivotRadians(double radians) { + climberPivotRadians.set(radians); + } + + public void setIndexerRotations(double rotations) { + indexerRotations.set(rotations); + } + + public void setIndexerRPS(double rps) { + indexerRPS.set(rps); + } + + public double getIndexerRotations() { + return indexerRotations.get(); + } + + public double getIndexerRPS() { + return indexerRPS.get(); + } + + public void setIntakePivotRadians(double radians) { + intakePivotRadians.set(radians); + } + + public double getIntakePivotRadians() { + return intakePivotRadians.get(); + } + + public void setElevatorHeightMeters(double heightMeters) { + elevatorHeightMeters.set(heightMeters); + } + + public void setWristRadians(double radians) { + wristRadians.set(radians); + } + + public void setIntakeRollerRotations(double rotations) { + intakeRollerRotations.set(rotations); + } + + public void setIntakeRollerRPS(double rps) { + intakeRollerRPS.set(rps); + } + + public void setClawRollerRotations(double rotations) { + clawRollerRotations.set(rotations); + } + + public double getElevatorHeightMeters() { + return elevatorHeightMeters.get(); + } + + public double getWristRadians() { + return wristRadians.get(); + } + + public double getIntakeRollerRotations() { + return intakeRollerRotations.get(); + } + + public double getIntakeRollerRPS() { + return intakeRollerRPS.get(); + } + + public double getClawRollerRotations() { + return clawRollerRotations.get(); + } + + public double getClimberRollerRotations() { + return climberRollerRotations.get(); + } 
+ + public double getClimberPivotRadians() { + return climberPivotRadians.get(); + } + + public void setClawRollerRPS(double rps) { + clawRollerRPS.set(rps); + } + + public double getClawRollerRPS() { + return clawRollerRPS.get(); + } + + public void setExclusiveTag(int id) { + exclusiveTag.set(Optional.of(id)); + } + + public void clearExclusiveTag() { + exclusiveTag.set(Optional.empty()); + } + + public Optional getExclusiveTag() { + return exclusiveTag.get(); + } + + public void setTrajectoryTargetPose(Pose2d pose) { + trajectoryTargetPose = Optional.of(pose); + } + + public Optional getTrajectoryTargetPose() { + return trajectoryTargetPose; + } + + public void setTrajectoryCurrentPose(Pose2d pose) { + trajectoryCurrentPose = Optional.of(pose); + } + + public Optional getTrajectoryCurrentPose() { + return trajectoryCurrentPose; + } + + public double getDrivePitchRadians() { + if (this.drivePitchRads.getInternalBuffer().lastEntry() != null) { + return drivePitchRads.getInternalBuffer().lastEntry().getValue(); + } + return 0.0; + } + + public double getDriveRollRadians() { + if (this.driveRollRads.getInternalBuffer().lastEntry() != null) { + return driveRollRads.getInternalBuffer().lastEntry().getValue(); + } + return 0.0; + } + + // public void logControllerMode() { + // Logger.recordOutput("Controller Mode", ModalControls.getInstance().getMode().toString()); + // } + + public static boolean onOpponentSide(boolean isRedAlliance, Pose2d pose) { + return (isRedAlliance + && pose.getTranslation().getX() + < FieldConstants.fieldLength / 2 - Constants.kMidlineBuffer) + || (!isRedAlliance + && pose.getTranslation().getX() + > FieldConstants.fieldLength / 2 + Constants.kMidlineBuffer); + } + + public boolean onOpponentSide() { + return onOpponentSide(this.isRedAlliance(), this.getLatestFieldToRobot().getValue()); + } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java 
b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java index 8a95f20..e254ed3 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -18,19 +18,31 @@ import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; +/** Represents a robot pose estimate from vision with associated uncertainty and metadata. */ public class VisionFieldPoseEstimate { private final Pose2d visionRobotPoseMeters; private final double timestampSeconds; private final Matrix visionMeasurementStdDevs; - + private final int numTags; + + /** + * Creates a new vision field pose estimate. + * + * @param visionRobotPoseMeters The estimated robot pose on the field in meters + * @param timestampSeconds The timestamp when this estimate was captured + * @param visionMeasurementStdDevs Standard deviations representing measurement uncertainty + * @param numTags Number of AprilTags used in this pose estimate + */ public VisionFieldPoseEstimate( Pose2d visionRobotPoseMeters, double timestampSeconds, - Matrix visionMeasurementStdDevs) { + Matrix visionMeasurementStdDevs, + int numTags) { this.visionRobotPoseMeters = visionRobotPoseMeters; this.timestampSeconds = timestampSeconds; this.visionMeasurementStdDevs = visionMeasurementStdDevs; + this.numTags = numTags; } public Pose2d getVisionRobotPoseMeters() { @@ -44,4 +56,8 @@ public double getTimestampSeconds() { public Matrix getVisionMeasurementStdDevs() { return visionMeasurementStdDevs; } + + public int getNumTags() { + return numTags; + } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java index 0bc5ada..1765636 100644 --- 
a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -13,34 +13,30 @@ package frc.robot.subsystems.vision.FRC254; -import org.littletonrobotics.junction.AutoLog; +import edu.wpi.first.math.geometry.Pose3d; +/** Interface for vision system hardware abstraction. */ public interface VisionIO { - @AutoLog + /** Container for all vision input data. */ class VisionIOInputs { - public boolean turretCameraSeesTarget; - - public boolean elevatorCameraSeesTarget; - - public FiducialObservation[] turretCameraFiducialObservations; - - public FiducialObservation[] elevatorCameraFiducialObservations; - - public MegatagPoseEstimate turretCameraMegatagPoseEstimate; - - public int turretCameraMegatagCount; - - public MegatagPoseEstimate elevatorCameraMegatagPoseEstimate; - - public int elevatorCameraMegatagCount; - - public MegatagPoseEstimate turretCameraMegatag2PoseEstimate; - - public MegatagPoseEstimate elevatorCameraMegatag2PoseEstimate; + /** Input data from a single camera. 
*/ + public static class CameraInputs { + public boolean seesTarget; + public FiducialObservation[] fiducialObservations; + public MegatagPoseEstimate megatagPoseEstimate; + public MegatagPoseEstimate megatag2PoseEstimate; + public int megatag2Count; + public int megatagCount; + public Pose3d pose3d; + public double[] standardDeviations = + new double[12]; // [MT1x, MT1y, MT1z, MT1roll, MT1pitch, MT1Yaw, MT2x, + // MT2y, MT2z, MT2roll, MT2pitch, MT2yaw] + } + + public CameraInputs cameraA = new CameraInputs(); + public CameraInputs cameraB = new CameraInputs(); } void readInputs(VisionIOInputs inputs); - - void pollNetworkTables(); } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java index 8feac65..71b8bd4 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024 FRC 254 +// Copyright (c) 2024-2025 FRC 254 // https://github.com/team254 // // This program is free software; you can redistribute it and/or @@ -13,609 +13,354 @@ package frc.robot.subsystems.vision.FRC254; +import edu.wpi.first.math.MathUtil; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.VecBuilder; import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.DriverStation; +import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj2.command.SubsystemBase; -import 
frc.robot.util.MathHelpers; +import frc.robot.subsystems.vision.FRC254.Constants.VisionConstants; import frc.robot.util.RobotTime; -import frc.robot.util.Util; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; import org.littletonrobotics.junction.Logger; +/** + * Vision subsystem that processes AprilTag detections and provides robot pose estimates. Supports + * multiple cameras and various pose estimation algorithms including MegaTag. + */ public class VisionSubsystem extends SubsystemBase { - private final VisionIO io; + private final VisionIO io; private final RobotState state; + private final VisionIO.VisionIOInputs inputs = new VisionIO.VisionIOInputs(); - private final VisionIOInputsAutoLogged inputs = new VisionIOInputsAutoLogged(); - - private static class PinholeObservation { - public Translation2d cameraToTag; - public Pose3d tagPose; - } - - private double lastProcessedTurretTimestamp = 0.0; - private double lastProcessedElevatorTimestamp = 0.0; + private boolean useVision = true; + /** Creates a new vision subsystem. 
*/ public VisionSubsystem(VisionIO io, RobotState state) { this.io = io; this.state = state; } - @Override - public void periodic() { - double timestamp = RobotTime.getTimestampSeconds(); - // Read inputs from IO - io.readInputs(inputs); - Logger.processInputs("Vision", inputs); - - // Optionally update RobotState - if (inputs.turretCameraSeesTarget) { - updateVision( - inputs.turretCameraSeesTarget, - inputs.turretCameraFiducialObservations, - inputs.turretCameraMegatagPoseEstimate, - inputs.turretCameraMegatag2PoseEstimate, - true); - } else if (inputs.elevatorCameraSeesTarget) { - updateVision( - inputs.elevatorCameraSeesTarget, - inputs.elevatorCameraFiducialObservations, - inputs.elevatorCameraMegatagPoseEstimate, - inputs.elevatorCameraMegatag2PoseEstimate, - false); + /** Fuses two vision pose estimates using inverse-variance weighting. */ + private VisionFieldPoseEstimate fuseEstimates( + VisionFieldPoseEstimate a, VisionFieldPoseEstimate b) { + // Ensure b is the newer measurement + if (b.getTimestampSeconds() < a.getTimestampSeconds()) { + VisionFieldPoseEstimate tmp = a; + a = b; + b = tmp; } - Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - timestamp); + // Preview both estimates to the same timestamp + Transform2d a_T_b = + state + .getFieldToRobot(b.getTimestampSeconds()) + .get() + .minus(state.getFieldToRobot(a.getTimestampSeconds()).get()); + + Pose2d poseA = a.getVisionRobotPoseMeters().transformBy(a_T_b); + Pose2d poseB = b.getVisionRobotPoseMeters(); + + // Inverse‑variance weighting + var varianceA = a.getVisionMeasurementStdDevs().elementTimes(a.getVisionMeasurementStdDevs()); + var varianceB = b.getVisionMeasurementStdDevs().elementTimes(b.getVisionMeasurementStdDevs()); + + Rotation2d fusedHeading = poseB.getRotation(); + if (varianceA.get(2, 0) < VisionConstants.kLargeVariance + && varianceB.get(2, 0) < VisionConstants.kLargeVariance) { + fusedHeading = + new Rotation2d( + poseA.getRotation().getCos() / 
varianceA.get(2, 0) + + poseB.getRotation().getCos() / varianceB.get(2, 0), + poseA.getRotation().getSin() / varianceA.get(2, 0) + + poseB.getRotation().getSin() / varianceB.get(2, 0)); + } + + double weightAx = 1.0 / varianceA.get(0, 0); + double weightAy = 1.0 / varianceA.get(1, 0); + double weightBx = 1.0 / varianceB.get(0, 0); + double weightBy = 1.0 / varianceB.get(1, 0); + + Pose2d fusedPose = + new Pose2d( + new Translation2d( + (poseA.getTranslation().getX() * weightAx + + poseB.getTranslation().getX() * weightBx) + / (weightAx + weightBx), + (poseA.getTranslation().getY() * weightAy + + poseB.getTranslation().getY() * weightBy) + / (weightAy + weightBy)), + fusedHeading); + + Matrix fusedStdDev = + VecBuilder.fill( + Math.sqrt(1.0 / (weightAx + weightBx)), + Math.sqrt(1.0 / (weightAy + weightBy)), + Math.sqrt(1.0 / (1.0 / varianceA.get(2, 0) + 1.0 / varianceB.get(2, 0)))); + + int numTags = a.getNumTags() + b.getNumTags(); + double time = b.getTimestampSeconds(); + + return new VisionFieldPoseEstimate(fusedPose, time, fusedStdDev, numTags); } - private void updateVision( - boolean cameraSeesTarget, - FiducialObservation[] cameraFiducialObservations, - MegatagPoseEstimate cameraMegatagPoseEstimate, - MegatagPoseEstimate cameraMegatag2PoseEstimate, - boolean isTurretCamera) { - if (cameraMegatagPoseEstimate != null) { - - String logPreface = "Vision/" + (isTurretCamera ? "Turret/" : "Elevator/"); - var updateTimestamp = cameraMegatagPoseEstimate.timestampSeconds; - boolean alreadyProcessedTimestamp = - (isTurretCamera ? 
lastProcessedTurretTimestamp : lastProcessedElevatorTimestamp) - == updateTimestamp; - if (!alreadyProcessedTimestamp && cameraSeesTarget) { - if (!isTurretCamera && !Util.epsilonEquals(state.getElevatorHeight(), 0.0, 0.05)) return; - Optional pinholeEstimate = Optional.empty(); // - - Optional megatagEstimate = - processMegatagPoseEstimate(cameraMegatagPoseEstimate, isTurretCamera); - Optional megatag2Estimate = - processMegatag2PoseEstimate(cameraMegatag2PoseEstimate, isTurretCamera, logPreface); - - boolean used_megatag = false; - if (megatagEstimate.isPresent()) { - if (shouldUseMegatag( - cameraMegatagPoseEstimate, cameraFiducialObservations, isTurretCamera, logPreface)) { - Logger.recordOutput( - logPreface + "MegatagEstimate", megatagEstimate.get().getVisionRobotPoseMeters()); - state.updateMegatagEstimate(megatagEstimate.get()); - used_megatag = true; - } else { - if (megatagEstimate.isPresent()) { - Logger.recordOutput( - logPreface + "MegatagEstimateRejected", - megatagEstimate.get().getVisionRobotPoseMeters()); - } - } - } + @Override + public void periodic() { + double startTime = RobotTime.getTimestampSeconds(); + io.readInputs(inputs); - if (megatag2Estimate.isPresent() && !used_megatag) { - if (shouldUseMegatag2(cameraMegatag2PoseEstimate, isTurretCamera, logPreface)) { - Logger.recordOutput( - logPreface + "Megatag2Estimate", megatag2Estimate.get().getVisionRobotPoseMeters()); - state.updateMegatagEstimate(megatag2Estimate.get()); - } else { - if (megatagEstimate.isPresent()) { - Logger.recordOutput( - logPreface + "Megatag2EstimateRejected", - megatag2Estimate.get().getVisionRobotPoseMeters()); - } - } - } - if (pinholeEstimate.isPresent()) { - if (shouldUsePinhole(updateTimestamp, isTurretCamera, logPreface)) { - Logger.recordOutput( - logPreface + "PinholeEstimate", pinholeEstimate.get().getVisionRobotPoseMeters()); - state.updatePinholeEstimate(pinholeEstimate.get()); - } else { - Logger.recordOutput( - logPreface + "PinholeEstimateRejected", - 
pinholeEstimate.get().getVisionRobotPoseMeters()); - } - } + logCameraInputs("Vision/CameraA", inputs.cameraA); + logCameraInputs("Vision/CameraB", inputs.cameraB); - if (isTurretCamera) lastProcessedTurretTimestamp = updateTimestamp; - else lastProcessedElevatorTimestamp = updateTimestamp; - } + var maybeMTA = processCamera(inputs.cameraA, "CameraA", VisionConstants.kRobotToCameraA); + var maybeMTB = processCamera(inputs.cameraB, "CameraB", VisionConstants.kRobotToCameraB); + + if (!useVision) { + Logger.recordOutput("Vision/usingVision", false); + Logger.recordOutput("Vision/exclusiveTagId", state.getExclusiveTag().orElse(-1)); + Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - startTime); + return; } - } - @SuppressWarnings("unused") - private List getPinholeObservations( - FiducialObservation[] fiducials, boolean isTurretCamera) { - // Iterate over the fiducials to make VisionUpdates - return Arrays.stream(fiducials) - .map( - fiducial -> { - Optional tagPoseOptional = Constants.kAprilTagLayout.getTagPose(fiducial.id); - if (tagPoseOptional.isEmpty()) { - return null; - } - Pose3d tagPose = tagPoseOptional.get(); - Optional cameraToTarget = - getCameraToTargetTranslation(fiducial, tagPose, isTurretCamera); - - if (cameraToTarget.isEmpty()) { - return null; - } - - var observation = new PinholeObservation(); - observation.cameraToTag = cameraToTarget.get(); - observation.tagPose = tagPose; - return observation; - }) - .filter(Objects::nonNull) - .toList(); - } + Logger.recordOutput("Vision/usingVision", true); - private Optional getCameraToTargetTranslation( - FiducialObservation fiducial, Pose3d tagLocation, boolean isTurretCamera) { - // Get the yaw and pitch angles from to target from the camera POV - double yawRadians = Math.toRadians(fiducial.txnc); - double pitchRadians = Math.toRadians(fiducial.tync); - - Transform3d cameraToTarget = - new Transform3d(new Translation3d(), new Rotation3d(0.0, pitchRadians, 0.0)); - 
cameraToTarget = - cameraToTarget.plus( - new Transform3d(new Translation3d(), new Rotation3d(0.0, 0.0, yawRadians))); - Transform3d cameraGroundPlaneToCamera = - new Transform3d( - new Translation3d(), - new Rotation3d( - 0.0, isTurretCamera ? Constants.kCameraPitchRads : Constants.kCameraBPitchRads, 0)); - Rotation3d cameraGroundPlaneToTarget = - new Pose3d() - .plus(cameraGroundPlaneToCamera.plus(cameraToTarget)) - .getRotation() - .unaryMinus(); - - // Built a unit vector from adjusted rotation. - // Raw vector: x = 1, y = tan(yaw), z = tan(pitch) - // Make it a unit vector by dividing each component by magnitude - // sqrt(x^2+y^2+z^2). - double tan_ty = Math.tan(cameraGroundPlaneToTarget.getZ()); // y and z switch intentional - double tan_tz = -Math.tan(cameraGroundPlaneToTarget.getY()); // y and z switch intentional - - if (tan_tz == 0.0) { - // Protect against divide by zero (i.e. target is at same height as camera). - return Optional.empty(); + Optional accepted = Optional.empty(); + if (maybeMTA.isPresent() != maybeMTB.isPresent()) { + accepted = maybeMTA.isPresent() ? maybeMTA : maybeMTB; + } else if (maybeMTA.isPresent() && maybeMTB.isPresent()) { + accepted = Optional.of(fuseEstimates(maybeMTA.get(), maybeMTB.get())); } - // Find the fixed height difference between the center of the tag and the camera - // lens - double differential_height = - tagLocation.getZ() - - (isTurretCamera - ? Constants.kCameraHeightOffGroundMeters - : Constants.kCameraBHeightOffGroundMeters); - - // We now obtain 3d distance by dividing differential_height by our normalized z - // component z / (Math.sqrt(x^2+y^2+z^2)) - double distance = - differential_height * Math.sqrt(1.0 + tan_tz * tan_tz + tan_ty * tan_ty) / tan_tz; - // Build a 3d vector from distance (which we now know) and orientation (which we - // already computed above). - Translation3d cameraToTargetTranslation = - new Translation3d(distance, cameraGroundPlaneToTarget); - - // Grab the x and y components. 
- return Optional.of( - new Translation2d(cameraToTargetTranslation.getX(), cameraToTargetTranslation.getY())); - } - - static final Set kTagsBlueSpeaker = new HashSet<>(List.of(7, 8)); - static final Set kTagsRedSpeaker = new HashSet<>(List.of(3, 4)); + accepted.ifPresent( + est -> { + Logger.recordOutput("Vision/fusedAccepted", est.getVisionRobotPoseMeters()); + state.updateMegatagEstimate(est); + }); - private boolean shouldUseMegatag( - MegatagPoseEstimate poseEstimate, - FiducialObservation[] fiducials, - boolean isTurretCamera, - String logPreface) { - final double kMinAreaForTurretMegatagEnabled = 0.4; - final double kMinAreaForTurretMegatagDisabled = 0.05; + Logger.recordOutput("Vision/exclusiveTagId", state.getExclusiveTag().orElse(-1)); + Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - startTime); + } - final double kMinAreaForElevatorMegatagEnabled = 0.4; - final double kMinAreaForElevatorMegatagDisabled = 0.05; + private void logCameraInputs(String prefix, VisionIO.VisionIOInputs.CameraInputs cam) { + Logger.recordOutput(prefix + "/SeesTarget", cam.seesTarget); + Logger.recordOutput(prefix + "/MegatagCount", cam.megatagCount); - double kMinAreaForMegatag = 0.0; if (DriverStation.isDisabled()) { - kMinAreaForMegatag = - isTurretCamera ? kMinAreaForTurretMegatagDisabled : kMinAreaForElevatorMegatagDisabled; - } else { - kMinAreaForMegatag = - isTurretCamera ? 
kMinAreaForTurretMegatagEnabled : kMinAreaForElevatorMegatagEnabled; + SmartDashboard.putBoolean(prefix + "/SeesTarget", cam.seesTarget); + SmartDashboard.putNumber(prefix + "/MegatagCount", cam.megatagCount); } - final int kExpectedTagCount = 2; - - final double kLargeYawThreshold = Units.degreesToRadians(200.0); - final double kLargeYawEventTimeWindowS = 0.05; - - if (!isTurretCamera) { - var maxYawVel = - state.getMaxAbsDriveYawAngularVelocityInRange( - poseEstimate.timestampSeconds - kLargeYawEventTimeWindowS, - poseEstimate.timestampSeconds); - if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { - Logger.recordOutput("Vision/Elevator/MegatagYawAngular", false); - return false; - } - Logger.recordOutput("Vision/Elevator/MegatagYawAngular", true); + if (cam.pose3d != null) { + Logger.recordOutput(prefix + "/Pose3d", cam.pose3d); } - if (poseEstimate.avgTagArea < kMinAreaForMegatag) { - Logger.recordOutput(logPreface + "megaTagAvgTagArea", false); - return false; + if (cam.megatagPoseEstimate != null) { + Logger.recordOutput(prefix + "/MegatagPoseEstimate", cam.megatagPoseEstimate.fieldToRobot()); + Logger.recordOutput(prefix + "/Quality", cam.megatagPoseEstimate.quality()); + Logger.recordOutput(prefix + "/AvgTagArea", cam.megatagPoseEstimate.avgTagArea()); } - Logger.recordOutput(logPreface + "megaTagAvgTagArea", true); - if (poseEstimate.fiducialIds.length != kExpectedTagCount) { - Logger.recordOutput(logPreface + "fiducialLength", false); - return false; + if (cam.fiducialObservations != null) { + Logger.recordOutput(prefix + "/FiducialCount", cam.fiducialObservations.length); } - Logger.recordOutput(logPreface + "fiducialLength", true); + } - if (poseEstimate.fiducialIds.length < 1) { - Logger.recordOutput(logPreface + "fiducialLengthLess1", false); - return false; - } - Logger.recordOutput(logPreface + "fiducialLengthLess1", true); + private Optional processCamera( + VisionIO.VisionIOInputs.CameraInputs cam, String label, 
Transform2d robotToCamera) { - if (poseEstimate.fieldToCamera.getTranslation().getNorm() < 1.0) { - Logger.recordOutput(logPreface + "NormCheck", false); - return false; + String logPrefix = "Vision/" + label; + + if (!cam.seesTarget) { + return Optional.empty(); } - Logger.recordOutput(logPreface + "NormCheck", true); - for (var fiducial : fiducials) { - if (fiducial.ambiguity > .9) { - Logger.recordOutput(logPreface + "Ambiguity", false); - return false; + Optional estimate = Optional.empty(); + + if (cam.megatagPoseEstimate != null) { + Optional mtEstimate = + processMegatagPoseEstimate(cam.megatagPoseEstimate, cam, logPrefix); + + mtEstimate.ifPresent( + est -> + Logger.recordOutput( + logPrefix + "/AcceptedMegatagEstimate", est.getVisionRobotPoseMeters())); + + Optional gyroEstimate = + fuseWithGyro(cam.megatagPoseEstimate, cam, logPrefix); + + gyroEstimate.ifPresent( + est -> + Logger.recordOutput( + logPrefix + "/FuseWithGyroEstimate", est.getVisionRobotPoseMeters())); + + // Prefer Megatag when available + if (mtEstimate.isPresent()) { + estimate = mtEstimate; + Logger.recordOutput(logPrefix + "/AcceptMegatag", true); + Logger.recordOutput(logPrefix + "/AcceptGyro", false); + } else if (gyroEstimate.isPresent()) { + estimate = gyroEstimate; + Logger.recordOutput(logPrefix + "/AcceptMegatag", false); + Logger.recordOutput(logPrefix + "/AcceptGyro", true); + } else { + Logger.recordOutput(logPrefix + "/AcceptMegatag", false); + Logger.recordOutput(logPrefix + "/AcceptGyro", false); } } - Logger.recordOutput(logPreface + "Ambiguity", true); - - Set seenTags = - Arrays.stream(poseEstimate.fiducialIds) - .boxed() - .collect(Collectors.toCollection(HashSet::new)); - Set expectedTags = state.isRedAlliance() ? 
kTagsRedSpeaker : kTagsBlueSpeaker; - var result = expectedTags.equals(seenTags); - Logger.recordOutput(logPreface + "SeenTags", result); - return result; - } - private boolean shouldUseMegatag2( - MegatagPoseEstimate poseEstimate, boolean isTurretCamera, String logPreface) { - return shouldUsePinhole(poseEstimate.timestampSeconds, isTurretCamera, logPreface); + return estimate; } - private boolean shouldUsePinhole(double timestamp, boolean isTurretCamera, String preface) { - final double kLargePitchRollYawEventTimeWindowS = 0.1; - final double kLargePitchRollThreshold = Units.degreesToRadians(10.0); - final double kLargeYawThreshold = Units.degreesToRadians(100.0); - if (isTurretCamera) { - var maxTurretVel = - state.getMaxAbsTurretYawAngularVelocityInRange( - timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); - var maxYawVel = - state.getMaxAbsDriveYawAngularVelocityInRange( - timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); - - if (maxTurretVel.isPresent() - && maxYawVel.isPresent() - && Math.abs(maxTurretVel.get() + maxYawVel.get()) > kLargeYawThreshold) { - Logger.recordOutput(preface + "PinholeTurretAngular", false); - return false; - } - Logger.recordOutput(preface + "PinholeTurretAngular", true); - } else { - var maxYawVel = - state.getMaxAbsDriveYawAngularVelocityInRange( - timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); - if (maxYawVel.isPresent() && Math.abs(maxYawVel.get()) > kLargeYawThreshold) { - Logger.recordOutput(preface + "PinholeYawAngular", false); - return false; - } - Logger.recordOutput(preface + "PinholeYawAngular", true); - } + private Optional fuseWithGyro( + MegatagPoseEstimate poseEstimate, + VisionIO.VisionIOInputs.CameraInputs cam, + String logPrefix) { - var maxPitchVel = - state.getMaxAbsDrivePitchAngularVelocityInRange( - timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); - if (maxPitchVel.isPresent() && Math.abs(maxPitchVel.get()) > kLargePitchRollThreshold) { - Logger.recordOutput(preface 
+ "PinholePitchAngular", false); - return false; + if (poseEstimate.timestampSeconds() <= state.lastUsedMegatagTimestamp()) { + return Optional.empty(); } - Logger.recordOutput(preface + "PinholePitchAngular", true); - - var maxRollVel = - state.getMaxAbsDriveRollAngularVelocityInRange( - timestamp - kLargePitchRollYawEventTimeWindowS, timestamp); - if (maxRollVel.isPresent() && Math.abs(maxRollVel.get()) > kLargePitchRollThreshold) { - Logger.recordOutput(preface + "PinholeRollAngular", false); - return false; + + // Use Megatag directly when 2 or more tags are visible + if (poseEstimate.fiducialIds().length > 1) { + return Optional.empty(); } - Logger.recordOutput(preface + "PinholeRollAngular", true); - return true; - } + // Reject if the robot is yawing rapidly (time‑sync unreliable) + final double kHighYawLookbackS = 0.3; + final double kHighYawVelocityRadS = 5.0; - private Optional getFieldToRobotEstimate( - MegatagPoseEstimate poseEstimate, boolean isTurretCamera) { - var fieldToCamera = poseEstimate.fieldToCamera; - if (fieldToCamera.getX() == 0.0) { + if (state + .getMaxAbsDriveYawAngularVelocityInRange( + poseEstimate.timestampSeconds() - kHighYawLookbackS, + poseEstimate.timestampSeconds()) + .orElse(Double.POSITIVE_INFINITY) + > kHighYawVelocityRadS) { return Optional.empty(); } - var turretToCameraTransform = state.getTurretToCamera(isTurretCamera); - var cameraToTurretTransform = turretToCameraTransform.inverse(); - var fieldToTurretPose = fieldToCamera.plus(cameraToTurretTransform); - var fieldToRobotEstimate = MathHelpers.kPose2dZero; - if (isTurretCamera) { - var robotToTurretObservation = state.getRobotToTurret(poseEstimate.timestampSeconds); - if (robotToTurretObservation.isEmpty()) { - return Optional.empty(); - } - var turretToRobot = - MathHelpers.transform2dFromRotation(robotToTurretObservation.get().unaryMinus()); - fieldToRobotEstimate = fieldToTurretPose.plus(turretToRobot); - } else { - fieldToRobotEstimate = 
fieldToCamera.plus(turretToCameraTransform.inverse()); - } - return Optional.of(fieldToRobotEstimate); - } + var priorPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); + if (priorPose.isEmpty()) { + return Optional.empty(); + } - private Optional processMegatag2PoseEstimate( - MegatagPoseEstimate poseEstimate, boolean isTurretCamera, String logPreface) { - var loggedFieldToRobot = state.getFieldToRobot(poseEstimate.timestampSeconds); - if (loggedFieldToRobot.isEmpty()) { + var maybeFieldToTag = + Constants.kAprilTagLayoutReefsOnly.getTagPose(poseEstimate.fiducialIds()[0]); + if (maybeFieldToTag.isEmpty()) { return Optional.empty(); } - var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); - if (maybeFieldToRobotEstimate.isEmpty()) return Optional.empty(); - var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); - - // distance from current pose to vision estimated pose - double poseDifference = - fieldToRobotEstimate - .getTranslation() - .getDistance(loggedFieldToRobot.get().getTranslation()); - - var defaultSet = state.isRedAlliance() ? kTagsRedSpeaker : kTagsBlueSpeaker; - Set speakerTags = new HashSet<>(defaultSet); - speakerTags.removeAll( - Arrays.stream(poseEstimate.fiducialIds) - .boxed() - .collect(Collectors.toCollection(HashSet::new))); - boolean seesSpeakerTags = speakerTags.size() < 2; - - double xyStds; - if (poseEstimate.fiducialIds.length > 0) { - // multiple targets detected - if (poseEstimate.fiducialIds.length >= 2 && poseEstimate.avgTagArea > 0.1) { - xyStds = 0.2; - } - // we detect at least one of our speaker tags and we're close to it. 
- else if (seesSpeakerTags && poseEstimate.avgTagArea > 0.2) { - xyStds = 0.5; - } - // 1 target with large area and close to estimated pose - else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { - xyStds = 0.5; - } - // 1 target farther away and estimated pose is close - else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { - xyStds = 1.0; - } else if (poseEstimate.fiducialIds.length > 1) { - xyStds = 1.2; - } else { - xyStds = 2.0; - } + Pose2d fieldToTag = + new Pose2d(maybeFieldToTag.get().toPose2d().getTranslation(), Rotation2d.kZero); - Logger.recordOutput(logPreface + "Megatag2StdDev", xyStds); - Logger.recordOutput(logPreface + "Megatag2AvgTagArea", poseEstimate.avgTagArea); - Logger.recordOutput(logPreface + "Megatag2PoseDifference", poseDifference); - - Matrix visionMeasurementStdDevs = - VecBuilder.fill(xyStds, xyStds, Units.degreesToRadians(50.0)); - fieldToRobotEstimate = - new Pose2d(fieldToRobotEstimate.getTranslation(), loggedFieldToRobot.get().getRotation()); - return Optional.of( - new VisionFieldPoseEstimate( - fieldToRobotEstimate, poseEstimate.timestampSeconds, visionMeasurementStdDevs)); - } - return Optional.empty(); + Pose2d robotToTag = fieldToTag.relativeTo(poseEstimate.fieldToRobot()); + + Pose2d posteriorPose = + new Pose2d( + fieldToTag + .getTranslation() + .minus(robotToTag.getTranslation().rotateBy(priorPose.get().getRotation())), + priorPose.get().getRotation()); + + double xStd = cam.standardDeviations[VisionConstants.kMegatag1XStdDevIndex]; + double yStd = cam.standardDeviations[VisionConstants.kMegatag1YStdDevIndex]; + double xyStd = Math.max(xStd, yStd); + + return Optional.of( + new VisionFieldPoseEstimate( + posteriorPose, + poseEstimate.timestampSeconds(), + VecBuilder.fill(xyStd, xyStd, VisionConstants.kLargeVariance), + poseEstimate.fiducialIds().length)); } private Optional processMegatagPoseEstimate( - MegatagPoseEstimate poseEstimate, boolean isTurretCamera) { - var loggedFieldToRobot = 
state.getFieldToRobot(poseEstimate.timestampSeconds); - if (loggedFieldToRobot.isEmpty()) { + MegatagPoseEstimate poseEstimate, + VisionIO.VisionIOInputs.CameraInputs cam, + String logPrefix) { + + if (poseEstimate.timestampSeconds() <= state.lastUsedMegatagTimestamp()) { return Optional.empty(); } - var maybeFieldToRobotEstimate = getFieldToRobotEstimate(poseEstimate, isTurretCamera); - if (maybeFieldToRobotEstimate.isEmpty()) return Optional.empty(); - var fieldToRobotEstimate = maybeFieldToRobotEstimate.get(); - - // distance from current pose to vision estimated pose - double poseDifference = - fieldToRobotEstimate - .getTranslation() - .getDistance(loggedFieldToRobot.get().getTranslation()); - - if (poseEstimate.fiducialIds.length > 0) { - double xyStds = 1.0; - double degStds = 12; - // multiple targets detected - if (poseEstimate.fiducialIds.length >= 2) { - xyStds = 0.5; - degStds = 6; - } - // 1 target with large area and close to estimated pose - else if (poseEstimate.avgTagArea > 0.8 && poseDifference < 0.5) { - xyStds = 1.0; - degStds = 12; + // Single‑tag extra checks + if (poseEstimate.fiducialIds().length < 2) { + for (var fiducial : cam.fiducialObservations) { + if (fiducial.ambiguity() > VisionConstants.kDefaultAmbiguityThreshold) { + return Optional.empty(); + } } - // 1 target farther away and estimated pose is close - else if (poseEstimate.avgTagArea > 0.1 && poseDifference < 0.3) { - xyStds = 2.0; - degStds = 30; + + if (poseEstimate.avgTagArea() < VisionConstants.kTagMinAreaForSingleTagMegatag) { + return Optional.empty(); } - Matrix visionMeasurementStdDevs = - VecBuilder.fill(xyStds, xyStds, Units.degreesToRadians(degStds)); - return Optional.of( - new VisionFieldPoseEstimate( - fieldToRobotEstimate, poseEstimate.timestampSeconds, visionMeasurementStdDevs)); + var priorPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); + if (poseEstimate.avgTagArea() < VisionConstants.kTagAreaThresholdForYawCheck + && priorPose.isPresent()) { + 
double yawDiff = + Math.abs( + MathUtil.angleModulus( + priorPose.get().getRotation().getRadians() + - poseEstimate.fieldToRobot().getRotation().getRadians())); + + if (yawDiff > Units.degreesToRadians(VisionConstants.kDefaultYawDiffThreshold)) { + return Optional.empty(); + } + } } - return Optional.empty(); - } - @SuppressWarnings("unused") - private Optional processPinholeVisionEstimate( - List observations, double timestamp, boolean isTurretCamera) { - observations = - observations.stream() - .filter( - (observation) -> { - if (observation.cameraToTag.getNorm() < 10.0) { - return true; - } - Logger.recordOutput( - "Vision/RejectOnNormTimestamp", RobotTime.getTimestampSeconds()); - return false; - }) - .toList(); - - if (observations.isEmpty()) return Optional.empty(); - - int num_updates = 0; - double x = 0.0, y = 0.0; - // All timestamps and rotations are the same. If that changes, need to revisit. - Rotation2d rotation = MathHelpers.kRotation2dZero; - double avgRange = 0.0; - var poseTurret = state.getRobotToTurret(timestamp); - var poseRobot = state.getFieldToRobot(timestamp); - if (poseRobot.isEmpty() || poseTurret.isEmpty()) { + if (poseEstimate.fieldToRobot().getTranslation().getNorm() + < VisionConstants.kDefaultNormThreshold) { return Optional.empty(); } - for (var observation : observations) { - Pose2d fieldToRobotEstimate = - estimateFieldToRobot( - observation.cameraToTag, - observation.tagPose, - poseTurret.get(), - poseRobot.get().getRotation(), - MathHelpers.kRotation2dZero, - isTurretCamera); - x += fieldToRobotEstimate.getX(); - y += fieldToRobotEstimate.getY(); - rotation = fieldToRobotEstimate.getRotation(); - avgRange += observation.cameraToTag.getNorm(); - num_updates++; - } - if (num_updates == 0) return Optional.empty(); + if (Math.abs(cam.pose3d.getZ()) > VisionConstants.kDefaultZThreshold) { + return Optional.empty(); + } - avgRange /= num_updates; + // Exclusive‑tag filtering + var exclusiveTag = state.getExclusiveTag(); + boolean 
hasExclusiveId = + exclusiveTag.isPresent() + && java.util.Arrays.stream(poseEstimate.fiducialIds()) + .anyMatch(id -> id == exclusiveTag.get()); - double xyStds = 100.0; - var fieldToRobotEstimate = new Pose2d(x / num_updates, y / num_updates, rotation); - var poseDifference = - fieldToRobotEstimate.getTranslation().getDistance(poseRobot.get().getTranslation()); - // multiple targets detected - if (observations.size() >= 2 && avgRange < 3.0) { - xyStds = 0.2; - } else if (avgRange < 5.0 && poseDifference < 0.5) { - xyStds = 0.5; - } - // 1 target farther away and estimated pose is close - else if (avgRange < 3.0 && poseDifference < 0.3) { - xyStds = 1.0; - } else if (observations.size() > 1) { - xyStds = 1.2; - } else { - xyStds = 2.0; + if (exclusiveTag.isPresent() && !hasExclusiveId) { + return Optional.empty(); } - ; - final double rotStdDev = Units.degreesToRadians(50); - Matrix visionMeasurementStdDevs = VecBuilder.fill(xyStds, xyStds, rotStdDev); - return Optional.of( - new VisionFieldPoseEstimate(fieldToRobotEstimate, timestamp, visionMeasurementStdDevs)); - } - - private Pose2d estimateFieldToRobot( - Translation2d cameraToTarget, - Pose3d fieldToTarget, - Rotation2d robotToTurret, - Rotation2d gyroAngle, - Rotation2d cameraYawOffset, - boolean isTurretCamera) { - Transform2d cameraToTargetFixed = - MathHelpers.transform2dFromTranslation(cameraToTarget.rotateBy(cameraYawOffset)); - Transform2d turretToTarget = state.getTurretToCamera(isTurretCamera).plus(cameraToTargetFixed); - // In robot frame - Transform2d robotToTarget = turretToTarget; - if (isTurretCamera) { - robotToTarget = MathHelpers.transform2dFromRotation(robotToTurret).plus(turretToTarget); + var loggedPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); + if (loggedPose.isEmpty()) { + return Optional.empty(); } - // In field frame - Transform2d robotToTargetField = - MathHelpers.transform2dFromTranslation(robotToTarget.getTranslation().rotateBy(gyroAngle)); + Pose2d estimatePose = 
poseEstimate.fieldToRobot(); + + double scaleFactor = 1.0 / poseEstimate.quality(); + double xStd = cam.standardDeviations[VisionConstants.kMegatag1XStdDevIndex] * scaleFactor; + double yStd = cam.standardDeviations[VisionConstants.kMegatag1YStdDevIndex] * scaleFactor; + double rotStd = cam.standardDeviations[VisionConstants.kMegatag1YawStdDevIndex] * scaleFactor; - // In field frame - Pose2d fieldToTarget2d = - MathHelpers.pose2dFromTranslation(fieldToTarget.toPose2d().getTranslation()); + double xyStd = Math.max(xStd, yStd); + Matrix visionStdDevs = VecBuilder.fill(xyStd, xyStd, rotStd); - Pose2d fieldToRobot = - fieldToTarget2d.transformBy( - MathHelpers.transform2dFromTranslation( - robotToTargetField.getTranslation().unaryMinus())); + return Optional.of( + new VisionFieldPoseEstimate( + estimatePose, + poseEstimate.timestampSeconds(), + visionStdDevs, + poseEstimate.fiducialIds().length)); + } - Pose2d fieldToRobotYawAdjusted = new Pose2d(fieldToRobot.getTranslation(), gyroAngle); - return fieldToRobotYawAdjusted; + public void setUseVision(boolean useVision) { + this.useVision = useVision; } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index f812f86..08e878f 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,4 +1,4 @@ -// Copyright (c) 2024-2025 Az-FIRST +// Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First // Copyright (c) 2024-2025 FRC 2486 // http://github.com/Coconuts2486-FRC @@ -20,23 +20,26 @@ import static frc.robot.Constants.Cameras.*; import static frc.robot.Constants.VisionConstants.*; -import edu.wpi.first.math.Matrix; import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.Vector; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.numbers.N1; import 
edu.wpi.first.math.numbers.N3; import edu.wpi.first.wpilibj.Alert; import edu.wpi.first.wpilibj.Alert.AlertType; import edu.wpi.first.wpilibj2.command.SubsystemBase; import frc.robot.Constants.AprilTagConstants; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; +import frc.robot.util.TriConsumer; import java.util.LinkedList; import java.util.List; import org.littletonrobotics.junction.Logger; public class Vision extends SubsystemBase { + + public interface VisionConsumer extends TriConsumer> {} + private final VisionConsumer consumer; private final VisionIO[] io; private final VisionIOInputsAutoLogged[] inputs; @@ -184,11 +187,11 @@ public void periodic() { allRobotPosesRejected.toArray(new Pose3d[allRobotPosesRejected.size()])); } - @FunctionalInterface - public static interface VisionConsumer { - public void accept( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs); - } + // @FunctionalInterface + // public static interface VisionConsumer { + // public void accept( + // Pose2d visionRobotPoseMeters, + // double timestampSeconds, + // Matrix visionMeasurementStdDevs); + // } } diff --git a/src/main/java/frc/robot/util/FieldConstants.java b/src/main/java/frc/robot/util/FieldConstants.java new file mode 100644 index 0000000..42de6a5 --- /dev/null +++ b/src/main/java/frc/robot/util/FieldConstants.java @@ -0,0 +1,265 @@ +// Copyright (c) 2025 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file at +// the root directory of this project. + +package frc.robot.util; + +import edu.wpi.first.math.geometry.*; +import edu.wpi.first.math.util.Units; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Contains various field dimensions and useful reference points. All units are in meters and poses + * have a blue alliance origin. 
+ */ +public class FieldConstants { + public static final double fieldLength = Units.inchesToMeters(690.876); + public static final double fieldWidth = Units.inchesToMeters(317); + public static final double startingLineX = + Units.inchesToMeters(299.438); // Measured from the inside of starting line + + public static class Processor { + public static final Pose2d centerFace = + new Pose2d(Units.inchesToMeters(235.726), 0, Rotation2d.fromDegrees(90)); + } + + public static class Barge { + public static final Translation2d farCage = + new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(286.779)); + public static final Translation2d middleCage = + new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(242.855)); + public static final Translation2d closeCage = + new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(199.947)); + + // Measured from floor to bottom of cage + public static final double deepHeight = Units.inchesToMeters(3.125); + public static final double shallowHeight = Units.inchesToMeters(30.125); + } + + public static class CoralStation { + public static final Pose2d leftCenterFace = + new Pose2d( + Units.inchesToMeters(33.526), + Units.inchesToMeters(291.176), + Rotation2d.fromDegrees(90 - 144.011)); + public static final Pose2d rightCenterFace = + new Pose2d( + Units.inchesToMeters(33.526), + Units.inchesToMeters(25.824), + Rotation2d.fromDegrees(144.011 - 90)); + } + + @SuppressWarnings("unchecked") + public static class Reef { + public static final Translation2d center = + new Translation2d(Units.inchesToMeters(176.746), Units.inchesToMeters(158.501)); + public static final double faceToZoneLine = + Units.inchesToMeters(12); // Side of the reef to the inside of the reef zone line + + public static final Pose2d[] centerFaces = + new Pose2d[6]; // Starting facing the driver station in clockwise order + public static final List> branchPositions = + new ArrayList<>(); // Starting at the right branch 
facing the driver station in + // clockwise + public static final List> branchTipPositions = + new ArrayList<>(); // Starting at the right branch facing the driver station in + + // clockwise + + static { + // Initialize faces + centerFaces[0] = + new Pose2d( + Units.inchesToMeters(144.003), + Units.inchesToMeters(158.500), + Rotation2d.fromDegrees(180)); + centerFaces[1] = + new Pose2d( + Units.inchesToMeters(160.373), + Units.inchesToMeters(186.857), + Rotation2d.fromDegrees(120)); + centerFaces[2] = + new Pose2d( + Units.inchesToMeters(193.116), + Units.inchesToMeters(186.858), + Rotation2d.fromDegrees(60)); + centerFaces[3] = + new Pose2d( + Units.inchesToMeters(209.489), + Units.inchesToMeters(158.502), + Rotation2d.fromDegrees(0)); + centerFaces[4] = + new Pose2d( + Units.inchesToMeters(193.118), + Units.inchesToMeters(130.145), + Rotation2d.fromDegrees(-60)); + centerFaces[5] = + new Pose2d( + Units.inchesToMeters(160.375), + Units.inchesToMeters(130.144), + Rotation2d.fromDegrees(-120)); + + // Initialize branch positions + for (int face = 0; face < 6; face++) { + Map fillRight = new HashMap<>(); + Map fillLeft = new HashMap<>(); + for (var level : ReefHeight.values()) { + Pose2d poseDirection = new Pose2d(center, Rotation2d.fromDegrees(180 - (60 * face))); + double adjustX = Units.inchesToMeters(30.738); + double adjustY = Units.inchesToMeters(6.469); + + fillRight.put( + level, + new Pose3d( + new Translation3d( + poseDirection + .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) + .getX(), + poseDirection + .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) + .getY(), + level.height), + new Rotation3d( + 0, + Units.degreesToRadians(level.pitch), + poseDirection.getRotation().getRadians()))); + fillLeft.put( + level, + new Pose3d( + new Translation3d( + poseDirection + .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) + .getX(), + poseDirection + .transformBy(new Transform2d(adjustX, -adjustY, new 
Rotation2d())) + .getY(), + level.height), + new Rotation3d( + 0, + Units.degreesToRadians(level.pitch), + poseDirection.getRotation().getRadians()))); + } + branchPositions.add(fillRight); + branchPositions.add(fillLeft); + } + + // Mirror for red alliance + for (Map blueBranch : branchPositions.toArray(new Map[0])) { + Map redBranch = new HashMap<>(); + for (Map.Entry entry : blueBranch.entrySet()) { + Pose3d bluePose = entry.getValue(); + redBranch.put( + entry.getKey(), + new Pose3d( + Util.flipRedBlue( + new Translation3d(bluePose.getX(), bluePose.getY(), bluePose.getZ())), + new Rotation3d( + 0, -bluePose.getRotation().getY(), -bluePose.getRotation().getZ()))); + } + branchPositions.add(redBranch); + } + for (int face = 0; face < 6; face++) { + Map fillRight = new HashMap<>(); + Map fillLeft = new HashMap<>(); + for (var level : ReefHeight.values()) { + Pose2d poseDirection = new Pose2d(center, Rotation2d.fromDegrees(180 - (60 * face))); + double adjustX = Units.inchesToMeters(-2); + double adjustY = Units.inchesToMeters(6.469); + + fillRight.put( + level, + new Pose3d( + new Translation3d( + poseDirection + .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) + .getX(), + poseDirection + .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) + .getY(), + level.height), + new Rotation3d( + 0, + Units.degreesToRadians(level.pitch), + poseDirection.getRotation().getRadians()))); + fillLeft.put( + level, + new Pose3d( + new Translation3d( + poseDirection + .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) + .getX(), + poseDirection + .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) + .getY(), + level.height), + new Rotation3d( + 0, + Units.degreesToRadians(level.pitch), + poseDirection.getRotation().getRadians()))); + } + branchTipPositions.add(fillRight); + branchTipPositions.add(fillLeft); + } + } + } + + public static class StagingPositions { + // Measured from the center of the ice cream + public 
static final Pose2d leftIceCream = + new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(230.5), new Rotation2d()); + public static final Pose2d middleIceCream = + new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(170.5), new Rotation2d()); + public static final Pose2d rightIceCream = + new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(86.5), new Rotation2d()); + } + + public static class HPIntake { + // HP Intake positions for blue alliance + public static final Pose2d kStationA = + new Pose2d( + new Translation2d(0.7571420669555664, 0.6461422443389893), new Rotation2d(Math.PI / 4)); + + public static final Pose2d kStationB = + new Pose2d( + new Translation2d(0.7571420669555664, 7.364640235900879), new Rotation2d(-Math.PI / 4)); + } + + public enum ReefHeight { + L4(Units.inchesToMeters(72), -90), + L3(Units.inchesToMeters(47.625), -35), + L2(Units.inchesToMeters(31.875), -35), + L1(Units.inchesToMeters(18), 0); + + ReefHeight(double height, double pitch) { + this.height = height; + this.pitch = pitch; // in degrees + } + + public final double height; + public final double pitch; + } + + public enum BranchCode { + A(0), + B(1), + C(2), + D(3), + E(4), + F(5), + G(6), + H(7), + L(8); + + BranchCode(int indexOffset) { + this.indexOffset = indexOffset; + } + + public final int indexOffset; + } +} From dafaf047863b806d4e60532967ce421b999d8a49 Mon Sep 17 00:00:00 2001 From: "Timothy P. 
Ellsworth Bowers" Date: Sun, 28 Dec 2025 20:18:32 -0700 Subject: [PATCH 10/12] Fix rebase error modified: .github/workflows/create-dependabot-labels.yaml --- .github/workflows/create-dependabot-labels.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/create-dependabot-labels.yaml b/.github/workflows/create-dependabot-labels.yaml index 64f7e3b..d2795e5 100644 --- a/.github/workflows/create-dependabot-labels.yaml +++ b/.github/workflows/create-dependabot-labels.yaml @@ -14,11 +14,7 @@ jobs: issues: write # Needed to create labels via API steps: - name: Create labels for Dependabot PRs -<<<<<<< HEAD uses: actions/github-script@v8 -======= - uses: actions/github-script@v7 ->>>>>>> ec71cef (New template-syncing Action) with: script: | const labels = [ From 62de89ccaa1f3af7b991e76328f3edf568ddfb43 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Thu, 8 Jan 2026 10:49:13 -0700 Subject: [PATCH 11/12] Clean non-ASCII characters --- src/main/java/frc/robot/subsystems/drive/Drive.java | 2 +- .../java/frc/robot/subsystems/vision/FRC180/Field.java | 2 +- .../robot/subsystems/vision/FRC180/VisionSubsystem.java | 2 +- .../robot/subsystems/vision/FRC254/VisionSubsystem.java | 8 ++++---- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 6603ce9..eb161cf 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -202,7 +202,7 @@ public void periodic() { } } - // Update the IMU inputs — logging happens automatically + // Update the IMU inputs - logging happens automatically imuIO.updateInputs(imuInputs); // Feed historical samples into odometry diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java b/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java index 98e7589..f6929bd 100644 --- 
a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java @@ -33,7 +33,7 @@ public abstract class Field { // center)." public static final Distance REEF_BRANCH_SEPARATION = Inches.of(13); - // Game Manual Page 33 - "A CORAL is a 11 ⅞ in. long (~30 cm) piece of..." + // Game Manual Page 33 - "A CORAL is a 11 7/8 in. long (~30 cm) piece of..." public static final Distance CORAL_LENGTH = Centimeters.of(30); public static void init() { diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java index 903c91b..87b312d 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java @@ -691,7 +691,7 @@ public void setAllowPoseEstimates(boolean allow) { private void cameraTemperatureAlert(Alert alert, String cameraName, double temperature) { if (temperature >= BAD_CAMERA_TEMP) { int roundedtemp = (int) Math.round(temperature); - alert.setText(cameraName + " Camera temp high (" + roundedtemp + "°F)"); + alert.setText(cameraName + " Camera temp high (" + roundedtemp + "deg F)"); alert.set(true); } else { alert.set(false); diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java index 71b8bd4..a6ec916 100644 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java +++ b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java @@ -69,7 +69,7 @@ private VisionFieldPoseEstimate fuseEstimates( Pose2d poseA = a.getVisionRobotPoseMeters().transformBy(a_T_b); Pose2d poseB = b.getVisionRobotPoseMeters(); - // Inverse‑variance weighting + // Inverse-variance weighting var varianceA = a.getVisionMeasurementStdDevs().elementTimes(a.getVisionMeasurementStdDevs()); var varianceB = 
b.getVisionMeasurementStdDevs().elementTimes(b.getVisionMeasurementStdDevs()); @@ -233,7 +233,7 @@ private Optional fuseWithGyro( return Optional.empty(); } - // Reject if the robot is yawing rapidly (time‑sync unreliable) + // Reject if the robot is yawing rapidly (time-sync unreliable) final double kHighYawLookbackS = 0.3; final double kHighYawVelocityRadS = 5.0; @@ -290,7 +290,7 @@ private Optional processMegatagPoseEstimate( return Optional.empty(); } - // Single‑tag extra checks + // Single-tag extra checks if (poseEstimate.fiducialIds().length < 2) { for (var fiducial : cam.fiducialObservations) { if (fiducial.ambiguity() > VisionConstants.kDefaultAmbiguityThreshold) { @@ -326,7 +326,7 @@ private Optional processMegatagPoseEstimate( return Optional.empty(); } - // Exclusive‑tag filtering + // Exclusive-tag filtering var exclusiveTag = state.getExclusiveTag(); boolean hasExclusiveId = exclusiveTag.isPresent() From 9838623fcf92579eafff96fdf4af7f75eb8a6e69 Mon Sep 17 00:00:00 2001 From: "Timothy P. Ellsworth Bowers" Date: Mon, 2 Feb 2026 17:08:57 -0700 Subject: [PATCH 12/12] Compilable Vision with pose buffering Create a pose buffer and use timestamps to match vision values with those from odometry to reduce jitter. Also be able to define certain tags whose location we trust more (e.g. HUB in 2026) in the event of field misshapenness. Weave the new Vision structure together with the drivetrain to allow for timestamped pose insertion into the estimator. 
--- src/main/java/frc/robot/Constants.java | 51 +- src/main/java/frc/robot/Robot.java | 1 + src/main/java/frc/robot/RobotContainer.java | 86 +- .../java/frc/robot/commands/DriveToPose.java | 209 -- .../frc/robot/commands/DriveToPoseFast.java | 241 --- .../frc/robot/subsystems/drive/Drive.java | 273 ++- .../vision/FRC180/CameraStatus.java | 21 - .../vision/FRC180/CoralDetector.java | 56 - .../vision/FRC180/CoralDetectorReal.java | 200 -- .../vision/FRC180/CoralDetectorSim.java | 148 -- .../robot/subsystems/vision/FRC180/Field.java | 150 -- .../vision/FRC180/LEDSubsystem.java | 230 --- .../vision/FRC180/LimelightStatus.java | 47 - .../vision/FRC180/ReefProximity.java | 61 - .../subsystems/vision/FRC180/SimCamera.java | 147 -- .../subsystems/vision/FRC180/SimLogic.java | 153 -- .../subsystems/vision/FRC180/VisionIO.java | 50 - .../vision/FRC180/VisionIOLimelight.java | 139 -- .../vision/FRC180/VisionIOPhoton.java | 48 - .../vision/FRC180/VisionSubsystem.java | 709 ------- .../subsystems/vision/FRC254/Constants.java | 200 -- .../vision/FRC254/FiducialObservation.java | 101 - .../subsystems/vision/FRC254/LedState.java | 203 -- .../vision/FRC254/MegatagPoseEstimate.java | 127 -- .../subsystems/vision/FRC254/RobotState.java | 507 ----- .../FRC254/VisionFieldPoseEstimate.java | 63 - .../subsystems/vision/FRC254/VisionIO.java | 42 - .../vision/FRC254/VisionSubsystem.java | 366 ---- .../subsystems/vision/FRC6328/RobotState.java | 448 ----- .../subsystems/vision/FRC6328/Vision.java | 401 ---- .../vision/FRC6328/VisionConstants.java | 175 -- .../subsystems/vision/FRC6995/Vision.java | 291 --- .../frc/robot/subsystems/vision/Vision.java | 519 +++-- .../frc/robot/subsystems/vision/VisionIO.java | 27 +- .../subsystems/vision/VisionIOLimelight.java | 10 +- .../vision/VisionIOPhotonVision.java | 133 +- .../vision/VisionIOPhotonVisionSim.java | 24 +- .../java/frc/robot/util/FieldConstants.java | 265 --- src/main/java/frc/robot/util/GeomUtil.java | 165 -- 
.../java/frc/robot/util/LimelightHelpers.java | 1704 ----------------- src/main/java/frc/robot/util/MathHelpers.java | 48 - src/main/java/frc/robot/util/RobotTime.java | 25 - .../robot/util/ServoMotorSubsystemConfig.java | 34 - src/main/java/frc/robot/util/TriConsumer.java | 19 - src/main/java/frc/robot/util/Util.java | 123 -- 45 files changed, 755 insertions(+), 8285 deletions(-) delete mode 100644 src/main/java/frc/robot/commands/DriveToPose.java delete mode 100644 src/main/java/frc/robot/commands/DriveToPoseFast.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/Field.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java 
delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java delete mode 100644 src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java delete mode 100644 src/main/java/frc/robot/util/FieldConstants.java delete mode 100644 src/main/java/frc/robot/util/GeomUtil.java delete mode 100644 src/main/java/frc/robot/util/LimelightHelpers.java delete mode 100644 src/main/java/frc/robot/util/MathHelpers.java delete mode 100644 src/main/java/frc/robot/util/RobotTime.java delete mode 100644 src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java delete mode 100644 src/main/java/frc/robot/util/TriConsumer.java delete mode 100644 src/main/java/frc/robot/util/Util.java diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index ac0ee96..0b39dcc 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -28,7 +28,6 @@ import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.system.plant.DCMotor; import edu.wpi.first.units.measure.Mass; @@ -229,46 +228,6 @@ public static class RobotDevices { // public static final int INTAKE_SERVO = 0; } - public static class 
DriveToPositionConstatnts { - - // The robot is facing AWAY from the tag, so its pose angle matches that of the tag. - // Scoring position has the bumpers 3" from the tag. Bumper-to-center distance is 18", ergo the - // robot pose is 21" from the tag. - public static Translation2d kLeftReefPost = - new Translation2d(Units.inchesToMeters(17.5), Units.inchesToMeters(-5.9)); - // public static Translation2d kLeftReefPostClose = - // new Translation2d(Units.inchesToMeters(16.75), Units.inchesToMeters(-6.75)); - public static Translation2d kRightReefPost = - new Translation2d(Units.inchesToMeters(17.5), Units.inchesToMeters(+6.5)); - public static Translation2d kRightReefPostClose = - new Translation2d(Units.inchesToMeters(16.75), Units.inchesToMeters(+6)); - public static Translation2d kAlgaeGrab = - new Translation2d(Units.inchesToMeters(26.0), Units.inchesToMeters(0.0)); - - public static Translation2d kProcessor = - new Translation2d(Units.inchesToMeters(26.0), Units.inchesToMeters(+6.0)); - - public static Translation2d kStation = - new Translation2d(Units.inchesToMeters(18.0), Units.inchesToMeters(0.0)); - - // Constants used by the DriveToPose Command - public static final double drivekP = AutoConstants.kPPdrivePID.kP; // 0.8; - public static final double drivekD = AutoConstants.kPPdrivePID.kD; // 0.0; - public static final double thetakP = AutoConstants.kPPsteerPID.kP; // 4.0; - public static final double thetakD = AutoConstants.kPPsteerPID.kD; // 0.0; - - // Values in m/s - public static final double driveMaxVelocity = 3.8; - public static final double driveMaxAcceleration = 3.5; - public static final double thetaMaxVelocity = Units.degreesToRadians(360.0); - public static final double thetaMaxAcceleration = Units.degreesToRadians(480.0); - - public static final double driveTolerance = 0.01; - public static final double thetaTolerance = Units.degreesToRadians(1.0); - public static final double ffMinRadius = 0.05; // 0.2; - public static final double ffMaxRadius 
= 0.1; // 0.8; - } - /************************************************************************* */ /** Operator Constants *************************************************** */ public static class OperatorConstants { @@ -457,6 +416,16 @@ public static final class AutoConstants { /** Vision Constants (Assuming PhotonVision) ***************************** */ public static class VisionConstants { + public static final java.util.Set kTrustedTags = + java.util.Set.of(2, 3, 4, 5, 8, 9, 10, 11, 18, 19, 20, 21, 24, 25, 26, 27); // HUB AprilTags + + // Noise scaling factors (lower = more trusted) + public static final double kTrustedTagStdDevScale = 0.6; // 40% more weight + public static final double kUntrustedTagStdDevScale = 1.3; // 30% less weight + + // Optional: if true, reject observations that contain no trusted tags + public static final boolean kRequireTrustedTag = false; + // AprilTag Identification Constants public static final double kAmbiguityThreshold = 0.4; public static final double kTargetLogTimeSecs = 0.1; diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index eb4e2c4..b9668ae 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -192,6 +192,7 @@ public void autonomousInit() { CommandScheduler.getInstance().cancelAll(); m_robotContainer.getDrivebase().setMotorBrake(true); m_robotContainer.getDrivebase().resetHeadingController(); + m_robotContainer.getVision().resetPoseGate(Timer.getFPGATimestamp()); // TODO: Make sure Gyro inits here with whatever is in the path planning thingie switch (Constants.getAutoType()) { diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 8f9594e..9c5a167 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -34,7 +34,6 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; 
import frc.robot.Constants.CANBuses; -import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; @@ -131,37 +130,6 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); - // Vision Factories - private VisionIO[] buildVisionIOsReal(Drive drive) { - return switch (Constants.getVisionType()) { - case PHOTON -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) - .toArray(VisionIO[]::new); - - case LIMELIGHT -> - Arrays.stream(Cameras.ALL) - .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) - .toArray(VisionIO[]::new); - - case NONE -> new VisionIO[] {new VisionIO() {}}; - }; - } - - private static VisionIO[] buildVisionIOsSim(Drive drive) { - var cams = Constants.Cameras.ALL; - VisionIO[] ios = new VisionIO[cams.length]; - for (int i = 0; i < cams.length; i++) { - var cfg = cams[i]; - ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); - } - return ios; - } - - private VisionIO[] buildVisionIOsReplay() { - return new VisionIO[] {new VisionIO() {}}; - } - public static RobotContainer instance; /** @@ -184,7 +152,9 @@ public RobotContainer() { m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -198,11 +168,9 @@ public RobotContainer() { // ---------------- Vision IOs (robot code) ---------------- var cams = frc.robot.Constants.Cameras.ALL; - - // If you keep Vision expecting exactly two cameras: - VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); 
- m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); - + m_vision = + new Vision( + m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsSim(m_drivebase)); m_accel = new Accelerometer(m_imu); // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- @@ -236,7 +204,8 @@ public RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); + m_vision = + new Vision(m_drivebase, m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; break; @@ -498,6 +467,11 @@ public Drive getDrivebase() { return m_drivebase; } + /** Vision getter method for use with Robot.java */ + public Vision getVision() { + return m_vision; + } + /** * Set up the SysID routines from AdvantageKit * @@ -541,6 +515,40 @@ private void definesysIdRoutines() { } } + // Vision Factories + // Vision Factories (REAL) + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + java.util.Arrays.stream(Constants.Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {}; // recommended: no cameras + }; + } + + // Vision Factories (SIM) + private VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + // Vision Factories (REPLAY) + private VisionIO[] buildVisionIOsReplay() { + return new 
VisionIO[] {}; // simplest: Vision does nothing during replay + } + /** * Example Choreo auto command * diff --git a/src/main/java/frc/robot/commands/DriveToPose.java b/src/main/java/frc/robot/commands/DriveToPose.java deleted file mode 100644 index 3cfb335..0000000 --- a/src/main/java/frc/robot/commands/DriveToPose.java +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright (c) 2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2025 FRC 6328 -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file at -// the root directory of this project. - -package frc.robot.commands; - -import static frc.robot.Constants.DriveToPositionConstatnts.*; - -import edu.wpi.first.math.MathUtil; -import edu.wpi.first.math.controller.ProfiledPIDController; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.trajectory.TrapezoidProfile; -import edu.wpi.first.wpilibj.Timer; -import edu.wpi.first.wpilibj2.command.Command; -import frc.robot.Constants; -import frc.robot.Constants.DrivebaseConstants; -import frc.robot.subsystems.drive.Drive; -import frc.robot.util.GeomUtil; -import java.util.function.DoubleSupplier; -import java.util.function.Supplier; -import lombok.Getter; -import org.littletonrobotics.junction.Logger; - -/** - * Drive to Pose Command - * - *

This command is used in both teleop and auto periods for autonomously driving the robot to a - * desired pose. - */ -public class DriveToPose extends Command { - - private final Drive drive; - private final Supplier target; - - // Profiled PID controllers to run trapezoidal motion from current to desired pose - private final ProfiledPIDController driveController = - new ProfiledPIDController( - 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); - private final ProfiledPIDController thetaController = - new ProfiledPIDController( - 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); - - // Various other values - private Translation2d lastSetpointTranslation = Translation2d.kZero; - private Rotation2d lastSetpointRotation = Rotation2d.kZero; - private double lastTime = 0.0; - private double driveErrorAbs = 0.0; - private double thetaErrorAbs = 0.0; - @Getter private boolean running = false; - - private Supplier linearFF = () -> Translation2d.kZero; - private DoubleSupplier omegaFF = () -> 0.0; - - /** Constructor */ - public DriveToPose(Drive drive, Supplier target) { - this.drive = drive; - this.target = target; - - // Enable continuous input for theta controller - thetaController.enableContinuousInput(-Math.PI, Math.PI); - - // Lock-out Drive from other commands - addRequirements(drive); - } - - /** Initialize the Command */ - @Override - public void initialize() { - Pose2d currentPose = drive.getPose(); - ChassisSpeeds fieldVelocity = - ChassisSpeeds.fromRobotRelativeSpeeds( - drive.getChassisSpeeds(), drive.getPose().getRotation()); - Translation2d linearFieldVelocity = - new Translation2d(fieldVelocity.vxMetersPerSecond, fieldVelocity.vyMetersPerSecond); - driveController.reset( - currentPose.getTranslation().getDistance(target.get().getTranslation()), - Math.min( - 0.0, - -linearFieldVelocity - .rotateBy( - target - .get() - .getTranslation() - .minus(currentPose.getTranslation()) - .getAngle() - 
.unaryMinus()) - .getX())); - thetaController.reset( - currentPose.getRotation().getRadians(), fieldVelocity.omegaRadiansPerSecond); - lastSetpointTranslation = currentPose.getTranslation(); - lastSetpointRotation = target.get().getRotation(); - lastTime = Timer.getTimestamp(); - } - - /** Execute the command */ - @Override - public void execute() { - running = true; - - // Get current pose and target pose - Pose2d currentPose = drive.getPose(); - Pose2d targetPose = target.get(); - - // Calculate drive speed - double currentDistance = currentPose.getTranslation().getDistance(targetPose.getTranslation()); - double ffScaler = - MathUtil.clamp((currentDistance - ffMinRadius) / (ffMaxRadius - ffMinRadius), 0.0, 1.0); - driveErrorAbs = currentDistance; - driveController.reset( - lastSetpointTranslation.getDistance(targetPose.getTranslation()), - driveController.getSetpoint().velocity); - double driveVelocityScalar = - driveController.calculate(driveErrorAbs, 0.0) - + driveController.getSetpoint().velocity * ffScaler; - if (currentDistance < driveController.getPositionTolerance()) driveVelocityScalar = 0.0; - lastSetpointTranslation = - new Pose2d( - targetPose.getTranslation(), - new Rotation2d( - Math.atan2( - currentPose.getTranslation().getY() - targetPose.getTranslation().getY(), - currentPose.getTranslation().getX() - targetPose.getTranslation().getX()))) - .transformBy(GeomUtil.toTransform2d(driveController.getSetpoint().position, 0.0)) - .getTranslation(); - - // Calculate theta speed - double thetaVelocity = - thetaController.calculate( - currentPose.getRotation().getRadians(), - new TrapezoidProfile.State( - targetPose.getRotation().getRadians(), - (targetPose.getRotation().minus(lastSetpointRotation)).getRadians() - / (Timer.getTimestamp() - lastTime))) - + thetaController.getSetpoint().velocity * ffScaler; - thetaErrorAbs = - Math.abs(currentPose.getRotation().minus(targetPose.getRotation()).getRadians()); - if (thetaErrorAbs < 
thetaController.getPositionTolerance()) thetaVelocity = 0.0; - lastSetpointRotation = targetPose.getRotation(); - Translation2d driveVelocity = - new Pose2d( - Translation2d.kZero, - new Rotation2d( - Math.atan2( - currentPose.getTranslation().getY() - targetPose.getTranslation().getY(), - currentPose.getTranslation().getX() - targetPose.getTranslation().getX()))) - .transformBy(GeomUtil.toTransform2d(driveVelocityScalar, 0.0)) - .getTranslation(); - lastTime = Timer.getTimestamp(); - - // Scale feedback velocities by input ff - final double linearS = linearFF.get().getNorm() * 3.0; - final double thetaS = Math.abs(omegaFF.getAsDouble()) * 3.0; - driveVelocity = - driveVelocity.interpolate( - linearFF.get().times(DrivebaseConstants.kMaxLinearSpeed), linearS); - thetaVelocity = - MathUtil.interpolate( - thetaVelocity, omegaFF.getAsDouble() * DrivebaseConstants.kMaxAngularSpeed, thetaS); - - // Command speeds - drive.runVelocity( - ChassisSpeeds.fromFieldRelativeSpeeds( - driveVelocity.getX(), driveVelocity.getY(), thetaVelocity, currentPose.getRotation())); - - // Log data - Logger.recordOutput("DriveToPose/DistanceMeasured", currentDistance); - Logger.recordOutput("DriveToPose/DistanceSetpoint", driveController.getSetpoint().position); - Logger.recordOutput("DriveToPose/ThetaMeasured", currentPose.getRotation().getRadians()); - Logger.recordOutput("DriveToPose/ThetaSetpoint", thetaController.getSetpoint().position); - Logger.recordOutput( - "DriveToPose/Setpoint", - new Pose2d[] { - new Pose2d( - lastSetpointTranslation, - Rotation2d.fromRadians(thetaController.getSetpoint().position)) - }); - Logger.recordOutput("DriveToPose/Goal", new Pose2d[] {targetPose}); - } - - /** End the command */ - @Override - public void end(boolean interrupted) { - drive.stop(); - running = false; - // Clear logs - Logger.recordOutput("DriveToPose/Setpoint", new Pose2d[] {}); - Logger.recordOutput("DriveToPose/Goal", new Pose2d[] {}); - } - - /** Checks if the robot is stopped at the 
final pose. */ - public boolean atGoal() { - return running && driveController.atGoal(); // && thetaController.atGoal(); - } - - /** Checks if the robot pose is within the allowed drive and theta tolerances. */ - public boolean withinTolerance(double driveTolerance, Rotation2d thetaTolerance) { - return running - && Math.abs(driveErrorAbs) < driveTolerance - && Math.abs(thetaErrorAbs) < thetaTolerance.getRadians(); - } -} diff --git a/src/main/java/frc/robot/commands/DriveToPoseFast.java b/src/main/java/frc/robot/commands/DriveToPoseFast.java deleted file mode 100644 index 1096c92..0000000 --- a/src/main/java/frc/robot/commands/DriveToPoseFast.java +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright (c) 2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2025 FRC 6328 -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file at -// the root directory of this project. - -package frc.robot.commands; - -import edu.wpi.first.math.MathUtil; -import edu.wpi.first.math.controller.ProfiledPIDController; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.trajectory.TrapezoidProfile; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj2.command.Command; -import frc.robot.Constants; -import frc.robot.subsystems.drive.Drive; -import frc.robot.util.GeomUtil; -import frc.robot.util.LoggedTunableNumber; -import java.util.function.Supplier; -import lombok.Getter; -import org.littletonrobotics.junction.Logger; - -public class DriveToPoseFast extends Command { - private static final LoggedTunableNumber drivekP = new LoggedTunableNumber("DriveToPose/DrivekP"); - private static final LoggedTunableNumber drivekD = new LoggedTunableNumber("DriveToPose/DrivekD"); - private static final 
LoggedTunableNumber thetakP = new LoggedTunableNumber("DriveToPose/ThetakP"); - private static final LoggedTunableNumber thetakD = new LoggedTunableNumber("DriveToPose/ThetakD"); - private static final LoggedTunableNumber driveMaxVelocity = - new LoggedTunableNumber("DriveToPose/DriveMaxVelocity"); - private static final LoggedTunableNumber driveMaxVelocitySlow = - new LoggedTunableNumber("DriveToPose/DriveMaxVelocitySlow"); - private static final LoggedTunableNumber driveMaxAcceleration = - new LoggedTunableNumber("DriveToPose/DriveMaxAcceleration"); - private static final LoggedTunableNumber thetaMaxVelocity = - new LoggedTunableNumber("DriveToPose/ThetaMaxVelocity"); - private static final LoggedTunableNumber thetaMaxVelocitySlow = - new LoggedTunableNumber("DriveToPose/ThetaMaxVelocitySlow"); - private static final LoggedTunableNumber thetaMaxAcceleration = - new LoggedTunableNumber("DriveToPose/ThetaMaxAcceleration"); - private static final LoggedTunableNumber driveTolerance = - new LoggedTunableNumber("DriveToPose/DriveTolerance"); - private static final LoggedTunableNumber driveToleranceSlow = - new LoggedTunableNumber("DriveToPose/DriveToleranceSlow"); - private static final LoggedTunableNumber thetaTolerance = - new LoggedTunableNumber("DriveToPose/ThetaTolerance"); - private static final LoggedTunableNumber thetaToleranceSlow = - new LoggedTunableNumber("DriveToPose/ThetaToleranceSlow"); - private static final LoggedTunableNumber ffMinRadius = - new LoggedTunableNumber("DriveToPose/FFMinRadius"); - private static final LoggedTunableNumber ffMaxRadius = - new LoggedTunableNumber("DriveToPose/FFMinRadius"); - - static { - drivekP.initDefault(9.0); - drivekD.initDefault(0.0); - thetakP.initDefault(4.0); - thetakD.initDefault(0.0); - // Values in m/s - driveMaxVelocity.initDefault(5); - driveMaxVelocitySlow.initDefault(1.5); - driveMaxAcceleration.initDefault(3.5); - thetaMaxVelocity.initDefault(Units.degreesToRadians(360.0)); - 
thetaMaxVelocitySlow.initDefault(Units.degreesToRadians(90.0)); - thetaMaxAcceleration.initDefault(Units.degreesToRadians(720.0)); - driveTolerance.initDefault(0.01); - driveToleranceSlow.initDefault(0.06); - thetaTolerance.initDefault(Units.degreesToRadians(1.0)); - thetaToleranceSlow.initDefault(Units.degreesToRadians(3.0)); - ffMinRadius.initDefault(0.2); - ffMaxRadius.initDefault(0.8); - } - - private final Drive drive; - private final Supplier target; - - private final ProfiledPIDController driveController = - new ProfiledPIDController( - 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); - private final ProfiledPIDController thetaController = - new ProfiledPIDController( - 0, 0.0, 0.0, new TrapezoidProfile.Constraints(0.0, 0.0), Constants.loopPeriodSecs); - - private Translation2d lastSetpointTranslation = new Translation2d(); - private double driveErrorAbs = 0.0; - private double thetaErrorAbs = 0.0; - @Getter private boolean running = false; - - public DriveToPoseFast(Drive drive, Supplier target) { - this.drive = drive; - this.target = target; - - // Enable continuous input for theta controller - thetaController.enableContinuousInput(-Math.PI, Math.PI); - - addRequirements(drive); - } - - @Override - public void initialize() { - Pose2d currentPose = drive.getPose(); - ChassisSpeeds fieldVelocity = - ChassisSpeeds.fromRobotRelativeSpeeds( - drive.getChassisSpeeds(), drive.getPose().getRotation()); - Translation2d linearFieldVelocity = - new Translation2d(fieldVelocity.vxMetersPerSecond, fieldVelocity.vyMetersPerSecond); - driveController.reset( - currentPose.getTranslation().getDistance(target.get().getTranslation()), - Math.min( - 0.0, - -linearFieldVelocity - .rotateBy( - target - .get() - .getTranslation() - .minus(currentPose.getTranslation()) - .getAngle() - .unaryMinus()) - .getX())); - thetaController.reset( - currentPose.getRotation().getRadians(), fieldVelocity.omegaRadiansPerSecond); - lastSetpointTranslation = 
currentPose.getTranslation(); - } - - @Override - public void execute() { - running = true; - - // Update from tunable numbers - if (driveMaxVelocity.hasChanged(hashCode()) - || driveMaxVelocitySlow.hasChanged(hashCode()) - || driveMaxAcceleration.hasChanged(hashCode()) - || driveTolerance.hasChanged(hashCode()) - || driveToleranceSlow.hasChanged(hashCode()) - || thetaMaxVelocity.hasChanged(hashCode()) - || thetaMaxVelocitySlow.hasChanged(hashCode()) - || thetaMaxAcceleration.hasChanged(hashCode()) - || thetaTolerance.hasChanged(hashCode()) - || thetaToleranceSlow.hasChanged(hashCode()) - || drivekP.hasChanged(hashCode()) - || drivekD.hasChanged(hashCode()) - || thetakP.hasChanged(hashCode()) - || thetakD.hasChanged(hashCode())) { - driveController.setP(drivekP.get()); - driveController.setD(drivekD.get()); - driveController.setConstraints( - new TrapezoidProfile.Constraints(driveMaxVelocity.get(), driveMaxAcceleration.get())); - driveController.setTolerance(driveTolerance.get()); - thetaController.setP(thetakP.get()); - thetaController.setD(thetakD.get()); - thetaController.setConstraints( - new TrapezoidProfile.Constraints(thetaMaxVelocity.get(), thetaMaxAcceleration.get())); - thetaController.setTolerance(thetaTolerance.get()); - } - - // Get current pose and target pose - Pose2d currentPose = drive.getPose(); - Pose2d targetPose = target.get(); - - // Calculate drive speed - double currentDistance = currentPose.getTranslation().getDistance(targetPose.getTranslation()); - double ffScaler = - MathUtil.clamp( - (currentDistance - ffMinRadius.get()) / (ffMaxRadius.get() - ffMinRadius.get()), - 0.0, - 1.0); - driveErrorAbs = currentDistance; - driveController.reset( - lastSetpointTranslation.getDistance(targetPose.getTranslation()), - driveController.getSetpoint().velocity); - double driveVelocityScalar = - driveController.getSetpoint().velocity * ffScaler - + driveController.calculate(driveErrorAbs, 0.0); - if (currentDistance < 
driveController.getPositionTolerance()) driveVelocityScalar = 0.0; - lastSetpointTranslation = - new Pose2d( - targetPose.getTranslation(), - currentPose.getTranslation().minus(targetPose.getTranslation()).getAngle()) - .transformBy(GeomUtil.toTransform2d(driveController.getSetpoint().position, 0.0)) - .getTranslation(); - - // Calculate theta speed - double thetaVelocity = - thetaController.getSetpoint().velocity * ffScaler - + thetaController.calculate( - currentPose.getRotation().getRadians(), targetPose.getRotation().getRadians()); - thetaErrorAbs = - Math.abs(currentPose.getRotation().minus(targetPose.getRotation()).getRadians()); - if (thetaErrorAbs < thetaController.getPositionTolerance()) thetaVelocity = 0.0; - - // Command speeds - var driveVelocity = - new Pose2d( - new Translation2d(), - currentPose.getTranslation().minus(targetPose.getTranslation()).getAngle()) - .transformBy(GeomUtil.toTransform2d(driveVelocityScalar, 0.0)) - .getTranslation(); - drive.runVelocity( - ChassisSpeeds.fromFieldRelativeSpeeds( - driveVelocity.getX(), driveVelocity.getY(), thetaVelocity, currentPose.getRotation())); - - // Log data - Logger.recordOutput("DriveToPose/DistanceMeasured", currentDistance); - Logger.recordOutput("DriveToPose/DistanceSetpoint", driveController.getSetpoint().position); - Logger.recordOutput("DriveToPose/ThetaMeasured", currentPose.getRotation().getRadians()); - Logger.recordOutput("DriveToPose/ThetaSetpoint", thetaController.getSetpoint().position); - Logger.recordOutput( - "DriveToPose/Setpoint", - new Pose2d( - lastSetpointTranslation, - Rotation2d.fromRadians(thetaController.getSetpoint().position))); - Logger.recordOutput("DriveToPose/Goal", targetPose); - Logger.recordOutput("DriveToPose/Velocity", driveVelocityScalar); - } - - @Override - public void end(boolean interrupted) { - drive.stop(); - running = false; - // Clear logs - Logger.recordOutput("DriveToPose/Setpoint", new Pose2d[] {}); - Logger.recordOutput("DriveToPose/Goal", new 
Pose2d[] {}); - } - - /** Checks if the robot is stopped at the final pose. */ - public boolean atGoal() { - return running && driveController.atGoal() && thetaController.atGoal(); - } - - /** Checks if the robot pose is within the allowed drive and theta tolerances. */ - public boolean withinTolerance(double driveTolerance, Rotation2d thetaTolerance) { - return running - && Math.abs(driveErrorAbs) < driveTolerance - && Math.abs(thetaErrorAbs) < thetaTolerance.getRadians(); - } -} diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index d8dceab..46788c2 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -13,7 +13,6 @@ import static frc.robot.subsystems.drive.SwerveConstants.*; import choreo.trajectory.SwerveSample; -import com.ctre.phoenix6.swerve.SwerveDrivetrain.SwerveDriveState; import com.ctre.phoenix6.swerve.SwerveRequest; import com.pathplanner.lib.auto.AutoBuilder; import com.pathplanner.lib.config.PIDConstants; @@ -30,7 +29,6 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.math.kinematics.SwerveDriveKinematics; import edu.wpi.first.math.kinematics.SwerveModulePosition; @@ -50,11 +48,13 @@ import frc.robot.Constants.DrivebaseConstants; import frc.robot.Constants.RobotConstants; import frc.robot.subsystems.imu.Imu; +import frc.robot.util.ConcurrentTimeInterpolatableBuffer; import frc.robot.util.LocalADStarAK; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIParsing; import frc.robot.util.RBSISubsystem; import java.util.Optional; +import java.util.OptionalDouble; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import 
org.littletonrobotics.junction.AutoLogOutput; @@ -62,9 +62,15 @@ public class Drive extends RBSISubsystem { - private SwerveDriveState cachedState = null; - private final TimeInterpolatableBuffer poseBuffer = - TimeInterpolatableBuffer.createBuffer(2); + // --- buffers for time-aligned pose + yaw + yawRate history --- + private final ConcurrentTimeInterpolatableBuffer poseBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); + + private final ConcurrentTimeInterpolatableBuffer yawRateBuffer = + frc.robot.util.ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(1.5); static final Lock odometryLock = new ReentrantLock(); private final Imu imu; @@ -81,6 +87,7 @@ public class Drive extends RBSISubsystem { new SwerveModulePosition(), new SwerveModulePosition() }; + private final SwerveModulePosition[] odomPositions = new SwerveModulePosition[4]; private SwerveDrivePoseEstimator m_PoseEstimator = new SwerveDrivePoseEstimator(kinematics, Rotation2d.kZero, lastModulePositions, Pose2d.kZero); @@ -225,64 +232,98 @@ public Drive(Imu imu) { @Override public void rbsiPeriodic() { odometryLock.lock(); + try { + // Get the IMU inputs + final var imuInputs = imu.getInputs(); + + // Stop modules & log empty setpoint states if disabled + if (DriverStation.isDisabled()) { + for (var module : modules) module.stop(); + Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); + Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); + } - // Get the IMU inputs - final var imuInputs = imu.getInputs(); // primitive inputs + // Module periodic updates, which drains queues this cycle + for (var module : modules) module.periodic(); - // Stop modules & log empty setpoint states if disabled - if (DriverStation.isDisabled()) { - for (var module : modules) { - module.stop(); - } - 
Logger.recordOutput("SwerveStates/Setpoints", new SwerveModuleState[] {}); - Logger.recordOutput("SwerveStates/SetpointsOptimized", new SwerveModuleState[] {}); - } + // Feed historical samples into odometry if REAL robot + if (Constants.getMode() != Mode.SIM) { - // Module periodic updates, which drains queues this cycle - for (var module : modules) { - module.periodic(); - } + // ---- Gather per-module histories ---- + final double[][] perModuleTs = new double[4][]; + final SwerveModulePosition[][] perModulePos = new SwerveModulePosition[4][]; + int unionCap = 0; + + for (int m = 0; m < 4; m++) { + perModuleTs[m] = modules[m].getOdometryTimestamps(); + perModulePos[m] = modules[m].getOdometryPositions(); + unionCap += perModuleTs[m].length; + } - // Feed historical samples into odometry if REAL robot - if (Constants.getMode() != Mode.SIM) { - final double[] sampleTimestamps = modules[0].getOdometryTimestamps(); - final int sampleCount = sampleTimestamps.length; - - // Reuse arrays to reduce GC (you likely already have lastModulePositions as a field) - final SwerveModulePosition[] modulePositions = new SwerveModulePosition[4]; - final SwerveModulePosition[] moduleDeltas = new SwerveModulePosition[4]; - - for (int i = 0; i < sampleCount; i++) { - for (int moduleIndex = 0; moduleIndex < 4; moduleIndex++) { - modulePositions[moduleIndex] = modules[moduleIndex].getOdometryPositions()[i]; - moduleDeltas[moduleIndex] = - new SwerveModulePosition( - modulePositions[moduleIndex].distanceMeters - - lastModulePositions[moduleIndex].distanceMeters, - modulePositions[moduleIndex].angle); - lastModulePositions[moduleIndex] = modulePositions[moduleIndex]; + if (unionCap == 0) { + gyroDisconnectedAlert.set(!imuInputs.connected); + return; } - // Pick yaw sample if available; otherwise fall back to current yaw - final double yawRad = - (imuInputs.connected - && imuInputs.odometryYawPositionsRad != null - && imuInputs.odometryYawPositionsRad.length > i) - ? 
imuInputs.odometryYawPositionsRad[i] - : imuInputs.yawPositionRad; + // ---- Fill yaw buffer from IMU odometry samples (preferred) ---- + // These timestamps are authoritative for yaw interpolation and yaw-rate gating. + if (imuInputs.connected + && imuInputs.odometryYawTimestamps != null + && imuInputs.odometryYawPositionsRad != null + && imuInputs.odometryYawTimestamps.length == imuInputs.odometryYawPositionsRad.length + && imuInputs.odometryYawTimestamps.length > 0) { + + final double[] yts = imuInputs.odometryYawTimestamps; + final double[] yaws = imuInputs.odometryYawPositionsRad; + + for (int i = 0; i < yts.length; i++) { + yawBuffer.addSample(yts[i], yaws[i]); + if (i > 0) { + double dt = yts[i] - yts[i - 1]; + if (dt > 1e-6) { + yawRateBuffer.addSample(yts[i], (yaws[i] - yaws[i - 1]) / dt); + } + } + } + } else { + // fallback: buffer "now" yaw only + double now = Timer.getFPGATimestamp(); + yawBuffer.addSample(now, imuInputs.yawPositionRad); + yawRateBuffer.addSample(now, imuInputs.yawRateRadPerSec); + } + + // ---- Build union timeline ---- + final double[] unionTs = new double[Math.max(unionCap, 1)]; + final double EPS = 1e-4; // 0.1 ms + final int unionN = buildUnionTimeline(perModuleTs, unionTs, EPS); + + // ---- Replay at union times ---- + for (int i = 0; i < unionN; i++) { + final double t = unionTs[i]; + + // Interpolate each module to union time + for (int m = 0; m < 4; m++) { + SwerveModulePosition p = interpolateModulePosition(perModuleTs[m], perModulePos[m], t); + odomPositions[m] = p; + } - // Boundary conversion: PoseEstimator requires Rotation2d - final Rotation2d yaw = Rotation2d.fromRadians(yawRad); + // Interpolate yaw at union time + final double yawRad = yawBuffer.getSample(t).orElse(imuInputs.yawPositionRad); - // Apply to pose estimator - m_PoseEstimator.updateWithTime(sampleTimestamps[i], yaw, modulePositions); + m_PoseEstimator.updateWithTime(t, Rotation2d.fromRadians(yawRad), odomPositions); + + // keep buffer in the same timebase 
as estimator + poseBuffer.addSample(t, m_PoseEstimator.getEstimatedPosition()); + } + + Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); } - Logger.recordOutput("Drive/Pose", m_PoseEstimator.getEstimatedPosition()); - } - odometryLock.unlock(); + gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); - gyroDisconnectedAlert.set(!imuInputs.connected && Constants.getMode() != Mode.SIM); + } finally { + odometryLock.unlock(); + } } /** Simulation Periodic Method */ @@ -334,6 +375,8 @@ public void simulationPeriodic() { m_PoseEstimator.addVisionMeasurement(visionPose, visionTimestamp, visionStdDevs); } + poseBuffer.addSample(Timer.getFPGATimestamp(), simPhysics.getPose()); + // 7) Logging Logger.recordOutput("Sim/Pose", simPhysics.getPose()); Logger.recordOutput("Sim/YawRad", yawRad); @@ -470,6 +513,37 @@ public Pose2d getPose() { return m_PoseEstimator.getEstimatedPosition(); } + /** Returns interpolated odometry pose at a given timestamp. */ + public Optional getPoseAtTime(double timestampSeconds) { + return poseBuffer.getSample(timestampSeconds); + } + + /** Adds a vision measurement safely into the PoseEstimator. */ + public void addVisionMeasurement(Pose2d pose, double timestampSeconds, Matrix stdDevs) { + odometryLock.lock(); + try { + m_PoseEstimator.addVisionMeasurement(pose, timestampSeconds, stdDevs); + } finally { + odometryLock.unlock(); + } + } + + /** Max abs yaw rate over [t0, t1] using buffered yaw-rate history (no streams). */ + public OptionalDouble getMaxAbsYawRateRadPerSec(double t0, double t1) { + if (t1 < t0) return OptionalDouble.empty(); + var sub = yawRateBuffer.getInternalBuffer().subMap(t0, true, t1, true); + if (sub.isEmpty()) return OptionalDouble.empty(); + + double maxAbs = 0.0; + boolean any = false; + for (double v : sub.values()) { + any = true; + double a = Math.abs(v); + if (a > maxAbs) maxAbs = a; + } + return any ? 
OptionalDouble.of(maxAbs) : OptionalDouble.empty(); + } + /** Returns the current odometry rotation. */ @AutoLogOutput(key = "Odometry/Yaw") public Rotation2d getHeading() { @@ -557,6 +631,7 @@ public double getMaxAngularAccelRadPerSecPerSec() { /** Resets the current odometry pose. */ public void resetPose(Pose2d pose) { m_PoseEstimator.resetPosition(getHeading(), getModulePositions(), pose); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the gyro based on alliance color */ @@ -566,21 +641,33 @@ public void zeroHeadingForAlliance() { ? Rotation2d.kZero : Rotation2d.k180deg); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); } /** Zeros the heading */ public void zeroHeading() { imu.zeroYaw(Rotation2d.kZero); resetHeadingController(); + markPoseReset(Timer.getFPGATimestamp()); + } + + // ---- Pose reset gate (vision + anything latency-sensitive) ---- + private volatile long poseResetEpoch = 0; // monotonic counter + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + public long getPoseResetEpoch() { + return poseResetEpoch; + } + + public double getLastPoseResetTimestamp() { + return lastPoseResetTimestamp; } - /** Adds a new timestamped vision measurement. 
*/ - public void addVisionMeasurement( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs) { - m_PoseEstimator.addVisionMeasurement( - visionRobotPoseMeters, timestampSeconds, visionMeasurementStdDevs); + private void markPoseReset(double fpgaNow) { + lastPoseResetTimestamp = fpgaNow; + poseResetEpoch++; + Logger.recordOutput("Drive/PoseResetEpoch", poseResetEpoch); + Logger.recordOutput("Drive/PoseResetTimestamp", lastPoseResetTimestamp); } /** CHOREO SECTION (Ignore if AutoType == PATHPLANNER) ******************* */ @@ -678,12 +765,68 @@ private edu.wpi.first.math.Matrix getSimulatedVisionStdDevs() { return stdDevs; } - // Thing from FRC180 - public Pose2d getBufferPose(double timestamp) { - Optional pose = poseBuffer.getSample(timestamp); - if (pose.isPresent()) { - return pose.get(); + private static SwerveModulePosition interpolateModulePosition( + double[] ts, SwerveModulePosition[] samples, double t) { + + final int n = ts.length; + if (n == 0) return new SwerveModulePosition(); + + if (t <= ts[0]) return samples[0]; + if (t >= ts[n - 1]) return samples[n - 1]; + + int lo = 0, hi = n - 1; + while (lo < hi) { + int mid = (lo + hi) >>> 1; + if (ts[mid] < t) lo = mid + 1; + else hi = mid; + } + int i1 = lo; + int i0 = i1 - 1; + + double t0 = ts[i0]; + double t1 = ts[i1]; + if (t1 <= t0) return samples[i1]; + + double alpha = (t - t0) / (t1 - t0); + + double dist = + samples[i0].distanceMeters + + (samples[i1].distanceMeters - samples[i0].distanceMeters) * alpha; + + Rotation2d angle = samples[i0].angle.interpolate(samples[i1].angle, alpha); + + return new SwerveModulePosition(dist, angle); + } + + private static int buildUnionTimeline(double[][] perModuleTs, double[] outUnion, double epsSec) { + int[] idx = new int[perModuleTs.length]; + int outN = 0; + + while (true) { + double minT = Double.POSITIVE_INFINITY; + boolean any = false; + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + if 
(idx[m] >= ts.length) continue; + any = true; + minT = Math.min(minT, ts[idx[m]]); + } + + if (!any) break; + + if (outN == 0 || Math.abs(minT - outUnion[outN - 1]) > epsSec) { + outUnion[outN++] = minT; + } + + for (int m = 0; m < perModuleTs.length; m++) { + double[] ts = perModuleTs[m]; + while (idx[m] < ts.length && Math.abs(ts[idx[m]] - minT) <= epsSec) { + idx[m]++; + } + } } - return null; + + return outN; } } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java deleted file mode 100644 index 4e5d72f..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/CameraStatus.java +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -public interface CameraStatus { - - public void update(); - - public boolean isConnected(); -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java deleted file mode 100644 index e494f1d..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetector.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC180; - -import com.pathplanner.lib.util.FlippingUtil; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.wpilibj.RobotState; -import frc.robot.Robot; -import frc.robot.util.LimelightHelpers.RawDetection; - -public interface CoralDetector { - - public static final double MARGIN = 0.1; - - public static final double AUTO_BLUE_X = 4.8; - public static final double AUTO_RED_X = 12.9; - - public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] detections); - - public default void reset() {} - ; - - public static boolean isValid(Pose2d coralPose) { - // Reject coral detections that are outside the field - boolean auto = RobotState.isAutonomous(); - double currMargin = auto ? 
MARGIN : 0; - if (coralPose.getX() < currMargin - || coralPose.getX() > FlippingUtil.fieldSizeX - currMargin - || coralPose.getY() < currMargin - || coralPose.getY() > FlippingUtil.fieldSizeY - currMargin) { - return false; - } - - if (auto) { - boolean blue = Robot.isBlue(); - if (blue && coralPose.getX() > AUTO_BLUE_X) { - return false; - } else if (!blue && coralPose.getX() < AUTO_RED_X) { - return false; - } - } - - return true; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java deleted file mode 100644 index 7fa1214..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorReal.java +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import static edu.wpi.first.units.Units.*; - -import edu.wpi.first.epilogue.Logged; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.interpolation.InterpolatingDoubleTreeMap; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.RobotState; -import edu.wpi.first.wpilibj.Timer; -import frc.robot.util.LimelightHelpers.RawDetection; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; - -@Logged -public class CoralDetectorReal implements CoralDetector { - - private final InterpolatingDoubleTreeMap distanceMap; - private final List sortedDetections; - private final List algaeDetections; - private final Comparator detectionTYComparator; - private final Comparator detectionTXYComparator; - - // How close to the robot a detected coral has to be to be considered "close" (i.e. intakeable) - private static final double CLOSE_CORAL_DISTANCE = 0.6; - private static final double CLOSE_CORAL_TX = 10; - // How close two detected coral have to be to each other to be considered the same/close enough - // to allow switching without a timeout - private static final double SIMILAR_CORAL_THRESHOLD = 0.75; - - private static final double ALGAE_AVOID_THRESHOLD_DEGREES = 2; // 4.5; - - private double lastDetectionTime = 0; - private double lastDetectionDistance = 0; - private double lastDetectionTX = 0; - private double lastDetectionWidth = 0; - private double lastDetectionHeight = 0; - private double lastDetectionRatio = 0; - private Pose2d lastDetection = null; - - // Additional flags for viewing in logs - private boolean newCoralValue = false; - private boolean returningCloseDetection = false; - private boolean rejectionAlgae = false; - private boolean rejectionOutsideField = false; - - public CoralDetectorReal() { - distanceMap = new InterpolatingDoubleTreeMap(); - 
addDistance(-22.2, 18.5); - addDistance(-14.79, 24); - addDistance(-6.05, 32); - addDistance(5.44, 48); - addDistance(14.89, 72); - addDistance(20.82, 100); - addDistance(24.7, 132); - - sortedDetections = new ArrayList<>(); - algaeDetections = new ArrayList<>(); - detectionTYComparator = (a, b) -> Double.compare(a.tync, b.tync); - detectionTXYComparator = (a, b) -> Double.compare(tXYCombined(a), tXYCombined(b)); - } - - private void addDistance(double ty, double inches) { - distanceMap.put(ty, Inches.of(inches).in(Meters)); - } - - @Override - public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] detections) { - newCoralValue = false; - returningCloseDetection = false; - rejectionAlgae = false; - rejectionOutsideField = false; - sortedDetections.clear(); - algaeDetections.clear(); - - Pose2d recentLastDetection = getRecentLastDetection(); - if (robotPose == null || detections == null || detections.length == 0) { - return recentLastDetection; - } - - boolean auto = RobotState.isAutonomous(); - - // If the last detected coral was very close to the robot, wait a bit in case - // we're trying to intake it - if (recentLastDetection != null && auto && lastDetectionClose()) { - returningCloseDetection = true; - return recentLastDetection; - } - - for (RawDetection detection : detections) { - if (detection.classId == 1) { - sortedDetections.add(detection); - } else { - algaeDetections.add(detection); - } - } - if (auto) { - sortedDetections.sort(detectionTYComparator); - } else { - sortedDetections.sort(detectionTXYComparator); - } - - Pose2d basePose = robotPose.transformBy(VisionSubsystem.ROBOT_TO_INTAKE_CAMERA_2D); - - for (RawDetection detection : sortedDetections) { - double degrees = detection.txnc; - - if (auto) { - // Skip any coral that are close to an algae on the X axis - these are likely lollipops - for (RawDetection algae : algaeDetections) { - if (Math.abs(degrees - algae.txnc) - < ALGAE_AVOID_THRESHOLD_DEGREES) { // && detection.tync < algae.tync) { - 
rejectionAlgae = true; - break; - } - } - if (rejectionAlgae) continue; - } - - double distanceMeters = distanceMap.get(detection.tync); - double radians = Units.degreesToRadians(degrees); - double yComponent = distanceMeters * Math.tan(radians); - Transform2d coralTransform = new Transform2d(distanceMeters, -yComponent, Rotation2d.kZero); - Pose2d coralPose = basePose.transformBy(coralTransform); - - if (!CoralDetector.isValid(coralPose)) { - rejectionOutsideField = true; - continue; - } - - double robotDist = coralPose.getTranslation().getDistance(robotPose.getTranslation()); - - lastDetection = coralPose; - lastDetectionTime = Timer.getFPGATimestamp(); - lastDetectionDistance = robotDist; - lastDetectionTX = detection.txnc; - lastDetectionWidth = width(detection); - lastDetectionHeight = height(detection); - lastDetectionRatio = lastDetectionWidth / lastDetectionHeight; - newCoralValue = true; - return coralPose; - } - - // If we didn't find any coral, return the last detection if it was very recent - return recentLastDetection; - } - - @Override - public void reset() { - lastDetection = null; - lastDetectionDistance = 0; - lastDetectionTime = 0; - } - - private Pose2d getRecentLastDetection() { - boolean lastClose = lastDetectionClose(); - boolean auto = RobotState.isAutonomous(); - - double timeoutSeconds = lastClose ? 
3 : 0.5; - if (auto && lastClose) timeoutSeconds = 1; - if (Timer.getFPGATimestamp() - lastDetectionTime < timeoutSeconds) { - return lastDetection; - } - return null; - } - - public boolean lastDetectionClose() { - if (lastDetection == null) return false; - - return lastDetectionDistance < CLOSE_CORAL_DISTANCE; - } - - private double tXYCombined(RawDetection detection) { - return detection.tync + Math.abs(detection.txnc * 0.75); - } - - private double width(RawDetection detection) { - return Math.abs(detection.corner0_X - detection.corner1_X); - } - - private double height(RawDetection detection) { - return Math.abs(detection.corner0_Y - detection.corner2_Y); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java b/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java deleted file mode 100644 index c303fab..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/CoralDetectorSim.java +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation2d; -import frc.robot.Robot; -import frc.robot.RobotContainer; -import frc.robot.util.LimelightHelpers.RawDetection; -import org.ironmaple.simulation.SimulatedArena; - -/** - * A coral detector that returns poses based off simulation, ignoring any detections passed to it. - */ -public class CoralDetectorSim implements CoralDetector { - - private final double detectionDistance; - private final boolean useFOV; - - private double fovDegrees = 82; // Limelight 4 FOV - - /** - * Create a new CoralDetectorSim with the given detection distance. - * - * @param detectionMeters The maximum distance in meters the robot can be from the coral and still - * detect it. - * @param useFOV Whether to use the robot's field of view to determine if the coral is in sight. - */ - public CoralDetectorSim(double detectionMeters, boolean useFOV) { - detectionDistance = detectionMeters; - this.useFOV = useFOV; - } - - @Override - public Pose2d getCoralPose(Pose2d robotPose, RawDetection[] _detections) { - if (robotPose == null) return null; - - if (RobotContainer.MAPLESIM) { - return getCoralPoseMapleSim(robotPose); - } else { - return getCoralPoseBasic(robotPose); - } - } - - private Pose2d getCoralPoseBasic(Pose2d robotPose) { - Pose2d coralPose = Robot.isBlue() ? SimLogic.blueHPCoralPose : SimLogic.redHPCoralPose; - - if (useFOV && poseWithinPOV(robotPose, coralPose)) { - return coralPose; - } else if (!useFOV - && robotPose.getTranslation().getDistance(coralPose.getTranslation()) - <= detectionDistance) { - return coralPose; - } - - return null; - } - - /** Selects the closest coral to the robot within its field of view. 
*/ - private Pose2d getCoralPoseMapleSim(Pose2d robotPose) { - Pose3d[] corals = SimulatedArena.getInstance().getGamePiecesArrayByType("Coral"); - Pose2d bestCoral = null; - double bestDistance = Double.MAX_VALUE; - for (int i = 0; i < corals.length; i++) { - // Filter out upright coral - if (corals[i].getRotation().getY() > 0.2) continue; - - Pose2d coral = corals[i].toPose2d(); - if (poseWithinPOV(robotPose, coral)) { - double distance = robotPose.getTranslation().getDistance(coral.getTranslation()); - if (distance < bestDistance) { - if (!CoralDetector.isValid(coral)) { - continue; - } - - bestCoral = coral; - bestDistance = distance; - } - } - } - - if (bestCoral == null) return null; - double noise = 0.1; - return bestCoral.transformBy( - new Transform2d(randomRange(-noise, noise), randomRange(-noise, noise), Rotation2d.kZero)); - } - - private double randomRange(double min, double max) { - return Math.random() * (max - min) + min; - } - - // =========== Helper methods to calculate robot POV =========== - - private boolean poseWithinPOV(Pose2d robotPose, Pose2d coralPose) { - // Define the triangle vertices based on the robot's pose - Translation2d robotTranslation = robotPose.getTranslation(); - Rotation2d robotRotation = robotPose.getRotation().rotateBy(Rotation2d.k180deg); - - // Define the field of view (FOV) angle and distance - double fovAngle = Math.toRadians(fovDegrees); - double fovDistance = detectionDistance; - - // Calculate the vertices of the triangle - Translation2d vertex1 = robotTranslation; - Translation2d vertex2 = - robotTranslation.plus( - new Translation2d(fovDistance, fovDistance * Math.tan(fovAngle / 2)) - .rotateBy(robotRotation)); - Translation2d vertex3 = - robotTranslation.plus( - new Translation2d(fovDistance, -fovDistance * Math.tan(fovAngle / 2)) - .rotateBy(robotRotation)); - - // Check if the coralPose is within the triangle - return isPointInTriangle(coralPose.getTranslation(), vertex1, vertex2, vertex3); - } - - private 
boolean isPointInTriangle( - Translation2d pt, Translation2d v1, Translation2d v2, Translation2d v3) { - double d1 = sign(pt, v1, v2); - double d2 = sign(pt, v2, v3); - double d3 = sign(pt, v3, v1); - - boolean hasNeg = (d1 < 0) || (d2 < 0) || (d3 < 0); - boolean hasPos = (d1 > 0) || (d2 > 0) || (d3 > 0); - - return !(hasNeg && hasPos); - } - - private double sign(Translation2d p1, Translation2d p2, Translation2d p3) { - return (p1.getX() - p3.getX()) * (p2.getY() - p3.getY()) - - (p2.getX() - p3.getX()) * (p1.getY() - p3.getY()); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java b/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java deleted file mode 100644 index f6929bd..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/Field.java +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import static edu.wpi.first.units.Units.*; - -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.units.measure.Distance; -import frc.robot.Constants.AprilTagConstants; -import java.util.HashMap; -import java.util.Optional; - -public abstract class Field { - - private static HashMap ALGAE_HEIGHTS, CURRENT_ALGAE_HEIGHTS; - private static Pose3d[] reefAlgaePoses = null; - - // Game Manual Page 24 - "...pipes on the same face are 1 ft. 1 in. (~33 cm) apart (center to - // center)." - public static final Distance REEF_BRANCH_SEPARATION = Inches.of(13); - - // Game Manual Page 33 - "A CORAL is a 11 7/8 in. long (~30 cm) piece of..." - public static final Distance CORAL_LENGTH = Centimeters.of(30); - - public static void init() { - ALGAE_HEIGHTS = new HashMap<>(); - // Red reef - ALGAE_HEIGHTS.put(6, 2); - ALGAE_HEIGHTS.put(7, 3); - ALGAE_HEIGHTS.put(8, 2); - ALGAE_HEIGHTS.put(9, 3); - ALGAE_HEIGHTS.put(10, 2); - ALGAE_HEIGHTS.put(11, 3); - - // Blue reef - for (int i = 6; i <= 11; i++) { - ALGAE_HEIGHTS.put(redReefTagToBlue(i), ALGAE_HEIGHTS.get(i)); - } - - CURRENT_ALGAE_HEIGHTS = new HashMap<>(ALGAE_HEIGHTS); - } - - public static Pose3d[] getReefAlgaePoses() { - if (reefAlgaePoses == null) { - AprilTagFieldLayout layout = AprilTagConstants.kAprilTagLayout; - reefAlgaePoses = new Pose3d[CURRENT_ALGAE_HEIGHTS.size()]; - int index = 0; - for (var entry : CURRENT_ALGAE_HEIGHTS.entrySet()) { - int tag = entry.getKey(); - int level = entry.getValue(); - Optional optionalTagPose = layout.getTagPose(tag); - if (optionalTagPose.isPresent()) { - Pose3d tagPose = optionalTagPose.get(); - reefAlgaePoses[index] = - tagPose.transformBy( - new Transform3d(-0.15, 0, level == 3 ? 
1 : 0.6, Rotation3d.kZero)); - } else { - reefAlgaePoses[index] = Pose3d.kZero; - } - index += 1; - } - } - return reefAlgaePoses; - } - - public static boolean hasReefAlgae(int tag) { - return CURRENT_ALGAE_HEIGHTS.containsKey(tag); - } - - public static Pose3d getReefAlgaePose(int tag) { - Pose3d[] poses = getReefAlgaePoses(); - int index = 0; - for (var entry : CURRENT_ALGAE_HEIGHTS.entrySet()) { - int algaeTag = entry.getKey(); - if (tag == algaeTag) { - return poses[index]; - } - index++; - } - return null; - } - - public static void removeReefAlgae(int tag) { - CURRENT_ALGAE_HEIGHTS.remove(tag); - // Reset reefAlgaePoses so it will be recalculated next time it is requested - reefAlgaePoses = null; - } - - public static void resetReefAlgae() { - CURRENT_ALGAE_HEIGHTS = new HashMap<>(ALGAE_HEIGHTS); - reefAlgaePoses = null; - } - - public static int getAlgaeLevel(int tag) { - Integer level = ALGAE_HEIGHTS.get(tag); - return level != null ? level : -1; - } - - public static int blueReefTagToRed(int blueTag) { - switch (blueTag) { - case 17: - return 8; - case 18: - return 7; - case 19: - return 6; - case 20: - return 11; - case 21: - return 10; - case 22: - return 9; - default: - return -1; - } - } - - public static int redReefTagToBlue(int redTag) { - switch (redTag) { - case 6: - return 19; - case 7: - return 18; - case 8: - return 17; - case 9: - return 22; - case 10: - return 21; - case 11: - return 20; - default: - return -1; - } - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java deleted file mode 100644 index 498139f..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/LEDSubsystem.java +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the 
Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC180; - -import com.ctre.phoenix.ErrorCode; -import com.ctre.phoenix.led.Animation; -import com.ctre.phoenix.led.CANdle; -import com.ctre.phoenix.led.CANdle.LEDStripType; -import com.ctre.phoenix.led.CANdle.VBatOutputMode; -import com.ctre.phoenix.led.CANdleConfiguration; -import com.ctre.phoenix.led.ColorFlowAnimation; -import com.ctre.phoenix.led.ColorFlowAnimation.Direction; -import com.ctre.phoenix.led.LarsonAnimation; -import com.ctre.phoenix.led.LarsonAnimation.BounceMode; -import com.ctre.phoenix.led.RainbowAnimation; -import com.ctre.phoenix.led.SingleFadeAnimation; -import com.ctre.phoenix.led.StrobeAnimation; -import com.ctre.phoenix.led.TwinkleAnimation; -import com.ctre.phoenix.led.TwinkleAnimation.TwinklePercent; -import com.ctre.phoenix.led.TwinkleOffAnimation; -import com.ctre.phoenix.led.TwinkleOffAnimation.TwinkleOffPercent; -import edu.wpi.first.wpilibj2.command.Command; -import edu.wpi.first.wpilibj2.command.SubsystemBase; -import frc.robot.Constants.CANandPowerPorts; - -public class LEDSubsystem extends SubsystemBase { - - public final LEDColor RED = new LEDColor(50, 0, 0, 255); - public final LEDColor BLUE = new LEDColor(21, 46, 150, 255); // navy blue - public final LEDColor GREEN = new LEDColor(0, 255, 0, 255); - public final LEDColor YELLOW = new LEDColor(255, 255, 0, 255); - public final LEDColor WHITE = new LEDColor(255, 255, 255, 255); - public final LEDColor ALGAE = new LEDColor(0, 255, 30, 255); - public final LEDColor PURPLE = new LEDColor(163, 49, 196, 255); - - public final RainbowAnimation rainbow; - public final SingleFadeAnimation blueFade, - redFade, - 
yellowFade, - whiteFade, - yellowFadeFast, - purpleFade; - public final TwinkleAnimation blueTwinkle, redTwinkle; - public final ColorFlowAnimation blueFlow, redFlow, yellowFlow; - public final ColorFlowAnimation blueLeftFlow, blueRightFlow; - public final LarsonAnimation yellowLarson; - public final StrobeAnimation greenStrobe; - - private final int STRIP_LENGTH = 49; - private final int STRIP_2_LENGTH = 47; - private final int NUM_LEDS = 8 + STRIP_LENGTH + STRIP_2_LENGTH; - private final int STRIP_OFFSET = 0; - private final int NO_CANDLE_OFFSET = 8; - - private final CANdle candle; - - private Animation currentAnimation = null; - private Animation currentAnimation2 = null; - private LEDColor currentColor = null; - private LEDColor currentSplitColor = null; - - public LEDSubsystem() { - CANdleConfiguration config = new CANdleConfiguration(); - config.disableWhenLOS = false; - config.statusLedOffWhenActive = true; - config.brightnessScalar = 0.2; - config.v5Enabled = true; - config.stripType = LEDStripType.GRB; - config.vBatOutputMode = VBatOutputMode.Off; - candle = new CANdle(CANandPowerPorts.LED.getDeviceNumber(), CANandPowerPorts.LED.getBus()); - candle.configAllSettings(config); - - rainbow = new RainbowAnimation(1, 1, NUM_LEDS, false, STRIP_OFFSET); - - blueFade = fade(BLUE, 0.5); - redFade = fade(RED, 0.5); - yellowFade = fade(YELLOW, 0.5); - purpleFade = fade(PURPLE, 1); - whiteFade = fade(WHITE, 1); - yellowFadeFast = fade(YELLOW, 1); - - blueTwinkle = twinkle(BLUE, 0.25, TwinklePercent.Percent64); - redTwinkle = twinkle(RED, 0.25, TwinklePercent.Percent64); - - blueFlow = colorFlow(BLUE, 0.7, Direction.Forward); - redFlow = colorFlow(RED, 0.7, Direction.Forward); - yellowFlow = colorFlow(YELLOW, 0.5, Direction.Forward); - yellowLarson = larson(YELLOW, 0.5, 5, BounceMode.Front); - - blueLeftFlow = colorFlow(BLUE, 0.7, Direction.Forward, STRIP_LENGTH, NO_CANDLE_OFFSET); - blueRightFlow = - colorFlow(BLUE, 0.7, Direction.Backward, STRIP_2_LENGTH, 
NO_CANDLE_OFFSET + STRIP_LENGTH); - - greenStrobe = strobe(GREEN, 0.25); - } - - public Command animate(Animation animation) { - return run(() -> setAnimation(animation)); - } - - public Command color(LEDColor color) { - return run(() -> setColor(color)); - } - - public void setAnimation(Animation animation) { - setAnimation(animation, 0); - } - - public void setAnimation(Animation animation, int slot) { - if (animation != currentAnimation) { - // clearAnimations(); - ErrorCode code = candle.animate(animation, slot); - if (code == ErrorCode.OK) { - clearState(); - currentAnimation = animation; - } - } - } - - public void setDualAnimation(Animation animation1, Animation animation2) { - if (currentAnimation != animation1 || currentAnimation2 != animation2) { - clearAnimations(); - ErrorCode code1 = candle.animate(animation1, 0); - ErrorCode code2 = candle.animate(animation2, 1); - - if (code1 == ErrorCode.OK && code2 == ErrorCode.OK) { - clearState(); - currentAnimation = animation1; - currentAnimation2 = animation2; - } - } - } - - public void setColor(LEDColor color) { - if (color != currentColor) { - clearAnimations(); - ErrorCode code = candle.setLEDs(color.r, color.g, color.b, color.w, STRIP_OFFSET, NUM_LEDS); - if (code == ErrorCode.OK) { - clearState(); - currentColor = color; - } - } - } - - public void setSplitColor(LEDColor top, LEDColor bottom) { - if (top != currentColor || bottom != currentSplitColor) { - clearAnimations(); - ErrorCode code1 = candle.setLEDs(bottom.r, bottom.g, bottom.b, bottom.w, 0, 24); - ErrorCode code2 = candle.setLEDs(top.r, top.g, top.b, top.w, 24, 25); - - ErrorCode code3 = candle.setLEDs(top.r, top.g, top.b, top.w, 49, 23); - ErrorCode code4 = candle.setLEDs(bottom.r, bottom.g, bottom.b, bottom.w, 72, 24); - - if (code1 == ErrorCode.OK - && code2 == ErrorCode.OK - && code3 == ErrorCode.OK - && code4 == ErrorCode.OK) { - clearState(); - currentColor = top; - currentSplitColor = bottom; - } - } - } - - private void 
clearAnimations() { - candle.clearAnimation(0); - } - - private void clearState() { - currentAnimation = null; - currentAnimation2 = null; - currentColor = null; - currentSplitColor = null; - } - - public SingleFadeAnimation fade(LEDColor color, double speed) { - return new SingleFadeAnimation( - color.r, color.g, color.b, color.w, speed, NUM_LEDS, STRIP_OFFSET); - } - - public TwinkleAnimation twinkle(LEDColor color, double speed, TwinklePercent percent) { - return new TwinkleAnimation( - color.r, color.g, color.b, color.w, speed, NUM_LEDS, percent, STRIP_OFFSET); - } - - public TwinkleOffAnimation twinkleOff(LEDColor color, double speed, TwinkleOffPercent percent) { - return new TwinkleOffAnimation( - color.r, color.g, color.b, color.w, speed, NUM_LEDS, percent, STRIP_OFFSET); - } - - public ColorFlowAnimation colorFlow(LEDColor color, double speed, Direction direction) { - return colorFlow(color, speed, direction, NUM_LEDS, STRIP_OFFSET); - } - - public ColorFlowAnimation colorFlow( - LEDColor color, double speed, Direction direction, int numLeds, int ledOffset) { - return new ColorFlowAnimation( - color.r, color.g, color.b, color.w, speed, numLeds, direction, ledOffset); - } - - public LarsonAnimation larson(LEDColor color, double speed, int size, BounceMode mode) { - return new LarsonAnimation( - color.r, - color.g, - color.b, - color.w, - speed, - NUM_LEDS - NO_CANDLE_OFFSET, - mode, - size, - NO_CANDLE_OFFSET); - } - - public StrobeAnimation strobe(LEDColor color, double speed) { - return new StrobeAnimation(color.r, color.g, color.b, color.w, speed, NUM_LEDS, STRIP_OFFSET); - } - - private record LEDColor(int r, int g, int b, int w) {} -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java b/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java deleted file mode 100644 index 84f84bd..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/LimelightStatus.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 
(c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.wpilibj.Timer; -import frc.robot.util.LimelightHelpers; - -public class LimelightStatus implements CameraStatus { - - private final String limelightName; - - private double limelightHeartbeat = 0; - private double lastHeartbeatTime = 0; - private boolean limelightConnected = false; - - public LimelightStatus(String limelightName) { - this.limelightName = limelightName; - } - - @Override - public void update() { - double newHeartbeat = LimelightHelpers.getLimelightNTDouble(limelightName, "hb"); - if (newHeartbeat > limelightHeartbeat) { - limelightConnected = true; - limelightHeartbeat = newHeartbeat; - lastHeartbeatTime = Timer.getFPGATimestamp(); - } else if (Timer.getFPGATimestamp() - lastHeartbeatTime >= 1) { - limelightConnected = false; - } - } - - @Override - public boolean isConnected() { - return limelightConnected; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java b/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java deleted file mode 100644 index 1c9f5fd..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/ReefProximity.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the 
Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.math.geometry.Pose2d; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map.Entry; - -/** - * This class is used to determine the closest reef scoring position to the robot's current - * position. - */ -public class ReefProximity { - - final List> reefList; - - public ReefProximity( - HashMap leftReefHashMap, HashMap rightReefHashMap) { - reefList = new ArrayList>(); - leftReefHashMap.entrySet().forEach(reefList::add); - rightReefHashMap.entrySet().forEach(reefList::add); - } - - public Entry closestReefPose(Pose2d position, boolean blueAlliance) { - return closestReefPose( - position, blueAlliance ? 
VisionSubsystem.blueReefTags : VisionSubsystem.redReefTags); - } - - public Entry closestReefPose(Pose2d position, List tagOptions) { - double currentDistance = Double.MAX_VALUE; - Entry closest = null; - - for (var entry : reefList) { - // Ensure the reef tag is in the list of tags we are looking for - if (!tagOptions.contains(entry.getKey())) { - continue; - } - - double distance = position.getTranslation().getDistance(entry.getValue().getTranslation()); - if (distance < currentDistance) { - currentDistance = distance; - closest = entry; - } - } - - return closest; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java b/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java deleted file mode 100644 index a2def21..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/SimCamera.java +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation3d; -import frc.robot.Robot; -import frc.robot.util.LimelightHelpers.PoseEstimate; -import frc.robot.util.LimelightHelpers.RawFiducial; -import java.util.List; -import java.util.Optional; -import org.photonvision.EstimatedRobotPose; -import org.photonvision.PhotonCamera; -import org.photonvision.PhotonPoseEstimator; -import org.photonvision.PhotonPoseEstimator.PoseStrategy; -import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; -import org.photonvision.simulation.VisionSystemSim; -import org.photonvision.targeting.PhotonPipelineResult; -import org.photonvision.targeting.PhotonTrackedTarget; - -/** Simulates a Limelight 4 using PhotonVision. */ -public class SimCamera { - - static final RawFiducial[] EMPTY_FIDUCIALS = new RawFiducial[0]; - - final PhotonCamera camera; - final PhotonPoseEstimator photonPoseEstimator; - - VisionSystemSim visionSim = null; - PhotonCameraSim cameraSim = null; - - public SimCamera(String name, Transform3d cameraPosition, AprilTagFieldLayout apriltagLayout) { - camera = new PhotonCamera(name); - photonPoseEstimator = - new PhotonPoseEstimator(apriltagLayout, PoseStrategy.LOWEST_AMBIGUITY, cameraPosition); - - // Everything past this point is simulation only - if (Robot.isReal()) return; - - visionSim = new VisionSystemSim("main"); - visionSim.addAprilTags(apriltagLayout); - - SimCameraProperties cameraProp = new SimCameraProperties(); - // TODO: set LL4 diagonal FOV instead of horizontal (82) - cameraProp.setCalibration(1280, 960, Rotation2d.fromDegrees(100)); - cameraProp.setCalibError(0.35, 0.10); - cameraProp.setFPS(45); - cameraProp.setAvgLatencyMs(50); - cameraProp.setLatencyStdDevMs(15); - // Create a PhotonCameraSim which will update the 
linked PhotonCamera's values with visible - // targets. - cameraSim = new PhotonCameraSim(camera, cameraProp); - // Add the simulated camera to view the targets on this simulated field. - visionSim.addCamera(cameraSim, cameraPosition); - - cameraSim.enableDrawWireframe(true); - } - - PoseEstimate poseEstimate = null; - RawFiducial[] rawFiducials = null; - double timestamp = 0; - - public void update() { - - PhotonPipelineResult latestResult = null; - EstimatedRobotPose latestEstimate = null; - for (PhotonPipelineResult result : camera.getAllUnreadResults()) { - Optional poseEst = photonPoseEstimator.update(result); - if (poseEst.isPresent()) { - latestEstimate = poseEst.get(); - latestResult = result; - } - } - List targets = latestResult != null ? latestResult.getTargets() : null; - - poseEstimate = toPoseEstimate(latestEstimate, targets); - if (poseEstimate != null) timestamp = poseEstimate.timestampSeconds; - - if (targets != null) { - rawFiducials = toRawFiducials(targets); - } else { - rawFiducials = EMPTY_FIDUCIALS; - } - } - - public PoseEstimate getPoseEstimate() { - return poseEstimate; - } - - public RawFiducial[] getRawFiducials() { - return rawFiducials; - } - - /** Converts a PhotonVision EstimatedRobotPose to a Limelight PoseEstimate */ - private PoseEstimate toPoseEstimate( - EstimatedRobotPose estimate, List targets) { - if (estimate == null) return null; - - PoseEstimate est = new PoseEstimate(); - est.pose = estimate.estimatedPose.toPose2d(); - est.timestampSeconds = estimate.timestampSeconds; - est.tagCount = targets.size(); - est.avgTagDist = - targets.stream() - .mapToDouble( - t -> t.getBestCameraToTarget().getTranslation().getDistance(Translation3d.kZero)) - .average() - .orElse(0); - est.avgTagArea = 99; // targets.stream().mapToDouble(t -> t.area).average().orElse(0); - return est; - } - - /** Converts a PhotonTrackedTarget list to a Limelight RawFiducials array */ - private RawFiducial[] toRawFiducials(List targets) { - RawFiducial[] 
fiducials = new RawFiducial[targets.size()]; - for (int i = 0; i < targets.size(); i++) { - PhotonTrackedTarget target = targets.get(i); - double dist = - target.getBestCameraToTarget().getTranslation().getDistance(Translation3d.kZero); - - fiducials[i] = - new RawFiducial( - target.getFiducialId(), - target.getYaw(), - target.getPitch(), - target.area, - dist, - dist, - target.poseAmbiguity); - } - return fiducials; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java b/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java deleted file mode 100644 index 9c9c462..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/SimLogic.java +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import static edu.wpi.first.units.Units.*; - -import com.pathplanner.lib.util.FlippingUtil; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.units.measure.Distance; -import frc.robot.Robot; -import frc.robot.RobotContainer; -import org.ironmaple.simulation.SimulatedArena; -import org.ironmaple.simulation.seasonspecific.reefscape2025.ReefscapeCoralOnField; - -public abstract class SimLogic { - - public static final Pose2d redHPCoralPose = new Pose2d(16.17, 1.33, new Rotation2d()); - public static final Pose2d blueHPCoralPose = FlippingUtil.flipFieldPose(redHPCoralPose); - public static final double CORAL_LENGTH = Field.CORAL_LENGTH.in(Meters); - - public static boolean intakeHasCoral = false; - public static boolean armHasCoral = false; - public static boolean intakeHasAlgae = false; - public static boolean armHasAlgae = false; - public static int coralScored = 0; - - public static double armCoralPosition = -1; - - public static boolean robotHasCoral() { - return intakeHasCoral || armHasCoral; - } - - public static boolean robotHasAlgae() { - return intakeHasAlgae || armHasAlgae; - } - - public static void spawnHumanPlayerCoral() { - spawnHumanPlayerCoral(Robot.isBlue()); - } - - public static void spawnHumanPlayerCoral(boolean blue) { - if (!RobotContainer.MAPLESIM) { - return; - } - - for (int i = 0; i < 2; i++) { - Pose2d coralPose = blue ? 
blueHPCoralPose : redHPCoralPose; - - if (i == 1) { - coralPose = coralPose.transformBy(new Transform2d(0, 5, new Rotation2d())); - } - - // generate a random physical offset between -0.3 and 0.3 meters and a random rotation - double xOffset = randomNumberPlusMinus(0.3); - double yOffset = randomNumberPlusMinus(0.3); - double rotationOffset = Math.random() * 360; - Transform2d randomTransform = - new Transform2d(xOffset, yOffset, Rotation2d.fromDegrees(rotationOffset)); - - spawnCoral(coralPose.transformBy(randomTransform)); - } - } - - public static void spawnCoral(Pose2d pose) { - SimulatedArena.getInstance().addGamePiece(new ReefscapeCoralOnField(pose)); - } - - public static void scoreCoral() { - if (!RobotContainer.MAPLESIM) { - return; - } - - RobotContainer rc = RobotContainer.instance; - // SwerveDriveSimulation swerveSim = rc.drivetrain.getDriveSim(); - // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); - double coralAngle; - double heightOffset = 0.6; - double xOffset; - Distance coralHeight = Meters.of(0. 
+ heightOffset); - - // SimulatedArena.getInstance() - // .addGamePieceProjectile( - // new ReefscapeCoralOnFly( - // simRobotPose.getTranslation(), - // // The scoring mechanism position on the robot - // new Translation2d(xOffset, 0), - // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), - // simRobotPose.getRotation(), - // coralHeight, - // // The initial speed of the coral - // MetersPerSecond.of(2), - // Degrees.of(coralAngle))); - } - - public static void outtakeAlgae() { - if (!RobotContainer.MAPLESIM) { - return; - } - - // SwerveDriveSimulation swerveSim = RobotContainer.instance.drivetrain.getDriveSim(); - // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); - - // SimulatedArena.getInstance() - // .addGamePieceProjectile( - // new ReefscapeAlgaeOnFly( - // simRobotPose.getTranslation(), - // new Translation2d(0.6, 0), // scoring mechanism position on the robot - // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), - // simRobotPose.getRotation().rotateBy(Rotation2d.kCCW_90deg), - // Inches.of(16), // outtake height - // MetersPerSecond.of(2), // outtake speed - // Degrees.of(0))); - } - - public static void netAlgae(boolean forwards) { - if (!RobotContainer.MAPLESIM) { - return; - } - - // SwerveDriveSimulation swerveSim = RobotContainer.instance.drivetrain.getDriveSim(); - // Pose2d simRobotPose = swerveSim.getSimulatedDriveTrainPose(); - - // SimulatedArena.getInstance() - // .addGamePieceProjectile( - // new ReefscapeAlgaeOnFly( - // simRobotPose.getTranslation(), - // new Translation2d(-0.1, 0), // scoring mechanism position on the robot - // swerveSim.getDriveTrainSimulatedChassisSpeedsFieldRelative(), - // simRobotPose - // .getRotation() - // .rotateBy(forwards ? 
Rotation2d.kZero : Rotation2d.k180deg), - // Meters.of(10.), // outtake height - // MetersPerSecond.of(6), // outtake speed - // Degrees.of(75))); - } - - private static double randomNumberPlusMinus(double range) { - return Math.random() * (range * 2) - range; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java deleted file mode 100644 index c826e27..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIO.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.epilogue.Logged; -import frc.robot.util.LimelightHelpers.PoseEstimate; -import frc.robot.util.LimelightHelpers.RawDetection; -import frc.robot.util.LimelightHelpers.RawFiducial; - -@Logged -public interface VisionIO { - - @Logged - public class VisionIOInputs { - final RawFiducial[] emptyFiducials = new RawFiducial[0]; - final RawDetection[] emptyDetections = new RawDetection[0]; - - boolean scoringCameraConnected = false; - PoseEstimate scoringPoseEstimate = null; - RawFiducial[] scoringFiducials = emptyFiducials; - double scoringTimestamp = 0.0; - double scoringCPUTemp = 0; - double scoringTemp = 0; - - boolean frontCameraConnected = false; - PoseEstimate frontPoseEstimate = null; - PoseEstimate frontPoseEstimateMT2 = null; - RawFiducial[] frontFiducials = emptyFiducials; - double frontTemp = 0; - - boolean backCameraConnected = false; - RawDetection[] backDetections = emptyDetections; - double backTimestamp = 0.0; - } - - public void update(VisionIOInputs inputs); - - public default void simulationPeriodic() {} -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java deleted file mode 100644 index 049d2e6..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOLimelight.java +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.Timer; -import frc.robot.Robot; -import frc.robot.RobotContainer; -import frc.robot.util.LimelightHelpers; -import frc.robot.util.LimelightHelpers.PoseEstimate; - -public class VisionIOLimelight implements VisionIO { - - private static final String SCORING_LIMELIGHT = "limelight"; - private static final String FRONT_LIMEIGHT = "limelight-front"; - private static final String BACK_LIMEIGHT = "limelight-back"; - - private final LimelightStatus scoringLimelightStatus; - private final LimelightStatus frontLimelightStatus; - private final LimelightStatus backLimelightStatus; - private final PoseEstimate simPoseEstimate = new PoseEstimate(); - - private final double[] blueReefTags; - private final double[] redReefTags; - private final double[] allReefTags; - - private double lastSettingsUpdate = -1; - - public VisionIOLimelight() { - scoringLimelightStatus = new LimelightStatus(SCORING_LIMELIGHT); - frontLimelightStatus = new LimelightStatus(FRONT_LIMEIGHT); - backLimelightStatus = new LimelightStatus(BACK_LIMEIGHT); - - blueReefTags = new double[VisionSubsystem.blueReefTags.size()]; - for (int i = 0; i < blueReefTags.length; i++) { - blueReefTags[i] = VisionSubsystem.blueReefTags.get(i); - } - - redReefTags = new double[VisionSubsystem.redReefTags.size()]; - for (int i = 0; i < redReefTags.length; i++) { - redReefTags[i] = VisionSubsystem.redReefTags.get(i); - } - - allReefTags = new double[VisionSubsystem.allReefTags.size()]; - for (int i = 0; i < allReefTags.length; i++) { - allReefTags[i] = VisionSubsystem.allReefTags.get(i); - } - } - - @Override - public void update(VisionIOInputs inputs) { - scoringLimelightStatus.update(); - frontLimelightStatus.update(); - backLimelightStatus.update(); - - inputs.scoringCameraConnected = scoringLimelightStatus.isConnected(); - inputs.frontCameraConnected = 
frontLimelightStatus.isConnected(); - inputs.backCameraConnected = backLimelightStatus.isConnected(); - - double time = Timer.getFPGATimestamp(); - if (lastSettingsUpdate == -1 || time - lastSettingsUpdate > 5) { - setLimelightPosition(SCORING_LIMELIGHT, VisionSubsystem.ROBOT_TO_SCORING_CAMERA); - setLimelightPosition(FRONT_LIMEIGHT, VisionSubsystem.ROBOT_TO_FRONT_CAMERA); - - double[] validTags = allReefTags; - setValidTags(SCORING_LIMELIGHT, validTags); - setValidTags(FRONT_LIMEIGHT, validTags); - - lastSettingsUpdate = time; - } - - inputs.scoringFiducials = LimelightHelpers.getRawFiducials(SCORING_LIMELIGHT); - inputs.frontFiducials = LimelightHelpers.getRawFiducials(FRONT_LIMEIGHT); - inputs.backDetections = LimelightHelpers.getRawDetections(BACK_LIMEIGHT); - if (inputs.backCameraConnected) { - inputs.backTimestamp = Timer.getFPGATimestamp() - getLatencySeconds(BACK_LIMEIGHT); - } - - if (Robot.isSimulation() && RobotContainer.MAPLESIM) { - inputs.scoringPoseEstimate = simPoseEstimate; - inputs.frontPoseEstimate = simPoseEstimate; - } else { - inputs.scoringPoseEstimate = LimelightHelpers.getBotPoseEstimate_wpiBlue(SCORING_LIMELIGHT); - inputs.frontPoseEstimate = LimelightHelpers.getBotPoseEstimate_wpiBlue(FRONT_LIMEIGHT); - } - - if (inputs.scoringPoseEstimate != null) { - inputs.scoringTimestamp = inputs.scoringPoseEstimate.timestampSeconds; - } - } - - @Override - public void simulationPeriodic() { - // If we're running in simulation, we can feed constant perfect vision data to the robot - // to stop the odometry from diverging from the real robot position if we're using a physics - // simulation (like MapleSim). - // To simulate realistic vision input using Apriltags, use {@link VisionIOPhoton} instead. 
- // simPoseEstimate.pose = RobotContainer.instance.drivetrain.getSimPose(); - simPoseEstimate.timestampSeconds = Timer.getFPGATimestamp(); - simPoseEstimate.tagCount = 1; - simPoseEstimate.avgTagDist = 2; - simPoseEstimate.avgTagArea = 99; - } - - private double getLatencySeconds(String limelightName) { - double latency = - LimelightHelpers.getLatency_Capture(limelightName) - + LimelightHelpers.getLatency_Pipeline(limelightName); - return latency * 0.001; - } - - private void setLimelightPosition(String limelightName, Transform3d transform) { - LimelightHelpers.setCameraPose_RobotSpace( - limelightName, - transform.getX(), // forward - -transform.getY(), // side - transform.getZ(), // up - -Units.radiansToDegrees(transform.getRotation().getX()), // roll - -Units.radiansToDegrees(transform.getRotation().getY()), // pitch - -Units.radiansToDegrees(transform.getRotation().getZ()) // yaw - ); - } - - private void setValidTags(String limelightName, double[] validTags) { - LimelightHelpers.setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validTags); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java deleted file mode 100644 index 04c0ed5..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionIOPhoton.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC180; - -import edu.wpi.first.apriltag.AprilTagFieldLayout; - -public class VisionIOPhoton implements VisionIO { - final SimCamera scoringCamera, frontCamera; - - public VisionIOPhoton(AprilTagFieldLayout apriltagLayout) { - scoringCamera = - new SimCamera("scoring", VisionSubsystem.ROBOT_TO_SCORING_CAMERA, apriltagLayout); - frontCamera = new SimCamera("front", VisionSubsystem.ROBOT_TO_FRONT_CAMERA, apriltagLayout); - } - - @Override - public void update(VisionIOInputs inputs) { - inputs.scoringCameraConnected = true; - inputs.frontCameraConnected = true; - inputs.backCameraConnected = true; - - inputs.scoringPoseEstimate = scoringCamera.getPoseEstimate(); - if (inputs.scoringPoseEstimate != null) { - inputs.scoringTimestamp = inputs.scoringPoseEstimate.timestampSeconds; - // We're not fully simulating the other camera, but we can use the scoring camera's timestamp - inputs.backTimestamp = inputs.scoringTimestamp; - } - inputs.scoringFiducials = scoringCamera.getRawFiducials(); - inputs.frontPoseEstimate = frontCamera.getPoseEstimate(); - } - - @Override - public void simulationPeriodic() { - scoringCamera.update(); - frontCamera.update(); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java deleted file mode 100644 index 87b312d..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC180/VisionSubsystem.java +++ /dev/null @@ -1,709 +0,0 @@ -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. 
-// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC180; - -import static edu.wpi.first.units.Units.*; - -import com.ctre.phoenix6.Utils; -import com.pathplanner.lib.util.FlippingUtil; -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.epilogue.Logged; -import edu.wpi.first.epilogue.NotLogged; -import edu.wpi.first.math.MatBuilder; -import edu.wpi.first.math.Matrix; -import edu.wpi.first.math.Nat; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.numbers.N1; -import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.units.measure.Distance; -import edu.wpi.first.wpilibj.Alert; -import edu.wpi.first.wpilibj.Alert.AlertType; -import edu.wpi.first.wpilibj.Timer; -import edu.wpi.first.wpilibj2.command.Command; -import edu.wpi.first.wpilibj2.command.Commands; -import edu.wpi.first.wpilibj2.command.SubsystemBase; -import edu.wpi.first.wpilibj2.command.button.Trigger; -import frc.robot.Robot; -import frc.robot.subsystems.drive.Drive; -import frc.robot.subsystems.vision.FRC180.VisionIO.VisionIOInputs; -import frc.robot.util.LimelightHelpers.PoseEstimate; -import frc.robot.util.LimelightHelpers.RawFiducial; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import 
java.util.function.Supplier; - -@Logged -public class VisionSubsystem extends SubsystemBase { - private final VisionConsumer consumer; - private final Supplier poseSupplier; - private final Drive m_drivebase; - - /** - * The source of a pose estimate, used to determine the standard deviation of the pose estimate - * (i.e. how much we trust the pose estimate). The lower the number, the more we trust the pose - * estimate, and the more it'll affect the robot's position. - */ - enum PoseEstimateSource { - SCORING_CAMERA(0.025), - FRONT_CAMERA(0.15), - BACK_CAMERA(0.3), - NONE(99); - - private Matrix stdDev; - - PoseEstimateSource(double dev) { - this(dev, dev, dev); - } - - PoseEstimateSource(double xDev, double yDev, double thetaDev) { - stdDev = MatBuilder.fill(Nat.N3(), Nat.N1(), xDev, yDev, thetaDev); - } - } - - private static final List redTags = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11); - private static final List blueTags = List.of(12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22); - public static final List redReefTags = List.of(6, 7, 8, 9, 10, 11); - public static final List blueReefTags = List.of(17, 18, 19, 20, 21, 22); - public static final List allReefTags = new ArrayList<>(); - - // Used to correct for horizontal offset of tags on the field - private static final Map TAG_OFFSETS = - Map.of( - // 6, Inches.of(24) - ); - - public static final Transform3d ROBOT_TO_SCORING_CAMERA = - new Transform3d( - Inches.of(9.757).in(Meters), // forward - Inches.of(-7.2).in(Meters), // right - Inches.of(31.296).in(Meters), - new Rotation3d(0, Units.degreesToRadians(50), 0) // downward tilt - ); - - // ORIGINAL FRONT CAMERA MOUNT (Orlando & South Florida) - public static final Transform3d ROBOT_TO_FRONT_CAMERA = - new Transform3d( - Inches.of(13.998 + 3.5 - 1).in(Meters), // forward - Inches.of(-7 + 1 - 1).in(Meters), // right - Inches.of(6.767).in(Meters), - new Rotation3d(0, Units.degreesToRadians(-15), 0) // upward tilt - ); - - public static final Transform3d 
ROBOT_TO_INTAKE_CAMERA = - new Transform3d( - Inches.of(-8.7).in(Meters), // forward - Inches.of(-5.22).in(Meters), // left - Inches.of(30.049).in(Meters), - new Rotation3d( - 0, Units.degreesToRadians(50), Units.degreesToRadians(180)) // downward tilt - ); - - public static final Transform2d ROBOT_TO_INTAKE_CAMERA_2D = - new Transform2d( - ROBOT_TO_INTAKE_CAMERA.getTranslation().getX(), - ROBOT_TO_INTAKE_CAMERA.getTranslation().getY(), - Rotation2d.k180deg); - public static final Transform2d INTAKE_CAMERA_TO_ROBOT_2D = ROBOT_TO_INTAKE_CAMERA_2D.inverse(); - - private static final int RED_PROCESSOR_TAG = 3; - private static final int BLUE_PROCESSOR_TAG = 16; - - private static final double BAD_CAMERA_TEMP = 55; - - private final VisionIO io; - private final VisionIOInputs inputs; - - private final ReefProximity reefProximity; - private final CoralDetector coralDetector; - private final CoralDetectorReal coralDetectorReal; - - private final Distance reefBackDistance = Meters.of(0.55).plus(Inches.of(0.5 - 1)); - private final Distance reefSideDistance = - Field.REEF_BRANCH_SEPARATION.div(2); // field measurement based - - private final Transform2d leftReefTransform = - new Transform2d( - reefBackDistance.in(Meters), -reefSideDistance.in(Meters), Rotation2d.k180deg); - private final Transform2d rightReefTransform = - new Transform2d(reefBackDistance.in(Meters), reefSideDistance.in(Meters), Rotation2d.k180deg); - private final Transform2d processorTransform = - new Transform2d(0.55, 0.0, Rotation2d.fromDegrees(90)); - - // The "front lay down" - private final double l1BackDistance = 0.62 - Inches.of(1.5).in(Meters); - - private final Transform2d leftL1ReefTransform = - new Transform2d(l1BackDistance, -Inches.of(12).in(Meters), Rotation2d.k180deg); - private final Transform2d rightL1ReefTransform = - new Transform2d(l1BackDistance, 0, Rotation2d.k180deg); - - private final Transform2d leftL1ReefRotation = new Transform2d(0, 0, Rotation2d.fromDegrees(0)); - private final 
Transform2d rightL1ReefRotation = new Transform2d(0, 0, Rotation2d.fromDegrees(0)); - - // 1.25 inches closer (forward) than standard, applied on top of left/right reef transforms - private final Transform2d algaeReefTransform = - new Transform2d(Inches.of(0.75 + 0.5 - 1).in(Meters), 0, Rotation2d.kZero); - - private final Transform2d algaeReefBackup = new Transform2d(-0.3, 0, Rotation2d.kZero); - - private final Pose2d redProcessorPose; - private final Pose2d blueProcessorPose; - - public final HashMap tagPoses2d = new HashMap<>(); - public final HashMap leftReefHashMap = new HashMap<>(); - public final HashMap rightReefHashMap = new HashMap<>(); - private final HashMap leftL1ReefHashMap = new HashMap<>(); - private final HashMap rightL1ReefHashMap = new HashMap<>(); - private final HashMap leftReefAlgaePoses = new HashMap<>(); - private final HashMap rightReefAlgaePoses = new HashMap<>(); - private final HashMap leftReefAlgaeBackupPoses = new HashMap<>(); - private final HashMap rightReefAlgaeBackupPoses = new HashMap<>(); - - public int bestReefID = -1; - public int lastReefID = -1; - - private Pose2d coralPose = Pose2d.kZero; - private boolean coralPoseValid = false; - private Pose2d coralPickupPose = null; - - private boolean closestReefPoseValid = false; - private Pose2d closestReefPose = Pose2d.kZero; - - private Pose2d singleTagPose = Pose2d.kZero; - - private PoseEstimate poseEstimate = null; - private PoseEstimateSource poseEstimateSource = PoseEstimateSource.NONE; - private boolean allowPoseEstimates = true; - private Pose3d scoringCameraPosition = Pose3d.kZero; - private Pose3d frontCameraPosition = Pose3d.kZero; - private Pose3d backCameraPosition = Pose3d.kZero; - private double poseEstimateDiffX, poseEstimateDiffY, poseEstimateDiffTheta; - private double lastPoseEstimateTime = 0; - - private Alert scoringCameraDisconnectedAlert = - new Alert("Scoring Camera disconnected!", AlertType.kError); - private Alert frontCameraDisconnectedAlert = - new 
Alert("Front Camera disconnected!", AlertType.kError); - private Alert backCameraDisconnectedAlert = - new Alert("Back Camera disconnected!", AlertType.kError); - - private Alert scoringCameraTempAlert = new Alert("", AlertType.kWarning); - private Alert frontCameraTempAlert = new Alert("", AlertType.kWarning); - - public AprilTagFieldLayout aprilTagFieldLayout; - public final Trigger poseEstimateDiffLow; - @NotLogged public final Trigger scoringCameraConnected; - public final Trigger hasPoseEstimates = new Trigger(() -> poseEstimate != null).debounce(0.5); - - @SuppressWarnings("unused") - public VisionSubsystem( - VisionConsumer consumer, Supplier poseSupplier, Drive m_drivebase) { - this.consumer = consumer; - this.poseSupplier = poseSupplier; - this.m_drivebase = m_drivebase; - - try { - aprilTagFieldLayout = - AprilTagFieldLayout.loadFromResource(AprilTagFields.k2025ReefscapeWelded.m_resourceFile); - } catch (Exception e) { - aprilTagFieldLayout = null; - } - - inputs = new VisionIOInputs(); - if (Robot.isReal()) { - io = new VisionIOLimelight(); - } else { - io = new VisionIOPhoton(aprilTagFieldLayout); - } - - allReefTags.addAll(redReefTags); - allReefTags.addAll(blueReefTags); - - for (var allianceTags : List.of(redTags, blueTags)) { - for (int tagID : allianceTags) { - Optional pose3d = aprilTagFieldLayout.getTagPose(tagID); - if (pose3d.isPresent()) { - tagPoses2d.put(tagID, pose3d.get().toPose2d()); - } - } - } - - // Pre-calculate reef poses for all reef tags - for (int i : redReefTags) { - leftReefHashMap.put(i, calculateReefPose(i, true)); - rightReefHashMap.put(i, calculateReefPose(i, false)); - - leftL1ReefHashMap.put(i, calculateL1ReefPose(i, true)); - rightL1ReefHashMap.put(i, calculateL1ReefPose(i, false)); - } - for (int i : blueReefTags) { - leftReefHashMap.put(i, calculateReefPose(i, true)); - rightReefHashMap.put(i, calculateReefPose(i, false)); - - leftL1ReefHashMap.put(i, calculateL1ReefPose(i, true)); - rightL1ReefHashMap.put(i, 
calculateL1ReefPose(i, false)); - } - - // Pre-calculate reef algae poses - leftReefHashMap - .keySet() - .forEach( - i -> { - leftReefAlgaePoses.put(i, calculateReefAlgaePose(i, true)); - leftReefAlgaeBackupPoses.put(i, calculateReefAlgaeBackupPose(i, true)); - }); - rightReefHashMap - .keySet() - .forEach( - i -> { - rightReefAlgaePoses.put(i, calculateReefAlgaePose(i, false)); - rightReefAlgaeBackupPoses.put(i, calculateReefAlgaeBackupPose(i, false)); - }); - - // Pre-calculate processor poses - redProcessorPose = calculateProcessorPose(false); - blueProcessorPose = calculateProcessorPose(true); - - reefProximity = new ReefProximity(leftReefHashMap, rightReefHashMap); - coralDetectorReal = new CoralDetectorReal(); - coralDetector = Robot.isReal() ? coralDetectorReal : new CoralDetectorSim(4.0, true); - - double diffMeters = Inches.of(1.5).in(Meters); - poseEstimateDiffLow = - new Trigger( - () -> { - return Math.abs(poseEstimateDiffX) <= diffMeters - && Math.abs(poseEstimateDiffY) <= diffMeters - && Math.abs(poseEstimateDiffTheta) < 5; - }); - scoringCameraConnected = new Trigger(() -> inputs.scoringCameraConnected); - } - - boolean wasEnabled = false; - - Pose2d futureRobotPose; - - @Override - public void periodic() { - io.update(inputs); - - scoringCameraDisconnectedAlert.set(!inputs.scoringCameraConnected); - frontCameraDisconnectedAlert.set(!inputs.frontCameraConnected); - backCameraDisconnectedAlert.set(!inputs.backCameraConnected); - - // cameraTemperatureAlert(scoringCameraTempAlert, "Scoring", inputs.scoringTemp); - // cameraTemperatureAlert(frontCameraTempAlert, "Front", inputs.frontTemp); - - // If the scoring camera is connected, use its pose estimate - if (inputs.scoringCameraConnected) { - poseEstimate = validatePoseEstimate(inputs.scoringPoseEstimate); - poseEstimateSource = PoseEstimateSource.SCORING_CAMERA; - } - - boolean invalidScoring = - inputs.scoringPoseEstimate == null || inputs.scoringPoseEstimate.tagCount == 0; - - // If we didn't get a 
pose estimate from the scoring camera, use the front camera's pose - // estimate - if (poseEstimate == null && invalidScoring && inputs.frontCameraConnected) { - poseEstimateSource = PoseEstimateSource.FRONT_CAMERA; - - if (Robot.isReal()) { - poseEstimate = validatePoseEstimate(inputs.frontPoseEstimate); - } - // if (RobotState.isEnabled() && Robot.isSimulation()) { - // poseEstimate = validateMT2PoseEstimate(inputs.frontPoseEstimateMT2); - // } else { - // poseEstimate = validatePoseEstimate(inputs.frontPoseEstimate); - // } - } - - Pose2d robotPose = null; - if (poseEstimate != null) { - consumer.accept( - poseEstimate.pose, - Utils.fpgaToCurrentTime(poseEstimate.timestampSeconds), - poseEstimateSource.stdDev); - // Calculate the difference between the updated robot pose and the scoring pose estimate, to - // get an idea - // of how closely we are tracking the robot's actual position - robotPose = poseSupplier.get(); - // Note - the goal of this if statement is to stop "bad" data from non-scoring cameras from - // allowing - // a coral to be scored. 
Unknown if this is working as intended - if (poseEstimateSource == PoseEstimateSource.SCORING_CAMERA) { - poseEstimateDiffX = robotPose.getX() - poseEstimate.pose.getX(); - poseEstimateDiffY = robotPose.getY() - poseEstimate.pose.getY(); - poseEstimateDiffTheta = - robotPose.getRotation().getDegrees() - poseEstimate.pose.getRotation().getDegrees(); - lastPoseEstimateTime = Timer.getFPGATimestamp(); - } else { - poseEstimateDiffX = 99; - poseEstimateDiffY = 99; - poseEstimateDiffTheta = 99; - } - } else { - poseEstimateSource = PoseEstimateSource.NONE; - } - - if (robotPose == null) robotPose = poseSupplier.get(); - - // calculate scoring camera in 3D space, for previewing in AdvantageScope - if (Robot.isSimulation()) { - Pose3d robotPose3d = new Pose3d(poseSupplier.get()); - scoringCameraPosition = robotPose3d.transformBy(ROBOT_TO_SCORING_CAMERA); - frontCameraPosition = robotPose3d.transformBy(ROBOT_TO_FRONT_CAMERA); - backCameraPosition = robotPose3d.transformBy(ROBOT_TO_INTAKE_CAMERA); - } - - ChassisSpeeds speeds = m_drivebase.getChassisSpeeds(); - futureRobotPose = - robotPose.plus( - new Transform2d( - speeds.vxMetersPerSecond * 0.3, speeds.vyMetersPerSecond * 0.3, Rotation2d.kZero)); - - // Allow targeting opponent's reef tags, which is needed for stealing algae - // Entry closestTagAndPose = reefProximity.closestReefPose(futureRobotPose, - // Robot.isBlue()); - Entry closestTagAndPose = - reefProximity.closestReefPose(futureRobotPose, allReefTags); - if (closestTagAndPose == null) { - closestReefPose = Pose2d.kZero; - closestReefPoseValid = false; - } else { - closestReefPose = closestTagAndPose.getValue(); - closestReefPoseValid = true; - } - - Pose2d latencyCompensatedRobotPose; - if (Robot.isReal()) { - latencyCompensatedRobotPose = m_drivebase.getBufferPose(inputs.backTimestamp); - } else { - latencyCompensatedRobotPose = robotPose; - } - coralPose = coralDetector.getCoralPose(latencyCompensatedRobotPose, inputs.backDetections); - if (coralPose == null) 
{ - coralPose = Pose2d.kZero; - coralPoseValid = false; - } else { - coralPoseValid = true; - } - // Invalidate any previously stored coralPickUpPose - this will be recalculated if - // getCoralPickupPose() is called - coralPickupPose = null; - - if (closestTagAndPose == null) { - bestReefID = -1; - } else { - bestReefID = closestTagAndPose.getKey(); - } - - if (bestReefID != -1) lastReefID = bestReefID; - } - - @Override - public void simulationPeriodic() { - io.simulationPeriodic(); - } - - public void resetCoralDetector() { - coralDetector.reset(); - } - - private Pose2d getReefTagPose(int tagID) { - Pose2d pose = tagPoses2d.get(tagID); - if (pose == null) return null; - - if (TAG_OFFSETS.containsKey(tagID)) { - pose = - pose.transformBy(new Transform2d(0, TAG_OFFSETS.get(tagID).in(Meters), Rotation2d.kZero)); - } - return pose; - } - - /** - * Generates the scoring pose of the robot relative to a reef AprilTag. This is used to - * pre-calculate and store all positions to prevent duplicate object creation. To access these - * pre-calculated poses, use {@link #getReefPose(int, boolean)}. - */ - private Pose2d calculateReefPose(int tagID, boolean left) { - Pose2d pose = getReefTagPose(tagID); - if (pose == null) return null; - - return pose.transformBy(left ? leftReefTransform : rightReefTransform); - } - - /** - * Generates the L1 scoring pose of the robot relative to a reef AprilTag. This is used to - * pre-calculate and store all positions to prevent duplicate object creation. To access these - * pre-calculated poses, use {@link #getL1ReefPose(int, boolean)}. - */ - private Pose2d calculateL1ReefPose(int tagID, boolean left) { - Pose2d pose = getReefTagPose(tagID); - if (pose == null) return null; - - return pose.transformBy(left ? leftL1ReefTransform : rightL1ReefTransform) - .transformBy(left ? 
leftL1ReefRotation : rightL1ReefRotation); - } - - /** - * Generates the right branch scoring pose of the robot relative to a reef AprilTag, closer than - * the standard reef pose in order to faciliate grabbing an algae. This is used to pre-calculate - * and store all positions to prevent duplicate object creation. To access these pre-calculated - * poses, use {@link #getReefAlgaePose(int)}. - */ - private Pose2d calculateReefAlgaePose(int tagID, boolean left) { - return getReefPose(tagID, left).transformBy(algaeReefTransform); - } - - private Pose2d calculateReefAlgaeBackupPose(int tagID, boolean left) { - return calculateReefAlgaePose(tagID, left).transformBy(algaeReefBackup); - } - - /** - * Generates the scoring pose of the robot relative to the processor AprilTag. This is used to - * pre-calculate and store all positions to prevent duplicate object creation. To access these - * pre-calculated poses, use {@link #getProcessorPose(boolean)}. - */ - private Pose2d calculateProcessorPose(boolean blue) { - int tagID = blue ? BLUE_PROCESSOR_TAG : RED_PROCESSOR_TAG; - - Pose2d pose = getReefTagPose(tagID); - if (pose == null) return null; - - return pose.transformBy(processorTransform); - } - - /** - * Returns the scoring pose of the robot relative to the reef AprilTag that was last seen. - * - * @param left whether to return the left branch or the right branch scoring pose - */ - public Pose2d getReefPose(boolean left) { - return getReefPose(lastReefID, left); - } - - /** - * Returns a scoring pose of the robot relative to a reef AprilTag. - * - * @param tagID the ID of the reef AprilTag - * @param left whether to return the left branch or the right branch scoring pose - */ - public Pose2d getReefPose(int tagID, boolean left) { - if (left) { - return leftReefHashMap.get(tagID); - } else { - return rightReefHashMap.get(tagID); - } - } - - /** - * Returns the L1 scoring pose of the robot relative to a reef AprilTag. 
- * - * @param tagID the ID of the reef AprilTag - * @param left whether to return the left or the right L1 scoring pose - */ - public Pose2d getL1ReefPose(int tagID, boolean left) { - if (left) { - return leftL1ReefHashMap.get(tagID); - } else { - return rightL1ReefHashMap.get(tagID); - } - } - - /** - * Returns a right branch scoring pose of the robot relative to a reef AprilTag, closer than the - * standard reef pose in order to faciliate grabbing an algae. - * - * @param tagID the ID of the reef AprilTag - */ - public Pose2d getReefAlgaePose(int tagID, boolean left) { - return (left ? leftReefAlgaePoses : rightReefAlgaePoses).get(tagID); - } - - public Pose2d getReefAlgaeBackupPose(int tagID, boolean left) { - return (left ? leftReefAlgaeBackupPoses : rightReefAlgaeBackupPoses).get(tagID); - } - - /** - * Returns the scoring pose of the robot relative to the processor. - * - * @param blue whether to return the blue processor pose or the red processor pose - */ - public Pose2d getProcessorPose(boolean blue) { - return blue ? blueProcessorPose : redProcessorPose; - } - - /** Returns the closest reef scoring pose to the robot (accounting for alliance), or null */ - @NotLogged - public Pose2d getClosestReefPose() { - return closestReefPoseValid ? closestReefPose : null; - } - - private static final double BARGE_BLUE_X = 7.28; - private static final double BARGE_RED_X = FlippingUtil.fieldSizeX - BARGE_BLUE_X; - - public Pose2d getBargePose() { - Pose2d robotPose = poseSupplier.get(); - - return new Pose2d( - Robot.isBlue() ? BARGE_BLUE_X : BARGE_RED_X, - robotPose.getY(), - Robot.isBlue() ? 
Rotation2d.kZero : Rotation2d.k180deg); - } - - public int getReefTagFromPose(Pose2d pose) { - for (Entry entry : leftReefHashMap.entrySet()) { - if (entry.getValue().equals(pose)) { - return entry.getKey(); - } - } - for (Entry entry : rightReefHashMap.entrySet()) { - if (entry.getValue().equals(pose)) { - return entry.getKey(); - } - } - return -1; - } - - public boolean isScoringCameraConnected() { - return inputs.scoringCameraConnected; - } - - public boolean isFrontCameraConnected() { - return inputs.frontCameraConnected; - } - - public boolean isBackCameraConnected() { - return inputs.backCameraConnected; - } - - /** Returns the position of the closest detected coral, or null */ - @NotLogged - public Pose2d getCoralPose() { - return coralPoseValid ? coralPose : null; - } - - public Pose3d getCoralPose3D() { - if (!coralPoseValid) return Pose3d.kZero; - - return new Pose3d(coralPose); - } - - @NotLogged - public Pose2d getCoralPickupPose() { - if (coralPickupPose == null && coralPoseValid) { - Translation2d robotPosition = poseSupplier.get().getTranslation(); - Translation2d coralPosition = coralPose.getTranslation(); - double pickupOffset = true ? 0.6 : 0.9; - double centerDistance = robotPosition.getDistance(coralPosition); - double pickupDistance = centerDistance - pickupOffset; - - Translation2d pickupPosition = - robotPosition.interpolate(coralPosition, pickupDistance / centerDistance); - double radiansToCoral = - Math.atan2( - coralPosition.getY() - robotPosition.getY(), - coralPosition.getX() - robotPosition.getX()); - - coralPickupPose = new Pose2d(pickupPosition, new Rotation2d(radiansToCoral + Math.PI)); - } - return coralPickupPose; - } - - public PoseEstimate validatePoseEstimate(PoseEstimate poseEstimate) { - if (poseEstimate == null) return null; - - double tagMin = 1; - double tagMax = 99; - double maxDist = poseEstimate.tagCount == 1 ? 3.7 : 6; - double minArea = poseEstimate.tagCount == 1 ? 
0.18 : 0.08; - if (poseEstimate.tagCount > tagMax || poseEstimate.tagCount < tagMin) return null; - if (poseEstimate.avgTagArea < minArea) return null; - if (poseEstimate.avgTagDist > maxDist) return null; - - // Rejected if the pose estimate is too far from the last one - // if (lastPoseEstimate != null && deltaSeconds <= 0.25) { - // double maxReasonableDistance = deltaSeconds * DrivetrainSubsystem.MAX_SPEED; - // Translation2d diff = - // poseEstimate.pose.getTranslation().minus(lastPoseEstimate.pose.getTranslation()); - // if (!Helpers.withinTolerance(diff, maxReasonableDistance)) return null; - // } - - return poseEstimate; - } - - public PoseEstimate validateMT2PoseEstimate(PoseEstimate poseEstimate) { - if (poseEstimate == null) return null; - if (poseEstimate.tagCount == 0) return null; - // if (Math.abs(RobotContainer.instance.drivetrain.getGyroscopeRate()) > 720) return null; - - return poseEstimate; - } - - @NotLogged - public boolean reefVisible() { - boolean isReefVisible = false; - for (int i = 0; i < inputs.scoringFiducials.length; i++) { - RawFiducial fiducial = inputs.scoringFiducials[i]; - if (redReefTags.contains(fiducial.id) || blueReefTags.contains(fiducial.id)) { - isReefVisible = true; - } else { - isReefVisible = false; - } - } - return isReefVisible; - } - - /** Blocks pose estimates from updating the robot's position while this command is running. 
*/ - public Command blockPoseEstimates() { - return Commands.runEnd(() -> allowPoseEstimates = false, () -> allowPoseEstimates = true); - } - - public void setAllowPoseEstimates(boolean allow) { - allowPoseEstimates = allow; - } - - private void cameraTemperatureAlert(Alert alert, String cameraName, double temperature) { - if (temperature >= BAD_CAMERA_TEMP) { - int roundedtemp = (int) Math.round(temperature); - alert.setText(cameraName + " Camera temp high (" + roundedtemp + "deg F)"); - alert.set(true); - } else { - alert.set(false); - } - } - - /** Vision Consumer */ - @FunctionalInterface - public static interface VisionConsumer { - public void accept( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java b/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java deleted file mode 100644 index 349c64e..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/Constants.java +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC254; - -import com.ctre.phoenix6.configs.ClosedLoopRampsConfigs; -import com.ctre.phoenix6.configs.OpenLoopRampsConfigs; -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.RobotBase; -import java.net.NetworkInterface; -import java.net.SocketException; -import java.util.Arrays; -import java.util.Enumeration; - -public class Constants { - public static final Mode simMode = Mode.SIM; - public static final Mode currentMode = RobotBase.isReal() ? Mode.REAL : simMode; - public static final SimControllerType kSimControllerType = SimControllerType.XBOX; - - public enum SimControllerType { - XBOX, - DUAL_SENSE - } - - public enum Mode { - /** Running on a real robot. */ - REAL, - - /** Running a physics simulator. */ - SIM, - - /** Replaying from a log file. 
*/ - REPLAY - } - - public static final String kCanBusDrivebaseClimberCanivore = "drivebase-climber"; - public static final String kCanBusSuperstructureCanivore = "superstructure"; - public static boolean kIsReplay = false; - public static final String kPracticeBotMacAddress = "00:80:2F:33:BF:BB"; - public static boolean kIsPracticeBot = hasMacAddress(kPracticeBotMacAddress); - - public static final double kSteerJoystickDeadband = 0.05; - public static final double kRobotWidth = Units.inchesToMeters(35.625); - public static final double kRobotDiagonal = Math.sqrt(2.0) * kRobotWidth; - public static final double kRobotMassKg = Units.lbsToKilograms(147.92); - public static final double kRobotMomentOfInertia = 2 * 9.38; // kg * m^2 - public static final double kCOGHeightMeters = Units.inchesToMeters(0.0); - - public static final ClosedLoopRampsConfigs makeDefaultClosedLoopRampConfig() { - return new ClosedLoopRampsConfigs() - .withDutyCycleClosedLoopRampPeriod(0.02) - .withTorqueClosedLoopRampPeriod(0.02) - .withVoltageClosedLoopRampPeriod(0.02); - } - - public static final OpenLoopRampsConfigs makeDefaultOpenLoopRampConfig() { - return new OpenLoopRampsConfigs() - .withDutyCycleOpenLoopRampPeriod(0.02) - .withTorqueOpenLoopRampPeriod(0.02) - .withVoltageOpenLoopRampPeriod(0.02); - } - - // April Tag Layout - public static final AprilTagFieldLayout kAprilTagLayout = - AprilTagFieldLayout.loadField(AprilTagFields.k2025ReefscapeWelded); - public static final int[] kAllowedTagIDs = {17, 18, 19, 20, 21, 22, 6, 7, 8, 9, 10, 11}; - public static final AprilTagFieldLayout kAprilTagLayoutReefsOnly = - new AprilTagFieldLayout( - kAprilTagLayout.getTags().stream() - .filter(tag -> Arrays.stream(kAllowedTagIDs).anyMatch(element -> element == tag.ID)) - .toList(), - kAprilTagLayout.getFieldLength(), - kAprilTagLayout.getFieldWidth()); - - public static final double kFieldWidthMeters = kAprilTagLayout.getFieldWidth(); - public static final double kFieldLengthMeters = 
kAprilTagLayout.getFieldLength(); - - public static final double kReefRadius = 0.9604; // m - - public static final double kMidlineBuffer = 1.0; - - // Limelight constants - public static final class VisionConstants { - - // Large variance used to downweight unreliable vision measurements - public static final double kLargeVariance = 1e6; - - // Standard deviation constants - public static final int kMegatag1XStdDevIndex = 0; - public static final int kMegatag1YStdDevIndex = 1; - public static final int kMegatag1YawStdDevIndex = 5; - - // Standard deviation array indices for Megatag2 - public static final int kMegatag2XStdDevIndex = 6; - public static final int kMegatag2YStdDevIndex = 7; - public static final int kMegatag2YawStdDevIndex = 11; - - // Validation constants - public static final int kExpectedStdDevArrayLength = 12; - - public static final int kMinFiducialCount = 1; - - // Camera A (Left-side Camera) - public static final double kCameraAPitchDegrees = 20.0; - public static final double kCameraAPitchRads = Units.degreesToRadians(kCameraAPitchDegrees); - public static final double kCameraAHeightOffGroundMeters = Units.inchesToMeters(8.3787); - public static final String kLimelightATableName = "limelight-left"; - public static final double kRobotToCameraAForward = Units.inchesToMeters(7.8757); - public static final double kRobotToCameraASide = Units.inchesToMeters(-11.9269); - public static final Rotation2d kCameraAYawOffset = Rotation2d.fromDegrees(0.0); - public static final Transform2d kRobotToCameraA = - new Transform2d( - new Translation2d(kRobotToCameraAForward, kRobotToCameraASide), kCameraAYawOffset); - - // Camera B (Right-side camera) - public static final double kCameraBPitchDegrees = 20.0; - public static final double kCameraBPitchRads = Units.degreesToRadians(kCameraBPitchDegrees); - public static final double kCameraBHeightOffGroundMeters = Units.inchesToMeters(8.3787); - public static final String kLimelightBTableName = "limelight-right"; - 
public static final double kRobotToCameraBForward = Units.inchesToMeters(7.8757); - public static final double kRobotToCameraBSide = Units.inchesToMeters(11.9269); - public static final Rotation2d kCameraBYawOffset = Rotation2d.fromDegrees(0.0); - public static final Transform2d kRobotToCameraB = - new Transform2d( - new Translation2d(kRobotToCameraBForward, kRobotToCameraBSide), kCameraBYawOffset); - - // Vision processing constants - public static final double kDefaultAmbiguityThreshold = 0.19; - public static final double kDefaultYawDiffThreshold = 5.0; - public static final double kTagAreaThresholdForYawCheck = 2.0; - public static final double kTagMinAreaForSingleTagMegatag = 1.0; - public static final double kDefaultZThreshold = 0.2; - public static final double kDefaultNormThreshold = 1.0; - public static final double kMinAmbiguityToFlip = 0.08; - - public static final double kCameraHorizontalFOVDegrees = 81.0; - public static final double kCameraVerticalFOVDegrees = 55.0; - public static final int kCameraImageWidth = 1280; - public static final int kCameraImageHeight = 800; - - public static final double kScoringConfidenceThreshold = 0.7; - - // NetworkTables constants - public static final String kBoundingBoxTableName = "BoundingBoxes"; - } - - /** - * Check if this system has a certain mac address in any network device. - * - * @param mac_address Mac address to check. - * @return true if some device with this mac address exists on this system. 
- */ - public static boolean hasMacAddress(final String mac_address) { - try { - Enumeration nwInterface = NetworkInterface.getNetworkInterfaces(); - while (nwInterface.hasMoreElements()) { - NetworkInterface nis = nwInterface.nextElement(); - if (nis == null) { - continue; - } - StringBuilder device_mac_sb = new StringBuilder(); - System.out.println("hasMacAddress: NIS: " + nis.getDisplayName()); - byte[] mac = nis.getHardwareAddress(); - if (mac != null) { - for (int i = 0; i < mac.length; i++) { - device_mac_sb.append(String.format("%02X%s", mac[i], (i < mac.length - 1) ? ":" : "")); - } - String device_mac = device_mac_sb.toString(); - System.out.println( - "hasMacAddress: NIS " + nis.getDisplayName() + " device_mac: " + device_mac); - if (mac_address.equals(device_mac)) { - System.out.println("hasMacAddress: ** Mac address match! " + device_mac); - return true; - } - } else { - System.out.println("hasMacAddress: Address doesn't exist or is not accessible"); - } - } - - } catch (SocketException e) { - e.printStackTrace(); - } - return false; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java b/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java deleted file mode 100644 index 0203329..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/FiducialObservation.java +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.util.struct.Struct; -import edu.wpi.first.util.struct.StructSerializable; -import frc.robot.util.LimelightHelpers; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Objects; - -/** - * Represents an observation of a fiducial marker (AprilTag) with position and quality data. - * - * @param id The fiducial marker ID - * @param txnc Normalized horizontal offset (-1 to 1) - * @param tync Normalized vertical offset (-1 to 1) - * @param ambiguity Pose ambiguity score (0 = confident, 1 = ambiguous) - * @param area Target area as percentage of image - */ -public record FiducialObservation(int id, double txnc, double tync, double ambiguity, double area) - implements StructSerializable { - - /** Converts a Limelight raw fiducial to a FiducialObservation. */ - public static FiducialObservation fromLimelight(LimelightHelpers.RawFiducial fiducial) { - if (fiducial == null) { - return null; - } - return new FiducialObservation( - fiducial.id, fiducial.txnc, fiducial.tync, fiducial.ambiguity, fiducial.ta); - } - - /** Converts an array of Limelight raw fiducials to FiducialObservation array. 
*/ - public static FiducialObservation[] fromLimelight(LimelightHelpers.RawFiducial[] fiducials) { - if (fiducials == null) { - return new FiducialObservation[0]; - } - return Arrays.stream(fiducials) - .map(FiducialObservation::fromLimelight) - .filter(Objects::nonNull) - .toArray(FiducialObservation[]::new); - } - - public static final Struct struct = - new Struct() { - @Override - public Class getTypeClass() { - return FiducialObservation.class; - } - - @Override - public String getTypeString() { - return "record:FiducialObservation"; - } - - @Override - public int getSize() { - return Integer.BYTES + 4 * Double.BYTES; - } - - @Override - public String getSchema() { - return "int id;double txnc;double tync;double ambiguity"; - } - - @Override - public FiducialObservation unpack(ByteBuffer bb) { - int id = bb.getInt(); - double txnc = bb.getDouble(); - double tync = bb.getDouble(); - double ambiguity = bb.getDouble(); - double area = bb.getDouble(); - return new FiducialObservation(id, txnc, tync, ambiguity, area); - } - - @Override - public void pack(ByteBuffer bb, FiducialObservation value) { - bb.putInt(value.id()); - bb.putDouble(value.txnc()); - bb.putDouble(value.tync()); - bb.putDouble(value.ambiguity()); - bb.putDouble(value.area()); - } - - @Override - public String getTypeName() { - return "FiducialObservation"; - } - }; -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java deleted file mode 100644 index c45c64e..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/LedState.java +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright (c) 2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. 
-// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC254; - -public class LedState { - public static final LedState kRed = new LedState(255, 0, 0); - public static final LedState kBlue = new LedState(0, 0, 255); - public static final LedState kCyan = new LedState(0, 255, 255); - public static final LedState kGreen = new LedState(0, 255, 0); - public static final LedState kYellow = new LedState(255, 215, 0); - public static final LedState kOrange = new LedState(255, 80, 0); - public static final LedState kPurple = new LedState(255, 0, 255); - public static final LedState kWhite = new LedState(255, 255, 255); - public static final LedState kPink = new LedState(255, 0, 100); - - public static final LedState kOff = new LedState(0, 0, 0); // No Color - public static final LedState kLowBattery = kRed; - public static final LedState kGoodBattery = kGreen; - public static final LedState[] kRainbow = { - kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kWhite, kOff, kRed, kOrange, kYellow, - kGreen, kCyan, kBlue, kPurple, kOff, kOff - }; - - public static final LedState kCoralMode = kWhite; - public static final LedState kAlgaeMode = kGreen; - public static final LedState kCoralManual = kCyan; - - // 3 leds for l2, 7 leds for l3, full for l4 - public static final LedState[] kL2StagingLeds = { - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kOff, - kOff, - kOff, - kOff, - kOff, - kOff - }; - public static final LedState[] kL3StagingLeds = { - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - kCoralMode, - 
kCoralMode, - kCoralMode, - kCoralMode, - kOff, - kOff, - kOff - }; - - public static final LedState[] kL2StagingManualLeds = { - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kOff, - kOff, - kOff, - kOff, - kOff, - kOff - }; - public static final LedState[] kL3StagingManualLeds = { - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kCoralManual, - kOff, - kOff, - kOff - }; - - public static final LedState[] kAlgaeL2Leds = { - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kOff, - kOff, - kOff, - kOff, - kOff, - kOff - }; - public static final LedState[] kAlgaeL3Leds = { - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kAlgaeMode, - kOff, - kOff, - kOff - }; - - public int blue; - public int green; - public int red; - - public LedState() { - blue = 0; - green = 0; - red = 0; - } - - public LedState(int r, int g, int b) { - blue = b; - green = g; - red = r; - } - - public void copyFrom(LedState other) { - this.blue = other.blue; - this.green = other.green; - this.red = other.red; - } - - @Override - public boolean equals(Object other) { - if (other == null) { - return false; - } - - if (other.getClass() != this.getClass()) { - return false; - } - - LedState s = (LedState) other; - return this.blue == s.blue && this.red == s.red && this.green == s.green; - } - - @Override - public String toString() { - return "#" + Integer.toHexString(red) + Integer.toHexString(green) + 
Integer.toHexString(blue); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java deleted file mode 100644 index 32e4f3d..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/MegatagPoseEstimate.java +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.util.struct.Struct; -import edu.wpi.first.util.struct.StructSerializable; -import frc.robot.util.LimelightHelpers; -import frc.robot.util.MathHelpers; -import java.nio.ByteBuffer; - -/** - * Represents a robot pose estimate using multiple AprilTags (Megatag). 
- * - * @param fieldToRobot The estimated robot pose on the field - * @param timestampSeconds The timestamp when this estimate was captured - * @param latency Processing latency in seconds - * @param avgTagArea Average area of detected tags - * @param quality Quality score of the pose estimate (0-1) - * @param fiducialIds IDs of fiducials used for this estimate - */ -public record MegatagPoseEstimate( - Pose2d fieldToRobot, - double timestampSeconds, - double latency, - double avgTagArea, - double quality, - int[] fiducialIds) - implements StructSerializable { - - public MegatagPoseEstimate { - if (fieldToRobot == null) { - fieldToRobot = MathHelpers.kPose2dZero; - } - if (fiducialIds == null) { - fiducialIds = new int[0]; - } - } - - /** Converts a Limelight pose estimate to a MegatagPoseEstimate. */ - public static MegatagPoseEstimate fromLimelight(LimelightHelpers.PoseEstimate poseEstimate) { - Pose2d fieldToRobot = poseEstimate.pose; - if (fieldToRobot == null) { - fieldToRobot = MathHelpers.kPose2dZero; - } - int[] fiducialIds = new int[poseEstimate.rawFiducials.length]; - for (int i = 0; i < poseEstimate.rawFiducials.length; i++) { - if (poseEstimate.rawFiducials[i] != null) { - fiducialIds[i] = poseEstimate.rawFiducials[i].id; - } - } - return new MegatagPoseEstimate( - fieldToRobot, - poseEstimate.timestampSeconds, - poseEstimate.latency, - poseEstimate.avgTagArea, - fiducialIds.length > 1 ? 
1.0 : 1.0 - poseEstimate.rawFiducials[0].ambiguity, - fiducialIds); - } - - public static final MegatagPoseEstimateStruct struct = new MegatagPoseEstimateStruct(); - - public static class MegatagPoseEstimateStruct implements Struct { - - @Override - public Class getTypeClass() { - return MegatagPoseEstimate.class; - } - - @Override - public String getTypeString() { - return "record:MegatagPoseEstimate"; - } - - @Override - public int getSize() { - return Pose2d.struct.getSize() + 3 * Double.BYTES; - } - - @Override - public String getSchema() { - return "Pose2d fieldToRobot; double timestampSeconds; double latency; double avgTagArea"; - } - - @Override - public Struct[] getNested() { - return new Struct[] {Pose2d.struct}; - } - - @Override - public MegatagPoseEstimate unpack(ByteBuffer bb) { - Pose2d fieldToRobot = Pose2d.struct.unpack(bb); - double timestampSeconds = bb.getDouble(); - double latency = bb.getDouble(); - double avgTagArea = bb.getDouble(); - double quality = bb.getDouble(); - int[] fiducialIds = new int[0]; - return new MegatagPoseEstimate( - fieldToRobot, timestampSeconds, latency, avgTagArea, quality, fiducialIds); - } - - @Override - public void pack(ByteBuffer bb, MegatagPoseEstimate value) { - Pose2d.struct.pack(bb, value.fieldToRobot()); - bb.putDouble(value.timestampSeconds()); - bb.putDouble(value.latency()); - bb.putDouble(value.avgTagArea()); - bb.putDouble(value.quality()); - } - - @Override - public String getTypeName() { - return "MegatagPoseEstimate"; - } - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java deleted file mode 100644 index 4d3f59d..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/RobotState.java +++ /dev/null @@ -1,507 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General 
Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Twist2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.DriverStation.Alliance; -import frc.robot.Robot; -import frc.robot.util.ConcurrentTimeInterpolatableBuffer; -import frc.robot.util.FieldConstants; -import frc.robot.util.MathHelpers; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; -import java.util.function.IntSupplier; -import org.littletonrobotics.junction.Logger; - -/** Tracks robot state including pose, velocities, and mechanism positions. */ -public class RobotState { - - public static final double LOOKBACK_TIME = 1.0; - - private final Consumer visionEstimateConsumer; - - public RobotState(Consumer visionEstimateConsumer) { - this.visionEstimateConsumer = visionEstimateConsumer; - fieldToRobot.addSample(0.0, MathHelpers.kPose2dZero); - driveYawAngularVelocity.addSample(0.0, 0.0); - - // Initialize mechanism positions - elevatorHeightMeters.set(0.0); - wristRadians.set(0.0); - intakeRollerRotations.set(0.0); - clawRollerRotations.set(0.0); - } - - // State of robot. 
- - // Kinematic Frames - // Robot's pose in field coordinates over time - private final ConcurrentTimeInterpolatableBuffer fieldToRobot = - ConcurrentTimeInterpolatableBuffer.createBuffer(LOOKBACK_TIME); - // Current robot-relative chassis speeds (measured from encoders) - private final AtomicReference measuredRobotRelativeChassisSpeeds = - new AtomicReference<>(new ChassisSpeeds()); - // Current field-relative chassis speeds (measured from encoders) - private final AtomicReference measuredFieldRelativeChassisSpeeds = - new AtomicReference<>(new ChassisSpeeds()); - // Desired robot-relative chassis speeds (set by control systems) - private final AtomicReference desiredRobotRelativeChassisSpeeds = - new AtomicReference<>(new ChassisSpeeds()); - // Desired field-relative chassis speeds (set by control systems) - private final AtomicReference desiredFieldRelativeChassisSpeeds = - new AtomicReference<>(new ChassisSpeeds()); - private final AtomicReference fusedFieldRelativeChassisSpeeds = - new AtomicReference<>(new ChassisSpeeds()); - - private final AtomicInteger iteration = new AtomicInteger(0); - - private double lastUsedMegatagTimestamp = 0; - private Pose2d lastUsedMegatagPose = Pose2d.kZero; - private final ConcurrentTimeInterpolatableBuffer driveYawAngularVelocity = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer driveRollAngularVelocity = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer drivePitchAngularVelocity = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - - private final ConcurrentTimeInterpolatableBuffer drivePitchRads = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer driveRollRads = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer 
accelX = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - private final ConcurrentTimeInterpolatableBuffer accelY = - ConcurrentTimeInterpolatableBuffer.createDoubleBuffer(LOOKBACK_TIME); - - private final AtomicBoolean enablePathCancel = new AtomicBoolean(false); - - private double autoStartTime; - - private Optional trajectoryTargetPose = Optional.empty(); - private Optional trajectoryCurrentPose = Optional.empty(); - - public void setAutoStartTime(double timestamp) { - autoStartTime = timestamp; - } - - public double getAutoStartTime() { - return autoStartTime; - } - - public void enablePathCancel() { - enablePathCancel.set(true); - } - - public void disablePathCancel() { - enablePathCancel.set(false); - } - - public boolean getPathCancel() { - return enablePathCancel.get(); - } - - public void addOdometryMeasurement(double timestamp, Pose2d pose) { - fieldToRobot.addSample(timestamp, pose); - } - - public void incrementIterationCount() { - iteration.incrementAndGet(); - } - - public int getIteration() { - return iteration.get(); - } - - public IntSupplier getIterationSupplier() { - return () -> getIteration(); - } - - public void addDriveMotionMeasurements( - double timestamp, - double angularRollRadsPerS, - double angularPitchRadsPerS, - double angularYawRadsPerS, - double pitchRads, - double rollRads, - double accelX, - double accelY, - ChassisSpeeds desiredRobotRelativeChassisSpeeds, - ChassisSpeeds desiredFieldRelativeSpeeds, - ChassisSpeeds measuredSpeeds, - ChassisSpeeds measuredFieldRelativeSpeeds, - ChassisSpeeds fusedFieldRelativeSpeeds) { - this.driveRollAngularVelocity.addSample(timestamp, angularRollRadsPerS); - this.drivePitchAngularVelocity.addSample(timestamp, angularPitchRadsPerS); - this.driveYawAngularVelocity.addSample(timestamp, angularYawRadsPerS); - this.drivePitchRads.addSample(timestamp, pitchRads); - this.driveRollRads.addSample(timestamp, rollRads); - this.accelY.addSample(timestamp, accelY); - 
this.accelX.addSample(timestamp, accelX); - this.desiredRobotRelativeChassisSpeeds.set(desiredRobotRelativeChassisSpeeds); - this.desiredFieldRelativeChassisSpeeds.set(desiredFieldRelativeSpeeds); - this.measuredRobotRelativeChassisSpeeds.set(measuredSpeeds); - this.measuredFieldRelativeChassisSpeeds.set(measuredFieldRelativeSpeeds); - this.fusedFieldRelativeChassisSpeeds.set(fusedFieldRelativeSpeeds); - } - - public Map.Entry getLatestFieldToRobot() { - return fieldToRobot.getLatest(); - } - - /** - * Predicts robot's future pose based on current velocity. - * - * @param lookaheadTimeS How far ahead to predict (seconds) - * @return Predicted pose - */ - public Pose2d getPredictedFieldToRobot(double lookaheadTimeS) { - var maybeFieldToRobot = getLatestFieldToRobot(); - Pose2d fieldToRobot = - maybeFieldToRobot == null ? MathHelpers.kPose2dZero : maybeFieldToRobot.getValue(); - var delta = getLatestRobotRelativeChassisSpeed(); - delta = delta.times(lookaheadTimeS); - return fieldToRobot.exp( - new Twist2d(delta.vxMetersPerSecond, delta.vyMetersPerSecond, delta.omegaRadiansPerSecond)); - } - - /** - * Like getPredictedFieldToRobot but caps negative velocities to zero. Used for non-holonomic path - * planning. - */ - public Pose2d getPredictedCappedFieldToRobot(double lookaheadTimeS) { - var maybeFieldToRobot = getLatestFieldToRobot(); - Pose2d fieldToRobot = - maybeFieldToRobot == null ? 
MathHelpers.kPose2dZero : maybeFieldToRobot.getValue(); - var delta = getLatestRobotRelativeChassisSpeed(); - delta = delta.times(lookaheadTimeS); - return fieldToRobot.exp( - new Twist2d( - Math.max(0.0, delta.vxMetersPerSecond), - Math.max(0.0, delta.vyMetersPerSecond), - delta.omegaRadiansPerSecond)); - } - - public Optional getFieldToRobot(double timestamp) { - return fieldToRobot.getSample(timestamp); - } - - public ChassisSpeeds getLatestMeasuredFieldRelativeChassisSpeeds() { - return measuredFieldRelativeChassisSpeeds.get(); - } - - public ChassisSpeeds getLatestRobotRelativeChassisSpeed() { - return measuredRobotRelativeChassisSpeeds.get(); - } - - public ChassisSpeeds getLatestDesiredRobotRelativeChassisSpeeds() { - return desiredRobotRelativeChassisSpeeds.get(); - } - - public ChassisSpeeds getLatestDesiredFieldRelativeChassisSpeed() { - return desiredFieldRelativeChassisSpeeds.get(); - } - - public ChassisSpeeds getLatestFusedFieldRelativeChassisSpeed() { - return fusedFieldRelativeChassisSpeeds.get(); - } - - public ChassisSpeeds getLatestFusedRobotRelativeChassisSpeed() { - var speeds = getLatestRobotRelativeChassisSpeed(); - speeds.omegaRadiansPerSecond = getLatestFusedFieldRelativeChassisSpeed().omegaRadiansPerSecond; - return speeds; - } - - public void setLedState(LedState state) { - ledState.set(state); - } - - public LedState getLedState() { - return ledState.get(); - } - - private Optional getMaxAbsValueInRange( - ConcurrentTimeInterpolatableBuffer buffer, double minTime, double maxTime) { - var submap = buffer.getInternalBuffer().subMap(minTime, maxTime).values(); - var max = submap.stream().max(Double::compare); - var min = submap.stream().min(Double::compare); - if (max.isEmpty() || min.isEmpty()) return Optional.empty(); - if (Math.abs(max.get()) >= Math.abs(min.get())) return max; - else return min; - } - - public Optional getMaxAbsDriveYawAngularVelocityInRange(double minTime, double maxTime) { - // Gyro yaw rate not set in sim. 
- if (Robot.isReal()) return getMaxAbsValueInRange(driveYawAngularVelocity, minTime, maxTime); - return Optional.of(measuredRobotRelativeChassisSpeeds.get().omegaRadiansPerSecond); - } - - public Optional getMaxAbsDrivePitchAngularVelocityInRange( - double minTime, double maxTime) { - return getMaxAbsValueInRange(drivePitchAngularVelocity, minTime, maxTime); - } - - public Optional getMaxAbsDriveRollAngularVelocityInRange(double minTime, double maxTime) { - return getMaxAbsValueInRange(driveRollAngularVelocity, minTime, maxTime); - } - - public void updateMegatagEstimate(VisionFieldPoseEstimate megatagEstimate) { - lastUsedMegatagTimestamp = megatagEstimate.getTimestampSeconds(); - lastUsedMegatagPose = megatagEstimate.getVisionRobotPoseMeters(); - visionEstimateConsumer.accept(megatagEstimate); - } - - public double lastUsedMegatagTimestamp() { - return lastUsedMegatagTimestamp; - } - - public Pose2d lastUsedMegatagPose() { - return lastUsedMegatagPose; - } - - public boolean isRedAlliance() { - return DriverStation.getAlliance().isPresent() - && DriverStation.getAlliance().equals(Optional.of(Alliance.Red)); - } - - public void updateLogger() { - if (this.driveYawAngularVelocity.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/YawAngularVelocity", - this.driveYawAngularVelocity.getInternalBuffer().lastEntry().getValue()); - } - if (this.driveRollAngularVelocity.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/RollAngularVelocity", - this.driveRollAngularVelocity.getInternalBuffer().lastEntry().getValue()); - } - if (this.drivePitchAngularVelocity.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/PitchAngularVelocity", - this.drivePitchAngularVelocity.getInternalBuffer().lastEntry().getValue()); - } - if (this.drivePitchRads.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/PitchRads", 
this.drivePitchRads.getInternalBuffer().lastEntry().getValue()); - } - if (this.driveRollRads.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/RollRads", this.driveRollRads.getInternalBuffer().lastEntry().getValue()); - } - if (this.accelX.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/AccelX", this.accelX.getInternalBuffer().lastEntry().getValue()); - } - if (this.accelY.getInternalBuffer().lastEntry() != null) { - Logger.recordOutput( - "RobotState/AccelY", this.accelY.getInternalBuffer().lastEntry().getValue()); - } - Logger.recordOutput( - "RobotState/DesiredChassisSpeedFieldFrame", getLatestDesiredFieldRelativeChassisSpeed()); - Logger.recordOutput( - "RobotState/DesiredChassisSpeedRobotFrame", getLatestDesiredRobotRelativeChassisSpeeds()); - Logger.recordOutput( - "RobotState/MeasuredChassisSpeedFieldFrame", getLatestMeasuredFieldRelativeChassisSpeeds()); - Logger.recordOutput( - "RobotState/FusedChassisSpeedFieldFrame", getLatestFusedFieldRelativeChassisSpeed()); - - // Add mechanism logging - Logger.recordOutput("RobotState/ElevatorHeightMeters", getElevatorHeightMeters()); - Logger.recordOutput("RobotState/WristRadians", getWristRadians()); - Logger.recordOutput("RobotState/IntakeRollerRotations", getIntakeRollerRotations()); - Logger.recordOutput("RobotState/CoralRollerRotations", getClawRollerRotations()); - - // Add LED state logging - LedState currentLEDState = getLedState(); - Logger.recordOutput( - "RobotState/LEDState", - String.format( - "R:%d G:%d B:%d", currentLEDState.red, currentLEDState.green, currentLEDState.blue)); - } - - private final AtomicReference> exclusiveTag = - new AtomicReference<>(Optional.empty()); - - private final AtomicReference elevatorHeightMeters = new AtomicReference<>(0.0); - private final AtomicReference wristRadians = new AtomicReference<>(0.0); - private final AtomicReference clawRollerRotations = new AtomicReference<>(0.0); - - private final 
AtomicReference intakeRollerRotations = new AtomicReference<>(0.0); - private final AtomicReference intakeRollerRPS = new AtomicReference<>(0.0); - private final AtomicReference intakePivotRadians = new AtomicReference<>(0.0); - - private final AtomicReference indexerRotations = new AtomicReference<>(0.0); - private final AtomicReference indexerRPS = new AtomicReference<>(0.0); - - private final AtomicReference climberPivotRadians = new AtomicReference<>(0.0); - - private final AtomicReference climberRollerRotations = new AtomicReference<>(0.0); - - private final AtomicReference clawRollerRPS = new AtomicReference<>(0.0); - - private final AtomicReference ledState = new AtomicReference<>(LedState.kBlue); - - public void setClimberRollerRotations(double rotations) { - climberRollerRotations.set(rotations); - } - - public void setClimberPivotRadians(double radians) { - climberPivotRadians.set(radians); - } - - public void setIndexerRotations(double rotations) { - indexerRotations.set(rotations); - } - - public void setIndexerRPS(double rps) { - indexerRPS.set(rps); - } - - public double getIndexerRotations() { - return indexerRotations.get(); - } - - public double getIndexerRPS() { - return indexerRPS.get(); - } - - public void setIntakePivotRadians(double radians) { - intakePivotRadians.set(radians); - } - - public double getIntakePivotRadians() { - return intakePivotRadians.get(); - } - - public void setElevatorHeightMeters(double heightMeters) { - elevatorHeightMeters.set(heightMeters); - } - - public void setWristRadians(double radians) { - wristRadians.set(radians); - } - - public void setIntakeRollerRotations(double rotations) { - intakeRollerRotations.set(rotations); - } - - public void setIntakeRollerRPS(double rps) { - intakeRollerRPS.set(rps); - } - - public void setClawRollerRotations(double rotations) { - clawRollerRotations.set(rotations); - } - - public double getElevatorHeightMeters() { - return elevatorHeightMeters.get(); - } - - public double 
getWristRadians() { - return wristRadians.get(); - } - - public double getIntakeRollerRotations() { - return intakeRollerRotations.get(); - } - - public double getIntakeRollerRPS() { - return intakeRollerRPS.get(); - } - - public double getClawRollerRotations() { - return clawRollerRotations.get(); - } - - public double getClimberRollerRotations() { - return climberRollerRotations.get(); - } - - public double getClimberPivotRadians() { - return climberPivotRadians.get(); - } - - public void setClawRollerRPS(double rps) { - clawRollerRPS.set(rps); - } - - public double getClawRollerRPS() { - return clawRollerRPS.get(); - } - - public void setExclusiveTag(int id) { - exclusiveTag.set(Optional.of(id)); - } - - public void clearExclusiveTag() { - exclusiveTag.set(Optional.empty()); - } - - public Optional getExclusiveTag() { - return exclusiveTag.get(); - } - - public void setTrajectoryTargetPose(Pose2d pose) { - trajectoryTargetPose = Optional.of(pose); - } - - public Optional getTrajectoryTargetPose() { - return trajectoryTargetPose; - } - - public void setTrajectoryCurrentPose(Pose2d pose) { - trajectoryCurrentPose = Optional.of(pose); - } - - public Optional getTrajectoryCurrentPose() { - return trajectoryCurrentPose; - } - - public double getDrivePitchRadians() { - if (this.drivePitchRads.getInternalBuffer().lastEntry() != null) { - return drivePitchRads.getInternalBuffer().lastEntry().getValue(); - } - return 0.0; - } - - public double getDriveRollRadians() { - if (this.driveRollRads.getInternalBuffer().lastEntry() != null) { - return driveRollRads.getInternalBuffer().lastEntry().getValue(); - } - return 0.0; - } - - // public void logControllerMode() { - // Logger.recordOutput("Controller Mode", ModalControls.getInstance().getMode().toString()); - // } - - public static boolean onOpponentSide(boolean isRedAlliance, Pose2d pose) { - return (isRedAlliance - && pose.getTranslation().getX() - < FieldConstants.fieldLength / 2 - Constants.kMidlineBuffer) - || 
(!isRedAlliance - && pose.getTranslation().getX() - > FieldConstants.fieldLength / 2 + Constants.kMidlineBuffer); - } - - public boolean onOpponentSide() { - return onOpponentSide(this.isRedAlliance(), this.getLatestFieldToRobot().getValue()); - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java deleted file mode 100644 index e254ed3..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionFieldPoseEstimate.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.math.Matrix; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.numbers.N1; -import edu.wpi.first.math.numbers.N3; - -/** Represents a robot pose estimate from vision with associated uncertainty and metadata. */ -public class VisionFieldPoseEstimate { - - private final Pose2d visionRobotPoseMeters; - private final double timestampSeconds; - private final Matrix visionMeasurementStdDevs; - private final int numTags; - - /** - * Creates a new vision field pose estimate. 
- * - * @param visionRobotPoseMeters The estimated robot pose on the field in meters - * @param timestampSeconds The timestamp when this estimate was captured - * @param visionMeasurementStdDevs Standard deviations representing measurement uncertainty - * @param numTags Number of AprilTags used in this pose estimate - */ - public VisionFieldPoseEstimate( - Pose2d visionRobotPoseMeters, - double timestampSeconds, - Matrix visionMeasurementStdDevs, - int numTags) { - this.visionRobotPoseMeters = visionRobotPoseMeters; - this.timestampSeconds = timestampSeconds; - this.visionMeasurementStdDevs = visionMeasurementStdDevs; - this.numTags = numTags; - } - - public Pose2d getVisionRobotPoseMeters() { - return visionRobotPoseMeters; - } - - public double getTimestampSeconds() { - return timestampSeconds; - } - - public Matrix getVisionMeasurementStdDevs() { - return visionMeasurementStdDevs; - } - - public int getNumTags() { - return numTags; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java deleted file mode 100644 index 1765636..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionIO.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.math.geometry.Pose3d; - -/** Interface for vision system hardware abstraction. 
*/ -public interface VisionIO { - - /** Container for all vision input data. */ - class VisionIOInputs { - /** Input data from a single camera. */ - public static class CameraInputs { - public boolean seesTarget; - public FiducialObservation[] fiducialObservations; - public MegatagPoseEstimate megatagPoseEstimate; - public MegatagPoseEstimate megatag2PoseEstimate; - public int megatag2Count; - public int megatagCount; - public Pose3d pose3d; - public double[] standardDeviations = - new double[12]; // [MT1x, MT1y, MT1z, MT1roll, MT1pitch, MT1Yaw, MT2x, - // MT2y, MT2z, MT2roll, MT2pitch, MT2yaw] - } - - public CameraInputs cameraA = new CameraInputs(); - public CameraInputs cameraB = new CameraInputs(); - } - - void readInputs(VisionIOInputs inputs); -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java b/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java deleted file mode 100644 index a6ec916..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC254/VisionSubsystem.java +++ /dev/null @@ -1,366 +0,0 @@ -// Copyright (c) 2024-2025 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC254; - -import edu.wpi.first.math.MathUtil; -import edu.wpi.first.math.Matrix; -import edu.wpi.first.math.VecBuilder; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.numbers.N1; -import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; -import edu.wpi.first.wpilibj2.command.SubsystemBase; -import frc.robot.subsystems.vision.FRC254.Constants.VisionConstants; -import frc.robot.util.RobotTime; -import java.util.Optional; -import org.littletonrobotics.junction.Logger; - -/** - * Vision subsystem that processes AprilTag detections and provides robot pose estimates. Supports - * multiple cameras and various pose estimation algorithms including MegaTag. - */ -public class VisionSubsystem extends SubsystemBase { - - private final VisionIO io; - private final RobotState state; - private final VisionIO.VisionIOInputs inputs = new VisionIO.VisionIOInputs(); - - private boolean useVision = true; - - /** Creates a new vision subsystem. */ - public VisionSubsystem(VisionIO io, RobotState state) { - this.io = io; - this.state = state; - } - - /** Fuses two vision pose estimates using inverse-variance weighting. 
*/ - private VisionFieldPoseEstimate fuseEstimates( - VisionFieldPoseEstimate a, VisionFieldPoseEstimate b) { - // Ensure b is the newer measurement - if (b.getTimestampSeconds() < a.getTimestampSeconds()) { - VisionFieldPoseEstimate tmp = a; - a = b; - b = tmp; - } - - // Preview both estimates to the same timestamp - Transform2d a_T_b = - state - .getFieldToRobot(b.getTimestampSeconds()) - .get() - .minus(state.getFieldToRobot(a.getTimestampSeconds()).get()); - - Pose2d poseA = a.getVisionRobotPoseMeters().transformBy(a_T_b); - Pose2d poseB = b.getVisionRobotPoseMeters(); - - // Inverse-variance weighting - var varianceA = a.getVisionMeasurementStdDevs().elementTimes(a.getVisionMeasurementStdDevs()); - var varianceB = b.getVisionMeasurementStdDevs().elementTimes(b.getVisionMeasurementStdDevs()); - - Rotation2d fusedHeading = poseB.getRotation(); - if (varianceA.get(2, 0) < VisionConstants.kLargeVariance - && varianceB.get(2, 0) < VisionConstants.kLargeVariance) { - fusedHeading = - new Rotation2d( - poseA.getRotation().getCos() / varianceA.get(2, 0) - + poseB.getRotation().getCos() / varianceB.get(2, 0), - poseA.getRotation().getSin() / varianceA.get(2, 0) - + poseB.getRotation().getSin() / varianceB.get(2, 0)); - } - - double weightAx = 1.0 / varianceA.get(0, 0); - double weightAy = 1.0 / varianceA.get(1, 0); - double weightBx = 1.0 / varianceB.get(0, 0); - double weightBy = 1.0 / varianceB.get(1, 0); - - Pose2d fusedPose = - new Pose2d( - new Translation2d( - (poseA.getTranslation().getX() * weightAx - + poseB.getTranslation().getX() * weightBx) - / (weightAx + weightBx), - (poseA.getTranslation().getY() * weightAy - + poseB.getTranslation().getY() * weightBy) - / (weightAy + weightBy)), - fusedHeading); - - Matrix fusedStdDev = - VecBuilder.fill( - Math.sqrt(1.0 / (weightAx + weightBx)), - Math.sqrt(1.0 / (weightAy + weightBy)), - Math.sqrt(1.0 / (1.0 / varianceA.get(2, 0) + 1.0 / varianceB.get(2, 0)))); - - int numTags = a.getNumTags() + b.getNumTags(); - 
double time = b.getTimestampSeconds(); - - return new VisionFieldPoseEstimate(fusedPose, time, fusedStdDev, numTags); - } - - @Override - public void periodic() { - double startTime = RobotTime.getTimestampSeconds(); - io.readInputs(inputs); - - logCameraInputs("Vision/CameraA", inputs.cameraA); - logCameraInputs("Vision/CameraB", inputs.cameraB); - - var maybeMTA = processCamera(inputs.cameraA, "CameraA", VisionConstants.kRobotToCameraA); - var maybeMTB = processCamera(inputs.cameraB, "CameraB", VisionConstants.kRobotToCameraB); - - if (!useVision) { - Logger.recordOutput("Vision/usingVision", false); - Logger.recordOutput("Vision/exclusiveTagId", state.getExclusiveTag().orElse(-1)); - Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - startTime); - return; - } - - Logger.recordOutput("Vision/usingVision", true); - - Optional accepted = Optional.empty(); - if (maybeMTA.isPresent() != maybeMTB.isPresent()) { - accepted = maybeMTA.isPresent() ? maybeMTA : maybeMTB; - } else if (maybeMTA.isPresent() && maybeMTB.isPresent()) { - accepted = Optional.of(fuseEstimates(maybeMTA.get(), maybeMTB.get())); - } - - accepted.ifPresent( - est -> { - Logger.recordOutput("Vision/fusedAccepted", est.getVisionRobotPoseMeters()); - state.updateMegatagEstimate(est); - }); - - Logger.recordOutput("Vision/exclusiveTagId", state.getExclusiveTag().orElse(-1)); - Logger.recordOutput("Vision/latencyPeriodicSec", RobotTime.getTimestampSeconds() - startTime); - } - - private void logCameraInputs(String prefix, VisionIO.VisionIOInputs.CameraInputs cam) { - Logger.recordOutput(prefix + "/SeesTarget", cam.seesTarget); - Logger.recordOutput(prefix + "/MegatagCount", cam.megatagCount); - - if (DriverStation.isDisabled()) { - SmartDashboard.putBoolean(prefix + "/SeesTarget", cam.seesTarget); - SmartDashboard.putNumber(prefix + "/MegatagCount", cam.megatagCount); - } - - if (cam.pose3d != null) { - Logger.recordOutput(prefix + "/Pose3d", cam.pose3d); - } - - if 
(cam.megatagPoseEstimate != null) { - Logger.recordOutput(prefix + "/MegatagPoseEstimate", cam.megatagPoseEstimate.fieldToRobot()); - Logger.recordOutput(prefix + "/Quality", cam.megatagPoseEstimate.quality()); - Logger.recordOutput(prefix + "/AvgTagArea", cam.megatagPoseEstimate.avgTagArea()); - } - - if (cam.fiducialObservations != null) { - Logger.recordOutput(prefix + "/FiducialCount", cam.fiducialObservations.length); - } - } - - private Optional processCamera( - VisionIO.VisionIOInputs.CameraInputs cam, String label, Transform2d robotToCamera) { - - String logPrefix = "Vision/" + label; - - if (!cam.seesTarget) { - return Optional.empty(); - } - - Optional estimate = Optional.empty(); - - if (cam.megatagPoseEstimate != null) { - Optional mtEstimate = - processMegatagPoseEstimate(cam.megatagPoseEstimate, cam, logPrefix); - - mtEstimate.ifPresent( - est -> - Logger.recordOutput( - logPrefix + "/AcceptedMegatagEstimate", est.getVisionRobotPoseMeters())); - - Optional gyroEstimate = - fuseWithGyro(cam.megatagPoseEstimate, cam, logPrefix); - - gyroEstimate.ifPresent( - est -> - Logger.recordOutput( - logPrefix + "/FuseWithGyroEstimate", est.getVisionRobotPoseMeters())); - - // Prefer Megatag when available - if (mtEstimate.isPresent()) { - estimate = mtEstimate; - Logger.recordOutput(logPrefix + "/AcceptMegatag", true); - Logger.recordOutput(logPrefix + "/AcceptGyro", false); - } else if (gyroEstimate.isPresent()) { - estimate = gyroEstimate; - Logger.recordOutput(logPrefix + "/AcceptMegatag", false); - Logger.recordOutput(logPrefix + "/AcceptGyro", true); - } else { - Logger.recordOutput(logPrefix + "/AcceptMegatag", false); - Logger.recordOutput(logPrefix + "/AcceptGyro", false); - } - } - - return estimate; - } - - private Optional fuseWithGyro( - MegatagPoseEstimate poseEstimate, - VisionIO.VisionIOInputs.CameraInputs cam, - String logPrefix) { - - if (poseEstimate.timestampSeconds() <= state.lastUsedMegatagTimestamp()) { - return Optional.empty(); - } - - // 
Use Megatag directly when 2 or more tags are visible - if (poseEstimate.fiducialIds().length > 1) { - return Optional.empty(); - } - - // Reject if the robot is yawing rapidly (time-sync unreliable) - final double kHighYawLookbackS = 0.3; - final double kHighYawVelocityRadS = 5.0; - - if (state - .getMaxAbsDriveYawAngularVelocityInRange( - poseEstimate.timestampSeconds() - kHighYawLookbackS, - poseEstimate.timestampSeconds()) - .orElse(Double.POSITIVE_INFINITY) - > kHighYawVelocityRadS) { - return Optional.empty(); - } - - var priorPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); - if (priorPose.isEmpty()) { - return Optional.empty(); - } - - var maybeFieldToTag = - Constants.kAprilTagLayoutReefsOnly.getTagPose(poseEstimate.fiducialIds()[0]); - if (maybeFieldToTag.isEmpty()) { - return Optional.empty(); - } - - Pose2d fieldToTag = - new Pose2d(maybeFieldToTag.get().toPose2d().getTranslation(), Rotation2d.kZero); - - Pose2d robotToTag = fieldToTag.relativeTo(poseEstimate.fieldToRobot()); - - Pose2d posteriorPose = - new Pose2d( - fieldToTag - .getTranslation() - .minus(robotToTag.getTranslation().rotateBy(priorPose.get().getRotation())), - priorPose.get().getRotation()); - - double xStd = cam.standardDeviations[VisionConstants.kMegatag1XStdDevIndex]; - double yStd = cam.standardDeviations[VisionConstants.kMegatag1YStdDevIndex]; - double xyStd = Math.max(xStd, yStd); - - return Optional.of( - new VisionFieldPoseEstimate( - posteriorPose, - poseEstimate.timestampSeconds(), - VecBuilder.fill(xyStd, xyStd, VisionConstants.kLargeVariance), - poseEstimate.fiducialIds().length)); - } - - private Optional processMegatagPoseEstimate( - MegatagPoseEstimate poseEstimate, - VisionIO.VisionIOInputs.CameraInputs cam, - String logPrefix) { - - if (poseEstimate.timestampSeconds() <= state.lastUsedMegatagTimestamp()) { - return Optional.empty(); - } - - // Single-tag extra checks - if (poseEstimate.fiducialIds().length < 2) { - for (var fiducial : 
cam.fiducialObservations) { - if (fiducial.ambiguity() > VisionConstants.kDefaultAmbiguityThreshold) { - return Optional.empty(); - } - } - - if (poseEstimate.avgTagArea() < VisionConstants.kTagMinAreaForSingleTagMegatag) { - return Optional.empty(); - } - - var priorPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); - if (poseEstimate.avgTagArea() < VisionConstants.kTagAreaThresholdForYawCheck - && priorPose.isPresent()) { - double yawDiff = - Math.abs( - MathUtil.angleModulus( - priorPose.get().getRotation().getRadians() - - poseEstimate.fieldToRobot().getRotation().getRadians())); - - if (yawDiff > Units.degreesToRadians(VisionConstants.kDefaultYawDiffThreshold)) { - return Optional.empty(); - } - } - } - - if (poseEstimate.fieldToRobot().getTranslation().getNorm() - < VisionConstants.kDefaultNormThreshold) { - return Optional.empty(); - } - - if (Math.abs(cam.pose3d.getZ()) > VisionConstants.kDefaultZThreshold) { - return Optional.empty(); - } - - // Exclusive-tag filtering - var exclusiveTag = state.getExclusiveTag(); - boolean hasExclusiveId = - exclusiveTag.isPresent() - && java.util.Arrays.stream(poseEstimate.fiducialIds()) - .anyMatch(id -> id == exclusiveTag.get()); - - if (exclusiveTag.isPresent() && !hasExclusiveId) { - return Optional.empty(); - } - - var loggedPose = state.getFieldToRobot(poseEstimate.timestampSeconds()); - if (loggedPose.isEmpty()) { - return Optional.empty(); - } - - Pose2d estimatePose = poseEstimate.fieldToRobot(); - - double scaleFactor = 1.0 / poseEstimate.quality(); - double xStd = cam.standardDeviations[VisionConstants.kMegatag1XStdDevIndex] * scaleFactor; - double yStd = cam.standardDeviations[VisionConstants.kMegatag1YStdDevIndex] * scaleFactor; - double rotStd = cam.standardDeviations[VisionConstants.kMegatag1YawStdDevIndex] * scaleFactor; - - double xyStd = Math.max(xStd, yStd); - Matrix visionStdDevs = VecBuilder.fill(xyStd, xyStd, rotStd); - - return Optional.of( - new VisionFieldPoseEstimate( - estimatePose, 
- poseEstimate.timestampSeconds(), - visionStdDevs, - poseEstimate.fiducialIds().length)); - } - - public void setUseVision(boolean useVision) { - this.useVision = useVision; - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java deleted file mode 100644 index 7d0e53c..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC6328/RobotState.java +++ /dev/null @@ -1,448 +0,0 @@ -// // Copyright (c) 2025 FRC 6328 -// // http://github.com/Mechanical-Advantage -// // -// // Use of this source code is governed by an MIT-style -// // license that can be found in the LICENSE file at -// // the root directory of this project. - -// package org.littletonrobotics.frc2025; - -// import edu.wpi.first.math.MathUtil; -// import edu.wpi.first.math.Matrix; -// import edu.wpi.first.math.Nat; -// import edu.wpi.first.math.VecBuilder; -// import edu.wpi.first.math.geometry.*; -// import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer; -// import edu.wpi.first.math.kinematics.ChassisSpeeds; -// import edu.wpi.first.math.kinematics.SwerveDriveKinematics; -// import edu.wpi.first.math.kinematics.SwerveModulePosition; -// import edu.wpi.first.math.numbers.N1; -// import edu.wpi.first.math.numbers.N3; -// import edu.wpi.first.math.util.Units; -// import edu.wpi.first.wpilibj.DriverStation; -// import edu.wpi.first.wpilibj.Timer; -// import java.util.*; -// import java.util.stream.Collectors; -// import lombok.Getter; -// import lombok.Setter; -// import lombok.experimental.ExtensionMethod; -// import org.littletonrobotics.frc2025.subsystems.drive.DriveConstants; -// import org.littletonrobotics.frc2025.subsystems.vision.VisionConstants; -// import org.littletonrobotics.frc2025.util.*; -// import org.littletonrobotics.junction.AutoLogOutput; -// import org.littletonrobotics.junction.Logger; - -// @ExtensionMethod({GeomUtil.class}) -// public class RobotState { -// // Must be less 
than 2.0 -// private static final LoggedTunableNumber txTyObservationStaleSecs = -// new LoggedTunableNumber("RobotState/TxTyObservationStaleSeconds", 0.2); -// private static final LoggedTunableNumber minDistanceTagPoseBlend = -// new LoggedTunableNumber("RobotState/MinDistanceTagPoseBlend", Units.inchesToMeters(24.0)); -// private static final LoggedTunableNumber maxDistanceTagPoseBlend = -// new LoggedTunableNumber("RobotState/MaxDistanceTagPoseBlend", Units.inchesToMeters(36.0)); -// private static final LoggedTunableNumber coralOverlap = -// new LoggedTunableNumber("RobotState/CoralOverlap", .5); -// private static final LoggedTunableNumber coralPersistanceTime = -// new LoggedTunableNumber("RobotState/CoralPersistanceTime", 0.75); -// private static final LoggedTunableNumber algaePersistanceTime = -// new LoggedTunableNumber("RobotState/AlgaePersistanceTime", 0.1); - -// private static final double poseBufferSizeSec = 2.0; -// private static final Matrix odometryStateStdDevs = -// new Matrix<>(VecBuilder.fill(0.003, 0.003, 0.002)); -// private static final Map tagPoses2d = new HashMap<>(); - -// static { -// for (int i = 1; i <= FieldConstants.aprilTagCount; i++) { -// tagPoses2d.put( -// i, -// FieldConstants.defaultAprilTagType -// .getLayout() -// .getTagPose(i) -// .map(Pose3d::toPose2d) -// .orElse(Pose2d.kZero)); -// } -// } - -// private static RobotState instance; - -// public static RobotState getInstance() { -// if (instance == null) instance = new RobotState(); -// return instance; -// } - -// // Pose Estimation Members -// @Getter @AutoLogOutput private Pose2d odometryPose = Pose2d.kZero; -// @Getter @AutoLogOutput private Pose2d estimatedPose = Pose2d.kZero; - -// private final TimeInterpolatableBuffer poseBuffer = -// TimeInterpolatableBuffer.createBuffer(poseBufferSizeSec); -// private final Matrix qStdDevs = new Matrix<>(Nat.N3(), Nat.N1()); -// // Odometry -// private final SwerveDriveKinematics kinematics; -// private SwerveModulePosition[] 
lastWheelPositions = -// new SwerveModulePosition[] { -// new SwerveModulePosition(), -// new SwerveModulePosition(), -// new SwerveModulePosition(), -// new SwerveModulePosition() -// }; -// // Assume gyro starts at zero -// private Rotation2d gyroOffset = Rotation2d.kZero; - -// private final Map txTyPoses = new HashMap<>(); -// private Set coralPoses = new HashSet<>(); - -// @AutoLogOutput private Rotation2d leftAlgaeObservation = Rotation2d.kZero; -// @AutoLogOutput private Rotation2d rightAlgaeObservation = Rotation2d.kZero; -// private double leftAlgaeObservationTimestamp = 0.0; -// private double rightAlgaeObservationTimestamp = 0.0; - -// @Getter -// @AutoLogOutput(key = "RobotState/RobotVelocity") -// private ChassisSpeeds robotVelocity = new ChassisSpeeds(); - -// @Getter -// @Setter -// @AutoLogOutput(key = "RobotState/ElevatorExtensionPercent") -// private double elevatorExtensionPercent; - -// @Getter @Setter private OptionalDouble distanceToBranch = OptionalDouble.empty(); -// @Getter @Setter private OptionalDouble distanceToReefAlgae = OptionalDouble.empty(); -// @Getter @Setter private Rotation2d pitch = Rotation2d.kZero; -// @Getter @Setter private Rotation2d roll = Rotation2d.kZero; - -// private RobotState() { -// for (int i = 0; i < 3; ++i) { -// qStdDevs.set(i, 0, Math.pow(odometryStateStdDevs.get(i, 0), 2)); -// } -// kinematics = new SwerveDriveKinematics(DriveConstants.moduleTranslations); - -// for (int i = 1; i <= FieldConstants.aprilTagCount; i++) { -// txTyPoses.put(i, new TxTyPoseRecord(Pose2d.kZero, Double.POSITIVE_INFINITY, -1.0)); -// } -// } - -// public void resetPose(Pose2d pose) { -// // Gyro offset is the rotation that maps the old gyro rotation (estimated - offset) to the -// new -// // frame of rotation -// gyroOffset = pose.getRotation().minus(odometryPose.getRotation().minus(gyroOffset)); -// estimatedPose = pose; -// odometryPose = pose; -// poseBuffer.clear(); -// } - -// public void 
addOdometryObservation(OdometryObservation observation) { -// Twist2d twist = kinematics.toTwist2d(lastWheelPositions, observation.wheelPositions()); -// lastWheelPositions = observation.wheelPositions(); -// Pose2d lastOdometryPose = odometryPose; -// odometryPose = odometryPose.exp(twist); -// // Use gyro if connected -// observation.gyroAngle.ifPresent( -// gyroAngle -> { -// // Add offset to measured angle -// Rotation2d angle = gyroAngle.plus(gyroOffset); -// odometryPose = new Pose2d(odometryPose.getTranslation(), angle); -// }); -// // Add pose to buffer at timestamp -// poseBuffer.addSample(observation.timestamp(), odometryPose); -// // Calculate diff from last odometry pose and add onto pose estimate -// Twist2d finalTwist = lastOdometryPose.log(odometryPose); -// estimatedPose = estimatedPose.exp(finalTwist); -// } - -// public void addVisionObservation(VisionObservation observation) { -// // If measurement is old enough to be outside the pose buffer's timespan, skip. -// try { -// if (poseBuffer.getInternalBuffer().lastKey() - poseBufferSizeSec > observation.timestamp()) -// { -// return; -// } -// } catch (NoSuchElementException ex) { -// return; -// } -// // Get odometry based pose at timestamp -// var sample = poseBuffer.getSample(observation.timestamp()); -// if (sample.isEmpty()) { -// // exit if not there -// return; -// } - -// // sample --> odometryPose transform and backwards of that -// var sampleToOdometryTransform = new Transform2d(sample.get(), odometryPose); -// var odometryToSampleTransform = new Transform2d(odometryPose, sample.get()); -// // get old estimate by applying odometryToSample Transform -// Pose2d estimateAtTime = estimatedPose.plus(odometryToSampleTransform); - -// // Calculate 3 x 3 vision matrix -// var r = new double[3]; -// for (int i = 0; i < 3; ++i) { -// r[i] = observation.stdDevs().get(i, 0) * observation.stdDevs().get(i, 0); -// } -// // Solve for closed form Kalman gain for continuous Kalman filter with A = 0 -// // 
and C = I. See wpimath/algorithms.md. -// Matrix visionK = new Matrix<>(Nat.N3(), Nat.N3()); -// for (int row = 0; row < 3; ++row) { -// double stdDev = qStdDevs.get(row, 0); -// if (stdDev == 0.0) { -// visionK.set(row, row, 0.0); -// } else { -// visionK.set(row, row, stdDev / (stdDev + Math.sqrt(stdDev * r[row]))); -// } -// } -// // difference between estimate and vision pose -// Transform2d transform = new Transform2d(estimateAtTime, observation.visionPose()); -// // scale transform by visionK -// var kTimesTransform = -// visionK.times( -// VecBuilder.fill( -// transform.getX(), transform.getY(), transform.getRotation().getRadians())); -// Transform2d scaledTransform = -// new Transform2d( -// kTimesTransform.get(0, 0), -// kTimesTransform.get(1, 0), -// Rotation2d.fromRadians(kTimesTransform.get(2, 0))); - -// // Recalculate current estimate by applying scaled transform to old estimate -// // then replaying odometry data -// estimatedPose = estimateAtTime.plus(scaledTransform).plus(sampleToOdometryTransform); -// } - -// public void addTxTyObservation(TxTyObservation observation) { -// // Skip if current data for tag is newer -// if (txTyPoses.containsKey(observation.tagId()) -// && txTyPoses.get(observation.tagId()).timestamp() >= observation.timestamp()) { -// return; -// } - -// // Get rotation at timestamp -// var sample = poseBuffer.getSample(observation.timestamp()); -// if (sample.isEmpty()) { -// // exit if not there -// return; -// } -// Rotation2d robotRotation = -// estimatedPose.transformBy(new Transform2d(odometryPose, sample.get())).getRotation(); - -// // Average tx's and ty's -// double tx = 0.0; -// double ty = 0.0; -// for (int i = 0; i < 4; i++) { -// tx += observation.tx()[i]; -// ty += observation.ty()[i]; -// } -// tx /= 4.0; -// ty /= 4.0; - -// Pose3d cameraPose = VisionConstants.cameras[observation.camera()].pose().get(); - -// // Use 3D distance and tag angles to find robot pose -// Translation2d camToTagTranslation = -// new 
Pose3d(Translation3d.kZero, new Rotation3d(0, ty, -tx)) -// .transformBy( -// new Transform3d(new Translation3d(observation.distance(), 0, 0), -// Rotation3d.kZero)) -// .getTranslation() -// .rotateBy(new Rotation3d(0, cameraPose.getRotation().getY(), 0)) -// .toTranslation2d(); -// Rotation2d camToTagRotation = -// robotRotation.plus( -// cameraPose.toPose2d().getRotation().plus(camToTagTranslation.getAngle())); -// var tagPose2d = tagPoses2d.get(observation.tagId()); -// if (tagPose2d == null) return; -// Translation2d fieldToCameraTranslation = -// new Pose2d(tagPose2d.getTranslation(), camToTagRotation.plus(Rotation2d.kPi)) -// .transformBy(GeomUtil.toTransform2d(camToTagTranslation.getNorm(), 0.0)) -// .getTranslation(); -// Pose2d robotPose = -// new Pose2d( -// fieldToCameraTranslation, -// robotRotation.plus(cameraPose.toPose2d().getRotation())) -// .transformBy(new Transform2d(cameraPose.toPose2d(), Pose2d.kZero)); -// // Use gyro angle at time for robot rotation -// robotPose = new Pose2d(robotPose.getTranslation(), robotRotation); - -// // Add transform to current odometry based pose for latency correction -// txTyPoses.put( -// observation.tagId(), -// new TxTyPoseRecord(robotPose, camToTagTranslation.getNorm(), observation.timestamp())); -// } - -// public void addDriveSpeeds(ChassisSpeeds speeds) { -// robotVelocity = speeds; -// } - -// @AutoLogOutput(key = "RobotState/FieldVelocity") -// public ChassisSpeeds getFieldVelocity() { -// return ChassisSpeeds.fromRobotRelativeSpeeds(robotVelocity, getRotation()); -// } - -// /** Get 2d pose estimate of robot if not stale. 
*/ -// public Optional getTxTyPose(int tagId) { -// if (!txTyPoses.containsKey(tagId)) { -// return Optional.empty(); -// } -// var data = txTyPoses.get(tagId); -// // Check if stale -// if (Timer.getTimestamp() - data.timestamp() >= txTyObservationStaleSecs.get()) { -// return Optional.empty(); -// } -// // Get odometry based pose at timestamp -// var sample = poseBuffer.getSample(data.timestamp()); -// // Latency compensate -// return sample.map(pose2d -> data.pose().plus(new Transform2d(pose2d, odometryPose))); -// } - -// /** -// * Get estimated pose using txty data given tagId on reef and aligned pose on reef. Used for -// algae -// * intaking and coral scoring. -// */ -// public ReefPoseEstimate getReefPose(int face, Pose2d finalPose) { -// final boolean isRed = AllianceFlipUtil.shouldFlip(); -// var tagPose = -// getTxTyPose( -// switch (face) { -// case 1 -> isRed ? 6 : 19; -// case 2 -> isRed ? 11 : 20; -// case 3 -> isRed ? 10 : 21; -// case 4 -> isRed ? 9 : 22; -// case 5 -> isRed ? 8 : 17; -// // 0 -// default -> isRed ? 
7 : 18; -// }); -// // Use estimated pose if tag pose is not present -// if (tagPose.isEmpty()) return new ReefPoseEstimate(getEstimatedPose(), 0.0); -// // Use distance from estimated pose to final pose to get t value -// final double t = -// MathUtil.clamp( -// (getEstimatedPose().getTranslation().getDistance(finalPose.getTranslation()) -// - minDistanceTagPoseBlend.get()) -// / (maxDistanceTagPoseBlend.get() - minDistanceTagPoseBlend.get()), -// 0.0, -// 1.0); -// return new ReefPoseEstimate(getEstimatedPose().interpolate(tagPose.get(), 1.0 - t), 1.0 - t); -// } - -// public void addCoralTxTyObservation(CoralTxTyObservation observation) { -// var oldOdometryPose = poseBuffer.getSample(observation.timestamp()); -// if (oldOdometryPose.isEmpty()) { -// return; -// } -// Pose2d fieldToRobot = -// estimatedPose.transformBy(new Transform2d(odometryPose, oldOdometryPose.get())); -// Pose3d robotToCamera = VisionConstants.cameras[observation.camera()].pose().get(); - -// // Assume coral height of zero and find midpoint of width of bottom tx ty -// double tx = (observation.tx()[2] + observation.tx()[3]) / 2; -// double ty = (observation.ty()[2] + observation.ty()[3]) / 2; - -// double cameraToCoralAngle = -robotToCamera.getRotation().getY() - ty; -// if (cameraToCoralAngle >= 0) { -// return; -// } - -// double cameraToCoralNorm = -// (-robotToCamera.getZ()) -// / Math.tan(-robotToCamera.getRotation().getY() - ty) -// / Math.cos(-tx); -// Pose2d fieldToCamera = fieldToRobot.transformBy(robotToCamera.toPose2d().toTransform2d()); -// Pose2d fieldToCoral = -// fieldToCamera -// .transformBy(new Transform2d(Translation2d.kZero, new Rotation2d(-tx))) -// .transformBy( -// new Transform2d(new Translation2d(cameraToCoralNorm, 0), Rotation2d.kZero)); -// Translation2d fieldToCoralTranslation2d = fieldToCoral.getTranslation(); -// CoralPoseRecord coralPoseRecord = -// new CoralPoseRecord(fieldToCoralTranslation2d, observation.timestamp()); - -// coralPoses = -// 
coralPoses.stream() -// .filter( -// (x) -> x.translation.getDistance(fieldToCoralTranslation2d) > coralOverlap.get()) -// .collect(Collectors.toSet()); -// coralPoses.add(coralPoseRecord); -// } - -// public Set getCoralTranslations() { -// if (Constants.getRobot() == Constants.RobotType.SIMBOT && -// DriverStation.isAutonomousEnabled()) { -// return AutoCoralSim.getCorals(); -// } -// return coralPoses.stream() -// .filter((x) -> Timer.getTimestamp() - x.timestamp() < coralPersistanceTime.get()) -// .map(CoralPoseRecord::translation) -// .collect(Collectors.toSet()); -// } - -// public Rotation2d getRotation() { -// return estimatedPose.getRotation(); -// } - -// public void addLeftAlgaeObservation(Rotation2d angle) { -// leftAlgaeObservation = angle; -// leftAlgaeObservationTimestamp = Timer.getTimestamp(); -// } - -// public void addRightAlgaeObservation(Rotation2d angle) { -// rightAlgaeObservation = angle; -// rightAlgaeObservationTimestamp = Timer.getTimestamp(); -// } - -// public Optional getLeftAlgaeObservation() { -// if (Timer.getTimestamp() - leftAlgaeObservationTimestamp < algaePersistanceTime.get()) { -// return Optional.of(leftAlgaeObservation); -// } else { -// return Optional.empty(); -// } -// } - -// public Optional getRightAlgaeObservation() { -// if (Timer.getTimestamp() - rightAlgaeObservationTimestamp < algaePersistanceTime.get()) { -// return Optional.of(rightAlgaeObservation); -// } else { -// return Optional.empty(); -// } -// } - -// public void periodicLog() { -// // Log tx/ty poses -// Pose2d[] tagPoses = new Pose2d[FieldConstants.aprilTagCount + 1]; -// for (int i = 0; i < FieldConstants.aprilTagCount + 1; i++) { -// tagPoses[i] = getTxTyPose(i).orElse(Pose2d.kZero); -// } -// Logger.recordOutput("RobotState/TxTyPoses", tagPoses); - -// // Log coral poses -// Logger.recordOutput( -// "RobotState/CoralPoses", -// getCoralTranslations().stream() -// .map( -// (translation) -> -// new Pose3d( -// new Translation3d( -// 
translation.getX(), -// translation.getY(), -// FieldConstants.coralDiameter / 2.0), -// new Rotation3d(new Rotation2d(Timer.getTimestamp() * 5.0)))) -// .toArray(Pose3d[]::new)); -// } - -// public record OdometryObservation( -// SwerveModulePosition[] wheelPositions, Optional gyroAngle, double timestamp) {} - -// public record VisionObservation(Pose2d visionPose, double timestamp, Matrix stdDevs) {} - -// public record TxTyObservation( -// int tagId, int camera, double[] tx, double[] ty, double distance, double timestamp) {} - -// public record TxTyPoseRecord(Pose2d pose, double distance, double timestamp) {} - -// public record CoralTxTyObservation(int camera, double[] tx, double[] ty, double timestamp) {} - -// public record CoralPoseRecord(Translation2d translation, double timestamp) {} - -// public record ReefPoseEstimate(Pose2d pose, double blend) {} -// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java deleted file mode 100644 index 2b21394..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC6328/Vision.java +++ /dev/null @@ -1,401 +0,0 @@ -// // Copyright (c) 2025 FRC 6328 -// // http://github.com/Mechanical-Advantage -// // -// // Use of this source code is governed by an MIT-style -// // license that can be found in the LICENSE file at -// // the root directory of this project. 
- -// package org.littletonrobotics.frc2025.subsystems.vision; - -// import static org.littletonrobotics.frc2025.subsystems.vision.VisionConstants.*; - -// import edu.wpi.first.math.VecBuilder; -// import edu.wpi.first.math.geometry.*; -// import edu.wpi.first.wpilibj.Alert; -// import edu.wpi.first.wpilibj.DriverStation; -// import edu.wpi.first.wpilibj.Timer; -// import java.util.*; -// import java.util.function.Supplier; -// import lombok.experimental.ExtensionMethod; -// import org.littletonrobotics.frc2025.FieldConstants; -// import org.littletonrobotics.frc2025.FieldConstants.AprilTagLayoutType; -// import org.littletonrobotics.frc2025.RobotState; -// import org.littletonrobotics.frc2025.RobotState.CoralTxTyObservation; -// import org.littletonrobotics.frc2025.RobotState.TxTyObservation; -// import org.littletonrobotics.frc2025.RobotState.VisionObservation; -// import org.littletonrobotics.frc2025.subsystems.leds.Leds; -// import org.littletonrobotics.frc2025.util.GeomUtil; -// import org.littletonrobotics.frc2025.util.LoggedTracer; -// import org.littletonrobotics.frc2025.util.VirtualSubsystem; -// import org.littletonrobotics.junction.Logger; -// import org.littletonrobotics.junction.networktables.LoggedNetworkBoolean; - -// /** Vision subsystem for vision. 
*/ -// @ExtensionMethod({GeomUtil.class}) -// public class Vision extends VirtualSubsystem { -// private final Supplier aprilTagLayoutSupplier; -// private final VisionIO[] io; -// private final VisionIOInputsAutoLogged[] inputs; -// private final AprilTagVisionIOInputsAutoLogged[] aprilTagInputs; -// private final ObjDetectVisionIOInputsAutoLogged[] objDetectInputs; -// private final LoggedNetworkBoolean recordingRequest = -// new LoggedNetworkBoolean("/SmartDashboard/Enable Recording", false); - -// private final Map lastFrameTimes = new HashMap<>(); -// private final Map lastTagDetectionTimes = new HashMap<>(); - -// private final double disconnectedTimeout = 0.5; -// private final Timer[] disconnectedTimers; -// private final Alert[] disconnectedAlerts; - -// public Vision(Supplier aprilTagLayoutSupplier, VisionIO... io) { -// this.aprilTagLayoutSupplier = aprilTagLayoutSupplier; -// this.io = io; -// inputs = new VisionIOInputsAutoLogged[io.length]; -// aprilTagInputs = new AprilTagVisionIOInputsAutoLogged[io.length]; -// objDetectInputs = new ObjDetectVisionIOInputsAutoLogged[io.length]; -// disconnectedTimers = new Timer[io.length]; -// disconnectedAlerts = new Alert[io.length]; -// for (int i = 0; i < io.length; i++) { -// inputs[i] = new VisionIOInputsAutoLogged(); -// aprilTagInputs[i] = new AprilTagVisionIOInputsAutoLogged(); -// objDetectInputs[i] = new ObjDetectVisionIOInputsAutoLogged(); -// disconnectedAlerts[i] = new Alert("", Alert.AlertType.kError); -// } - -// // Create map of last frame times for instances -// for (int i = 0; i < io.length; i++) { -// lastFrameTimes.put(i, 0.0); -// } - -// for (int i = 0; i < io.length; i++) { -// disconnectedTimers[i] = new Timer(); -// disconnectedTimers[i].start(); -// } -// } - -// public void periodic() { -// for (int i = 0; i < io.length; i++) { -// io[i].updateInputs(inputs[i], aprilTagInputs[i], objDetectInputs[i]); -// Logger.processInputs("Vision/Inst" + i, aprilTagInputs[i]); -// 
Logger.processInputs("AprilTagVision/Inst" + i, aprilTagInputs[i]); -// Logger.processInputs("ObjDetectVision/Inst" + i, objDetectInputs[i]); -// } - -// // Update recording state -// boolean shouldRecord = DriverStation.isFMSAttached() || recordingRequest.get(); -// for (var ioInst : io) { -// ioInst.setRecording(shouldRecord); -// } - -// // Update disconnected alerts & LEDs -// boolean anyNTDisconnected = false; -// for (int i = 0; i < io.length; i++) { -// if (aprilTagInputs[i].timestamps.length > 0 || objDetectInputs[i].timestamps.length > 0) { -// disconnectedTimers[i].reset(); -// } -// boolean disconnected = -// disconnectedTimers[i].hasElapsed(disconnectedTimeout) || !inputs[i].ntConnected; -// if (disconnected) { -// disconnectedAlerts[i].setText( -// inputs[i].ntConnected -// ? "Northstar " + i + " connected to NT but not publishing frames" -// : "Northstar " + i + " disconnected from NT"); -// } -// disconnectedAlerts[i].set(disconnected); -// anyNTDisconnected = anyNTDisconnected || !inputs[i].ntConnected; -// } -// Leds.getInstance().visionDisconnected = anyNTDisconnected; - -// // Loop over instances -// List allRobotPoses = new ArrayList<>(); -// List allVisionObservations = new ArrayList<>(); -// Map allTxTyObservations = new HashMap<>(); -// List allCoralTxTyObservations = new ArrayList<>(); -// for (int instanceIndex = 0; instanceIndex < io.length; instanceIndex++) { -// // Loop over frames -// for (int frameIndex = 0; -// frameIndex < aprilTagInputs[instanceIndex].timestamps.length; -// frameIndex++) { -// lastFrameTimes.put(instanceIndex, Timer.getTimestamp()); -// var timestamp = -// aprilTagInputs[instanceIndex].timestamps[frameIndex] + timestampOffset.get(); -// var values = aprilTagInputs[instanceIndex].frames[frameIndex]; - -// // Exit if blank frame -// if (values.length == 0 || values[0] == 0) { -// continue; -// } - -// // Switch based on number of poses -// Pose3d cameraPose = null; -// Pose2d robotPose = null; -// boolean 
useVisionRotation = false; -// switch ((int) values[0]) { -// case 1: -// // One pose (multi-tag), use directly -// cameraPose = -// new Pose3d( -// values[2], -// values[3], -// values[4], -// new Rotation3d(new Quaternion(values[5], values[6], values[7], values[8]))); -// robotPose = -// cameraPose -// .toPose2d() -// .transformBy( -// -// cameras[instanceIndex].pose().get().toPose2d().toTransform2d().inverse()); -// useVisionRotation = true; -// break; -// case 2: -// // Two poses (one tag), disambiguate -// double error0 = values[1]; -// double error1 = values[9]; -// Pose3d cameraPose0 = -// new Pose3d( -// values[2], -// values[3], -// values[4], -// new Rotation3d(new Quaternion(values[5], values[6], values[7], values[8]))); -// Pose3d cameraPose1 = -// new Pose3d( -// values[10], -// values[11], -// values[12], -// new Rotation3d(new Quaternion(values[13], values[14], values[15], -// values[16]))); -// Transform2d cameraToRobot = -// cameras[instanceIndex].pose().get().toPose2d().toTransform2d().inverse(); -// Pose2d robotPose0 = cameraPose0.toPose2d().transformBy(cameraToRobot); -// Pose2d robotPose1 = cameraPose1.toPose2d().transformBy(cameraToRobot); - -// // Check for ambiguity and select based on estimated rotation -// if (error0 < error1 * ambiguityThreshold || error1 < error0 * ambiguityThreshold) { -// Rotation2d currentRotation = RobotState.getInstance().getRotation(); -// Rotation2d visionRotation0 = robotPose0.getRotation(); -// Rotation2d visionRotation1 = robotPose1.getRotation(); -// if (Math.abs(currentRotation.minus(visionRotation0).getRadians()) -// < Math.abs(currentRotation.minus(visionRotation1).getRadians())) { -// cameraPose = cameraPose0; -// robotPose = robotPose0; -// } else { -// cameraPose = cameraPose1; -// robotPose = robotPose1; -// } -// } -// break; -// } - -// // Exit if no data -// if (cameraPose == null || robotPose == null) { -// continue; -// } - -// // Exit if robot pose is off the field -// if (robotPose.getX() < 
-fieldBorderMargin -// || robotPose.getX() > FieldConstants.fieldLength + fieldBorderMargin -// || robotPose.getY() < -fieldBorderMargin -// || robotPose.getY() > FieldConstants.fieldWidth + fieldBorderMargin) { -// continue; -// } - -// // Get tag poses and update last detection times -// List tagPoses = new ArrayList<>(); -// for (int i = (values[0] == 1 ? 9 : 17); i < values.length; i += 10) { -// int tagId = (int) values[i]; -// lastTagDetectionTimes.put(tagId, Timer.getTimestamp()); -// Optional tagPose = -// aprilTagLayoutSupplier.get().getLayout().getTagPose((int) values[i]); -// tagPose.ifPresent(tagPoses::add); -// } -// if (tagPoses.isEmpty()) continue; - -// // Calculate average distance to tag -// double totalDistance = 0.0; -// for (Pose3d tagPose : tagPoses) { -// totalDistance += tagPose.getTranslation().getDistance(cameraPose.getTranslation()); -// } -// double avgDistance = totalDistance / tagPoses.size(); - -// // Add observation to list -// double xyStdDev = -// xyStdDevCoefficient -// * Math.pow(avgDistance, 1.2) -// / Math.pow(tagPoses.size(), 2.0) -// * cameras[instanceIndex].stdDevFactor(); -// double thetaStdDev = -// useVisionRotation -// ? 
thetaStdDevCoefficient -// * Math.pow(avgDistance, 1.2) -// / Math.pow(tagPoses.size(), 2.0) -// * cameras[instanceIndex].stdDevFactor() -// : Double.POSITIVE_INFINITY; -// allVisionObservations.add( -// new VisionObservation( -// robotPose, timestamp, VecBuilder.fill(xyStdDev, xyStdDev, thetaStdDev))); -// allRobotPoses.add(robotPose); - -// // Log data from instance -// if (enableInstanceLogging) { -// Logger.recordOutput( -// "AprilTagVision/Inst" + instanceIndex + "/LatencySecs", -// Timer.getTimestamp() - timestamp); -// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/RobotPose", robotPose); -// Logger.recordOutput( -// "AprilTagVision/Inst" + instanceIndex + "/TagPoses", -// tagPoses.toArray(Pose3d[]::new)); -// } -// } - -// // Get tag tx ty observation data -// Map txTyObservations = new HashMap<>(); -// for (int frameIndex = 0; -// frameIndex < aprilTagInputs[instanceIndex].timestamps.length; -// frameIndex++) { -// var timestamp = aprilTagInputs[instanceIndex].timestamps[frameIndex]; -// var values = aprilTagInputs[instanceIndex].frames[frameIndex]; -// int tagEstimationDataEndIndex = -// switch ((int) values[0]) { -// default -> 0; -// case 1 -> 8; -// case 2 -> 16; -// }; - -// for (int index = tagEstimationDataEndIndex + 1; index < values.length; index += 10) { -// double[] tx = new double[4]; -// double[] ty = new double[4]; -// for (int i = 0; i < 4; i++) { -// tx[i] = values[index + 1 + (2 * i)]; -// ty[i] = values[index + 1 + (2 * i) + 1]; -// } -// int tagId = (int) values[index]; -// double distance = values[index + 9]; - -// txTyObservations.put( -// tagId, new TxTyObservation(tagId, instanceIndex, tx, ty, distance, timestamp)); -// } -// } - -// // Save tx ty observation data -// for (var observation : txTyObservations.values()) { -// if (!allTxTyObservations.containsKey(observation.tagId()) -// || observation.distance() < allTxTyObservations.get(observation.tagId()).distance()) -// { -// 
allTxTyObservations.put(observation.tagId(), observation); -// } -// } - -// // Record object detection observations -// switch (instanceIndex) { -// case 2: -// // Record coral observations -// for (int frameIndex = 0; -// frameIndex < objDetectInputs[instanceIndex].timestamps.length; -// frameIndex++) { -// double[] frame = objDetectInputs[instanceIndex].frames[frameIndex]; -// for (int i = 0; i < frame.length; i += 10) { -// if (frame[i + 1] > coralDetectConfidenceThreshold) { -// double[] tx = new double[4]; -// double[] ty = new double[4]; -// for (int z = 0; z < 4; z++) { -// tx[z] = frame[i + 2 + (2 * z)]; -// ty[z] = frame[i + 2 + (2 * z) + 1]; -// } -// allCoralTxTyObservations.add( -// new CoralTxTyObservation( -// instanceIndex, -// tx, -// ty, -// objDetectInputs[instanceIndex].timestamps[frameIndex])); -// } -// } -// } -// break; - -// case 0: -// case 1: -// // Record algae observations -// if (objDetectInputs[instanceIndex].timestamps.length > 0) { -// double[] frame = -// objDetectInputs[instanceIndex] -// .frames[objDetectInputs[instanceIndex].timestamps.length - 1]; -// double largestSize = 0.0; -// Rotation2d angle = null; -// for (int i = 0; i < frame.length; i += 10) { -// if (frame[i + 1] > algaeDetectConfidenceThreshold) { -// // Read data from frame -// double[] tx = new double[4]; -// double[] ty = new double[4]; -// for (int z = 0; z < 4; z++) { -// tx[z] = frame[i + 2 + (2 * z)]; -// ty[z] = frame[i + 2 + (2 * z) + 1]; -// } - -// // Check if at bottom of frame -// double maxTy = Math.max(ty[2], ty[3]); -// if (maxTy < 0.4) { -// continue; -// } - -// // Check if largest algae -// double size = Math.abs(tx[0] - tx[1]); -// if (size > largestSize) { -// largestSize = size; -// angle = Rotation2d.fromRadians(-(tx[0] + tx[1]) / 2.0); -// } -// } -// } -// if (angle != null) { -// if (instanceIndex == 0) { -// RobotState.getInstance().addLeftAlgaeObservation(angle); -// } else { -// RobotState.getInstance().addRightAlgaeObservation(angle); 
-// } -// } -// } -// break; -// } - -// // If no frames from instances, clear robot pose -// if (enableInstanceLogging && aprilTagInputs[instanceIndex].timestamps.length == 0) { -// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/RobotPose", Pose2d.kZero); -// } - -// // If no recent frames from instance, clear tag poses -// if (enableInstanceLogging -// && Timer.getTimestamp() - lastFrameTimes.get(instanceIndex) > targetLogTimeSecs) { -// Logger.recordOutput("AprilTagVision/Inst" + instanceIndex + "/TagPoses", new Pose3d[] -// {}); -// } -// } - -// // Log robot poses -// Logger.recordOutput("AprilTagVision/RobotPoses", allRobotPoses.toArray(Pose2d[]::new)); - -// // Log tag poses -// List allTagPoses = new ArrayList<>(); -// for (Map.Entry detectionEntry : lastTagDetectionTimes.entrySet()) { -// if (Timer.getTimestamp() - detectionEntry.getValue() < targetLogTimeSecs) { -// aprilTagLayoutSupplier -// .get() -// .getLayout() -// .getTagPose(detectionEntry.getKey()) -// .ifPresent(allTagPoses::add); -// } -// } -// Logger.recordOutput("AprilTagVision/TagPoses", allTagPoses.toArray(Pose3d[]::new)); - -// // Send results to robot state -// allVisionObservations.stream() -// .sorted(Comparator.comparingDouble(VisionObservation::timestamp)) -// .forEach(RobotState.getInstance()::addVisionObservation); -// allTxTyObservations.values().stream().forEach(RobotState.getInstance()::addTxTyObservation); -// allCoralTxTyObservations.stream() -// .sorted(Comparator.comparingDouble(CoralTxTyObservation::timestamp)) -// .forEach(RobotState.getInstance()::addCoralTxTyObservation); - -// // Record cycle time -// LoggedTracer.record("Vision"); -// } -// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java b/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java deleted file mode 100644 index 3f84f9d..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC6328/VisionConstants.java +++ /dev/null @@ -1,175 +0,0 @@ -// 
// Copyright (c) 2025 FRC 6328 -// // http://github.com/Mechanical-Advantage -// // -// // Use of this source code is governed by an MIT-style -// // license that can be found in the LICENSE file at -// // the root directory of this project. - -// package org.littletonrobotics.frc2025.subsystems.vision; - -// import edu.wpi.first.math.geometry.Pose3d; -// import edu.wpi.first.math.geometry.Rotation3d; -// import edu.wpi.first.math.util.Units; -// import java.util.function.Supplier; -// import lombok.Builder; -// import org.littletonrobotics.frc2025.Constants; -// import org.littletonrobotics.frc2025.Constants.Mode; -// import org.littletonrobotics.frc2025.util.LoggedTunableNumber; - -// public class VisionConstants { -// private static final boolean forceEnableInstanceLogging = false; -// public static final boolean enableInstanceLogging = -// forceEnableInstanceLogging || Constants.getMode() == Mode.REPLAY; - -// public static final double ambiguityThreshold = 0.4; -// public static final double targetLogTimeSecs = 0.1; -// public static final double fieldBorderMargin = 0.5; -// public static final double xyStdDevCoefficient = 0.01; -// public static final double thetaStdDevCoefficient = 0.03; -// public static final double demoTagPosePersistenceSecs = 0.5; -// public static final double coralDetectConfidenceThreshold = 0.35; -// public static final double algaeDetectConfidenceThreshold = 0.35; -// public static final LoggedTunableNumber timestampOffset = -// new LoggedTunableNumber("AprilTagVision/TimestampOffset", 0.0); - -// private static int monoExposure = 2200; -// private static double monoGain = 17.5; -// private static double monoDenoise = 1.0; -// private static int colorExposure = 4500; -// private static double colorGain = 5.0; - -// public static LoggedTunableNumber[] pitchAdjustments = -// switch (Constants.getRobot()) { -// case DEVBOT -> -// new LoggedTunableNumber[] { -// new LoggedTunableNumber("Vision/PitchAdjust0", 0.0), -// new 
LoggedTunableNumber("Vision/PitchAdjust1", 0.0) -// }; -// case COMPBOT -> -// new LoggedTunableNumber[] { -// new LoggedTunableNumber("Vision/PitchAdjust0", 0.0), -// new LoggedTunableNumber("Vision/PitchAdjust1", 0.0), -// new LoggedTunableNumber("Vision/PitchAdjust2", 0.0), -// }; -// default -> new LoggedTunableNumber[] {}; -// }; -// public static CameraConfig[] cameras = -// switch (Constants.getRobot()) { -// case DEVBOT -> -// new CameraConfig[] { -// CameraConfig.builder() -// .pose( -// () -> -// new Pose3d( -// 0.254, -// 0.2032, -// 0.21113, -// new Rotation3d( -// 0.0, -// Units.degreesToRadians(-25.0 + pitchAdjustments[0].get()), -// Units.degreesToRadians(-20.0)))) -// .id("40265450") -// .width(1600) -// .height(1200) -// .exposure(monoExposure) -// .gain(monoGain) -// .stdDevFactor(1.0) -// .build(), -// CameraConfig.builder() -// .pose( -// () -> -// new Pose3d( -// 0.254, -// -0.2032, -// 0.21113, -// new Rotation3d( -// 0.0, -// Units.degreesToRadians(-25.0 + pitchAdjustments[1].get()), -// Units.degreesToRadians(20.0)))) -// .id("40265453") -// .width(1600) -// .height(1200) -// .exposure(monoExposure) -// .gain(monoGain) -// .stdDevFactor(1.0) -// .build() -// }; -// case COMPBOT -> -// new CameraConfig[] { -// // Front Left -// CameraConfig.builder() -// .pose( -// () -> -// new Pose3d( -// 0.2794, -// 0.2286, -// 0.21113, -// new Rotation3d( -// 0.0, -// Units.degreesToRadians(-25.0 + pitchAdjustments[0].get()), -// Units.degreesToRadians(-20.0)))) -// .id("40530395") -// .width(1600) -// .height(1200) -// .exposure(monoExposure) -// .gain(monoGain) -// .denoise(monoDenoise) -// .stdDevFactor(1.0) -// .build(), -// // Front Right -// CameraConfig.builder() -// .pose( -// () -> -// new Pose3d( -// 0.2794, -// -0.2286, -// 0.21113, -// new Rotation3d( -// 0.0, -// Units.degreesToRadians(-25.0 + pitchAdjustments[1].get()), -// Units.degreesToRadians(20.0)))) -// .id("40552081") -// .width(1600) -// .height(1200) -// .exposure(monoExposure) -// 
.gain(monoGain) -// .denoise(monoDenoise) -// .stdDevFactor(1.0) -// .build(), -// // Back Up -// CameraConfig.builder() -// .pose( -// () -> -// new Pose3d( -// Units.inchesToMeters(3), -// Units.inchesToMeters(-10), -// Units.inchesToMeters(25), -// new Rotation3d( -// 0.0, -// Units.degreesToRadians(25.0 + pitchAdjustments[2].get()), -// Units.degreesToRadians(170.0)))) -// .id("24737133") -// .width(1280) -// .height(960) -// .exposure(colorExposure) -// .gain(colorGain) -// .stdDevFactor(1.25) -// .build(), -// }; -// default -> new CameraConfig[] {}; -// }; - -// @Builder -// public record CameraConfig( -// Supplier pose, -// String id, -// int width, -// int height, -// int autoExposure, -// int exposure, -// double gain, -// double denoise, -// double stdDevFactor) {} - -// private VisionConstants() {} -// } diff --git a/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java b/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java deleted file mode 100644 index 7d2a7dc..0000000 --- a/src/main/java/frc/robot/subsystems/vision/FRC6995/Vision.java +++ /dev/null @@ -1,291 +0,0 @@ -// Copyright (c) 2024 FRC 6995 -// https://github.com/frc6995 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.subsystems.vision.FRC6995; - -import com.ctre.phoenix6.Utils; -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.MathUtil; -import edu.wpi.first.math.VecBuilder; -import edu.wpi.first.math.Vector; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.networktables.NetworkTableInstance; -import edu.wpi.first.networktables.StructPublisher; -import edu.wpi.first.wpilibj.RobotBase; -import edu.wpi.first.wpilibj.Timer; -import frc.robot.util.TriConsumer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; -import org.photonvision.PhotonCamera; -import org.photonvision.PhotonPoseEstimator; -import org.photonvision.PhotonPoseEstimator.PoseStrategy; -import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; -import org.photonvision.simulation.VisionSystemSim; -import org.photonvision.targeting.PhotonPipelineResult; - -public class Vision { - public interface VisionConsumer extends TriConsumer> {} - - private class Camera { - String name; - PhotonCamera camera; - PhotonPoseEstimator estimator; - PhotonCameraSim simCamera; - Pose3d estimatedPose = new Pose3d(); - StructPublisher rawPosePublisher; - - public Camera( - String name, - PhotonCamera camera, - PhotonPoseEstimator estimator, - PhotonCameraSim simCamera) { - this.name = name; - this.camera = camera; - this.estimator = estimator; - this.simCamera = simCamera; - var table = NetworkTableInstance.getDefault().getTable("Cameras").getSubTable(name); - rawPosePublisher = table.getStructTopic("rawPose", Pose3d.struct).publish(); - } - - public 
void setRawPose(Pose3d pose) { - estimatedPose = pose; - rawPosePublisher.accept(estimatedPose); - } - } - ; - - public class VisionConstants { - private static SimCameraProperties cameraProp = new SimCameraProperties(); - - static { - // A 640 x 480 camera with a 100 degree diagonal FOV. - cameraProp.setCalibration(1280, 900, Rotation2d.fromDegrees(100)); - // Approximate detection noise with average and standard deviation error in pixels. - cameraProp.setCalibError(0.12, 0.04); - // Set the camera image capture framerate (Note: this is limited by robot loop rate). - cameraProp.setFPS(20); - // The average and standard deviation in milliseconds of image data latency. - cameraProp.setAvgLatencyMs(35); - cameraProp.setLatencyStdDevMs(5); - } - - public static final SimCameraProperties SIM_CAMERA_PROPERTIES = cameraProp; - - public static final Map CAMERAS = - Map.of( - "OV9281-BL", - new Transform3d( - -(Units.inchesToMeters(14.25) - 0.102), - (Units.inchesToMeters(14.25) - 0.066), - Units.inchesToMeters(8.5), - new Rotation3d( - Units.degreesToRadians(0), - Units.degreesToRadians(-13.65), - Units.degreesToRadians(-170))), - "OV9281-BR", - new Transform3d( - -(Units.inchesToMeters(14.25) - 0.102), - -(Units.inchesToMeters(14.25) - 0.072), - Units.inchesToMeters(8.5), - new Rotation3d( - Units.degreesToRadians(0), - Units.degreesToRadians(-13.65), - Units.degreesToRadians(170))), - "OV9281-FR", - new Transform3d( - Units.inchesToMeters(14.25) - 0.102, - -(Units.inchesToMeters(14.25) - 0.112), - Units.inchesToMeters(8.5), - new Rotation3d( - Units.degreesToRadians(0), - Units.degreesToRadians(-13.65), - Units.degreesToRadians(10))), - "OV9281-FL", - new Transform3d( - (Units.inchesToMeters(14.25) - 0.102), - (Units.inchesToMeters(14.25) - 0.066), - Units.inchesToMeters(8.5), - new Rotation3d( - Units.degreesToRadians(0), - Units.degreesToRadians(-13.65), - Units.degreesToRadians(-10)))); - public static final AprilTagFieldLayout FIELD_LAYOUT = - 
AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); - } - - VisionSystemSim visionSim = new VisionSystemSim("main"); - private List m_cameras; - private List m_actualCameras; - private List m_simCameras; - private VisionConsumer addVisionMeasurement; - private Supplier getPose; - private double lastPoseResetTimestamp = 0; - - public void resetPose() { - lastPoseResetTimestamp = Timer.getFPGATimestamp(); - } - - public Vision(VisionConsumer addVisionMeasurement, Supplier getPose) { - if (RobotBase.isSimulation()) { - visionSim.addAprilTags(AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField)); - } - - this.addVisionMeasurement = addVisionMeasurement; - this.getPose = getPose; - m_cameras = new ArrayList<>(); - VisionConstants.CAMERAS - .entrySet() - .iterator() - .forEachRemaining( - (entry) -> { - var translation = entry.getValue(); - var cam = new PhotonCamera(entry.getKey()); - var estimator = - new PhotonPoseEstimator( - VisionConstants.FIELD_LAYOUT, - PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, - translation); - estimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); - PhotonCameraSim simCam = null; - if (RobotBase.isSimulation()) { - simCam = new PhotonCameraSim(cam, VisionConstants.SIM_CAMERA_PROPERTIES); - visionSim.addCamera(simCam, translation); - } - m_cameras.add(new Camera(entry.getKey(), cam, estimator, simCam)); - }); - } - - public boolean filterPose(Pose3d robotPose) { - if (Math.abs(robotPose.getZ()) > 0.25) { - return false; - } - return true; - } - - private boolean isReef(int id) { - return (id >= 6 && id <= 11) || (id >= 17 && id <= 22); - } - - private void handleResult(Camera camera, PhotonPipelineResult result) { - var estimator = camera.estimator; - estimator.setReferencePose(getPose.get()); - - var robotPoseOpt = estimator.update(result); - if (robotPoseOpt.isEmpty()) { - return; - } - camera.setRawPose(robotPoseOpt.get().estimatedPose); - var pose = robotPoseOpt.get(); - if (pose.timestampSeconds < 
lastPoseResetTimestamp) { - return; - } - if (!filterPose(pose.estimatedPose)) { - return; - } - double xConfidence; - double yConfidence; - double angleConfidence; - if (pose.targetsUsed.size() == 0) { - return; // should never happen but measurement shouldn't be trusted - } - double closestDistance = 1000; - double avgDistance = 0; - double closeEnoughTgts = 0; - boolean ignore = false; - - final double border = 15; - for (var tgt : pose.targetsUsed) { - - // rule out - for (var corner : tgt.detectedCorners) { - if (MathUtil.isNear(0, corner.x, border) - || MathUtil.isNear( - VisionConstants.SIM_CAMERA_PROPERTIES.getResWidth(), corner.x, border) - || MathUtil.isNear(0, corner.y, border) - || MathUtil.isNear( - VisionConstants.SIM_CAMERA_PROPERTIES.getResHeight(), corner.y, border)) { - return; - } - } - double tdist = tgt.getBestCameraToTarget().getTranslation().getNorm(); - if (pose.targetsUsed.size() < 2) { - var trustedDistance = - isReef(pose.targetsUsed.get(0).fiducialId) - ? Units.feetToMeters(6) - : Units.feetToMeters(6); - if (tdist > trustedDistance) { - return; - - } else { - closeEnoughTgts = 1; - } - } - avgDistance += tdist; - if (tdist < closestDistance) { - closestDistance = tdist; - } - if (pose.targetsUsed.size() >= 2) { - var trustedDistance = - isReef(pose.targetsUsed.get(0).fiducialId) - ? 
Units.feetToMeters(10) - : Units.feetToMeters(8); - - if (tdist <= trustedDistance) { - closeEnoughTgts++; - } - } - // ignore |= (tgt.getFiducialId() == 13); - // ignore |= (tgt.getFiducialId() == 14); - } - if (ignore) { - return; - } - double distance = avgDistance / pose.targetsUsed.size(); - // SmartDashboard.putNumber(camera.name + "/distance", distance); - if (closeEnoughTgts == 0) { - return; - } - if (pose.targetsUsed.size() < 2) { - xConfidence = 0.5 * distance / 4.0; - yConfidence = 0.5 * distance / 4.0; - angleConfidence = 1; - } else { - xConfidence = 0.02 * distance * distance; - yConfidence = 0.02 * distance * distance; - angleConfidence = 0.3 * distance * distance; - } - this.addVisionMeasurement.accept( - pose.estimatedPose.toPose2d(), - Utils.fpgaToCurrentTime(pose.timestampSeconds), - VecBuilder.fill(xConfidence, yConfidence, angleConfidence)); - } - - public void update() { - visionSim.update(getPose.get()); - for (Camera camera : m_cameras) { - for (PhotonPipelineResult result : camera.camera.getAllUnreadResults()) { - handleResult(camera, result); - } - } - } -} diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 68f4594..908cf9c 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,203 +1,442 @@ // Copyright (c) 2024-2026 Az-FIRST // http://github.com/AZ-First -// Copyright (c) 2024-2025 FRC 2486 -// http://github.com/Coconuts2486-FRC -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage // -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. 
+// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.vision; import static frc.robot.Constants.VisionConstants.*; +import edu.wpi.first.math.Matrix; import edu.wpi.first.math.VecBuilder; -import edu.wpi.first.math.Vector; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.wpilibj.Alert; -import edu.wpi.first.wpilibj.Alert.AlertType; import frc.robot.Constants; import frc.robot.FieldConstants; +import frc.robot.subsystems.drive.Drive; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; -import frc.robot.util.TriConsumer; import frc.robot.util.VirtualSubsystem; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Optional; +import java.util.OptionalDouble; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import org.littletonrobotics.junction.Logger; public class Vision extends VirtualSubsystem { - public interface VisionConsumer extends TriConsumer> {} + @FunctionalInterface + public interface VisionConsumer { + void accept(Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix stdDevs); + } + + private 
record TimedEstimate(Pose2d pose, double ts, Matrix stdDevs) {} private final VisionConsumer consumer; + private final Drive drive; + private long lastSeenPoseResetEpoch = -1; + private final VisionIO[] io; private final VisionIOInputsAutoLogged[] inputs; - private final Alert[] disconnectedAlerts; - // Camera configs (names, transforms, stddev multipliers, sim props) - private final Constants.Cameras.CameraConfig[] camConfigs; + private final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; + + // --- Per-camera monotonic gate --- + private final double[] lastAcceptedTsPerCam; + + // --- Pose reset gate --- + private volatile double lastPoseResetTimestamp = Double.NEGATIVE_INFINITY; + + // --- Smoothing buffer (recent fused estimates) --- + private final ArrayDeque fusedBuffer = new ArrayDeque<>(); + private final double smoothWindowSec = 0.25; + private final int smoothMaxSize = 12; - // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- - // Summary buffers - private final ArrayList allTagPoses = new ArrayList<>(32); - private final ArrayList allRobotPoses = new ArrayList<>(64); - private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // --- Trusted tags configuration (swappable per event/field) --- + private final AtomicReference> trustedTags = new AtomicReference<>(Set.of()); + private volatile boolean requireTrustedTag = false; - // Per-camera buffers (reused each camera) - private final ArrayList tagPoses = new ArrayList<>(16); - private final ArrayList robotPoses = new ArrayList<>(32); - private final ArrayList robotPosesAccepted = new ArrayList<>(32); - private final ArrayList robotPosesRejected = new ArrayList<>(32); + // Scale factors applied based on fraction of trusted tags in an observation + private volatile double trustedTagStdDevScale = 0.70; // < 1 => more trusted + private volatile double 
untrustedTagStdDevScale = 1.40; // > 1 => less trusted - public Vision(VisionConsumer consumer, VisionIO... io) { + // --- Optional 254-style yaw gate for single-tag --- + private volatile boolean enableSingleTagYawGate = true; + private volatile double yawGateLookbackSec = 0.30; + private volatile double yawGateLimitRadPerSec = 5.0; + + public Vision(Drive drive, VisionConsumer consumer, VisionIO... io) { + this.drive = drive; this.consumer = consumer; this.io = io; - this.camConfigs = Constants.Cameras.ALL; - - // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { inputs[i] = new VisionIOInputsAutoLogged(); } - // Initialize disconnected alerts - this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < io.length; i++) { - disconnectedAlerts[i] = - new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); - } + this.lastAcceptedTsPerCam = new double[io.length]; + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); - // Log robot-to-camera transforms from the new camera config array - // (Only log as many as exist in BOTH configs and IOs) + // Log robot->camera transforms if available int n = Math.min(camConfigs.length, io.length); for (int i = 0; i < n; i++) { Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); } } - /** Returns the X angle to the best target, useful for simple servoing. */ - public Rotation2d getTargetX(int cameraIndex) { - return inputs[cameraIndex].latestTargetObservation.tx(); + // -------- Runtime configuration hooks -------- + + /** Call when you reset odometry (e.g. auto start, manual reset, etc). */ + public void resetPoseGate(double fpgaNowSeconds) { + lastPoseResetTimestamp = fpgaNowSeconds; + fusedBuffer.clear(); + java.util.Arrays.fill(lastAcceptedTsPerCam, Double.NEGATIVE_INFINITY); + } + + /** Swap trusted tag set per event/field without redeploy. 
*/ + public void setTrustedTags(Set tags) { + trustedTags.set(Set.copyOf(tags)); + } + + public void setRequireTrustedTag(boolean require) { + requireTrustedTag = require; + } + + public void setTrustedTagStdDevScales(double trustedScale, double untrustedScale) { + trustedTagStdDevScale = trustedScale; + untrustedTagStdDevScale = untrustedScale; + } + + public void setSingleTagYawGate(boolean enabled, double lookbackSec, double limitRadPerSec) { + enableSingleTagYawGate = enabled; + yawGateLookbackSec = lookbackSec; + yawGateLimitRadPerSec = limitRadPerSec; } + // -------- Core periodic -------- + @Override public void rbsiPeriodic() { - // 1) Update inputs + process inputs first (keeps AK logs consistent) + + long epoch = drive.getPoseResetEpoch(); + if (epoch != lastSeenPoseResetEpoch) { + lastSeenPoseResetEpoch = epoch; + resetPoseGate(drive.getLastPoseResetTimestamp()); // your existing method + Logger.recordOutput("Vision/PoseGateResetFromDrive", true); + } else { + Logger.recordOutput("Vision/PoseGateResetFromDrive", false); + } + + // 1) Update/log camera inputs for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); - disconnectedAlerts[i].set(!inputs[i].connected); - } - - // 2) Clear summary buffers (reused) - allTagPoses.clear(); - allRobotPoses.clear(); - allRobotPosesAccepted.clear(); - allRobotPosesRejected.clear(); - - // 3) Process each camera - for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - // Clear per-camera buffers - tagPoses.clear(); - robotPoses.clear(); - robotPosesAccepted.clear(); - robotPosesRejected.clear(); - - // Add tag poses from ids - for (int tagId : inputs[cameraIndex].tagIds) { - var tagPose = FieldConstants.aprilTagLayout.getTagPose(tagId); - if (tagPose.isPresent()) { - tagPoses.add(tagPose.get()); - } - } + } - // Loop over pose observations - for (var observation : inputs[cameraIndex].poseObservations) { - // Reject rules - boolean 
rejectPose = - observation.tagCount() == 0 - || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) - || Math.abs(observation.pose().getZ()) > maxZError - || observation.pose().getX() < 0.0 - || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() - || observation.pose().getY() < 0.0 - || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - - // Log pose buckets - robotPoses.add(observation.pose()); - if (rejectPose) { - robotPosesRejected.add(observation.pose()); - } else { - robotPosesAccepted.add(observation.pose()); - } + // 2) Pick one best accepted estimate per camera for this loop + final ArrayList perCamAccepted = new ArrayList<>(io.length); - if (rejectPose) { + for (int cam = 0; cam < io.length; cam++) { + int seen = 0; + int accepted = 0; + int rejected = 0; + + TimedEstimate best = null; + double bestTrustScale = Double.NaN; + int bestTrustedCount = 0; + int bestTagCount = 0; + + for (var obs : inputs[cam].poseObservations) { + seen++; + + GateResult gate = passesHardGatesAndYawGate(cam, obs); + if (!gate.accepted) { + rejected++; continue; } - // Standard deviations - double stdDevFactor = - Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); - double linearStdDev = linearStdDevBaseline * stdDevFactor; - double angularStdDev = angularStdDevBaseline * stdDevFactor; - - if (observation.type() == PoseObservationType.MEGATAG_2) { - linearStdDev *= linearStdDevMegatag2Factor; - angularStdDev *= angularStdDevMegatag2Factor; + BuiltEstimate built = buildEstimate(cam, obs); + if (built == null) { + rejected++; + continue; } - // Apply per-camera multiplier from CameraConfig - if (cameraIndex < camConfigs.length) { - double k = camConfigs[cameraIndex].stdDevFactor(); - linearStdDev *= k; - angularStdDev *= k; + TimedEstimate est = built.estimate; + if (best == null || isBetter(est, best)) { + best = est; + bestTrustScale = built.trustScale; + bestTrustedCount = 
built.trustedCount; + bestTagCount = obs.tagCount(); } + } - consumer.accept( - observation.pose().toPose2d(), - observation.timestamp(), - VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + if (best != null) { + accepted++; + lastAcceptedTsPerCam[cam] = best.ts; + perCamAccepted.add(best); + + Logger.recordOutput("Vision/Camera" + cam + "/InjectedPose2d", best.pose); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedTimestamp", best.ts); + Logger.recordOutput("Vision/Camera" + cam + "/InjectedStdDevs", best.stdDevs); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustScale", bestTrustScale); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTrustedCount", bestTrustedCount); + Logger.recordOutput("Vision/Camera" + cam + "/LastAcceptedTagCount", bestTagCount); } - // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPoses", robotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesAccepted", - robotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Camera" + cameraIndex + "/RobotPosesRejected", - robotPosesRejected.toArray(new Pose3d[0])); - - // Aggregate summary - allTagPoses.addAll(tagPoses); - allRobotPoses.addAll(robotPoses); - allRobotPosesAccepted.addAll(robotPosesAccepted); - allRobotPosesRejected.addAll(robotPosesRejected); - } - - // 4) Summary logs - Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); - Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesAccepted", allRobotPosesAccepted.toArray(new Pose3d[0])); - Logger.recordOutput( - "Vision/Summary/RobotPosesRejected", allRobotPosesRejected.toArray(new Pose3d[0])); - } - - // 
@FunctionalInterface - // public static interface VisionConsumer { - // public void accept( - // Pose2d visionRobotPoseMeters, - // double timestampSeconds, - // Matrix visionMeasurementStdDevs); - // } + Logger.recordOutput("Vision/Camera" + cam + "/ObsSeen", seen); + Logger.recordOutput("Vision/Camera" + cam + "/ObsAccepted", accepted); + Logger.recordOutput("Vision/Camera" + cam + "/ObsRejected", rejected); + } + + if (perCamAccepted.isEmpty()) return; + + // 3) Fusion time is the newest timestamp among accepted per-camera samples + double tF = perCamAccepted.stream().mapToDouble(e -> e.ts).max().orElse(Double.NaN); + if (!Double.isFinite(tF)) return; + + // 4) Time-align camera estimates to tF using odometry buffer, then inverse-variance fuse + TimedEstimate fused = fuseAtTime(perCamAccepted, tF); + if (fused == null) return; + + // 5) Smooth by fusing recent fused estimates (also aligned to tF) + pushFused(fused); + TimedEstimate smoothed = smoothAtTime(tF); + if (smoothed == null) return; + + // 6) Inject + consumer.accept(smoothed.pose, smoothed.ts, smoothed.stdDevs); + + Logger.recordOutput("Vision/FusedPose", fused.pose); + Logger.recordOutput("Vision/SmoothedPose", smoothed.pose); + Logger.recordOutput("Vision/FusedTimestamp", tF); + } + + // -------- Gating + scoring -------- + + private static final class GateResult { + final boolean accepted; + + GateResult(boolean accepted) { + this.accepted = accepted; + } + } + + private GateResult passesHardGatesAndYawGate(int cam, VisionIO.PoseObservation obs) { + final double ts = obs.timestamp(); + + // Monotonic per-camera time + if (ts <= lastAcceptedTsPerCam[cam]) return new GateResult(false); + + // Reject anything older than last pose reset + if (ts < lastPoseResetTimestamp) return new GateResult(false); + + // Must have tags + if (obs.tagCount() <= 0) return new GateResult(false); + + // Single-tag ambiguity gate + if (obs.tagCount() == 1 && obs.ambiguity() > maxAmbiguity) return new GateResult(false); + + 
// Z sanity + if (Math.abs(obs.pose().getZ()) > maxZError) return new GateResult(false); + + // Field bounds + Pose3d p = obs.pose(); + if (p.getX() < 0.0 || p.getX() > FieldConstants.aprilTagLayout.getFieldLength()) + return new GateResult(false); + if (p.getY() < 0.0 || p.getY() > FieldConstants.aprilTagLayout.getFieldWidth()) + return new GateResult(false); + + // Optional 254-style yaw gate: only meaningful for single-tag + if (enableSingleTagYawGate && obs.tagCount() == 1) { + OptionalDouble maxYaw = drive.getMaxAbsYawRateRadPerSec(ts - yawGateLookbackSec, ts); + if (maxYaw.isPresent() && maxYaw.getAsDouble() > yawGateLimitRadPerSec) { + return new GateResult(false); + } + } + + return new GateResult(true); + } + + private static final class BuiltEstimate { + final TimedEstimate estimate; + final double trustScale; + final int trustedCount; + + BuiltEstimate(TimedEstimate estimate, double trustScale, int trustedCount) { + this.estimate = estimate; + this.trustScale = trustScale; + this.trustedCount = trustedCount; + } + } + + private BuiltEstimate buildEstimate(int cam, VisionIO.PoseObservation obs) { + // Base uncertainty grows with distance^2 / tagCount (your 2486-style) + final double tagCount = Math.max(1, obs.tagCount()); + final double avgDist = Math.max(0.0, obs.averageTagDistance()); + final double distFactor = (avgDist * avgDist) / tagCount; + + final double camFactor = (cam < camConfigs.length) ? 
camConfigs[cam].stdDevFactor() : 1.0; + + double linearStdDev = linearStdDevBaseline * camFactor * distFactor; + double angularStdDev = angularStdDevBaseline * camFactor * distFactor; + + // MegaTag2 bonus if applicable + if (obs.type() == PoseObservationType.MEGATAG_2) { + linearStdDev *= linearStdDevMegatag2Factor; + angularStdDev *= angularStdDevMegatag2Factor; + } + + // Trusted tag blending + final Set kTrusted = trustedTags.get(); + int trustedCount = 0; + for (int id : obs.usedTagIds()) { + if (kTrusted.contains(id)) trustedCount++; + } + + if (requireTrustedTag && trustedCount == 0) { + return null; + } + + final int usedCount = obs.usedTagIds().size(); + final double fracTrusted = (usedCount == 0) ? 0.0 : ((double) trustedCount / usedCount); + + final double trustScale = + untrustedTagStdDevScale + fracTrusted * (trustedTagStdDevScale - untrustedTagStdDevScale); + + linearStdDev *= trustScale; + angularStdDev *= trustScale; + + // Output logs for tuning + Logger.recordOutput("Vision/Camera" + cam + "/InjectedFracTrusted", fracTrusted); + + return new BuiltEstimate( + new TimedEstimate( + obs.pose().toPose2d(), + obs.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)), + trustScale, + trustedCount); + } + + private boolean isBetter(TimedEstimate a, TimedEstimate b) { + // Lower trace of stddev vector (x+y+theta) is better + double ta = a.stdDevs.get(0, 0) + a.stdDevs.get(1, 0) + a.stdDevs.get(2, 0); + double tb = b.stdDevs.get(0, 0) + b.stdDevs.get(1, 0) + b.stdDevs.get(2, 0); + return ta < tb; + } + + // -------- Time alignment + fusion -------- + + private TimedEstimate fuseAtTime(ArrayList estimates, double tF) { + final ArrayList aligned = new ArrayList<>(estimates.size()); + for (var e : estimates) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) return null; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + return inverseVarianceFuse(aligned, tF); + } + + private Pose2d 
timeAlignPose(Pose2d visionPoseAtTs, double ts, double tF) { + Optional odomAtTsOpt = drive.getPoseAtTime(ts); + Optional odomAtTFOpt = drive.getPoseAtTime(tF); + if (odomAtTsOpt.isEmpty() || odomAtTFOpt.isEmpty()) return null; + + Pose2d odomAtTs = odomAtTsOpt.get(); + Pose2d odomAtTF = odomAtTFOpt.get(); + + // Transform that takes odomAtTs -> odomAtTF + Transform2d ts_T_tf = odomAtTF.minus(odomAtTs); + + // Apply same motion to vision pose to bring it forward + return visionPoseAtTs.transformBy(ts_T_tf); + } + + private TimedEstimate inverseVarianceFuse(ArrayList alignedAtTF, double tF) { + if (alignedAtTF.isEmpty()) return null; + if (alignedAtTF.size() == 1) return alignedAtTF.get(0); + + double sumWx = 0, sumWy = 0, sumWth = 0; + double sumX = 0, sumY = 0; + double sumCos = 0, sumSin = 0; + + for (var e : alignedAtTF) { + double sx = e.stdDevs.get(0, 0); + double sy = e.stdDevs.get(1, 0); + double sth = e.stdDevs.get(2, 0); + + double vx = sx * sx; + double vy = sy * sy; + double vth = sth * sth; + + double wx = 1.0 / vx; + double wy = 1.0 / vy; + double wth = 1.0 / vth; + + sumWx += wx; + sumWy += wy; + sumWth += wth; + + sumX += e.pose.getX() * wx; + sumY += e.pose.getY() * wy; + + sumCos += e.pose.getRotation().getCos() * wth; + sumSin += e.pose.getRotation().getSin() * wth; + } + + Pose2d fused = new Pose2d(sumX / sumWx, sumY / sumWy, new Rotation2d(sumCos, sumSin)); + + Matrix fusedStd = + VecBuilder.fill(Math.sqrt(1.0 / sumWx), Math.sqrt(1.0 / sumWy), Math.sqrt(1.0 / sumWth)); + + return new TimedEstimate(fused, tF, fusedStd); + } + + // -------- Smoothing buffer -------- + + private void pushFused(TimedEstimate fused) { + fusedBuffer.addLast(fused); + + while (fusedBuffer.size() > smoothMaxSize) { + fusedBuffer.removeFirst(); + } + + // Trim by time window relative to newest + while (!fusedBuffer.isEmpty() && fused.ts - fusedBuffer.peekFirst().ts > smoothWindowSec) { + fusedBuffer.removeFirst(); + } + } + + private TimedEstimate smoothAtTime(double tF) 
{ + if (fusedBuffer.isEmpty()) return null; + if (fusedBuffer.size() == 1) return fusedBuffer.peekLast(); + + final ArrayList aligned = new ArrayList<>(fusedBuffer.size()); + for (var e : fusedBuffer) { + Pose2d alignedPose = timeAlignPose(e.pose, e.ts, tF); + if (alignedPose == null) continue; + aligned.add(new TimedEstimate(alignedPose, tF, e.stdDevs)); + } + + if (aligned.isEmpty()) return fusedBuffer.peekLast(); + return inverseVarianceFuse(aligned, tF); + } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index c9ed834..fad294d 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -11,35 +11,44 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; +import java.util.Set; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { @AutoLog - public static class VisionIOInputs { + class VisionIOInputs { public boolean connected = false; + + /** Latest "servo to target" observation (optional) */ public TargetObservation latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + + /** Pose observations generated by the camera pipeline (each has its own timestamp) */ public PoseObservation[] poseObservations = new PoseObservation[0]; + + /** Union of tag IDs seen this loop (for logging/UI only) */ public int[] tagIds = new int[0]; } /** Represents the angle to a simple target, not used for pose estimation. */ - public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + record TargetObservation(Rotation2d tx, Rotation2d ty) {} /** Represents a robot pose sample used for pose estimation. 
*/ - public static record PoseObservation( - double timestamp, - Pose3d pose, - double ambiguity, + record PoseObservation( + double timestamp, // camera capture time (seconds, same timebase as FPGA) + Pose3d pose, // field->robot pose + double ambiguity, // single-tag ambiguity if available int tagCount, double averageTagDistance, - PoseObservationType type) {} + PoseObservationType type, + Set usedTagIds // immutable per observation + ) {} - public static enum PoseObservationType { + enum PoseObservationType { MEGATAG_1, MEGATAG_2, PHOTONVISION } - public default void updateInputs(VisionIOInputs inputs) {} + default void updateInputs(VisionIOInputs inputs) {} } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index a4b1eaf..07c83c7 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -96,7 +96,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_1)); + PoseObservationType.MEGATAG_1, + + // Visible Tag IDs + tagIds)); } for (var rawSample : megatag2Subscriber.readQueue()) { if (rawSample.value.length == 0) continue; @@ -121,7 +124,10 @@ public void updateInputs(VisionIOInputs inputs) { rawSample.value[9], // Observation type - PoseObservationType.MEGATAG_2)); + PoseObservationType.MEGATAG_2, + + // Visible Tag IDs + tagIds)); } // Save pose observations to inputs object diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java index b3e1baf..14a2867 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVision.java @@ -19,7 +19,7 @@ import java.util.Set; import org.photonvision.PhotonCamera; -/** IO implementation for real 
PhotonVision hardware. */ +/** IO implementation for real PhotonVision hardware (pose already solved by PV). */ public class VisionIOPhotonVision implements VisionIO { protected final PhotonCamera camera; protected final Transform3d robotToCamera; @@ -28,10 +28,10 @@ public class VisionIOPhotonVision implements VisionIO { * Creates a new VisionIOPhotonVision. * * @param name The configured name of the camera. - * @param rotationSupplier The 3D position of the camera relative to the robot. + * @param robotToCamera The 3D position of the camera relative to the robot. */ public VisionIOPhotonVision(String name, Transform3d robotToCamera) { - camera = new PhotonCamera(name); + this.camera = new PhotonCamera(name); this.robotToCamera = robotToCamera; } @@ -42,33 +42,30 @@ public void updateInputs(VisionIOInputs inputs) { // Cap the number of unread results processed per loop final int kMaxUnread = 5; - // Use HashSet/ArrayList to avoid LinkedList churn - Set tagIds = new HashSet<>(); - ArrayList poseObservations = new ArrayList<>(kMaxUnread); + final Set unionTagIds = new HashSet<>(); + final ArrayList poseObservations = new ArrayList<>(kMaxUnread); + + double newestTargetTs = Double.NEGATIVE_INFINITY; + Rotation2d bestYaw = Rotation2d.kZero; + Rotation2d bestPitch = Rotation2d.kZero; int processed = 0; for (var result : camera.getAllUnreadResults()) { // Hard cap - if (processed++ >= kMaxUnread) { - break; - } + if (processed++ >= kMaxUnread) break; + + final double ts = result.getTimestampSeconds(); - // Update latest target observation - if (result.hasTargets()) { - inputs.latestTargetObservation = - new TargetObservation( - Rotation2d.fromDegrees(result.getBestTarget().getYaw()), - Rotation2d.fromDegrees(result.getBestTarget().getPitch())); - } else { - inputs.latestTargetObservation = new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); + if (result.hasTargets() && ts >= newestTargetTs) { + newestTargetTs = ts; + bestYaw = 
Rotation2d.fromDegrees(result.getBestTarget().getYaw()); + bestPitch = Rotation2d.fromDegrees(result.getBestTarget().getPitch()); } - // Add pose observation - if (result.multitagResult.isPresent()) { // Multitag result - var multitagResult = result.multitagResult.get(); + if (result.multitagResult.isPresent()) { + var multitag = result.multitagResult.get(); - // Calculate robot pose - Transform3d fieldToCamera = multitagResult.estimatedPose.best; + Transform3d fieldToCamera = multitag.estimatedPose.best; Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); @@ -77,64 +74,62 @@ public void updateInputs(VisionIOInputs inputs) { for (var target : result.targets) { totalTagDistance += target.bestCameraToTarget.getTranslation().getNorm(); } - - // Add tag IDs - tagIds.addAll(multitagResult.fiducialIDsUsed); - - // Guard against divide-by-zero if targets is empty (defensive) double avgTagDistance = result.targets.isEmpty() ? 
0.0 : (totalTagDistance / result.targets.size()); - // Add observation + Set used = new HashSet<>(); + for (int id : multitag.fiducialIDsUsed) { + used.add(id); + unionTagIds.add(id); + } + poseObservations.add( new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - multitagResult.estimatedPose.ambiguity, // Ambiguity - multitagResult.fiducialIDsUsed.size(), // Tag count - avgTagDistance, // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - - } else if (!result.targets.isEmpty()) { // Single tag result + ts, + robotPose, + multitag.estimatedPose.ambiguity, + multitag.fiducialIDsUsed.size(), + avgTagDistance, + PoseObservationType.PHOTONVISION, + Set.copyOf(used))); + + } else if (!result.targets.isEmpty()) { var target = result.targets.get(0); - // Calculate robot pose - var tagPose = kAprilTagLayout.getTagPose(target.fiducialId); - if (tagPose.isPresent()) { - Transform3d fieldToTarget = - new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); - Transform3d cameraToTarget = target.bestCameraToTarget; - Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); - Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); - Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); - - // Add tag ID - tagIds.add((short) target.fiducialId); - - // Add observation - poseObservations.add( - new PoseObservation( - result.getTimestampSeconds(), // Timestamp - robotPose, // 3D pose estimate - target.poseAmbiguity, // Ambiguity - 1, // Tag count - cameraToTarget.getTranslation().getNorm(), // Average tag distance - PoseObservationType.PHOTONVISION)); // Observation type - } + var tagPose = aprilTagLayout.getTagPose(target.fiducialId); + if (tagPose.isEmpty()) continue; + + Transform3d fieldToTarget = + new Transform3d(tagPose.get().getTranslation(), tagPose.get().getRotation()); + Transform3d cameraToTarget = 
target.bestCameraToTarget; + + Transform3d fieldToCamera = fieldToTarget.plus(cameraToTarget.inverse()); + Transform3d fieldToRobot = fieldToCamera.plus(robotToCamera.inverse()); + Pose3d robotPose = new Pose3d(fieldToRobot.getTranslation(), fieldToRobot.getRotation()); + + unionTagIds.add(target.fiducialId); + + poseObservations.add( + new PoseObservation( + ts, + robotPose, + target.poseAmbiguity, + 1, + cameraToTarget.getTranslation().getNorm(), + PoseObservationType.PHOTONVISION, + Set.of(target.fiducialId))); } } - // Save pose observations to inputs object - inputs.poseObservations = new PoseObservation[poseObservations.size()]; - for (int i = 0; i < poseObservations.size(); i++) { - inputs.poseObservations[i] = poseObservations.get(i); - } + inputs.latestTargetObservation = + (newestTargetTs > Double.NEGATIVE_INFINITY) + ? new TargetObservation(bestYaw, bestPitch) + : new TargetObservation(Rotation2d.kZero, Rotation2d.kZero); - // Save tag IDs to inputs objects - inputs.tagIds = new int[tagIds.size()]; + inputs.poseObservations = poseObservations.toArray(new PoseObservation[0]); + + inputs.tagIds = new int[unionTagIds.size()]; int i = 0; - for (int id : tagIds) { - inputs.tagIds[i++] = id; - } + for (int id : unionTagIds) inputs.tagIds[i++] = id; } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java index fd4525b..4e5761b 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOPhotonVisionSim.java @@ -22,34 +22,50 @@ public class VisionIOPhotonVisionSim extends VisionIOPhotonVision { private static VisionSystemSim visionSim; private final Supplier poseSupplier; + + @SuppressWarnings("unused") private final PhotonCameraSim cameraSim; /** * Creates a new VisionIOPhotonVisionSim. * - * @param name The name of the camera. 
- * @param poseSupplier Supplier for the robot pose to use in simulation. + * @param name The name of the camera (PhotonVision camera name). + * @param robotToCamera Camera pose relative to robot frame. + * @param poseSupplier Supplier for the robot pose (field->robot) to use in simulation. */ public VisionIOPhotonVisionSim( String name, Transform3d robotToCamera, Supplier poseSupplier) { super(name, robotToCamera); this.poseSupplier = poseSupplier; - // Initialize vision sim + // Initialize VisionSystemSim once for all cameras if (visionSim == null) { visionSim = new VisionSystemSim("main"); visionSim.addAprilTags(FieldConstants.aprilTagLayout); } - // Add sim camera + // Camera properties: + // - If you have per-camera SimCameraProperties in Constants, pass them here instead. + // - Otherwise keep the default and tune later. var cameraProperties = new SimCameraProperties(); + + // Recommended defaults (feel free to tune) + // cameraProperties.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); + // cameraProperties.setFPS(20); + // cameraProperties.setAvgLatencyMs(35); + // cameraProperties.setLatencyStdDevMs(5); + cameraSim = new PhotonCameraSim(camera, cameraProperties); visionSim.addCamera(cameraSim, robotToCamera); } @Override public void updateInputs(VisionIOInputs inputs) { + // NOTE: This updates the sim world every time a sim camera is polled. + // That's fine (fast enough), but if you want “update once per loop,” see note below. 
visionSim.update(poseSupplier.get()); + + // Then pull results like normal (and emit PoseObservation + usedTagIds sets) super.updateInputs(inputs); } } diff --git a/src/main/java/frc/robot/util/FieldConstants.java b/src/main/java/frc/robot/util/FieldConstants.java deleted file mode 100644 index 42de6a5..0000000 --- a/src/main/java/frc/robot/util/FieldConstants.java +++ /dev/null @@ -1,265 +0,0 @@ -// Copyright (c) 2025 FRC 6328 -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file at -// the root directory of this project. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.*; -import edu.wpi.first.math.util.Units; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Contains various field dimensions and useful reference points. All units are in meters and poses - * have a blue alliance origin. - */ -public class FieldConstants { - public static final double fieldLength = Units.inchesToMeters(690.876); - public static final double fieldWidth = Units.inchesToMeters(317); - public static final double startingLineX = - Units.inchesToMeters(299.438); // Measured from the inside of starting line - - public static class Processor { - public static final Pose2d centerFace = - new Pose2d(Units.inchesToMeters(235.726), 0, Rotation2d.fromDegrees(90)); - } - - public static class Barge { - public static final Translation2d farCage = - new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(286.779)); - public static final Translation2d middleCage = - new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(242.855)); - public static final Translation2d closeCage = - new Translation2d(Units.inchesToMeters(345.428), Units.inchesToMeters(199.947)); - - // Measured from floor to bottom of cage - public static final double deepHeight = Units.inchesToMeters(3.125); - public static 
final double shallowHeight = Units.inchesToMeters(30.125); - } - - public static class CoralStation { - public static final Pose2d leftCenterFace = - new Pose2d( - Units.inchesToMeters(33.526), - Units.inchesToMeters(291.176), - Rotation2d.fromDegrees(90 - 144.011)); - public static final Pose2d rightCenterFace = - new Pose2d( - Units.inchesToMeters(33.526), - Units.inchesToMeters(25.824), - Rotation2d.fromDegrees(144.011 - 90)); - } - - @SuppressWarnings("unchecked") - public static class Reef { - public static final Translation2d center = - new Translation2d(Units.inchesToMeters(176.746), Units.inchesToMeters(158.501)); - public static final double faceToZoneLine = - Units.inchesToMeters(12); // Side of the reef to the inside of the reef zone line - - public static final Pose2d[] centerFaces = - new Pose2d[6]; // Starting facing the driver station in clockwise order - public static final List> branchPositions = - new ArrayList<>(); // Starting at the right branch facing the driver station in - // clockwise - public static final List> branchTipPositions = - new ArrayList<>(); // Starting at the right branch facing the driver station in - - // clockwise - - static { - // Initialize faces - centerFaces[0] = - new Pose2d( - Units.inchesToMeters(144.003), - Units.inchesToMeters(158.500), - Rotation2d.fromDegrees(180)); - centerFaces[1] = - new Pose2d( - Units.inchesToMeters(160.373), - Units.inchesToMeters(186.857), - Rotation2d.fromDegrees(120)); - centerFaces[2] = - new Pose2d( - Units.inchesToMeters(193.116), - Units.inchesToMeters(186.858), - Rotation2d.fromDegrees(60)); - centerFaces[3] = - new Pose2d( - Units.inchesToMeters(209.489), - Units.inchesToMeters(158.502), - Rotation2d.fromDegrees(0)); - centerFaces[4] = - new Pose2d( - Units.inchesToMeters(193.118), - Units.inchesToMeters(130.145), - Rotation2d.fromDegrees(-60)); - centerFaces[5] = - new Pose2d( - Units.inchesToMeters(160.375), - Units.inchesToMeters(130.144), - Rotation2d.fromDegrees(-120)); - - // 
Initialize branch positions - for (int face = 0; face < 6; face++) { - Map fillRight = new HashMap<>(); - Map fillLeft = new HashMap<>(); - for (var level : ReefHeight.values()) { - Pose2d poseDirection = new Pose2d(center, Rotation2d.fromDegrees(180 - (60 * face))); - double adjustX = Units.inchesToMeters(30.738); - double adjustY = Units.inchesToMeters(6.469); - - fillRight.put( - level, - new Pose3d( - new Translation3d( - poseDirection - .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) - .getX(), - poseDirection - .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) - .getY(), - level.height), - new Rotation3d( - 0, - Units.degreesToRadians(level.pitch), - poseDirection.getRotation().getRadians()))); - fillLeft.put( - level, - new Pose3d( - new Translation3d( - poseDirection - .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) - .getX(), - poseDirection - .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) - .getY(), - level.height), - new Rotation3d( - 0, - Units.degreesToRadians(level.pitch), - poseDirection.getRotation().getRadians()))); - } - branchPositions.add(fillRight); - branchPositions.add(fillLeft); - } - - // Mirror for red alliance - for (Map blueBranch : branchPositions.toArray(new Map[0])) { - Map redBranch = new HashMap<>(); - for (Map.Entry entry : blueBranch.entrySet()) { - Pose3d bluePose = entry.getValue(); - redBranch.put( - entry.getKey(), - new Pose3d( - Util.flipRedBlue( - new Translation3d(bluePose.getX(), bluePose.getY(), bluePose.getZ())), - new Rotation3d( - 0, -bluePose.getRotation().getY(), -bluePose.getRotation().getZ()))); - } - branchPositions.add(redBranch); - } - for (int face = 0; face < 6; face++) { - Map fillRight = new HashMap<>(); - Map fillLeft = new HashMap<>(); - for (var level : ReefHeight.values()) { - Pose2d poseDirection = new Pose2d(center, Rotation2d.fromDegrees(180 - (60 * face))); - double adjustX = Units.inchesToMeters(-2); - double adjustY = 
Units.inchesToMeters(6.469); - - fillRight.put( - level, - new Pose3d( - new Translation3d( - poseDirection - .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) - .getX(), - poseDirection - .transformBy(new Transform2d(adjustX, adjustY, new Rotation2d())) - .getY(), - level.height), - new Rotation3d( - 0, - Units.degreesToRadians(level.pitch), - poseDirection.getRotation().getRadians()))); - fillLeft.put( - level, - new Pose3d( - new Translation3d( - poseDirection - .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) - .getX(), - poseDirection - .transformBy(new Transform2d(adjustX, -adjustY, new Rotation2d())) - .getY(), - level.height), - new Rotation3d( - 0, - Units.degreesToRadians(level.pitch), - poseDirection.getRotation().getRadians()))); - } - branchTipPositions.add(fillRight); - branchTipPositions.add(fillLeft); - } - } - } - - public static class StagingPositions { - // Measured from the center of the ice cream - public static final Pose2d leftIceCream = - new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(230.5), new Rotation2d()); - public static final Pose2d middleIceCream = - new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(170.5), new Rotation2d()); - public static final Pose2d rightIceCream = - new Pose2d(Units.inchesToMeters(48), Units.inchesToMeters(86.5), new Rotation2d()); - } - - public static class HPIntake { - // HP Intake positions for blue alliance - public static final Pose2d kStationA = - new Pose2d( - new Translation2d(0.7571420669555664, 0.6461422443389893), new Rotation2d(Math.PI / 4)); - - public static final Pose2d kStationB = - new Pose2d( - new Translation2d(0.7571420669555664, 7.364640235900879), new Rotation2d(-Math.PI / 4)); - } - - public enum ReefHeight { - L4(Units.inchesToMeters(72), -90), - L3(Units.inchesToMeters(47.625), -35), - L2(Units.inchesToMeters(31.875), -35), - L1(Units.inchesToMeters(18), 0); - - ReefHeight(double height, double pitch) { - this.height = height; - 
this.pitch = pitch; // in degrees - } - - public final double height; - public final double pitch; - } - - public enum BranchCode { - A(0), - B(1), - C(2), - D(3), - E(4), - F(5), - G(6), - H(7), - L(8); - - BranchCode(int indexOffset) { - this.indexOffset = indexOffset; - } - - public final int indexOffset; - } -} diff --git a/src/main/java/frc/robot/util/GeomUtil.java b/src/main/java/frc/robot/util/GeomUtil.java deleted file mode 100644 index f77c8b2..0000000 --- a/src/main/java/frc/robot/util/GeomUtil.java +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) 2024-2026 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2021-2026 Littleton Robotics -// http://github.com/Mechanical-Advantage -// -// Use of this source code is governed by a BSD -// license that can be found in the AdvantageKit-License.md file -// at the root directory of this project. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Twist2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; - -/** Geometry utilities for working with translations, rotations, transforms, and poses. 
*/ -public class GeomUtil { - /** - * Creates a pure translating transform - * - * @param translation The translation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Translation2d translation) { - return new Transform2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure translating transform - * - * @param x The x coordinate of the translation - * @param y The y coordinate of the translation - * @return The resulting transform - */ - public static Transform2d toTransform2d(double x, double y) { - return new Transform2d(x, y, Rotation2d.kZero); - } - - /** - * Creates a pure rotating transform - * - * @param rotation The rotation to create the transform with - * @return The resulting transform - */ - public static Transform2d toTransform2d(Rotation2d rotation) { - return new Transform2d(Translation2d.kZero, rotation); - } - - /** - * Converts a Pose2d to a Transform2d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform2d toTransform2d(Pose2d pose) { - return new Transform2d(pose.getTranslation(), pose.getRotation()); - } - - public static Pose2d inverse(Pose2d pose) { - Rotation2d rotationInverse = pose.getRotation().unaryMinus(); - return new Pose2d( - pose.getTranslation().unaryMinus().rotateBy(rotationInverse), rotationInverse); - } - - /** - * Converts a Transform2d to a Pose2d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose2d toPose2d(Transform2d transform) { - return new Pose2d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Creates a pure translated pose - * - * @param translation The translation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Translation2d translation) { 
- return new Pose2d(translation, Rotation2d.kZero); - } - - /** - * Creates a pure rotated pose - * - * @param rotation The rotation to create the pose with - * @return The resulting pose - */ - public static Pose2d toPose2d(Rotation2d rotation) { - return new Pose2d(Translation2d.kZero, rotation); - } - - /** - * Multiplies a twist by a scaling factor - * - * @param twist The twist to multiply - * @param factor The scaling factor for the twist components - * @return The new twist - */ - public static Twist2d multiply(Twist2d twist, double factor) { - return new Twist2d(twist.dx * factor, twist.dy * factor, twist.dtheta * factor); - } - - /** - * Converts a Pose3d to a Transform3d to be used in a kinematic chain - * - * @param pose The pose that will represent the transform - * @return The resulting transform - */ - public static Transform3d toTransform3d(Pose3d pose) { - return new Transform3d(pose.getTranslation(), pose.getRotation()); - } - - /** - * Converts a Transform3d to a Pose3d to be used as a position or as the start of a kinematic - * chain - * - * @param transform The transform that will represent the pose - * @return The resulting pose - */ - public static Pose3d toPose3d(Transform3d transform) { - return new Pose3d(transform.getTranslation(), transform.getRotation()); - } - - /** - * Converts a ChassisSpeeds to a Twist2d by extracting two dimensions (Y and Z). chain - * - * @param speeds The original translation - * @return The resulting translation - */ - public static Twist2d toTwist2d(ChassisSpeeds speeds) { - return new Twist2d( - speeds.vxMetersPerSecond, speeds.vyMetersPerSecond, speeds.omegaRadiansPerSecond); - } - - /** - * Creates a new pose from an existing one using a different translation value. 
- * - * @param pose The original pose - * @param translation The new translation to use - * @return The new pose with the new translation and original rotation - */ - public static Pose2d withTranslation(Pose2d pose, Translation2d translation) { - return new Pose2d(translation, pose.getRotation()); - } - - /** - * Creates a new pose from an existing one using a different rotation value. - * - * @param pose The original pose - * @param rotation The new rotation to use - * @return The new pose with the original translation and new rotation - */ - public static Pose2d withRotation(Pose2d pose, Rotation2d rotation) { - return new Pose2d(pose.getTranslation(), rotation); - } -} diff --git a/src/main/java/frc/robot/util/LimelightHelpers.java b/src/main/java/frc/robot/util/LimelightHelpers.java deleted file mode 100644 index c00dcc9..0000000 --- a/src/main/java/frc/robot/util/LimelightHelpers.java +++ /dev/null @@ -1,1704 +0,0 @@ -// LimelightHelpers v1.11 (REQUIRES LLOS 2025.0 OR LATER) - -// Copyright (c) 2025 FRC 180 -// https://github.com/frc180 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.util; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.annotation.JsonFormat.Shape; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.networktables.DoubleArrayEntry; -import edu.wpi.first.networktables.NetworkTable; -import edu.wpi.first.networktables.NetworkTableEntry; -import edu.wpi.first.networktables.NetworkTableInstance; -import edu.wpi.first.networktables.TimestampedDoubleArray; -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; - -/** - * LimelightHelpers provides static methods and classes for interfacing with Limelight vision - * cameras in FRC. This library supports all Limelight features including AprilTag tracking, Neural - * Networks, and standard color/retroreflective tracking. 
- */ -public class LimelightHelpers { - - private static final Map doubleArrayEntries = new ConcurrentHashMap<>(); - - /** Represents a Color/Retroreflective Target Result extracted from JSON Output */ - public static class LimelightTarget_Retro { - - @JsonProperty("t6c_ts") - private double[] cameraPose_TargetSpace; - - @JsonProperty("t6r_fs") - private double[] robotPose_FieldSpace; - - @JsonProperty("t6r_ts") - private double[] robotPose_TargetSpace; - - @JsonProperty("t6t_cs") - private double[] targetPose_CameraSpace; - - @JsonProperty("t6t_rs") - private double[] targetPose_RobotSpace; - - public Pose3d getCameraPose_TargetSpace() { - return toPose3D(cameraPose_TargetSpace); - } - - public Pose3d getRobotPose_FieldSpace() { - return toPose3D(robotPose_FieldSpace); - } - - public Pose3d getRobotPose_TargetSpace() { - return toPose3D(robotPose_TargetSpace); - } - - public Pose3d getTargetPose_CameraSpace() { - return toPose3D(targetPose_CameraSpace); - } - - public Pose3d getTargetPose_RobotSpace() { - return toPose3D(targetPose_RobotSpace); - } - - public Pose2d getCameraPose_TargetSpace2D() { - return toPose2D(cameraPose_TargetSpace); - } - - public Pose2d getRobotPose_FieldSpace2D() { - return toPose2D(robotPose_FieldSpace); - } - - public Pose2d getRobotPose_TargetSpace2D() { - return toPose2D(robotPose_TargetSpace); - } - - public Pose2d getTargetPose_CameraSpace2D() { - return toPose2D(targetPose_CameraSpace); - } - - public Pose2d getTargetPose_RobotSpace2D() { - return toPose2D(targetPose_RobotSpace); - } - - @JsonProperty("ta") - public double ta; - - @JsonProperty("tx") - public double tx; - - @JsonProperty("ty") - public double ty; - - @JsonProperty("txp") - public double tx_pixels; - - @JsonProperty("typ") - public double ty_pixels; - - @JsonProperty("tx_nocross") - public double tx_nocrosshair; - - @JsonProperty("ty_nocross") - public double ty_nocrosshair; - - @JsonProperty("ts") - public double ts; - - public LimelightTarget_Retro() { - 
cameraPose_TargetSpace = new double[6]; - robotPose_FieldSpace = new double[6]; - robotPose_TargetSpace = new double[6]; - targetPose_CameraSpace = new double[6]; - targetPose_RobotSpace = new double[6]; - } - } - - /** Represents an AprilTag/Fiducial Target Result extracted from JSON Output */ - public static class LimelightTarget_Fiducial { - - @JsonProperty("fID") - public double fiducialID; - - @JsonProperty("fam") - public String fiducialFamily; - - @JsonProperty("t6c_ts") - private double[] cameraPose_TargetSpace; - - @JsonProperty("t6r_fs") - private double[] robotPose_FieldSpace; - - @JsonProperty("t6r_ts") - private double[] robotPose_TargetSpace; - - @JsonProperty("t6t_cs") - private double[] targetPose_CameraSpace; - - @JsonProperty("t6t_rs") - private double[] targetPose_RobotSpace; - - public Pose3d getCameraPose_TargetSpace() { - return toPose3D(cameraPose_TargetSpace); - } - - public Pose3d getRobotPose_FieldSpace() { - return toPose3D(robotPose_FieldSpace); - } - - public Pose3d getRobotPose_TargetSpace() { - return toPose3D(robotPose_TargetSpace); - } - - public Pose3d getTargetPose_CameraSpace() { - return toPose3D(targetPose_CameraSpace); - } - - public Pose3d getTargetPose_RobotSpace() { - return toPose3D(targetPose_RobotSpace); - } - - public Pose2d getCameraPose_TargetSpace2D() { - return toPose2D(cameraPose_TargetSpace); - } - - public Pose2d getRobotPose_FieldSpace2D() { - return toPose2D(robotPose_FieldSpace); - } - - public Pose2d getRobotPose_TargetSpace2D() { - return toPose2D(robotPose_TargetSpace); - } - - public Pose2d getTargetPose_CameraSpace2D() { - return toPose2D(targetPose_CameraSpace); - } - - public Pose2d getTargetPose_RobotSpace2D() { - return toPose2D(targetPose_RobotSpace); - } - - @JsonProperty("ta") - public double ta; - - @JsonProperty("tx") - public double tx; - - @JsonProperty("ty") - public double ty; - - @JsonProperty("txp") - public double tx_pixels; - - @JsonProperty("typ") - public double ty_pixels; - - 
@JsonProperty("tx_nocross") - public double tx_nocrosshair; - - @JsonProperty("ty_nocross") - public double ty_nocrosshair; - - @JsonProperty("ts") - public double ts; - - public LimelightTarget_Fiducial() { - cameraPose_TargetSpace = new double[6]; - robotPose_FieldSpace = new double[6]; - robotPose_TargetSpace = new double[6]; - targetPose_CameraSpace = new double[6]; - targetPose_RobotSpace = new double[6]; - } - } - - /** Represents a Barcode Target Result extracted from JSON Output */ - public static class LimelightTarget_Barcode { - - /** Barcode family type (e.g. "QR", "DataMatrix", etc.) */ - @JsonProperty("fam") - public String family; - - /** Gets the decoded data content of the barcode */ - @JsonProperty("data") - public String data; - - @JsonProperty("txp") - public double tx_pixels; - - @JsonProperty("typ") - public double ty_pixels; - - @JsonProperty("tx") - public double tx; - - @JsonProperty("ty") - public double ty; - - @JsonProperty("tx_nocross") - public double tx_nocrosshair; - - @JsonProperty("ty_nocross") - public double ty_nocrosshair; - - @JsonProperty("ta") - public double ta; - - @JsonProperty("pts") - public double[][] corners; - - public LimelightTarget_Barcode() {} - - public String getFamily() { - return family; - } - } - - /** Represents a Neural Classifier Pipeline Result extracted from JSON Output */ - public static class LimelightTarget_Classifier { - - @JsonProperty("class") - public String className; - - @JsonProperty("classID") - public double classID; - - @JsonProperty("conf") - public double confidence; - - @JsonProperty("zone") - public double zone; - - @JsonProperty("tx") - public double tx; - - @JsonProperty("txp") - public double tx_pixels; - - @JsonProperty("ty") - public double ty; - - @JsonProperty("typ") - public double ty_pixels; - - public LimelightTarget_Classifier() {} - } - - /** Represents a Neural Detector Pipeline Result extracted from JSON Output */ - public static class LimelightTarget_Detector { - - 
@JsonProperty("class") - public String className; - - @JsonProperty("classID") - public double classID; - - @JsonProperty("conf") - public double confidence; - - @JsonProperty("ta") - public double ta; - - @JsonProperty("tx") - public double tx; - - @JsonProperty("ty") - public double ty; - - @JsonProperty("txp") - public double tx_pixels; - - @JsonProperty("typ") - public double ty_pixels; - - @JsonProperty("tx_nocross") - public double tx_nocrosshair; - - @JsonProperty("ty_nocross") - public double ty_nocrosshair; - - public LimelightTarget_Detector() {} - } - - /** Limelight Results object, parsed from a Limelight's JSON results output. */ - public static class LimelightResults { - - public String error; - - @JsonProperty("pID") - public double pipelineID; - - @JsonProperty("tl") - public double latency_pipeline; - - @JsonProperty("cl") - public double latency_capture; - - public double latency_jsonParse; - - @JsonProperty("ts") - public double timestamp_LIMELIGHT_publish; - - @JsonProperty("ts_rio") - public double timestamp_RIOFPGA_capture; - - @JsonProperty("v") - @JsonFormat(shape = Shape.NUMBER) - public boolean valid; - - @JsonProperty("botpose") - public double[] botpose; - - @JsonProperty("botpose_wpired") - public double[] botpose_wpired; - - @JsonProperty("botpose_wpiblue") - public double[] botpose_wpiblue; - - @JsonProperty("botpose_tagcount") - public double botpose_tagcount; - - @JsonProperty("botpose_span") - public double botpose_span; - - @JsonProperty("botpose_avgdist") - public double botpose_avgdist; - - @JsonProperty("botpose_avgarea") - public double botpose_avgarea; - - @JsonProperty("t6c_rs") - public double[] camerapose_robotspace; - - public Pose3d getBotPose3d() { - return toPose3D(botpose); - } - - public Pose3d getBotPose3d_wpiRed() { - return toPose3D(botpose_wpired); - } - - public Pose3d getBotPose3d_wpiBlue() { - return toPose3D(botpose_wpiblue); - } - - public Pose2d getBotPose2d() { - return toPose2D(botpose); - } - - public 
Pose2d getBotPose2d_wpiRed() { - return toPose2D(botpose_wpired); - } - - public Pose2d getBotPose2d_wpiBlue() { - return toPose2D(botpose_wpiblue); - } - - @JsonProperty("Retro") - public LimelightTarget_Retro[] targets_Retro; - - @JsonProperty("Fiducial") - public LimelightTarget_Fiducial[] targets_Fiducials; - - @JsonProperty("Classifier") - public LimelightTarget_Classifier[] targets_Classifier; - - @JsonProperty("Detector") - public LimelightTarget_Detector[] targets_Detector; - - @JsonProperty("Barcode") - public LimelightTarget_Barcode[] targets_Barcode; - - public LimelightResults() { - botpose = new double[6]; - botpose_wpired = new double[6]; - botpose_wpiblue = new double[6]; - camerapose_robotspace = new double[6]; - targets_Retro = new LimelightTarget_Retro[0]; - targets_Fiducials = new LimelightTarget_Fiducial[0]; - targets_Classifier = new LimelightTarget_Classifier[0]; - targets_Detector = new LimelightTarget_Detector[0]; - targets_Barcode = new LimelightTarget_Barcode[0]; - } - } - - /** Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. */ - public static class RawFiducial { - public int id = 0; - public double txnc = 0; - public double tync = 0; - public double ta = 0; - public double distToCamera = 0; - public double distToRobot = 0; - public double ambiguity = 0; - - public RawFiducial( - int id, - double txnc, - double tync, - double ta, - double distToCamera, - double distToRobot, - double ambiguity) { - this.id = id; - this.txnc = txnc; - this.tync = tync; - this.ta = ta; - this.distToCamera = distToCamera; - this.distToRobot = distToRobot; - this.ambiguity = ambiguity; - } - } - - /** Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. 
*/ - public static class RawDetection { - public int classId = 0; - public double txnc = 0; - public double tync = 0; - public double ta = 0; - public double corner0_X = 0; - public double corner0_Y = 0; - public double corner1_X = 0; - public double corner1_Y = 0; - public double corner2_X = 0; - public double corner2_Y = 0; - public double corner3_X = 0; - public double corner3_Y = 0; - - public RawDetection( - int classId, - double txnc, - double tync, - double ta, - double corner0_X, - double corner0_Y, - double corner1_X, - double corner1_Y, - double corner2_X, - double corner2_Y, - double corner3_X, - double corner3_Y) { - this.classId = classId; - this.txnc = txnc; - this.tync = tync; - this.ta = ta; - this.corner0_X = corner0_X; - this.corner0_Y = corner0_Y; - this.corner1_X = corner1_X; - this.corner1_Y = corner1_Y; - this.corner2_X = corner2_X; - this.corner2_Y = corner2_Y; - this.corner3_X = corner3_X; - this.corner3_Y = corner3_Y; - } - } - - /** Represents a 3D Pose Estimate. 
*/ - public static class PoseEstimate { - public Pose2d pose; - public double timestampSeconds; - public double latency; - public int tagCount; - public double tagSpan; - public double avgTagDist; - public double avgTagArea; - - public RawFiducial[] rawFiducials; - public boolean isMegaTag2; - - /** Instantiates a PoseEstimate object with default values */ - public PoseEstimate() { - this.pose = new Pose2d(); - this.timestampSeconds = 0; - this.latency = 0; - this.tagCount = 0; - this.tagSpan = 0; - this.avgTagDist = 0; - this.avgTagArea = 0; - this.rawFiducials = new RawFiducial[] {}; - this.isMegaTag2 = false; - } - - public PoseEstimate( - Pose2d pose, - double timestampSeconds, - double latency, - int tagCount, - double tagSpan, - double avgTagDist, - double avgTagArea, - RawFiducial[] rawFiducials, - boolean isMegaTag2) { - - this.pose = pose; - this.timestampSeconds = timestampSeconds; - this.latency = latency; - this.tagCount = tagCount; - this.tagSpan = tagSpan; - this.avgTagDist = avgTagDist; - this.avgTagArea = avgTagArea; - this.rawFiducials = rawFiducials; - this.isMegaTag2 = isMegaTag2; - } - } - - /** Encapsulates the state of an internal Limelight IMU. 
*/ - public static class IMUData { - public double robotYaw = 0.0; - public double Roll = 0.0; - public double Pitch = 0.0; - public double Yaw = 0.0; - public double gyroX = 0.0; - public double gyroY = 0.0; - public double gyroZ = 0.0; - public double accelX = 0.0; - public double accelY = 0.0; - public double accelZ = 0.0; - - public IMUData() {} - - public IMUData(double[] imuData) { - if (imuData != null && imuData.length >= 10) { - this.robotYaw = imuData[0]; - this.Roll = imuData[1]; - this.Pitch = imuData[2]; - this.Yaw = imuData[3]; - this.gyroX = imuData[4]; - this.gyroY = imuData[5]; - this.gyroZ = imuData[6]; - this.accelX = imuData[7]; - this.accelY = imuData[8]; - this.accelZ = imuData[9]; - } - } - } - - private static ObjectMapper mapper; - - /** Print JSON Parse time to the console in milliseconds */ - static boolean profileJSON = false; - - static final String sanitizeName(String name) { - if (name == "" || name == null) { - return "limelight"; - } - return name; - } - - /** - * Takes a 6-length array of pose data and converts it to a Pose3d object. Array format: [x, y, z, - * roll, pitch, yaw] where angles are in degrees. - * - * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] - * @return Pose3d object representing the pose, or empty Pose3d if invalid data - */ - public static Pose3d toPose3D(double[] inData) { - if (inData.length < 6) { - // System.err.println("Bad LL 3D Pose Data!"); - return new Pose3d(); - } - return new Pose3d( - new Translation3d(inData[0], inData[1], inData[2]), - new Rotation3d( - Units.degreesToRadians(inData[3]), - Units.degreesToRadians(inData[4]), - Units.degreesToRadians(inData[5]))); - } - - /** - * Takes a 6-length array of pose data and converts it to a Pose2d object. Uses only x, y, and yaw - * components, ignoring z, roll, and pitch. Array format: [x, y, z, roll, pitch, yaw] where angles - * are in degrees. 
- * - * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] - * @return Pose2d object representing the pose, or empty Pose2d if invalid data - */ - public static Pose2d toPose2D(double[] inData) { - if (inData.length < 6) { - // System.err.println("Bad LL 2D Pose Data!"); - return new Pose2d(); - } - Translation2d tran2d = new Translation2d(inData[0], inData[1]); - Rotation2d r2d = new Rotation2d(Units.degreesToRadians(inData[5])); - return new Pose2d(tran2d, r2d); - } - - /** - * Converts a Pose3d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. - * Translation components are in meters, rotation components are in degrees. - * - * @param pose The Pose3d object to convert - * @return A 6-element array containing [x, y, z, roll, pitch, yaw] - */ - public static double[] pose3dToArray(Pose3d pose) { - double[] result = new double[6]; - result[0] = pose.getTranslation().getX(); - result[1] = pose.getTranslation().getY(); - result[2] = pose.getTranslation().getZ(); - result[3] = Units.radiansToDegrees(pose.getRotation().getX()); - result[4] = Units.radiansToDegrees(pose.getRotation().getY()); - result[5] = Units.radiansToDegrees(pose.getRotation().getZ()); - return result; - } - - /** - * Converts a Pose2d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. - * Translation components are in meters, rotation components are in degrees. Note: z, roll, and - * pitch will be 0 since Pose2d only contains x, y, and yaw. 
- * - * @param pose The Pose2d object to convert - * @return A 6-element array containing [x, y, 0, 0, 0, yaw] - */ - public static double[] pose2dToArray(Pose2d pose) { - double[] result = new double[6]; - result[0] = pose.getTranslation().getX(); - result[1] = pose.getTranslation().getY(); - result[2] = 0; - result[3] = Units.radiansToDegrees(0); - result[4] = Units.radiansToDegrees(0); - result[5] = Units.radiansToDegrees(pose.getRotation().getRadians()); - return result; - } - - private static double extractArrayEntry(double[] inData, int position) { - if (inData.length < position + 1) { - return 0; - } - return inData[position]; - } - - private static PoseEstimate getBotPoseEstimate( - String limelightName, String entryName, boolean isMegaTag2) { - DoubleArrayEntry poseEntry = - LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); - - TimestampedDoubleArray tsValue = poseEntry.getAtomic(); - double[] poseArray = tsValue.value; - long timestamp = tsValue.timestamp; - - if (poseArray.length == 0) { - // Handle the case where no data is available - return null; // or some default PoseEstimate - } - - var pose = toPose2D(poseArray); - double latency = extractArrayEntry(poseArray, 6); - int tagCount = (int) extractArrayEntry(poseArray, 7); - double tagSpan = extractArrayEntry(poseArray, 8); - double tagDist = extractArrayEntry(poseArray, 9); - double tagArea = extractArrayEntry(poseArray, 10); - - // Convert server timestamp from microseconds to seconds and adjust for latency - double adjustedTimestamp = (timestamp / 1000000.0) - (latency / 1000.0); - - RawFiducial[] rawFiducials = new RawFiducial[tagCount]; - int valsPerFiducial = 7; - int expectedTotalVals = 11 + valsPerFiducial * tagCount; - - if (poseArray.length != expectedTotalVals) { - // Don't populate fiducials - } else { - for (int i = 0; i < tagCount; i++) { - int baseIndex = 11 + (i * valsPerFiducial); - int id = (int) poseArray[baseIndex]; - double txnc = poseArray[baseIndex + 1]; - 
double tync = poseArray[baseIndex + 2]; - double ta = poseArray[baseIndex + 3]; - double distToCamera = poseArray[baseIndex + 4]; - double distToRobot = poseArray[baseIndex + 5]; - double ambiguity = poseArray[baseIndex + 6]; - rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); - } - } - - return new PoseEstimate( - pose, - adjustedTimestamp, - latency, - tagCount, - tagSpan, - tagDist, - tagArea, - rawFiducials, - isMegaTag2); - } - - /** - * Gets the latest raw fiducial/AprilTag detection results from NetworkTables. - * - * @param limelightName Name/identifier of the Limelight - * @return Array of RawFiducial objects containing detection details - */ - public static RawFiducial[] getRawFiducials(String limelightName) { - var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawfiducials"); - var rawFiducialArray = entry.getDoubleArray(new double[0]); - int valsPerEntry = 7; - if (rawFiducialArray.length % valsPerEntry != 0) { - return new RawFiducial[0]; - } - - int numFiducials = rawFiducialArray.length / valsPerEntry; - RawFiducial[] rawFiducials = new RawFiducial[numFiducials]; - - for (int i = 0; i < numFiducials; i++) { - int baseIndex = i * valsPerEntry; - int id = (int) extractArrayEntry(rawFiducialArray, baseIndex); - double txnc = extractArrayEntry(rawFiducialArray, baseIndex + 1); - double tync = extractArrayEntry(rawFiducialArray, baseIndex + 2); - double ta = extractArrayEntry(rawFiducialArray, baseIndex + 3); - double distToCamera = extractArrayEntry(rawFiducialArray, baseIndex + 4); - double distToRobot = extractArrayEntry(rawFiducialArray, baseIndex + 5); - double ambiguity = extractArrayEntry(rawFiducialArray, baseIndex + 6); - - rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); - } - - return rawFiducials; - } - - /** - * Gets the latest raw neural detector results from NetworkTables - * - * @param limelightName Name/identifier of the Limelight - 
* @return Array of RawDetection objects containing detection details - */ - public static RawDetection[] getRawDetections(String limelightName) { - var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawdetections"); - var rawDetectionArray = entry.getDoubleArray(new double[0]); - int valsPerEntry = 12; - if (rawDetectionArray.length % valsPerEntry != 0) { - return new RawDetection[0]; - } - - int numDetections = rawDetectionArray.length / valsPerEntry; - RawDetection[] rawDetections = new RawDetection[numDetections]; - - for (int i = 0; i < numDetections; i++) { - int baseIndex = i * valsPerEntry; // Starting index for this detection's data - int classId = (int) extractArrayEntry(rawDetectionArray, baseIndex); - double txnc = extractArrayEntry(rawDetectionArray, baseIndex + 1); - double tync = extractArrayEntry(rawDetectionArray, baseIndex + 2); - double ta = extractArrayEntry(rawDetectionArray, baseIndex + 3); - double corner0_X = extractArrayEntry(rawDetectionArray, baseIndex + 4); - double corner0_Y = extractArrayEntry(rawDetectionArray, baseIndex + 5); - double corner1_X = extractArrayEntry(rawDetectionArray, baseIndex + 6); - double corner1_Y = extractArrayEntry(rawDetectionArray, baseIndex + 7); - double corner2_X = extractArrayEntry(rawDetectionArray, baseIndex + 8); - double corner2_Y = extractArrayEntry(rawDetectionArray, baseIndex + 9); - double corner3_X = extractArrayEntry(rawDetectionArray, baseIndex + 10); - double corner3_Y = extractArrayEntry(rawDetectionArray, baseIndex + 11); - - rawDetections[i] = - new RawDetection( - classId, txnc, tync, ta, corner0_X, corner0_Y, corner1_X, corner1_Y, corner2_X, - corner2_Y, corner3_X, corner3_Y); - } - - return rawDetections; - } - - /** - * Prints detailed information about a PoseEstimate to standard output. Includes timestamp, - * latency, tag count, tag span, average tag distance, average tag area, and detailed information - * about each detected fiducial. 
- * - * @param pose The PoseEstimate object to print. If null, prints "No PoseEstimate available." - */ - public static void printPoseEstimate(PoseEstimate pose) { - if (pose == null) { - System.out.println("No PoseEstimate available."); - return; - } - - System.out.printf("Pose Estimate Information:%n"); - System.out.printf("Timestamp (Seconds): %.3f%n", pose.timestampSeconds); - System.out.printf("Latency: %.3f ms%n", pose.latency); - System.out.printf("Tag Count: %d%n", pose.tagCount); - System.out.printf("Tag Span: %.2f meters%n", pose.tagSpan); - System.out.printf("Average Tag Distance: %.2f meters%n", pose.avgTagDist); - System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); - System.out.printf("Is MegaTag2: %b%n", pose.isMegaTag2); - System.out.println(); - - if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { - System.out.println("No RawFiducials data available."); - return; - } - - System.out.println("Raw Fiducials Details:"); - for (int i = 0; i < pose.rawFiducials.length; i++) { - RawFiducial fiducial = pose.rawFiducials[i]; - System.out.printf(" Fiducial #%d:%n", i + 1); - System.out.printf(" ID: %d%n", fiducial.id); - System.out.printf(" TXNC: %.2f%n", fiducial.txnc); - System.out.printf(" TYNC: %.2f%n", fiducial.tync); - System.out.printf(" TA: %.2f%n", fiducial.ta); - System.out.printf(" Distance to Camera: %.2f meters%n", fiducial.distToCamera); - System.out.printf(" Distance to Robot: %.2f meters%n", fiducial.distToRobot); - System.out.printf(" Ambiguity: %.2f%n", fiducial.ambiguity); - System.out.println(); - } - } - - public static Boolean validPoseEstimate(PoseEstimate pose) { - return pose != null && pose.rawFiducials != null && pose.rawFiducials.length != 0; - } - - public static NetworkTable getLimelightNTTable(String tableName) { - return NetworkTableInstance.getDefault().getTable(sanitizeName(tableName)); - } - - public static void Flush() { - NetworkTableInstance.getDefault().flush(); - } - - public static 
NetworkTableEntry getLimelightNTTableEntry(String tableName, String entryName) { - return getLimelightNTTable(tableName).getEntry(entryName); - } - - public static DoubleArrayEntry getLimelightDoubleArrayEntry(String tableName, String entryName) { - String key = tableName + "/" + entryName; - return doubleArrayEntries.computeIfAbsent( - key, - k -> { - NetworkTable table = getLimelightNTTable(tableName); - return table.getDoubleArrayTopic(entryName).getEntry(new double[0]); - }); - } - - public static double getLimelightNTDouble(String tableName, String entryName) { - return getLimelightNTTableEntry(tableName, entryName).getDouble(0.0); - } - - public static void setLimelightNTDouble(String tableName, String entryName, double val) { - getLimelightNTTableEntry(tableName, entryName).setDouble(val); - } - - public static void setLimelightNTDoubleArray(String tableName, String entryName, double[] val) { - getLimelightNTTableEntry(tableName, entryName).setDoubleArray(val); - } - - public static double[] getLimelightNTDoubleArray(String tableName, String entryName) { - return getLimelightNTTableEntry(tableName, entryName).getDoubleArray(new double[0]); - } - - public static String getLimelightNTString(String tableName, String entryName) { - return getLimelightNTTableEntry(tableName, entryName).getString(""); - } - - public static String[] getLimelightNTStringArray(String tableName, String entryName) { - return getLimelightNTTableEntry(tableName, entryName).getStringArray(new String[0]); - } - - public static URL getLimelightURLString(String tableName, String request) { - String urlString = "http://" + sanitizeName(tableName) + ".local:5807/" + request; - URL url; - try { - url = new URL(urlString); - return url; - } catch (MalformedURLException e) { - System.err.println("bad LL URL"); - } - return null; - } - - ///// - ///// - - /** - * Does the Limelight have a valid target? 
 *
   * @param limelightName Name of the Limelight camera ("" for default)
   * @return True if a valid target is present, false otherwise
   */
  public static boolean getTV(String limelightName) {
    // "tv" is 1.0 when a target is valid; a missing entry reads as 0.0 (false).
    return 1.0 == getLimelightNTDouble(limelightName, "tv");
  }

  /**
   * Gets the horizontal offset from the crosshair to the target in degrees.
   *
   * @param limelightName Name of the Limelight camera ("" for default)
   * @return Horizontal offset angle in degrees
   */
  public static double getTX(String limelightName) {
    return getLimelightNTDouble(limelightName, "tx");
  }

  /**
   * Gets the vertical offset from the crosshair to the target in degrees.
   *
   * @param limelightName Name of the Limelight camera ("" for default)
   * @return Vertical offset angle in degrees
   */
  public static double getTY(String limelightName) {
    return getLimelightNTDouble(limelightName, "ty");
  }

  /**
   * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is the
   * most accurate 2d metric if you are using a calibrated camera and you don't need adjustable
   * crosshair functionality.
   *
   * @param limelightName Name of the Limelight camera ("" for default)
   * @return Horizontal offset angle in degrees
   */
  public static double getTXNC(String limelightName) {
    return getLimelightNTDouble(limelightName, "txnc");
  }

  /**
   * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the
   * most accurate 2d metric if you are using a calibrated camera and you don't need adjustable
   * crosshair functionality.
   *
   * @param limelightName Name of the Limelight camera ("" for default)
   * @return Vertical offset angle in degrees
   */
  public static double getTYNC(String limelightName) {
    return getLimelightNTDouble(limelightName, "tync");
  }

  /**
   * Gets the target area as a percentage of the image (0-100%).
- * - * @param limelightName Name of the Limelight camera ("" for default) - * @return Target area percentage (0-100) - */ - public static double getTA(String limelightName) { - return getLimelightNTDouble(limelightName, "ta"); - } - - /** - * T2D is an array that contains several targeting metrcis - * - * @param limelightName Name of the Limelight camera - * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, - * txnc, tync, ta, tid, targetClassIndexDetector, targetClassIndexClassifier, - * targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, - * targetVerticalExtentPixels, targetSkewDegrees] - */ - public static double[] getT2DArray(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "t2d"); - } - - /** - * Gets the number of targets currently detected. - * - * @param limelightName Name of the Limelight camera - * @return Number of detected targets - */ - public static int getTargetCount(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[1]; - } - return 0; - } - - /** - * Gets the classifier class index from the currently running neural classifier pipeline - * - * @param limelightName Name of the Limelight camera - * @return Class index from classifier pipeline - */ - public static int getClassifierClassIndex(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[10]; - } - return 0; - } - - /** - * Gets the detector class index from the primary result of the currently running neural detector - * pipeline. - * - * @param limelightName Name of the Limelight camera - * @return Class index from detector pipeline - */ - public static int getDetectorClassIndex(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[11]; - } - return 0; - } - - /** - * Gets the current neural classifier result class name. 
- * - * @param limelightName Name of the Limelight camera - * @return Class name string from classifier pipeline - */ - public static String getClassifierClass(String limelightName) { - return getLimelightNTString(limelightName, "tcclass"); - } - - /** - * Gets the primary neural detector result class name. - * - * @param limelightName Name of the Limelight camera - * @return Class name string from detector pipeline - */ - public static String getDetectorClass(String limelightName) { - return getLimelightNTString(limelightName, "tdclass"); - } - - /** - * Gets the pipeline's processing latency contribution. - * - * @param limelightName Name of the Limelight camera - * @return Pipeline latency in milliseconds - */ - public static double getLatency_Pipeline(String limelightName) { - return getLimelightNTDouble(limelightName, "tl"); - } - - /** - * Gets the capture latency. - * - * @param limelightName Name of the Limelight camera - * @return Capture latency in milliseconds - */ - public static double getLatency_Capture(String limelightName) { - return getLimelightNTDouble(limelightName, "cl"); - } - - /** - * Gets the active pipeline index. - * - * @param limelightName Name of the Limelight camera - * @return Current pipeline index (0-9) - */ - public static double getCurrentPipelineIndex(String limelightName) { - return getLimelightNTDouble(limelightName, "getpipe"); - } - - /** - * Gets the current pipeline type. - * - * @param limelightName Name of the Limelight camera - * @return Pipeline type string (e.g. "retro", "apriltag", etc) - */ - public static String getCurrentPipelineType(String limelightName) { - return getLimelightNTString(limelightName, "getpipetype"); - } - - /** - * Gets the full JSON results dump. 
- * - * @param limelightName Name of the Limelight camera - * @return JSON string containing all current results - */ - public static String getJSONDump(String limelightName) { - return getLimelightNTString(limelightName, "json"); - } - - /** - * Switch to getBotPose - * - * @param limelightName - * @return - */ - @Deprecated - public static double[] getBotpose(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose"); - } - - /** - * Switch to getBotPose_wpiRed - * - * @param limelightName - * @return - */ - @Deprecated - public static double[] getBotpose_wpiRed(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose_wpired"); - } - - /** - * Switch to getBotPose_wpiBlue - * - * @param limelightName - * @return - */ - @Deprecated - public static double[] getBotpose_wpiBlue(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); - } - - public static double[] getBotPose(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose"); - } - - public static double[] getBotPose_wpiRed(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose_wpired"); - } - - public static double[] getBotPose_wpiBlue(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); - } - - public static double[] getBotPose_TargetSpace(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); - } - - public static double[] getCameraPose_TargetSpace(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); - } - - public static double[] getTargetPose_CameraSpace(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); - } - - public static double[] getTargetPose_RobotSpace(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); - } - - public static 
double[] getTargetColor(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "tc"); - } - - public static double getFiducialID(String limelightName) { - return getLimelightNTDouble(limelightName, "tid"); - } - - public static String getNeuralClassID(String limelightName) { - return getLimelightNTString(limelightName, "tclass"); - } - - public static String[] getRawBarcodeData(String limelightName) { - return getLimelightNTStringArray(limelightName, "rawbarcodes"); - } - - ///// - ///// - - public static Pose3d getBotPose3d(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose"); - return toPose3D(poseArray); - } - - /** - * (Not Recommended) Gets the robot's 3D pose in the WPILib Red Alliance Coordinate System. - * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation in Red Alliance field - * space - */ - public static Pose3d getBotPose3d_wpiRed(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); - return toPose3D(poseArray); - } - - /** - * (Recommended) Gets the robot's 3D pose in the WPILib Blue Alliance Coordinate System. - * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation in Blue Alliance field - * space - */ - public static Pose3d getBotPose3d_wpiBlue(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); - return toPose3D(poseArray); - } - - /** - * Gets the robot's 3D pose with respect to the currently tracked target's coordinate system. 
- * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation relative to the target - */ - public static Pose3d getBotPose3d_TargetSpace(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); - return toPose3D(poseArray); - } - - /** - * Gets the camera's 3D pose with respect to the currently tracked target's coordinate system. - * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the camera's position and orientation relative to the target - */ - public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); - return toPose3D(poseArray); - } - - /** - * Gets the target's 3D pose with respect to the camera's coordinate system. - * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the target's position and orientation relative to the camera - */ - public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); - return toPose3D(poseArray); - } - - /** - * Gets the target's 3D pose with respect to the robot's coordinate system. - * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the target's position and orientation relative to the robot - */ - public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); - return toPose3D(poseArray); - } - - /** - * Gets the camera's 3D pose with respect to the robot's coordinate system. 
- * - * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the camera's position and orientation relative to the robot - */ - public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { - double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); - return toPose3D(poseArray); - } - - /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * - * @param limelightName - * @return - */ - public static Pose2d getBotPose2d_wpiBlue(String limelightName) { - - double[] result = getBotPose_wpiBlue(limelightName); - return toPose2D(result); - } - - /** - * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator - * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. - * - * @param limelightName - * @return - */ - public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpiblue", false); - } - - /** - * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator - * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. Make sure you are calling - * setRobotOrientation() before calling this method. 
- * - * @param limelightName - * @return - */ - public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue", true); - } - - /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * - * @param limelightName - * @return - */ - public static Pose2d getBotPose2d_wpiRed(String limelightName) { - - double[] result = getBotPose_wpiRed(limelightName); - return toPose2D(result); - } - - /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the RED alliance - * - * @param limelightName - * @return - */ - public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpired", false); - } - - /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the RED alliance - * - * @param limelightName - * @return - */ - public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpired", true); - } - - /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * - * @param limelightName - * @return - */ - public static Pose2d getBotPose2d(String limelightName) { - - double[] result = getBotPose(limelightName); - return toPose2D(result); - } - - /** - * Gets the current IMU data from NetworkTables. IMU data is formatted as [robotYaw, Roll, Pitch, - * Yaw, gyroX, gyroY, gyroZ, accelX, accelY, accelZ]. Returns all zeros if data is invalid or - * unavailable. 
- * - * @param limelightName Name/identifier of the Limelight - * @return IMUData object containing all current IMU data - */ - public static IMUData getIMUData(String limelightName) { - double[] imuData = getLimelightNTDoubleArray(limelightName, "imu"); - if (imuData == null || imuData.length < 10) { - return new IMUData(); // Returns object with all zeros - } - return new IMUData(imuData); - } - - ///// - ///// - - public static void setPipelineIndex(String limelightName, int pipelineIndex) { - setLimelightNTDouble(limelightName, "pipeline", pipelineIndex); - } - - public static void setPriorityTagID(String limelightName, int ID) { - setLimelightNTDouble(limelightName, "priorityid", ID); - } - - /** - * Sets LED mode to be controlled by the current pipeline. - * - * @param limelightName Name of the Limelight camera - */ - public static void setLEDMode_PipelineControl(String limelightName) { - setLimelightNTDouble(limelightName, "ledMode", 0); - } - - public static void setLEDMode_ForceOff(String limelightName) { - setLimelightNTDouble(limelightName, "ledMode", 1); - } - - public static void setLEDMode_ForceBlink(String limelightName) { - setLimelightNTDouble(limelightName, "ledMode", 2); - } - - public static void setLEDMode_ForceOn(String limelightName) { - setLimelightNTDouble(limelightName, "ledMode", 3); - } - - /** - * Enables standard side-by-side stream mode. - * - * @param limelightName Name of the Limelight camera - */ - public static void setStreamMode_Standard(String limelightName) { - setLimelightNTDouble(limelightName, "stream", 0); - } - - /** - * Enables Picture-in-Picture mode with secondary stream in the corner. - * - * @param limelightName Name of the Limelight camera - */ - public static void setStreamMode_PiPMain(String limelightName) { - setLimelightNTDouble(limelightName, "stream", 1); - } - - /** - * Enables Picture-in-Picture mode with primary stream in the corner. 
- * - * @param limelightName Name of the Limelight camera - */ - public static void setStreamMode_PiPSecondary(String limelightName) { - setLimelightNTDouble(limelightName, "stream", 2); - } - - /** - * Sets the crop window for the camera. The crop window in the UI must be completely open. - * - * @param limelightName Name of the Limelight camera - * @param cropXMin Minimum X value (-1 to 1) - * @param cropXMax Maximum X value (-1 to 1) - * @param cropYMin Minimum Y value (-1 to 1) - * @param cropYMax Maximum Y value (-1 to 1) - */ - public static void setCropWindow( - String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { - double[] entries = new double[4]; - entries[0] = cropXMin; - entries[1] = cropXMax; - entries[2] = cropYMin; - entries[3] = cropYMax; - setLimelightNTDoubleArray(limelightName, "crop", entries); - } - - /** Sets 3D offset point for easy 3D targeting. */ - public static void setFiducial3DOffset( - String limelightName, double offsetX, double offsetY, double offsetZ) { - double[] entries = new double[3]; - entries[0] = offsetX; - entries[1] = offsetY; - entries[2] = offsetZ; - setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); - } - - /** - * Sets robot orientation values used by MegaTag2 localization algorithm. - * - * @param limelightName Name/identifier of the Limelight - * @param yaw Robot yaw in degrees. 
0 = robot facing red alliance wall in FRC - * @param yawRate (Unnecessary) Angular velocity of robot yaw in degrees per second - * @param pitch (Unnecessary) Robot pitch in degrees - * @param pitchRate (Unnecessary) Angular velocity of robot pitch in degrees per second - * @param roll (Unnecessary) Robot roll in degrees - * @param rollRate (Unnecessary) Angular velocity of robot roll in degrees per second - */ - public static void SetRobotOrientation( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate) { - SetRobotOrientation_INTERNAL( - limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, true); - } - - public static void SetRobotOrientation_NoFlush( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate) { - SetRobotOrientation_INTERNAL( - limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, false); - } - - private static void SetRobotOrientation_INTERNAL( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate, - boolean flush) { - - double[] entries = new double[6]; - entries[0] = yaw; - entries[1] = yawRate; - entries[2] = pitch; - entries[3] = pitchRate; - entries[4] = roll; - entries[5] = rollRate; - setLimelightNTDoubleArray(limelightName, "robot_orientation_set", entries); - if (flush) { - Flush(); - } - } - - /** - * Configures the IMU mode for MegaTag2 Localization - * - * @param limelightName Name/identifier of the Limelight - * @param mode IMU mode. - */ - public static void SetIMUMode(String limelightName, int mode) { - setLimelightNTDouble(limelightName, "imumode_set", mode); - } - - /** - * Sets the 3D point-of-interest offset for the current fiducial pipeline. 
- * https://docs.limelightvision.io/docs/docs-limelight/pipeline-apriltag/apriltag-3d#point-of-interest-tracking - * - * @param limelightName Name/identifier of the Limelight - * @param x X offset in meters - * @param y Y offset in meters - * @param z Z offset in meters - */ - public static void SetFidcuial3DOffset(String limelightName, double x, double y, double z) { - - double[] entries = new double[3]; - entries[0] = x; - entries[1] = y; - entries[2] = z; - setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); - } - - /** - * Overrides the valid AprilTag IDs that will be used for localization. Tags not in this list will - * be ignored for robot pose estimation. - * - * @param limelightName Name/identifier of the Limelight - * @param validIDs Array of valid AprilTag IDs to track - */ - public static void SetFiducialIDFiltersOverride(String limelightName, int[] validIDs) { - double[] validIDsDouble = new double[validIDs.length]; - for (int i = 0; i < validIDs.length; i++) { - validIDsDouble[i] = validIDs[i]; - } - setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); - } - - /** - * Sets the downscaling factor for AprilTag detection. Increasing downscale can improve - * performance at the cost of potentially reduced detection range. - * - * @param limelightName Name/identifier of the Limelight - * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set to - * 0 for pipeline control. - */ - public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { - int d = 0; // pipeline - if (downscale == 1.0) { - d = 1; - } - if (downscale == 1.5) { - d = 2; - } - if (downscale == 2) { - d = 3; - } - if (downscale == 3) { - d = 4; - } - if (downscale == 4) { - d = 5; - } - setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); - } - - /** - * Sets the camera pose relative to the robot. 
- * - * @param limelightName Name of the Limelight camera - * @param forward Forward offset in meters - * @param side Side offset in meters - * @param up Up offset in meters - * @param roll Roll angle in degrees - * @param pitch Pitch angle in degrees - * @param yaw Yaw angle in degrees - */ - public static void setCameraPose_RobotSpace( - String limelightName, - double forward, - double side, - double up, - double roll, - double pitch, - double yaw) { - double[] entries = new double[6]; - entries[0] = forward; - entries[1] = side; - entries[2] = up; - entries[3] = roll; - entries[4] = pitch; - entries[5] = yaw; - setLimelightNTDoubleArray(limelightName, "camerapose_robotspace_set", entries); - } - - ///// - ///// - - public static void setPythonScriptData(String limelightName, double[] outgoingPythonData) { - setLimelightNTDoubleArray(limelightName, "llrobot", outgoingPythonData); - } - - public static double[] getPythonScriptData(String limelightName) { - return getLimelightNTDoubleArray(limelightName, "llpython"); - } - - ///// - ///// - - /** Asynchronously take snapshot. 
*/ - public static CompletableFuture takeSnapshot(String tableName, String snapshotName) { - return CompletableFuture.supplyAsync( - () -> { - return SYNCH_TAKESNAPSHOT(tableName, snapshotName); - }); - } - - private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) { - URL url = getLimelightURLString(tableName, "capturesnapshot"); - try { - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("GET"); - if (snapshotName != null && snapshotName != "") { - connection.setRequestProperty("snapname", snapshotName); - } - - int responseCode = connection.getResponseCode(); - if (responseCode == 200) { - return true; - } else { - System.err.println("Bad LL Request"); - } - } catch (IOException e) { - System.err.println(e.getMessage()); - } - return false; - } - - /** - * Gets the latest JSON results output and returns a LimelightResults object. - * - * @param limelightName Name of the Limelight camera - * @return LimelightResults object containing all current target data - */ - public static LimelightResults getLatestResults(String limelightName) { - - long start = System.nanoTime(); - LimelightHelpers.LimelightResults results = new LimelightHelpers.LimelightResults(); - if (mapper == null) { - mapper = - new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - } - - try { - results = mapper.readValue(getJSONDump(limelightName), LimelightResults.class); - } catch (JsonProcessingException e) { - results.error = "lljson error: " + e.getMessage(); - } - - long end = System.nanoTime(); - double millis = (end - start) * .000001; - results.latency_jsonParse = millis; - if (profileJSON) { - System.out.printf("lljson: %.2f\r\n", millis); - } - - return results; - } -} diff --git a/src/main/java/frc/robot/util/MathHelpers.java b/src/main/java/frc/robot/util/MathHelpers.java deleted file mode 100644 index 3b6a97e..0000000 --- a/src/main/java/frc/robot/util/MathHelpers.java +++ 
/dev/null @@ -1,48 +0,0 @@ -// Copyright (c) 2025 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2024 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.util; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation2d; - -public class MathHelpers { - public static final Pose2d kPose2dZero = new Pose2d(); - - public static final Pose2d pose2dFromRotation(Rotation2d rotation) { - return new Pose2d(kTranslation2dZero, rotation); - } - - public static final Pose2d pose2dFromTranslation(Translation2d translation) { - return new Pose2d(translation, kRotation2dZero); - } - - public static final Rotation2d kRotation2dZero = new Rotation2d(); - public static final Rotation2d kRotation2dPi = Rotation2d.fromDegrees(180.0); - - public static final Translation2d kTranslation2dZero = new Translation2d(); - - public static final Transform2d kTransform2dZero = new Transform2d(); - - public static final Transform2d transform2dFromRotation(Rotation2d rotation) { - return new Transform2d(kTranslation2dZero, rotation); - } - - public static final Transform2d transform2dFromTranslation(Translation2d translation) { - return new Transform2d(translation, kRotation2dZero); - } -} diff --git a/src/main/java/frc/robot/util/RobotTime.java b/src/main/java/frc/robot/util/RobotTime.java deleted file mode 100644 index 1f4b48d..0000000 --- 
a/src/main/java/frc/robot/util/RobotTime.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) 2025 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2024 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.util; - -import org.littletonrobotics.junction.Logger; - -public class RobotTime { - public static double getTimestampSeconds() { - long micros = Logger.getTimestamp(); - return (double) micros * 1.0E-6; - } -} diff --git a/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java b/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java deleted file mode 100644 index 2647a0f..0000000 --- a/src/main/java/frc/robot/util/ServoMotorSubsystemConfig.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) 2025 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2024 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.util; - -import com.ctre.phoenix6.configs.TalonFXConfiguration; - -public class ServoMotorSubsystemConfig { - public String name = "UNNAMED"; - public RobotDeviceId talonCANID; - public TalonFXConfiguration fxConfig = new TalonFXConfiguration(); - - // Ratio of rotor to units for this talon. rotor * by this ratio should - // be the units. - // <1 is reduction - public double unitToRotorRatio = 1.0; - public double kMinPositionUnits = Double.NEGATIVE_INFINITY; - public double kMaxPositionUnits = Double.POSITIVE_INFINITY; - - // Moment of Inertia (KgMetersSquared) for sim - public double momentOfInertia = 0.5; -} diff --git a/src/main/java/frc/robot/util/TriConsumer.java b/src/main/java/frc/robot/util/TriConsumer.java deleted file mode 100644 index 67972db..0000000 --- a/src/main/java/frc/robot/util/TriConsumer.java +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) 2024 FRC 6995 -// https://github.com/frc6995 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -package frc.robot.util; - -@FunctionalInterface -public interface TriConsumer { - public void accept(T arg0, U arg1, V arg2); -} diff --git a/src/main/java/frc/robot/util/Util.java b/src/main/java/frc/robot/util/Util.java deleted file mode 100644 index 9b3c181..0000000 --- a/src/main/java/frc/robot/util/Util.java +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright (c) 2025 Az-FIRST -// http://github.com/AZ-First -// Copyright (c) 2024 FRC 254 -// https://github.com/team254 -// -// This program is free software; you can redistribute it and/or -// modify it under the terms of the GNU General Public License -// version 3 as published by the Free Software Foundation or -// available in the root directory of this project. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -package frc.robot.util; - -import static frc.robot.Constants.AprilTagConstants.*; - -import edu.wpi.first.math.geometry.Translation3d; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.IntSupplier; -import java.util.function.Supplier; - -/** Contains basic functions that are used often. */ -public class Util { - public static final double kEpsilon = 1e-12; - - /** Prevent this class from being instantiated. */ - private Util() {} - - /** Limits the given input to the given magnitude. 
*/ - public static double limit(double v, double maxMagnitude) { - return limit(v, -maxMagnitude, maxMagnitude); - } - - public static double limit(double v, double min, double max) { - return Math.min(max, Math.max(min, v)); - } - - public static int limit(int v, int min, int max) { - return Math.min(max, Math.max(min, v)); - } - - public static boolean inRange(double v, double maxMagnitude) { - return inRange(v, -maxMagnitude, maxMagnitude); - } - - /** Checks if the given input is within the range (min, max), both exclusive. */ - public static boolean inRange(double v, double min, double max) { - return v > min && v < max; - } - - public static double interpolate(double a, double b, double x) { - x = limit(x, 0.0, 1.0); - return a + (b - a) * x; - } - - public static String joinStrings(final String delim, final List strings) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < strings.size(); ++i) { - sb.append(strings.get(i).toString()); - if (i < strings.size() - 1) { - sb.append(delim); - } - } - return sb.toString(); - } - - public static boolean epsilonEquals(double a, double b, double epsilon) { - return (a - epsilon <= b) && (a + epsilon >= b); - } - - public static boolean epsilonEquals(double a, double b) { - return epsilonEquals(a, b, kEpsilon); - } - - public static boolean epsilonEquals(int a, int b, int epsilon) { - return (a - epsilon <= b) && (a + epsilon >= b); - } - - public static boolean allCloseTo(final List list, double value, double epsilon) { - boolean result = true; - for (Double value_in : list) { - result &= epsilonEquals(value_in, value, epsilon); - } - return result; - } - - public static double handleDeadband(double value, double deadband) { - deadband = Math.abs(deadband); - if (deadband == 1) { - return 0; - } - double scaledValue = (value + (value < 0 ? deadband : -deadband)) / (1 - deadband); - return (Math.abs(value) > Math.abs(deadband)) ? 
scaledValue : 0; - } - - public static Translation3d flipRedBlue(Translation3d original) { - return new Translation3d( - kFieldLengthMeters - original.getX(), original.getY(), original.getZ()); - } - - public static Supplier memoizeByIteration(IntSupplier iteration, Supplier delegate) { - AtomicReference value = new AtomicReference<>(); - AtomicInteger last_iteration = new AtomicInteger(-1); - return () -> { - int last = last_iteration.get(); - int now = iteration.getAsInt(); - if (last != now) { - value.updateAndGet((cur) -> null); - } - T val = value.get(); - if (val == null) { - val = value.updateAndGet(cur -> cur == null ? delegate.get() : cur); - last_iteration.set(now); - } - return val; - }; - } -}