From 4ef80ce8f8f3d943bd663782b39559e5a5ae0a0b Mon Sep 17 00:00:00 2001 From: Benjamin Hall Date: Fri, 10 Jan 2025 03:28:07 -0500 Subject: [PATCH] Improve example Limelight integration --- .../src/main/cpp/Robot.cpp | 9 +- .../main/java/frc/robot/LimelightHelpers.java | 448 +++++++++++++++--- .../src/main/java/frc/robot/Robot.java | 10 +- .../main/java/frc/robot/LimelightHelpers.java | 448 +++++++++++++++--- .../src/main/java/frc/robot/Robot.java | 10 +- 5 files changed, 797 insertions(+), 128 deletions(-) diff --git a/cpp/SwerveWithPathPlanner/src/main/cpp/Robot.cpp b/cpp/SwerveWithPathPlanner/src/main/cpp/Robot.cpp index 716d9de2..f319acfc 100644 --- a/cpp/SwerveWithPathPlanner/src/main/cpp/Robot.cpp +++ b/cpp/SwerveWithPathPlanner/src/main/cpp/Robot.cpp @@ -21,8 +21,13 @@ void Robot::RobotPeriodic() { * of how to use vision should be tuned per-robot and to the team's specification. */ if (kUseLimelight) { - auto llMeasurement = LimelightHelpers::getBotPoseEstimate_wpiBlue("limelight"); - if (llMeasurement) { + auto const driveState = m_container.drivetrain.GetState(); + auto const heading = driveState.Pose.Rotation().Degrees(); + auto const omega = driveState.Speeds.omega; + + LimelightHelpers::SetRobotOrientation("limelight", heading.value(), 0, 0, 0, 0, 0); + auto llMeasurement = LimelightHelpers::getBotPoseEstimate_wpiBlue_MegaTag2("limelight"); + if (llMeasurement && llMeasurement->tagCount > 0 && omega < 2_tps) { m_container.drivetrain.AddVisionMeasurement(llMeasurement->pose, utils::FPGAToCurrentTime(llMeasurement->timestampSeconds)); } } diff --git a/java/SwerveWithChoreo/src/main/java/frc/robot/LimelightHelpers.java b/java/SwerveWithChoreo/src/main/java/frc/robot/LimelightHelpers.java index 854e6f26..dcd1c265 100644 --- a/java/SwerveWithChoreo/src/main/java/frc/robot/LimelightHelpers.java +++ b/java/SwerveWithChoreo/src/main/java/frc/robot/LimelightHelpers.java @@ -1,14 +1,28 @@ -//LimelightHelpers v1.9 (REQUIRES 2024.9.1) +//LimelightHelpers v1.10 
(REQUIRES LLOS 2024.9.1 OR LATER) package frc.robot; +import edu.wpi.first.networktables.DoubleArrayEntry; +import edu.wpi.first.networktables.NetworkTable; +import edu.wpi.first.networktables.NetworkTableEntry; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.networktables.TimestampedDoubleArray; +import frc.robot.LimelightHelpers.LimelightResults; +import frc.robot.LimelightHelpers.PoseEstimate; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Translation2d; + import java.io.IOException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonFormat.Shape; @@ -16,25 +30,19 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.concurrent.ConcurrentHashMap; -import edu.wpi.first.networktables.DoubleArrayEntry; -import edu.wpi.first.networktables.NetworkTable; -import edu.wpi.first.networktables.NetworkTableEntry; -import edu.wpi.first.networktables.NetworkTableInstance; -import edu.wpi.first.networktables.TimestampedDoubleArray; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Translation2d; - +/** + * LimelightHelpers provides static 
methods and classes for interfacing with Limelight vision cameras in FRC. + * This library supports all Limelight features including AprilTag tracking, Neural Networks, and standard color/retroreflective tracking. + */ public class LimelightHelpers { private static final Map doubleArrayEntries = new ConcurrentHashMap<>(); - + /** + * Represents a Color/Retroreflective Target Result extracted from JSON Output + */ public static class LimelightTarget_Retro { @JsonProperty("t6c_ts") @@ -99,16 +107,22 @@ public Pose2d getTargetPose_RobotSpace2D() @JsonProperty("tx") public double tx; + + @JsonProperty("ty") + public double ty; @JsonProperty("txp") public double tx_pixels; - @JsonProperty("ty") - public double ty; - @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -122,6 +136,9 @@ public LimelightTarget_Retro() { } + /** + * Represents an AprilTag/Fiducial Target Result extracted from JSON Output + */ public static class LimelightTarget_Fiducial { @JsonProperty("fID") @@ -193,15 +210,21 @@ public Pose2d getTargetPose_RobotSpace2D() @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -214,10 +237,58 @@ public LimelightTarget_Fiducial() { } } + /** + * Represents a Barcode Target Result extracted from JSON Output + */ public static class LimelightTarget_Barcode { + /** + * Barcode family type (e.g. "QR", "DataMatrix", etc.) 
+ */ + @JsonProperty("fam") + public String family; + + /** + * Gets the decoded data content of the barcode + */ + @JsonProperty("data") + public String data; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("typ") + public double ty_pixels; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + + @JsonProperty("ta") + public double ta; + + @JsonProperty("pts") + public double[][] corners; + + public LimelightTarget_Barcode() { + } + + public String getFamily() { + return family; + } } + /** + * Represents a Neural Classifier Pipeline Result extracted from JSON Output + */ public static class LimelightTarget_Classifier { @JsonProperty("class") @@ -248,6 +319,9 @@ public LimelightTarget_Classifier() { } } + /** + * Represents a Neural Detector Pipeline Result extracted from JSON Output + */ public static class LimelightTarget_Detector { @JsonProperty("class") @@ -265,19 +339,28 @@ public static class LimelightTarget_Detector { @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + public LimelightTarget_Detector() { } } + /** + * Limelight Results object, parsed from a Limelight's JSON results output. + */ public static class LimelightResults { public String error; @@ -382,6 +465,9 @@ public LimelightResults() { } + /** + * Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. 
+ */ public static class RawFiducial { public int id = 0; public double txnc = 0; @@ -403,6 +489,9 @@ public RawFiducial(int id, double txnc, double tync, double ta, double distToCam } } + /** + * Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. + */ public static class RawDetection { public int classId = 0; public double txnc = 0; @@ -437,7 +526,10 @@ public RawDetection(int classId, double txnc, double tync, double ta, this.corner3_Y = corner3_Y; } } - + + /** + * Represents a 3D Pose Estimate. + */ public static class PoseEstimate { public Pose2d pose; public double timestampSeconds; @@ -446,10 +538,12 @@ public static class PoseEstimate { public double tagSpan; public double avgTagDist; public double avgTagArea; + public RawFiducial[] rawFiducials; + public boolean isMegaTag2; /** - * Makes a PoseEstimate object with default values + * Instantiates a PoseEstimate object with default values */ public PoseEstimate() { this.pose = new Pose2d(); @@ -460,11 +554,12 @@ public PoseEstimate() { this.avgTagDist = 0; this.avgTagArea = 0; this.rawFiducials = new RawFiducial[]{}; + this.isMegaTag2 = false; } public PoseEstimate(Pose2d pose, double timestampSeconds, double latency, int tagCount, double tagSpan, double avgTagDist, - double avgTagArea, RawFiducial[] rawFiducials) { + double avgTagArea, RawFiducial[] rawFiducials, boolean isMegaTag2) { this.pose = pose; this.timestampSeconds = timestampSeconds; @@ -474,6 +569,7 @@ public PoseEstimate(Pose2d pose, double timestampSeconds, double latency, this.avgTagDist = avgTagDist; this.avgTagArea = avgTagArea; this.rawFiducials = rawFiducials; + this.isMegaTag2 = isMegaTag2; } } @@ -492,6 +588,12 @@ static final String sanitizeName(String name) { return name; } + /** + * Takes a 6-length array of pose data and converts it to a Pose3d object. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. 
+ * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose3d object representing the pose, or empty Pose3d if invalid data + */ public static Pose3d toPose3D(double[] inData){ if(inData.length < 6) { @@ -504,6 +606,13 @@ public static Pose3d toPose3D(double[] inData){ Units.degreesToRadians(inData[5]))); } + /** + * Takes a 6-length array of pose data and converts it to a Pose2d object. + * Uses only x, y, and yaw components, ignoring z, roll, and pitch. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. + * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose2d object representing the pose, or empty Pose2d if invalid data + */ public static Pose2d toPose2D(double[] inData){ if(inData.length < 6) { @@ -516,11 +625,12 @@ public static Pose2d toPose2D(double[] inData){ } /** - * Converts a Pose3d object to an array of doubles. + * Converts a Pose3d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. * - * @param pose The Pose3d object to convert. - * @return The array of doubles representing the pose. - **/ + * @param pose The Pose3d object to convert + * @return A 6-element array containing [x, y, z, roll, pitch, yaw] + */ public static double[] pose3dToArray(Pose3d pose) { double[] result = new double[6]; result[0] = pose.getTranslation().getX(); @@ -533,11 +643,13 @@ public static double[] pose3dToArray(Pose3d pose) { } /** - * Converts a Pose2d object to an array of doubles. + * Converts a Pose2d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. + * Note: z, roll, and pitch will be 0 since Pose2d only contains x, y, and yaw. * - * @param pose The Pose2d object to convert. - * @return The array of doubles representing the pose. 
- **/ + * @param pose The Pose2d object to convert + * @return A 6-element array containing [x, y, 0, 0, 0, yaw] + */ public static double[] pose2dToArray(Pose2d pose) { double[] result = new double[6]; result[0] = pose.getTranslation().getX(); @@ -557,7 +669,7 @@ private static double extractArrayEntry(double[] inData, int position){ return inData[position]; } - private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName) { + private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName, boolean isMegaTag2) { DoubleArrayEntry poseEntry = LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); TimestampedDoubleArray tsValue = poseEntry.getAtomic(); @@ -599,10 +711,16 @@ private static PoseEstimate getBotPoseEstimate(String limelightName, String entr } } - return new PoseEstimate(pose, adjustedTimestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials); + return new PoseEstimate(pose, adjustedTimestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials, isMegaTag2); } - private static RawFiducial[] getRawFiducials(String limelightName) { + /** + * Gets the latest raw fiducial/AprilTag detection results from NetworkTables. 
+ * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawFiducial objects containing detection details + */ + public static RawFiducial[] getRawFiducials(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawfiducials"); var rawFiducialArray = entry.getDoubleArray(new double[0]); int valsPerEntry = 7; @@ -629,10 +747,16 @@ private static RawFiducial[] getRawFiducials(String limelightName) { return rawFiducials; } + /** + * Gets the latest raw neural detector results from NetworkTables + * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawDetection objects containing detection details + */ public static RawDetection[] getRawDetections(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawdetections"); var rawDetectionArray = entry.getDoubleArray(new double[0]); - int valsPerEntry = 11; + int valsPerEntry = 12; if (rawDetectionArray.length % valsPerEntry != 0) { return new RawDetection[0]; } @@ -661,6 +785,13 @@ public static RawDetection[] getRawDetections(String limelightName) { return rawDetections; } + /** + * Prints detailed information about a PoseEstimate to standard output. + * Includes timestamp, latency, tag count, tag span, average tag distance, + * average tag area, and detailed information about each detected fiducial. + * + * @param pose The PoseEstimate object to print. If null, prints "No PoseEstimate available." 
+ */ public static void printPoseEstimate(PoseEstimate pose) { if (pose == null) { System.out.println("No PoseEstimate available."); @@ -674,6 +805,7 @@ public static void printPoseEstimate(PoseEstimate pose) { System.out.printf("Tag Span: %.2f meters%n", pose.tagSpan); System.out.printf("Average Tag Distance: %.2f meters%n", pose.avgTagDist); System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); + System.out.printf("Is MegaTag2: %b%n", pose.isMegaTag2); System.out.println(); if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { @@ -696,6 +828,10 @@ public static void printPoseEstimate(PoseEstimate pose) { } } + public static Boolean validPoseEstimate(PoseEstimate pose) { + return pose != null && pose.rawFiducials != null && pose.rawFiducials.length != 0; + } + public static NetworkTable getLimelightNTTable(String tableName) { return NetworkTableInstance.getDefault().getTable(sanitizeName(tableName)); } @@ -733,7 +869,6 @@ public static double[] getLimelightNTDoubleArray(String tableName, String entryN } - public static String getLimelightNTString(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getString(""); } @@ -757,23 +892,75 @@ public static URL getLimelightURLString(String tableName, String request) { ///// ///// + /** + * Does the Limelight have a valid target? + * @param limelightName Name of the Limelight camera ("" for default) + * @return True if a valid target is present, false otherwise + */ + public static boolean getTV(String limelightName) { + return 1.0 == getLimelightNTDouble(limelightName, "tv"); + } + + /** + * Gets the horizontal offset from the crosshair to the target in degrees. 
+ * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ public static double getTX(String limelightName) { return getLimelightNTDouble(limelightName, "tx"); } + /** + * Gets the vertical offset from the crosshair to the target in degrees. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ public static double getTY(String limelightName) { return getLimelightNTDouble(limelightName, "ty"); } + /** + * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ + public static double getTXNC(String limelightName) { + return getLimelightNTDouble(limelightName, "txnc"); + } + + /** + * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ + public static double getTYNC(String limelightName) { + return getLimelightNTDouble(limelightName, "tync"); + } + + /** + * Gets the target area as a percentage of the image (0-100%). 
+ * @param limelightName Name of the Limelight camera ("" for default) + * @return Target area percentage (0-100) + */ public static double getTA(String limelightName) { return getLimelightNTDouble(limelightName, "ta"); } + /** + * T2D is an array that contains several targeting metrcis + * @param limelightName Name of the Limelight camera + * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, txnc, tync, ta, tid, targetClassIndexDetector, + * targetClassIndexClassifier, targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, targetVerticalExtentPixels, targetSkewDegrees] + */ public static double[] getT2DArray(String limelightName) { return getLimelightNTDoubleArray(limelightName, "t2d"); } - + /** + * Gets the number of targets currently detected. + * @param limelightName Name of the Limelight camera + * @return Number of detected targets + */ public static int getTargetCount(String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) @@ -783,6 +970,11 @@ public static int getTargetCount(String limelightName) { return 0; } + /** + * Gets the classifier class index from the currently running neural classifier pipeline + * @param limelightName Name of the Limelight camera + * @return Class index from classifier pipeline + */ public static int getClassifierClassIndex (String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) @@ -791,6 +983,12 @@ public static int getClassifierClassIndex (String limelightName) { } return 0; } + + /** + * Gets the detector class index from the primary result of the currently running neural detector pipeline. 
+ * @param limelightName Name of the Limelight camera + * @return Class index from detector pipeline + */ public static int getDetectorClassIndex (String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) @@ -799,31 +997,66 @@ public static int getDetectorClassIndex (String limelightName) { } return 0; } - + + /** + * Gets the current neural classifier result class name. + * @param limelightName Name of the Limelight camera + * @return Class name string from classifier pipeline + */ public static String getClassifierClass (String limelightName) { return getLimelightNTString(limelightName, "tcclass"); } + + /** + * Gets the primary neural detector result class name. + * @param limelightName Name of the Limelight camera + * @return Class name string from detector pipeline + */ public static String getDetectorClass (String limelightName) { return getLimelightNTString(limelightName, "tdclass"); } - + /** + * Gets the pipeline's processing latency contribution. + * @param limelightName Name of the Limelight camera + * @return Pipeline latency in milliseconds + */ public static double getLatency_Pipeline(String limelightName) { return getLimelightNTDouble(limelightName, "tl"); } + /** + * Gets the capture latency. + * @param limelightName Name of the Limelight camera + * @return Capture latency in milliseconds + */ public static double getLatency_Capture(String limelightName) { return getLimelightNTDouble(limelightName, "cl"); } + /** + * Gets the active pipeline index. + * @param limelightName Name of the Limelight camera + * @return Current pipeline index (0-9) + */ public static double getCurrentPipelineIndex(String limelightName) { return getLimelightNTDouble(limelightName, "getpipe"); } + /** + * Gets the current pipeline type. + * @param limelightName Name of the Limelight camera + * @return Pipeline type string (e.g. 
"retro", "apriltag", etc) + */ public static String getCurrentPipelineType(String limelightName) { return getLimelightNTString(limelightName, "getpipetype"); } + /** + * Gets the full JSON results dump. + * @param limelightName Name of the Limelight camera + * @return JSON string containing all current results + */ public static String getJSONDump(String limelightName) { return getLimelightNTString(limelightName, "json"); } @@ -913,36 +1146,71 @@ public static Pose3d getBotPose3d(String limelightName) { return toPose3D(poseArray); } + /** + * (Not Recommended) Gets the robot's 3D pose in the WPILib Red Alliance Coordinate System. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Red Alliance field space + */ public static Pose3d getBotPose3d_wpiRed(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); return toPose3D(poseArray); } + /** + * (Recommended) Gets the robot's 3D pose in the WPILib Blue Alliance Coordinate System. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Blue Alliance field space + */ public static Pose3d getBotPose3d_wpiBlue(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); return toPose3D(poseArray); } + /** + * Gets the robot's 3D pose with respect to the currently tracked target's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation relative to the target + */ public static Pose3d getBotPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the currently tracked target's coordinate system. 
+ * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the target + */ public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the camera's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the camera + */ public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the robot's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the robot + */ public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the robot's coordinate system. 
+ * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the robot + */ public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); return toPose3D(poseArray); @@ -962,25 +1230,24 @@ public static Pose2d getBotPose2d_wpiBlue(String limelightName) { } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when you are on the BLUE - * alliance + * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. * * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_wpiblue", false); } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when you are on the BLUE - * alliance + * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. + * Make sure you are calling setRobotOrientation() before calling this method. 
* * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue", true); } /** @@ -1004,7 +1271,7 @@ public static Pose2d getBotPose2d_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpired"); + return getBotPoseEstimate(limelightName, "botpose_wpired", false); } /** @@ -1014,7 +1281,7 @@ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpired"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpired", true); } /** @@ -1030,10 +1297,8 @@ public static Pose2d getBotPose2d(String limelightName) { return toPose2D(result); } + - public static boolean getTV(String limelightName) { - return 1.0 == getLimelightNTDouble(limelightName, "tv"); - } ///// ///// @@ -1048,8 +1313,8 @@ public static void setPriorityTagID(String limelightName, int ID) { } /** - * The LEDs will be controlled by Limelight pipeline settings, and not by robot - * code. + * Sets LED mode to be controlled by the current pipeline. + * @param limelightName Name of the Limelight camera */ public static void setLEDMode_PipelineControl(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 0); @@ -1067,22 +1332,38 @@ public static void setLEDMode_ForceOn(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 3); } + /** + * Enables standard side-by-side stream mode. 
+ * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_Standard(String limelightName) { setLimelightNTDouble(limelightName, "stream", 0); } + /** + * Enables Picture-in-Picture mode with secondary stream in the corner. + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPMain(String limelightName) { setLimelightNTDouble(limelightName, "stream", 1); } + /** + * Enables Picture-in-Picture mode with primary stream in the corner. + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPSecondary(String limelightName) { setLimelightNTDouble(limelightName, "stream", 2); } /** - * Sets the crop window. The crop window in the UI must be completely open for - * dynamic cropping to work. + * Sets the crop window for the camera. The crop window in the UI must be completely open. + * @param limelightName Name of the Limelight camera + * @param cropXMin Minimum X value (-1 to 1) + * @param cropXMax Maximum X value (-1 to 1) + * @param cropYMin Minimum Y value (-1 to 1) + * @param cropYMax Maximum Y value (-1 to 1) */ public static void setCropWindow(String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { double[] entries = new double[4]; @@ -1104,6 +1385,17 @@ public static void setFiducial3DOffset(String limelightName, double offsetX, dou setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); } + /** + * Sets robot orientation values used by MegaTag2 localization algorithm. + * + * @param limelightName Name/identifier of the Limelight + * @param yaw Robot yaw in degrees. 
0 = robot facing red alliance wall in FRC + * @param yawRate (Unnecessary) Angular velocity of robot yaw in degrees per second + * @param pitch (Unnecessary) Robot pitch in degrees + * @param pitchRate (Unnecessary) Angular velocity of robot pitch in degrees per second + * @param roll (Unnecessary) Robot roll in degrees + * @param rollRate (Unnecessary) Angular velocity of robot roll in degrees per second + */ public static void SetRobotOrientation(String limelightName, double yaw, double yawRate, double pitch, double pitchRate, double roll, double rollRate) { @@ -1134,7 +1426,15 @@ private static void SetRobotOrientation_INTERNAL(String limelightName, double ya } } - + /** + * Sets the 3D point-of-interest offset for the current fiducial pipeline. + * https://docs.limelightvision.io/docs/docs-limelight/pipeline-apriltag/apriltag-3d#point-of-interest-tracking + * + * @param limelightName Name/identifier of the Limelight + * @param x X offset in meters + * @param y Y offset in meters + * @param z Z offset in meters + */ public static void SetFidcuial3DOffset(String limelightName, double x, double y, double z) { @@ -1145,6 +1445,13 @@ public static void SetFidcuial3DOffset(String limelightName, double x, double y, setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); } + /** + * Overrides the valid AprilTag IDs that will be used for localization. + * Tags not in this list will be ignored for robot pose estimation. 
+ * + * @param limelightName Name/identifier of the Limelight + * @param validIDs Array of valid AprilTag IDs to track + */ public static void SetFiducialIDFiltersOverride(String limelightName, int[] validIDs) { double[] validIDsDouble = new double[validIDs.length]; for (int i = 0; i < validIDs.length; i++) { @@ -1153,6 +1460,13 @@ public static void SetFiducialIDFiltersOverride(String limelightName, int[] vali setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); } + /** + * Sets the downscaling factor for AprilTag detection. + * Increasing downscale can improve performance at the cost of potentially reduced detection range. + * + * @param limelightName Name/identifier of the Limelight + * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set to 0 for pipeline control. + */ public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { int d = 0; // pipeline @@ -1179,6 +1493,16 @@ public static void SetFiducialDownscalingOverride(String limelightName, float do setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); } + /** + * Sets the camera pose relative to the robot. + * @param limelightName Name of the Limelight camera + * @param forward Forward offset in meters + * @param side Side offset in meters + * @param up Up offset in meters + * @param roll Roll angle in degrees + * @param pitch Pitch angle in degrees + * @param yaw Yaw angle in degrees + */ public static void setCameraPose_RobotSpace(String limelightName, double forward, double side, double up, double roll, double pitch, double yaw) { double[] entries = new double[6]; entries[0] = forward; @@ -1235,7 +1559,9 @@ private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) } /** - * Parses Limelight's JSON results dump into a LimelightResults Object + * Gets the latest JSON results output and returns a LimelightResults object. 
+ * @param limelightName Name of the Limelight camera + * @return LimelightResults object containing all current target data */ public static LimelightResults getLatestResults(String limelightName) { diff --git a/java/SwerveWithChoreo/src/main/java/frc/robot/Robot.java b/java/SwerveWithChoreo/src/main/java/frc/robot/Robot.java index fd9601c9..2be3db82 100644 --- a/java/SwerveWithChoreo/src/main/java/frc/robot/Robot.java +++ b/java/SwerveWithChoreo/src/main/java/frc/robot/Robot.java @@ -6,6 +6,7 @@ import com.ctre.phoenix6.Utils; +import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.TimedRobot; import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.CommandScheduler; @@ -34,8 +35,13 @@ public void robotPeriodic() { * of how to use vision should be tuned per-robot and to the team's specification. */ if (kUseLimelight) { - var llMeasurement = LimelightHelpers.getBotPoseEstimate_wpiBlue("limelight"); - if (llMeasurement != null) { + var driveState = m_robotContainer.drivetrain.getState(); + double headingDeg = driveState.Pose.getRotation().getDegrees(); + double omegaRps = Units.radiansToRotations(driveState.Speeds.omegaRadiansPerSecond); + + LimelightHelpers.SetRobotOrientation("limelight", headingDeg, 0, 0, 0, 0, 0); + var llMeasurement = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2("limelight"); + if (llMeasurement != null && llMeasurement.tagCount > 0 && omegaRps < 2.0) { m_robotContainer.drivetrain.addVisionMeasurement(llMeasurement.pose, Utils.fpgaToCurrentTime(llMeasurement.timestampSeconds)); } } diff --git a/java/SwerveWithPathPlanner/src/main/java/frc/robot/LimelightHelpers.java b/java/SwerveWithPathPlanner/src/main/java/frc/robot/LimelightHelpers.java index 854e6f26..dcd1c265 100644 --- a/java/SwerveWithPathPlanner/src/main/java/frc/robot/LimelightHelpers.java +++ b/java/SwerveWithPathPlanner/src/main/java/frc/robot/LimelightHelpers.java @@ -1,14 +1,28 @@ -//LimelightHelpers v1.9 (REQUIRES 2024.9.1) 
+//LimelightHelpers v1.10 (REQUIRES LLOS 2024.9.1 OR LATER) package frc.robot; +import edu.wpi.first.networktables.DoubleArrayEntry; +import edu.wpi.first.networktables.NetworkTable; +import edu.wpi.first.networktables.NetworkTableEntry; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.networktables.TimestampedDoubleArray; +import frc.robot.LimelightHelpers.LimelightResults; +import frc.robot.LimelightHelpers.PoseEstimate; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Translation2d; + import java.io.IOException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonFormat.Shape; @@ -16,25 +30,19 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.concurrent.ConcurrentHashMap; -import edu.wpi.first.networktables.DoubleArrayEntry; -import edu.wpi.first.networktables.NetworkTable; -import edu.wpi.first.networktables.NetworkTableEntry; -import edu.wpi.first.networktables.NetworkTableInstance; -import edu.wpi.first.networktables.TimestampedDoubleArray; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Translation2d; - +/** + * 
LimelightHelpers provides static methods and classes for interfacing with Limelight vision cameras in FRC. + * This library supports all Limelight features including AprilTag tracking, Neural Networks, and standard color/retroreflective tracking. + */ public class LimelightHelpers { private static final Map doubleArrayEntries = new ConcurrentHashMap<>(); - + /** + * Represents a Color/Retroreflective Target Result extracted from JSON Output + */ public static class LimelightTarget_Retro { @JsonProperty("t6c_ts") @@ -99,16 +107,22 @@ public Pose2d getTargetPose_RobotSpace2D() @JsonProperty("tx") public double tx; + + @JsonProperty("ty") + public double ty; @JsonProperty("txp") public double tx_pixels; - @JsonProperty("ty") - public double ty; - @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -122,6 +136,9 @@ public LimelightTarget_Retro() { } + /** + * Represents an AprilTag/Fiducial Target Result extracted from JSON Output + */ public static class LimelightTarget_Fiducial { @JsonProperty("fID") @@ -193,15 +210,21 @@ public Pose2d getTargetPose_RobotSpace2D() @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + @JsonProperty("ts") public double ts; @@ -214,10 +237,58 @@ public LimelightTarget_Fiducial() { } } + /** + * Represents a Barcode Target Result extracted from JSON Output + */ public static class LimelightTarget_Barcode { + /** + * Barcode family type (e.g. "QR", "DataMatrix", etc.) 
+ */ + @JsonProperty("fam") + public String family; + + /** + * Gets the decoded data content of the barcode + */ + @JsonProperty("data") + public String data; + + @JsonProperty("txp") + public double tx_pixels; + + @JsonProperty("typ") + public double ty_pixels; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + + @JsonProperty("ta") + public double ta; + + @JsonProperty("pts") + public double[][] corners; + + public LimelightTarget_Barcode() { + } + + public String getFamily() { + return family; + } } + /** + * Represents a Neural Classifier Pipeline Result extracted from JSON Output + */ public static class LimelightTarget_Classifier { @JsonProperty("class") @@ -248,6 +319,9 @@ public LimelightTarget_Classifier() { } } + /** + * Represents a Neural Detector Pipeline Result extracted from JSON Output + */ public static class LimelightTarget_Detector { @JsonProperty("class") @@ -265,19 +339,28 @@ public static class LimelightTarget_Detector { @JsonProperty("tx") public double tx; - @JsonProperty("txp") - public double tx_pixels; - @JsonProperty("ty") public double ty; + @JsonProperty("txp") + public double tx_pixels; + @JsonProperty("typ") public double ty_pixels; + @JsonProperty("tx_nocross") + public double tx_nocrosshair; + + @JsonProperty("ty_nocross") + public double ty_nocrosshair; + public LimelightTarget_Detector() { } } + /** + * Limelight Results object, parsed from a Limelight's JSON results output. + */ public static class LimelightResults { public String error; @@ -382,6 +465,9 @@ public LimelightResults() { } + /** + * Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. 
+ */ public static class RawFiducial { public int id = 0; public double txnc = 0; @@ -403,6 +489,9 @@ public RawFiducial(int id, double txnc, double tync, double ta, double distToCam } } + /** + * Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. + */ public static class RawDetection { public int classId = 0; public double txnc = 0; @@ -437,7 +526,10 @@ public RawDetection(int classId, double txnc, double tync, double ta, this.corner3_Y = corner3_Y; } } - + + /** + * Represents a 3D Pose Estimate. + */ public static class PoseEstimate { public Pose2d pose; public double timestampSeconds; @@ -446,10 +538,12 @@ public static class PoseEstimate { public double tagSpan; public double avgTagDist; public double avgTagArea; + public RawFiducial[] rawFiducials; + public boolean isMegaTag2; /** - * Makes a PoseEstimate object with default values + * Instantiates a PoseEstimate object with default values */ public PoseEstimate() { this.pose = new Pose2d(); @@ -460,11 +554,12 @@ public PoseEstimate() { this.avgTagDist = 0; this.avgTagArea = 0; this.rawFiducials = new RawFiducial[]{}; + this.isMegaTag2 = false; } public PoseEstimate(Pose2d pose, double timestampSeconds, double latency, int tagCount, double tagSpan, double avgTagDist, - double avgTagArea, RawFiducial[] rawFiducials) { + double avgTagArea, RawFiducial[] rawFiducials, boolean isMegaTag2) { this.pose = pose; this.timestampSeconds = timestampSeconds; @@ -474,6 +569,7 @@ public PoseEstimate(Pose2d pose, double timestampSeconds, double latency, this.avgTagDist = avgTagDist; this.avgTagArea = avgTagArea; this.rawFiducials = rawFiducials; + this.isMegaTag2 = isMegaTag2; } } @@ -492,6 +588,12 @@ static final String sanitizeName(String name) { return name; } + /** + * Takes a 6-length array of pose data and converts it to a Pose3d object. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. 
+ * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose3d object representing the pose, or empty Pose3d if invalid data + */ public static Pose3d toPose3D(double[] inData){ if(inData.length < 6) { @@ -504,6 +606,13 @@ public static Pose3d toPose3D(double[] inData){ Units.degreesToRadians(inData[5]))); } + /** + * Takes a 6-length array of pose data and converts it to a Pose2d object. + * Uses only x, y, and yaw components, ignoring z, roll, and pitch. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. + * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] + * @return Pose2d object representing the pose, or empty Pose2d if invalid data + */ public static Pose2d toPose2D(double[] inData){ if(inData.length < 6) { @@ -516,11 +625,12 @@ public static Pose2d toPose2D(double[] inData){ } /** - * Converts a Pose3d object to an array of doubles. + * Converts a Pose3d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. * - * @param pose The Pose3d object to convert. - * @return The array of doubles representing the pose. - **/ + * @param pose The Pose3d object to convert + * @return A 6-element array containing [x, y, z, roll, pitch, yaw] + */ public static double[] pose3dToArray(Pose3d pose) { double[] result = new double[6]; result[0] = pose.getTranslation().getX(); @@ -533,11 +643,13 @@ public static double[] pose3dToArray(Pose3d pose) { } /** - * Converts a Pose2d object to an array of doubles. + * Converts a Pose2d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. + * Translation components are in meters, rotation components are in degrees. + * Note: z, roll, and pitch will be 0 since Pose2d only contains x, y, and yaw. * - * @param pose The Pose2d object to convert. - * @return The array of doubles representing the pose. 
- **/ + * @param pose The Pose2d object to convert + * @return A 6-element array containing [x, y, 0, 0, 0, yaw] + */ public static double[] pose2dToArray(Pose2d pose) { double[] result = new double[6]; result[0] = pose.getTranslation().getX(); @@ -557,7 +669,7 @@ private static double extractArrayEntry(double[] inData, int position){ return inData[position]; } - private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName) { + private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName, boolean isMegaTag2) { DoubleArrayEntry poseEntry = LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); TimestampedDoubleArray tsValue = poseEntry.getAtomic(); @@ -599,10 +711,16 @@ private static PoseEstimate getBotPoseEstimate(String limelightName, String entr } } - return new PoseEstimate(pose, adjustedTimestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials); + return new PoseEstimate(pose, adjustedTimestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials, isMegaTag2); } - private static RawFiducial[] getRawFiducials(String limelightName) { + /** + * Gets the latest raw fiducial/AprilTag detection results from NetworkTables. 
+ * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawFiducial objects containing detection details + */ + public static RawFiducial[] getRawFiducials(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawfiducials"); var rawFiducialArray = entry.getDoubleArray(new double[0]); int valsPerEntry = 7; @@ -629,10 +747,16 @@ private static RawFiducial[] getRawFiducials(String limelightName) { return rawFiducials; } + /** + * Gets the latest raw neural detector results from NetworkTables + * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawDetection objects containing detection details + */ public static RawDetection[] getRawDetections(String limelightName) { var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawdetections"); var rawDetectionArray = entry.getDoubleArray(new double[0]); - int valsPerEntry = 11; + int valsPerEntry = 12; if (rawDetectionArray.length % valsPerEntry != 0) { return new RawDetection[0]; } @@ -661,6 +785,13 @@ public static RawDetection[] getRawDetections(String limelightName) { return rawDetections; } + /** + * Prints detailed information about a PoseEstimate to standard output. + * Includes timestamp, latency, tag count, tag span, average tag distance, + * average tag area, and detailed information about each detected fiducial. + * + * @param pose The PoseEstimate object to print. If null, prints "No PoseEstimate available." 
+ */ public static void printPoseEstimate(PoseEstimate pose) { if (pose == null) { System.out.println("No PoseEstimate available."); @@ -674,6 +805,7 @@ public static void printPoseEstimate(PoseEstimate pose) { System.out.printf("Tag Span: %.2f meters%n", pose.tagSpan); System.out.printf("Average Tag Distance: %.2f meters%n", pose.avgTagDist); System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); + System.out.printf("Is MegaTag2: %b%n", pose.isMegaTag2); System.out.println(); if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { @@ -696,6 +828,10 @@ public static void printPoseEstimate(PoseEstimate pose) { } } + public static Boolean validPoseEstimate(PoseEstimate pose) { + return pose != null && pose.rawFiducials != null && pose.rawFiducials.length != 0; + } + public static NetworkTable getLimelightNTTable(String tableName) { return NetworkTableInstance.getDefault().getTable(sanitizeName(tableName)); } @@ -733,7 +869,6 @@ public static double[] getLimelightNTDoubleArray(String tableName, String entryN } - public static String getLimelightNTString(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getString(""); } @@ -757,23 +892,75 @@ public static URL getLimelightURLString(String tableName, String request) { ///// ///// + /** + * Does the Limelight have a valid target? + * @param limelightName Name of the Limelight camera ("" for default) + * @return True if a valid target is present, false otherwise + */ + public static boolean getTV(String limelightName) { + return 1.0 == getLimelightNTDouble(limelightName, "tv"); + } + + /** + * Gets the horizontal offset from the crosshair to the target in degrees. 
+ * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ public static double getTX(String limelightName) { return getLimelightNTDouble(limelightName, "tx"); } + /** + * Gets the vertical offset from the crosshair to the target in degrees. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ public static double getTY(String limelightName) { return getLimelightNTDouble(limelightName, "ty"); } + /** + * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Horizontal offset angle in degrees + */ + public static double getTXNC(String limelightName) { + return getLimelightNTDouble(limelightName, "txnc"); + } + + /** + * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. + * @param limelightName Name of the Limelight camera ("" for default) + * @return Vertical offset angle in degrees + */ + public static double getTYNC(String limelightName) { + return getLimelightNTDouble(limelightName, "tync"); + } + + /** + * Gets the target area as a percentage of the image (0-100%). 
+ * @param limelightName Name of the Limelight camera ("" for default) + * @return Target area percentage (0-100) + */ public static double getTA(String limelightName) { return getLimelightNTDouble(limelightName, "ta"); } + /** + * T2D is an array that contains several targeting metrics + * @param limelightName Name of the Limelight camera + * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, txnc, tync, ta, tid, targetClassIndexDetector, + * targetClassIndexClassifier, targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, targetVerticalExtentPixels, targetSkewDegrees] + */ public static double[] getT2DArray(String limelightName) { return getLimelightNTDoubleArray(limelightName, "t2d"); } - + /** + * Gets the number of targets currently detected. + * @param limelightName Name of the Limelight camera + * @return Number of detected targets + */ public static int getTargetCount(String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) @@ -783,6 +970,11 @@ public static int getTargetCount(String limelightName) { { return (int)t2d[1]; } return 0; } + /** + * Gets the classifier class index from the currently running neural classifier pipeline + * @param limelightName Name of the Limelight camera + * @return Class index from classifier pipeline + */ public static int getClassifierClassIndex (String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) { return (int)t2d[10]; } return 0; } + + /** + * Gets the detector class index from the primary result of the currently running neural detector pipeline. 
+ * @param limelightName Name of the Limelight camera + * @return Class index from detector pipeline + */ public static int getDetectorClassIndex (String limelightName) { double[] t2d = getT2DArray(limelightName); if(t2d.length == 17) @@ -799,31 +997,66 @@ public static int getDetectorClassIndex (String limelightName) { } return 0; } - + + /** + * Gets the current neural classifier result class name. + * @param limelightName Name of the Limelight camera + * @return Class name string from classifier pipeline + */ public static String getClassifierClass (String limelightName) { return getLimelightNTString(limelightName, "tcclass"); } + + /** + * Gets the primary neural detector result class name. + * @param limelightName Name of the Limelight camera + * @return Class name string from detector pipeline + */ public static String getDetectorClass (String limelightName) { return getLimelightNTString(limelightName, "tdclass"); } - + /** + * Gets the pipeline's processing latency contribution. + * @param limelightName Name of the Limelight camera + * @return Pipeline latency in milliseconds + */ public static double getLatency_Pipeline(String limelightName) { return getLimelightNTDouble(limelightName, "tl"); } + /** + * Gets the capture latency. + * @param limelightName Name of the Limelight camera + * @return Capture latency in milliseconds + */ public static double getLatency_Capture(String limelightName) { return getLimelightNTDouble(limelightName, "cl"); } + /** + * Gets the active pipeline index. + * @param limelightName Name of the Limelight camera + * @return Current pipeline index (0-9) + */ public static double getCurrentPipelineIndex(String limelightName) { return getLimelightNTDouble(limelightName, "getpipe"); } + /** + * Gets the current pipeline type. + * @param limelightName Name of the Limelight camera + * @return Pipeline type string (e.g. 
"retro", "apriltag", etc) + */ public static String getCurrentPipelineType(String limelightName) { return getLimelightNTString(limelightName, "getpipetype"); } + /** + * Gets the full JSON results dump. + * @param limelightName Name of the Limelight camera + * @return JSON string containing all current results + */ public static String getJSONDump(String limelightName) { return getLimelightNTString(limelightName, "json"); } @@ -913,36 +1146,71 @@ public static Pose3d getBotPose3d(String limelightName) { return toPose3D(poseArray); } + /** + * (Not Recommended) Gets the robot's 3D pose in the WPILib Red Alliance Coordinate System. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Red Alliance field space + */ public static Pose3d getBotPose3d_wpiRed(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); return toPose3D(poseArray); } + /** + * (Recommended) Gets the robot's 3D pose in the WPILib Blue Alliance Coordinate System. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation in Blue Alliance field space + */ public static Pose3d getBotPose3d_wpiBlue(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); return toPose3D(poseArray); } + /** + * Gets the robot's 3D pose with respect to the currently tracked target's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the robot's position and orientation relative to the target + */ public static Pose3d getBotPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the currently tracked target's coordinate system. 
+ * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the target + */ public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the camera's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the camera + */ public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); return toPose3D(poseArray); } + /** + * Gets the target's 3D pose with respect to the robot's coordinate system. + * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the target's position and orientation relative to the robot + */ public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); return toPose3D(poseArray); } + /** + * Gets the camera's 3D pose with respect to the robot's coordinate system. 
+ * @param limelightName Name/identifier of the Limelight + * @return Pose3d object representing the camera's position and orientation relative to the robot + */ public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); return toPose3D(poseArray); } @@ -962,25 +1230,24 @@ public static Pose2d getBotPose2d_wpiBlue(String limelightName) { } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when you are on the BLUE - * alliance + * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. * * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_wpiblue", false); } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when you are on the BLUE - * alliance + * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. + * Make sure you are calling SetRobotOrientation() before calling this method. 
* * @param limelightName * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue", true); } /** @@ -1004,7 +1271,7 @@ public static Pose2d getBotPose2d_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_wpired"); + return getBotPoseEstimate(limelightName, "botpose_wpired", false); } /** @@ -1014,7 +1281,7 @@ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { * @return */ public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightName) { - return getBotPoseEstimate(limelightName, "botpose_orb_wpired"); + return getBotPoseEstimate(limelightName, "botpose_orb_wpired", true); } /** @@ -1030,10 +1297,8 @@ public static Pose2d getBotPose2d(String limelightName) { return toPose2D(result); } + - public static boolean getTV(String limelightName) { - return 1.0 == getLimelightNTDouble(limelightName, "tv"); - } ///// ///// @@ -1048,8 +1313,8 @@ public static void setPriorityTagID(String limelightName, int ID) { } /** - * The LEDs will be controlled by Limelight pipeline settings, and not by robot - * code. + * Sets LED mode to be controlled by the current pipeline. + * @param limelightName Name of the Limelight camera */ public static void setLEDMode_PipelineControl(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 0); @@ -1067,22 +1332,38 @@ public static void setLEDMode_ForceOn(String limelightName) { setLimelightNTDouble(limelightName, "ledMode", 3); } + /** + * Enables standard side-by-side stream mode. 
+ * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_Standard(String limelightName) { setLimelightNTDouble(limelightName, "stream", 0); } + /** + * Enables Picture-in-Picture mode with secondary stream in the corner. + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPMain(String limelightName) { setLimelightNTDouble(limelightName, "stream", 1); } + /** + * Enables Picture-in-Picture mode with primary stream in the corner. + * @param limelightName Name of the Limelight camera + */ public static void setStreamMode_PiPSecondary(String limelightName) { setLimelightNTDouble(limelightName, "stream", 2); } /** - * Sets the crop window. The crop window in the UI must be completely open for - * dynamic cropping to work. + * Sets the crop window for the camera. The crop window in the UI must be completely open. + * @param limelightName Name of the Limelight camera + * @param cropXMin Minimum X value (-1 to 1) + * @param cropXMax Maximum X value (-1 to 1) + * @param cropYMin Minimum Y value (-1 to 1) + * @param cropYMax Maximum Y value (-1 to 1) */ public static void setCropWindow(String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { double[] entries = new double[4]; @@ -1104,6 +1385,17 @@ public static void setFiducial3DOffset(String limelightName, double offsetX, dou setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); } + /** + * Sets robot orientation values used by MegaTag2 localization algorithm. + * + * @param limelightName Name/identifier of the Limelight + * @param yaw Robot yaw in degrees. 
0 = robot facing red alliance wall in FRC + * @param yawRate (Unnecessary) Angular velocity of robot yaw in degrees per second + * @param pitch (Unnecessary) Robot pitch in degrees + * @param pitchRate (Unnecessary) Angular velocity of robot pitch in degrees per second + * @param roll (Unnecessary) Robot roll in degrees + * @param rollRate (Unnecessary) Angular velocity of robot roll in degrees per second + */ public static void SetRobotOrientation(String limelightName, double yaw, double yawRate, double pitch, double pitchRate, double roll, double rollRate) { @@ -1134,7 +1426,15 @@ private static void SetRobotOrientation_INTERNAL(String limelightName, double ya } } - + /** + * Sets the 3D point-of-interest offset for the current fiducial pipeline. + * https://docs.limelightvision.io/docs/docs-limelight/pipeline-apriltag/apriltag-3d#point-of-interest-tracking + * + * @param limelightName Name/identifier of the Limelight + * @param x X offset in meters + * @param y Y offset in meters + * @param z Z offset in meters + */ public static void SetFidcuial3DOffset(String limelightName, double x, double y, double z) { @@ -1145,6 +1445,13 @@ public static void SetFidcuial3DOffset(String limelightName, double x, double y, setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); } + /** + * Overrides the valid AprilTag IDs that will be used for localization. + * Tags not in this list will be ignored for robot pose estimation. 
+ * + * @param limelightName Name/identifier of the Limelight + * @param validIDs Array of valid AprilTag IDs to track + */ public static void SetFiducialIDFiltersOverride(String limelightName, int[] validIDs) { double[] validIDsDouble = new double[validIDs.length]; for (int i = 0; i < validIDs.length; i++) { @@ -1153,6 +1460,13 @@ public static void SetFiducialIDFiltersOverride(String limelightName, int[] vali setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); } + /** + * Sets the downscaling factor for AprilTag detection. + * Increasing downscale can improve performance at the cost of potentially reduced detection range. + * + * @param limelightName Name/identifier of the Limelight + * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set to 0 for pipeline control. + */ public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { int d = 0; // pipeline @@ -1179,6 +1493,16 @@ public static void SetFiducialDownscalingOverride(String limelightName, float do setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); } + /** + * Sets the camera pose relative to the robot. + * @param limelightName Name of the Limelight camera + * @param forward Forward offset in meters + * @param side Side offset in meters + * @param up Up offset in meters + * @param roll Roll angle in degrees + * @param pitch Pitch angle in degrees + * @param yaw Yaw angle in degrees + */ public static void setCameraPose_RobotSpace(String limelightName, double forward, double side, double up, double roll, double pitch, double yaw) { double[] entries = new double[6]; entries[0] = forward; @@ -1235,7 +1559,9 @@ private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) } /** - * Parses Limelight's JSON results dump into a LimelightResults Object + * Gets the latest JSON results output and returns a LimelightResults object. 
+ * @param limelightName Name of the Limelight camera + * @return LimelightResults object containing all current target data */ public static LimelightResults getLatestResults(String limelightName) { diff --git a/java/SwerveWithPathPlanner/src/main/java/frc/robot/Robot.java b/java/SwerveWithPathPlanner/src/main/java/frc/robot/Robot.java index fd9601c9..2be3db82 100644 --- a/java/SwerveWithPathPlanner/src/main/java/frc/robot/Robot.java +++ b/java/SwerveWithPathPlanner/src/main/java/frc/robot/Robot.java @@ -6,6 +6,7 @@ import com.ctre.phoenix6.Utils; +import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.TimedRobot; import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.CommandScheduler; @@ -34,8 +35,13 @@ public void robotPeriodic() { * of how to use vision should be tuned per-robot and to the team's specification. */ if (kUseLimelight) { - var llMeasurement = LimelightHelpers.getBotPoseEstimate_wpiBlue("limelight"); - if (llMeasurement != null) { + var driveState = m_robotContainer.drivetrain.getState(); + double headingDeg = driveState.Pose.getRotation().getDegrees(); + double omegaRps = Units.radiansToRotations(driveState.Speeds.omegaRadiansPerSecond); + + LimelightHelpers.SetRobotOrientation("limelight", headingDeg, 0, 0, 0, 0, 0); + var llMeasurement = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2("limelight"); + if (llMeasurement != null && llMeasurement.tagCount > 0 && omegaRps < 2.0) { m_robotContainer.drivetrain.addVisionMeasurement(llMeasurement.pose, Utils.fpgaToCurrentTime(llMeasurement.timestampSeconds)); } }