Quick code post
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.SimpleRobot;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.camera.AxisCameraException;
import edu.wpi.first.wpilibj.image.*;
import edu.wpi.first.wpilibj.image.NIVision.MeasurementType;
import edu.wpi.first.wpilibj.image.NIVision.Rect;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* Sample program that uses NIVision to find rectangles in the scene that are illuminated
* by an LED ring light (similar to the model from FIRSTChoice). The camera sensitivity
* is set very low so that only light sources show up and any distracting parts of the
* image are removed.
*
* The CriteriaCollection is the set of criteria used to filter the set of particles that
* are detected. In this example, only particles with an area between 500 and 65535 pixels
* are kept (the IMAQ_MT_AREA criterion added in robotInit()).
*
* The algorithm first does a color threshold operation that only takes objects in the
* scene that have a bright green color component. Then a convex hull operation fills
* all the rectangle outlines (even the partially occluded ones). Then a small object filter
* removes small particles that might be caused by green reflection scattered from other
* parts of the scene. Finally all particles are scored on rectangularity, aspect ratio,
* and hollowness to determine if they match the target.
*
* Look in the VisionImages directory inside the project for the sample images, as well as
* the NI Vision Assistant file that contains the vision command chain (open it with the
* Vision Assistant).
*/
public class VisionSampleProject2013 extends SimpleRobot {
final int XMAXSIZE = 24;
final int XMINSIZE = 24;
final int YMAXSIZE = 24;
final int YMINSIZE = 48;
final double xMax[] = {1, 1, 1, 1, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, 1, 1, 1, 1};
final double xMin[] = {.4, .6, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, .1, 0.6, 0};
final double yMax[] = {1, 1, 1, 1, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, .5, 1, 1, 1, 1};
final double yMin[] = {.4, .6, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05,
.05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05, .05,
.05, .05, .6, 0};
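// The xMax/xMin/yMax/yMin arrays above are template profiles used by scoreXEdge() and scoreYEdge():
// each entry is the allowed range for the fraction of "on" pixels in one column (x) or row (y) of the
// particle's bounding box, sampled at XMAXSIZE/XMINSIZE/YMAXSIZE/YMINSIZE points. Values near 1 at the
// ends and small values in the middle describe a target with solid edges and a hollow center.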
final int RECTANGULARITY_LIMIT = 60;
final int ASPECT_RATIO_LIMIT = 75;
final int X_EDGE_LIMIT = 40;
final int Y_EDGE_LIMIT = 60;
final int X_IMAGE_RES = 320; //X Image resolution in pixels, should be 160, 320 or 640
final double VIEW_ANGLE = 43.5; //Axis 206 camera
// final double VIEW_ANGLE = 48; //Axis M1011 camera
Jaguar drive1, drive2;
AxisCamera camera; // the axis camera object (connected to the switch)
CriteriaCollection cc; // the criteria for doing the particle filter operation
int loopCount=0;
double prevX;
public class Scores {
double rectangularity;
double aspectRatioInner;
double aspectRatioOuter;
double xEdge;
double yEdge;
}
public void robotInit() {
println("init");
//camera = AxisCamera.getInstance(); // get an instance of the camera
cc = new CriteriaCollection(); // create the criteria for the particle filter
cc.addCriteria(MeasurementType.IMAQ_MT_AREA, 500, 65535, false);
camera = AxisCamera.getInstance();
camera.writeMaxFPS(30);
camera.writeResolution(AxisCamera.ResolutionT.k320x240);
//camera.writeCompression();
drive1 = new Jaguar(1); drive2 = new Jaguar(2);
SmartDashboard.putBoolean("WriteFile?", true);
}
public void autonomous() {
int minH = 75; int maxH = 125; int minS = 30; int maxS = 255; int minL = 100; int maxL = 255;
double P = 0.25; double I = 0.1; double maxI = 1; double in = 0; double stallPoint = 0; double tolerance = 0.1;
boolean writeFile;
while (isAutonomous() && isEnabled()) {
println("while", 1, 6);
try {
/**
* Do the image capture with the camera and apply the algorithm described above. This
* sample will either get images from the camera or from an image file stored in the top
* level directory in the flash memory on the cRIO. The file name in this case is "testImage.jpg"
*
*/
ColorImage image = null; // comment if using stored images
println("declared", 2);
try {
println("try", 3);
image = camera.getImage();
println("got", 3, 5);
} catch (AxisCameraException ex) {
ex.printStackTrace();
} catch (NIVisionException ex) {
ex.printStackTrace();
}
// If the camera image could not be retrieved, skip this pass of the loop: the NullPointerException
// that image.thresholdHSL() would otherwise throw is not caught by the NIVisionException handler below.
if (image == null) {
continue;
}
println("gotImage", 1, 12);
//ColorImage image; // next 2 lines read image from flash on cRIO
//image = new RGBImage("/testImage.jpg"); // get the sample image from the cRIO flash
//BinaryImage thresholdImage; // keep only red objects
minH = (int) SmartDashboard.getNumber("minH", minH);
maxH = (int) SmartDashboard.getNumber("maxH", maxH);
minS = (int) SmartDashboard.getNumber("minS", minS);
maxS = (int) SmartDashboard.getNumber("maxS", maxS);
minL = (int) SmartDashboard.getNumber("minL", minL);
maxL = (int) SmartDashboard.getNumber("maxL", maxL);
P = ((double) SmartDashboard.getNumber("P", P)) / 1000;
I = (double) SmartDashboard.getNumber("I", I);
maxI=(double) SmartDashboard.getNumber("maxI", maxI);
stallPoint =(double) SmartDashboard.getNumber("Stall Point", .19);
tolerance = (double) SmartDashboard.getNumber("Tolerance", 0.1);
writeFile = SmartDashboard.getBoolean("WriteFile?", false);
//D = (double) SmartDashboard.getNumber("D");
println(minH+"", 5);
BinaryImage thresholdImage = image.thresholdHSL(minH, maxH, minS, maxS, minL, maxL);
println("threshold", 4);
if (writeFile) thresholdImage.write("/threshold.bmp"); // gate the slow file write behind the "WriteFile?" dashboard flag, which was read above but otherwise unused
BinaryImage convexHullImage = thresholdImage.convexHull(false); // fill in occluded rectangles
//convexHullImage.write("/convexHull.bmp");
BinaryImage filteredImage = convexHullImage.particleFilter(cc); // filter out small particles
// try{ filteredImage.write("/filteredImage.bmp");}
// catch (NIVisionException ex){
// println(ex.getMessage(), 4);
// ex.printStackTrace();
// }
//iterate through each particle and score to see if it is a target
boolean foundHigh = false;
Scores scores[] = new Scores[filteredImage.getNumberParticles()];
SmartDashboard.putNumber("Count", ++loopCount);
SmartDashboard.putNumber("Particles", scores.length);
for (int i = 0; i < scores.length; i++) {
ParticleAnalysisReport report = filteredImage.getParticleAnalysisReport(i);
scores[i] = new Scores();
scores[i].rectangularity = scoreRectangularity(report);
scores[i].aspectRatioOuter = scoreAspectRatio(filteredImage,report, i, true);
scores[i].aspectRatioInner = scoreAspectRatio(filteredImage, report, i, false);
scores[i].xEdge = scoreXEdge(thresholdImage, report);
scores[i].yEdge = scoreYEdge(thresholdImage, report);
if(scoreCompare(scores[i], false)) // This particle is the high goal
{
System.out.println("particle: " + i + "is a High Goal centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
SmartDashboard.putString("High", "Found");
SmartDashboard.putNumber("hCenterX", report.center_mass_x_normalized);
SmartDashboard.putNumber("hCenterY", report.center_mass_y_normalized);
SmartDashboard.putNumber("DistW", report.boundingRectWidth);
SmartDashboard.putNumber("DistA", report.particleArea);
double dist = computeDistance(thresholdImage, report, i, false);
if (dist < 1403){
SmartDashboard.putNumber("hDistance", dist);
}
System.out.println("Distance: " + dist);
println("H"+report.center_mass_x_normalized+"x ", 6);
foundHigh = true;
// Align using PI
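// Reading of the proportional-integral alignment below: the error x is the target's normalized
// horizontal offset from the image center (roughly -1 to 1). The integral term "in" accumulates x and
// is reset whenever it and x have opposite signs, a simple anti-windup. The turn command P*x + I*in is
// clamped at 0.40; within +/-tolerance both the command and the integral are zeroed, otherwise
// stallPoint is added as a feed-forward (presumably to overcome drivetrain static friction). Opposite
// signs are written to the two drive Jaguars so the robot turns in place toward the target.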
double x = report.center_mass_x_normalized;
in += x; // integrated error
if ((in < 0 && x > 0) || (in > 0 && x < 0)){
in = 0;
}
// if (in > maxI){
// in = maxI;
// }
double PID = P * x + I * in;
if (PID > 0.40){
PID = 0.40;
}
if (x < tolerance && x > -tolerance){
PID = 0;
in = 0;
}else{
PID += (x < 0 ? -1.0 * stallPoint : stallPoint);
}
drive1.set(-1.0 * PID); drive2.set(PID);
SmartDashboard.putString("v", "in: "+in+" PID:"+PID);
} else if (scoreCompare(scores[i], true)) {
System.out.println("particle: " + i + "is a Middle Goal centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
System.out.println("Distance: " + computeDistance(thresholdImage, report, i, true));
println("M"+report.center_mass_x_normalized+"x", 5);
} else {
System.out.println("particle: " + i + "is not a goal centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
}
System.out.println("rect: " + scores[i].rectangularity + "ARinner: " + scores[i].aspectRatioInner);
System.out.println("ARouter: " + scores[i].aspectRatioOuter + "xEdge: " + scores[i].xEdge + "yEdge: " + scores[i].yEdge);
}
if (!foundHigh){
SmartDashboard.putString("High", "None");
drive1.set(0.0); drive2.set(0.0);
}
/**
* all images in Java must be freed after they are used since they are allocated out
* of C data structures. Not calling free() will cause the memory to accumulate over
* each pass of this loop.
*/
filteredImage.free();
convexHullImage.free();
thresholdImage.free();
image.free();
// } catch (AxisCameraException ex) { // this is needed if the camera.getImage() is called
// ex.printStackTrace();
} catch (NIVisionException ex) {
ex.printStackTrace();
}
}
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
while (isOperatorControl() && isEnabled()) {
Timer.delay(1);
}
}
/**
* Computes the estimated distance to a target using the height of the particle in the image. For more information and graphics
* showing the math behind this approach see the Vision Processing section of the ScreenStepsLive documentation.
*
* @param image The image to use for measuring the particle estimated rectangle
* @param report The Particle Analysis Report for the particle
* @param particleNumber The index of the particle within the image
* @param outer True if the particle should be treated as an outer target, false to treat it as a center target
* @return The estimated distance to the target in feet (the target height in inches is divided by 12 below)
*/
double computeDistance (BinaryImage image, ParticleAnalysisReport report, int particleNumber, boolean outer) throws NIVisionException {
double rectShort, height;
int targetHeight;
rectShort = NIVision.MeasureParticle(image.image, particleNumber, false, MeasurementType.IMAQ_MT_EQUIVALENT_RECT_SHORT_SIDE);
//using the smaller of the estimated rectangle short side and the bounding rectangle height results in better performance
//on skewed rectangles
height = Math.min(report.boundingRectHeight, rectShort);
targetHeight = outer ? 29 : 21;
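// Pinhole-camera similar triangles, assuming square pixels so the horizontal field of view sets the
// pixels-per-foot scale: the field of view at distance d is 2*d*tan(VIEW_ANGLE/2) wide, so
// d = targetHeight_ft * X_IMAGE_RES / (2 * height_px * tan(VIEW_ANGLE/2)), which is the return value below.
// Rough worked example (illustrative numbers, not from the sample): a 29 in outer target that appears
// 40 px tall in a 320 px wide image with a 43.5 degree view angle gives
// 320*29 / (40 * 12 * 2 * tan(21.75 deg)) ~= 24 ft.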
return X_IMAGE_RES * targetHeight / (height * 12 * 2 * Math.tan(VIEW_ANGLE*Math.PI/(180*2)));
}
/**
* Computes a score (0-100) comparing the aspect ratio to the ideal aspect ratio for the target. This method uses
* the equivalent rectangle sides to determine aspect ratio as it performs better as the target gets skewed by moving
* to the left or right. The equivalent rectangle is the rectangle with sides x and y where particle area= x*y
* and particle perimeter= 2x+2y
*
* @param image The image containing the particle to score, needed to perform additional measurements
* @param report The Particle Analysis Report for the particle, used for the width, height, and particle number
* @param outer Indicates whether the particle aspect ratio should be compared to the ratio for the inner target or the outer
* @return The aspect ratio score (0-100)
*/
public double scoreAspectRatio(BinaryImage image, ParticleAnalysisReport report, int particleNumber, boolean outer) throws NIVisionException
{
double rectLong, rectShort, aspectRatio, idealAspectRatio;
rectLong = NIVision.MeasureParticle(image.image, particleNumber, false, MeasurementType.IMAQ_MT_EQUIVALENT_RECT_LONG_SIDE);
rectShort = NIVision.MeasureParticle(image.image, particleNumber, false, MeasurementType.IMAQ_MT_EQUIVALENT_RECT_SHORT_SIDE);
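// Equivalent rectangle: NIVision reports sides x and y such that x*y equals the particle's area and
// 2x+2y equals its perimeter (x and y are the roots of t^2 - (perimeter/2)*t + area = 0), which is less
// sensitive to skew than the raw bounding-box width and height.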
idealAspectRatio = outer ? (62.0/29) : (62.0/20); //Dimensions of goal opening + 4 inches on all 4 sides for reflective tape; 62.0 forces floating-point division (62/29 would truncate to 2)
//Divide width by height to measure aspect ratio
if(report.boundingRectWidth > report.boundingRectHeight){
//particle is wider than it is tall, divide long by short
aspectRatio = 100*(1-Math.abs((1-((rectLong/rectShort)/idealAspectRatio))));
} else {
//particle is taller than it is wide, divide short by long
aspectRatio = 100*(1-Math.abs((1-((rectShort/rectLong)/idealAspectRatio))));
}
return (Math.max(0, Math.min(aspectRatio, 100.0))); //force to be in range 0-100
}
/**
* Compares scores to defined limits and returns true if the particle appears to be a target
*
* @param scores The structure containing the scores to compare
* @param outer True if the particle should be treated as an outer target, false to treat it as a center target
*
* @return True if the particle meets all limits, false otherwise
*/
boolean scoreCompare(Scores scores, boolean outer){
boolean isTarget = true;
isTarget &= scores.rectangularity > RECTANGULARITY_LIMIT;
if(outer){
isTarget &= scores.aspectRatioOuter > ASPECT_RATIO_LIMIT;
} else {
isTarget &= scores.aspectRatioInner > ASPECT_RATIO_LIMIT;
}
isTarget &= scores.xEdge > X_EDGE_LIMIT;
isTarget &= scores.yEdge > Y_EDGE_LIMIT;
return isTarget;
}
/**
* Computes a score (0-100) estimating how rectangular the particle is by comparing the area of the particle
* to the area of the bounding box surrounding it. A perfect rectangle would cover the entire bounding box.
*
* @param report The Particle Analysis Report for the particle to score
* @return The rectangularity score (0-100)
*/
double scoreRectangularity(ParticleAnalysisReport report){
if(report.boundingRectWidth*report.boundingRectHeight !=0){
return 100*report.particleArea/(report.boundingRectWidth*report.boundingRectHeight);
} else {
return 0;
}
}
/**
* Computes a score based on the match between a template profile and the particle profile in the X direction. This method uses
* the column averages and the profile defined at the top of the sample to look for solid vertical edges with
* a hollow center.
*
* @param image The image to use, should be the image before the convex hull is performed
* @param report The Particle Analysis Report for the particle
*
* @return The X Edge Score (0-100)
*/
public double scoreXEdge(BinaryImage image, ParticleAnalysisReport report) throws NIVisionException
{
double total = 0;
LinearAverages averages;
Rect rect = new Rect(report.boundingRectTop, report.boundingRectLeft, report.boundingRectHeight, report.boundingRectWidth);
averages = NIVision.getLinearAverages(image.image, LinearAverages.LinearAveragesMode.IMAQ_COLUMN_AVERAGES, rect);
float columnAverages[] = averages.getColumnAverages();
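// Each column average is the mean pixel value over one column of the bounding box; for a binary image
// this is the fraction of the column that is "on" (0 to 1). The index is rescaled onto the 24-entry
// template arrays so a column counts toward the score when its fill fraction lies between the xMin and
// xMax profile values for that position.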
for(int i=0; i < (columnAverages.length); i++){
if(xMin[(i*(XMINSIZE-1)/columnAverages.length)] < columnAverages[i]
&& columnAverages[i] < xMax[i*(XMAXSIZE-1)/columnAverages.length]){
total++;
}
}
total = 100*total/(columnAverages.length);
return total;
}
/**
* Computes a score based on the match between a template profile and the particle profile in the Y direction. This method uses
* the row averages and the profile defined at the top of the sample to look for solid horizontal edges with
* a hollow center.
*
* @param image The image to use, should be the image before the convex hull is performed
* @param report The Particle Analysis Report for the particle
*
* @return The Y Edge score (0-100)
*
*/
public double scoreYEdge(BinaryImage image, ParticleAnalysisReport report) throws NIVisionException
{
double total = 0;
LinearAverages averages;
Rect rect = new Rect(report.boundingRectTop, report.boundingRectLeft, report.boundingRectHeight, report.boundingRectWidth);
averages = NIVision.getLinearAverages(image.image, LinearAverages.LinearAveragesMode.IMAQ_ROW_AVERAGES, rect);
float rowAverages[] = averages.getRowAverages();
for(int i=0; i < (rowAverages.length); i++){
if(yMin[(i*(YMINSIZE-1)/rowAverages.length)] < rowAverages[i]
&& rowAverages[i] < yMax[i*(YMAXSIZE-1)/rowAverages.length]){
total++;
}
}
total = 100*total/(rowAverages.length);
return total;
}
public void println(String s){
println(s, 1, 1);
}
public void println(String s, int line){
println(s, line, 1);
}
public void println(String s, int line, int startingColumn){
switch (line)
{
case 1:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser1, startingColumn, s);
break;
case 2:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser2, startingColumn, s);
break;
case 3:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser3, startingColumn, s);
break;
case 4:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser4, startingColumn, s);
break;
case 5:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser5, startingColumn, s);
break;
case 6:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser6, startingColumn, s);
break;
}
DriverStationLCD.getInstance().updateLCD();
}
}
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.RobotDrive;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class RobotTemplate extends IterativeRobot {
RobotDrive drive;
Joystick stick, stick2;
Jaguar drive1, drive2,
wheel1, wheel2;
double wheel1Speed, wheel2Speed;
Relay indexer, compressor;
Compressor comp;
DoubleSolenoid shootPiston, lift1, lift2;
//DigitalInput dio1;
boolean b2, b3, b4, b5;
final double INC1 = 0.03,
INC2 = 0.03;
final int DRIVEPWM1 = 1,
DRIVEPWM2 = 2,
SHOOTPWM1 = 3,
SHOOTPWM2 = 4,
SHOOTSOL1 = 1,
SHOOTSOL2 = 2,
INDEXPWM = 5,
COMPCHAN = 6,
DIOCHANNEL= 1,
LIFTSOL11 = 3,
LIFTSOL12 = 4,
LIFTSOL21 = 5,
LIFTSOL22 = 6,
CLEARSHOOTBTN = 9, CLEARSHOOTSTICK = 1,
INCBTN1 = 3, INCSTICK = 2,
DECBTN1 = 2, DECSTICK = 2,
INCBTN2 = 4, // INCSTICK
DECBTN2 = 5, // DECSTICK
LIFTBTN = 10, LIFTSTICK = 2,
INDEXBTN = 6, INDEXSTICK = 2,
DRIVESTICK = 1;
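// Control layout implied by the constants above: stick 1 is arcade drive, with button 9 resetting both
// shooter wheel speeds to zero; stick 2 steps wheel1 up/down with buttons 3/2 and wheel2 up/down with
// buttons 4/5, drives the lift solenoids forward while button 10 is held (reverse otherwise), and runs
// the indexer relay while button 6 is held.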
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
public void robotInit() {
println("robotInit()");
stick = new Joystick(1);
stick2 = new Joystick(2);
indexer = new Relay(INDEXPWM);
//compressor = new Relay(COMPPWM);
comp = new Compressor(DIOCHANNEL, COMPCHAN);
shootPiston = new DoubleSolenoid(SHOOTSOL1, SHOOTSOL2);
//dio1 = new DigitalInput(1);
lift1 = new DoubleSolenoid(LIFTSOL11, LIFTSOL12);
lift2 = new DoubleSolenoid(LIFTSOL21, LIFTSOL22);
drive1 = new Jaguar(DRIVEPWM1); drive2 = new Jaguar(DRIVEPWM2);
wheel1 = new Jaguar(SHOOTPWM1); wheel2 = new Jaguar(SHOOTPWM2);
wheel1Speed = 0; wheel2Speed = 0;
drive = new RobotDrive(drive1, drive2); // previously constructed with wheel1/wheel2 (the shooter Jaguars), which left drive1/drive2 unused and let arcadeDrive fight the shooter speed buttons; drive1/drive2 appear to be what was intended
comp.start();
}
/**
* This function is called periodically during autonomous
*/
public void autonomousPeriodic() {
}
/**
* This function is called periodically during operator control
*/
public void teleopPeriodic() {
if (DRIVESTICK == 1) {
drive.arcadeDrive(stick);
}
else {
drive.arcadeDrive(stick2);
}
println("teleopPeriodic()");
if (getButton(INCSTICK, INCBTN1)){
println("b3 down", 2);
b3 = true;
}else if (b3){
wheel1Speed += INC1;
wheel1.set(wheel1Speed);
b3 = false;
}
if (getButton(DECSTICK, DECBTN1)){
println("b" + DECBTN2 + " down", 2);
b2 = true;
}else if (b2){
wheel1Speed -= INC1;
wheel1.set(wheel1Speed);
b2 = false;
}
if (getButton(INCSTICK, INCBTN2)){
println("b" + INCBTN2 + " down", 2);
b4 = true;
}else if (b4){
wheel2Speed += INC2;
wheel2.set(wheel2Speed);
b4 = false;
}
if (getButton(DECSTICK, DECBTN2)){
println("b5 down", 2);
b5 = true;
}else if (b5){
wheel2Speed -= INC2;
wheel2.set(wheel2Speed);
b5 = false;
}
if (getButton(CLEARSHOOTSTICK, CLEARSHOOTBTN)){
wheel1.set(0); wheel2.set(0);
wheel1Speed = 0; wheel2Speed = 0;
println(" ", 3);
println(" ", 4);
}
println(wheel1.get() + "", 3);
println(wheel2.get() + "", 4);
if (getButton(INDEXSTICK, INDEXBTN)) {
indexer.set(Relay.Value.kOn);
}else{
indexer.set(Relay.Value.kOff);
}
// if (dio1.get()){ // **BUG
// compressor.set(Relay.Value.kOn);
// }else{
// compressor.set(Relay.Value.kOff);
// }
if (getButton(LIFTSTICK, LIFTBTN)){
lift1.set(DoubleSolenoid.Value.kForward);
lift2.set(DoubleSolenoid.Value.kForward);
}else{
lift1.set(DoubleSolenoid.Value.kReverse);
lift2.set(DoubleSolenoid.Value.kReverse);
}
}
public boolean getButton(int joystick, int button){
if (joystick == 1){
return stick.getRawButton(button);
}else if (joystick == 2){
return stick2.getRawButton(button);
}else{
System.out.println("Not a stick");
}
return false;
}
public void println(String s){
println(s, 1, 1);
}
public void println(String s, int line){
println(s, line, 1);
}
public void println(String s, int line, int startingColumn){
switch (line)
{
case 1:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser1, startingColumn, s);
break;
case 2:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser2, startingColumn, s);
break;
case 3:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser3, startingColumn, s);
break;
case 4:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser4, startingColumn, s);
break;
case 5:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser5, startingColumn, s);
break;
case 6:
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser6, startingColumn, s);
break;
}
DriverStationLCD.getInstance().updateLCD();
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
}
}