Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file not shown.
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
Expand All @@ -50,7 +49,7 @@
* is explained below.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
@Disabled
//@Disabled
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
/* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
* the following 4 detectable objects
Expand All @@ -63,12 +62,9 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
* FreightFrenzy_BC.tflite 0: Ball, 1: Cube
* FreightFrenzy_DM.tflite 0: Duck, 1: Marker
*/
private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
private static final String TFOD_MODEL_ASSET = "Capstone_v1_100-10.tflite";
private static final String[] LABELS = {
"Ball",
"Cube",
"Duck",
"Marker"
"Capstone"
};

/*
Expand All @@ -84,7 +80,8 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
* and paste it in to your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
"ARjSEzX/////AAABmTyfc/uSOUjluYpQyDMk15tX0Mf3zESzZKo6V7Y0O/qtPvPQOVben+DaABjfl4m5YNOhGW1HuHywuYGMHpJ5/uXY6L8Mu93OdlOYwwVzeYBhHZx9le+rUMr7NtQO/zWEHajiZ6Jmx7K+A+UmRZMpCmr//dMQdlcuyHmPagFERkl4fdP0UKsRxANaHpwfQcY3npBkmgE8XsmK4zuFEmzfN2/FV0Cns/tiTfXtx1WaFD0YWYfkTHRyNwhmuBxY6MXNmaG8VlLwJcoanBFmor2PVBaRYZ9pnJ4TJU5w25h1lAFAFPbLTz1RT/UB3sHT5CeG0bMyM4mTYLi9SHPOUQjmIomxp9D7R39j8g5G7hiKr2JP";


/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
Expand Down Expand Up @@ -118,7 +115,7 @@ public void runOpMode() {
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
// should be set to the value of the images used to create the TensorFlow Object Detection model
// (typically 16/9).
tfod.setZoom(2.5, 16.0/9.0);
tfod.setZoom(1.0, 16.0/9.0);
}

/** Wait for the game to begin */
Expand Down Expand Up @@ -177,8 +174,9 @@ private void initTfod() {
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.minResultConfidence = 0.55f;
tfodParameters.isModelTensorFlow2 = false;
tfodParameters.isModelQuantized = false;
tfodParameters.inputSize = 320;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
Expand Down
1 change: 1 addition & 0 deletions road-runner-quickstart-master/TeamCode/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ apply from: '../build.dependencies.gradle'
dependencies {
implementation project(':FtcRobotController')
annotationProcessor files('lib/OpModeAnnotationProcessor.jar')
//implementation 'com.google.mlkit:image-labeling-custom:17.0.0'

implementation 'org.apache.commons:commons-math3:3.6.1'

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
package org.firstinspires.ftc.teamcode.vision.TFODModels;

import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;

/**
 * Detection-model descriptor for the v1 capstone-sticker MobileNetV3-Large
 * network (asset {@code Capstone_v1_100-10.tflite}, single "capstone" label).
 * Supplies the asset path, label list, and the TFOD runtime parameters tuned
 * for this model.
 */
public class CAPSTONE_V1_MOBILETNET3 extends DetectionModel {
    // Thresholds tuned for this specific model.
    private static final float MIN_CONFIDENCE = 0.55f;
    private static final float MIN_TRACKER_CONFIDENCE = 0.60f;
    private static final float MARGINAL_CORRELATION = 0.80f;
    private static final int INPUT_SIZE = 320;
    private static final boolean IS_QUANTIZED = false;

    public CAPSTONE_V1_MOBILETNET3(){
        super("MOBILENET_V3_LARGE_CAPSTONE_STICKER_100-10", "Capstone_v1_100-10.tflite",
                new String[] { "capstone" }, MIN_CONFIDENCE, INPUT_SIZE, IS_QUANTIZED);
    }

    /**
     * Builds a fresh {@link TFObjectDetector.Parameters} configured with this
     * model's input size, quantization flag, confidence threshold, and tracker
     * correlation settings.
     *
     * @param tfodMonitorViewId resource id of the camera-monitor view
     * @return a fully configured parameter set for the TFOD engine
     */
    @Override
    public TFObjectDetector.Parameters getParameters(int tfodMonitorViewId){
        TFObjectDetector.Parameters p = new TFObjectDetector.Parameters(tfodMonitorViewId);
        p.inputSize = INPUT_SIZE;
        p.isModelQuantized = IS_QUANTIZED;
        p.minResultConfidence = MIN_CONFIDENCE;
        p.trackerMinCorrelation = MIN_TRACKER_CONFIDENCE;
        p.trackerMarginalCorrelation = MARGINAL_CORRELATION;
        p.maxFrameRate = 40;
        p.numExecutorThreads = 1;
        return p;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
package org.firstinspires.ftc.teamcode.vision.TFODModels;

import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;

/**
 * Detection-model descriptor for the v2 capstone-sticker MobileNetV3-Large
 * network (asset {@code Capstone_v2_130-13.tflite}, single "capstone" label).
 * Supplies the asset path, label list, and the TFOD runtime parameters tuned
 * for this model.
 */
public class CAPSTONE_V2_MOBILETNET3 extends DetectionModel {
    // Thresholds tuned for this specific model.
    private static final float MIN_CONFIDENCE = 0.55f;
    private static final float MIN_TRACKER_CONFIDENCE = 0.60f;
    private static final float MARGINAL_CORRELATION = 0.80f;
    private static final int INPUT_SIZE = 320;
    private static final boolean IS_QUANTIZED = false;

    public CAPSTONE_V2_MOBILETNET3(){
        super("MOBILENET_V3_LARGE_CAPSTONE_STICKER_130-13", "Capstone_v2_130-13.tflite",
                new String[] { "capstone" }, MIN_CONFIDENCE, INPUT_SIZE, IS_QUANTIZED);
    }

    /**
     * Builds a fresh {@link TFObjectDetector.Parameters} configured with this
     * model's input size, quantization flag, confidence threshold, and tracker
     * correlation settings.
     *
     * @param tfodMonitorViewId resource id of the camera-monitor view
     * @return a fully configured parameter set for the TFOD engine
     */
    @Override
    public TFObjectDetector.Parameters getParameters(int tfodMonitorViewId){
        TFObjectDetector.Parameters p = new TFObjectDetector.Parameters(tfodMonitorViewId);
        p.inputSize = INPUT_SIZE;
        p.isModelQuantized = IS_QUANTIZED;
        p.minResultConfidence = MIN_CONFIDENCE;
        p.trackerMinCorrelation = MIN_TRACKER_CONFIDENCE;
        p.trackerMarginalCorrelation = MARGINAL_CORRELATION;
        p.maxFrameRate = 40;
        p.numExecutorThreads = 1;
        return p;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package org.firstinspires.ftc.teamcode.vision.TFODModels;

import androidx.annotation.NonNull;

import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;

import java.util.Arrays;

/**
 * Base descriptor for a TensorFlow Object Detection model: its display name,
 * the .tflite asset path, the label list, and the core inference settings.
 * Concrete models (e.g. the capstone models) extend this and override
 * {@link #getParameters(int)} to provide their tuned TFOD parameters.
 */
public abstract class DetectionModel {
    public final String MODEL_NAME;
    public final String MODEL_ASSET_FILE_PATH;
    public final String[] ELEMENT_NAMES;

    // Made final: these are assigned exactly once in the constructor and
    // were never reassigned anywhere; exposing them mutable invited bugs.
    public final float MIN_CONFIDENCE;
    public final int INPUT_SIZE;
    public final boolean IS_QUANTIZED;

    /**
     * @param modelName      human-readable model identifier (used in telemetry/toString)
     * @param modelAssetFile path of the .tflite asset bundled with the app
     * @param elements       detectable label names, in model output order
     * @param minConfidence  minimum result confidence threshold
     * @param inputSize      model input size in pixels (square input assumed by TFOD)
     * @param isQuantized    whether the model weights are quantized
     */
    protected DetectionModel(String modelName, String modelAssetFile, String[] elements,
                             float minConfidence, int inputSize, boolean isQuantized){
        MODEL_NAME = modelName;
        MODEL_ASSET_FILE_PATH = modelAssetFile;
        ELEMENT_NAMES = elements;
        MIN_CONFIDENCE = minConfidence;
        INPUT_SIZE = inputSize;
        IS_QUANTIZED = isQuantized;
    }

    /**
     * Returns the TFOD runtime parameters for this model.
     *
     * <p>NOTE(review): the base implementation returns {@code null}; callers
     * must either use a subclass that overrides this or null-check the result.
     * Consider making this method abstract once all subclasses override it.
     *
     * @param tfodMonitorViewId resource id of the camera-monitor view
     * @return configured parameters, or {@code null} if not overridden
     */
    public TFObjectDetector.Parameters getParameters(int tfodMonitorViewId){
        return null;
    }

    /** Compact summary: {name: assetPath - [labels]}. */
    @Override
    @NonNull
    public String toString(){
        return "{" + MODEL_NAME + ": " + MODEL_ASSET_FILE_PATH + " - " + Arrays.toString(ELEMENT_NAMES) + "}";
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
package org.firstinspires.ftc.teamcode.vision.Tensorflow;

import android.graphics.RectF;

import org.firstinspires.ftc.robotcore.external.tfod.Recognition;

/**
 * Immutable wrapper around a TFOD {@link Recognition} that also carries the
 * camera optics (focal length and the sensor height derived from the vertical
 * field of view). Delegates all geometry/label accessors to the wrapped
 * recognition.
 */
public class Detection {
    // Renamed from UPPER_SNAKE_CASE and made final: these are per-instance
    // state set once in the constructor, not class constants.
    private final Recognition recognizedObject;
    private final float focalLength;   // in mm
    private final double sensorHeight; // in mm, derived from the vertical FOV

    /**
     * @param recognition      the TFOD recognition to wrap
     * @param focalLength      camera focal length, in mm
     * @param verticalFOVAngle camera vertical field of view, in degrees
     */
    public Detection(Recognition recognition, float focalLength, float verticalFOVAngle){
        this.recognizedObject = recognition;
        this.focalLength = focalLength;
        // Pinhole camera model: sensorHeight = 2 * f * tan(vFOV / 2).
        this.sensorHeight = Math.tan(Math.toRadians(verticalFOVAngle / 2d)) * 2 * this.focalLength;
    }

    /** Bounding box of the detection in image coordinates (left, top, right, bottom). */
    public RectF getBoundingBox(){
        return new RectF(recognizedObject.getLeft(), recognizedObject.getTop(),
                recognizedObject.getRight(), recognizedObject.getBottom());
    }

    public String getLabel(){
        return recognizedObject.getLabel();
    }

    public float getBottom(){
        return recognizedObject.getBottom();
    }

    public float getLeft(){
        return recognizedObject.getLeft();
    }

    public float getRight(){
        return recognizedObject.getRight();
    }

    public float getTop(){
        return recognizedObject.getTop();
    }

    public float getWidth(){
        return recognizedObject.getWidth();
    }

    public float getHeight(){
        return recognizedObject.getHeight();
    }

    public float getImageWidth(){
        return recognizedObject.getImageWidth();
    }

    public float getImageHeight(){
        return recognizedObject.getImageHeight();
    }

    public float getConfidence(){
        return recognizedObject.getConfidence();
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
package org.firstinspires.ftc.teamcode.vision.Tensorflow;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.view.Surface;
import android.widget.FrameLayout;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.vuforia.Image;
import com.vuforia.PIXEL_FORMAT;
import com.vuforia.Vuforia;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraName;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import org.firstinspires.ftc.teamcode.vision.TFODModels.CAPSTONE_V1_MOBILETNET3;
import org.firstinspires.ftc.teamcode.vision.TFODModels.CAPSTONE_V2_MOBILETNET3;
import org.firstinspires.ftc.teamcode.vision.TFODModels.DetectionModel;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;

@TeleOp(name = "Object Detection Test", group = "Vision")
public class ObjectDetectionTest extends LinearOpMode {
private static final DetectionModel TFOD_MODEL = new CAPSTONE_V2_MOBILETNET3();
public void runOpMode() {
ObjectDetector detector = new ObjectDetector(TFOD_MODEL, hardwareMap);
detector.initialize();
telemetry.addData("Loaded Model", TFOD_MODEL);
telemetry.update();

waitForStart();

while (opModeIsActive()) {
telemetry.addData("Loaded Model", TFOD_MODEL);

List<Recognition> updatedRecognitions = detector.getDetections();
if (updatedRecognitions != null) {
telemetry.addData("# Object Detected", updatedRecognitions.size());

// step through the list of recognitions and display boundary info.
int i = 0;
for (Recognition recognition : updatedRecognitions) {
telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f",
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.addData("DetectionX", detector.getPositionX(ObjectDetector.MatchMode.DEFAULT));
telemetry.update();
}

detector.autoAdjustZoom();
}
}

}
Loading