Merge pull request #15 from Pattonville-Robotics/opencv-features
Opencv features
greg-bahr committed Nov 1, 2017
2 parents 755bf71 + 6693275 commit 4775659
Showing 6 changed files with 420 additions and 35 deletions.
OpenCVTest.java (new file)
@@ -0,0 +1,50 @@
package org.pattonvillerobotics.commoncode.opmodes;

import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;

import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.pattonvillerobotics.commoncode.robotclasses.opencv.ImageProcessor;
import org.pattonvillerobotics.commoncode.robotclasses.opencv.JewelColorDetector;
import org.pattonvillerobotics.commoncode.robotclasses.opencv.util.PhoneOrientation;
import org.pattonvillerobotics.commoncode.robotclasses.vuforia.VuforiaNavigation;
import org.pattonvillerobotics.commoncode.robotclasses.vuforia.VuforiaParameters;

/**
* Created by greg on 10/7/2017.
*/

@Autonomous(name = "OpenCV Test", group = OpModeGroups.TESTING)
public class OpenCVTest extends LinearOpMode {

private JewelColorDetector jewelColorDetector;
private VuforiaNavigation vuforia;

private VuforiaParameters VUFORIA_PARAMETERS = new VuforiaParameters.Builder()
.phoneLocation(0, 0, 0, AxesOrder.XYZ, 90, -90, 0)
.cameraDirection(VuforiaLocalizer.CameraDirection.BACK)
.licenseKey("AclLpHb/////AAAAGa41kVT84EtWtYJZW0bIHf9DHg5EHVYWCqExQMx6bbuBtjFeYdvzZLExJiXnT31qDi3WI3QQnOXH8pLZ4cmb39d1w0Oi7aCwy35ODjMvG5qX+e2+3v0l3r1hPpM8P7KPTkRPIl+CGYEBvoNkVbGGjalCW7N9eFDV/T5CN/RQvZjonX/uBPKkEd8ciqK8vWgfy9aPEipAoyr997DDagnMQJ0ajpwKn/SAfaVPA4osBZ5euFf07/3IUnpLEMdMKfoIH6QYLVgwbPuVtUiJWM6flzWaAw5IIhy0XXWwI0nGXrzVjPwZlN3El4Su73ADK36qqOax/pNxD4oYBrlpfYiaFaX0Q+BNro09weXQEoz/Mfgm")
.build();

@Override
public void runOpMode() throws InterruptedException {
ImageProcessor.initOpenCV(hardwareMap, this);

jewelColorDetector = new JewelColorDetector(PhoneOrientation.PORTRAIT);
vuforia = new VuforiaNavigation(VUFORIA_PARAMETERS);

JewelColorDetector.Analysis analysis;

waitForStart();

while (opModeIsActive()) {
jewelColorDetector.process(vuforia.getImage());
analysis = jewelColorDetector.getAnalysis();

telemetry.addData("Left: ", analysis.leftJewelColor);
telemetry.addData("Right: ", analysis.rightJewelColor);
telemetry.update();
}
}
}
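
The op mode above only reports the two detected colors over telemetry. A minimal sketch of how an autonomous routine might branch on that result is shown below; it assumes the Analysis fields are ColorSensorColor values (RED/BLUE) and that the enum is imported, neither of which is confirmed by this diff.

// Illustrative only: assumes analysis.leftJewelColor and analysis.rightJewelColor
// are ColorSensorColor values; the field types are not shown in this commit.
ColorSensorColor allianceColor = ColorSensorColor.RED; // hypothetical alliance setting

if (analysis.leftJewelColor == allianceColor) {
    // our jewel is on the left, so the right jewel should be knocked off
    telemetry.addData("Decision", "knock off right jewel");
} else if (analysis.rightJewelColor == allianceColor) {
    // our jewel is on the right, so the left jewel should be knocked off
    telemetry.addData("Decision", "knock off left jewel");
} else {
    telemetry.addData("Decision", "no confident detection, skip");
}
telemetry.update();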
ColorBlobDetector.java (new file)
@@ -0,0 +1,79 @@
package org.pattonvillerobotics.commoncode.robotclasses.opencv;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.pattonvillerobotics.commoncode.enums.ColorSensorColor;

import java.util.ArrayList;
import java.util.List;

public class ColorBlobDetector {

private final List<MatOfPoint> contours = new ArrayList<>();
private final Mat blurMat = new Mat();
private final Mat thresholdMat = new Mat();
private final Mat hierarchyMat = new Mat();
private final Mat hsvMat = new Mat();
private Scalar lowerBoundHSV;
private Scalar upperBoundHSV;

public ColorBlobDetector(Scalar lowerBoundHSV, Scalar upperBoundHSV) {
setHSVBounds(lowerBoundHSV, upperBoundHSV);
}

public ColorBlobDetector(ColorSensorColor color) {
setHSVBounds(color);
}

public void setHSVBounds(Scalar lower, Scalar upper) {
lowerBoundHSV = lower;
upperBoundHSV = upper;
}

public void setHSVBounds(ColorSensorColor color) {
switch (color) {
case RED:
setHSVBounds(new Scalar(160, 40, 0), new Scalar(180, 255, 255));
break;
case BLUE:
setHSVBounds(new Scalar(90, 40, 0), new Scalar(110, 255, 255));
break;
case GREEN:
setHSVBounds(new Scalar(45, 40, 0), new Scalar(75, 255, 255));
break;
default:
throw new IllegalArgumentException("Must provide RED, BLUE, or GREEN!");
}
}

public void process(Mat rgbaMat) {
Imgproc.blur(rgbaMat, blurMat, new Size(10, 10));

Imgproc.cvtColor(blurMat, hsvMat, Imgproc.COLOR_RGB2HSV);
Core.inRange(hsvMat, lowerBoundHSV, upperBoundHSV, thresholdMat);

List<MatOfPoint> tmp = new ArrayList<>();

Imgproc.findContours(thresholdMat, tmp, hierarchyMat, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

// filters out super small contours
contours.clear();
for (MatOfPoint contour : tmp) {
if (Imgproc.contourArea(contour) > 500) {
contours.add(contour);
}
}
}

public Mat getThresholdMat() {
return thresholdMat;
}

public List<MatOfPoint> getContours() {
return contours;
}
}
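
A minimal usage sketch for ColorBlobDetector follows. It assumes OpenCV has already been initialized through ImageProcessor.initOpenCV(...), that it runs inside an op mode, and that `bitmap` is a camera frame obtained elsewhere (for example from Vuforia); none of that is part of this class.

// Sketch only: `bitmap` is an assumed, already-captured camera frame.
ColorBlobDetector blueDetector = new ColorBlobDetector(ColorSensorColor.BLUE);

Mat frame = ImageProcessor.processBitmap(bitmap, PhoneOrientation.PORTRAIT);
blueDetector.process(frame);

// Pick the largest blue blob, if any, from the filtered contours.
MatOfPoint largest = null;
double largestArea = 0;
for (MatOfPoint contour : blueDetector.getContours()) {
    double area = Imgproc.contourArea(contour);
    if (area > largestArea) {
        largestArea = area;
        largest = contour;
    }
}
// `largest` (possibly null) can then drive telemetry or a scoring decision.

Note that process() already discards contours with an area of 500 or less, so the loop above only ranks blobs that survived that filter.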
ImageProcessor.java
@@ -1,32 +1,29 @@
package org.pattonvillerobotics.commoncode.robotclasses.opencv;

import android.graphics.Bitmap;
import android.util.Log;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.HardwareMap;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import org.pattonvillerobotics.commoncode.robotclasses.opencv.util.PhoneOrientation;


/**
* Created by greg on 10/4/2017.
*/

public abstract class ImageProcessor {
public final class ImageProcessor {

private static final String TAG = ImageProcessor.class.getSimpleName();

private boolean initialized;
private HardwareMap hardwareMap;
private static boolean initialized;

public ImageProcessor(HardwareMap hardwareMap) {
this.hardwareMap = hardwareMap;
private ImageProcessor() {}

initOpenCV();
}

private void initOpenCV() {
public static void initOpenCV(HardwareMap hardwareMap, LinearOpMode opMode) {
BaseLoaderCallback baseLoaderCallback = null;

try {
@@ -67,7 +64,7 @@ public void onManagerConnected(int status) {
}
}

while (!initialized) {
while (!initialized && opMode.opModeIsActive()) {
try {
Thread.sleep(50);
} catch (InterruptedException e) {
@@ -76,16 +73,23 @@ public void onManagerConnected(int status) {
}
}

public void waitForOpenCVInit() {
while (!initialized) {
synchronized (this) {
try {
this.wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
}
/**
* Converts the Bitmap to a Mat and then rotates the image based on the phone's orientation.
*
* @param bitmap A bitmap taken from Vuforia
* @param orientation The current orientation of the phone on the robot
* @return a Mat of the bitmap in the correct orientation
*/
public static Mat processBitmap(Bitmap bitmap, PhoneOrientation orientation) {
Mat tmp = new Mat();
Utils.bitmapToMat(bitmap, tmp);

Mat rotMat = Imgproc.getRotationMatrix2D(new Point(tmp.cols()/2, tmp.rows()/2), orientation.getRotation(), 1.0);
Mat rotated = new Mat();
Imgproc.warpAffine(tmp, rotated, rotMat, tmp.size());

Log.i("Jewel", "Rotated.");

return rotated;
}
}
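
As far as this diff shows, the intended call pattern is to initialize OpenCV once at the start of an op mode, then convert each camera Bitmap with processBitmap before any further processing. A short sketch follows; treating vuforia.getImage() as returning a Bitmap is an assumption based on the OpenCVTest op mode above, not something this file guarantees.

// Inside a LinearOpMode's runOpMode(): initOpenCV polls until OpenCV reports
// initialized or opModeIsActive() returns false (the check added in this commit).
ImageProcessor.initOpenCV(hardwareMap, this);

Bitmap frame = vuforia.getImage(); // assumed to be a Bitmap
Mat mat = ImageProcessor.processBitmap(frame, PhoneOrientation.PORTRAIT);

One thing to be aware of: warpAffine is given tmp.size(), so a 90-degree rotation of a non-square frame keeps the original width and height and clips whatever falls outside the original bounds; callers that need the full rotated frame would have to adjust the output size themselves.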