Skip to content

Commit

Permalink
Merge branch 'dev' into id-structs-overhaul
Browse files Browse the repository at this point in the history
  • Loading branch information
Cyco12 committed Feb 15, 2019
2 parents 2011c67 + 5a2b62d commit ba1207a
Show file tree
Hide file tree
Showing 118 changed files with 548 additions and 76 deletions.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 2 additions & 0 deletions src/Jetson/ahoy22.cpp → src/Jetson/Old Vision/ahoy22.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
//Main
int main()
{
cv::gpu::printShortCudaDeviceInfo(cv::gpu::getDevice());


//zmq::context_t context (1);
//zmq::socket_t publisher(context, ZMQ_PUB);
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
45 changes: 45 additions & 0 deletions src/Jetson/OrthusVision/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# Require correct version of cmake
cmake_minimum_required (VERSION 3.5.1)

# Set CMake CUDA compiler (Jetson TX1 image ships CUDA 9.0 here)
set(CMAKE_CUDA_COMPILER /usr/local/cuda-9.0/bin/nvcc)

# Set project name; listing CUDA in LANGUAGES enables it as a first-class language
project (OrthusVision LANGUAGES CUDA CXX)

# Enable CUDA language (redundant with LANGUAGES above, kept for safety)
enable_language(CUDA)

# Include/Find/Check libraries
include(FindCUDA)
include(CheckLanguage)
find_package(OpenCV REQUIRED)
find_package(CUDA REQUIRED)
check_language(CUDA)

# Set flags for C++ standard libraries
if(NOT DEFINED CMAKE_CUDA_STANDARD)
    set(CMAKE_CUDA_STANDARD 11)
    set(CMAKE_CUDA_STANDARD_REQUIRED ON)
endif()

# Include directories
include_directories(${OpenCV_INCLUDE_DIRS})
include_directories(${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
# FIX: the original wrote include_directories(${allwpilib/wpilibc/athena/include})
# and include_directories(${WPILib.h}) — those are dereferences of *undefined
# variables* (not paths), so they expanded to empty strings and added nothing.
# Use the literal header path instead.
include_directories(allwpilib/wpilibc/athena/include)
include_directories(/usr/local/include)
# FIX: /usr/local/lib contains libraries, not headers — it belongs on the
# linker search path, not the include path.
link_directories(/usr/local/lib)

# Disabling unneeded CUDA stuff
unset(CUDA_USE_STATIC_CUDA_RUNTIME CACHE)
# FIX: option()'s second argument is the help string; the original
# option(CUDA_USE_STATIC_CUDA_RUNTIME OFF) used "OFF" as the description.
option(CUDA_USE_STATIC_CUDA_RUNTIME "Link against the static CUDA runtime" OFF)

# Creating executable
add_executable(OrthusVision orthusVision.cpp)

# Link libraries to executable
target_link_libraries(OrthusVision ${OpenCV_LIBS})
# NOTE(review): the original also "linked" ${allwpilib/wpilibc/athena/include}
# and ${WPILib.h} — undefined variables expanding to nothing, i.e. no-ops.
# When WPILib is actually built for the Jetson, link the real library target,
# e.g.: target_link_libraries(OrthusVision wpilibc)
122 changes: 122 additions & 0 deletions src/Jetson/OrthusVision/orthusVision.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
// Reset Robotics 2019
// NVIDIA JetsonTX1 Vision
//
// Captures frames from /dev/video0, thresholds them in HSV space (GPU
// accelerated via the legacy OpenCV 2.x cv::gpu module), finds contours,
// and when exactly two plausible targets are seen, prints their pixel
// offset from the horizontal center of a 640px-wide frame.

// Libraries
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "opencv2/gpu/gpu.hpp"
// FIX: the original had a bare "#include" with no header name (would not
// compile); these are the standard headers the code actually uses.
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <vector>


// Main
int main()
{
    cv::gpu::printShortCudaDeviceInfo(cv::gpu::getDevice());

    // Print the current exposure, then lock the camera to a fixed low
    // exposure so reflective vision targets stand out from the background.
    printf("Current exposure settings:\n");
    system("v4l2-ctl -d /dev/video0 -C exposure_auto");
    system("v4l2-ctl -d /dev/video0 -C exposure_absolute");

    // Set exposure (exposure_auto=1 selects manual mode on UVC cameras)
    system("v4l2-ctl -d /dev/video0 -c exposure_auto=1");
    system("v4l2-ctl -d /dev/video0 -c exposure_absolute=5");

    // Print the exposure settings of the camera after the change
    printf("New exposure settings:\n");
    system("v4l2-ctl -d /dev/video0 -C exposure_absolute");

    int img_scale_factor = 1; // divides the frame dimensions (1 = full size)

    // HSV threshold bounds, adjustable at runtime via the trackbars below.
    // FIX: these were referenced but never declared in the original (it
    // would not compile). Defaults pass everything; tune with the sliders.
    int h_lowerb = 0, h_upperb = 179;
    int s_lowerb = 0, s_upperb = 255;
    int v_lowerb = 0, v_upperb = 255;

    // Images (CPU side)
    cv::Mat imgRaw;       // input image
    cv::Mat imgResize;    // resized frame
    cv::Mat imgHSV;       // frame converted to HSV colorspace
    cv::Mat imgThreshold; // binary image from HSV thresholding
    cv::Mat imgContour;   // scratch copy: findContours modifies its input
    cv::Mat imgOutput;    // final annotated image

    // GPU-side buffers
    cv::gpu::GpuMat src, resize, hsv;

    // HSV Threshold Sliders
    cv::namedWindow("HSV Thresholding");
    cv::createTrackbar("Hue Lower Bound", "HSV Thresholding", &h_lowerb, 179);
    cv::createTrackbar("Hue Upper Bound", "HSV Thresholding", &h_upperb, 179);
    cv::createTrackbar("Saturation Lower Bound", "HSV Thresholding", &s_lowerb, 255);
    cv::createTrackbar("Saturation Upper Bound", "HSV Thresholding", &s_upperb, 255);
    cv::createTrackbar("Value Lower Bound", "HSV Thresholding", &v_lowerb, 255);
    cv::createTrackbar("Value Upper Bound", "HSV Thresholding", &v_upperb, 255);

    // Display windows (created once, not per frame)
    cv::namedWindow("stream", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("raw", CV_WINDOW_AUTOSIZE);

    std::vector<std::vector<cv::Point> > contours; // each contour is an array of points

    // Start video stream
    cv::VideoCapture input(0);
    cv::VideoWriter output; // never open()ed here; write() is guarded below

    // Target frame size — hoisted out of the loop (camera properties are
    // constant while streaming).
    const cv::Size outSize(
        static_cast<int>(input.get(CV_CAP_PROP_FRAME_WIDTH) / img_scale_factor),
        static_cast<int>(input.get(CV_CAP_PROP_FRAME_HEIGHT) / img_scale_factor));

    // Loop for each frame
    for (;;)
    {
        // Checks for video feed
        if (!input.read(imgRaw))
            break;

        // FIX: in the original, everything below sat OUTSIDE this loop, so
        // the whole pipeline ran exactly once (on the last frame) after the
        // stream ended, instead of once per frame.
        src.upload(imgRaw);

        // GPU-accelerated image transformations.
        // FIX: the original passed CV_INTER_CUBIC as the 4th argument of
        // gpu::resize, which is the fx scale factor, not the interpolation
        // flag — interpolation goes in the 6th position.
        cv::gpu::resize(src, resize, outSize, 0, 0, CV_INTER_CUBIC);
        resize.download(imgResize);

        cv::gpu::cvtColor(resize, hsv, CV_BGR2HSV);

        // Download GpuMat from the GPU for the CPU-only stages
        hsv.download(imgHSV);

        cv::inRange(imgHSV, cv::Scalar(h_lowerb, s_lowerb, v_lowerb), cv::Scalar(h_upperb, s_upperb, v_upperb), imgThreshold);
        imgThreshold.copyTo(imgContour); // findContours clobbers its input
        cv::findContours(imgContour, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        // Create bounding rectangles to identify which contours are actual targets
        std::vector<cv::RotatedRect> boundRects(contours.size());
        for (size_t i = 0; i < contours.size(); i++)
            boundRects[i] = cv::minAreaRect(cv::Mat(contours[i]));

        // Identify contours which could be actual targets
        std::vector<std::vector<cv::Point> > poss_targets;
        for (size_t i = 0; i < boundRects.size(); i++)
        {
            // RotatedRect::size is Size2f; keeping it float avoids the
            // silent truncation the original's cv::Size copy introduced.
            cv::Size2f s = boundRects[i].size;

            // Keep contours that mostly fill their bounding rectangle and
            // are not tiny specks of noise.
            if (s.height * s.width < 1.25 * cv::contourArea(contours[i]) && s.height * s.width > 30)
                poss_targets.push_back(contours[i]);
        }

        // If we identify exactly two possible targets then they are the targets
        std::vector<std::vector<cv::Point> > targets;
        if (poss_targets.size() == 2)
            targets = poss_targets;

        imgOutput = imgResize.clone(); // output is the resized frame with annotations

        cv::drawContours(imgOutput, contours, -1, cv::Scalar(0, 255, 0)); // all contours in green

        // Save and show the output image.
        // FIX: guard the write — the VideoWriter is never opened, and
        // writing to an unopened writer is at best a silent no-op.
        if (output.isOpened())
            output.write(imgOutput);
        cv::imshow("stream", imgThreshold);
        cv::imshow("raw", imgRaw);

        // If we identified targets, report the pixel offset of their
        // combined center from the camera's horizontal center.
        if (!targets.empty())
        {
            cv::Rect r = boundingRect(targets[1]);
            cv::Rect r1 = boundingRect(targets[0]);
            double centerX = r.x + (r.width / 2);
            double centerX1 = r1.x + (r1.width / 2);
            double distance2Pixels = ((centerX + centerX1) / 2) - (640 / 2); // 640 is camera width
            std::cout << "Distance pix" << distance2Pixels << std::endl;
        }

        // FIX: imshow only paints inside waitKey's event pump; the original
        // never called it. Esc exits the stream loop cleanly.
        if (cv::waitKey(1) == 27)
            break;
    }

    return 0;
}
5 changes: 3 additions & 2 deletions src/main/java/frc/robot/Mag.kt
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import frc.robot.subsystems.RBrake
import frc.robot.subsystems.AutoController
import frc.robot.commands.Drive.Auto.DriveByTime
import frc.robot.commands.Drive.ToggleFieldOriented
import frc.robot.commands.Drive.InertialGuidance
//import frc.robot.commands.Drive.InertialGuidance


public class Mag : Robot()
Expand Down Expand Up @@ -65,7 +65,7 @@ public class Mag : Robot()
{
drivetrain.onCreate()
autocontroller.onCreate()
InertialGuidance(2.0, 2.0).start()
//InertialGuidance(2.0, 2.0).start()
//ToggleFieldOriented()
//DriveByTime(-1.0, 0.0, 0.25, 1.0, 2.0).start()
//DriveByTime
Expand Down Expand Up @@ -114,6 +114,7 @@ public class Mag : Robot()
override fun executeTeleop()
{
oi.OI()
System.err.println(Drivetrain.ultrasonicTest())

// put dashboard data here
}
Expand Down
28 changes: 28 additions & 0 deletions src/main/java/frc/robot/Util/PIDSourceX.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package frc.robot.Util

import edu.wpi.first.wpilibj.PIDOutput
import edu.wpi.first.wpilibj.PIDSource
import edu.wpi.first.wpilibj.PIDSourceType
import frc.robot.subsystems.Drivetrain

/**
 * PID input source for the X axis. The PID controller, running on its own
 * thread, polls [pidGet] each iteration and receives the drivetrain's
 * field-rotated X velocity as the loop input.
 */
public object PIDSourceX : PIDSource
{
    // Whether the value fed to the controller is a rate (kRate, e.g. speed)
    // or an absolute displacement (kDisplacement, e.g. distance). Rate here.
    var m_pidSource: PIDSourceType = PIDSourceType.kRate

    override fun setPIDSourceType(pidSource: PIDSourceType)
    {
        m_pidSource = pidSource
    }

    override fun getPIDSourceType(): PIDSourceType = m_pidSource

    /** Sampled by the PID loop every iteration; returns the current X velocity. */
    override fun pidGet(): Double = Drivetrain.rotatedXVelocity
}


28 changes: 28 additions & 0 deletions src/main/java/frc/robot/Util/PIDSourceY.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package frc.robot.Util

import edu.wpi.first.wpilibj.PIDOutput
import edu.wpi.first.wpilibj.PIDSource
import edu.wpi.first.wpilibj.PIDSourceType
import frc.robot.subsystems.Drivetrain

/**
 * PID input source for the Y axis. The PID controller, running on its own
 * thread, polls [pidGet] each iteration and receives the drivetrain's
 * field-rotated Y velocity as the loop input.
 */
public object PIDSourceY : PIDSource
{
    // Whether the value fed to the controller is a rate (kRate, e.g. speed)
    // or an absolute displacement (kDisplacement, e.g. distance). Rate here.
    var m_pidSource: PIDSourceType = PIDSourceType.kRate

    override fun setPIDSourceType(pidSource: PIDSourceType)
    {
        m_pidSource = pidSource
    }

    override fun getPIDSourceType(): PIDSourceType = m_pidSource

    /** Sampled by the PID loop every iteration; returns the current Y velocity. */
    override fun pidGet(): Double = Drivetrain.rotatedYVelocity
}


15 changes: 15 additions & 0 deletions src/main/java/frc/robot/Util/PIDWriteX.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package frc.robot.Util

import edu.wpi.first.wpilibj.PIDOutput
import edu.wpi.first.wpilibj.PIDSource
import edu.wpi.first.wpilibj.PIDSourceType
import frc.robot.subsystems.Drivetrain

/**
 * PID output sink for the X axis: the controller pushes each computed
 * correction into [pidWrite], and autonomous code reads the latest value
 * back through [getOutput].
 */
public object PIDWriteX : PIDOutput
{
    // Most recent correction written by the PID controller thread.
    private var latestOutput: Double = 0.0

    override fun pidWrite(output: Double)
    {
        latestOutput = output
    }

    /** PID output for autonomous driving in the X direction. */
    fun getOutput(): Double = latestOutput
}
15 changes: 15 additions & 0 deletions src/main/java/frc/robot/Util/PIDWriteY.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package frc.robot.Util

import edu.wpi.first.wpilibj.PIDOutput
import edu.wpi.first.wpilibj.PIDSource
import edu.wpi.first.wpilibj.PIDSourceType
import frc.robot.subsystems.Drivetrain

/**
 * PID output sink for the Y axis: the controller pushes each computed
 * correction into [pidWrite], and autonomous code reads the latest value
 * back through [getOutput].
 */
public object PIDWriteY : PIDOutput
{
    // Most recent correction written by the PID controller thread.
    private var latestOutput: Double = 0.0

    override fun pidWrite(output: Double)
    {
        latestOutput = output
    }

    /** PID output for autonomous driving in the Y direction. */
    fun getOutput(): Double = latestOutput
}
76 changes: 76 additions & 0 deletions src/main/java/frc/robot/Util/UltrasonicBase.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@

package frc.robot.Util

import edu.wpi.first.hal.DIOJNI;
import edu.wpi.first.hal.FRCNetComm.tResourceType;
import edu.wpi.first.hal.HAL;
import edu.wpi.first.wpilibj.smartdashboard.SendableBuilder;
import edu.wpi.first.wpilibj.DigitalSource


/**
 * Bare-bones digital input wrapping a DIO channel, used to read an
 * ultrasonic sensor's echo line and to route its edges for interrupt
 * timestamping.
 *
 * @param channel the roboRIO DIO channel the sensor is wired to
 */
public class UltrasonicBase(channel: Int): DigitalSource()
{
    var m_channel: Int = 0
    var m_handle: Int = 0

    // FIX: the original declared `fun UltrasonicBase(channel: Int)` — that is
    // a plain member function, NOT a constructor, so none of this setup ever
    // ran and m_handle stayed 0. An init block runs on construction.
    init
    {
        m_channel = channel
        // FIX: initializeDIOPort expects a HAL port handle, not the raw
        // channel number — mirror WPILib's own DigitalInput, which passes
        // HAL.getPort((byte) channel) (the original's commented-out line
        // shows the same intent).
        m_handle = DIOJNI.initializeDIOPort(HAL.getPort(channel.toByte()), true)

        HAL.report(tResourceType.kResourceType_DigitalInput, channel)
        setName("Ultrasonic", channel)
    }

    override fun close() {
        super.close()
        // m_interrupt is inherited state; only tear down interrupts if one
        // was actually allocated.
        if (m_interrupt != 0)
        {
            cancelInterrupts()
        }
    }

    /** @return the DIO channel this input is bound to */
    override fun getChannel() : Int
    {
        return m_channel;
    }

    // Plain DIO line — no analog-trigger routing type.
    override fun getAnalogTriggerTypeForRouting(): Int
    {
        return 0;
    }

    /** Reads the current logic level of the DIO line. */
    fun get():Boolean
    {
        return DIOJNI.getDIO(m_handle);
    }

    override fun isAnalogTrigger(): Boolean
    {
        return false;
    }

    /** @return the HAL handle used when routing this source to interrupts */
    override fun getPortHandleForRouting() : Int
    {
        return m_handle;
    }

    override fun readRisingTimestamp(): Double
    {
        // FIX: the original returned this.readRisingTimestamp() — an
        // unconditional self-call, i.e. infinite recursion / stack overflow
        // on first use. Defer to the inherited implementation instead.
        // NOTE(review): if the base declaration turns out to be abstract,
        // this must read the interrupt timestamp via JNI directly — verify
        // against the WPILib version in use.
        return super.readRisingTimestamp();
    }

    /** Capitalization-compatible alias for the inherited readFallingTimestamp(). */
    fun readFallingTimeStamp(): Double
    {
        return this.readFallingTimestamp();
    }

    override fun initSendable(builder:SendableBuilder) {
        builder.setSmartDashboardType("Digital Input")
        builder.addBooleanProperty("Value", this::get, null)
    }
}
Loading

0 comments on commit ba1207a

Please sign in to comment.