code refactor (#128)
* minor code cleanup
* update targetSdkVersion to 29
* convert anonymous inner classes to lambdas
dhruv2295 committed Dec 14, 2020
1 parent 48c12e1 commit d4eebd5
Showing 13 changed files with 108 additions and 132 deletions.
9 changes: 5 additions & 4 deletions android/app/build.gradle
@@ -1,12 +1,12 @@
apply plugin: 'com.android.application'

android {
compileSdkVersion 28
buildToolsVersion '29.0.3'
compileSdkVersion 30
buildToolsVersion '30.0.3'
defaultConfig {
applicationId "org.openbot"
minSdkVersion 21
targetSdkVersion 28
targetSdkVersion 29
versionCode 1
versionName "1.0"
}
@@ -36,7 +36,7 @@ android {

dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar','*.aar'])
implementation 'com.android.support:design:28.0.0'
implementation 'com.google.android.material:material:1.2.1'

// Build off of stable TensorFlow Lite
implementation 'org.tensorflow:tensorflow-lite:2.0.0'
@@ -47,5 +47,6 @@ dependencies {
implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
implementation 'org.zeroturnaround:zt-zip:1.14'
implementation 'com.loopj.android:android-async-http:1.4.9'
implementation 'androidx.localbroadcastmanager:localbroadcastmanager:1.0.0'

}
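Note on the dependency change: the discontinued com.android.support:design artifact is replaced by Material Components (com.google.android.material:material:1.2.1). The widgets keep their class names but move to new packages, so only imports in the Java sources change; the compileSdkVersion/buildToolsVersion bump to 30 affects what the code compiles against, while runtime behavior follows the targetSdkVersion raised to 29. A minimal sketch of the package rename, using BottomSheetBehavior as an example (illustrative only, not code from this commit):

```java
import android.view.View;
import com.google.android.material.bottomsheet.BottomSheetBehavior;

public final class BottomSheetSketch {
  private BottomSheetSketch() {}

  // Previously imported from android.support.design.widget.BottomSheetBehavior;
  // with material:1.2.1 only the package changes, the API stays the same.
  public static <V extends View> BottomSheetBehavior<V> expand(V bottomSheet) {
    BottomSheetBehavior<V> behavior = BottomSheetBehavior.from(bottomSheet);
    behavior.setState(BottomSheetBehavior.STATE_EXPANDED);
    return behavior;
  }
}
```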
1 change: 1 addition & 0 deletions android/app/src/main/AndroidManifest.xml
@@ -24,6 +24,7 @@
android:label="@string/app_name"
android:roundIcon="@drawable/openbot_icon"
android:supportsRtl="true"
android:requestLegacyExternalStorage="true"
android:theme="@style/AppTheme"
android:usesCleartextTraffic="true">
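The new android:requestLegacyExternalStorage="true" pairs with the targetSdkVersion 29 bump in build.gradle: on Android 10 an app targeting API 29 runs under scoped storage unless it sets this opt-out, and the flag is ignored once an app targets API 30. A hedged sketch of how an app could verify at runtime which storage mode it actually received before writing to a shared directory (the "openbot" folder name is an assumption for illustration, not taken from this commit):

```java
import android.os.Build;
import android.os.Environment;
import java.io.File;

public final class StorageModeSketch {
  private StorageModeSketch() {}

  /** True when broad file-path access to shared external storage is still available. */
  public static boolean hasLegacyExternalStorage() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
      return true; // scoped storage only starts with Android 10 (API 29)
    }
    return Environment.isExternalStorageLegacy(); // honors requestLegacyExternalStorage
  }

  public static File sharedLogDirectory() {
    // getExternalStorageDirectory() is deprecated on API 29+ but remains usable while
    // legacy external storage is in effect; "openbot" is a hypothetical folder name.
    return new File(Environment.getExternalStorageDirectory(), "openbot");
  }
}
```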

15 changes: 3 additions & 12 deletions android/app/src/main/java/org/openbot/CameraActivity.java
@@ -35,7 +35,6 @@
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
@@ -104,12 +103,12 @@ public abstract class CameraActivity
private int cameraSelection = CameraCharacteristics.LENS_FACING_BACK;
protected int previewWidth = 0;
protected int previewHeight = 0;
private boolean debug = false;
private final boolean debug = false;
private Handler handler;
private HandlerThread handlerThread;
private boolean useCamera2API;
private boolean isProcessingFrame = false;
private byte[][] yuvBytes = new byte[3][];
private final byte[][] yuvBytes = new byte[3][];
private int[] rgbBytes = null;
private int yRowStride;
private Runnable postInferenceCallback;
@@ -240,11 +239,7 @@ protected void onCreate(final Bundle savedInstanceState) {
new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
} else {
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
}
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
// int width = bottomSheetLayout.getMeasuredWidth();
int height = gestureLayout.getMeasuredHeight();

@@ -858,7 +853,6 @@ protected void sendFrameNumberToSensorService(long frameNumber) {
mSensorMessenger.send(msg);
} catch (RemoteException e) {
e.printStackTrace();
;
}
}
}
@@ -875,7 +869,6 @@ protected void sendInferenceTimeToSensorService(long frameNumber, long inference
mSensorMessenger.send(msg);
} catch (RemoteException e) {
e.printStackTrace();
;
}
}
}
@@ -890,7 +883,6 @@ protected void sendControlToSensorService(ControlSignal vehicleControl) {
mSensorMessenger.send(msg);
} catch (RemoteException e) {
e.printStackTrace();
;
}
}
}
@@ -904,7 +896,6 @@ protected void sendIndicatorToSensorService(int vehicleIndicator) {
mSensorMessenger.send(msg);
} catch (RemoteException e) {
e.printStackTrace();
;
}
}
}
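Two kinds of cleanup in CameraActivity: the stray `;` after each `e.printStackTrace();` was an empty statement with no effect, and the `Build.VERSION.SDK_INT < JELLY_BEAN` branch was dead code, since removeOnGlobalLayoutListener() has existed since API 16 and the project's minSdkVersion is 21. One reason this particular callback stays an anonymous class while others in the commit become lambdas is that it must unregister itself, and inside a lambda `this` would refer to the enclosing activity rather than the listener. A lambda variant would need an explicit handle, roughly like the sketch below (illustrative only; the helper name and structure are assumptions, not OpenBot code):

```java
import android.view.View;
import android.view.ViewTreeObserver;

public final class OneShotLayoutListener {
  private OneShotLayoutListener() {}

  /** Runs {@code action} after the next layout pass, then removes the listener. */
  public static void onNextLayout(final View view, final Runnable action) {
    final ViewTreeObserver.OnGlobalLayoutListener[] holder =
        new ViewTreeObserver.OnGlobalLayoutListener[1];
    holder[0] =
        () -> {
          // removeOnGlobalLayoutListener() is available since API 16, so with minSdkVersion 21
          // no fallback to the deprecated removeGlobalOnLayoutListener() is needed.
          view.getViewTreeObserver().removeOnGlobalLayoutListener(holder[0]);
          action.run();
        };
    view.getViewTreeObserver().addOnGlobalLayoutListener(holder[0]);
  }
}
```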
@@ -294,7 +294,7 @@ public View onCreateView(

@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
textureView = view.findViewById(R.id.texture);
}

@Override
@@ -55,10 +55,10 @@ public class LegacyCameraConnectionFragment extends Fragment {
}

private Camera camera;
private Camera.PreviewCallback imageListener;
private Size desiredSize;
private final Camera.PreviewCallback imageListener;
private final Size desiredSize;
/** The layout identifier to inflate for this Fragment. */
private int layout;
private final int layout;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/**
@@ -142,7 +142,7 @@ public View onCreateView(

@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
textureView = view.findViewById(R.id.texture);
}

@Override
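The dropped (AutoFitTextureView) casts in both camera fragments rely on findViewById() being generic since API 26 (<T extends View> T findViewById(int id)): the assignment's target type supplies the type argument, so the explicit cast is redundant under compileSdkVersion 30. Making imageListener, desiredSize, and layout final is safe because they are only assigned in the constructor. A small hedged sketch of the generic lookup (the helper name is an assumption, not project code):

```java
import android.view.View;

public final class ViewLookupSketch {
  private ViewLookupSketch() {}

  /** Generic lookup: the caller's target type picks T, so no cast is needed at the call site. */
  public static <T extends View> T requireView(final View root, final int id) {
    final T view = root.findViewById(id);
    if (view == null) {
      throw new IllegalArgumentException("Layout is missing view id " + id);
    }
    return view;
  }
}
```

A call site would then read, for example, AutoFitTextureView textureView = ViewLookupSketch.requireView(view, R.id.texture); with no cast.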
136 changes: 65 additions & 71 deletions android/app/src/main/java/org/openbot/NetworkActivity.java
@@ -86,8 +86,8 @@ public class NetworkActivity extends CameraActivity implements OnImageAvailableL
private MultiBoxTracker tracker;
private BorderedText borderedText;

private AudioPlayer audioPlayer;
private String voice;
private final AudioPlayer audioPlayer;
private final String voice;

public NetworkActivity() {
audioPlayer = new AudioPlayer(this);
@@ -121,7 +121,7 @@ public void onPreviewSizeChosen(final Size size, final int rotation) {
return;
}

trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay = findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback(
new DrawCallback() {
@Override
@@ -193,81 +193,75 @@ protected void processImage() {
LOGGER.i("Putting image " + currFrameNum + " for detection in bg thread.");

runInBackground(
new Runnable() {
@Override
public void run() {
if (detector != null) {
LOGGER.i("Running detection on image " + currFrameNum);
final long startTime = SystemClock.uptimeMillis();
final List<Detector.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

if (!results.isEmpty())
LOGGER.i(
"Object: "
+ results.get(0).getLocation().centerX()
+ ", "
+ results.get(0).getLocation().centerY()
+ ", "
+ results.get(0).getLocation().height()
+ ", "
+ results.get(0).getLocation().width());

cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final Canvas canvas = new Canvas(cropCopyBitmap);
final Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Style.STROKE);
paint.setStrokeWidth(2.0f);

float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;

final List<Detector.Recognition> mappedRecognitions =
new LinkedList<Detector.Recognition>();

for (final Detector.Recognition result : results) {
final RectF location = result.getLocation();
if (location != null && result.getConfidence() >= minimumConfidence) {
canvas.drawRect(location, paint);
cropToFrameTransform.mapRect(location);
result.setLocation(location);
mappedRecognitions.add(result);
}
() -> {
if (detector != null) {
LOGGER.i("Running detection on image " + currFrameNum);
final long startTime = SystemClock.uptimeMillis();
final List<Detector.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

if (!results.isEmpty())
LOGGER.i(
"Object: "
+ results.get(0).getLocation().centerX()
+ ", "
+ results.get(0).getLocation().centerY()
+ ", "
+ results.get(0).getLocation().height()
+ ", "
+ results.get(0).getLocation().width());

cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final Canvas canvas1 = new Canvas(cropCopyBitmap);
final Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Style.STROKE);
paint.setStrokeWidth(2.0f);

float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;

final List<Detector.Recognition> mappedRecognitions =
new LinkedList<Detector.Recognition>();

for (final Detector.Recognition result : results) {
final RectF location = result.getLocation();
if (location != null && result.getConfidence() >= minimumConfidence) {
canvas1.drawRect(location, paint);
cropToFrameTransform.mapRect(location);
result.setLocation(location);
mappedRecognitions.add(result);
}

tracker.trackResults(mappedRecognitions, currFrameNum);
vehicleControl = tracker.updateTarget();
trackingOverlay.postInvalidate();
} else if (autoPilot != null) {
LOGGER.i("Running autopilot on image " + currFrameNum);
final long startTime = SystemClock.uptimeMillis();
vehicleControl = autoPilot.recognizeImage(croppedBitmap, vehicleIndicator);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
}

// In case control was removed from network during inference
if (!driveByNetwork) {
vehicleControl = new ControlSignal(0, 0);
}
tracker.trackResults(mappedRecognitions, currFrameNum);
vehicleControl = tracker.updateTarget();
trackingOverlay.postInvalidate();
} else if (autoPilot != null) {
LOGGER.i("Running autopilot on image " + currFrameNum);
final long startTime = SystemClock.uptimeMillis();
vehicleControl = autoPilot.recognizeImage(croppedBitmap, vehicleIndicator);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
}

if (getLoggingEnabled()) {
sendInferenceTimeToSensorService(currFrameNum, lastProcessingTimeMs);
}
// In case control was removed from network during inference
if (!driveByNetwork) {
vehicleControl = new ControlSignal(0, 0);
}

if (getLoggingEnabled()) {
sendInferenceTimeToSensorService(currFrameNum, lastProcessingTimeMs);
}

computingNetwork = false;
computingNetwork = false;

updateVehicleState();
updateVehicleState();

runOnUiThread(
new Runnable() {
@Override
public void run() {
// showFrameInfo(previewWidth + "x" + previewHeight);
// showCropInfo(croppedBitmap.getWidth() + "x" + croppedBitmap.getHeight());
showInference(lastProcessingTimeMs + "ms");
}
});
}
runOnUiThread(
() -> {
// showFrameInfo(previewWidth + "x" + previewHeight);
// showCropInfo(croppedBitmap.getWidth() + "x" + croppedBitmap.getHeight());
showInference(lastProcessingTimeMs + "ms");
});
});
}
}
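The large hunk above is the anonymous Runnable passed to runInBackground() rewritten as a lambda, plus the re-indentation that conversion causes; the detection and autopilot logic inside is unchanged. One detail worth noting: the local Canvas is renamed canvas1, most likely because a lambda body shares the enclosing method's scope and may not redeclare a variable already visible there, whereas an anonymous class opens a new scope where shadowing is legal, so the automated conversion renames defensively. A minimal, self-contained sketch of that rule (names are illustrative, not OpenBot's API):

```java
public class LambdaShadowingSketch {

  private static void runInBackground(final Runnable r) {
    new Thread(r).start(); // stand-in for CameraActivity's background Handler
  }

  static void beforeRefactor() {
    final String canvas = "enclosing local";
    runInBackground(
        new Runnable() {
          @Override
          public void run() {
            String canvas = "shadowing is allowed inside an anonymous class";
            System.out.println(canvas);
          }
        });
  }

  static void afterRefactor() {
    final String canvas = "enclosing local";
    runInBackground(
        () -> {
          // String canvas = ...;  // would not compile: the lambda cannot shadow `canvas`
          final String canvas1 = "so the converted code renames the local";
          System.out.println(canvas + " / " + canvas1);
        });
  }
}
```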
6 changes: 3 additions & 3 deletions android/app/src/main/java/org/openbot/SensorService.java
@@ -465,7 +465,7 @@ public BufferedWriter openLog(String path, String filename) {
e.printStackTrace();
return null;
}
};
}

public void appendLog(BufferedWriter writer, String text) {
try {
@@ -474,15 +474,15 @@ public void appendLog(BufferedWriter writer, String text) {
} catch (IOException e) {
e.printStackTrace();
}
};
}

public void closeLog(BufferedWriter writer) {
try {
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
};
}

private void startTrackingLocation() {
try {
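The removed trailing `;` after openLog(), appendLog(), and closeLog() were empty member declarations: legal Java, but they do nothing, so deleting them is purely cosmetic. For reference (illustrative snippet, not project code):

```java
class TrailingSemicolonSketch {
  void appendLog() {
  };  // the extra `;` is parsed as an empty class-body declaration — allowed but redundant

  void closeLog() {
  }   // the cleaned-up form
}
```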
@@ -45,6 +45,6 @@ public synchronized void draw(final Canvas canvas) {

/** Interface defining the callback for client classes. */
public interface DrawCallback {
public void drawCallback(final Canvas canvas);
void drawCallback(final Canvas canvas);
}
}
@@ -19,5 +19,5 @@
import org.openbot.tflite.Detector.Recognition;

public interface ResultsView {
public void setResults(final List<Recognition> results);
void setResults(final List<Recognition> results);
}
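Dropping `public` from drawCallback() and setResults() changes nothing semantically: interface methods are implicitly public (and abstract), so the modifier is redundant and is commonly flagged by IDE inspections. Both forms below declare the same interface (sketch only, generic element type used to stay self-contained):

```java
import java.util.List;

interface ResultsViewVerbose<T> {
  public abstract void setResults(final List<T> results);
}

interface ResultsViewClean<T> {
  void setResults(final List<T> results); // identical visibility and abstractness
}
```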
4 changes: 2 additions & 2 deletions android/app/src/main/java/org/openbot/env/AudioPlayer.java
@@ -13,7 +13,7 @@

public class AudioPlayer {
private MediaPlayer mp;
private Context mContext;
private final Context mContext;

public AudioPlayer(Context context) {
mp = new MediaPlayer();
@@ -24,7 +24,7 @@ public AudioPlayer(Context context) {
public void play(int id) {
try {
mp.reset();
mp = mp.create(mContext, id);
mp = MediaPlayer.create(mContext, id);
mp.start();
} catch (Exception e) {
e.printStackTrace();
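MediaPlayer.create() is a static factory that returns a new, already-prepared player, so calling it through the existing instance (mp.create(...)) compiled but obscured the fact that mp is being replaced; the refactor makes the static call explicit. A hedged variant of play() that additionally releases the previous player before replacing it, since reset() alone does not free the old instance's resources (an assumption-labeled sketch, not the committed code):

```java
import android.content.Context;
import android.media.MediaPlayer;

public class AudioPlayerSketch {
  private MediaPlayer mp;
  private final Context mContext;

  public AudioPlayerSketch(final Context context) {
    mContext = context;
  }

  public void play(final int id) {
    if (mp != null) {
      mp.release(); // free the previous native player instead of only reset()-ing it
    }
    mp = MediaPlayer.create(mContext, id); // static factory; returns null if creation fails
    if (mp != null) {
      mp.start();
    }
  }
}
```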
