Commit

Merge pull request #366 from guardianproject/dev_camera_fixes
for #365 reduce camera resolution and remove one unneeded bitmap
n8fr8 committed Feb 10, 2019
2 parents 272b341 + bf09efa commit cea9d51
Showing 8 changed files with 240 additions and 167 deletions.
1 change: 1 addition & 0 deletions src/main/java/org/havenapp/main/sensors/BumpMonitor.java
@@ -83,6 +83,7 @@ public void onTrigger(TriggerEvent event) {
*/
Message message = new Message();
message.what = EventTrigger.BUMP;
message.getData().putString("path","BUMPED!");

try {
if (serviceMessenger != null) {
@@ -134,6 +134,15 @@ public void onSignalReceived(short[] signal) {
if (averageDB > mNoiseThreshold) {

if (!MicrophoneTaskFactory.isRecording()) {
Message message = new Message();
message.what = EventTrigger.MICROPHONE;
try {
if (serviceMessenger != null)
serviceMessenger.send(message);
} catch (RemoteException e) {
// Cannot happen
}

try {
AudioRecorderTask audioRecorderTask = MicrophoneTaskFactory.makeRecorder(context);
audioRecorderTask.setAudioRecorderListener(new AudioRecorderTask.AudioRecorderListener() {
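Both hunks above use the same sensor-to-service pattern: build a Message whose what field is an EventTrigger constant, optionally attach a "path" string to its data Bundle (as BumpMonitor now does), and send it through the Messenger bound to the monitor service. A condensed, self-contained sketch of that pattern follows; the wrapper class and the way serviceMessenger is obtained are illustrative, only the Message and Messenger calls are taken from the diff.

import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;

class TriggerSenderSketch {

    // Messenger bound to the monitor service; how it is obtained is outside this sketch.
    private Messenger serviceMessenger;

    void reportTrigger(int triggerType, String payloadPath) {
        Message message = new Message();
        message.what = triggerType; // an EventTrigger constant, e.g. EventTrigger.BUMP
        if (payloadPath != null)
            message.getData().putString("path", payloadPath); // optional payload, as in BumpMonitor
        try {
            if (serviceMessenger != null)
                serviceMessenger.send(message);
        } catch (RemoteException e) {
            // service not reachable; the trigger is simply dropped
        }
    }
}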
83 changes: 26 additions & 57 deletions src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java
@@ -36,20 +36,13 @@ public class MotionDetector {
// Input data

private List<MotionListener> listeners = new ArrayList<>();
private Handler handler;
private int motionSensitivity;
// Output data

private boolean hasChanged;

private IMotionDetector detector;

//private RenderScript renderScript;

private int detectColor = Color.YELLOW;

public interface MotionListener {
public void onProcess(Bitmap oldBitmap,
void onProcess(
Bitmap newBitmap,
Bitmap rawBitmap,
boolean motionDetected);
@@ -60,19 +53,12 @@ public void addListener(MotionListener listener) {
}

public MotionDetector(

Handler updateHandler,
int motionSensitivity) {
// this.renderScript = renderScript;
this.handler = updateHandler;
this.motionSensitivity = motionSensitivity;
detector = new LuminanceMotionDetector();
detector.setThreshold(motionSensitivity);

}

public void setDetectColor (int detectColor)
{
this.detectColor = detectColor;
}

public void setMotionSensitivity (int motionSensitivity)
@@ -84,31 +70,22 @@ public void setMotionSensitivity (int motionSensitivity)
public void detect(byte[] rawOldPic,
byte[] rawNewPic,
int width,
int height,
int rotationDegrees,
boolean facingFront) {
int height) {

int[] newPicLuma = ImageCodec.N21toLuma(rawNewPic, width, height);

if (rawOldPic != null) {

int[] oldPicLuma = ImageCodec.N21toLuma(rawOldPic, width, height);
detector.setThreshold(motionSensitivity);
List<Integer> changedPixels =
detector.detectMotion(oldPicLuma, newPicLuma, width, height);
hasChanged = false;

int[] newPic = ImageCodec.lumaToGreyscale(newPicLuma, width, height);
detector.detectMotion(ImageCodec.N21toLuma(rawOldPic, width, height), newPicLuma, width, height);

if (changedPixels != null) {
hasChanged = true;

}

/*
int[] newPic = ImageCodec.lumaToGreyscale(newPicLuma, width, height);
newPicLuma = null;
if (hasChanged) {


Bitmap lastBitmap = ImageCodec.lumaToBitmapGreyscale(oldPicLuma, width, height);
System.gc();
for (int i = 0; i < newPic.length; i++)
newPic[i] = Color.TRANSPARENT;
@@ -117,7 +94,6 @@ public void detect(byte[] rawOldPic,
newPic[changedPixel] = detectColor;
}

Matrix mtx = new Matrix();
if (facingFront) {
@@ -127,37 +103,30 @@ public void detect(byte[] rawOldPic,
else
mtx.postRotate(rotationDegrees);

Bitmap newBitmap
= Bitmap.createBitmap(Bitmap.createBitmap(newPic, width, height, Bitmap.Config.ARGB_4444), 0, 0, width, height, mtx, true);
newPic = null;
*/

Bitmap rawBitmap = convertImage(rawNewPic,width,height);
//Bitmap.createBitmap(Nv21Image.nv21ToBitmap(renderScript, rawNewPic, width, height),0,0,width,height,mtx,true);

handler.post(() -> {
for (MotionListener listener : listeners) {
listener.onProcess(
lastBitmap,
newBitmap,
rawBitmap,
hasChanged);
}

});

for (MotionListener listener : listeners) {
listener.onProcess(
null,
rawBitmap,
true);
}
}
else
{
//nothing changed
handler.post(() -> {
for (MotionListener listener : listeners) {
listener.onProcess(
null,
null,
null,
hasChanged);
}

});
for (MotionListener listener : listeners) {
listener.onProcess(
null,
null,
false);
}

}

}
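With this change MotionDetector no longer builds the greyscale overlay or the previous-frame bitmap and no longer posts results through a Handler; listeners are called directly on the detecting thread with the raw preview bitmap and a motion flag. Below is a minimal wiring sketch under two stated assumptions: the post-change constructor takes only the sensitivity value (the Handler parameter is removed above), and the caller supplies NV21 preview buffers; the wrapper class and the sensitivity value 50 are illustrative.

import android.graphics.Bitmap;

import org.havenapp.main.sensors.motion.MotionDetector;

class MotionWiringSketch {

    private byte[] lastFrame; // previous NV21 preview frame; null on the first call

    private final MotionDetector motionDetector = new MotionDetector(50);

    MotionWiringSketch() {
        // Listeners now run on whichever thread calls detect(), so any UI work
        // must be posted to the main thread by the listener itself.
        motionDetector.addListener((Bitmap newBitmap, Bitmap rawBitmap, boolean motionDetected) -> {
            if (motionDetected) {
                // hand rawBitmap to the recorder or the UI here
            }
        });
    }

    void onPreviewFrame(byte[] nv21, int width, int height) {
        motionDetector.detect(lastFrame, nv21, width, height);
        lastFrame = nv21;
    }
}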
14 changes: 12 additions & 2 deletions src/main/java/org/havenapp/main/service/MonitorService.java
@@ -15,6 +15,7 @@
import android.app.Service;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
@@ -26,6 +27,7 @@

import androidx.annotation.RequiresApi;
import androidx.core.app.NotificationCompat;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;

import org.havenapp.main.HavenApp;
import org.havenapp.main.MonitorActivity;
@@ -292,11 +294,19 @@ private void stopSensors ()
/**
* Sends an alert according to type of connectivity
*/
public synchronized void alert(int alertType, String path) {
public void alert(int alertType, String value) {

Date now = new Date();
boolean doNotification = false;

//for the UI visual
Intent iEvent = new Intent("event");
iEvent.putExtra("type",alertType);
LocalBroadcastManager.getInstance(this).sendBroadcast(iEvent);

if (TextUtils.isEmpty(value))
return;

if (mLastEvent == null) {
mLastEvent = new Event();
long eventId = HavenEventDB.getDatabase(getApplicationContext())
@@ -323,7 +333,7 @@ else if (mPrefs.getNotificationTimeMs() > 0 && mLastNotification != null)

EventTrigger eventTrigger = new EventTrigger();
eventTrigger.setType(alertType);
eventTrigger.setPath(path);
eventTrigger.setPath(value);

mLastEvent.addEventTrigger(eventTrigger);

99 changes: 84 additions & 15 deletions src/main/java/org/havenapp/main/ui/CameraFragment.java
@@ -8,8 +8,15 @@
*/
package org.havenapp.main.ui;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.hardware.SensorEvent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
@@ -18,13 +25,13 @@

import com.otaliastudios.cameraview.Audio;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.SizeSelector;

import org.havenapp.main.PreferenceManager;
import org.havenapp.main.R;
import org.havenapp.main.sensors.motion.CameraViewHolder;
import org.havenapp.main.model.EventTrigger;

import androidx.fragment.app.Fragment;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;

public final class CameraFragment extends Fragment {

@@ -33,6 +40,75 @@ public final class CameraFragment extends Fragment {
private PreferenceManager prefs;
private TextView txtCameraStatus;

private boolean isAttached = false;

/**
* Handler used to update back the UI after motion detection
*/
private final Handler handler = new Handler()
{
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);

if (isAttached) {
if (txtCameraStatus != null) {

if (msg.what == EventTrigger.CAMERA) {
if (cameraViewHolder.doingVideoProcessing()) {
txtCameraStatus.setText(getString(R.string.motion_detected)
+ "\n" + getString(R.string.status_recording_video));
} else {
txtCameraStatus.setText(getString(R.string.motion_detected));
}
}
else if (msg.what == EventTrigger.POWER) {
txtCameraStatus.setText(getString(R.string.power_detected));
}
else if (msg.what == EventTrigger.MICROPHONE) {
txtCameraStatus.setText(getString(R.string.sound_detected));
}
else if (msg.what == EventTrigger.ACCELEROMETER || msg.what == EventTrigger.BUMP) {
txtCameraStatus.setText(getString(R.string.device_move_detected));
}
else if (msg.what == EventTrigger.LIGHT) {
txtCameraStatus.setText(getString(R.string.status_light));
}


}
}
}
};

BroadcastReceiver receiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {

int eventType = intent.getIntExtra("type",-1);

//String path = intent.getData().getPath();

handler.sendEmptyMessage(eventType);
}
};

@Override
public void onDetach() {
super.onDetach();
isAttached = false;
LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(receiver);
}

@Override
public void onAttach(Context context) {
super.onAttach(context);
isAttached = true;
IntentFilter filter = new IntentFilter();
filter.addAction("event");
LocalBroadcastManager.getInstance(getContext()).registerReceiver(receiver,filter );
}

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
@@ -68,6 +144,8 @@ public void onResume() {
initCamera();

cameraViewHolder.setMotionSensitivity(prefs.getCameraSensitivity());


}

public void updateCamera ()
@@ -99,19 +177,10 @@ public void initCamera ()
if (cameraViewHolder == null) {
cameraViewHolder = new CameraViewHolder(getActivity(), cameraView);

cameraViewHolder.addListener((oldBitmap, newBitmap, rawBitmap, motionDetected) -> {
if (motionDetected)
newImage.setImageBitmap(newBitmap);
else
newImage.setImageResource(R.drawable.blankimage);

if (txtCameraStatus != null) {
if (cameraViewHolder.doingVideoProcessing()) {
txtCameraStatus.setText("Recording...");
} else {
txtCameraStatus.setText("");
}
}
cameraViewHolder.addListener((newBitmap, rawBitmap, motionDetected) -> {

handler.sendEmptyMessage(motionDetected?EventTrigger.CAMERA:-1);


});
}
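The UI update path that replaces the old bitmap hand-off now runs over a local broadcast: MonitorService.alert() sends an Intent with action "event" and an integer extra "type", and CameraFragment's receiver forwards that type to a Handler that sets the status text. Any other screen could observe the same broadcast; the sketch below shows the minimal subscription, with a hypothetical activity name, while the action and extra names are taken from the diff.

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;

import androidx.appcompat.app.AppCompatActivity;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;

public class EventStatusActivity extends AppCompatActivity {

    private final BroadcastReceiver eventReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            int eventType = intent.getIntExtra("type", -1); // an EventTrigger constant, or -1
            // update status views for eventType here
        }
    };

    @Override
    protected void onStart() {
        super.onStart();
        LocalBroadcastManager.getInstance(this)
                .registerReceiver(eventReceiver, new IntentFilter("event"));
    }

    @Override
    protected void onStop() {
        super.onStop();
        LocalBroadcastManager.getInstance(this).unregisterReceiver(eventReceiver);
    }
}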
