package com.secureapps.facerecog;

import android.Manifest;
import android.annotation.TargetApi;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.util.Log;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.AppCompatButton;
import androidx.core.content.ContextCompat;

import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Mode;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

public class MainActivity extends AppCompatActivity
        implements FrameProcessingCompleteListener, DistanceMeasureCompleteListener {

    private static final String TAG = "HLFR__1";
    private static final int APP_PERMISSIONS_REQUEST_CODE = 1000;

    private boolean permissionGranted = false;
    private CameraView cameraView;
    private AppCompatButton trainFace;
    private AppCompatButton recognizeFace;
    private TinyDB tinydb;
    private Mat mRgba, mGray, mGrayTemp, mRgbaTemp;
    private ArrayList<Mat> images;
    private boolean useEigenfaces = true;
    private ArrayList<String> imagesLabels = new ArrayList<>();
    private int maximumImages = 50;
    private float faceThreshold = 0.400f, distanceThreshold = 0.400f;
    private SharedPreferences prefs;
    private SharedPreferences.Editor editor;
    private String[] uniqueLabels;
    private Toast mToast;
    private int processedFrameIndex = 0;
    private List<Bitmap> frames = new ArrayList<>();
    private FrameProcessingCompleteListener frameProcessingCompleteListener;
    private DistanceMeasureCompleteListener distanceMeasureCompleteListener;
    // private NativeMethods.MeasureDistTask mMeasureDistTask;
    // private NativeMethods.TrainFacesTask mTrainFacesTask;
    private String videoFolderPath;
    private String videoFileName;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    mRgba = new Mat();
                    mGray = new Mat();
                    mRgbaTemp = new Mat();
                    mGrayTemp = new Mat();
                    NativeMethods.loadNativeLibraries(); // Load native libraries after(!) OpenCV initialization
                    Log.d(TAG, "OpenCV loaded successfully");
                    // Read images and labels from shared preferences
                    images = tinydb.getListMat("images");
                    imagesLabels = tinydb.getListString("imagesLabels");
                    Log.d(TAG, "Number of images: " + images.size()
                            + ". Number of labels: " + imagesLabels.size());
                    if (!images.isEmpty()) {
                        trainFaces(); // Train images after they are loaded
                        Log.d(TAG, "Images height: " + images.get(0).height()
                                + " Width: " + images.get(0).width()
                                + " total: " + images.get(0).total());
                    }
                    Log.d(TAG, "Labels: " + imagesLabels);
                    Log.d(TAG, "images: " + images);
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };
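    // Hedged sketch (not wired into the original flow): the commented-out block
    // in measureDistTaskCallback below implements a four-way classification by
    // crossing "near/far from face space" (faceDist vs. faceThreshold) with
    // "near/far from a known face class" (minDist vs. distanceThreshold). This
    // helper makes that decision table explicit; names mirror the fields above.
    private String classifyDistances(float faceDist, float minDist) {
        if (faceDist < faceThreshold && minDist < distanceThreshold)
            return "Known face";        // 1. Near face space and near a face class
        if (faceDist < faceThreshold)
            return "Unknown face";      // 2. Near face space but not near a known face class
        if (minDist < distanceThreshold)
            return "False recognition"; // 3. Distant from face space but near a face class
        return "Not a face";            // 4. Distant from face space and from all face classes
    }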
    private NativeMethods.MeasureDistTask.Callback measureDistTaskCallback =
            new NativeMethods.MeasureDistTask.Callback() {
        @Override
        public void onMeasureDistComplete(Bundle bundle) {
            if (bundle == null) {
                Log.d(TAG, "Failed to measure distance");
                showToast("Failed to measure distance", Toast.LENGTH_LONG);
                return;
            }
            float minDist = bundle.getFloat(NativeMethods.MeasureDistTask.MIN_DIST_FLOAT);
            Log.d(TAG, "minDist: " + minDist);
            Log.d(TAG, "Keep training...");
            distanceMeasureCompleteListener.onMeasureCompleted();
            // if (minDist != -1) {
            //     int minIndex = bundle.getInt(NativeMethods.MeasureDistTask.MIN_DIST_INDEX_INT);
            //     float faceDist = bundle.getFloat(NativeMethods.MeasureDistTask.DIST_FACE_FLOAT);
            //     if (imagesLabels.size() > minIndex) { // Just to be sure
            //         Log.d(TAG, "dist[" + minIndex + "]: " + minDist + ", face dist: " + faceDist + ", label: " + imagesLabels.get(minIndex));
            //         String minDistString = String.format(Locale.US, "%.4f", minDist);
            //         String faceDistString = String.format(Locale.US, "%.4f", faceDist);
            //         if (faceDist < faceThreshold && minDist < distanceThreshold) // 1. Near face space and near a face class
            //             showToast("Face detected: " + imagesLabels.get(minIndex) + ". Distance: " + minDistString, Toast.LENGTH_LONG);
            //         else if (faceDist < faceThreshold) // 2. Near face space but not near a known face class
            //             showToast("Unknown face. Face distance: " + faceDistString + ". Closest Distance: " + minDistString, Toast.LENGTH_LONG);
            //         else if (minDist < distanceThreshold) // 3. Distant from face space and near a face class
            //             showToast("False recognition. Face distance: " + faceDistString + ". Closest Distance: " + minDistString, Toast.LENGTH_LONG);
            //         else // 4. Distant from face space and not near a known face class
            //             showToast("Image is not a face. Face distance: " + faceDistString + ". Closest Distance: " + minDistString, Toast.LENGTH_LONG);
            //     }
            // } else {
            //     if (useEigenfaces || uniqueLabels == null || uniqueLabels.length > 1) {
            //         Log.d(TAG, "Keep training...");
            //         showToast("Keep training...", Toast.LENGTH_SHORT);
            //         distanceMeasureCompleteListener.onMeasureCompleted();
            //     } else {
            //         Log.d(TAG, "Fisherfaces needs two different faces");
            //         showToast("Fisherfaces needs two different faces", Toast.LENGTH_SHORT);
            //     }
            // }
        }
    };

    private NativeMethods.TrainFacesTask.Callback trainFacesTaskCallback = result -> {
        if (result) {
            Log.d(TAG, "Training frame " + processedFrameIndex + " complete");
            frameProcessingCompleteListener.onFrameProcessed();
        } else {
            Log.d(TAG, "Training frame " + processedFrameIndex + " failed");
        }
    };

    @Override
    protected void onStop() {
        super.onStop();
        releaseMats();
        cameraView.clearFrameProcessors(); // Note: the frame processor is only re-added in onCreate
    }

    @Override
    protected void onPause() {
        super.onPause();
        releaseMats();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        releaseMats();
    }

    // The Mats are created lazily in mLoaderCallback, so guard against these
    // lifecycle callbacks running before OpenCV has been initialized
    private void releaseMats() {
        if (mGray != null) mGray.release();
        if (mRgba != null) mRgba.release();
    }

    @Override
    protected void onResume() {
        super.onResume();
        loadOpenCV();
    }

    private void loadOpenCV() {
        if (!OpenCVLoader.initDebug(true)) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }
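    // Minimal sketch, not used by the original flow: the frame processor in
    // onCreate below converts NV21 -> JPEG -> Bitmap, which is simple but
    // costly. An NV21 frame could instead be wrapped directly in an OpenCV
    // Mat, since its first width*height bytes are already the grayscale luma
    // plane. CvType is referenced by its fully qualified name to avoid adding
    // an import.
    private static Mat nv21ToGray(byte[] nv21, int width, int height) {
        // NV21 layout: a full-resolution Y plane followed by interleaved VU at
        // quarter resolution, hence height + height/2 rows of width bytes
        Mat yuv = new Mat(height + height / 2, width, org.opencv.core.CvType.CV_8UC1);
        yuv.put(0, 0, nv21);
        Mat gray = new Mat();
        Imgproc.cvtColor(yuv, gray, Imgproc.COLOR_YUV2GRAY_NV21);
        yuv.release();
        return gray;
    }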
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        tinydb = new TinyDB(this); // Used to store ArrayLists in the shared preferences
        prefs = PreferenceManager.getDefaultSharedPreferences(this);
        frameProcessingCompleteListener = this;
        distanceMeasureCompleteListener = this;
        editor = prefs.edit();
        checkAppPermissions();

        cameraView = findViewById(R.id.camera);
        cameraView.setLifecycleOwner(this);
        cameraView.setEngine(Engine.CAMERA2);
        cameraView.setMode(Mode.VIDEO);
        cameraView.setFacing(Facing.FRONT);

        recognizeFace = findViewById(R.id.recognize_face);
        recognizeFace.setOnClickListener(v ->
                startActivity(new Intent(MainActivity.this, RecognitionActivity.class)));

        trainFace = findViewById(R.id.train_face);
        trainFace.setOnClickListener(v -> {
            if (!permissionGranted) {
                Log.d(TAG, "Not all permissions granted");
                return;
            }
            // Reset the training state so repeated taps start a fresh session
            // and images stay aligned with their labels
            images = new ArrayList<>();
            imagesLabels = new ArrayList<>();
            frames.clear();
            processedFrameIndex = 0;
            trainFace.setText("Training in progress...");
            Log.d(TAG, "Permissions granted? " + permissionGranted);
            videoFolderPath = Environment.getExternalStorageDirectory() + File.separator
                    + "HLFR" + File.separator + "master";
            // UUID strings are already lowercase hex plus hyphens; strip the hyphens
            videoFileName = UUID.randomUUID().toString().replaceAll("[\\s\\-()]", "") + ".mp4";
            File outPath = new File(videoFolderPath);
            if (!outPath.exists()) {
                //noinspection ResultOfMethodCallIgnored
                outPath.mkdirs();
            }
            String fullPath = videoFolderPath + File.separator + videoFileName;
            cameraView.takeVideoSnapshot(new File(fullPath), 10000); // Record for at most 10 s
        });

        cameraView.addCameraListener(new CameraListener() {
            @Override
            public void onVideoRecordingEnd() {
                super.onVideoRecordingEnd();
                Log.d(TAG, "Frames size: " + frames.size());
                processFrames(processedFrameIndex);
            }
        });

        // See the nv21ToGray sketch above for a cheaper NV21 conversion path
        cameraView.addFrameProcessor(frame -> {
            if (!cameraView.isTakingVideo()) return;
            try {
                int w = frame.getSize().getWidth();
                int h = frame.getSize().getHeight();
                YuvImage yuvImage = new YuvImage(frame.getData(), ImageFormat.NV21, w, h, null);
                ByteArrayOutputStream os = new ByteArrayOutputStream();
                yuvImage.compressToJpeg(new Rect(0, 0, w, h), 100, os);
                byte[] jpegByteArray = os.toByteArray();
                Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
                os.close();
                if (frames.size() > maximumImages) {
                    // Stop recording only because enough frames were captured,
                    // not because a single frame failed to decode
                    Log.d(TAG, "Maximum frames required have been captured");
                    cameraView.stopVideo();
                } else if (bitmap != null) {
                    Log.d(TAG, "Added frame bitmap of length: " + bitmap.getByteCount());
                    frames.add(bitmap);
                }
            } catch (IOException e) {
                Log.d(TAG, "IOE: " + e.getMessage());
                Log.d(TAG, "StackTrace: " + Arrays.toString(e.getStackTrace()));
            }
        });
    }
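    // Core.flip's integer flipCode, used heavily in processFrames below:
    // 0 flips around the x-axis (vertical flip), a positive value around the
    // y-axis (horizontal mirror), a negative value around both. This sketch is
    // purely illustrative, not called by the original flow; it mirrors the
    // front-camera landscape case.
    private static void mirrorHorizontally(Mat frame) {
        Core.flip(frame, frame, 1); // 1 = flip around the y-axis
    }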
    private void processFrames(int frameIndex) {
        if (frameIndex >= frames.size()) return; // Nothing captured, or index out of range
        Log.d(TAG, "Begin processing frame " + frameIndex);
        Bitmap bitmap = frames.get(frameIndex);
        if (bitmap == null) {
            Log.d(TAG, "Bitmap was null");
            frameProcessingCompleteListener.onFrameProcessed(); // Keep the processing chain going
            return;
        }
        Utils.bitmapToMat(bitmap, mRgbaTemp);
        // Utils.bitmapToMat produces an RGBA Mat, so convert with RGBA2GRAY
        // (the original used COLOR_BGR2GRAY, which mixes up the channel order)
        Imgproc.cvtColor(mRgbaTemp, mGrayTemp, Imgproc.COLOR_RGBA2GRAY);
        Mat mGrayTmp = mGrayTemp;
        Mat mRgbaTmp = mRgbaTemp;
        int orientation = HushlockUtils.getScreenOrientation(this);
        switch (orientation) { // RGB image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                if (cameraView.getFacing() == Facing.FRONT)
                    Core.flip(mRgbaTmp, mRgbaTmp, 0); // Flip along x-axis
                else
                    Core.flip(mRgbaTmp, mRgbaTmp, -1); // Flip along both axes
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                if (cameraView.getFacing() == Facing.FRONT)
                    Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
                break;
        }
        switch (orientation) { // Grayscale image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
                Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
                if (cameraView.getFacing() == Facing.FRONT)
                    Core.flip(mGrayTmp, mGrayTmp, -1); // Flip along both axes
                else
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
                if (cameraView.getFacing() == Facing.BACK)
                    Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
                if (cameraView.getFacing() == Facing.FRONT)
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
                if (cameraView.getFacing() == Facing.BACK)
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
        }
        mGray = mGrayTmp;
        mRgba = mRgbaTmp;
        Log.d(TAG, "Gray height: " + mGray.height() + " Width: " + mGray.width()
                + " total: " + mGray.total());
        if (mGray.total() == 0) return;
        // Scale the image to decrease computation time and make it square, so
        // it does not crash on phones with different aspect ratios for the
        // front and back camera
        Size imageSize = new Size(200, 200);
        Imgproc.resize(mGray, mGray, imageSize);
        Log.d(TAG, "Small gray height: " + mGray.height() + " Width: " + mGray.width()
                + " total: " + mGray.total());
        //SaveImage(mGray);
        final Mat image = mGray.reshape(0, (int) mGray.total()); // Create column vector
        Log.d(TAG, "Vector height: " + image.height() + " Width: " + image.width()
                + " total: " + image.total());
        images.add(image); // Add current image to the array
        imagesLabels.add("Mahesh");
        Log.d(TAG, "image and label added, size: " + image.size());
        NativeMethods.MeasureDistTask mMeasureDistTask =
                new NativeMethods.MeasureDistTask(useEigenfaces, measureDistTaskCallback);
        mMeasureDistTask.execute(image);
    }

    private boolean trainFaces() {
        if (images.isEmpty())
            return true; // The array might be empty if the method is changed in the OnClickListener
        // Create a matrix where each image is represented as a column vector
        Mat imagesMatrix = new Mat((int) images.get(0).total(), images.size(), images.get(0).type());
        for (int i = 0; i < images.size(); i++)
            images.get(i).copyTo(imagesMatrix.col(i));
        Log.d(TAG, "Images height: " + imagesMatrix.height() + " Width: " + imagesMatrix.width()
                + " total: " + imagesMatrix.total());
        Log.d(TAG, "Training Eigenfaces");
        NativeMethods.TrainFacesTask mTrainFacesTask =
                new NativeMethods.TrainFacesTask(imagesMatrix, trainFacesTaskCallback);
        mTrainFacesTask.execute();
        return true;
    }
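    // Illustrative sketch of the data layout used above (assuming the 200x200
    // faces produced by processFrames): reshape(0, rows) only rewrites the Mat
    // header, so a stored 40000x1 column vector can be viewed again as a
    // 200x200 image without copying pixels. Not called by the original flow.
    private static Mat columnVectorToImage(Mat columnVector, int rows) {
        return columnVector.reshape(0, rows); // e.g. rows = 200 for a 200x200 face
    }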
    @TargetApi(Build.VERSION_CODES.M)
    private void checkAppPermissions() {
        List<String> pp = new ArrayList<>();
        boolean permissionRecordAudio = ContextCompat.checkSelfPermission(this,
                Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED;
        boolean permissionCamera = ContextCompat.checkSelfPermission(this,
                Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
        boolean permissionReadExtStorage = ContextCompat.checkSelfPermission(this,
                Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
        boolean permissionWriteExtStorage = ContextCompat.checkSelfPermission(this,
                Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
        if (!permissionRecordAudio) pp.add(Manifest.permission.RECORD_AUDIO);
        if (!permissionCamera) pp.add(Manifest.permission.CAMERA);
        if (!permissionReadExtStorage) pp.add(Manifest.permission.READ_EXTERNAL_STORAGE);
        if (!permissionWriteExtStorage) pp.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
        if (!pp.isEmpty()) {
            requestPermissions(pp.toArray(new String[0]), APP_PERMISSIONS_REQUEST_CODE);
        } else {
            permissionGranted = true;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode != APP_PERMISSIONS_REQUEST_CODE) return;
        // Only treat the request as granted if every permission was granted,
        // and initialize OpenCV once instead of once per granted permission
        permissionGranted = grantResults.length > 0;
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) {
                permissionGranted = false;
                break;
            }
        }
        if (permissionGranted) loadOpenCV();
    }

    private void showToast(String message, int duration) {
        if (duration != Toast.LENGTH_SHORT && duration != Toast.LENGTH_LONG)
            throw new IllegalArgumentException("Unsupported toast duration: " + duration);
        // Cancel any toast that is still showing; Toast.getView() is avoided
        // because it returns null for text toasts on API 30+
        if (mToast != null) mToast.cancel();
        mToast = Toast.makeText(this, message, duration);
        mToast.show();
    }

    @Override
    public void onFrameProcessed() {
        processedFrameIndex++;
        if (processedFrameIndex < frames.size()) {
            processFrames(processedFrameIndex);
            return;
        }
        showToast("All frames were processed and trained", Toast.LENGTH_SHORT);
        Log.d(TAG, "All frames were processed and trained");
        if (images != null && imagesLabels != null) {
            tinydb.putListMat("images", images);
            tinydb.putListString("imagesLabels", imagesLabels);
        }
        editor.putFloat("faceThreshold", faceThreshold);
        editor.putFloat("distanceThreshold", distanceThreshold);
        editor.putInt("maximumImages", maximumImages);
        editor.putBoolean("useEigenfaces", useEigenfaces);
        editor.putInt("mCameraIndex", 98);
        editor.apply();
        showToast("Training complete", Toast.LENGTH_SHORT);
        trainFace.setText("Training Done!");
    }

    @Override
    public void onMeasureCompleted() {
        trainFaces(); // Retrain on the updated image set after each distance measurement
    }
}