/**
* Created by Fabrice Armisen (farmisen@gmail.com) on 1/3/16.
*/
package com.lwansbrough.RCTCamera;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.view.MotionEvent;
import android.view.TextureView;

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.common.HybridBinarizer;

import java.util.ArrayList;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
/**
 * Camera preview surface for the react-native-camera view.
 *
 * Renders the camera preview into a {@link TextureView}, forwards preview
 * frames to a ZXing barcode scanner, and handles touch gestures
 * (pinch-to-zoom and tap-to-focus) on the preview.
 */
class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceTextureListener, Camera.PreviewCallback {
    private int _cameraType;
    private int _captureMode;
    private SurfaceTexture _surfaceTexture;
    private int _surfaceTextureWidth;
    private int _surfaceTextureHeight;
    // Re-entrancy guards for startCamera()/stopCamera().
    private boolean _isStarting;
    private boolean _isStopping;
    // Null whenever the camera is not acquired; guard before dereferencing.
    private Camera _camera;
    // Distance between the first two pointers at the last zoom gesture event.
    private float mFingerSpacing;

    // Concurrency lock for the barcode scanner to avoid flooding the runtime:
    // at most one ReaderAsyncTask is in flight at a time. Set on the camera
    // preview thread, cleared by the background task when it finishes.
    public static volatile boolean barcodeScannerTaskLock = false;

    // Reader instance for the barcode scanner; its decode hints are
    // configured once in initBarcodeReader() and it is reset after each frame.
    private final MultiFormatReader _multiFormatReader = new MultiFormatReader();

    /**
     * @param context Android context used for view construction and to launch
     *                the DeepBelief activity.
     * @param type    camera type constant (front/back) understood by {@link RCTCamera}.
     */
    public RCTCameraViewFinder(Context context, int type) {
        super(context);
        this.setSurfaceTextureListener(this);
        this._cameraType = type;
        this.initBarcodeReader(RCTCamera.getInstance().getBarCodeTypes());
        // Start the Deep Belief CNN activity here (fork-specific behavior).
        // NOTE(review): launching an activity from a view constructor is unusual;
        // confirm this is intentional.
        Intent deepBeliefIntent = new Intent(context, DeepBelief.class);
        context.startActivity(deepBeliefIntent);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        _surfaceTexture = surface;
        _surfaceTextureWidth = width;
        _surfaceTextureHeight = height;
        startCamera();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        _surfaceTextureWidth = width;
        _surfaceTextureHeight = height;
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        _surfaceTexture = null;
        _surfaceTextureWidth = 0;
        _surfaceTextureHeight = 0;
        stopCamera();
        // Returning true tells the framework we released the SurfaceTexture.
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // No-op: frames are consumed via onPreviewFrame for barcode scanning.
    }

    /**
     * @return preview aspect ratio (width / height) for the current camera type.
     */
    public double getRatio() {
        int width = RCTCamera.getInstance().getPreviewWidth(this._cameraType);
        int height = RCTCamera.getInstance().getPreviewHeight(this._cameraType);
        return ((float) width) / ((float) height);
    }

    /**
     * Switches between front and back camera. Restarts the preview on a
     * background thread because camera open/close is slow and must not block
     * the UI thread.
     */
    public void setCameraType(final int type) {
        if (this._cameraType == type) {
            return;
        }
        new Thread(new Runnable() {
            @Override
            public void run() {
                stopPreview();
                _cameraType = type;
                startPreview();
            }
        }).start();
    }

    public void setCaptureMode(final int captureMode) {
        RCTCamera.getInstance().setCaptureMode(_cameraType, captureMode);
        this._captureMode = captureMode;
    }

    public void setCaptureQuality(String captureQuality) {
        RCTCamera.getInstance().setCaptureQuality(_cameraType, captureQuality);
    }

    public void setTorchMode(int torchMode) {
        RCTCamera.getInstance().setTorchMode(_cameraType, torchMode);
    }

    public void setFlashMode(int flashMode) {
        RCTCamera.getInstance().setFlashMode(_cameraType, flashMode);
    }

    /** Starts the camera preview if the surface texture is already available. */
    private void startPreview() {
        if (_surfaceTexture != null) {
            startCamera();
        }
    }

    /** Stops the camera preview if a camera instance is currently held. */
    private void stopPreview() {
        if (_camera != null) {
            stopCamera();
        }
    }

    /**
     * Acquires the camera, configures focus mode and picture size, and starts
     * the preview. Guarded by {@code _isStarting} so concurrent calls are
     * ignored while a start is in progress.
     */
    private synchronized void startCamera() {
        if (!_isStarting) {
            _isStarting = true;
            try {
                _camera = RCTCamera.getInstance().acquireCameraInstance(_cameraType);
                Camera.Parameters parameters = _camera.getParameters();

                final boolean isCaptureModeStill = (_captureMode == RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_STILL);
                final boolean isCaptureModeVideo = (_captureMode == RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_VIDEO);
                if (!isCaptureModeStill && !isCaptureModeVideo) {
                    throw new RuntimeException("Unsupported capture mode:" + _captureMode);
                }

                // Set auto-focus. Try to set to continuous picture/video, and fall back
                // to general auto if available.
                List<String> focusModes = parameters.getSupportedFocusModes();
                if (isCaptureModeStill && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
                    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
                } else if (isCaptureModeVideo && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
                    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                }

                // Set picture size; defaults to the max available size.
                List<Camera.Size> supportedSizes;
                if (isCaptureModeStill) {
                    supportedSizes = parameters.getSupportedPictureSizes();
                } else if (isCaptureModeVideo) {
                    supportedSizes = RCTCamera.getInstance().getSupportedVideoSizes(_camera);
                } else {
                    throw new RuntimeException("Unsupported capture mode:" + _captureMode);
                }
                Camera.Size optimalPictureSize = RCTCamera.getInstance().getBestSize(
                        supportedSizes,
                        Integer.MAX_VALUE,
                        Integer.MAX_VALUE
                );
                parameters.setPictureSize(optimalPictureSize.width, optimalPictureSize.height);

                _camera.setParameters(parameters);
                _camera.setPreviewTexture(_surfaceTexture);
                _camera.startPreview();
                // Send previews to `onPreviewFrame` (barcode scanning).
                _camera.setPreviewCallback(this);
            } catch (NullPointerException e) {
                // Camera acquisition failed (acquireCameraInstance returned null);
                // nothing to release, so do not call stopCamera().
                e.printStackTrace();
            } catch (Exception e) {
                e.printStackTrace();
                stopCamera();
            } finally {
                _isStarting = false;
            }
        }
    }

    /**
     * Stops the preview and releases the camera instance. Guarded by
     * {@code _isStopping} so concurrent calls are ignored while a stop is in
     * progress.
     */
    private synchronized void stopCamera() {
        if (!_isStopping) {
            _isStopping = true;
            try {
                if (_camera != null) {
                    _camera.stopPreview();
                    // Stop sending previews to `onPreviewFrame`.
                    _camera.setPreviewCallback(null);
                    RCTCamera.getInstance().releaseCameraInstance(_cameraType);
                    _camera = null;
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                _isStopping = false;
            }
        }
    }

    /**
     * Parse barcodes as BarcodeFormat constants.
     *
     * Supports all iOS codes except [code39mod43, itf14].
     *
     * Additionally supports [codabar, maxicode, rss14, rssexpanded, upca, upceanextension].
     *
     * @return the matching {@link BarcodeFormat}, or {@code null} for an
     *         unsupported code string.
     */
    private BarcodeFormat parseBarCodeString(String c) {
        if ("aztec".equals(c)) {
            return BarcodeFormat.AZTEC;
        } else if ("ean13".equals(c)) {
            return BarcodeFormat.EAN_13;
        } else if ("ean8".equals(c)) {
            return BarcodeFormat.EAN_8;
        } else if ("qr".equals(c)) {
            return BarcodeFormat.QR_CODE;
        } else if ("pdf417".equals(c)) {
            return BarcodeFormat.PDF_417;
        } else if ("upce".equals(c)) {
            return BarcodeFormat.UPC_E;
        } else if ("datamatrix".equals(c)) {
            return BarcodeFormat.DATA_MATRIX;
        } else if ("code39".equals(c)) {
            return BarcodeFormat.CODE_39;
        } else if ("code93".equals(c)) {
            return BarcodeFormat.CODE_93;
        } else if ("interleaved2of5".equals(c)) {
            return BarcodeFormat.ITF;
        } else if ("codabar".equals(c)) {
            return BarcodeFormat.CODABAR;
        } else if ("code128".equals(c)) {
            return BarcodeFormat.CODE_128;
        } else if ("maxicode".equals(c)) {
            return BarcodeFormat.MAXICODE;
        } else if ("rss14".equals(c)) {
            return BarcodeFormat.RSS_14;
        } else if ("rssexpanded".equals(c)) {
            return BarcodeFormat.RSS_EXPANDED;
        } else if ("upca".equals(c)) {
            return BarcodeFormat.UPC_A;
        } else if ("upceanextension".equals(c)) {
            return BarcodeFormat.UPC_EAN_EXTENSION;
        } else {
            android.util.Log.v("RCTCamera", "Unsupported code.. [" + c + "]");
            return null;
        }
    }

    /**
     * Initialize the barcode decoder with the set of formats to look for.
     * Unrecognized format strings are skipped.
     */
    private void initBarcodeReader(List<String> barCodeTypes) {
        EnumMap<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
        EnumSet<BarcodeFormat> decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
        if (barCodeTypes != null) {
            for (String code : barCodeTypes) {
                BarcodeFormat format = parseBarCodeString(code);
                if (format != null) {
                    decodeFormats.add(format);
                }
            }
        }
        hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
        _multiFormatReader.setHints(hints);
    }

    /**
     * Spawn a barcode reader task if
     * - the barcode scanner is enabled (has a onBarCodeRead function)
     * - one isn't already running
     *
     * See {Camera.PreviewCallback}
     */
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (RCTCamera.getInstance().isBarcodeScannerEnabled() && !RCTCameraViewFinder.barcodeScannerTaskLock) {
            RCTCameraViewFinder.barcodeScannerTaskLock = true;
            new ReaderAsyncTask(camera, data).execute();
        }
    }

    /**
     * Background task that decodes one preview frame with ZXing and emits a
     * "CameraBarCodeReadAndroid" event to JS on success. Always clears
     * {@link #barcodeScannerTaskLock} when done.
     */
    private class ReaderAsyncTask extends AsyncTask<Void, Void, Void> {
        private byte[] imageData;
        private final Camera camera;

        ReaderAsyncTask(Camera camera, byte[] imageData) {
            this.camera = camera;
            this.imageData = imageData;
        }

        @Override
        protected Void doInBackground(Void... ignored) {
            if (isCancelled()) {
                return null;
            }

            Camera.Size size = camera.getParameters().getPreviewSize();
            int width = size.width;
            int height = size.height;

            // Rotate the luminance plane 90 degrees for ZXing if the device
            // orientation is portrait.
            if (RCTCamera.getInstance().getActualDeviceOrientation() == 0) {
                byte[] rotated = new byte[imageData.length];
                for (int y = 0; y < height; y++) {
                    for (int x = 0; x < width; x++) {
                        rotated[x * height + height - y - 1] = imageData[x + y * width];
                    }
                }
                width = size.height;
                height = size.width;
                imageData = rotated;
            }

            try {
                PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(imageData, width, height, 0, 0, width, height, false);
                BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
                Result result = _multiFormatReader.decodeWithState(bitmap);
                ReactContext reactContext = RCTCameraModule.getReactContextSingleton();

                WritableMap event = Arguments.createMap();
                WritableArray resultPoints = Arguments.createArray();
                ResultPoint[] points = result.getResultPoints();
                if (points != null) {
                    for (ResultPoint point : points) {
                        WritableMap newPoint = Arguments.createMap();
                        newPoint.putString("x", String.valueOf(point.getX()));
                        newPoint.putString("y", String.valueOf(point.getY()));
                        resultPoints.pushMap(newPoint);
                    }
                }
                event.putArray("bounds", resultPoints);
                event.putString("data", result.getText());
                event.putString("type", result.getBarcodeFormat().toString());
                reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class).emit("CameraBarCodeReadAndroid", event);
            } catch (Throwable t) {
                // Most frames contain no barcode, so decode failures are
                // expected and intentionally ignored (best-effort scanning).
            } finally {
                _multiFormatReader.reset();
                RCTCameraViewFinder.barcodeScannerTaskLock = false;
            }
            // Fixed: `return null` used to live inside the finally block, which
            // would mask any throwable raised by the finally clause itself.
            return null;
        }
    }

    /**
     * Routes touch input: pinch gestures adjust zoom, a single tap triggers
     * tap-to-focus. Ignores input while the camera is not running (previously
     * this threw an NPE).
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (_camera == null) {
            // Camera not acquired yet, or already released; nothing to do.
            return true;
        }
        Camera.Parameters params = _camera.getParameters();
        int action = event.getAction();

        if (event.getPointerCount() > 1) {
            // Handle multi-touch (pinch-to-zoom) events.
            if (action == MotionEvent.ACTION_POINTER_DOWN) {
                mFingerSpacing = getFingerSpacing(event);
            } else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
                _camera.cancelAutoFocus();
                handleZoom(event, params);
            }
        } else {
            // Handle single-touch (tap-to-focus) events.
            if (action == MotionEvent.ACTION_UP) {
                handleFocus(event, params);
            }
        }
        return true;
    }

    /**
     * Steps the zoom level by one in the direction of the pinch gesture and
     * applies the updated parameters to the camera.
     */
    private void handleZoom(MotionEvent event, Camera.Parameters params) {
        int maxZoom = params.getMaxZoom();
        int zoom = params.getZoom();
        float newDist = getFingerSpacing(event);
        if (newDist > mFingerSpacing) {
            // Zoom in, clamped to maxZoom.
            if (zoom < maxZoom)
                zoom++;
        } else if (newDist < mFingerSpacing) {
            // Zoom out, clamped to 0.
            if (zoom > 0)
                zoom--;
        }
        mFingerSpacing = newDist;
        params.setZoom(zoom);
        _camera.setParameters(params);
    }

    /**
     * Handles setting focus to the location of the event.
     *
     * Note that this will override the focus mode on the camera to FOCUS_MODE_AUTO if available,
     * even if this was previously something else (such as FOCUS_MODE_CONTINUOUS_*; see also
     * {@link #startCamera()}. However, this makes sense - after the user has initiated any
     * specific focus intent, we shouldn't be refocusing and overriding their request!
     */
    public void handleFocus(MotionEvent event, Camera.Parameters params) {
        List<String> supportedFocusModes = params.getSupportedFocusModes();
        if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            // Ensure focus areas are enabled. If max num focus areas is 0, then focus area is
            // not supported, so we cannot do anything here.
            if (params.getMaxNumFocusAreas() == 0) {
                return;
            }

            // Cancel any previous focus actions.
            _camera.cancelAutoFocus();

            // Compute focus area rect.
            Camera.Area focusAreaFromMotionEvent;
            try {
                focusAreaFromMotionEvent = RCTCameraUtils.computeFocusAreaFromMotionEvent(event, _surfaceTextureWidth, _surfaceTextureHeight);
            } catch (final RuntimeException e) {
                e.printStackTrace();
                return;
            }

            // Set focus mode to auto.
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);

            // Set focus area.
            final ArrayList<Camera.Area> focusAreas = new ArrayList<Camera.Area>();
            focusAreas.add(focusAreaFromMotionEvent);
            params.setFocusAreas(focusAreas);

            // Also set metering area if enabled. If max num metering areas is 0, then metering
            // area is not supported. We can usually safely omit this anyway, though.
            if (params.getMaxNumMeteringAreas() > 0) {
                params.setMeteringAreas(focusAreas);
            }

            // Set parameters before starting auto-focus.
            _camera.setParameters(params);

            // Start auto-focus now that focus area has been set. If successful, then can cancel
            // it afterwards. Wrap in try-catch to avoid crashing if autoFocus merely fails.
            try {
                _camera.autoFocus(new Camera.AutoFocusCallback() {
                    @Override
                    public void onAutoFocus(boolean success, Camera camera) {
                        if (success) {
                            camera.cancelAutoFocus();
                        }
                    }
                });
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /** Determine the space between the first two fingers. */
    private float getFingerSpacing(MotionEvent event) {
        float x = event.getX(0) - event.getX(1);
        float y = event.getY(0) - event.getY(1);
        return (float) Math.sqrt(x * x + y * y);
    }
}