Skip to content

Commit

Permalink
fix: Fix UI Thread race condition in setFrameProcessor(...) (#265)
Browse files Browse the repository at this point in the history
* fix: Fix UI Thread race condition in `setFrameProcessor(...)`

* Revert "fix: Fix UI Thread race condition in `setFrameProcessor(...)`"

This reverts commit 9c524e1.

* Use `setImmediate` to call `setFrameProcessor(...)`

* Fix frame processor order of applying

* Add `enableFrameProcessor` prop that defines if a FP is added

* rename constant

* Implement `enableFrameProcessor` prop for Android and make `frameProcessorFps` faster

* link to troubleshooting guide

* Update TROUBLESHOOTING.mdx

* Add logs for use-cases

* fix log

* set initial frame processor in `onLayout` instead of `componentDidMount`
  • Loading branch information
mrousavy committed Jul 12, 2021
1 parent 7acae0c commit 4b4ea0f
Show file tree
Hide file tree
Showing 11 changed files with 77 additions and 95 deletions.
15 changes: 1 addition & 14 deletions android/src/main/cpp/CameraView.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,7 @@ void CameraView::registerNatives() {

void CameraView::frameProcessorCallback(const alias_ref<JImageProxy::javaobject>& frame) {
if (frameProcessor_ == nullptr) {
__android_log_write(ANDROID_LOG_WARN, TAG, "Frame Processor is null!");
setEnableFrameProcessor(false);
__android_log_write(ANDROID_LOG_WARN, TAG, "Called Frame Processor callback, but `frameProcessor` is null!");
return;
}

Expand All @@ -45,24 +44,12 @@ void CameraView::frameProcessorCallback(const alias_ref<JImageProxy::javaobject>
}
}

void CameraView::setEnableFrameProcessor(bool enable) {
if (enable) {
__android_log_write(ANDROID_LOG_INFO, TAG, "Enabling Frame Processor Callback...");
} else {
__android_log_write(ANDROID_LOG_INFO, TAG, "Disabling Frame Processor Callback...");
}
static const auto javaMethod = javaPart_->getClass()->getMethod<void(bool)>("setEnableFrameProcessor");
javaMethod(javaPart_.get(), enable);
}

void CameraView::setFrameProcessor(const FrameProcessor&& frameProcessor) {
frameProcessor_ = frameProcessor;
setEnableFrameProcessor(true);
}

void vision::CameraView::unsetFrameProcessor() {
frameProcessor_ = nullptr;
setEnableFrameProcessor(false);
}

} // namespace vision
1 change: 0 additions & 1 deletion android/src/main/cpp/CameraView.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ class CameraView : public jni::HybridClass<CameraView> {
// TODO: Use template<> to avoid heap allocation for std::function<>
void setFrameProcessor(const FrameProcessor&& frameProcessor);
void unsetFrameProcessor();
void setEnableFrameProcessor(bool enable);

private:
friend HybridBase;
Expand Down
40 changes: 12 additions & 28 deletions android/src/main/java/com/mrousavy/camera/CameraView.kt
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
var photo: Boolean? = null
var video: Boolean? = null
var audio: Boolean? = null
var enableFrameProcessor = false
// props that require format reconfiguring
var format: ReadableMap? = null
var fps: Int? = null
Expand All @@ -88,8 +89,6 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
private val reactContext: ReactContext
get() = context as ReactContext

private var enableFrameProcessor = false

@Suppress("JoinDeclarationAndAssignment")
internal val previewView: PreviewView
private val cameraExecutor = Executors.newSingleThreadExecutor()
Expand All @@ -99,7 +98,10 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
internal var camera: Camera? = null
internal var imageCapture: ImageCapture? = null
internal var videoCapture: VideoCapture? = null
internal var imageAnalysis: ImageAnalysis? = null
private var imageAnalysis: ImageAnalysis? = null

private var lastFrameProcessorCall = System.currentTimeMillis()

private var extensionsManager: ExtensionsManager? = null

private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
Expand Down Expand Up @@ -191,26 +193,6 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
private external fun initHybrid(): HybridData
private external fun frameProcessorCallback(frame: ImageProxy)

@Suppress("unused")
@DoNotStrip
fun setEnableFrameProcessor(enable: Boolean) {
Log.d(TAG, "Set enable frame processor: $enable")
val before = enableFrameProcessor
enableFrameProcessor = enable

if (before != enable) {
// reconfigure session if frame processor was added/removed to adjust use-cases.
GlobalScope.launch(Dispatchers.Main) {
try {
configureSession()
} catch (e: Throwable) {
Log.e(TAG, "Failed to configure session after setting frame processor! ${e.message}")
invokeOnError(e)
}
}
}
}

override fun getLifecycle(): Lifecycle {
return lifecycleRegistry
}
Expand Down Expand Up @@ -383,6 +365,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
// Bind use cases to camera
val useCases = ArrayList<UseCase>()
if (video == true) {
Log.i(TAG, "Adding VideoCapture use-case...")
videoCapture = videoCaptureBuilder.build()
useCases.add(videoCapture!!)
}
Expand All @@ -391,18 +374,19 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
Log.i(TAG, "Tried to add photo use-case (`photo={true}`) but the Camera device only supports " +
"a single use-case at a time. Falling back to Snapshot capture.")
} else {
Log.i(TAG, "Adding ImageCapture use-case...")
imageCapture = imageCaptureBuilder.build()
useCases.add(imageCapture!!)
}
}
if (enableFrameProcessor) {
var lastCall = System.currentTimeMillis() - 1000
val intervalMs = (1.0 / frameProcessorFps) * 1000.0
Log.i(TAG, "Adding ImageAnalysis use-case...")
imageAnalysis = imageAnalysisBuilder.build().apply {
setAnalyzer(cameraExecutor, { image ->
val now = System.currentTimeMillis()
if (now - lastCall > intervalMs) {
lastCall = now
val intervalMs = (1.0 / frameProcessorFps) * 1000.0
if (now - lastFrameProcessorCall > intervalMs) {
lastFrameProcessorCall = now
frameProcessorCallback(image)
}
image.close()
Expand Down Expand Up @@ -477,7 +461,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
const val TAG = "CameraView"
const val TAG_PERF = "CameraView.performance"

private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video", "frameProcessorFps")
private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video", "enableFrameProcessor")
private val arrayListOfZoom = arrayListOf("zoom")
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,13 @@ class CameraViewManager : SimpleViewManager<CameraView>() {
view.audio = audio
}

@ReactProp(name = "enableFrameProcessor")
fun setEnableFrameProcessor(view: CameraView, enableFrameProcessor: Boolean) {
if (view.enableFrameProcessor != enableFrameProcessor)
addChangedPropToTransaction(view, "enableFrameProcessor")
view.enableFrameProcessor = enableFrameProcessor
}

@ReactProp(name = "enableDepthData")
fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
if (view.enableDepthData != enableDepthData)
Expand Down
2 changes: 2 additions & 0 deletions docs/docs/guides/TROUBLESHOOTING.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ Before opening an issue, make sure you try the following:
5. Press **Create Bridging Header** when prompted.
5. If you're having runtime issues, check the logs in Xcode to find out more. In Xcode, go to **View** > **Debug Area** > **Activate Console** (<kbd>⇧</kbd>+<kbd>⌘</kbd>+<kbd>C</kbd>).
* For errors without messages, there's often an error code attached. Look up the error code on [osstatus.com](https://www.osstatus.com) to get more information about a specific error.
6. If your Frame Processor is not running, make sure you check the native Xcode logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.

## Android

Expand Down Expand Up @@ -65,6 +66,7 @@ Before opening an issue, make sure you try the following:
```
5. If you're having runtime issues, check the logs in Android Studio/Logcat to find out more. In Android Studio, go to **View** > **Tool Windows** > **Logcat** (<kbd>⌘</kbd>+<kbd>6</kbd>) or run `adb logcat` in Terminal.
6. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/camera.camera-1#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information.
7. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.

## Issues

Expand Down
4 changes: 2 additions & 2 deletions ios/CameraBridge.h
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,9 @@
#import "JSConsoleHelper.h"

#ifdef VISION_CAMERA_DISABLE_FRAME_PROCESSORS
static bool enableFrameProcessors = false;
static bool VISION_CAMERA_ENABLE_FRAME_PROCESSORS = false;
#else
static bool enableFrameProcessors = true;
static bool VISION_CAMERA_ENABLE_FRAME_PROCESSORS = true;
#endif

@interface CameraBridge: RCTViewManager
Expand Down
2 changes: 1 addition & 1 deletion ios/CameraView+AVCaptureSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ extension CameraView {
captureSession.removeOutput(videoOutput)
self.videoOutput = nil
}
if video?.boolValue == true {
if video?.boolValue == true || enableFrameProcessor {
ReactLogger.log(level: .info, message: "Adding Video Data output...")
videoOutput = AVCaptureVideoDataOutput()
guard captureSession.canAddOutput(videoOutput!) else {
Expand Down
4 changes: 3 additions & 1 deletion ios/CameraView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@ private let propsThatRequireReconfiguration = ["cameraId",
"enablePortraitEffectsMatteDelivery",
"preset",
"photo",
"video"]
"video",
"enableFrameProcessor"]
private let propsThatRequireDeviceReconfiguration = ["fps",
"hdr",
"lowLightBoost",
Expand All @@ -47,6 +48,7 @@ public final class CameraView: UIView {
@objc var photo: NSNumber? // nullable bool
@objc var video: NSNumber? // nullable bool
@objc var audio: NSNumber? // nullable bool
@objc var enableFrameProcessor = false
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
Expand Down
1 change: 1 addition & 0 deletions ios/CameraViewManager.m
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ @interface RCT_EXTERN_REMAP_MODULE(CameraView, CameraViewManager, RCTViewManager
RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL);
// device format
RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
Expand Down
2 changes: 1 addition & 1 deletion ios/CameraViewManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ final class CameraViewManager: RCTViewManager {
#endif

// Install Frame Processor bindings and setup Runtime
if enableFrameProcessors {
if VISION_CAMERA_ENABLE_FRAME_PROCESSORS {
CameraQueues.frameProcessorQueue.async {
self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
self.bridge.runOnJS {
Expand Down
94 changes: 47 additions & 47 deletions src/Camera.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
import React from 'react';
import { requireNativeComponent, NativeModules, NativeSyntheticEvent, findNodeHandle, NativeMethods, Platform } from 'react-native';
import {
requireNativeComponent,
NativeModules,
NativeSyntheticEvent,
findNodeHandle,
NativeMethods,
Platform,
LayoutChangeEvent,
} from 'react-native';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
Expand All @@ -21,6 +29,7 @@ interface OnErrorEvent {
}
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
cameraId: string;
enableFrameProcessor: boolean;
onInitialized?: (event: NativeSyntheticEvent<void>) => void;
onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
};
Expand Down Expand Up @@ -63,25 +72,21 @@ if (CameraModule == null) console.error("Camera: Native Module 'CameraView' was
* @component
*/
export class Camera extends React.PureComponent<CameraProps> {
/**
* @internal
*/
/** @internal */
static displayName = 'Camera';
/**
* @internal
*/
/** @internal */
displayName = Camera.displayName;
private lastFrameProcessor: ((frame: Frame) => void) | undefined;
private isNativeViewMounted = false;

private readonly ref: React.RefObject<RefType>;

/**
* @internal
*/
/** @internal */
constructor(props: CameraProps) {
super(props);
this.onInitialized = this.onInitialized.bind(this);
this.onError = this.onError.bind(this);
this.onLayout = this.onLayout.bind(this);
this.ref = React.createRef<RefType>();
this.lastFrameProcessor = undefined;
}
Expand Down Expand Up @@ -331,13 +336,14 @@ export class Camera extends React.PureComponent<CameraProps> {
//#endregion

//#region Lifecycle
/**
* @internal
*/
/** @internal */
private assertFrameProcessorsEnabled(): void {
// @ts-expect-error JSI functions aren't typed
if (global.setFrameProcessor == null || global.unsetFrameProcessor == null)
throw new Error('Frame Processors are not enabled. Make sure you install react-native-reanimated 2.2.0 or above!');
if (global.setFrameProcessor == null || global.unsetFrameProcessor == null) {
throw new Error(
'Frame Processors are not enabled. See https://mrousavy.github.io/react-native-vision-camera/docs/guides/troubleshooting',
);
}
}

private setFrameProcessor(frameProcessor: (frame: Frame) => void): void {
Expand All @@ -352,60 +358,54 @@ export class Camera extends React.PureComponent<CameraProps> {
global.unsetFrameProcessor(this.handle);
}

/**
* @internal
*/
componentWillUnmount(): void {
if (this.lastFrameProcessor != null || this.props.frameProcessor != null) this.unsetFrameProcessor();
}

/**
* @internal
*/
componentDidMount(): void {
if (this.props.frameProcessor != null) {
if (Platform.OS === 'android') {
// on Android the View is not fully mounted yet (`findViewById` returns null), so we wait 300ms.
setTimeout(() => {
if (this.props.frameProcessor != null) this.setFrameProcessor(this.props.frameProcessor);
}, 300);
} else {
// on other platforms (iOS) the View we can assume that the View is immediatelly available.
private onLayout(event: LayoutChangeEvent): void {
if (!this.isNativeViewMounted) {
this.isNativeViewMounted = true;
if (this.props.frameProcessor != null) {
// user passed a `frameProcessor` but we didn't set it yet because the native view was not mounted yet. set it now.
this.setFrameProcessor(this.props.frameProcessor);
this.lastFrameProcessor = this.props.frameProcessor;
}
}

this.props.onLayout?.(event);
}

/**
* @internal
*/
/** @internal */
componentDidUpdate(): void {
if (this.props.frameProcessor !== this.lastFrameProcessor) {
if (!this.isNativeViewMounted) return;
const frameProcessor = this.props.frameProcessor;
if (frameProcessor !== this.lastFrameProcessor) {
// frameProcessor argument identity changed. Update native to reflect the change.
if (this.props.frameProcessor != null) this.setFrameProcessor(this.props.frameProcessor);
if (frameProcessor != null) this.setFrameProcessor(frameProcessor);
else this.unsetFrameProcessor();

this.lastFrameProcessor = this.props.frameProcessor;
this.lastFrameProcessor = frameProcessor;
}
}

/** @internal */
componentWillUnmount(): void {
if (this.lastFrameProcessor != null || this.props.frameProcessor != null) {
this.unsetFrameProcessor();
this.lastFrameProcessor = undefined;
}
}
//#endregion

/**
* @internal
*/
/** @internal */
public render(): React.ReactNode {
// We remove the big `device` object from the props because we only need to pass `cameraId` to native.
const { device, video: enableVideo, frameProcessor, ...props } = this.props;
// on iOS, enabling a frameProcessor requires `video` to be `true`. On Android, it doesn't.
const video = Platform.OS === 'ios' ? frameProcessor != null || enableVideo : enableVideo;
const { device, frameProcessor, ...props } = this.props;
return (
<NativeCameraView
{...props}
cameraId={device.id}
ref={this.ref}
onInitialized={this.onInitialized}
onError={this.onError}
video={video}
enableFrameProcessor={frameProcessor != null}
onLayout={this.onLayout}
/>
);
}
Expand Down

0 comments on commit 4b4ea0f

Please sign in to comment.