
[camera] Add ability to concurrently record and stream video (flutter…
…#6290)

* Implement interface methods to allow concurrent stream and record

There will be a subsequent change to the `camera` package to make use of these implementations.

* Fix android test

* Format android_camera_test.dart

* Resolve analyze failures

* Fix version bumps

* Fix MethodChannelCameraTest

* Fix comment on FLTCam

* Add tests to confirm that streaming is not supported on Windows or web

* CHANGELOG updates

* Fix analyze errors

* Fix dart analyze warnings for web

* Formatted
adam-harwood committed Dec 6, 2022
1 parent 345bddb commit 374e598
Showing 30 changed files with 410 additions and 76 deletions.
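The commit description notes that a follow-up change will make the app-facing `camera` package use these implementations. As a rough illustration only (not part of this commit), here is a minimal Dart sketch of that adoption at the platform-interface level, mirroring the example controller changed in this diff; the wrapper function and the callback body are hypothetical:

```dart
import 'package:camera_platform_interface/camera_platform_interface.dart';

/// Records video while also streaming frames, then returns the recorded file.
/// Sketch only; assumes [cameraId] refers to an already-initialized camera.
Future<XFile> recordWhileStreaming(int cameraId) async {
  await CameraPlatform.instance.startVideoCapturing(
    VideoCaptureOptions(
      cameraId,
      streamCallback: (CameraImageData image) {
        // Frames arrive here while the recording continues.
      },
    ),
  );

  // ... keep recording for as long as needed ...

  return CameraPlatform.instance.stopVideoRecording(cameraId);
}
```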
4 changes: 4 additions & 0 deletions packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,7 @@
## 0.10.1

* Implements option to also stream when recording a video.

## 0.10.0+5

* Fixes `ArrayIndexOutOfBoundsException` when the permission request is interrupted.
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -522,6 +522,21 @@ private void refreshPreviewCaptureSession(
}
}

private void startCapture(boolean record, boolean stream) throws CameraAccessException {
List<Surface> surfaces = new ArrayList<>();
Runnable successCallback = null;
if (record) {
surfaces.add(mediaRecorder.getSurface());
successCallback = () -> mediaRecorder.start();
}
if (stream) {
surfaces.add(imageStreamReader.getSurface());
}

createCaptureSession(
CameraDevice.TEMPLATE_RECORD, successCallback, surfaces.toArray(new Surface[0]));
}

public void takePicture(@NonNull final Result result) {
// Only take one picture at a time.
if (cameraCaptureCallback.getCameraState() != CameraState.STATE_PREVIEW) {
@@ -731,29 +746,17 @@ private void unlockAutoFocus() {
dartMessenger.error(flutterResult, errorCode, errorMessage, null));
}

public void startVideoRecording(@NonNull Result result) {
final File outputDir = applicationContext.getCacheDir();
try {
captureFile = File.createTempFile("REC", ".mp4", outputDir);
} catch (IOException | SecurityException e) {
result.error("cannotCreateFile", e.getMessage(), null);
return;
}
try {
prepareMediaRecorder(captureFile.getAbsolutePath());
} catch (IOException e) {
recordingVideo = false;
captureFile = null;
result.error("videoRecordingFailed", e.getMessage(), null);
return;
public void startVideoRecording(
@NonNull Result result, @Nullable EventChannel imageStreamChannel) {
prepareRecording(result);

if (imageStreamChannel != null) {
setStreamHandler(imageStreamChannel);
}
// Re-create autofocus feature so it's using video focus mode now.
cameraFeatures.setAutoFocus(
cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));

recordingVideo = true;
try {
createCaptureSession(
CameraDevice.TEMPLATE_RECORD, () -> mediaRecorder.start(), mediaRecorder.getSurface());
startCapture(true, imageStreamChannel != null);
result.success(null);
} catch (CameraAccessException e) {
recordingVideo = false;
@@ -1073,21 +1076,10 @@ public void startPreview() throws CameraAccessException {

public void startPreviewWithImageStream(EventChannel imageStreamChannel)
throws CameraAccessException {
createCaptureSession(CameraDevice.TEMPLATE_RECORD, imageStreamReader.getSurface());
Log.i(TAG, "startPreviewWithImageStream");

imageStreamChannel.setStreamHandler(
new EventChannel.StreamHandler() {
@Override
public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
setImageStreamImageAvailableListener(imageStreamSink);
}
setStreamHandler(imageStreamChannel);

@Override
public void onCancel(Object o) {
imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
}
});
startCapture(false, true);
Log.i(TAG, "startPreviewWithImageStream");
}

/**
@@ -1117,6 +1109,42 @@ public void onError(String errorCode, String errorMessage) {
cameraCaptureCallback.setCameraState(CameraState.STATE_PREVIEW);
}

private void prepareRecording(@NonNull Result result) {
final File outputDir = applicationContext.getCacheDir();
try {
captureFile = File.createTempFile("REC", ".mp4", outputDir);
} catch (IOException | SecurityException e) {
result.error("cannotCreateFile", e.getMessage(), null);
return;
}
try {
prepareMediaRecorder(captureFile.getAbsolutePath());
} catch (IOException e) {
recordingVideo = false;
captureFile = null;
result.error("videoRecordingFailed", e.getMessage(), null);
return;
}
// Re-create autofocus feature so it's using video focus mode now.
cameraFeatures.setAutoFocus(
cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
}

private void setStreamHandler(EventChannel imageStreamChannel) {
imageStreamChannel.setStreamHandler(
new EventChannel.StreamHandler() {
@Override
public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
setImageStreamImageAvailableListener(imageStreamSink);
}

@Override
public void onCancel(Object o) {
imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
}
});
}

private void setImageStreamImageAvailableListener(final EventChannel.EventSink imageStreamSink) {
imageStreamReader.setOnImageAvailableListener(
reader -> {
packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -26,6 +26,7 @@
import io.flutter.view.TextureRegistry;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

final class MethodCallHandlerImpl implements MethodChannel.MethodCallHandler {
private final Activity activity;
@@ -118,7 +119,9 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result)
}
case "startVideoRecording":
{
camera.startVideoRecording(result);
camera.startVideoRecording(
result,
Objects.equals(call.argument("enableStream"), true) ? imageStreamChannel : null);
break;
}
case "stopVideoRecording":
packages/camera/camera_android/example/integration_test/camera_test.dart
@@ -245,4 +245,44 @@ void main() {
await controller.dispose();
},
);

testWidgets(
'recording with image stream',
(WidgetTester tester) async {
final List<CameraDescription> cameras =
await CameraPlatform.instance.availableCameras();
if (cameras.isEmpty) {
return;
}

final CameraController controller = CameraController(
cameras[0],
ResolutionPreset.low,
enableAudio: false,
);

await controller.initialize();
bool isDetecting = false;

await controller.startVideoRecording(
streamCallback: (CameraImageData image) {
if (isDetecting) {
return;
}

isDetecting = true;

expectLater(image, isNotNull).whenComplete(() => isDetecting = false);
});

expect(controller.value.isStreamingImages, true);

sleep(const Duration(milliseconds: 500));

await controller.stopVideoRecording();
await controller.dispose();

expect(controller.value.isStreamingImages, false);
},
);
}
12 changes: 10 additions & 2 deletions packages/camera/camera_android/example/lib/camera_controller.dart
@@ -306,11 +306,14 @@ class CameraController extends ValueNotifier<CameraValue> {
///
/// The video is returned as a [XFile] after calling [stopVideoRecording].
/// Throws a [CameraException] if the capture fails.
Future<void> startVideoRecording() async {
await CameraPlatform.instance.startVideoRecording(_cameraId);
Future<void> startVideoRecording(
{Function(CameraImageData image)? streamCallback}) async {
await CameraPlatform.instance.startVideoCapturing(
VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
value = value.copyWith(
isRecordingVideo: true,
isRecordingPaused: false,
isStreamingImages: streamCallback != null,
recordingOrientation: Optional<DeviceOrientation>.of(
value.lockedCaptureOrientation ?? value.deviceOrientation));
}
@@ -319,10 +322,15 @@ class CameraController extends ValueNotifier<CameraValue> {
///
/// Throws a [CameraException] if the capture failed.
Future<XFile> stopVideoRecording() async {
if (value.isStreamingImages) {
await stopImageStream();
}

final XFile file =
await CameraPlatform.instance.stopVideoRecording(_cameraId);
value = value.copyWith(
isRecordingVideo: false,
isRecordingPaused: false,
recordingOrientation: const Optional<DeviceOrientation>.absent(),
);
return file;
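A usage sketch for the updated example controller above, following the integration test added in this commit: start a recording that also streams frames, then stop it (stopping now shuts down the image stream first). This is a sketch, not code from the commit; the camera selection, one-second delay, and callback body are placeholders:

```dart
import 'package:camera_platform_interface/camera_platform_interface.dart';

import 'camera_controller.dart'; // the example controller updated above

Future<XFile?> demoConcurrentRecordAndStream() async {
  final List<CameraDescription> cameras =
      await CameraPlatform.instance.availableCameras();
  if (cameras.isEmpty) {
    return null;
  }

  final CameraController controller = CameraController(
    cameras.first,
    ResolutionPreset.low,
    enableAudio: false,
  );
  await controller.initialize();

  await controller.startVideoRecording(
    streamCallback: (CameraImageData image) {
      // Inspect frames here while the recording keeps running.
    },
  );

  await Future<void>.delayed(const Duration(seconds: 1));

  final XFile video = await controller.stopVideoRecording();
  await controller.dispose();
  return video; // video.path points at the recorded file.
}
```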
2 changes: 1 addition & 1 deletion packages/camera/camera_android/example/pubspec.yaml
@@ -14,7 +14,7 @@ dependencies:
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ../
camera_platform_interface: ^2.2.0
camera_platform_interface: ^2.3.1
flutter:
sdk: flutter
path_provider: ^2.0.0
30 changes: 26 additions & 4 deletions packages/camera/camera_android/lib/src/android_camera.dart
@@ -248,13 +248,25 @@ class AndroidCamera extends CameraPlatform {
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) async {
return startVideoCapturing(
VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
}

@override
Future<void> startVideoCapturing(VideoCaptureOptions options) async {
await _channel.invokeMethod<void>(
'startVideoRecording',
<String, dynamic>{
'cameraId': cameraId,
'maxVideoDuration': maxVideoDuration?.inMilliseconds,
'cameraId': options.cameraId,
'maxVideoDuration': options.maxDuration?.inMilliseconds,
'enableStream': options.streamCallback != null,
},
);

if (options.streamCallback != null) {
_installStreamController().stream.listen(options.streamCallback);
_startStreamListener();
}
}

@override
@@ -290,13 +302,19 @@ class AndroidCamera extends CameraPlatform {
@override
Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
{CameraImageStreamOptions? options}) {
_installStreamController(onListen: _onFrameStreamListen);
return _frameStreamController!.stream;
}

StreamController<CameraImageData> _installStreamController(
{Function()? onListen}) {
_frameStreamController = StreamController<CameraImageData>(
onListen: _onFrameStreamListen,
onListen: onListen ?? () {},
onPause: _onFrameStreamPauseResume,
onResume: _onFrameStreamPauseResume,
onCancel: _onFrameStreamCancel,
);
return _frameStreamController!.stream;
return _frameStreamController!;
}

void _onFrameStreamListen() {
@@ -305,6 +323,10 @@

Future<void> _startPlatformStream() async {
await _channel.invokeMethod<void>('startImageStream');
_startStreamListener();
}

void _startStreamListener() {
const EventChannel cameraEventChannel =
EventChannel('plugins.flutter.io/camera_android/imageStream');
_platformImageStreamSubscription =
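For reference, a simplified, standalone sketch of the channel protocol `AndroidCamera` uses above: `startVideoRecording` is invoked over the method channel with an `enableStream` flag, and frames then arrive on the same EventChannel that image streaming already uses. The EventChannel name matches this diff; the method-channel name and the untyped frame payload are assumptions:

```dart
import 'dart:async';

import 'package:flutter/services.dart';

const MethodChannel _cameraChannel =
    MethodChannel('plugins.flutter.io/camera_android'); // assumed name
const EventChannel _frameChannel =
    EventChannel('plugins.flutter.io/camera_android/imageStream');

/// Starts recording and, if [onFrame] is given, listens for streamed frames.
Future<StreamSubscription<dynamic>?> startRecordingWithStream(
    int cameraId, void Function(dynamic frame)? onFrame) async {
  await _cameraChannel
      .invokeMethod<void>('startVideoRecording', <String, dynamic>{
    'cameraId': cameraId,
    'maxVideoDuration': null,
    'enableStream': onFrame != null,
  });
  if (onFrame == null) {
    return null;
  }
  // The platform side adds the image reader surface to the capture session,
  // so frames start flowing as soon as recording begins.
  return _frameChannel.receiveBroadcastStream().listen(onFrame);
}
```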
4 changes: 2 additions & 2 deletions packages/camera/camera_android/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_android
description: Android implementation of the camera plugin.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_android
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.10.0+5
version: 0.10.1

environment:
sdk: ">=2.14.0 <3.0.0"
@@ -18,7 +18,7 @@ flutter:
dartPluginClass: AndroidCamera

dependencies:
camera_platform_interface: ^2.2.0
camera_platform_interface: ^2.3.1
flutter:
sdk: flutter
flutter_plugin_android_lifecycle: ^2.0.2
29 changes: 28 additions & 1 deletion packages/camera/camera_android/test/android_camera_test.dart
@@ -587,6 +587,7 @@ void main() {
isMethodCall('startVideoRecording', arguments: <String, Object?>{
'cameraId': cameraId,
'maxVideoDuration': null,
'enableStream': false,
}),
]);
});
@@ -609,7 +610,33 @@
expect(channel.log, <Matcher>[
isMethodCall('startVideoRecording', arguments: <String, Object?>{
'cameraId': cameraId,
'maxVideoDuration': 10000
'maxVideoDuration': 10000,
'enableStream': false,
}),
]);
});

test(
'Should pass enableStream if callback is passed when starting recording a video',
() async {
// Arrange
final MethodChannelMock channel = MethodChannelMock(
channelName: _channelName,
methods: <String, dynamic>{'startVideoRecording': null},
);

// Act
await camera.startVideoCapturing(
VideoCaptureOptions(cameraId,
streamCallback: (CameraImageData imageData) {}),
);

// Assert
expect(channel.log, <Matcher>[
isMethodCall('startVideoRecording', arguments: <String, Object?>{
'cameraId': cameraId,
'maxVideoDuration': null,
'enableStream': true,
}),
]);
});
4 changes: 4 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
## 0.9.9

* Implements option to also stream when recording a video.

## 0.9.8+6

* Updates code for `no_leading_underscores_for_local_identifiers` lint.
(Remaining changed files not shown.)