Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@
import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils;
import com.cloudwebrtc.webrtc.utils.ObjectType;
import com.cloudwebrtc.webrtc.utils.PermissionUtils;
import com.cloudwebrtc.webrtc.videoEffects.VideoFrameProcessor;
import com.cloudwebrtc.webrtc.videoEffects.VideoEffectProcessor;
import com.cloudwebrtc.webrtc.videoEffects.ProcessorProvider;

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
Expand Down Expand Up @@ -82,6 +85,8 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import io.flutter.plugin.common.MethodChannel.Result;

Expand All @@ -108,6 +113,8 @@ class GetUserMediaImpl {

private final Map<String, VideoCapturerInfo> mVideoCapturers = new HashMap<>();
private final Map<String, SurfaceTextureHelper> mSurfaceTextureHelpers = new HashMap<>();
private final Map<String, VideoSource> mVideoSources = new HashMap<>();

private final StateProvider stateProvider;
private final Context applicationContext;

Expand Down Expand Up @@ -810,6 +817,7 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
String trackId = stateProvider.getNextTrackUUID();
mVideoCapturers.put(trackId, info);
mSurfaceTextureHelpers.put(trackId, surfaceTextureHelper);
mVideoSources.put(trackId, videoSource);

Log.d(TAG, "Target: " + targetWidth + "x" + targetHeight + "@" + targetFps + ", Actual: " + info.width + "x" + info.height + "@" + info.fps);

Expand Down Expand Up @@ -857,11 +865,38 @@ void removeVideoCapturer(String id) {
helper.stopListening();
helper.dispose();
mSurfaceTextureHelpers.remove(id);
mVideoSources.remove(id);
}
}
}
}

/**
 * Installs (or clears) the chain of named video effects on the VideoSource that
 * belongs to {@code trackId}.
 *
 * @param trackId id of the local video track whose source is processed.
 * @param names   ordered effect names registered via {@link ProcessorProvider};
 *                null or empty clears any previously installed processor.
 */
void setVideoEffect(String trackId, List<String> names) {
    VideoSource videoSource = mVideoSources.get(trackId);
    SurfaceTextureHelper surfaceTextureHelper = mSurfaceTextureHelpers.get(trackId);

    // Unknown track id: nothing to attach a processor to. Without this guard the
    // calls below would throw a NullPointerException.
    if (videoSource == null || surfaceTextureHelper == null) {
        Log.e(TAG, "setVideoEffect: no video source for trackId: " + trackId);
        return;
    }

    if (names != null && !names.isEmpty()) {
        // Resolve each name to a processor; names with no registered factory are
        // logged and skipped (the List<String> elements need no instanceof check).
        List<VideoFrameProcessor> processors = names.stream()
                .filter(Objects::nonNull)
                .map(name -> {
                    VideoFrameProcessor videoFrameProcessor = ProcessorProvider.getProcessor(name);
                    if (videoFrameProcessor == null) {
                        Log.e(TAG, "no videoFrameProcessor associated with this name: " + name);
                    }
                    return videoFrameProcessor;
                })
                .filter(Objects::nonNull)
                .collect(Collectors.toList());

        VideoEffectProcessor videoEffectProcessor =
                new VideoEffectProcessor(processors, surfaceTextureHelper);
        videoSource.setVideoProcessor(videoEffectProcessor);
    } else {
        // Null/empty list removes the effect chain entirely.
        videoSource.setVideoProcessor(null);
    }
}

@RequiresApi(api = VERSION_CODES.M)
private void requestPermissions(
final ArrayList<String> permissions,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -295,6 +295,14 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
result.success(null);
break;
}
case "setVideoEffects": {
String trackId = call.argument("trackId");
List<String> names = call.argument("names");

getUserMediaImpl.setVideoEffect(trackId, names);
result.success(null);
break;
}
case "createPeerConnection": {
Map<String, Object> constraints = call.argument("constraints");
Map<String, Object> configuration = call.argument("configuration");
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package com.cloudwebrtc.webrtc.videoEffects;

import java.util.HashMap;
import java.util.Map;

/**
 * Manages VideoFrameProcessorFactoryInterfaces corresponding to name using hashmap, and provides
 * get, add and remove functionality.
 */
public class ProcessorProvider {

  /** Registered factories, keyed by effect name. */
  private static final Map<String, VideoFrameProcessorFactoryInterface> methodMap = new HashMap<>();

  /**
   * Builds a fresh VideoFrameProcessor from the factory registered under {@code name}.
   *
   * @param name effect name used at registration time.
   * @return a newly built processor, or null when no factory is registered for the name.
   */
  public static VideoFrameProcessor getProcessor(String name) {
    // Single lookup instead of containsKey() followed by get().
    VideoFrameProcessorFactoryInterface factory = methodMap.get(name);
    return factory != null ? factory.build() : null;
  }

  /**
   * Registers a factory under {@code name}, replacing any previous registration.
   *
   * @throws NullPointerException if either argument is null.
   */
  public static void addProcessor(String name,
      VideoFrameProcessorFactoryInterface videoFrameProcessorFactoryInterface) {
    if (name == null || videoFrameProcessorFactoryInterface == null) {
      throw new NullPointerException("Name or VideoFrameProcessorFactory can not be null");
    }
    methodMap.put(name, videoFrameProcessorFactoryInterface);
  }

  /**
   * Removes the factory registered under {@code name}.
   *
   * @throws RuntimeException if no factory is registered for the name.
   */
  public static void removeProcessor(String name) {
    if (name == null || !methodMap.containsKey(name)) {
      throw new RuntimeException("VideoFrameProcessorFactory with " + name + " does not exist");
    }
    methodMap.remove(name);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
package com.cloudwebrtc.webrtc.videoEffects;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;
import org.webrtc.VideoProcessor;
import org.webrtc.VideoSink;

import java.util.List;

/**
 * Lightweight abstraction for an object that can receive video frames, process and add effects in
 * them, and pass them on to another object. Installed on a VideoSource as its VideoProcessor.
 */
public class VideoEffectProcessor implements VideoProcessor {
// Downstream sink supplied by webrtc through setSink(); receives processed frames.
private VideoSink mSink;
// Forwarded to each VideoFrameProcessor alongside the frame being processed.
final private SurfaceTextureHelper textureHelper;
// Effect chain, applied in list order to every captured frame.
final private List<VideoFrameProcessor> videoFrameProcessors;

// Creates a processor that applies {@code processors} in order, passing
// {@code textureHelper} to each stage.
public VideoEffectProcessor(List<VideoFrameProcessor> processors, SurfaceTextureHelper textureHelper) {
this.textureHelper = textureHelper;
this.videoFrameProcessors = processors;
}

@Override
public void onCapturerStarted(boolean success) {
// Intentionally empty: no per-capture-session state to initialize.
}

@Override
public void onCapturerStopped() {
// Intentionally empty: no per-capture-session state to tear down.
}

// Called by webrtc to provide the sink that processed frames are forwarded to.
// NOTE(review): mSink is dereferenced unconditionally in onFrameCaptured —
// confirm webrtc guarantees setSink() runs before the first captured frame.
@Override
public void setSink(VideoSink sink) {
mSink = sink;
}

/**
 * Called just after the frame is captured.
 * Will process the VideoFrame with the help of VideoFrameProcessor and send the processed
 * VideoFrame back to webrtc using onFrame method in VideoSink.
 * @param frame raw VideoFrame received from webrtc.
 */
@Override
public void onFrameCaptured(VideoFrame frame) {
frame.retain();
VideoFrame outputFrame = frame;

// Each stage consumes the current frame and returns the frame to pass on.
for (VideoFrameProcessor processor : this.videoFrameProcessors) {
outputFrame = processor.process(outputFrame, textureHelper);
}

mSink.onFrame(outputFrame);
// NOTE(review): when the processor list is empty, outputFrame == frame, so the
// two release() calls below hit the same frame while only one retain() was
// taken — verify the ownership contract of VideoFrameProcessor.process (who
// releases the input frame?) to rule out an over-release here.
outputFrame.release();
frame.release();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package com.cloudwebrtc.webrtc.videoEffects;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;

/**
 * A single video-effect stage. Implementations receive a captured VideoFrame,
 * apply their image-processing algorithm, and hand back the frame to render.
 * The caller takes ownership of the object.
 */
public interface VideoFrameProcessor {
  /**
   * Applies the image processing algorithm to one frame.
   * The caller is responsible for releasing the returned frame.
   *
   * @param frame raw video frame to be processed.
   * @param textureHelper helper available for texture-backed frame handling.
   * @return the processed frame to be rendered.
   */
  VideoFrame process(VideoFrame frame, SurfaceTextureHelper textureHelper);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package com.cloudwebrtc.webrtc.videoEffects;

/**
 * Factory for creating VideoFrameProcessor instances.
 *
 * <p>Single-abstract-method interface, so it can be implemented with a lambda
 * or method reference when registered via ProcessorProvider.
 */
@FunctionalInterface
public interface VideoFrameProcessorFactoryInterface {

  /**
   * Creates and returns a new VideoFrameProcessor instance.
   * The caller takes ownership of the returned object.
   */
  VideoFrameProcessor build();
}
4 changes: 4 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
#import <Foundation/Foundation.h>
#import <WebRTC/WebRTC.h>

@class VideoEffectProcessor;
@class FlutterRTCVideoRenderer;
@class FlutterRTCFrameCapturer;

Expand Down Expand Up @@ -46,12 +47,15 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
@property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer;
@property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
@property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
@property (nonatomic, strong) VideoEffectProcessor* videoEffectProcessor;

@property(nonatomic) BOOL _usingFrontCamera;
@property(nonatomic) NSInteger _lastTargetWidth;
@property(nonatomic) NSInteger _lastTargetHeight;
@property(nonatomic) NSInteger _lastTargetFps;

- (void)mediaStreamTrackSetVideoEffects:(nonnull NSString *)trackId
names:(nonnull NSArray<NSString *> *)names;
- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId peerConnectionId:(NSString* _Nullable)peerConnectionId;
- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId peerConnectionId:(NSString* _Nullable)peerConnectionId;
- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nullable)Id;
Expand Down
40 changes: 40 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
#import "FlutterRTCPeerConnection.h"
#import "FlutterRTCVideoRenderer.h"
#import "FlutterRTCFrameCryptor.h"
#import "VideoEffectProcessor.h"
#import "ProcessorProvider.h"
#import "VideoFrameProcessor.h"
#if TARGET_OS_IPHONE
#import "FlutterRTCVideoPlatformViewFactory.h"
#import "FlutterRTCVideoPlatformViewController.h"
Expand Down Expand Up @@ -173,6 +176,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel
self.frameCryptors = [NSMutableDictionary new];
self.keyProviders = [NSMutableDictionary new];
self.videoCapturerStopHandlers = [NSMutableDictionary new];

#if TARGET_OS_IPHONE
AVAudioSession* session = [AVAudioSession sharedInstance];
[[NSNotificationCenter defaultCenter] addObserver:self
Expand Down Expand Up @@ -294,6 +298,12 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
}
[self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing];
result(@"");
} else if([@"setVideoEffects" isEqualToString:call.method]) {
NSDictionary* argsMap = call.arguments;
NSString* trackId = argsMap[@"trackId"];
NSArray* names = argsMap[@"names"];

[self mediaStreamTrackSetVideoEffects:trackId names:names];
} else if ([@"createPeerConnection" isEqualToString:call.method]) {
NSDictionary* argsMap = call.arguments;
NSDictionary* configuration = argsMap[@"configuration"];
Expand Down Expand Up @@ -1452,6 +1462,36 @@ - (void)deactiveRtcAudioSession {
#endif
}

// Installs the chain of named video effects between the camera capturer and the
// video source of the track identified by trackId. Unknown effect names are
// skipped; if the track cannot be resolved to a video track, nothing changes.
- (void)mediaStreamTrackSetVideoEffects:(nonnull NSString *)trackId names:(nonnull NSArray<NSString *> *)names
{
  RTCMediaStreamTrack *track = [self trackForId:trackId peerConnectionId:nil];
  if (!track) {
    NSLog(@"mediaStreamTrackSetVideoEffects: track not found");
    return;
  }
  // Guard the downcast: trackForId can also return an audio track for this id,
  // and casting that to RTCVideoTrack would break at .source below.
  if (![track isKindOfClass:[RTCVideoTrack class]]) {
    NSLog(@"mediaStreamTrackSetVideoEffects: track is not a video track");
    return;
  }
  NSLog(@"mediaStreamTrackSetVideoEffects: track found");

  RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
  RTCVideoSource *videoSource = videoTrack.source;

  // Resolve each registered processor by name; unregistered names are dropped.
  NSMutableArray *processors = [[NSMutableArray alloc] init];
  for (NSString *name in names) {
    NSObject<VideoFrameProcessorDelegate> *processor = [ProcessorProvider getProcessor:name];
    if (processor != nil) {
      [processors addObject:processor];
    }
  }

  // Keep a strong reference on the plugin, then splice the effect processor
  // between the capturer and the source by taking over the capturer delegate.
  self.videoEffectProcessor = [[VideoEffectProcessor alloc] initWithProcessors:processors
                                                                   videoSource:videoSource];
  self.videoCapturer.delegate = self.videoEffectProcessor;
}

- (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result {
RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""];
if (stream) {
Expand Down
10 changes: 10 additions & 0 deletions common/darwin/Classes/ProcessorProvider.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#import "VideoFrameProcessor.h"

/// Process-wide registry that maps effect names to VideoFrameProcessorDelegate
/// instances, looked up when video effects are applied to a track.
@interface ProcessorProvider : NSObject

/// Returns the processor registered under `name`, or nil when none exists.
+ (NSObject<VideoFrameProcessorDelegate> *)getProcessor:(NSString *)name;
/// Registers `processor` under `name`, replacing any previous registration.
+ (void)addProcessor:(NSObject<VideoFrameProcessorDelegate> *)processor
forName:(NSString *)name;
/// Removes the processor registered under `name`, if any.
+ (void)removeProcessor:(NSString *)name;

@end
24 changes: 24 additions & 0 deletions common/darwin/Classes/ProcessorProvider.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#import "ProcessorProvider.h"

@implementation ProcessorProvider

// Registry of frame processors keyed by effect name. Created lazily by the
// runtime via +initialize before the class receives its first message.
static NSMutableDictionary<NSString *, NSObject<VideoFrameProcessorDelegate> *> *processorMap;

+ (void)initialize {
  // +initialize is also invoked for subclasses that do not override it; guard
  // on the class so a subclass's first use cannot re-create the map and drop
  // processors that were already registered.
  if (self == [ProcessorProvider class]) {
    processorMap = [[NSMutableDictionary alloc] init];
  }
}

// Returns the processor registered under `name`, or nil when none exists.
+ (NSObject<VideoFrameProcessorDelegate> *)getProcessor:(NSString *)name {
  return [processorMap objectForKey:name];
}

// Registers `processor` under `name`, replacing any previous registration.
+ (void)addProcessor:(NSObject<VideoFrameProcessorDelegate> *)processor
             forName:(NSString *)name {
  [processorMap setObject:processor forKey:name];
}

// Removes the processor registered under `name`; no-op when absent.
+ (void)removeProcessor:(NSString *)name {
  [processorMap removeObjectForKey:name];
}

@end
13 changes: 13 additions & 0 deletions common/darwin/Classes/VideoEffectProcessor.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#import <WebRTC/RTCVideoSource.h>

#import "VideoFrameProcessor.h"

/// RTCVideoCapturerDelegate that splices a chain of frame processors between a
/// video capturer and its RTCVideoSource.
@interface VideoEffectProcessor : NSObject<RTCVideoCapturerDelegate>

/// Effect chain applied, in array order, to every captured frame.
@property (nonatomic, strong) NSArray<NSObject<VideoFrameProcessorDelegate> *> *videoFrameProcessors;
/// Destination that receives the processed frames.
@property (nonatomic, strong) RTCVideoSource *videoSource;

/// Creates a processor that applies `videoFrameProcessors` in order and
/// forwards the result to `videoSource`.
- (instancetype)initWithProcessors:(NSArray<NSObject<VideoFrameProcessorDelegate> *> *)videoFrameProcessors
videoSource:(RTCVideoSource *)videoSource;

@end
19 changes: 19 additions & 0 deletions common/darwin/Classes/VideoEffectProcessor.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#import <WebRTC/RTCVideoCapturer.h>
#import "VideoEffectProcessor.h"

@implementation VideoEffectProcessor

// Designated initializer: stores the effect chain and the source that will
// receive the processed frames.
- (instancetype)initWithProcessors:(NSArray<NSObject<VideoFrameProcessorDelegate> *> *)videoFrameProcessors
                       videoSource:(RTCVideoSource *)videoSource {
  self = [super init];
  // Standard init idiom: only touch ivars when [super init] did not return nil.
  if (self) {
    _videoFrameProcessors = videoFrameProcessors;
    _videoSource = videoSource;
  }
  return self;
}

// RTCVideoCapturerDelegate: runs the captured frame through each processor in
// order, then forwards the final frame to the video source.
- (void)capturer:(nonnull RTCVideoCapturer *)capturer didCaptureVideoFrame:(nonnull RTCVideoFrame *)frame {
  RTCVideoFrame *processedFrame = frame;
  for (NSObject<VideoFrameProcessorDelegate> *processor in _videoFrameProcessors) {
    processedFrame = [processor capturer:capturer didCaptureVideoFrame:processedFrame];
  }
  [self.videoSource capturer:capturer didCaptureVideoFrame:processedFrame];
}

@end
7 changes: 7 additions & 0 deletions common/darwin/Classes/VideoFrameProcessor.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
#import <WebRTC/RTCVideoCapturer.h>
#import <WebRTC/RTCVideoFrame.h>

/// One stage of a video-effect chain: receives a captured frame and returns the
/// processed frame to pass on to the next stage (or to the video source).
@protocol VideoFrameProcessorDelegate
- (RTCVideoFrame *)capturer:(RTCVideoCapturer *)capturer
didCaptureVideoFrame:(RTCVideoFrame *)frame;
@end
Loading