Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor Android implementation #6

Merged
merged 6 commits into from
Jan 10, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions android/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ buildscript {
}

dependencies {
classpath 'com.android.tools.build:gradle:3.6.3'
classpath 'com.android.tools.build:gradle:4.0.2'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
Expand Down Expand Up @@ -39,6 +39,7 @@ android {
}

compileOptions {
coreLibraryDesugaringEnabled true
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
Expand All @@ -49,7 +50,8 @@ android {
}

dependencies {
implementation 'com.github.webrtc-sdk:android:92.4515.01'
implementation 'androidx.annotation:annotation:1.1.0'
coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.5'
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.annotation:annotation:1.3.0'
implementation 'com.github.webrtc-sdk:android:92.4515.01'
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,42 +5,44 @@
import org.webrtc.CameraVideoCapturer;

/**
 * Logs WebRTC camera lifecycle events. Registered with the WebRTC
 * {@code CameraVideoCapturer} so every camera state transition (open,
 * first frame, freeze, error, disconnect, close) is visible in logcat
 * under the plugin's shared tag.
 *
 * <p>All callbacks are invoked by WebRTC on its camera thread; this
 * handler only logs, so no synchronization is needed.
 */
class CameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler {
    // Shared plugin log tag so camera events group with the rest of the plugin's output.
    private static final String TAG = FlutterWebRTCPlugin.TAG;

    /*
      Camera error handler - invoked when camera can not be opened
      or any camera exception happens on camera thread.
    */
    @Override
    public void onCameraError(String errorDescription) {
        Log.d(TAG, String.format("CameraEventsHandler.onCameraError: errorDescription=%s", errorDescription));
    }

    /* Called when camera is disconnected. */
    @Override
    public void onCameraDisconnected() {
        Log.d(TAG, "CameraEventsHandler.onCameraDisconnected");
    }

    /* Invoked when camera stops receiving frames */
    @Override
    public void onCameraFreezed(String errorDescription) {
        Log.d(TAG, String.format("CameraEventsHandler.onCameraFreezed: errorDescription=%s", errorDescription));
    }

    /* Callback invoked when camera is opening. */
    @Override
    public void onCameraOpening(String cameraName) {
        Log.d(TAG, String.format("CameraEventsHandler.onCameraOpening: cameraName=%s", cameraName));
    }

    /* Callback invoked when first camera frame is available after camera is opened. */
    @Override
    public void onFirstFrameAvailable() {
        Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable");
    }

    /* Callback invoked when camera closed. */
    @Override
    public void onCameraClosed() {
        // Fixed copy/paste bug: previously logged "onFirstFrameAvailable" here.
        Log.d(TAG, "CameraEventsHandler.onCameraClosed");
    }
}
Original file line number Diff line number Diff line change
@@ -1,37 +1,42 @@
package com.cloudwebrtc.webrtc;

import android.util.Log;
import android.graphics.SurfaceTexture;
import android.util.Log;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.cloudwebrtc.webrtc.utils.AnyThreadSink;
import com.cloudwebrtc.webrtc.utils.ConstraintsMap;
import com.cloudwebrtc.webrtc.utils.EglUtils;

import java.util.List;

import org.webrtc.EglBase;
import org.webrtc.MediaStream;
import org.webrtc.RendererCommon.RendererEvents;
import org.webrtc.VideoTrack;

import java.util.List;

import io.flutter.plugin.common.EventChannel;
import io.flutter.view.TextureRegistry;

public class FlutterRTCVideoRenderer implements EventChannel.StreamHandler {

private static final String TAG = FlutterWebRTCPlugin.TAG;
private final SurfaceTexture texture;
private TextureRegistry.SurfaceTextureEntry entry;
private final TextureRegistry.SurfaceTextureEntry entry;
private int id = -1;
@Nullable
private MediaStream mediaStream;

public void Dispose() {
//destroy
if (surfaceTextureRenderer != null) {
surfaceTextureRenderer.release();
}
if (eventChannel != null)
if (eventChannel != null) {
eventChannel.setStreamHandler(null);
}

eventSink = null;
entry.release();
Expand All @@ -45,8 +50,8 @@ public void Dispose() {

private void listenRendererEvents() {
rendererEvents = new RendererEvents() {
private int _rotation = -1;
private int _width = 0, _height = 0;
private int rotation = -1;
private int width, height;

@Override
public void onFirstFrameRendered() {
Expand All @@ -62,39 +67,42 @@ public void onFrameResolutionChanged(
int rotation) {

if (eventSink != null) {
if (_width != videoWidth || _height != videoHeight) {
if (width != videoWidth || height != videoHeight) {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "didTextureChangeVideoSize");
params.putInt("id", id);
params.putDouble("width", (double) videoWidth);
params.putDouble("height", (double) videoHeight);
_width = videoWidth;
_height = videoHeight;
params.putDouble("width", videoWidth);
params.putDouble("height", videoHeight);
width = videoWidth;
height = videoHeight;
eventSink.success(params.toMap());
}

if (_rotation != rotation) {
if (this.rotation != rotation) {
ConstraintsMap params2 = new ConstraintsMap();
params2.putString("event", "didTextureChangeRotation");
params2.putInt("id", id);
params2.putInt("rotation", rotation);
_rotation = rotation;
this.rotation = rotation;
eventSink.success(params2.toMap());
}
}
}
};
}

private SurfaceTextureRenderer surfaceTextureRenderer;
@NonNull
private final SurfaceTextureRenderer surfaceTextureRenderer;

/**
* The {@code VideoTrack}, if any, rendered by this {@code FlutterRTCVideoRenderer}.
*/
@Nullable
private VideoTrack videoTrack;

EventChannel eventChannel;
EventChannel.EventSink eventSink;
private EventChannel eventChannel;
@Nullable
private EventChannel.EventSink eventSink;

public FlutterRTCVideoRenderer(SurfaceTexture texture, TextureRegistry.SurfaceTextureEntry entry) {
this.surfaceTextureRenderer = new SurfaceTextureRenderer("");
Expand Down Expand Up @@ -141,7 +149,7 @@ private void removeRendererFromVideoTrack() {
* @param mediaStream The {@code MediaStream} to be rendered by this
* {@code FlutterRTCVideoRenderer} or {@code null}.
*/
public void setStream(MediaStream mediaStream) {
public void setStream(@Nullable MediaStream mediaStream) {
VideoTrack videoTrack;
this.mediaStream = mediaStream;
if (mediaStream == null) {
Expand All @@ -161,7 +169,7 @@ public void setStream(MediaStream mediaStream) {
* @param videoTrack The {@code VideoTrack} to be rendered by this
* {@code FlutterRTCVideoRenderer} or {@code null}.
*/
public void setVideoTrack(VideoTrack videoTrack) {
public void setVideoTrack(@Nullable VideoTrack videoTrack) {
VideoTrack oldValue = this.videoTrack;

if (oldValue != videoTrack) {
Expand Down Expand Up @@ -204,14 +212,14 @@ private void tryAddRendererToVideoTrack() {
}
}

public boolean checkMediaStream(String id) {
public boolean checkMediaStream(@Nullable String id) {
if (null == id || null == mediaStream) {
return false;
}
return id.equals(mediaStream.getId());
}

public boolean checkVideoTrack(String id) {
public boolean checkVideoTrack(@Nullable String id) {
if (null == id || null == videoTrack) {
return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,61 +5,44 @@
import android.content.Context;
import android.os.Bundle;
import android.util.Log;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.lifecycle.DefaultLifecycleObserver;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleOwner;

import com.cloudwebrtc.webrtc.MethodCallHandlerImpl.AudioManager;
import com.cloudwebrtc.webrtc.utils.RTCAudioManager;
import io.flutter.embedding.android.FlutterActivity;

import java.util.Set;

import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.PluginRegistry.Registrar;
import io.flutter.view.TextureRegistry;

import java.util.Set;

/**
* FlutterWebRTCPlugin
*/
public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware {

static public final String TAG = "FlutterWebRTCPlugin";
public static final String TAG = "FlutterWebRTCPlugin";

private static Application application;

@Nullable
private RTCAudioManager rtcAudioManager;
private MethodChannel channel;
@Nullable
private MethodCallHandlerImpl methodCallHandler;
private LifeCycleObserver observer;
@Nullable
private Lifecycle lifecycle;

public FlutterWebRTCPlugin() {
}

/**
* Plugin registration.
*/
public static void registerWith(Registrar registrar) {
final FlutterWebRTCPlugin plugin = new FlutterWebRTCPlugin();

plugin.startListening(registrar.context(), registrar.messenger(), registrar.textures());

if (registrar.activeContext() instanceof Activity) {
plugin.methodCallHandler.setActivity((Activity) registrar.activeContext());
}
application = ((Application) registrar.context().getApplicationContext());
application.registerActivityLifecycleCallbacks(plugin.observer);

registrar.addViewDestroyListener(view -> {
plugin.stopListening();
return false;
});
}

@Override
public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) {
startListening(binding.getApplicationContext(), binding.getBinaryMessenger(),
Expand All @@ -74,9 +57,9 @@ public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
@Override
public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) {
methodCallHandler.setActivity(binding.getActivity());
this.observer = new LifeCycleObserver();
this.lifecycle = ((HiddenLifecycleReference) binding.getLifecycle()).getLifecycle();
this.lifecycle.addObserver(this.observer);
observer = new LifeCycleObserver();
lifecycle = ((HiddenLifecycleReference) binding.getLifecycle()).getLifecycle();
lifecycle.addObserver(observer);
}

@Override
Expand All @@ -92,16 +75,16 @@ public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBindin
@Override
public void onDetachedFromActivity() {
methodCallHandler.setActivity(null);
if (this.observer != null) {
this.lifecycle.removeObserver(this.observer);
if (this.application!=null) {
this.application.unregisterActivityLifecycleCallbacks(this.observer);
if (observer != null) {
lifecycle.removeObserver(observer);
if (FlutterWebRTCPlugin.application!=null) {
FlutterWebRTCPlugin.application.unregisterActivityLifecycleCallbacks(observer);
}
}
this.lifecycle = null;
}

private void startListening(final Context context, BinaryMessenger messenger,
private void startListening(@NonNull final Context context, BinaryMessenger messenger,
TextureRegistry textureRegistry) {
methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry,
new AudioManager() {
Expand All @@ -111,7 +94,7 @@ public void onAudioManagerRequested(boolean requested) {
if (rtcAudioManager == null) {
rtcAudioManager = RTCAudioManager.create(context);
}
rtcAudioManager.start(FlutterWebRTCPlugin.this::onAudioManagerDevicesChanged);
rtcAudioManager.start(FlutterWebRTCPlugin::onAudioManagerDevicesChanged);
} else {
if (rtcAudioManager != null) {
rtcAudioManager.stop();
Expand Down Expand Up @@ -151,9 +134,11 @@ private void stopListening() {
}
}

// This method is called when the audio manager reports audio device change,
// e.g. from wired headset to speakerphone.
private void onAudioManagerDevicesChanged(
/*
This method is called when the audio manager reports audio device change,
e.g. from wired headset to speakerphone.
*/
private static void onAudioManagerDevicesChanged(
final RTCAudioManager.AudioDevice device,
final Set<RTCAudioManager.AudioDevice> availableDevices) {
Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
Expand Down
Loading