
After multiple attempts to open the UVC camera, it fails to reopen. Assistance is requested #694

Open
makeking opened this issue Nov 24, 2023 · 2 comments

Comments

@makeking

"Hello, let me provide an overview of our usage pattern. We frequently need to open and close the camera multiple times. However, after repeated openings, we noticed significant heating of the equipment. To address this, we adopted a strategy where we open the camera only for the first instance and subsequently handle openings through the stream object.

During our experiments, we observed that multiple openings and closings of the stream object lead to frequent changes in the USB device numbers under the /dev/bus/usb/001 directory.

Currently, we are facing three issues:

  1. mCurrentCamera NullPointerException.
  2. When opening/closing the camera, there are changes or omissions in the USB device numbers under /dev/bus/usb/001.

We would appreciate guidance on how to resolve these issues. Thank you."
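For reference, here is a minimal sketch of the "open once, then only toggle the stream" pattern described above. It is written against the AUSBC calls that appear in the code posted below (openCamera, isCameraOpened, startCapture, stopCapture); the StreamToggle helper class itself is illustrative and not part of the library.

import android.view.Surface;

import com.jiangdg.ausbc.MultiCameraClient;
import com.jiangdg.ausbc.camera.CameraUVC;
import com.jiangdg.ausbc.camera.bean.CameraRequest;
import com.jiangdg.ausbc.widget.IAspectRatio;

/** Hypothetical helper: open the camera device once, then only start/stop the preview stream. */
public final class StreamToggle {

    private final MultiCameraClient.ICamera camera; // obtained in onConnectDev
    private final IAspectRatio previewView;         // e.g. an AspectRatioTextureView
    private final CameraRequest request;

    public StreamToggle(MultiCameraClient.ICamera camera, IAspectRatio previewView, CameraRequest request) {
        this.camera = camera;
        this.previewView = previewView;
        this.request = request;
    }

    /** Open the device only if it is not already open; otherwise reuse the existing handle. */
    public void ensureOpened() {
        if (!camera.isCameraOpened()) {
            camera.openCamera(previewView, request);
        }
    }

    /** Start pushing frames to the given surface without reopening the device. */
    public void startPreview(Surface surface) {
        if (camera instanceof CameraUVC) {
            ((CameraUVC) camera).startCapture(surface);
        }
    }

    /** Stop the stream but keep the USB handle open, so the device is not re-enumerated. */
    public void stopPreview() {
        if (camera instanceof CameraUVC) {
            ((CameraUVC) camera).stopCapture();
        }
    }
}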

@makeking
Author

package com.bete.cellcounter.ui.view;

import static com.myutils.Canstant.HOME_ADMIN_NUM;
import static com.myutils.Canstant.HOME_MAIN_NUM;
import static com.myutils.Canstant.HOME_NUM;
import static com.myutils.Canstant.INDEX_RESULT;
import static com.myutils.GlobalDate.SHOW_TYPE;
import static com.myutils.GlobalDate.g_camera_path;
import static com.myutils.GlobalDate.g_camera_setting;
import static com.myutils.GlobalDate.g_save_photo;
import static com.myutils.GlobalDate.test;

import android.content.Context;
import android.content.DialogInterface;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.LinearLayout;
import android.widget.RadioGroup;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.bete.cellcounter.R;
import com.bete.cellcounter.bean.CameraSetting;
import com.bete.cellcounter.thread.LampSerial;
import com.bete.cellcounter.ui.new_activity.HomeActivity;
import com.customWidget.EditSpinner;
import com.jiangdg.ausbc.MultiCameraClient;
import com.jiangdg.ausbc.callback.ICameraStateCallBack;
import com.jiangdg.ausbc.callback.ICaptureCallBack;
import com.jiangdg.ausbc.callback.IDeviceConnectCallBack;
import com.jiangdg.ausbc.camera.CameraUVC;
import com.jiangdg.ausbc.camera.bean.CameraRequest;
import com.jiangdg.ausbc.render.env.RotateType;
import com.jiangdg.ausbc.utils.Logger;
import com.jiangdg.ausbc.utils.SettableFuture;
import com.jiangdg.ausbc.utils.Utils;
import com.jiangdg.ausbc.widget.AspectRatioTextureView;
import com.jiangdg.ausbc.widget.IAspectRatio;
import com.jiangdg.usb.DeviceFilter;
import com.jiangdg.usb.USBMonitor;
import com.myutils.Canstant;
import com.myutils.GlobalDate;
import com.utils.LogUtils;
import com.utils.SaveHelper;
import com.utils.SharedPreferencesUtils;
import com.warkiz.widget.IndicatorSeekBar;

import java.io.File;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import kotlin.jvm.internal.Intrinsics;
import kotlin.jvm.internal.TypeIntrinsics;

public class OnlyCameraView extends BaseView implements ICameraStateCallBack {

Context mContext;
HomeActivity activity;
boolean istakePicture = false;
View view;
IndicatorSeekBar isbHue, isbSaturation, isbSharpness, isbGamma,
//            isbWhiteBalance,
isbBacklightComp,
// isbGain , isbExposureTime,
isbIris, isbFocus, isbZoom, isbPan, isbTilt, isbRoll;
CheckBox cbContrastAuto, cbHueAuto, cbWhiteBalanceAuto, cbExposureTimeAuto, cbFocusAuto, cbBrightness;
RadioGroup rgPowerLineFrequency;
SeekBar isbBrightness, isbContrast, isbWhiteBalance, isbGain, isbExposureTime;
EditSpinner spinner_resolution;
AspectRatioTextureView asp_artv;
//    AspectRatioSurfaceView asp_artv;
//    CardView cv_view;
public MultiCameraClient mCameraClient;
public IAspectRatio mCameraView;
public LinkedHashMap<Integer, MultiCameraClient.ICamera> mCameraMap = new LinkedHashMap<>();
public SettableFuture<MultiCameraClient.ICamera> mCurrentCamera;
boolean canOpenStream = false;
public static AtomicBoolean mRequestPermission = new AtomicBoolean(false);
int pubSpace;
TextView tv_load_value;
Button btn_load_cancel;
LinearLayout ll_layout_camera_setting;
LinearLayout ll_layout_loading;

public OnlyCameraView(@NonNull Context context) {
    this(context, null);
}

public OnlyCameraView(@NonNull Context context, AttributeSet attrs) {
    this(context, attrs, -1);
}

public OnlyCameraView(@NonNull Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    mContext = context;
    activity = (HomeActivity) context;
    initLauout();


}


private void initLauout() {
    view = LayoutInflater.from(mContext).inflate(R.layout.view_only_camera, this);
    initView();
}

private void initView() {
    initCamerSettingView();
    initLoadingView();
    initAllView();

}

/**
 * Set up listeners for the camera settings view.
 */
private void cameraSettingViewListenter() {
    // Configure the seek bar values
    setProgress();
    // Back button behaviour: save the current settings and leave the view
    activity.iv_back.setOnClickListener(v -> {
        activity.checkView(HOME_NUM, HOME_ADMIN_NUM, 3);
        if (GlobalDate.DEVICE)
            LampSerial.getInstance().myPause();
        if (isbBrightness.getProgress() == 0 && isbContrast.getProgress() == 0
                && isbGain.getProgress() == 0 && isbExposureTime.getProgress() == 0
        ) {
            return;
        }
        CameraSetting cameraSetting = new CameraSetting();
        cameraSetting.setCamera_brightness(isbBrightness.getProgress());
        cameraSetting.setCamera_Contrast(isbContrast.getProgress());
        cameraSetting.setCamera_Gain(isbGain.getProgress());
        cameraSetting.setCamera_exposure_time(isbExposureTime.getProgress());
        cameraSetting.setCamera_auto_exposure_mode(cbExposureTimeAuto.isChecked());
        cameraSetting.setCamera_contrast_auto(cbContrastAuto.isChecked());
        SharedPreferencesUtils.saveCameraSetting(cameraSetting);
        g_camera_setting = SharedPreferencesUtils.getCameraSetting();
    });
}


/**
 * Configure all seek bars with their saved values and listeners.
 */
private void setProgress() {
    setOldSeekBarParams(
            isbBrightness,
            true,
            new int[]{0, 100},
            g_camera_setting.getCamera_brightness());
    LogUtils.i(" g_camera_setting.getCamera_brightness() : " + g_camera_setting.getCamera_brightness());
    isbBrightness.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                                                 @Override
                                                 public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                                                     LogUtils.i("current value Brightness is " + progress);
                                                     setBrightness(progress);
                                                     int brightness = getBrightness();
                                                     LogUtils.i("current value Brightness is " + brightness);
                                                 }

                                                 @Override
                                                 public void onStartTrackingTouch(SeekBar seekBar) {
                                                 }

                                                 @Override
                                                 public void onStopTrackingTouch(SeekBar seekBar) {
                                                 }
                                             }
    );

    setOldSeekBarParams(
            isbContrast,
            true,
            new int[]{0, 100},
            g_camera_setting.getCamera_Contrast());
    LogUtils.i(" g_camera_setting.getCamera_Contrast() : " + g_camera_setting.getCamera_Contrast());
    isbContrast.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                                               @Override
                                               public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                                                   LogUtils.i("current value contrast is " + progress);
                                                   setContrast(progress);
                                                   int contrast = getContrast();
                                                   LogUtils.i("current value contrast value is " + contrast);

                                                   // g_camera_setting.setCamera_Contrast(contrast);
                                               }

                                               @Override
                                               public void onStartTrackingTouch(SeekBar seekBar) {

                                               }

                                               @Override
                                               public void onStopTrackingTouch(SeekBar seekBar) {

                                               }
                                           }
    );

    cbContrastAuto.setChecked(g_camera_setting.isCamera_contrast_auto());
    isbContrast.setEnabled(!g_camera_setting.isCamera_contrast_auto());
    cbContrastAuto.setOnCheckedChangeListener((buttonView, isChecked) -> {
        try {
            isbContrast.setEnabled(!isChecked);
            setAutoContrast(isChecked);
            boolean autoContrast = getAutoContrast();
            LogUtils.i("autoContrast state is : " + autoContrast);
        } catch (Exception exception) {
            exception.printStackTrace();
        }

    });
    // Gain
    setOldSeekBarParams(
            isbGain,
            true,
            new int[]{0, 100},
            g_camera_setting.getCamera_Gain());
    LogUtils.i(" g_camera_setting.getCamera_Gain() : " + g_camera_setting.getCamera_Gain());
    isbGain.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                                           @Override
                                           public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                                               LogUtils.i("current value gain is " + progress);
                                               setGain(progress);
                                               int gain = getGain();
                                               LogUtils.i("current value gain value is " + gain);
                                           }

                                           @Override
                                           public void onStartTrackingTouch(SeekBar seekBar) {

                                           }

                                           @Override
                                           public void onStopTrackingTouch(SeekBar seekBar) {

                                           }
                                       }
    );
    // Exposure time
    // Load the saved exposure time
    setOldSeekBarParams(
            isbExposureTime,
            true,
            new int[]{0, 100},
            g_camera_setting.getCamera_exposure_time());
    LogUtils.i(" g_camera_setting.getCamera_exposure_time() : " + g_camera_setting.getCamera_exposure_time());
    isbExposureTime.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                                                   @Override
                                                   public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                                                       LogUtils.i("current value exposureTime is " + progress);
                                                       setExposureTime(progress);
                                                       Integer exposureTime = getExposureTime();
                                                       LogUtils.i("current value exposureTime value is " + exposureTime);

                                                   }

                                                   @Override
                                                   public void onStartTrackingTouch(SeekBar seekBar) {

                                                   }

                                                   @Override
                                                   public void onStopTrackingTouch(SeekBar seekBar) {

                                                   }
                                               }
    );
    isbExposureTime.setEnabled(!g_camera_setting.isCamera_auto_exposure_mode());
    cbExposureTimeAuto.setChecked(g_camera_setting.isCamera_auto_exposure_mode());
    cbExposureTimeAuto.setOnCheckedChangeListener((buttonView, isChecked) -> {
        try {
            setAutoExposureTimeState(isChecked ? 1 : 0);
            setAutoExposureTimeState(isChecked ? 8 : 1);
            isbExposureTime.setEnabled(!isChecked);
            boolean autoExposureTimeState = getAutoExposureTimeState();
            LogUtils.i("autoExposureTimeState is :" + autoExposureTimeState);

        } catch (Exception exception) {
            exception.printStackTrace();
        }

    });

}

protected final void setBrightness(int brightness) {
    setFunctionValue(FunctionType.BRIGHTNESS, brightness);
}

@org.jetbrains.annotations.Nullable
protected final Integer getBrightness() {
    return (Integer) getFunctionValue(FunctionType.BRIGHTNESS);
}

protected final void setAutoContrast(boolean autoContrast) {
    setFunctionValue(FunctionType.AUTOCONTRAST, autoContrast);

}

protected boolean getAutoContrast() {
    return (boolean) getFunctionValue(FunctionType.AUTOCONTRAST);

}

protected final void setContrast(int contrast) {
    setFunctionValue(FunctionType.CONTRAST, contrast);
}

protected int getContrast() {
    return (Integer) getFunctionValue(FunctionType.CONTRAST);
}

protected final void setGain(int gain) {
    setFunctionValue(FunctionType.GAIN, gain);
}

@org.jetbrains.annotations.Nullable
protected final Integer getGain() {
    return (Integer) getFunctionValue(FunctionType.GAIN);
}


public final boolean getAutoExposureTimeState() {
    return (boolean) getFunctionValue(FunctionType.AUTOEXPOSURETIMESTATE);
}

public final void setAutoExposureTimeState(int mode) {
    setFunctionValue(FunctionType.AUTOEXPOSURETIMESTATE, mode);

}

/**
 * Set saturation
 */

protected final void setExposureTime(int exposureTime) {
    setFunctionValue(FunctionType.EXPOSURETIME, exposureTime);

}

protected final Integer getExposureTime() {
    return (Integer) getFunctionValue(FunctionType.EXPOSURETIME);
}

protected final Object getFunctionValue(FunctionType functionType) {
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        switch (functionType) {
            case EXPOSURETIME:
                return ((CameraUVC) currentCamera).getExposureTime();
            case CONTRAST:
                return ((CameraUVC) currentCamera).getContrast();
            case BRIGHTNESS:
                return ((CameraUVC) currentCamera).getBrightness();
            case GAIN:
                return ((CameraUVC) currentCamera).getGain();
            // auto controls
            case AUTOEXPOSURETIMESTATE:
                return ((CameraUVC) currentCamera).getAutoExposureTimeState();
            case AUTOCONTRAST:
                return ((CameraUVC) currentCamera).getAutoContrast();
        }

    } else {
        return 0;
    }
    return 0;
}

protected final void setFunctionValue(FunctionType functionType, Object value) {
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        if (!(currentCamera instanceof CameraUVC)) {
            return;
        }
        switch (functionType) {
            case EXPOSURETIME:
                ((CameraUVC) currentCamera).setExposureTime((int) value);
                break;
            case CONTRAST:
                ((CameraUVC) currentCamera).setContrast((int) value);
                break;
            case BRIGHTNESS:
                ((CameraUVC) currentCamera).setBrightness((int) value);
                break;
            case GAIN:
                ((CameraUVC) currentCamera).setGain((int) value);
                break;
            // auto controls
            case AUTOEXPOSURETIMESTATE:
                ((CameraUVC) currentCamera).setAutoExposureTimeState((int) value);
                break;
            case AUTOCONTRAST:
                ((CameraUVC) currentCamera).setAutoContrast((boolean) value);
                break;
        }

    }

}

enum FunctionType {
    EXPOSURETIME, GAIN, CONTRAST, BRIGHTNESS, AUTOEXPOSURETIMESTATE, AUTOCONTRAST
}


private void setOldSeekBarParams(SeekBar seekBar, boolean isEnable, int[] limit,
                                 int value) {
    seekBar.setEnabled(isEnable);
    if (isEnable) {
        seekBar.setMax(limit[1]);
        seekBar.setProgress(value);
    }
}

private void loadingViewListenter() {
    istakePicture = false;
    g_camera_path = "";
    tv_load_value.setText(GlobalDate.g_sample_num);
    if (GlobalDate.DEVICE) {
        new Thread() {
            @Override
            public void run() {
                super.run();
                long currentTimeMillis = System.currentTimeMillis();
                while (!istakePicture) {
                    if (System.currentTimeMillis() - currentTimeMillis > 5000) {
                        activity.runOnUiThread(() -> {
                            activity.showCustomDialog(activity.getString(R.string.xiangjidakaishibai), activity.getString(R.string.tishi), -1, activity.getString(R.string.queding),
                                    new DialogInterface.OnClickListener() {
                                        @Override
                                        public void onClick(DialogInterface dialog, int which) {
                                            activity.checkView(HOME_NUM, HOME_MAIN_NUM, 0);
                                            dialog.dismiss();
                                        }
                                    }, "", null, R.layout.dialog_normal_message, true);
                        });
                        return;
                    }

                    if (isCameraOpened()) {
                        if (GlobalDate.DEVICE)
                            LampSerial.getInstance().myResume();
                        surfaceSizeChanged(3648, 2736);
                        // Take a picture
                        SystemClock.sleep(2500);

                        activity.runOnUiThread(() -> {
                            // iscallBack = true;
                            // g_camera_path = "/sdcard/CellCounter/data/20231120173355/2023-11-20-17-33-55.jpg";
                            // g_save_photo = g_camera_path;
                            // activity.checkView(HOME_NUM, HOME_MAIN_NUM, 3);
                            takePicture();
                        });
                        SystemClock.sleep(400);
                        return;
                    }

                }
            }
        }.start();
    } else {
        iscallBack = true;
        g_camera_path = "/sdcard/CellCounter/DataTest/src_1677476835988.jpg";

// g_camera_path = "/sdcard/CellCounter/DataTest/src_16774768359881.png";
// g_camera_path = "/sdcard/CellCounter/DataTest/2023-11-03-09-51-06.jpg";
File file1 = new File(g_camera_path);
if (file1.exists()) {
if (file1.length() < 100) {
// 报错提示,拍照失败
takeFaile();
return;
}

        }
        g_save_photo = g_camera_path;
        activity.checkView(HOME_NUM, HOME_MAIN_NUM, 3);
    }

}


private void initAllView() {

    // cv_view = view.findViewById(R.id.cv_view);
    asp_artv = view.findViewById(R.id.asp_artv);
    // IAspectRatio cameraView = (IAspectRatio) asp_artv;
    ll_layout_camera_setting = (LinearLayout) view.findViewById(R.id.ll_layout_camera_setting);
    ll_layout_loading = (LinearLayout) view.findViewById(R.id.ll_layout_loading);
    canOpenStream = true;
    getDefultCameraBoxHight(1);
    mCameraView = asp_artv;
    if (asp_artv == null) {
        registerMultiCamera();
    }
    handleTextureView(asp_artv);
    // handleSurfaceView(asp_artv);

}

// private void handleSurfaceView(AspectRatioSurfaceView surfaceView) {
//     Log.i(TAG, "handleTextureView: can use");
//     surfaceView.addOnAttachStateChangeListener(new OnAttachStateChangeListener() {
//         @Override
//         public void onViewAttachedToWindow(View v) {
//             registerMultiCamera();
//         }
//
//         @Override
//         public void onViewDetachedFromWindow(View v) {
//             LogUtils.d("onSurfaceTextureDestroyed: can use");
//             boolean cameraOpened = isCameraOpened();
//             LogUtils.d("onSurfaceTextureDestroyed: cameraOpened state : " + cameraOpened + " : can use ");
//             if (cameraOpened) {
//                 captureStreamStop();
//                 stopTakeStream();
//             } else {
//                 LogUtils.d(" this method can call close stream ");
//                 unRegisterMultiCamera();
//             }
//             // Ask the device to release memory
//             System.gc();
//         }
//     });
// }

private void initLoadingView() {
    tv_load_value = view.findViewById(R.id.tv_load_value);
    btn_load_cancel = view.findViewById(R.id.btn_load_cancel);
}

int pollOutNum = 0;
boolean pullOutState = false;
boolean iscallBack = false;

public void takePicture() {
    iscallBack = false;
    SystemClock.sleep(500);
    String savePhotoPath = SaveHelper.getSavePhotoPath();
    File file = new File(savePhotoPath);
    // Remove any stale file before creating a fresh one
    if (file.exists()) {
        file.delete();
    }
    try {
        file.createNewFile();
        captureImage(new ICaptureCallBack() {
            @Override
            public void onBegin() {
                LogUtils.i("take onBegin : ");
                iscallBack = true;
            }

            @Override
            public void onError(@Nullable String error) {
                iscallBack = true;
                LogUtils.i("take error : " + error.toString());
                takeFaile();
            }

            @Override
            public void onComplete(@Nullable String path) {
                iscallBack = true;
                // Check the file size; if the file is too small, treat the capture as failed
                File file1 = new File(path);
                if (file1.exists()) {
                    if (file1.length() < 100) {
                        // The file is too small; report that the capture failed
                        takeFaile();
                        return;
                    }

                }
                LogUtils.i("take success : " + path);

                // g_camera_path = path;
                g_camera_path = savePhotoPath;
                g_save_photo = g_camera_path;
                // activity.checkMainView(3);
                activity.checkView(HOME_NUM, HOME_MAIN_NUM, INDEX_RESULT);
            }
        }, savePhotoPath);
    } catch (Exception e) {
        e.printStackTrace();
        // If an error occurs while taking the picture, return to the home page
        takeFaile();
    }
    SystemClock.sleep(1000);
}

private void takeFaile() {
    activity.showCustomDialog(activity.getString(R.string.paizhaoshibai),
            activity.getString(R.string.tishi), -1, activity.getString(R.string.queding), new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {

                    // activity.checkMainView(0);
                    activity.checkView(HOME_NUM, HOME_MAIN_NUM, 0);
                    dialog.dismiss();
                }
            }, "", null, R.layout.dialog_normal_message, true);
}

private void initCamerSettingView() {
    isbBrightness = view.findViewById(R.id.isbBrightness);
    isbContrast = view.findViewById(R.id.isbContrast);
    cbBrightness = view.findViewById(R.id.cb_Brightness);
    cbContrastAuto = view.findViewById(R.id.cbContrastAuto);
    isbHue = view.findViewById(R.id.isbHue);
    isbSaturation = view.findViewById(R.id.isbSaturation);
    cbHueAuto = view.findViewById(R.id.cbHueAuto);
    isbSharpness = view.findViewById(R.id.isbSharpness);
    isbGamma = view.findViewById(R.id.isbGamma);
    isbWhiteBalance = view.findViewById(R.id.isbWhiteBalance);
    cbWhiteBalanceAuto = view.findViewById(R.id.cbWhiteBalanceAuto);
    isbBacklightComp = view.findViewById(R.id.isbBacklightComp);
    isbGain = view.findViewById(R.id.isbGain);
    isbExposureTime = view.findViewById(R.id.isbExposureTime);
    cbExposureTimeAuto = view.findViewById(R.id.cbExposureTimeAuto);
    isbIris = view.findViewById(R.id.isbIris);
    isbFocus = view.findViewById(R.id.isbFocus);
    cbFocusAuto = view.findViewById(R.id.cbFocusAuto);
    isbZoom = view.findViewById(R.id.isbZoom);
    isbPan = view.findViewById(R.id.isbPan);
    isbTilt = view.findViewById(R.id.isbTilt);
    isbRoll = view.findViewById(R.id.isbRoll);
    spinner_resolution = view.findViewById(R.id.spinner_resolution);
    rgPowerLineFrequency = view.findViewById(R.id.rgPowerLineFrequency);

}

void clear() {
    unRegisterMultiCamera();
}

public UsbDevice mDevice;
int mDeviceId = -1;

protected final void registerMultiCamera() {

    // LogUtils.e("camera result is : registerMultiCamera " + mCameraClient);
    // LogUtils.d("registerMultiCamera :" + this.getClass().getName());
    Intrinsics.checkNotNullExpressionValue(mContext, "requireContext()");
    mCameraClient = new MultiCameraClient(mContext, (IDeviceConnectCallBack) (new IDeviceConnectCallBack() {

        @Override
        public void onConnectDev(@Nullable UsbDevice device, @Nullable USBMonitor.UsbControlBlock ctrlBlock) {

            // LogUtils.e("camera result is : " + device);
            LogUtils.d("onConnectDev :" + this.getClass().getName());
            if (device != null) {
                mDevice = device;
                if (ctrlBlock != null) {
                    MultiCameraClient.ICamera iCamera = mCameraMap.get(device.getDeviceId());
                    if (iCamera != null) {
                        LogUtils.i(" iCamera: set usb control block ");
                        iCamera.setUsbControlBlock(ctrlBlock);
                        if (mCurrentCamera != null) {
                            try {
                                // If a previous future still exists, cancel it and reset it to null
                                mCurrentCamera.cancel(true);
                                mCurrentCamera = null;
                                LogUtils.i("camera result is : " + mCurrentCamera);
                            } catch (Exception exception) {
                                exception.printStackTrace();
                            }
                        }
                        // Future that will hold the current camera
                        LogUtils.i(" SettableFuture init ");
                        // LogUtils.i("keep the device as a persistent object ");
                        mCurrentCamera = new SettableFuture<MultiCameraClient.ICamera>();
                        mCurrentCamera.set(iCamera);
                        // LogUtils.e("camera result is : " + mCurrentCamera);
                        boolean currentOPenState = isCameraOpened();
                        LogUtils.i("currentOPenState is : " + currentOPenState + " : can use ");
                        if (!currentOPenState) {
                            LogUtils.d(" call open camera : can use ");
                            openCamera(mCameraView);
                        }
                        startStream();
                        if (mDeviceId == -1)
                            mDeviceId = device.getDeviceId();
                        LogUtils.d("camera connection. pid: ${" + device.getProductId() + "}, vid: ${" + device.getVendorId() + "}" + (GlobalDate.testNum++) + " : can use ");
                    }
                }
            }
        }

        @Override
        public void onAttachDev(@org.jetbrains.annotations.Nullable UsbDevice device) {

            // List deviceList = getDeviceList();
            // for (int i = 0; i < deviceList.size(); i++) {
            //     LogUtils.e("attached device : " + deviceList.get(i) + " i == " + i);
            // }
            LogUtils.d("onAttachDev :" + this.getClass().getName());
            try {
                if (device != null) {
                    mDevice = device;
                    if (mCameraMap.containsKey(device.getDeviceId())) {
                        // todo: for an experiment the stream does not need to be opened; for preview, open the stream object
                        if (isCameraOpened() && canOpenStream) {
                            LogUtils.d(" mRequestPermission : can use ");
                            // todo: why was there no error before? the method only ran once and was never reopened
                            // todo: why is it called later? because the camera is invoked again
                            startStream();
                        }
                        return;
                    }
                    Intrinsics.checkNotNullExpressionValue(mContext, "it");
                    MultiCameraClient.ICamera iCamera = generateCamera(mContext, device);
                    mCameraMap.put(device.getDeviceId(), iCamera);
                    if (!mRequestPermission.get()) {
                        UsbDevice usbDevice = getDefaultCamera();
                        if (usbDevice != null) {
                            if (usbDevice.getVendorId() == device.getVendorId() && usbDevice.getProductId() == device.getProductId()) {
                                // Logger.INSTANCE.i("CameraFragment", "default camera pid: " + usbDevice.getProductId() + ", vid: " + usbDevice.getVendorId());
                                LogUtils.d("default camera pid: " + usbDevice.getProductId() + ", vid: " + usbDevice.getVendorId() + " : can use");
                                requestPermission(device);
                            }
                        } else {
                            LogUtils.d("CameraFragment usbDevice not null : can use");
                            // mDeviceId = device.getDeviceId();
                            requestPermission(device);
                        }
                        // Start invoking the stream object
                    }
                }
            } catch (Exception exception) {
                exception.printStackTrace();
            }
        }

        @Override
        public void onDetachDec(@org.jetbrains.annotations.Nullable UsbDevice device) {

            // Toast.makeText(mContext, "device-detached broadcast received! mDeviceId " + mDeviceId + " device " + device.getDeviceId(), Toast.LENGTH_LONG).show();
            // if (mDeviceId != device.getDeviceId()) {
            //     LogUtils.d("device-detached broadcast received for another device");
            //     return;
            // }
            // LogUtils.d("onDetachDec :" + this.getClass().getName());
            int value = device != null ? device.getDeviceId() : 0;
            mDevice = null;
            if (mCameraMap == null) {
                throw new NullPointerException("null cannot be cast to non-null type kotlin.collections.MutableMap<K, V>");
            } else {
                MultiCameraClient.ICamera iCamera = (MultiCameraClient.ICamera) TypeIntrinsics.asMutableMap(mCameraMap).remove(value);
                if (iCamera != null) {
                    iCamera.setUsbControlBlock((USBMonitor.UsbControlBlock) null);
                }
                mRequestPermission.set(false);
                try {
                    if (mCurrentCamera != null) {
                        mCurrentCamera.cancel(true);
                    }
                    LogUtils.i("mCurrentCamera will be set to null ");
                    // todo: setting mCurrentCamera to null prevents the software from requesting again and makes the "camera open failed" dialog appear every time
                    mCurrentCamera = null;
                } catch (Exception exception) {
                    exception.printStackTrace();
                }
            }
            mDeviceId = -1;
        }

        @Override
        public void onDisConnectDec(@org.jetbrains.annotations.Nullable UsbDevice device, @org.jetbrains.annotations.Nullable USBMonitor.UsbControlBlock ctrlBlock) {

            // if (mDeviceId != device.getDeviceId()) {
            //     LogUtils.d("device-disconnected broadcast received for another device");
            //     return;
            // }
            LogUtils.d("onDisConnectDec :" + this.getClass().getName());
            // todo: this callback fires while the camera is still running
            closeCamera();
            mRequestPermission.set(false);
        }

        @Override
        public void onCancelDev(@org.jetbrains.annotations.Nullable UsbDevice device) {

            // if (mDeviceId != device.getDeviceId()) {
            //     LogUtils.d("device-cancel broadcast received for another device");
            //     return;
            // }
            LogUtils.i("onCancelDev :" + this.getClass().getName());
            mRequestPermission.set(false);
            try {
                if (mCurrentCamera != null) {
                    mCurrentCamera.cancel(true);
                }
                mCurrentCamera = (SettableFuture) null;
                LogUtils.e("camera result is : " + mCurrentCamera);
            } catch (Exception var3) {
                var3.printStackTrace();
            }
        }
    }));

    LogUtils.i("camera result is : registerMultiCamera " + mCameraClient);
    mCameraClient.register();

}

// Surface mySurface;

protected void startStream() {
    if (mCameraView != null) {
        if (mCameraView instanceof TextureView) {
            TextureView textureView = (TextureView) mCameraView;
            SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
            if (surfaceTexture != null) {
                Surface mySurface = new Surface(surfaceTexture);
                startTakeStream(mySurface);
            }

        } else if (mCameraView instanceof SurfaceView) {
            SurfaceView surfaceView = (SurfaceView) mCameraView;
            startTakeStream(surfaceView.getHolder().getSurface());
        }
    }

}

protected void unRegisterMultiCamera() {
    LogUtils.i("unRegisterMultiCamera use ");
    if (mCameraMap != null && mCameraMap.size() > 0) {
        // Iterate the map values: the map is keyed by USB deviceId, not by a 0-based index
        for (MultiCameraClient.ICamera camera : mCameraMap.values()) {
            if (camera != null) {
                Log.i(TAG, "unRegisterMultiCamera: camera is not null");
                if (camera.isCameraOpened()) {
                    camera.closeCamera();
                }
            } else {
                Log.e(TAG, "unRegisterMultiCamera: camera is null");
            }

        }
    }

}

@org.jetbrains.annotations.Nullable
protected final List getDeviceList() {
    return mCameraClient.getDeviceList(null);
}

private void handleTextureView(TextureView textureView) {
    Log.i(TAG, "handleTextureView: can use");
    textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {

        @Override
        public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
            LogUtils.d("onSurfaceTextureAvailable: can use ");
            LogUtils.d("camera result is onSurfaceTextureAvailable: can use ");
            registerMultiCamera();

        }

        @Override
        public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
            LogUtils.d("onSurfaceTextureSizeChanged: can use width " + width + " height " + pubSpace);
            surfaceSizeChanged(width, pubSpace);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
            LogUtils.d("onSurfaceTextureDestroyed: can use");
            boolean cameraOpened = isCameraOpened();
            LogUtils.d("onSurfaceTextureDestroyed: cameraOpened state : " + cameraOpened + " : can use ");
            if (cameraOpened) {
                captureStreamStop();
                stopTakeStream();
            } else {
                LogUtils.d(" this method can call close stream ");
                unRegisterMultiCamera();
            }
            // Ask the device to release memory
            System.gc();
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {

        }
    });
}

/**
 * Get current opened camera
 *
 * @return current camera, see [MultiCameraClient.ICamera]
 */
protected MultiCameraClient.ICamera getCurrentCamera() {
    try {
        // Guard against the NullPointerException reported above: mCurrentCamera is set to
        // null in onDetachDec/onCancelDev, so it can be null here
        if (mCurrentCamera == null) {
            LogUtils.e("camera result is : mCurrentCamera is null");
            return null;
        }
        MultiCameraClient.ICamera camera = mCurrentCamera.get(2, TimeUnit.SECONDS);
        LogUtils.e("camera result is : " + camera);
        return camera;
    } catch (Exception e) {
        e.printStackTrace();
        // LogUtils.e("camera result is : " + e);
        return null;
    }
}

/**
 * Request permission
 *
 * @param device see [UsbDevice]
 */
protected void requestPermission(UsbDevice device) {
    mRequestPermission.set(true);
    mCameraClient.requestPermission(device);
}

/**
 * Generate camera
 *
 * @param ctx    context [Context]
 * @param device Usb device, see [UsbDevice]
 * @return Inheritor assignment camera api policy
 */
protected MultiCameraClient.ICamera generateCamera(Context ctx, UsbDevice device) {
    return new CameraUVC(ctx, device);
}

/**
 * Get default camera
 *
 * @return Open camera by default, should be [UsbDevice]
 */
// todo: this needs to be revised
protected UsbDevice getDefaultCamera() {
    return null;
}


protected void captureImage(ICaptureCallBack callBack, String savePath) {
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        currentCamera.captureImage(callBack, savePath);
    }
}


/**
 * Is camera opened
 *
 * @return camera open status
 */
protected boolean isCameraOpened() {
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();

    // LogUtils.e("11111111");
    if (currentCamera != null) {
        boolean cameraOpened = currentCamera.isCameraOpened();
        // Log the actual state rather than the literal false
        LogUtils.i("cameraOpened state IS : " + cameraOpened);
        return cameraOpened;
    }
    return false;

}

/**
 * Stop capture H264 & AAC only
 */
protected void captureStreamStop() {
    // Guard against getCurrentCamera() returning null
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        currentCamera.captureStreamStop();
    }
}

protected final void openCamera(@org.jetbrains.annotations.Nullable IAspectRatio st) {
    IAspectRatio isapectRatio = !(st instanceof TextureView) && !(st instanceof SurfaceView) ? null : st;
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {

// LogUtils.i("开始打开相机,调用这个方法会让相机重新进行创建 ");
currentCamera.openCamera(isapectRatio, getCameraRequest());
currentCamera.setCameraStateCallBack((ICameraStateCallBack) this);
}
}

/**
 * For now, hide the actual camera close and only implement closing the stream.
 */
protected void closeCamera() {
    if (getCurrentCamera() != null) {
        getCurrentCamera().closeCamera();
    }

}

public void startTakeStream(Surface surface) {
    LogUtils.d("startTakeStream method is Call ");
    try {
        MultiCameraClient.ICamera currentCamera = getCurrentCamera();
        if (currentCamera != null) {
            if (!(currentCamera instanceof CameraUVC)) {
                return;
            }
            ((CameraUVC) currentCamera).startCapture(surface);
        }
    } catch (Exception exception) {
        exception.printStackTrace();
        LogUtils.e(" exception :  " + exception);
    }

}

public void stopTakeStream() {
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        if (!(currentCamera instanceof CameraUVC)) {
            return;
        }
        currentCamera.stopCache();
        ((CameraUVC) currentCamera).stopCapture();
    }

}

public void surfaceSizeChanged(int surfaceWidth, int surfaceHeight) {
    // Guard against getCurrentCamera() returning null
    MultiCameraClient.ICamera currentCamera = getCurrentCamera();
    if (currentCamera != null) {
        currentCamera.setRenderSize(surfaceWidth, surfaceHeight);
    }
}


protected CameraRequest getCameraRequest() {
    return new CameraRequest.Builder()
            .setPreviewWidth(720)
            // .setPreviewWidth(640)
            .setPreviewHeight(480)
            .setRenderMode(CameraRequest.RenderMode.OPENGL)
            .setDefaultRotateType(RotateType.ANGLE_0)
            .setAudioSource(CameraRequest.AudioSource.SOURCE_SYS_MIC)
            .setPreviewFormat(CameraRequest.PreviewFormat.FORMAT_MJPEG)
            .setAspectRatioShow(true)
            .setCaptureRawImage(false)
            .setRawPreviewData(false)
            .create();
}

private String TAG = "CameraView";

@Override
public void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    // Release held resources here

}

@Override
public void onCameraState(@NonNull MultiCameraClient.ICamera self, @NonNull State code, @Nullable String msg) {

}

/**
 * When the view becomes visible, show either the loading view or the preview/settings view.
 */
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    Runtime rt = Runtime.getRuntime();
    long maxMemory = rt.maxMemory();
    long totalMemory = rt.totalMemory();
    long freeMemory = rt.freeMemory();
    LogUtils.d("maxMemory:" + maxMemory / (1024 * 1024));
    LogUtils.d("totalMemory:" + totalMemory / (1024 * 1024));
    LogUtils.d("freeMemory:" + freeMemory / (1024 * 1024));
    LogUtils.d("show  : " + SHOW_TYPE + "view ");
    if (SHOW_TYPE.equals(Canstant.LOADING_VIEW)) { // loading view
        showCameraSettingOrLoading(1);
        // 1. Turn on the lamp
        if (GlobalDate.DEVICE)
            LampSerial.getInstance().myResume();
        pollOutNum = 0;
        activity.iv_back.setVisibility(View.INVISIBLE);
        activity.iv_back.setOnClickListener(v -> {
            activity.checkView(HOME_NUM, HOME_MAIN_NUM, 0);
            istakePicture = true;
            if (GlobalDate.DEVICE)
                LampSerial.getInstance().myPause();
        });
        btn_load_cancel.setOnClickListener(v -> {
            activity.checkView(HOME_NUM, HOME_MAIN_NUM, 0);
            istakePicture = true;
            if (GlobalDate.DEVICE)
                LampSerial.getInstance().myPause();
        });
        loadingViewListenter();
        g_camera_setting = SharedPreferencesUtils.getCameraSetting();
        LogUtils.e("跳转到  相机加载界面");

        // setProgress();
        // todo: configure the settings here
    } else { // camera preview / settings view
        showCameraSettingOrLoading(0);
        cameraSettingViewListenter();
        if (test) {
            new Thread() {
                @Override
                public void run() {
                    super.run();
                    SystemClock.sleep(5000);
                    LogUtils.e("switching to the camera settings view");
                    activity.runOnUiThread(() -> {
                        GlobalDate.go_or_back = false;
                        // todo: temporary 5 s wait, may be removed later
                        // todo: jump to the sample input view
                        // return to the previous view
                        // activity.checkView(HOME_NUM, HOME_MAIN_NUM, INDEX_JIANCEVIEW);
                        // activity.checkView(HOME_NUM, HOME_MAIN_NUM, 0);
                        // activity.checkView(HOME_NUM, HOME_MAIN_NUM, 3);
                        activity.checkView(HOME_NUM, HOME_ADMIN_NUM, 3);
                        istakePicture = true;
                    });
                }
            }.start();
        }
    }


}

public void showCameraSettingOrLoading(int num) {
    int showHigh = num == 0 ? 300 : 1;
    ll_layout_camera_setting.setVisibility(num == 0 ? View.VISIBLE : View.GONE);
    ll_layout_loading.setVisibility(num == 0 ? View.GONE : View.VISIBLE);
    getDefultCameraBoxHight(showHigh);
    LogUtils.i("show box high is   " + showHigh);
}

WindowManager windowManager;

protected void getDefultCameraBoxHight(int itemHigh) {
    pubSpace = itemHigh;
    windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
    int width = windowManager.getDefaultDisplay().getWidth();
    // Use the normal layout
    LinearLayout.LayoutParams layoutParams1 = new LinearLayout.LayoutParams(width, Utils.INSTANCE.dp2px(com.utils.Utils.getApp(), pubSpace));
    asp_artv.setLayoutParams(layoutParams1);

// asp_artv.setBackgroundColor(ContextCompat.getColor(mContext,R.color.red));
}
}

@makeking
Author

"The above content represents the implementation approach of my code."
