Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: Split CameraSession into multiple files #2957

Merged
merged 2 commits into from
Jun 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,332 @@
package com.mrousavy.camera.core

import android.annotation.SuppressLint
import android.util.Log
import android.util.Range
import android.util.Size
import androidx.annotation.OptIn
import androidx.camera.core.CameraSelector
import androidx.camera.core.CameraState
import androidx.camera.core.DynamicRange
import androidx.camera.core.ExperimentalGetImage
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageCapture
import androidx.camera.core.MirrorMode
import androidx.camera.core.Preview
import androidx.camera.core.TorchState
import androidx.camera.core.resolutionselector.ResolutionSelector
import androidx.camera.extensions.ExtensionMode
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.video.Recorder
import androidx.camera.video.VideoCapture
import androidx.lifecycle.Lifecycle
import com.mrousavy.camera.core.extensions.byId
import com.mrousavy.camera.core.extensions.forSize
import com.mrousavy.camera.core.extensions.id
import com.mrousavy.camera.core.extensions.isSDR
import com.mrousavy.camera.core.extensions.setTargetFrameRate
import com.mrousavy.camera.core.extensions.toCameraError
import com.mrousavy.camera.core.extensions.withExtension
import com.mrousavy.camera.core.types.CameraDeviceFormat
import com.mrousavy.camera.core.types.Torch
import com.mrousavy.camera.core.types.VideoStabilizationMode
import kotlin.math.roundToInt

/**
 * Computes the target frame-rate [Range] for the given [configuration].
 *
 * Returns `null` when no explicit FPS was configured. When low-light boost is
 * enabled the lower bound is halved so the sensor may drop frames to gather
 * more light; otherwise the range is fixed at the exact configured FPS.
 */
internal fun getTargetFpsRange(configuration: CameraConfiguration): Range<Int>? {
  val fps = configuration.fps ?: return null
  // Low-light boost allows the frame-rate to drop down to half of the target FPS.
  val lowerBound = if (configuration.enableLowLightBoost) fps / 2 else fps
  return Range(lowerBound, fps)
}

/**
 * Asserts that [format] is non-null and satisfies the given [requirement].
 *
 * @param propName Name of the Camera prop that imposes this requirement (used in the error).
 * @param format The currently selected format, or `null` if none was set.
 * @param throwIfNotMet The error thrown when [requirement] evaluates to `false`.
 * @param requirement Predicate checking whether [format] supports the requested feature.
 * @throws PropRequiresFormatToBeNonNullError if [format] is `null`.
 */
internal fun assertFormatRequirement(
  propName: String,
  format: CameraDeviceFormat?,
  throwIfNotMet: CameraError,
  requirement: (format: CameraDeviceFormat) -> Boolean
) {
  // Without a format there is nothing to validate against - the prop demands one.
  if (format == null) throw PropRequiresFormatToBeNonNullError(propName)
  // Delegate the capability check to the caller-supplied predicate.
  if (!requirement(format)) throw throwIfNotMet
}

/**
 * Builds (or rebuilds) all CameraX use-case outputs (Preview, ImageCapture, VideoCapture,
 * Frame Processor ImageAnalysis, Code Scanner ImageAnalysis) for the given [configuration]
 * and stores them on this [CameraSession].
 *
 * Each output is only created when its section of [configuration] is enabled; otherwise the
 * corresponding property is reset to `null`. Format/FPS/stabilization requirements are
 * validated up-front via [assertFormatRequirement] so misconfigurations fail fast with a
 * descriptive [CameraError] instead of a CameraX binding failure.
 *
 * Note: this only creates the use-cases - binding them to the camera happens later in
 * `configureCamera`.
 */
@OptIn(ExperimentalGetImage::class)
@SuppressLint("RestrictedApi")
@Suppress("LiftReturnOrAssignment")
internal fun CameraSession.configureOutputs(configuration: CameraConfiguration) {
  Log.i(CameraSession.TAG, "Creating new Outputs for Camera #${configuration.cameraId}...")
  val fpsRange = getTargetFpsRange(configuration)
  val format = configuration.format

  Log.i(CameraSession.TAG, "Using FPS Range: $fpsRange")

  // 1. Preview
  val previewConfig = configuration.preview as? CameraConfiguration.Output.Enabled<CameraConfiguration.Preview>
  if (previewConfig != null) {
    Log.i(CameraSession.TAG, "Creating Preview output...")
    val preview = Preview.Builder().also { preview ->
      // Configure Preview Output
      // Preview stabilization only kicks in for CINEMATIC or stronger modes; the format must
      // explicitly list the requested mode as supported.
      if (configuration.videoStabilizationMode.isAtLeast(VideoStabilizationMode.CINEMATIC)) {
        assertFormatRequirement("videoStabilizationMode", format, InvalidVideoStabilizationMode(configuration.videoStabilizationMode)) {
          it.videoStabilizationModes.contains(configuration.videoStabilizationMode)
        }
        preview.setPreviewStabilizationEnabled(true)
      }
      // The requested FPS range must fall entirely within the format's [minFps, maxFps] window.
      if (fpsRange != null) {
        assertFormatRequirement("fps", format, InvalidFpsError(fpsRange.upper)) {
          fpsRange.lower >= it.minFps && fpsRange.upper <= it.maxFps
        }
        preview.setTargetFrameRate(fpsRange)
      }
    }.build()
    // Route preview frames into the surface supplied by the view layer.
    preview.setSurfaceProvider(previewConfig.config.surfaceProvider)
    previewOutput = preview
  } else {
    previewOutput = null
  }

  // 2. Image Capture
  val photoConfig = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo>
  if (photoConfig != null) {
    Log.i(CameraSession.TAG, "Creating Photo output...")
    val photo = ImageCapture.Builder().also { photo ->
      // Configure Photo Output
      photo.setCaptureMode(photoConfig.config.photoQualityBalance.toCaptureMode())
      if (format != null) {
        Log.i(CameraSession.TAG, "Photo size: ${format.photoSize}")
        // For stills, resolution takes priority over capture rate.
        val resolutionSelector = ResolutionSelector.Builder()
          .forSize(format.photoSize)
          .setAllowedResolutionMode(ResolutionSelector.PREFER_HIGHER_RESOLUTION_OVER_CAPTURE_RATE)
          .build()
        photo.setResolutionSelector(resolutionSelector)
      }
    }.build()
    photoOutput = photo
  } else {
    photoOutput = null
  }

  // 3. Video Capture
  val videoConfig = configuration.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video>
  if (videoConfig != null) {
    Log.i(CameraSession.TAG, "Creating Video output...")
    val currentRecorder = recorderOutput
    val recorder = if (recording != null && currentRecorder != null) {
      // If we are currently recording, then don't re-create the recorder instance.
      // Instead, re-use it so we don't cancel the active recording.
      Log.i(CameraSession.TAG, "Re-using active Recorder because we are currently recording...")
      currentRecorder
    } else {
      // We are currently not recording, so we can re-create a recorder instance if needed.
      Log.i(CameraSession.TAG, "Creating new Recorder...")
      Recorder.Builder().also { recorder ->
        configuration.format?.let { format ->
          recorder.setQualitySelector(format.videoQualitySelector)
        }
        // TODO: Make videoBitRate a Camera Prop
        // video.setTargetVideoEncodingBitRate()
      }.build()
    }

    val video = VideoCapture.Builder(recorder).also { video ->
      // Configure Video Output
      // Mirror recordings from the front camera so they match what the user sees in preview.
      video.setMirrorMode(MirrorMode.MIRROR_MODE_ON_FRONT_ONLY)
      // Video stabilization is available from STANDARD upwards (preview needs CINEMATIC+ above).
      if (configuration.videoStabilizationMode.isAtLeast(VideoStabilizationMode.STANDARD)) {
        assertFormatRequirement("videoStabilizationMode", format, InvalidVideoStabilizationMode(configuration.videoStabilizationMode)) {
          it.videoStabilizationModes.contains(configuration.videoStabilizationMode)
        }
        video.setVideoStabilizationEnabled(true)
      }
      if (fpsRange != null) {
        assertFormatRequirement("fps", format, InvalidFpsError(fpsRange.upper)) {
          fpsRange.lower >= it.minFps &&
            fpsRange.upper <= it.maxFps
        }
        video.setTargetFrameRate(fpsRange)
      }
      // 10-bit HDR video requires explicit format support.
      if (videoConfig.config.enableHdr) {
        assertFormatRequirement("videoHdr", format, InvalidVideoHdrError()) { it.supportsVideoHdr }
        video.setDynamicRange(DynamicRange.HDR_UNSPECIFIED_10_BIT)
      }
      if (format != null) {
        Log.i(CameraSession.TAG, "Video size: ${format.videoSize}")
        // For video, a steady capture rate matters more than maximum resolution.
        val resolutionSelector = ResolutionSelector.Builder()
          .forSize(format.videoSize)
          .setAllowedResolutionMode(ResolutionSelector.PREFER_CAPTURE_RATE_OVER_HIGHER_RESOLUTION)
          .build()
        video.setResolutionSelector(resolutionSelector)
      }
    }.build()
    videoOutput = video
    recorderOutput = recorder
  } else {
    videoOutput = null
    recorderOutput = null
  }

  // 4. Frame Processor
  val frameProcessorConfig = configuration.frameProcessor as? CameraConfiguration.Output.Enabled<CameraConfiguration.FrameProcessor>
  if (frameProcessorConfig != null) {
    val pixelFormat = frameProcessorConfig.config.pixelFormat
    Log.i(CameraSession.TAG, "Creating $pixelFormat Frame Processor output...")
    val analyzer = ImageAnalysis.Builder().also { analysis ->
      // Block the producer instead of dropping frames so the JS frame processor sees every frame.
      analysis.setBackpressureStrategy(ImageAnalysis.STRATEGY_BLOCK_PRODUCER)
      analysis.setOutputImageFormat(pixelFormat.toImageAnalysisFormat())
      if (fpsRange != null) {
        assertFormatRequirement("fps", format, InvalidFpsError(fpsRange.upper)) {
          fpsRange.lower >= it.minFps &&
            fpsRange.upper <= it.maxFps
        }
        analysis.setTargetFrameRate(fpsRange)
      }
      if (format != null) {
        Log.i(CameraSession.TAG, "Frame Processor size: ${format.videoSize}")
        val resolutionSelector = ResolutionSelector.Builder()
          .forSize(format.videoSize)
          .setAllowedResolutionMode(ResolutionSelector.PREFER_CAPTURE_RATE_OVER_HIGHER_RESOLUTION)
          .build()
        analysis.setResolutionSelector(resolutionSelector)
      }
    }.build()
    // Frames are delivered to the FrameProcessorPipeline on the dedicated video queue.
    val pipeline = FrameProcessorPipeline(callback)
    analyzer.setAnalyzer(CameraQueues.videoQueue.executor, pipeline)
    frameProcessorOutput = analyzer
  } else {
    frameProcessorOutput = null
  }

  // 5. Code Scanner
  val codeScannerConfig = configuration.codeScanner as? CameraConfiguration.Output.Enabled<CameraConfiguration.CodeScanner>
  if (codeScannerConfig != null) {
    Log.i(CameraSession.TAG, "Creating CodeScanner output...")
    val analyzer = ImageAnalysis.Builder().also { analysis ->
      // 1280x720 is hard-coded here - presumably a resolution/performance sweet spot for
      // barcode detection; confirm against the code-scanner library's recommendations.
      val targetSize = Size(1280, 720)
      val resolutionSelector = ResolutionSelector.Builder().forSize(targetSize).build()
      analysis.setResolutionSelector(resolutionSelector)
    }.build()
    val pipeline = CodeScannerPipeline(codeScannerConfig.config, callback)
    analyzer.setAnalyzer(CameraQueues.analyzerExecutor, pipeline)
    codeScannerOutput = analyzer
  } else {
    codeScannerOutput = null
  }
  Log.i(CameraSession.TAG, "Successfully created new Outputs for Camera #${configuration.cameraId}!")
}

/**
 * Binds the previously created use-case outputs to the actual camera device via CameraX.
 *
 * Selects the camera by ID, optionally wraps the selector in a vendor extension
 * (HDR photo / low-light "night" mode), unbinds any previously bound use-cases, then
 * rebinds everything to this session's lifecycle and starts observing camera-state events.
 *
 * @param provider The process-wide CameraX provider used for (un)binding.
 * @param configuration The resolved configuration; must contain a camera ID.
 * @throws NoOutputsError if no output use-case was created beforehand.
 * @throws NoCameraDeviceError if no camera ID is configured.
 * @throws PhotoHdrAndVideoHdrNotSupportedSimultaneously / LowLightBoostNotSupportedWithHdr
 *   when extensions conflict with an active 10-bit HDR stream.
 */
@SuppressLint("RestrictedApi")
internal suspend fun CameraSession.configureCamera(provider: ProcessCameraProvider, configuration: CameraConfiguration) {
  Log.i(CameraSession.TAG, "Binding Camera #${configuration.cameraId}...")
  checkCameraPermission()

  // Outputs
  // Collect every output created by configureOutputs; CameraX needs at least one use-case.
  val useCases = listOfNotNull(previewOutput, photoOutput, videoOutput, frameProcessorOutput, codeScannerOutput)
  if (useCases.isEmpty()) {
    throw NoOutputsError()
  }

  // Input
  val cameraId = configuration.cameraId ?: throw NoCameraDeviceError()
  var cameraSelector = CameraSelector.Builder().byId(cameraId).build()

  // Wrap input with a vendor extension if needed (see https://developer.android.com/media/camera/camera-extensions)
  val isStreamingHDR = useCases.any { !it.currentConfig.dynamicRange.isSDR }
  val needsImageAnalysis = codeScannerOutput != null || frameProcessorOutput != null
  val photoOptions = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo>
  val enableHdrExtension = photoOptions != null && photoOptions.config.enableHdr
  if (enableHdrExtension) {
    if (isStreamingHDR) {
      // extensions don't work if a camera stream is running at 10-bit HDR.
      throw PhotoHdrAndVideoHdrNotSupportedSimultaneously()
    }
    // Load HDR Vendor extension (HDR only applies to image capture)
    cameraSelector = cameraSelector.withExtension(context, provider, needsImageAnalysis, ExtensionMode.HDR, "HDR")
  }
  if (configuration.enableLowLightBoost) {
    if (isStreamingHDR) {
      // extensions don't work if a camera stream is running at 10-bit HDR.
      throw LowLightBoostNotSupportedWithHdr()
    }
    if (enableHdrExtension) {
      // low-light boost does not work when another HDR extension is already applied
      throw LowLightBoostNotSupportedWithHdr()
    }
    // Load night mode Vendor extension (only applies to image capture)
    cameraSelector = cameraSelector.withExtension(context, provider, needsImageAnalysis, ExtensionMode.NIGHT, "NIGHT")
  }

  // Unbind all currently bound use-cases before rebinding
  if (currentUseCases.isNotEmpty()) {
    Log.i(CameraSession.TAG, "Unbinding ${currentUseCases.size} use-cases for Camera #${camera?.cameraInfo?.id}...")
    provider.unbind(*currentUseCases.toTypedArray())
  }

  // Bind it all together (must be on UI Thread)
  camera = provider.bindToLifecycle(this, cameraSelector, *useCases.toTypedArray())

  // Update currentUseCases for next unbind
  currentUseCases = useCases

  // Listen to Camera events
  // lastState ensures onInitialized() fires only on the first OPENING -> OPEN transition,
  // not on every state emission.
  var lastState = CameraState.Type.OPENING
  camera!!.cameraInfo.cameraState.observe(this) { state ->
    Log.i(CameraSession.TAG, "Camera State: ${state.type} (has error: ${state.error != null})")

    if (state.type == CameraState.Type.OPEN && state.type != lastState) {
      // Camera has now been initialized!
      callback.onInitialized()
      lastState = state.type
    }

    val error = state.error
    if (error != null) {
      // A Camera error occurred!
      callback.onError(error.toCameraError())
    }
  }
  Log.i(CameraSession.TAG, "Successfully bound Camera #${configuration.cameraId}!")
}

/**
 * Applies "side" props (zoom, torch, exposure compensation) that can change without
 * rebuilding or rebinding any use-cases. Each value is only pushed to the camera
 * controls when it differs from the currently applied state.
 *
 * @throws CameraNotReadyError if no camera is bound yet.
 * @throws FlashUnavailableError if torch is requested on a device without a flash unit.
 */
internal fun CameraSession.configureSideProps(config: CameraConfiguration) {
  val camera = camera ?: throw CameraNotReadyError()

  // Zoom: only touch the control if the ratio actually changed.
  val appliedZoom = camera.cameraInfo.zoomState.value?.zoomRatio
  if (appliedZoom != config.zoom) {
    camera.cameraControl.setZoomRatio(config.zoom)
  }

  // Torch: diff current hardware state against the requested state.
  val torchIsOn = camera.cameraInfo.torchState.value == TorchState.ON
  val torchShouldBeOn = config.torch == Torch.ON
  if (torchIsOn != torchShouldBeOn) {
    if (torchShouldBeOn && !camera.cameraInfo.hasFlashUnit()) {
      throw FlashUnavailableError()
    }
    camera.cameraControl.enableTorch(torchShouldBeOn)
  }

  // Exposure: a missing exposure prop means "neutral" (index 0).
  val appliedExposure = camera.cameraInfo.exposureState.exposureCompensationIndex
  val targetExposure = config.exposure?.roundToInt() ?: 0
  if (appliedExposure != targetExposure) {
    camera.cameraControl.setExposureCompensationIndex(targetExposure)
  }
}

/**
 * Starts or stops the camera by driving this session's lifecycle registry.
 *
 * Both directions pass through STARTED first so lifecycle observers see each
 * intermediate transition: active goes STARTED -> RESUMED, inactive goes
 * STARTED -> CREATED.
 */
internal fun CameraSession.configureIsActive(config: CameraConfiguration) {
  val finalState = if (config.isActive) Lifecycle.State.RESUMED else Lifecycle.State.CREATED
  lifecycleRegistry.currentState = Lifecycle.State.STARTED
  lifecycleRegistry.currentState = finalState
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package com.mrousavy.camera.core

import android.annotation.SuppressLint
import android.util.Log
import androidx.camera.core.CameraControl
import androidx.camera.core.FocusMeteringAction
import androidx.camera.core.MeteringPoint
import com.mrousavy.camera.core.extensions.await

/**
 * Runs a one-shot focus-and-metering action at the given [meteringPoint] and
 * suspends until the camera reports completion.
 *
 * @throws CameraNotReadyError if no camera is bound yet.
 * @throws FocusNotSupportedError if the device cannot focus/meter at this point.
 * @throws FocusCanceledError if the operation was canceled (e.g. superseded by a newer request).
 */
@SuppressLint("RestrictedApi")
suspend fun CameraSession.focus(meteringPoint: MeteringPoint) {
  val camera = camera ?: throw CameraNotReadyError()

  val action = FocusMeteringAction.Builder(meteringPoint).build()
  if (!camera.cameraInfo.isFocusMeteringSupported(action)) {
    throw FocusNotSupportedError()
  }

  try {
    Log.i(CameraSession.TAG, "Focusing to ${action.meteringPointsAf.joinToString { "(${it.x}, ${it.y})" }}...")
    // Suspend on the ListenableFuture until CameraX finishes the metering sequence.
    val focusResult = camera.cameraControl
      .startFocusAndMetering(action)
      .await(CameraQueues.cameraExecutor)
    val message = if (focusResult.isFocusSuccessful) "Focused successfully!" else "Focus failed."
    Log.i(CameraSession.TAG, message)
  } catch (e: CameraControl.OperationCanceledException) {
    // Map CameraX's cancellation into this library's error type.
    throw FocusCanceledError()
  }
}
Loading