-
Notifications
You must be signed in to change notification settings - Fork 19
/
Capture.swift
212 lines (180 loc) · 7.4 KB
/
Capture.swift
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
//
// Capture.swift
// FilterCam
//
// Copyright © 2018 hajime-nakamura. All rights reserved.
//
import AVFoundation
import Foundation
/// Receives lifecycle and failure callbacks from `Capture`.
///
/// FIX: `: class` is deprecated protocol syntax; `AnyObject` is the modern
/// equivalent and is required for the `weak var delegate` reference in `Capture`.
protocol CaptureDelegate: AnyObject {
    /// Called synchronously on the thread that invoked `start(_:)`, before
    /// any session setup happens.
    func captureWillStart()
    /// Called on the main queue once the session is running.
    func captureDidStart()
    /// Called synchronously on the thread that invoked `stop()`.
    func captureWillStop()
    /// Called synchronously after the session has been torn down.
    func captureDidStop()
    /// Reports a setup/configuration failure.
    /// NOTE(review): may be invoked from the capture queue (see `start(_:)`) —
    /// implementers should not assume the main thread.
    func captureDidFail(with error: CaptureError)
}
/// Owns an `AVCaptureSession` that delivers raw video (BGRA) and audio sample
/// buffers to externally supplied delegates, for live-filter style pipelines.
final class Capture {

    weak var delegate: CaptureDelegate?
    weak var videoDataOutputSampleBufferDelegate: AVCaptureVideoDataOutputSampleBufferDelegate?
    weak var audioDataOutputSampleBufferDelegate: AVCaptureAudioDataOutputSampleBufferDelegate?

    /// Whether the active video device has a torch (flash LED).
    var hasTorch: Bool {
        return videoDevice.hasTorch
    }

    /// Torch brightness. Values above 0.1 turn the torch on at that level;
    /// anything at or below 0.1 turns it off. Best-effort: failures are ignored.
    var torchLevel: Float = 0 {
        didSet {
            guard hasTorch, videoDevice.isTorchAvailable else { return }
            do {
                try videoDevice.lockForConfiguration()
                // FIX: original called unlockForConfiguration() even when the
                // `try?` lock had failed; `defer` only runs after a successful lock.
                defer { videoDevice.unlockForConfiguration() }
                if torchLevel > 0.1 {
                    try videoDevice.setTorchModeOn(level: torchLevel)
                } else {
                    videoDevice.torchMode = .off
                }
            } catch {
                // Torch control is best-effort; ignore configuration failures.
            }
        }
    }

    /// Serial queue for session setup and sample-buffer delivery.
    /// NOTE(review): label typo ("caputre") kept byte-identical — it is a
    /// runtime string that may appear in debugging tools.
    let queue = DispatchQueue(label: "caputre_session_queue")

    /// The running session, or nil when stopped / not yet started.
    private(set) var session: AVCaptureSession?
    private var audioDevice: AVCaptureDevice!
    private var videoDevice: AVCaptureDevice!

    /// Freshly created input for `videoDevice`; reports failure via the delegate.
    private var videoDeviceInput: AVCaptureDeviceInput? {
        do {
            return try AVCaptureDeviceInput(device: videoDevice)
        } catch {
            delegate?.captureDidFail(with: .couldNotObtainVideoDeviceInput(error))
            return nil
        }
    }

    /// Freshly created input for `audioDevice`; reports failure via the delegate.
    private var audioDeviceInput: AVCaptureDeviceInput? {
        do {
            return try AVCaptureDeviceInput(device: audioDevice)
        } catch {
            delegate?.captureDidFail(with: .couldNotObtainAudioDeviceInput(error))
            return nil
        }
    }

    /// Creates and configures a video data output delivering BGRA frames.
    private var videoDataOutput: AVCaptureVideoDataOutput {
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            // CoreImage wants BGRA pixel format
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        ]
        output.alwaysDiscardsLateVideoFrames = true
        output.setSampleBufferDelegate(videoDataOutputSampleBufferDelegate, queue: queue)
        return output
    }

    /// Creates and configures an audio data output.
    private var audioDataOutput: AVCaptureAudioDataOutput {
        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(audioDataOutputSampleBufferDelegate, queue: queue)
        return output
    }

    /// Activates the shared audio session, resolves the capture devices for
    /// `devicePosition`, validates `preset`, and — when everything is present —
    /// starts the session. Failures are reported through `delegate`.
    init(devicePosition: AVCaptureDevice.Position,
         preset: AVCaptureSession.Preset,
         delegate: CaptureDelegate,
         videoDataOutputSampleBufferDelegate: AVCaptureVideoDataOutputSampleBufferDelegate,
         audioDataOutputSampleBufferDelegate: AVCaptureAudioDataOutputSampleBufferDelegate) {
        self.delegate = delegate
        self.videoDataOutputSampleBufferDelegate = videoDataOutputSampleBufferDelegate
        self.audioDataOutputSampleBufferDelegate = audioDataOutputSampleBufferDelegate
        do {
            try AVAudioSession.sharedInstance().setActive(false)
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .videoRecording, options: [.mixWithOthers, .defaultToSpeaker, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            NSLog("Failed to set background audio preference")
        }
        // check the availability of video and audio devices
        // create and start the capture session only if the devices are present
        do {
            #if targetEnvironment(simulator)
            NSLog("On iPhone Simulator, the app still gets a video device, but the video device will not work")
            NSLog("On iPad Simulator, the app gets no video device")
            #endif
            // get the input device and also validate the settings
            if let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: devicePosition) {
                // obtain the preset and validate the preset
                if videoDevice.supportsSessionPreset(preset) {
                    self.videoDevice = videoDevice
                    // find the audio device
                    if let audioDevice = AVCaptureDevice.default(.builtInMicrophone, for: .audio, position: .unspecified) {
                        self.audioDevice = audioDevice
                        start(preset)
                    }
                    // NOTE(review): a missing microphone is silently ignored here
                    // (no delegate callback) — preserved from the original; confirm
                    // whether a dedicated CaptureError case should exist.
                } else {
                    delegate.captureDidFail(with: .presetNotSupportedByVideoDevice(preset))
                }
            } else {
                delegate.captureDidFail(with: .couldNotGetVideoDevice)
            }
        }
    }

    /// Builds and starts the capture session asynchronously on `queue`.
    /// No-op if a session already exists. `captureDidStart()` fires on the main
    /// queue; failure callbacks fire on `queue`.
    func start(_ preset: AVCaptureSession.Preset) {
        if session != nil {
            return
        }
        delegate?.captureWillStart()
        // FIX: original captured `[unowned self]`, which crashes if this Capture
        // is deallocated before the async block runs; `weak` + early return is safe.
        queue.async { [weak self] in
            guard let self = self else { return }
            // obtain device inputs (delegate is notified on failure)
            guard let videoDeviceInput = self.videoDeviceInput else { return }
            guard let audioDeviceInput = self.audioDeviceInput else { return }
            // create the capture session
            let session = AVCaptureSession()
            session.sessionPreset = preset
            session.automaticallyConfiguresApplicationAudioSession = false
            self.session = session
            // obtain data outputs
            let videoDataOutput = self.videoDataOutput
            let audioDataOutput = self.audioDataOutput
            guard session.canAddOutput(videoDataOutput) else {
                self.delegate?.captureDidFail(with: .couldNotAddVideoDataOutput)
                self.session = nil
                return
            }
            guard session.canAddOutput(audioDataOutput) else {
                self.delegate?.captureDidFail(with: .couldNotAddAudioDataOutput)
                self.session = nil
                return
            }
            // FIX: original never validated inputs; addInput(_:) raises an
            // NSException when the input cannot be attached. No matching
            // CaptureError case is visible in this file, so we bail quietly.
            guard session.canAddInput(videoDeviceInput),
                  session.canAddInput(audioDeviceInput) else {
                self.session = nil
                return
            }
            // batch the topology changes into one configuration transaction
            session.beginConfiguration()
            session.addInput(videoDeviceInput)
            session.addOutput(videoDataOutput)
            session.addInput(audioDeviceInput)
            session.addOutput(audioDataOutput)
            session.commitConfiguration()
            session.startRunning()
            DispatchQueue.main.async {
                self.delegate?.captureDidStart()
            }
        }
    }

    /// Stops the running session (synchronously, on the caller's thread) and
    /// releases it. No-op when there is no running session.
    func stop() {
        guard let session = session else { return }
        if !session.isRunning { return }
        delegate?.captureWillStop()
        // NOTE(review): stopRunning() blocks the calling thread; the async block
        // below only fences `queue` so the log prints after in-flight work drains.
        session.stopRunning()
        queue.async {
            NSLog("waiting for capture session to end")
        }
        self.session = nil
        delegate?.captureDidStop()
    }

    /// Points focus and exposure at `point` (device coordinates, 0...1).
    /// Best-effort: lock failures are ignored.
    func focus(at point: CGPoint) {
        do {
            try videoDevice.lockForConfiguration()
            // FIX: defer guarantees the lock is released on every exit path
            // (original would leak the lock if a mid-block exception occurred).
            defer { videoDevice.unlockForConfiguration() }
            if videoDevice.isFocusPointOfInterestSupported {
                videoDevice.focusPointOfInterest = point
                videoDevice.focusMode = .autoFocus
            }
            // FIX: original set exposurePointOfInterest unconditionally;
            // doing so on a device without support raises NSInvalidArgumentException.
            if videoDevice.isExposurePointOfInterestSupported {
                videoDevice.exposurePointOfInterest = point
                videoDevice.exposureMode = .continuousAutoExposure
            }
        } catch {
            // just ignore — focus is best-effort
        }
    }
}