//
//  MainController.swift
//

import UIKit
import AVFoundation
import Vision
import VisionLab

final class ViewController: UIViewController {
  /// Text view to display results of classification
  private lazy var textView: UITextView = self.makeTextView()

  /// Array of vision requests
  private var requests = [VNRequest]()

  /// Service used to capture video output from the camera
  private lazy var videoCaptureService: VideoCaptureService = .init()

  override var preferredStatusBarStyle: UIStatusBarStyle {
    return .lightContent
  }

  // MARK: - View lifecycle

  override func viewDidLoad() {
    super.viewDidLoad()
    view.backgroundColor = .black
    view.layer.addSublayer(videoCaptureService.previewLayer)
    view.addSubview(textView)
    setupConstraints()
    videoCaptureService.delegate = self
    videoCaptureService.startCapturing()
    setupVision()
  }

  // MARK: - Subviews

  private func makeTextView() -> UITextView {
    let textView = UITextView()
    textView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
    textView.textContainerInset = UIEdgeInsets(top: 20, left: 10, bottom: 20, right: 10)
    textView.font = .systemFont(ofSize: 18)
    textView.textColor = .white
    textView.textAlignment = .center
    textView.isSelectable = false
    textView.isScrollEnabled = false
    textView.text = "Point 📷 at objects around you"
    return textView
  }

  // MARK: - Layout

  override func viewWillLayoutSubviews() {
    super.viewWillLayoutSubviews()
    videoCaptureService.previewLayer.frame = view.layer.bounds
  }

  private func setupConstraints() {
    textView.translatesAutoresizingMaskIntoConstraints = false
    textView.leadingAnchor.constraint(equalTo: view.leadingAnchor).isActive = true
    textView.trailingAnchor.constraint(equalTo: view.trailingAnchor).isActive = true
    textView.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
  }
}

// MARK: - Vision

private extension ViewController {
  /// Create CoreML model and classification request
  func setupVision() {
    do {
      let model = try VNCoreMLModel(for: Inceptionv3().model)
      let classificationRequest = VNCoreMLRequest(model: model, completionHandler: handleClassifications)
      classificationRequest.imageCropAndScaleOption = .centerCrop
      requests = [classificationRequest]
    } catch {
      assertionFailure("Can't load Vision ML model: \(error)")
    }
  }

  /// Handle results of the classification request
  func handleClassifications(request: VNRequest, error: Error?) {
    var text = "No results"
    if let observations = request.results as? [VNClassificationObservation] {
      // Keep the top 3 observations and drop low-confidence ones.
      // `prefix(3)` is safe when fewer than 3 results come back,
      // unlike `prefix(upTo:)`, which traps on a short collection.
      text = observations.prefix(3)
        .filter({ $0.confidence > 0.1 })
        .map({ "\($0.identifier): \($0.confidence.roundTo(places: 3) * 100.0)%" })
        .joined(separator: "\n")
    }

    DispatchQueue.main.async {
      self.textView.text = text
      self.textView.sizeToFit()
      self.textView.frame.size.width = self.view.bounds.width
    }
  }
}
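
// `roundTo(places:)` above comes from the VisionLab dependency. A minimal
// equivalent, assuming it simply rounds a Float to the given number of
// decimal places (illustrative sketch, not VisionLab's actual source; it is
// left commented out so it can't collide with VisionLab's own extension):
//
//   extension Float {
//     func roundTo(places: Int) -> Float {
//       let divisor = pow(10, Float(places))
//       return (self * divisor).rounded() / divisor
//     }
//   }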

// MARK: - VideoCaptureServiceDelegate

extension ViewController: VideoCaptureServiceDelegate {
  func videoCaptureService(_ service: VideoCaptureService,
                           didOutput sampleBuffer: CMSampleBuffer,
                           pixelBuffer: CVPixelBuffer) {
    // Pass the camera intrinsics along to Vision when the device provides them.
    var requestOptions = [VNImageOption: Any]()
    if let cameraIntrinsicData = CMGetAttachment(sampleBuffer,
                                                 key: kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix,
                                                 attachmentModeOut: nil) {
      requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
    }

    do {
      let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: requestOptions)
      try imageRequestHandler.perform(requests)
    } catch {
      print(error)
    }
  }

  func videoCaptureService(_ service: VideoCaptureService, didFailWithError error: Error) {
    print(error)
  }
}
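
// MARK: - VideoCaptureService (reference sketch)

// The controller above only assumes that `VideoCaptureService` exposes a
// `previewLayer`, a `delegate`, and `startCapturing()`, and that it reports
// frames through `VideoCaptureServiceDelegate`. Below is a minimal sketch of
// such a service, inferred from those call sites; the project's actual
// implementation supersedes it, so remove this sketch if the real type is
// already in the target.
protocol VideoCaptureServiceDelegate: AnyObject {
  func videoCaptureService(_ service: VideoCaptureService,
                           didOutput sampleBuffer: CMSampleBuffer,
                           pixelBuffer: CVPixelBuffer)
  func videoCaptureService(_ service: VideoCaptureService, didFailWithError error: Error)
}

final class VideoCaptureService: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
  weak var delegate: VideoCaptureServiceDelegate?

  /// Layer that renders the camera feed; the controller adds it to its view hierarchy.
  private(set) lazy var previewLayer: AVCaptureVideoPreviewLayer = {
    let layer = AVCaptureVideoPreviewLayer(session: session)
    layer.videoGravity = .resizeAspectFill
    return layer
  }()

  private let session = AVCaptureSession()
  private let outputQueue = DispatchQueue(label: "video-capture-output")

  /// Configure the session with the default camera and start streaming frames.
  func startCapturing() {
    do {
      guard let camera = AVCaptureDevice.default(for: .video) else { return }
      let input = try AVCaptureDeviceInput(device: camera)
      let output = AVCaptureVideoDataOutput()
      output.setSampleBufferDelegate(self, queue: outputQueue)
      if session.canAddInput(input) { session.addInput(input) }
      if session.canAddOutput(output) { session.addOutput(output) }
      session.startRunning()
    } catch {
      delegate?.videoCaptureService(self, didFailWithError: error)
    }
  }

  /// Forward each captured frame, plus its pixel buffer, to the delegate.
  func captureOutput(_ output: AVCaptureOutput,
                     didOutput sampleBuffer: CMSampleBuffer,
                     from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    delegate?.videoCaptureService(self, didOutput: sampleBuffer, pixelBuffer: pixelBuffer)
  }
}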