Skip to content

Commit

Permalink
Merge pull request #2 from dokun1/swift4update
Browse files Browse the repository at this point in the history
Swift4update
  • Loading branch information
dokun1 committed Aug 18, 2017
2 parents 182e23d + 090e64d commit 49748ac
Show file tree
Hide file tree
Showing 7 changed files with 86 additions and 43 deletions.
29 changes: 23 additions & 6 deletions Lumina/Lumina.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -167,18 +167,19 @@
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0820;
LastUpgradeCheck = 0820;
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = "David Okun";
TargetAttributes = {
53B828CF1EAAA07F00E3A624 = {
CreatedOnToolsVersion = 8.2.1;
DevelopmentTeam = RQ632LL2X3;
LastSwiftMigration = 0820;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
53B828D81EAAA07F00E3A624 = {
CreatedOnToolsVersion = 8.2.1;
DevelopmentTeam = RQ632LL2X3;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
};
Expand Down Expand Up @@ -258,15 +259,21 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
Expand Down Expand Up @@ -311,15 +318,21 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
Expand Down Expand Up @@ -366,7 +379,8 @@
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
Expand All @@ -387,7 +401,8 @@
PRODUCT_BUNDLE_IDENTIFIER = com.okun.io.Lumina;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
Expand All @@ -400,7 +415,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.okun.io.LuminaTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
Expand All @@ -413,7 +429,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.okun.io.LuminaTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0830"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
Expand All @@ -26,6 +26,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
<TestableReference
Expand Down Expand Up @@ -55,6 +56,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
Expand Down
2 changes: 1 addition & 1 deletion Lumina/Lumina/Info.plist
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>0.4.0</string>
<string>0.4.1</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
<key>NSPrincipalClass</key>
Expand Down
74 changes: 42 additions & 32 deletions Lumina/Lumina/LuminaController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public final class LuminaController: UIViewController {
/// Lazily-built video data output used for per-frame image tracking.
/// Each access creates a fresh `AVCaptureVideoDataOutput` configured to
/// drop late frames and deliver 32BGRA pixel buffers to `self` on
/// `videoBufferQueue`.
fileprivate var videoOutput: AVCaptureVideoDataOutput {
    let videoOutput = AVCaptureVideoDataOutput()
    // Dropping late frames keeps the preview responsive; tracking does not
    // need every frame.
    videoOutput.alwaysDiscardsLateVideoFrames = true
    // kCVPixelBufferPixelFormatTypeKey is a CFString, which bridges directly
    // to String — no force cast (`as AnyHashable as! String`) is needed.
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    videoOutput.setSampleBufferDelegate(self, queue: videoBufferQueue)
    return videoOutput
}
Expand Down Expand Up @@ -68,14 +68,15 @@ public final class LuminaController: UIViewController {
return true
}

/// Builds a discovery session covering every camera type this library
/// supports: the wide-angle camera always, plus the dual and telephoto
/// cameras on iOS 10.2 or later.
private var discoverySession: AVCaptureDevice.DiscoverySession? {
    var deviceTypes: [AVCaptureDevice.DeviceType] = [.builtInWideAngleCamera]
    if #available(iOS 10.2, *) {
        deviceTypes.append(contentsOf: [.builtInDualCamera, .builtInTelephotoCamera])
    }
    // Position is unspecified so callers can filter front/back themselves.
    return AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes,
                                            mediaType: .video,
                                            position: .unspecified)
}

Expand All @@ -87,12 +88,12 @@ public final class LuminaController: UIViewController {
}
for discoveryDevice: AVCaptureDevice in discoverySession.devices {
if cameraDirection == .front {
if discoveryDevice.position == AVCaptureDevicePosition.front {
if discoveryDevice.position == AVCaptureDevice.Position.front {
device = discoveryDevice
break
}
} else {
if discoveryDevice.position == AVCaptureDevicePosition.back { // TODO: add support for iPhone 7 plus dual cameras
if discoveryDevice.position == AVCaptureDevice.Position.back { // TODO: add support for iPhone 7 plus dual cameras
device = discoveryDevice
break
}
Expand All @@ -104,19 +105,20 @@ public final class LuminaController: UIViewController {
public init?(camera: CameraDirection) {
super.init(nibName: nil, bundle: nil)

self.session = AVCaptureSession()
let session = AVCaptureSession()
self.previewLayer = AVCaptureVideoPreviewLayer(session: session)
self.previewView = self.view

guard let previewLayer = self.previewLayer else {
print("Could not access image preview layer")
return
}
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.view.layer.addSublayer(previewLayer)
self.view.bounds = UIScreen.main.bounds

previewLayer.frame = self.view.bounds
self.session = session
commitSession(for: camera)
createUI()
createTextPromptView()
Expand All @@ -129,25 +131,29 @@ public final class LuminaController: UIViewController {
}
self.currentCameraDirection = desiredCameraDirection

session.sessionPreset = AVCaptureSessionPresetHigh
session.sessionPreset = AVCaptureSession.Preset.high

if let input = self.input {
session.removeInput(input)
}

do {
try self.input = AVCaptureDeviceInput(device: getDevice(for: desiredCameraDirection))
guard let device = getDevice(for: desiredCameraDirection) else {
print("could not get desired camera direction")
return
}
try input = AVCaptureDeviceInput(device: device)
if session.canAddInput(input!) {
session.addInput(input!)
self.input = input!
}
} catch {
print("Error getting device input for \(desiredCameraDirection.rawValue)")
return
}

let metadataOutput = AVCaptureMetadataOutput()

if session.canAddInput(self.input) {
session.addInput(self.input)
}

let videoOutput = self.videoOutput

if session.canAddOutput(videoOutput) {
Expand All @@ -165,7 +171,7 @@ public final class LuminaController: UIViewController {
session.commitConfiguration()
session.startRunning()

if let connection = videoOutput.connection(withMediaType: AVMediaTypeVideo) {
if let connection = videoOutput.connection(with: AVMediaType.video) {
connection.isEnabled = true
if connection.isVideoMirroringSupported && desiredCameraDirection == .front {
connection.isVideoMirrored = true
Expand Down Expand Up @@ -209,6 +215,8 @@ public final class LuminaController: UIViewController {
cameraCancelButton.addTarget(self, action: #selector(cameraCancelButtonTapped), for: UIControlEvents.touchUpInside)
self.view.addSubview(cameraCancelButton)



let cameraTorchButton = UIButton(frame: CGRect(origin: CGPoint(x: self.view.frame.minX + 10, y: self.view.frame.minY + 10), size: CGSize(width: 40, height: 40)))
cameraTorchButton.backgroundColor = UIColor.clear
cameraTorchButton.addTarget(self, action: #selector(cameraTorchButtonTapped), for: UIControlEvents.touchUpInside)
Expand Down Expand Up @@ -298,7 +306,7 @@ private extension LuminaController { //MARK: Button Tap Methods
do {
if input.device.isTorchModeSupported(.on) {
try input.device.lockForConfiguration()
try input.device.setTorchModeOnWithLevel(1.0)
try input.device.setTorchModeOn(level: 1.0)
self.torchOn = !self.torchOn
input.device.unlockForConfiguration()
}
Expand Down Expand Up @@ -380,18 +388,19 @@ private extension CMSampleBuffer { // MARK: Extending CMSampleBuffer
}

extension LuminaController: AVCaptureVideoDataOutputSampleBufferDelegate { // MARK: Image Tracking Output
public func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard case self.trackImages = true else {
return
}
guard let delegate = self.delegate else {
print("Warning!! No delegate set, but image tracking turned on")
return
}
guard let sampleBuffer = sampleBuffer else {
print("No sample buffer detected")
return
}

// guard let sampleBuffer = sampleBuffer else {
// print("No sample buffer detected")
// return
// }
let startTime = Date()
var sample: CGImage? = nil
if self.improvedImageDetectionPerformance {
Expand Down Expand Up @@ -490,7 +499,7 @@ extension LuminaController { // MARK: Tap to focus methods
}

extension LuminaController: AVCaptureMetadataOutputObjectsDelegate { // MARK: Metadata output buffer
public func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
public func metadataOutput(_ captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
guard case self.trackMetadata = true else {
return
}
Expand All @@ -507,7 +516,7 @@ extension LuminaController: AVCaptureMetadataOutputObjectsDelegate { // MARK: Me
guard let firstObject = metadataObjects.first else {
return
}
if let _: AVMetadataMachineReadableCodeObject = previewLayer.transformedMetadataObject(for: firstObject as! AVMetadataObject) as? AVMetadataMachineReadableCodeObject { // TODO: Figure out exactly why Faces and Barcodes fire this method separately
if let _: AVMetadataMachineReadableCodeObject = previewLayer.transformedMetadataObject(for: firstObject ) as? AVMetadataMachineReadableCodeObject { // TODO: Figure out exactly why Faces and Barcodes fire this method separately
if let oldBorders = self.metadataBordersCodes {
for oldBorder in oldBorders {
DispatchQueue.main.async {
Expand All @@ -519,13 +528,14 @@ extension LuminaController: AVCaptureMetadataOutputObjectsDelegate { // MARK: Me
var newBorders = [LuminaMetadataBorderView]()

for metadata in metadataObjects {
guard let transformed: AVMetadataMachineReadableCodeObject = previewLayer.transformedMetadataObject(for: metadata as! AVMetadataObject) as? AVMetadataMachineReadableCodeObject else {
guard let transformed: AVMetadataMachineReadableCodeObject = previewLayer.transformedMetadataObject(for: metadata ) as? AVMetadataMachineReadableCodeObject else {
continue
}
var border = LuminaMetadataBorderView()
border.isHidden = true
border.frame = transformed.bounds
let translatedCorners = translate(points: transformed.corners as! [[String: Any]], fromView: self.view, toView: border)

let translatedCorners = translate(points: transformed.corners, fromView: self.view, toView: border)
border = LuminaMetadataBorderView(frame: transformed.bounds, corners: translatedCorners)
border.isHidden = false
newBorders.append(border)
Expand All @@ -549,7 +559,7 @@ extension LuminaController: AVCaptureMetadataOutputObjectsDelegate { // MARK: Me
var newBorders = [LuminaMetadataBorderView]()

for metadata in metadataObjects {
guard let face: AVMetadataFaceObject = previewLayer.transformedMetadataObject(for: metadata as! AVMetadataObject) as? AVMetadataFaceObject else {
guard let face: AVMetadataFaceObject = previewLayer.transformedMetadataObject(for: metadata ) as? AVMetadataFaceObject else {
continue
}
let border = LuminaMetadataBorderView(frame: face.bounds)
Expand All @@ -567,10 +577,10 @@ extension LuminaController: AVCaptureMetadataOutputObjectsDelegate { // MARK: Me
}
}

private func translate(points: [[String: Any]], fromView: UIView, toView: UIView) -> [CGPoint] {
private func translate(points: [CGPoint], fromView: UIView, toView: UIView) -> [CGPoint] {
var translatedPoints = [CGPoint]()
for point: [String: Any] in points {
let currentPoint = CGPoint(x: point["X"] as! Double, y: point["Y"] as! Double)
for point in points {
let currentPoint = CGPoint(x: point.x, y: point.y) //CGPoint(x: point["X"] as! Double, y: point["Y"] as! Double)
let translatedPoint = fromView.convert(currentPoint, to: toView)
translatedPoints.append(translatedPoint)
}
Expand Down

0 comments on commit 49748ac

Please sign in to comment.