This repository has been archived by the owner on Apr 3, 2019. It is now read-only.

Updated to the latest version of Xcode and optimized detection #6

Closed
wants to merge 2 commits into from
6 changes: 2 additions & 4 deletions README.md
@@ -1,10 +1,8 @@
# VisionFaceDetection
An example of using the Vision framework for face landmark detection

# Landmark detection needs to be divided into two steps.
The first is face rectangle detection using `VNDetectFaceRectanglesRequest`, based on the pixel buffer provided by the delegate function `captureOutput`.

Next we need to set the `inputFaceObservations` property of the `VNDetectFaceLandmarksRequest` object to provide the input.
# Landmark detection.
We need to set the `inputFaceObservations` property of the `VNDetectFaceLandmarksRequest` object to provide the input.
Now we are ready to start landmark detection.

It's possible to detect landmarks such as: `faceContour`, `leftEye`, `rightEye`, `nose`, `noseCrest`, `lips`, `outerLips`, `leftEyebrow`, and `rightEyebrow`.
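The landmark step described above can be driven either through `inputFaceObservations` or, as the updated `ViewController` in this PR does, with a completion handler. Below is a minimal sketch of the latter, assuming a `CIImage` has already been created from the camera's pixel buffer; the function name `detectLandmarks(on:)` is illustrative and not part of the project.

```swift
import Vision
import CoreImage

// A minimal sketch, not the project's implementation.
// Assumes `image` was built from the pixel buffer delivered to `captureOutput`.
func detectLandmarks(on image: CIImage) {
    let request = VNDetectFaceLandmarksRequest { request, error in
        guard error == nil,
              let faces = request.results as? [VNFaceObservation] else { return }
        for face in faces {
            // Each landmark region holds points normalized to the face bounding box.
            if let contour = face.landmarks?.faceContour {
                print("faceContour has \(contour.pointCount) points")
            }
        }
    }
    let handler = VNImageRequestHandler(ciImage: image, options: [:])
    try? handler.perform([request])
}
```

`VNImageRequestHandler` also accepts a `CVPixelBuffer` or `CGImage` directly, which is the route the updated controller takes after converting the `CIImage`.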
12 changes: 4 additions & 8 deletions Vision Face Detection.xcodeproj/project.pbxproj
@@ -7,7 +7,6 @@
objects = {

/* Begin PBXBuildFile section */
E56B7E711EFD261E006704F3 /* CGRect+Scaled.swift in Sources */ = {isa = PBXBuildFile; fileRef = E56B7E701EFD261E006704F3 /* CGRect+Scaled.swift */; };
E5E2CD531EFA419000EBCCBF /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E5E2CD521EFA419000EBCCBF /* AppDelegate.swift */; };
E5E2CD551EFA419000EBCCBF /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E5E2CD541EFA419000EBCCBF /* ViewController.swift */; };
E5E2CD581EFA419000EBCCBF /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E5E2CD561EFA419000EBCCBF /* Main.storyboard */; };
@@ -16,7 +15,6 @@
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
E56B7E701EFD261E006704F3 /* CGRect+Scaled.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGRect+Scaled.swift"; sourceTree = "<group>"; };
E5E2CD4F1EFA419000EBCCBF /* Vision Face Detection.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Vision Face Detection.app"; sourceTree = BUILT_PRODUCTS_DIR; };
E5E2CD521EFA419000EBCCBF /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
E5E2CD541EFA419000EBCCBF /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
@@ -58,7 +56,6 @@
children = (
E5E2CD521EFA419000EBCCBF /* AppDelegate.swift */,
E5E2CD541EFA419000EBCCBF /* ViewController.swift */,
E56B7E701EFD261E006704F3 /* CGRect+Scaled.swift */,
E5E2CD561EFA419000EBCCBF /* Main.storyboard */,
E5E2CD591EFA419000EBCCBF /* Assets.xcassets */,
E5E2CD5B1EFA419000EBCCBF /* LaunchScreen.storyboard */,
@@ -138,7 +135,6 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
E56B7E711EFD261E006704F3 /* CGRect+Scaled.swift in Sources */,
E5E2CD551EFA419000EBCCBF /* ViewController.swift in Sources */,
E5E2CD531EFA419000EBCCBF /* AppDelegate.swift in Sources */,
);
@@ -273,10 +269,10 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = URYNPG7RQE;
DEVELOPMENT_TEAM = V75RFW43CG;
INFOPLIST_FILE = "Vision Face Detection/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.droidsonroids.Vision-Face-Detection";
PRODUCT_BUNDLE_IDENTIFIER = "com.lolkek.vision-face-detection";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
@@ -287,10 +283,10 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = URYNPG7RQE;
DEVELOPMENT_TEAM = V75RFW43CG;
INFOPLIST_FILE = "Vision Face Detection/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.droidsonroids.Vision-Face-Detection";
PRODUCT_BUNDLE_IDENTIFIER = "com.lolkek.vision-face-detection";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
21 changes: 0 additions & 21 deletions Vision Face Detection/CGRect+Scaled.swift

This file was deleted.

4 changes: 2 additions & 2 deletions Vision Face Detection/Info.plist
@@ -20,6 +20,8 @@
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>because</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
@@ -34,8 +36,6 @@
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>NSCameraUsageDescription</key>
<string>because</string>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
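A side note on the `NSCameraUsageDescription` entry moved in this diff: on iOS 10 and later the key must be present before the capture session starts, otherwise the app is terminated on first camera access, and the string is shown to the user in the permission prompt. The placeholder "because" satisfies the runtime requirement, but a more descriptive purpose string is usually expected; the wording below is only a suggestion, not taken from the project.

```xml
<key>NSCameraUsageDescription</key>
<string>The camera is used to detect your face and draw its landmarks in real time.</string>
```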
167 changes: 75 additions & 92 deletions Vision Face Detection/ViewController.swift
@@ -14,7 +14,6 @@ final class ViewController: UIViewController {
var session: AVCaptureSession?
let shapeLayer = CAShapeLayer()

let faceDetection = VNDetectFaceRectanglesRequest()
let faceLandmarks = VNDetectFaceLandmarksRequest()
let faceLandmarksDetectionRequest = VNSequenceRequestHandler()
let faceDetectionRequest = VNSequenceRequestHandler()
@@ -63,7 +62,6 @@ final class ViewController: UIViewController {
func sessionPrepare() {
session = AVCaptureSession()
guard let session = session, let captureDevice = frontCamera else { return }

do {
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
session.beginConfiguration()
@@ -94,97 +92,88 @@
}

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)

let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate)
let ciImage = CIImage(cvImageBuffer: pixelBuffer!, options: attachments as! [String : Any]?)

//leftMirrored for front camera
let ciImageWithOrientation = ciImage.applyingOrientation(Int32(UIImageOrientation.leftMirrored.rawValue))

detectFace(on: ciImageWithOrientation)
}

}

extension ViewController {

func detectFace(on image: CIImage) {
try? faceDetectionRequest.perform([faceDetection], on: image)
if let results = faceDetection.results as? [VNFaceObservation] {
if !results.isEmpty {
faceLandmarks.inputFaceObservations = results
detectLandmarks(on: image)

DispatchQueue.main.async {
self.shapeLayer.sublayers?.removeAll()
}
}
}
}

func detectLandmarks(on image: CIImage) {
try? faceLandmarksDetectionRequest.perform([faceLandmarks], on: image)
if let landmarksResults = faceLandmarks.results as? [VNFaceObservation] {
for observation in landmarksResults {
DispatchQueue.main.async {
if let boundingBox = self.faceLandmarks.inputFaceObservations?.first?.boundingBox {
let faceBoundingBox = boundingBox.scaled(to: self.view.bounds.size)

//different types of landmarks
let faceContour = observation.landmarks?.faceContour
self.convertPointsForFace(faceContour, faceBoundingBox)

let leftEye = observation.landmarks?.leftEye
self.convertPointsForFace(leftEye, faceBoundingBox)

let rightEye = observation.landmarks?.rightEye
self.convertPointsForFace(rightEye, faceBoundingBox)

let nose = observation.landmarks?.nose
self.convertPointsForFace(nose, faceBoundingBox)

let lips = observation.landmarks?.innerLips
self.convertPointsForFace(lips, faceBoundingBox)

let leftEyebrow = observation.landmarks?.leftEyebrow
self.convertPointsForFace(leftEyebrow, faceBoundingBox)

let rightEyebrow = observation.landmarks?.rightEyebrow
self.convertPointsForFace(rightEyebrow, faceBoundingBox)

let noseCrest = observation.landmarks?.noseCrest
self.convertPointsForFace(noseCrest, faceBoundingBox)

let outerLips = observation.landmarks?.outerLips
self.convertPointsForFace(outerLips, faceBoundingBox)
let ciImageWithOrientation = ciImage.oriented(forExifOrientation: Int32(UIImageOrientation.leftMirrored.rawValue))

//detectFace(on: ciImageWithOrientation)
let detectFaceRequest = VNDetectFaceLandmarksRequest { (request, error) in
if error == nil {
if let results = request.results as? [VNFaceObservation] {
print("Found \(results.count) faces")
if results.isEmpty {
DispatchQueue.main.async {
self.shapeLayer.sublayers?.forEach({ (layer) in
layer.removeFromSuperlayer()
})
}
}
else {
for faceObservation in results {
guard let landmarks = faceObservation.landmarks else {
continue
}
DispatchQueue.main.async {
self.shapeLayer.sublayers?.forEach({ (layer) in
layer.removeFromSuperlayer()
})
if let faceContour = landmarks.faceContour {
self.draw(points: faceContour.pointsInImage(imageSize: self.view.frame.size))
}
if let medianLine = landmarks.medianLine {
self.draw(points: medianLine.pointsInImage(imageSize: self.view.frame.size))
}
if let leftEye = landmarks.leftEye {
self.draw(points: leftEye.pointsInImage(imageSize: self.view.frame.size))
}
if let rightEye = landmarks.rightEye {
self.draw(points: rightEye.pointsInImage(imageSize: self.view.frame.size))
}
if let innerLips = landmarks.innerLips {
self.draw(points: innerLips.pointsInImage(imageSize: self.view.frame.size))
}
if let outerLips = landmarks.outerLips {
self.draw(points: outerLips.pointsInImage(imageSize: self.view.frame.size))
}
if let leftEyebrow = landmarks.leftEyebrow {
self.draw(points: leftEyebrow.pointsInImage(imageSize: self.view.frame.size))
}
if let rightEyebrow = landmarks.rightEyebrow {
self.draw(points: rightEyebrow.pointsInImage(imageSize: self.view.frame.size))
}
if let leftPupil = landmarks.leftPupil {
self.draw(points: leftPupil.pointsInImage(imageSize: self.view.frame.size))
}
if let rightPupil = landmarks.rightPupil {
self.draw(points: rightPupil.pointsInImage(imageSize: self.view.frame.size))
}
if let nose = landmarks.nose {
self.draw(points: nose.pointsInImage(imageSize: self.view.frame.size))
}
if let noseCrest = landmarks.noseCrest {
self.draw(points: noseCrest.pointsInImage(imageSize: self.view.frame.size))
}
}
}
}
}
} else {
print(error!.localizedDescription)
}
}
let cgImage = convertCIImageToCGImage(inputImage: ciImageWithOrientation)
let vnImage = VNImageRequestHandler(cgImage: cgImage!, options: [:])
try? vnImage.perform([detectFaceRequest])
}

func convertPointsForFace(_ landmark: VNFaceLandmarkRegion2D?, _ boundingBox: CGRect) {
if let points = landmark?.points, let count = landmark?.pointCount {
let convertedPoints = convert(points, with: count)

let faceLandmarkPoints = convertedPoints.map { (point: (x: CGFloat, y: CGFloat)) -> (x: CGFloat, y: CGFloat) in
let pointX = point.x * boundingBox.width + boundingBox.origin.x
let pointY = point.y * boundingBox.height + boundingBox.origin.y

return (x: pointX, y: pointY)
}

DispatchQueue.main.async {
self.draw(points: faceLandmarkPoints)
}
}
}

func draw(points: [(x: CGFloat, y: CGFloat)]) {
}

extension ViewController {
func draw(points: [CGPoint]) {
let newLayer = CAShapeLayer()
newLayer.strokeColor = UIColor.red.cgColor
newLayer.lineWidth = 2.0
@@ -198,17 +187,11 @@ extension ViewController {
}
path.addLine(to: CGPoint(x: points[0].x, y: points[0].y))
newLayer.path = path.cgPath

shapeLayer.addSublayer(newLayer)
}


func convert(_ points: UnsafePointer<vector_float2>, with count: Int) -> [(x: CGFloat, y: CGFloat)] {
var convertedPoints = [(x: CGFloat, y: CGFloat)]()
for i in 0...count {
convertedPoints.append((CGFloat(points[i].x), CGFloat(points[i].y)))
}

return convertedPoints
}
}

func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
let context = CIContext(options: nil)
return context.createCGImage(inputImage, from: inputImage.extent)
}