Commit 6316f203 authored by Nguyen Huy Nhat Anh

update uiview

parent b9a8c8f0
File deleted
@@ -7,6 +7,7 @@
 	objects = {
 /* Begin PBXBuildFile section */
+	9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9509925E25355E0300C570D8 /* SBKValidateCardView.swift */; };
 	95182D0624B3343E00405EA9 /* liveness.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95182D0524B3343D00405EA9 /* liveness.tflite */; };
 	954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 954230E425344601006F13F9 /* valid_card_10102020.tflite */; };
 	9546DDB5247D171500AF50DE /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDB4247D171500AF50DE /* ExtString.swift */; };
@@ -69,7 +70,6 @@
 	95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56B24EA83C800C161F2 /* Place within the box.png */; };
 	95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56C24EA83C900C161F2 /* Avoid glare.png */; };
 	95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56D24EA83C900C161F2 /* Do not place outside.png */; };
-	A442B6EE25299DED0058D675 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */; };
 	A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6EF25299E160058D675 /* SBKValidateCardView.xib */; };
 	A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */; };
 	A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */; };
@@ -94,6 +94,7 @@
 	2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.debug.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
 	3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 	8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.release.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
+	9509925E25355E0300C570D8 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKValidateCardView.swift; sourceTree = "<group>"; };
 	95182D0524B3343D00405EA9 /* liveness.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = liveness.tflite; sourceTree = "<group>"; };
 	954230E425344601006F13F9 /* valid_card_10102020.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = valid_card_10102020.tflite; sourceTree = "<group>"; };
 	9546DDB4247D171500AF50DE /* ExtString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
@@ -160,7 +161,6 @@
 	95FAF56B24EA83C800C161F2 /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
 	95FAF56C24EA83C900C161F2 /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
 	95FAF56D24EA83C900C161F2 /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
-	A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SBKValidateCardView.swift; path = "../../../../ios-sdk/OCR-SDK/UI/SBKCaptureCard/SBKValidateCardView.swift"; sourceTree = "<group>"; };
 	A442B6EF25299E160058D675 /* SBKValidateCardView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKValidateCardView.xib; sourceTree = "<group>"; };
 	A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKRecordFaceView.swift; sourceTree = "<group>"; };
 	A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKRecordFaceView.xib; sourceTree = "<group>"; };
@@ -358,7 +358,7 @@
 			children = (
 				9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */,
 				955105AA247774CC0053036F /* SBKCaptureCardVC.swift */,
-				A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */,
+				9509925E25355E0300C570D8 /* SBKValidateCardView.swift */,
 				A442B6EF25299E160058D675 /* SBKValidateCardView.xib */,
 			);
 			path = SBKCaptureCard;
@@ -561,7 +561,6 @@
 			buildActionMask = 2147483647;
 			files = (
 				955105AD247774CC0053036F /* Loadding.swift in Sources */,
-				A442B6EE25299DED0058D675 /* SBKValidateCardView.swift in Sources */,
 				955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */,
 				955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */,
 				955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */,
@@ -574,6 +573,7 @@
 				955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */,
 				9546DDDC247E197800AF50DE /* Global.swift in Sources */,
 				9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */,
+				9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */,
 				95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */,
 				955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */,
 				955105C2247774CC0053036F /* SBKResultCapture.swift in Sources */,
...
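Note: the net effect of the project-file hunks above is to re-home SBKValidateCardView.swift. The removed reference (A442B6ED...) pulled the file from a sibling checkout through a fragile relative path, while the added one (9509925E...) points at a copy living inside the SBKCaptureCard group; the group listing and the Sources build phase are updated to match. The two path styles, side by side:

    path = "../../../../ios-sdk/OCR-SDK/UI/SBKCaptureCard/SBKValidateCardView.swift"   (removed: external reference)
    path = SBKValidateCardView.swift; sourceTree = "<group>"                           (added: in-group file)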
@@ -246,8 +246,8 @@ public class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
         let validateView = SBKValidateInput.shared
         let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
         var imageOutput: UIImage?
-        if self.statusScreen == "horizontal" {
-            imageOutput = validateView.cropImageHorizontal(image: imageInput, rect: CGRect(x: imageInput.size.width * 1 / 10, y: imageInput.size.height * 3 / 20, width: imageInput.size.width * 8 / 10, height: imageInput.size.height * 9 / 10), scale: 1.0)
+        if UIDevice.current.userInterfaceIdiom == .pad {
+            imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 2 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4), scale: 1.0)
         } else {
             imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 2 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4), scale: 1.0)
         }
...
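Note: after this change both branches crop the identical card-shaped region, so the iPad check only preserves the old control flow while the horizontal-specific crop is gone. The geometry itself is a 5% inset from the left, a top edge 2/7 of the way down, 90% of the frame width, and a 4:3 height derived from that width. A minimal sketch of just that math (cardCropRect is an illustrative name, not SDK API):

    import UIKit

    // Crop geometry used by both branches above, assuming a portrait frame:
    // 90% of the width, 4:3 card aspect, 5% left inset, top at 2/7 of the height.
    func cardCropRect(for imageSize: CGSize) -> CGRect {
        let cardWidth = imageSize.width - imageSize.width / 10   // 90% of frame width
        return CGRect(x: imageSize.width / 20,                   // 5% left inset
                      y: imageSize.height * 2 / 7,               // top of the card guide
                      width: cardWidth,
                      height: cardWidth * 3 / 4)                 // 4:3 card aspect
    }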
@@ -25,7 +25,7 @@ class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
     private let videoDataOutput = AVCaptureVideoDataOutput()
     private var modelDataHandler: SBKModelDataHandler? =
         SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
     private var result: [Float]?
     private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
     private let delayBetweenInferencesMs: Double = 1000
@@ -80,7 +80,7 @@
             path.addLine(to: CGPoint(x: 0, y: viewOval.frame.height / 2))
             return path
         }()
         pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
         let maskLayer = CAShapeLayer(layer: viewOval.layer)
         maskLayer.backgroundColor = UIColor.black.cgColor
@@ -94,10 +94,10 @@
         let currentTimeMs = Date().timeIntervalSince1970 * 1000
         guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
         previousInferenceTimeMs = currentTimeMs
         // Pass the pixel buffer to TensorFlow Lite to perform inference.
         result = modelDataHandler?.runModel(onFrame: pixelBuffer)
         if self.checkStatusRecord {
             (result![0] < result![1]) ? (self.numberTrue += 1) : (self.numberFalse += 1)
             self.numberPass += 1
@@ -170,7 +170,7 @@
                     }
                 }
             }
     }
     init() {
         super.init(nibName: "SBKRecordFace", bundle: Bundle(for: SBKRecordFace.self))
@@ -178,7 +178,7 @@
             fatalError("Model set up failed")
         }
     }
     required init?(coder aDecoder: NSCoder) {
         fatalError("init(coder:) has not been implemented")
     }
@@ -218,10 +218,10 @@
         self.captureSession.sessionPreset = .medium
         if #available(iOS 11.1, *) {
             guard let device = AVCaptureDevice.DiscoverySession(
                 deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
                 mediaType: .video,
                 position: .front).devices.first else {
                     fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
             }
             let cameraInput = try! AVCaptureDeviceInput(device: device)
             if captureSession.canAddInput(cameraInput) {
@@ -247,7 +247,7 @@
         overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
         let path = CGMutablePath()
         path.addRoundedRect(in: CGRect(x: self.viewOval.frame.origin.x, y: self.viewOval.frame.origin.y, width: self.viewOval.frame.width, height: self.viewOval.frame.height), cornerWidth: 0, cornerHeight: 0)
         path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
         let maskLayer = CAShapeLayer()
         maskLayer.backgroundColor = UIColor.black.cgColor
@@ -255,7 +255,7 @@
         maskLayer.fillRule = .evenOdd
         overlayView.layer.mask = maskLayer
         overlayView.clipsToBounds = true
         return overlayView
     }
@@ -265,15 +265,15 @@
         self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
         self.captureSession.addOutput(self.videoDataOutput)
         guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
             connection.isVideoOrientationSupported else { return }
         connection.videoOrientation = .portrait
     }
     @available(iOS 11.0, *)
     private func detectFace(in image: CVPixelBuffer) {
         let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
             DispatchQueue.main.async {
                 DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
                     if (request.results as? [VNFaceObservation]) != nil {
                         if let results = request.results as? [VNFaceObservation], results.count > 0 {
@@ -285,20 +285,6 @@
                             self.startTimer()
-                            // if !self.checkStartRecord && !self.checkStatusRecord {
-                            //     self.checkStartRecord = true
-                            //     self.lbDescription.textColor = UIColor.white
-                            //     //self.lbDescription.text = "Are you ready. Let's start!".localized()//"Bạn đã sẵn sàng. Hãy bắt đầu!"
-                            //     if !self.checkStatusRecord {
-                            //         self.checkStatusRecord = true
-                            //         self.timeRecord = 0
-                            //         self.checkStep = 0
-                            //         self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#333333")
-                            //         self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#333333")
-                            //         self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#333333")
-                            //     }
-                            // }
                             DispatchQueue.global().async {
                                 self.didOutput(pixelBuffer: image, statusFace: statusString)
                             }
@@ -319,17 +305,17 @@
                         }
                     }
                 }
             }}
         })
         if #available(iOS 12.0, *) {
             // Force the revision to 2 (68-points) even on iOS 13 or greater
             // when VNDetectFaceLandmarksRequestRevision3 is available.
             faceDetectionRequest.revision = 2
         }
         let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
         try? imageRequestHandler.perform([faceDetectionRequest])
     }
     func startTimer() {
@@ -350,27 +336,27 @@
         case 3:
             if self.dataImageSuccess.count > 2 {
                 DispatchQueue.main.async {
                     let viewController = SBKResultFaceVC()
                     viewController.imageData = self.dataImageSuccess[0]
                     viewController.idFront = self.idFront
                     viewController.idBack = self.idBack
                     viewController.URLToken = self.URLToken
                     viewController.dataImageSuccess = self.dataImageSuccess
                     viewController.completionSuccessFaceData = { [weak self] data, cardImage, faceImage in
                         guard let `self` = self else { return }
                         self.completionSuccessFaceRecord(data, cardImage, faceImage)
                     }
                     self.timeRecord = 0
                     self.checkStep = 0
                     self.resetRecord()
                     self.timer.invalidate()
                     self.captureSession.stopRunning()
                     self.captureSession.removeOutput(self.videoDataOutput)
                     self.navigationController?.pushViewController(viewController, animated: true)
                 }
             } else {
                 self.lbDescription.textColor = UIColor.red
                 self.lbDescription.text = "Unsatisfactory, please try again!".localized()//"Chưa đạt yêu cầu, hãy thử lại!"
             }
         default:
             break
@@ -395,7 +381,7 @@
         let ciimage = CIImage(image: imageOutput!)
         let tmpcontext = CIContext(options: nil)
         let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
         return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
     }
...
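Note: both recorder classes gate TensorFlow Lite work with the previousInferenceTimeMs / delayBetweenInferencesMs guard seen in the didOutput hunks above. A self-contained sketch of that throttling pattern (the wrapper type is illustrative, not SDK API):

    import Foundation

    // At most one inference per delayBetweenInferencesMs; earlier frames are dropped.
    final class InferenceThrottle {
        private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
        private let delayBetweenInferencesMs: Double = 1000

        /// Returns true when enough time has passed to run the model again.
        func shouldRunInference() -> Bool {
            let currentTimeMs = Date().timeIntervalSince1970 * 1000
            guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return false }
            previousInferenceTimeMs = currentTimeMs
            return true
        }
    }

Dropping frames this way keeps the camera queue responsive while the liveness model runs roughly once per second.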
@@ -15,11 +15,13 @@ public class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     let nibName = "SBKRecordFaceView"
     var contentView:UIView?
-    @IBOutlet public weak var viewBackground: UIView!
-    @IBOutlet public weak var lbDescription: UILabel!
-    @IBOutlet weak var lbCopyright: UILabel!
-    //@IBOutlet public weak var viewOval: UIView!
-    @IBOutlet public weak var btnRecord: UIButton!
+    @IBOutlet weak var viewBackground: UIView!
+    @IBOutlet weak var lbDescription: UILabel!
+    @IBOutlet weak var viewOval: UIView!
+    @IBOutlet weak var viewCheckStep1: UIView!
+    @IBOutlet weak var viewCheckStep2: UIView!
+    @IBOutlet weak var viewCheckStep3: UIView!
+    @IBOutlet weak var imageDescription: UIImageView!
     private let captureSession = AVCaptureSession()
     private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
@@ -43,7 +45,7 @@
     var idBack: String = ""
     var URLToken: String = ""
     var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
-    public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?, _ timeRecord: Int?)->Void = {_,_,_,_ in}
+    public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?)->Void = {_,_,_ in}
     public var timeSpace: Int = 3
     public var zoom: CGFloat = 1.0
     public var imageStartRecord: UIImage?
@@ -76,7 +78,6 @@
     }
     func loadCamera() {
-        self.lbCopyright.text = " "
         self.addCameraInput()
         self.showCameraFeed()
         self.getCameraFrames()
@@ -122,36 +123,36 @@
         self.addSubview(viewOval)
         self.reloadInputViews()
         let pathss: CGMutablePath = {
             let path = CGMutablePath()
             if viewOval.frame.height > viewOval.frame.width {
                 path.addArc(center: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height / 2),
                             radius: viewOval.frame.width / 2,
                             startAngle: 0.0,
                             endAngle: 2.0 * .pi,
                             clockwise: false)
             } else {
                 path.addArc(center: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height / 2),
                             radius: viewOval.frame.height / 2,
                             startAngle: 0.0,
                             endAngle: 2.0 * .pi,
                             clockwise: false)
             }
             return path
         }()
         pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
         let maskLayer = CAShapeLayer(layer: viewOval.layer)
         maskLayer.backgroundColor = UIColor.black.cgColor
         maskLayer.path = pathss
         maskLayer.fillRule = .evenOdd
         viewOval.layer.mask = maskLayer
         viewOval.clipsToBounds = true
         }
     }
     func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
         let originYLBCopyright = self.lbCopyright.frame.origin.y / 896 * self.bounds.height
         let heightLBCopyright = self.lbCopyright.frame.height / 896 * self.bounds.height
         let locationTop = originYLBCopyright + heightLBCopyright + 35
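Note: the masks built here (and in the overlay hunks below) punch a hole in the dimmed overlay with an even-odd fill: the path contains both the cut-out shape and a rectangle covering the whole view, so points inside exactly one subpath are filled while the cut-out stays clear. A compact sketch of the technique, with a plain ellipse standing in for the composed arc path (addOvalCutout is an illustrative helper, not SDK API):

    import UIKit

    func addOvalCutout(to overlayView: UIView, cutout: CGRect) {
        let path = CGMutablePath()
        path.addEllipse(in: cutout)                                         // the hole
        path.addRect(CGRect(origin: .zero, size: overlayView.bounds.size)) // the filled remainder
        let maskLayer = CAShapeLayer()
        maskLayer.path = path
        maskLayer.fillRule = .evenOdd   // inside one subpath = masked in; inside both = clear
        overlayView.layer.mask = maskLayer
        overlayView.clipsToBounds = true
    }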
@@ -167,146 +168,146 @@
         self.addSubview(viewOval)
         self.reloadInputViews()
         let pathss: CGMutablePath = {
             let path = CGMutablePath()
             path.move(to: CGPoint(x: 0, y: viewOval.frame.height/2))
             if viewOval.frame.width < viewOval.frame.height {
                 path.addLine(to: CGPoint(x: 0, y: 0 + viewOval.frame.width / 2))
             }
             path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: 0), control: CGPoint(x: 0, y: 0))
             path.addQuadCurve(to: CGPoint(x: viewOval.frame.width, y: 0 + viewOval.frame.width / 2), control: CGPoint(x: viewOval.frame.width, y: 0))
             if viewOval.frame.width < viewOval.frame.height {
                 path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height / 2))
             }
             path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height - viewOval.frame.width / 2))
             path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height), control: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height))
             path.addQuadCurve(to: CGPoint(x: 0, y: viewOval.frame.height - viewOval.frame.width / 2), control: CGPoint(x: 0, y: viewOval.frame.height))
             path.addLine(to: CGPoint(x: 0, y: viewOval.frame.height / 2))
             return path
         }()
         pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
         let maskLayer = CAShapeLayer(layer: viewOval.layer)
         maskLayer.backgroundColor = UIColor.black.cgColor
         maskLayer.path = pathss
         maskLayer.fillRule = .evenOdd
         viewOval.layer.mask = maskLayer
         viewOval.clipsToBounds = true
     }
     func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) {
         let currentTimeMs = Date().timeIntervalSince1970 * 1000
         guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
         previousInferenceTimeMs = currentTimeMs
         // Pass the pixel buffer to TensorFlow Lite to perform inference.
         result = modelDataHandler?.runModel(onFrame: pixelBuffer)
         if self.checkStatusRecord {
             (result![0] < result![1]) ? (self.numberTrue += 1) : (self.numberFalse += 1)
             self.numberPass += 1
             DispatchQueue.main.async {
                 let ciimage : CIImage = CIImage(cvPixelBuffer: pixelBuffer)
                 let imageView : UIImage = SBKValidateInput.shared.convertCIToUIImage(cmage: ciimage)
                 if self.result![0] < self.result![1] {
                     if statusFace == .STRAIGHTFACE && 0 <= self.timeRecord && self.timeRecord < self.timeSpace {
                         if self.dataImageSuccess.count == 0 {
                             self.dataImageSuccess.append(imageView.pngData()!)
                         }
-                        self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
                         if 1 <= self.timeRecord {
                             self.lbDescription.textColor = UIColor.green
                             self.lbDescription.text = " ".localized()
                         }
                     } else if statusFace != .STRAIGHTFACE && 1 <= self.timeRecord && self.timeRecord < self.timeSpace {
                         self.lbDescription.textColor = UIColor.red
                         self.lbDescription.text = " ".localized()
-                        self.completionSuccessFaceRecordStep(.FACE_STRAIGHT_FAILD, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_STRAIGHT_FAILD, nil, nil)
                     } else if statusFace == .TORIGHT && self.timeSpace <= self.timeRecord && self.timeRecord < self.timeSpace * 2 {
                         self.lbDescription.textColor = UIColor.green
                         self.lbDescription.text = " ".localized()
-                        self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
                         if self.dataImageSuccess.count == 1 {
                             self.dataImageSuccess.append(imageView.pngData()!)
                         }
                     } else if statusFace != .TORIGHT && self.timeSpace <= self.timeRecord && self.timeRecord < self.timeSpace * 2 {
                         self.lbDescription.textColor = UIColor.red
                         self.lbDescription.text = " ".localized()
-                        self.completionSuccessFaceRecordStep(.FACE_RIGHT_FAILD, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_RIGHT_FAILD, nil, nil)
                     } else if statusFace == .TOLEFT && self.timeSpace * 2 <= self.timeRecord && self.timeRecord < self.timeSpace * 3 {
                         self.lbDescription.textColor = UIColor.green
                         self.lbDescription.text = " ".localized()
-                        self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
                         if self.dataImageSuccess.count == 2 {
                             self.dataImageSuccess.append(imageView.pngData()!)
                         }
                     } else if statusFace != .TOLEFT && self.timeSpace * 2 <= self.timeRecord && self.timeRecord < self.timeSpace * 3 {
                         self.lbDescription.textColor = UIColor.red
                         self.lbDescription.text = " ".localized()
-                        self.completionSuccessFaceRecordStep(.FACE_LEFT_FAILD, nil, nil, nil)
+                        self.completionSuccessFaceRecordStep(.FACE_LEFT_FAILD, nil, nil)
                     }
                 } else {
                     self.lbDescription.textColor = UIColor.red
                     self.lbDescription.text = " ".localized()
-                    self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil, nil)
+                    self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil)
                 }
             }
         } else {
             if result![0] < result![1] {
                 DispatchQueue.main.async {
                     self.checkStartRecord = true
                     self.lbDescription.textColor = UIColor.green
                     self.lbDescription.text = " ".localized()//"Bạn đã sẵn sàng. Hãy bắt đầu!"
-                    self.completionSuccessFaceRecordStep(.FACE_READY, nil, nil, nil)
+                    self.completionSuccessFaceRecordStep(.FACE_READY, nil, nil)
                 }
             } else {
                 DispatchQueue.main.async {
                     self.checkStartRecord = false
                     self.lbDescription.textColor = UIColor.red
                     self.lbDescription.text = "Incorrect face, please check!".localized()
-                    self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil, nil)
+                    self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil)
                 }
             }
         }
     }
     @IBAction func onRecord(_ sender: Any) {
         if !self.checkStatusRecord && self.checkStartRecord {
             self.startTimer()
             self.checkStatusRecord = true
             self.timeRecord = 0
             let image = UIImage(named: "player_stop", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
-            self.btnRecord.setImage(imageStartRecord ?? image, for: .normal)
+            //self.btnRecord.setImage(imageStartRecord ?? image, for: .normal)
         }
     }
     public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
         guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             debugPrint("unable to get image from sample buffer")
             return
         }
         if #available(iOS 11.0, *) {
             self.detectFace(in: self.resizeImageFace(pixelBuffer: frame))
         }
     }
     private func addCameraInput() {
         self.captureSession.sessionPreset = .medium
         if #available(iOS 11.1, *) {
             guard let device = AVCaptureDevice.DiscoverySession(
                 deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
                 mediaType: .video,
                 position: .front).devices.first else {
                     print("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
                     return
             }
             let cameraInput = try! AVCaptureDeviceInput(device: device)
             if captureSession.canAddInput(cameraInput) {
                 self.captureSession.addInput(cameraInput)
             }
             do {
                 try device.lockForConfiguration()
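Note: together with the property hunk earlier, every call site in didOutput now uses the three-parameter completionSuccessFaceRecordStep; the trailing timeRecord argument is gone, so integrators must update their closures. A hedged usage sketch (view creation and layout assumed elsewhere):

    let recordView = SBKRecordFaceView()
    recordView.completionSuccessFaceRecordStep = { validate, step, data in
        if let validate = validate { print("state: \(validate)") }   // e.g. .FACE_READY, .FACE_FAKE
        if let step = step { print("prompt: \(step)") }              // e.g. .STEP_FACE_RIGHT
        if let data = data {
            // Final payload keys per the diff: "imagestraight", "imageRight", "imageLeft".
            print("captured \(data.count) images")
        }
    }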
@@ -316,24 +317,24 @@
                 print("\(error.localizedDescription)")
             }
         }
     }
     private func showCameraFeed() {
         self.previewLayer.videoGravity = .resizeAspectFill
         self.viewBackground.layer.addSublayer(self.previewLayer)
         self.previewLayer.frame = self.bounds
         let overlayOval = createOverlayOval(frame: self.bounds, xOffset: viewBackground.frame.midX, yOffset: viewBackground.frame.midY, radius: 50.0)
         self.viewBackground.addSubview(overlayOval)
         self.createOverlay(frame: self.bounds, xOffset: self.bounds.midX, yOffset: self.bounds.midY, radius: 50.0)
     }
     func createOverlayOval(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) -> UIView {
         let overlayView = UIView(frame: frame)
         overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
         let path = CGMutablePath()
         let originYLBCopyright = self.lbCopyright.frame.origin.y / 896 * self.bounds.height
         let heightLBCopyright = self.lbCopyright.frame.height / 896 * self.bounds.height
...@@ -344,174 +345,176 @@ public class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDele ...@@ -344,174 +345,176 @@ public class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDele
let heightOval = originYLBDescription - locationTop let heightOval = originYLBDescription - locationTop
path.addRoundedRect(in: CGRect(x: CGFloat( Int(self.bounds.origin.x + 40 / 896 * self.bounds.height)), y: CGFloat( Int(locationTop)), width: CGFloat( Int(self.bounds.width - 80 / 896 * self.bounds.height)), height: CGFloat( Int(heightOval - 65))), cornerWidth: 0, cornerHeight: 0) path.addRoundedRect(in: CGRect(x: CGFloat( Int(self.bounds.origin.x + 40 / 896 * self.bounds.height)), y: CGFloat( Int(locationTop)), width: CGFloat( Int(self.bounds.width - 80 / 896 * self.bounds.height)), height: CGFloat( Int(heightOval - 65))), cornerWidth: 0, cornerHeight: 0)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size)) path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer() let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path maskLayer.path = path
maskLayer.fillRule = .evenOdd maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true overlayView.clipsToBounds = true
return overlayView return overlayView
} }
private func getCameraFrames() { private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any] self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue")) self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput) self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video), connection.isVideoOrientationSupported else { return } guard let connection = self.videoDataOutput.connection(with: AVMediaType.video), connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait connection.videoOrientation = .portrait
} }
@available(iOS 11.0, *) @available(iOS 11.0, *)
private func detectFace(in image: CVPixelBuffer) { private func detectFace(in image: CVPixelBuffer) {
let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
DispatchQueue.main.async { DispatchQueue.main.async {
if (request.results as? [VNFaceObservation]) != nil { DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
if let results = request.results as? [VNFaceObservation], results.count > 0 { if (request.results as? [VNFaceObservation]) != nil {
if let results = request.results as? [VNFaceObservation], results.count > 0 {
if results.count > 1 {
self.lbDescription.textColor = UIColor.red if results.count > 1 {
self.lbDescription.text = " ".localized() self.lbDescription.textColor = UIColor.red
self.completionSuccessFaceRecordStep(.MANY_FACE, nil, nil, nil) self.lbDescription.text = "There are many faces in the frame".localized()
} else { self.completionSuccessFaceRecordStep(.MANY_FACE, nil, nil)
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!) } else {
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!)
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: statusString) DispatchQueue.global().async {
} self.didOutput(pixelBuffer: image, statusFace: statusString)
} }
} else { }
if !self.checkStatusRecord { } else {
self.checkStartRecord = false if !self.checkStatusRecord {
self.lbDescription.textColor = UIColor.red self.checkStartRecord = false
self.lbDescription.text = " ".localized()//"Đặt vị trí mặt bạn vào hình" self.lbDescription.textColor = UIColor.red
self.completionSuccessFaceRecordStep(.NO_FACE, nil, nil, nil) self.lbDescription.text = "Position your face in the oval".localized()//"Đặt vị trí mặt bạn vào hình"
} else { self.completionSuccessFaceRecordStep(.NO_FACE, nil, nil)
self.lbDescription.textColor = UIColor.red } else {
self.lbDescription.text = " ".localized() self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR) DispatchQueue.global().async {
} self.didOutput(pixelBuffer: image, statusFace: .ERROR)
} }
} }
}
}
})
if #available(iOS 12.0, *) {
// Force the revision to 2 (68-points) even on iOS 13 or greater
// when VNDetectFaceLandmarksRequestRevision3 is available.
faceDetectionRequest.revision = 2
}
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
try? imageRequestHandler.perform([faceDetectionRequest])
}
func startTimer() {
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(setLableRecord(timer:)), userInfo: nil, repeats: true)
}
func resetRecord() {
self.numberPass = 0
self.numberTrue = 0
self.numberFalse = 0
self.space = 0.0
self.dataImageSuccess.removeAll()
self.checkStartRecord = false
self.checkStatusRecord = false
let image = UIImage(named: "ic_record", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.btnRecord.setImage(image, for: .normal)
}
@objc func setLableRecord(timer: Timer) {
self.timeRecord += 1
self.completionSuccessFaceRecordStep(nil, nil, nil, self.timeRecord)
switch self.timeRecord {
case 1:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"vui lòng nhìn thẳng"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_STRAIGHT, nil, self.timeRecord)
case self.timeSpace:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"Quay sang phải"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_RIGHT, nil, self.timeRecord)
case self.timeSpace * 2:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"Quay sang trái"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_LEFT, nil, self.timeRecord)
case self.timeSpace * 3:
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
]
self.completionSuccessFaceRecordStep(nil, nil, data, nil)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
} }
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()//"Chưa đạt yêu cầu, hãy thử lại!"
self.completionSuccessFaceRecordStep(.INVALID, nil, nil, nil)
} }
case self.timeSpace * 4: }
self.timer.invalidate() }
self.resetRecord() })
self.timeRecord = 0
default: if #available(iOS 12.0, *) {
break // Force the revision to 2 (68-points) even on iOS 13 or greater
} // when VNDetectFaceLandmarksRequestRevision3 is available.
faceDetectionRequest.revision = 2
} }
func resizeImageFace(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer { let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
let validateView = SBKValidateInput.shared try? imageRequestHandler.perform([faceDetectionRequest])
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer) }
let imageOutput = validateView.cropImageFace(image: imageInput, rect: CGRect(x: imageInput.size.width / 6, y: imageInput.size.height / 12, width: imageInput.size.width * 4 / 6, height: imageInput.size.height * 8 / 10 ), scale: 1.0)
func startTimer() {
let ciimage = CIImage(image: imageOutput!) timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(setLableRecord(timer:)), userInfo: nil, repeats: true)
let eaglContext = EAGLContext(api: .openGLES2) }
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent) func resetRecord() {
self.numberPass = 0
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)! self.numberTrue = 0
} self.numberFalse = 0
self.space = 0.0
@available(iOS 11.0, *) self.dataImageSuccess.removeAll()
func checkFaceRightLeft(landmarks: VNFaceLandmarks2D) -> StatusFace { self.checkStartRecord = false
let face = landmarks.faceContour?.normalizedPoints self.checkStatusRecord = false
let nose = landmarks.nose?.normalizedPoints
//let image = UIImage(named: "ic_record", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
let faceLeftPoint = face![0] //self.btnRecord.setImage(image, for: .normal)
let faceRightPoint = face![face!.count - 1] }
let faceBottomPoint = face![5]
let nosePoint = nose![4] @objc func setLableRecord(timer: Timer) {
self.timeRecord += 1
let sRight = (faceBottomPoint.x - faceLeftPoint.x) * (nosePoint.y - faceLeftPoint.y) - (nosePoint.x - faceLeftPoint.x) * (faceBottomPoint.y - faceLeftPoint.y) self.completionSuccessFaceRecordStep(nil, nil, nil)
let sLeft = (faceBottomPoint.x - faceRightPoint.x) * (nosePoint.y - faceRightPoint.y) - (nosePoint.x - faceRightPoint.x) * (faceBottomPoint.y - faceRightPoint.y) switch self.timeRecord {
case 1:
let sFaceRight = sqrt(sRight * sRight) / 2 self.lbDescription.textColor = UIColor.white
let sFaceLeft = sqrt(sLeft * sLeft) / 2 self.lbDescription.text = " ".localized()//"vui lòng nhìn thẳng"
let totalS = sFaceLeft + sFaceRight self.completionSuccessFaceRecordStep(nil, .STEP_FACE_STRAIGHT, nil)
case self.timeSpace:
if sFaceLeft / totalS > 0.6 { self.lbDescription.textColor = UIColor.white
return .TOLEFT self.lbDescription.text = " ".localized()//"Quay sang phải"
} else if sFaceRight / totalS > 0.6 { self.completionSuccessFaceRecordStep(nil, .STEP_FACE_RIGHT, nil)
return .TORIGHT case self.timeSpace * 2:
} else { self.lbDescription.textColor = UIColor.white
return .STRAIGHTFACE self.lbDescription.text = " ".localized()//"Quay sang trái"
} self.completionSuccessFaceRecordStep(nil, .STEP_FACE_LEFT, nil)
} case self.timeSpace * 3:
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
}
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()//"Chưa đạt yêu cầu, hãy thử lại!"
self.completionSuccessFaceRecordStep(.INVALID, nil, nil)
}
case self.timeSpace * 4:
self.timer.invalidate()
self.resetRecord()
self.timeRecord = 0
default:
break
}
}
func resizeImageFace(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
let imageOutput = validateView.cropImageFace(image: imageInput, rect: CGRect(x: imageInput.size.width / 6, y: imageInput.size.height / 12, width: imageInput.size.width * 4 / 6, height: imageInput.size.height * 8 / 10 ), scale: 1.0)
let ciimage = CIImage(image: imageOutput!)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
    @available(iOS 11.0, *)
    func checkFaceRightLeft(landmarks: VNFaceLandmarks2D) -> StatusFace {
        let face = landmarks.faceContour?.normalizedPoints
        let nose = landmarks.nose?.normalizedPoints
        let faceLeftPoint = face![0]
        let faceRightPoint = face![face!.count - 1]
        let faceBottomPoint = face![5]
        let nosePoint = nose![4]
        // 2D cross products: each is twice the signed area of the triangle spanned by the chin,
        // the nose, and one side of the face contour.
        let sRight = (faceBottomPoint.x - faceLeftPoint.x) * (nosePoint.y - faceLeftPoint.y) - (nosePoint.x - faceLeftPoint.x) * (faceBottomPoint.y - faceLeftPoint.y)
        let sLeft = (faceBottomPoint.x - faceRightPoint.x) * (nosePoint.y - faceRightPoint.y) - (nosePoint.x - faceRightPoint.x) * (faceBottomPoint.y - faceRightPoint.y)
        let sFaceRight = sqrt(sRight * sRight) / 2
        let sFaceLeft = sqrt(sLeft * sLeft) / 2
        let totalS = sFaceLeft + sFaceRight
        // If one side's triangle holds more than 60% of the total area, the head is turned that way.
        if sFaceLeft / totalS > 0.6 {
            return .TOLEFT
        } else if sFaceRight / totalS > 0.6 {
            return .TORIGHT
        } else {
            return .STRAIGHTFACE
        }
    }
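    // Geometry note: sRight and sLeft above are the 2D cross products of the edge vectors,
    // i.e. twice the signed triangle areas. A minimal helper expressing the same formula
    // (illustrative only; this commit computes it inline):
    private func signedTriangleArea(_ a: CGPoint, _ b: CGPoint, _ c: CGPoint) -> CGFloat {
        // Cross product of (b - a) and (c - a); positive when a -> b -> c winds counter-clockwise.
        return ((b.x - a.x) * (c.y - a.y) - (c.x - a.x) * (b.y - a.y)) / 2
    }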
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17156" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
    <device id="retina6_1" orientation="portrait" appearance="light"/>
    <dependencies>
        <deployment identifier="iOS"/>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17125"/>
        <capability name="Safe area layout guides" minToolsVersion="9.0"/>
        <capability name="System colors in document resources" minToolsVersion="11.0"/>
        <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
    </dependencies>
    <objects>
        <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKRecordFaceView" customModule="SB_KYC_SDK" customModuleProvider="target">
            <connections>
                <outlet property="imageDescription" destination="qAP-z1-ph6" id="4WK-9l-Hhw"/>
                <outlet property="lbDescription" destination="l4S-dU-FVT" id="7qE-CY-e4c"/>
                <outlet property="viewBackground" destination="vtt-nP-K7e" id="p2H-QE-FHN"/>
                <outlet property="viewCheckStep1" destination="Ikb-Rh-oGt" id="gg1-L1-h5k"/>
                <outlet property="viewCheckStep2" destination="q10-fa-3fY" id="YaT-X4-UMq"/>
                <outlet property="viewCheckStep3" destination="KKw-Ea-56U" id="91G-Kf-nGW"/>
                <outlet property="viewOval" destination="o70-jf-1ia" id="W81-Ef-mxN"/>
            </connections>
        </placeholder>
        <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
@@ -21,62 +25,115 @@
            <rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
            <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
            <subviews>
                <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="vtt-nP-K7e">
                    <rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
                    <color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                </view>
                <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="wyh-9h-sMx">
                    <rect key="frame" x="173" y="771" width="68.5" height="20.5"/>
                    <subviews>
                        <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 3/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Maa-bq-dEa">
                            <rect key="frame" x="10" y="3" width="48.5" height="14.5"/>
                            <fontDescription key="fontDescription" type="system" pointSize="12"/>
                            <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                            <nil key="highlightedColor"/>
                        </label>
                    </subviews>
                    <color key="backgroundColor" red="0.39954239130000002" green="0.39960256220000001" blue="0.39952337739999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                    <constraints>
                        <constraint firstItem="Maa-bq-dEa" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="top" constant="3" id="1h4-xu-9Jn"/>
                        <constraint firstItem="Maa-bq-dEa" firstAttribute="leading" secondItem="wyh-9h-sMx" secondAttribute="leading" constant="10" id="7Sy-eo-0q9"/>
                        <constraint firstAttribute="bottom" secondItem="Maa-bq-dEa" secondAttribute="bottom" constant="3" id="c2B-Vb-FHV"/>
                        <constraint firstAttribute="trailing" secondItem="Maa-bq-dEa" secondAttribute="trailing" constant="10" id="i4F-Gz-xe8"/>
                    </constraints>
                    <userDefinedRuntimeAttributes>
                        <userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
                            <integer key="value" value="3"/>
                        </userDefinedRuntimeAttribute>
                    </userDefinedRuntimeAttributes>
                </view>
                <stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="5" translatesAutoresizingMaskIntoConstraints="NO" id="lfu-gT-avH">
                    <rect key="frame" x="100" y="670" width="214" height="6"/>
                    <subviews>
                        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ikb-Rh-oGt">
                            <rect key="frame" x="0.0" y="0.0" width="68" height="6"/>
                            <color key="backgroundColor" red="0.20000000000000001" green="0.20000000000000001" blue="0.20000000000000001" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                            <userDefinedRuntimeAttributes>
                                <userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
                                    <integer key="value" value="3"/>
                                </userDefinedRuntimeAttribute>
                            </userDefinedRuntimeAttributes>
                        </view>
                        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="q10-fa-3fY">
                            <rect key="frame" x="73" y="0.0" width="68" height="6"/>
                            <color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                            <userDefinedRuntimeAttributes>
                                <userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
                                    <integer key="value" value="3"/>
                                </userDefinedRuntimeAttribute>
                            </userDefinedRuntimeAttributes>
                        </view>
                        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="KKw-Ea-56U">
                            <rect key="frame" x="146" y="0.0" width="68" height="6"/>
                            <color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                            <userDefinedRuntimeAttributes>
                                <userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
                                    <integer key="value" value="3"/>
                                </userDefinedRuntimeAttribute>
                            </userDefinedRuntimeAttributes>
                        </view>
                    </subviews>
                    <constraints>
                        <constraint firstAttribute="height" constant="6" id="zRt-Eh-zot"/>
                    </constraints>
                </stackView>
                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="l4S-dU-FVT">
                    <rect key="frame" x="96.5" y="801.5" width="221.5" height="20.5"/>
                    <fontDescription key="fontDescription" type="system" pointSize="17"/>
                    <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                    <nil key="highlightedColor"/>
                </label>
                <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="o70-jf-1ia">
                    <rect key="frame" x="60" y="69" width="294" height="581"/>
                    <color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                </view>
                <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="qAP-z1-ph6">
                    <rect key="frame" x="183.5" y="696" width="47" height="60"/>
                    <constraints>
                        <constraint firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="lbZ-b0-JQ6"/>
                        <constraint firstAttribute="height" constant="60" id="lpk-mw-Ydj"/>
                    </constraints>
                </imageView>
            </subviews>
            <viewLayoutGuide key="safeArea" id="9lf-e8-SmZ"/>
            <color key="backgroundColor" systemColor="systemBackgroundColor"/>
            <constraints>
                <constraint firstItem="qAP-z1-ph6" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="0MA-kq-exD"/>
                <constraint firstItem="qAP-z1-ph6" firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="4yn-TJ-yqm"/>
                <constraint firstAttribute="trailing" secondItem="vtt-nP-K7e" secondAttribute="trailing" id="6aW-K2-jlG"/>
                <constraint firstItem="qAP-z1-ph6" firstAttribute="top" secondItem="lfu-gT-avH" secondAttribute="bottom" constant="20" id="Fh8-GZ-8yL"/>
                <constraint firstItem="wyh-9h-sMx" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="I6p-U2-cTt"/>
                <constraint firstItem="vtt-nP-K7e" firstAttribute="top" secondItem="UFA-SZ-Qoz" secondAttribute="top" id="N3v-KG-OEA"/>
                <constraint firstItem="o70-jf-1ia" firstAttribute="top" secondItem="9lf-e8-SmZ" secondAttribute="top" constant="25" id="NPG-HN-ut1"/>
                <constraint firstItem="lfu-gT-avH" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="100" id="Uqs-Lb-Gn3"/>
                <constraint firstItem="wyh-9h-sMx" firstAttribute="top" secondItem="qAP-z1-ph6" secondAttribute="bottom" constant="15" id="Ur0-Sk-VRP"/>
                <constraint firstItem="o70-jf-1ia" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="XTc-Jw-HgS"/>
                <constraint firstItem="9lf-e8-SmZ" firstAttribute="bottom" secondItem="l4S-dU-FVT" secondAttribute="bottom" constant="40" id="aMm-fz-lIb"/>
                <constraint firstItem="vtt-nP-K7e" firstAttribute="leading" secondItem="UFA-SZ-Qoz" secondAttribute="leading" id="ct7-fr-gJL"/>
                <constraint firstItem="lfu-gT-avH" firstAttribute="top" secondItem="o70-jf-1ia" secondAttribute="bottom" constant="20" id="e65-sJ-G8O"/>
                <constraint firstItem="o70-jf-1ia" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="60" id="g9o-8i-MtB"/>
                <constraint firstItem="l4S-dU-FVT" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="hd0-Yp-38K"/>
                <constraint firstItem="l4S-dU-FVT" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="bottom" constant="10" id="lo9-cy-Wk0"/>
                <constraint firstItem="lfu-gT-avH" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="nV7-Zn-tQz"/>
                <constraint firstAttribute="bottom" secondItem="vtt-nP-K7e" secondAttribute="bottom" id="okN-H6-tDN"/>
            </constraints>
            <point key="canvasLocation" x="133" y="154"/>
        </view>
    </objects>
    <resources>
        <image name="Scan-1.png" width="170.5" height="218.5"/>
        <systemColor name="systemBackgroundColor">
            <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
        </systemColor>
    </resources>
</document>