Commit 702ef53a authored by Itsol-AnhNguyen's avatar Itsol-AnhNguyen

add customer view

parent 529a1ac4
File added
......@@ -69,6 +69,10 @@
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56B24EA83C800C161F2 /* Place within the box.png */; };
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56C24EA83C900C161F2 /* Avoid glare.png */; };
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56D24EA83C900C161F2 /* Do not place outside.png */; };
A442B6EE25299DED0058D675 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */; };
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6EF25299E160058D675 /* SBKValidateCardView.xib */; };
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */; };
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */; };
CCCF85EB83511B97EF23244B /* Pods_OCR_SDK.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */; };
/* End PBXBuildFile section */
......@@ -156,6 +160,10 @@
95FAF56B24EA83C800C161F2 /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
95FAF56C24EA83C900C161F2 /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
95FAF56D24EA83C900C161F2 /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SBKValidateCardView.swift; path = "../../../../ios-sdk/OCR-SDK/UI/SBKCaptureCard/SBKValidateCardView.swift"; sourceTree = "<group>"; };
A442B6EF25299E160058D675 /* SBKValidateCardView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKValidateCardView.xib; sourceTree = "<group>"; };
A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKRecordFaceView.swift; sourceTree = "<group>"; };
A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKRecordFaceView.xib; sourceTree = "<group>"; };
B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
......@@ -350,6 +358,8 @@
children = (
9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */,
955105AA247774CC0053036F /* SBKCaptureCardVC.swift */,
A442B6ED25299DED0058D675 /* SBKValidateCardView.swift */,
A442B6EF25299E160058D675 /* SBKValidateCardView.xib */,
);
path = SBKCaptureCard;
sourceTree = "<group>";
......@@ -367,6 +377,8 @@
children = (
9580130D2489F1EA00846F8A /* SBKRecordFace.swift */,
9580130E2489F1EA00846F8A /* SBKRecordFace.xib */,
A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */,
A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */,
);
path = SBKRecordFace;
sourceTree = "<group>";
......@@ -491,6 +503,8 @@
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */,
95FAB2672499C89400CE7913 /* rotate.png in Resources */,
95FAB20B24986B9600CE7913 /* valid_idcard.tflite in Resources */,
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */,
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */,
955BECE624935A14001FB052 /* ic_record.png in Resources */,
955105FA2477B52C0053036F /* back.png in Resources */,
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
......@@ -547,11 +561,13 @@
buildActionMask = 2147483647;
files = (
955105AD247774CC0053036F /* Loadding.swift in Sources */,
A442B6EE25299DED0058D675 /* SBKValidateCardView.swift in Sources */,
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */,
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */,
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */,
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */,
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */,
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */,
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */,
955105C3247774CC0053036F /* SBKResultFaceVC.swift in Sources */,
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */,
......
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>
......@@ -56,11 +56,11 @@ public class SB_KYC_SDK {
switch resultVaidate {
case .ERROR:
return -1
case .IMAGEFAKE:
case .IMAGE_FAKE:
return 0
case .IMAGEFRONT:
case .IMAGE_FRONT:
return 1
case .IMAGEBACK:
case .IMAGE_BACK:
return 2
}
}
......
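The rename above leaves the SDK's integer contract intact: -1 for ERROR, 0 for IMAGE_FAKE, 1 for IMAGE_FRONT, 2 for IMAGE_BACK. A caller-side sketch of decoding those codes (the helper name is hypothetical, not part of this commit):

// Hypothetical helper: maps the integer codes returned by SB_KYC_SDK's
// validate API back to human-readable descriptions.
func describeValidateResult(_ code: Int) -> String {
    switch code {
    case -1: return "Validation error"
    case 0: return "Fake or unreadable card image"
    case 1: return "Front of card detected"
    case 2: return "Back of card detected"
    default: return "Unknown result"
    }
}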
......@@ -194,7 +194,7 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
// Photo capture action
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGEFRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGEBACK && self.checkScreen == 2) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGE_BACK && self.checkScreen == 2) {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
......@@ -265,7 +265,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
viewWithTag.removeFromSuperview()
}
self.statusValidateImage = validateImageCard
if validateImageCard == ValidateCard.IMAGEFAKE || validateImageCard == .ERROR {
if validateImageCard == ValidateCard.IMAGE_FAKE || validateImageCard == .ERROR {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
......@@ -275,7 +275,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
self.viewCamera.addSubview(overlay)
}
if validateImageCard == .IMAGEFRONT && self.checkScreen == 2 {
if validateImageCard == .IMAGE_FRONT && self.checkScreen == 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
......@@ -285,7 +285,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
self.viewCamera.addSubview(overlay)
}
if validateImageCard == .IMAGEBACK && self.checkScreen == 1 {
if validateImageCard == .IMAGE_BACK && self.checkScreen == 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
......@@ -295,7 +295,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
self.viewCamera.addSubview(overlay)
}
if (self.statusValidateImage == ValidateCard.IMAGEFRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGEBACK && self.checkScreen == 2) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGE_BACK && self.checkScreen == 2) {
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
......
//
// SBKValidateCardView.swift
// OCR-SDK
//
// Created by itsol on 8/25/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
import AVFoundation
public class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
let nibName = "SBKValidateCardView"
var contentView:UIView?
@IBOutlet public weak var viewCamera: UIView!
@IBOutlet public weak var btnTakePhoto: UIButton!
public var captureSession: AVCaptureSession = AVCaptureSession()
public var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
public var videoPreviewLayer: AVCaptureVideoPreviewLayer!
public let videoDataOutput = AVCaptureVideoDataOutput()
public var inputCamera: AVCaptureDeviceInput!
var descriptionScreen: String = "Front of your personal card".localized()
var checkScreen: Int = 1 // checkScreen = 1: front-of-card screen, checkScreen = 2: back-of-card screen
var idFront: String = ""
var URLToken: String = ""
var statusTakePhoto: Bool = true
var statusValidateImage: ValidateCard = .ERROR
var statusScreen: String = "vertical" // "vertical" or "horizontal"
public var typeCamera: TypeCard = TypeCard.FRONT
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ data: Data?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
public required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
public override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
func commonInit() {
guard let view = loadViewFromNib() else { return }
view.frame = self.bounds
self.addSubview(view)
contentView = view
AVCaptureDevice.requestAccess(for: .video, completionHandler: { (granted: Bool) in
if !granted {
self.completionSuccessCardStep(nil, nil, false)
return
}
})
if AVCaptureDevice.authorizationStatus(for: .video) == .denied {
self.completionSuccessCardStep(nil, nil, false)
return
}
self.loadCamera()
}
func loadViewFromNib() -> UIView? {
let bundle = Bundle(for: type(of: self))
let nib = UINib(nibName: nibName, bundle: bundle)
return nib.instantiate(withOwner: self, options: nil).first as? UIView
}
public func setButtonImage(iconTakeCard: UIImage? = nil, iconFrameTopLeft: UIImage? = nil, iconFrameTopRight: UIImage? = nil, iconFrameBottomLeft: UIImage? = nil, iconFrameBottomRight: UIImage? = nil) {
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
let topLeft = CGPoint(x: fWidth/2-squareWidth/3, y: fHeight/4)
if iconTakeCard != nil {
self.btnTakePhoto.setImage(iconTakeCard, for: .normal)
}
if iconFrameTopLeft != nil {
let ivLeftTop = UIImageView(image: iconFrameTopLeft)
ivLeftTop.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20, width: 50, height: 50)
self.viewCamera.addSubview(ivLeftTop)
}
if iconFrameTopRight != nil {
let ivRightTop = UIImageView(image: iconFrameTopRight)
ivRightTop.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth/20 + fWidth * 18 / 20 - 50, y: topLeft.y - fHeight/20, width: 50, height: 50)
self.viewCamera.addSubview(ivRightTop)
}
if iconFrameBottomLeft != nil {
let ivLeftBottom = UIImageView(image: iconFrameBottomLeft)
ivLeftBottom.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20 + fWidth * 18 * 3 / 20 / 4 - 50, width: 50, height: 50)
self.viewCamera.addSubview(ivLeftBottom)
}
if iconFrameBottomRight != nil {
let ivRightBottom = UIImageView(image: iconFrameBottomRight)
ivRightBottom.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth/20 + fWidth * 18 / 20 - 50, y: topLeft.y - fHeight/20 + fWidth * 18 * 3 / 20 / 4 - 50, width: 50, height: 50)
self.viewCamera.addSubview(ivRightBottom)
}
}
public func startCamera() {
self.captureSession.startRunning()
}
public func stopCamera() {
self.captureSession.stopRunning()
}
// Configure the camera
func loadCamera() {
captureSession.sessionPreset = .hd1920x1080
if #available(iOS 11.1, *) {
guard let backCamera = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .back).devices.first else {
print("Unable to access back camera!")
return
}
do {
self.inputCamera = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(self.inputCamera) {
captureSession.addInput(self.inputCamera)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
}
// Set up the preview layer frame
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.connection?.videoOrientation = .portrait
viewCamera.layer.addSublayer(videoPreviewLayer)
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
let overlay = createOverlay(frame: self.bounds)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
// Define the card placement region
func createOverlay(frame: CGRect) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
let topLeft = CGPoint(x: fWidth/2-squareWidth/3, y: fHeight/4)
path.addRoundedRect(in: CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 0, cornerHeight: 0)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
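// Even-odd fill: the card window is covered by both paths, so the mask
// punches it out of the dimmed overlay.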
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
// Process the photo data after capture
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
// self.captureSession.removeOutput(self.stillImageOutput)
let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height * 6 / 20 , width: self.viewCamera.frame.size.width * 18 / 20, height: self.viewCamera.frame.size.width * 18 * 3 / 20 / 4), scale: 1.0)
self.completionSuccessCardStep(nil, cropImage!.pngData()!, nil)
}
// Crop the image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 10
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
// Photo capture action
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
} else {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
}
}
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if self.statusScreen == "horizontal" {
imageOutput = validateView.cropImageHorizontal(image: imageInput, rect: CGRect(x: imageInput.size.width * 1 / 10, y: imageInput.size.height * 3 / 20, width: imageInput.size.width * 8 / 10, height: imageInput.size.height * 9 / 10), scale: 1.0)
} else {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 2 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4), scale: 1.0)
}
let ciimage = CIImage(image: imageOutput!)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
}
extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
if captureSession.canAddOutput(self.videoDataOutput) {
self.captureSession.addOutput(self.videoDataOutput)
}
//self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
public func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: self.resizeImageCard(pixelBuffer: imageFrameInput))
DispatchQueue.main.async {
self.completionSuccessCardStep(validateImageCard, nil, nil)
self.statusValidateImage = validateImageCard
}
}
}
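A minimal host-app sketch (assumed integration code, not part of this commit) showing how the new card view is embedded and its single callback consumed; the view controller name is hypothetical:

import UIKit
import SB_KYC_SDK // module name as declared in the XIBs

class CardCaptureViewController: UIViewController {
    let cardView = SBKValidateCardView(frame: .zero)

    override func viewDidLoad() {
        super.viewDidLoad()
        cardView.frame = view.bounds
        cardView.typeCamera = .FRONT
        view.addSubview(cardView)

        // The callback multiplexes live validation state, the captured image,
        // and the camera-permission result.
        cardView.completionSuccessCardStep = { validate, imageData, permissionCamera in
            if permissionCamera == false {
                print("Camera permission denied")
            } else if let imageData = imageData {
                print("Captured card image: \(imageData.count) bytes")
            } else if let validate = validate {
                print("Live validation state: \(validate.rawValue)")
            }
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        cardView.stopCamera()
    }
}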
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097.3" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKValidateCardView" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnTakePhoto" destination="MUh-i4-cOT" id="XIX-vk-Xo9"/>
<outlet property="viewCamera" destination="vId-Gb-QYw" id="dO3-YL-g95"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="tMz-JL-QhO">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="vId-Gb-QYw">
<rect key="frame" x="0.0" y="44" width="414" height="698"/>
<color key="backgroundColor" white="0.66666666669999997" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="MUh-i4-cOT">
<rect key="frame" x="182" y="782" width="50" height="50"/>
<constraints>
<constraint firstAttribute="height" constant="50" id="dPQ-aO-sFP"/>
<constraint firstAttribute="width" constant="50" id="nMV-8l-zek"/>
</constraints>
<state key="normal" image="iconCap.png"/>
<connections>
<action selector="onCapturePhoto:" destination="-1" eventType="touchUpInside" id="bv8-Oo-gSm"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="MUh-i4-cOT" firstAttribute="top" secondItem="vId-Gb-QYw" secondAttribute="bottom" constant="40" id="5HA-ie-rYI"/>
<constraint firstItem="vId-Gb-QYw" firstAttribute="top" secondItem="hB9-pv-7hU" secondAttribute="top" id="9yW-SW-kTY"/>
<constraint firstItem="vId-Gb-QYw" firstAttribute="leading" secondItem="tMz-JL-QhO" secondAttribute="leading" id="COE-Wm-3Sg"/>
<constraint firstItem="hB9-pv-7hU" firstAttribute="bottom" secondItem="MUh-i4-cOT" secondAttribute="bottom" constant="30" id="G5T-sy-Cra"/>
<constraint firstItem="MUh-i4-cOT" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="RAV-Ta-Xst"/>
<constraint firstAttribute="trailing" secondItem="vId-Gb-QYw" secondAttribute="trailing" id="U1Q-AI-Opp"/>
</constraints>
<viewLayoutGuide key="safeArea" id="hB9-pv-7hU"/>
<point key="canvasLocation" x="133" y="154"/>
</view>
</objects>
<resources>
<image name="iconCap.png" width="172" height="172"/>
</resources>
</document>
......@@ -402,9 +402,3 @@ class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDeleg
}
}
enum StatusFace: String {
case TOLEFT
case TORIGHT
case STRAIGHTFACE
case ERROR
}
//
// SBKRecordFaceView.swift
// OCR-SDK
//
// Created by itsol on 8/25/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
import AVFoundation
import Vision
import CoreML
public class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let nibName = "SBKRecordFaceView"
var contentView:UIView?
@IBOutlet public weak var viewBackground: UIView!
@IBOutlet public weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
//@IBOutlet public weak var viewOval: UIView!
@IBOutlet public weak var btnRecord: UIButton!
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
private let videoDataOutput = AVCaptureVideoDataOutput()
private var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
private var result: [Float]?
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
private let delayBetweenInferencesMs: Double = 1000
var numberPass = 0
var numberTrue = 0
var numberFalse = 0
var space: Float = 0.0
var dataImageSuccess: [Data] = []
var checkStartRecord: Bool = false
var timer = Timer()
var timeRecord: Int = 0
var checkStatusRecord: Bool = false
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?, _ timeRecord: Int?)->Void = {_,_,_,_ in}
public var timeSpace: Int = 3
public var zoom: CGFloat = 1.0
public var imageStartRecord: UIImage?
public required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
public override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
func commonInit() {
guard let view = loadViewFromNib() else { return }
view.frame = self.bounds
self.addSubview(view)
contentView = view
if AVCaptureDevice.authorizationStatus(for: .video) == .denied {
return
}
self.loadCamera()
}
func loadViewFromNib() -> UIView? {
let bundle = Bundle(for: type(of: self))
let nib = UINib(nibName: nibName, bundle: bundle)
return nib.instantiate(withOwner: self, options: nil).first as? UIView
}
func loadCamera() {
self.lbCopyright.text = " "
self.addCameraInput()
self.showCameraFeed()
self.getCameraFrames()
DispatchQueue.main.async {
self.captureSession.startRunning()
}
}
public func startCamera() {
self.captureSession.startRunning()
}
public func stopCamera() {
self.captureSession.stopRunning()
}
public func setOverlay(type: TypeOverlay) {
switch type {
case .OVAL:
print("oval")
case .SQUARE:
if let viewWithTag = self.viewWithTag(1) {
viewWithTag.removeFromSuperview()
}
case .CIRCLE:
if let viewWithTag = self.viewWithTag(1) {
viewWithTag.removeFromSuperview()
}
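// Note: 896 is the XIB design height (iPhone XR canvas); layout values below
// are rescaled from that canvas to the runtime bounds.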
let originYLBCopyright = self.lbCopyright.frame.origin.y / 896 * self.bounds.height
let heightLBCopyright = self.lbCopyright.frame.height / 896 * self.bounds.height
let locationTop = originYLBCopyright + heightLBCopyright + 35
let originYLBDescription = self.lbDescription.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
let viewOval = UIView(frame: CGRect(x: CGFloat( Int(self.bounds.origin.x + 40 / 896 * self.bounds.height)), y: CGFloat( Int(locationTop)), width: CGFloat( Int(self.bounds.width - 80 / 896 * self.bounds.height)).rounded(.up), height: CGFloat( Int(heightOval - 65))))
viewOval.backgroundColor = UIColor.black.withAlphaComponent(0.6)
viewOval.tag = 2
self.addSubview(viewOval)
self.reloadInputViews()
let pathss: CGMutablePath = {
let path = CGMutablePath()
if viewOval.frame.height > viewOval.frame.width {
path.addArc(center: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height / 2),
radius: viewOval.frame.width / 2,
startAngle: 0.0,
endAngle: 2.0 * .pi,
clockwise: false)
} else {
path.addArc(center: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height / 2),
radius: viewOval.frame.height / 2,
startAngle: 0.0,
endAngle: 2.0 * .pi,
clockwise: false)
}
return path
}()
pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
let maskLayer = CAShapeLayer(layer: viewOval.layer)
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = pathss
maskLayer.fillRule = .evenOdd
viewOval.layer.mask = maskLayer
viewOval.clipsToBounds = true
}
}
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
let originYLBCopyright = self.lbCopyright.frame.origin.y / 896 * self.bounds.height
let heightLBCopyright = self.lbCopyright.frame.height / 896 * self.bounds.height
let locationTop = originYLBCopyright + heightLBCopyright + 35
let originYLBDescription = self.lbDescription.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
let viewOval = UIView(frame: CGRect(x: CGFloat( Int(self.bounds.origin.x + 40 / 896 * self.bounds.height)), y: CGFloat( Int(locationTop)), width: CGFloat( Int(self.bounds.width - 80 / 896 * self.bounds.height)).rounded(.up), height: CGFloat( Int(heightOval - 65))))
viewOval.backgroundColor = UIColor.black.withAlphaComponent(0.6)
viewOval.tag = 1
self.addSubview(viewOval)
self.reloadInputViews()
let pathss: CGMutablePath = {
let path = CGMutablePath()
path.move(to: CGPoint(x: 0, y: viewOval.frame.height/2))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: 0, y: 0 + viewOval.frame.width / 2))
}
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: 0), control: CGPoint(x: 0, y: 0))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width, y: 0 + viewOval.frame.width / 2), control: CGPoint(x: viewOval.frame.width, y: 0))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height / 2))
}
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height - viewOval.frame.width / 2))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height), control: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height))
path.addQuadCurve(to: CGPoint(x: 0, y: viewOval.frame.height - viewOval.frame.width / 2), control: CGPoint(x: 0, y: viewOval.frame.height))
path.addLine(to: CGPoint(x: 0, y: viewOval.frame.height / 2))
return path
}()
pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
let maskLayer = CAShapeLayer(layer: viewOval.layer)
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = pathss
maskLayer.fillRule = .evenOdd
viewOval.layer.mask = maskLayer
viewOval.clipsToBounds = true
}
func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
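// Assumed output layout of the liveness model: result[0] = fake score,
// result[1] = real score (consistent with the comparisons below).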
if self.checkStatusRecord {
(result![0] < result![1]) ? (self.numberTrue += 1) : (self.numberFalse += 1)
self.numberPass += 1
DispatchQueue.main.async {
let ciimage : CIImage = CIImage(cvPixelBuffer: pixelBuffer)
let imageView : UIImage = SBKValidateInput.shared.convertCIToUIImage(cmage: ciimage)
if self.result![0] < self.result![1] {
if statusFace == .STRAIGHTFACE && 0 <= self.timeRecord && self.timeRecord < self.timeSpace {
if self.dataImageSuccess.count == 0 {
self.dataImageSuccess.append(imageView.pngData()!)
}
self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil, nil)
if 1 <= self.timeRecord {
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = " ".localized()
}
} else if statusFace != .STRAIGHTFACE && 1 <= self.timeRecord && self.timeRecord < self.timeSpace {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_STRAIGHT_FAILD, nil, nil, nil)
} else if statusFace == .TORIGHT && self.timeSpace <= self.timeRecord && self.timeRecord < self.timeSpace * 2 {
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil, nil)
if self.dataImageSuccess.count == 1 {
self.dataImageSuccess.append(imageView.pngData()!)
}
} else if statusFace != .TORIGHT && self.timeSpace <= self.timeRecord && self.timeRecord < self.timeSpace * 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_RIGHT_FAILD, nil, nil, nil)
} else if statusFace == .TOLEFT && self.timeSpace * 2 <= self.timeRecord && self.timeRecord < self.timeSpace * 3 {
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil, nil)
if self.dataImageSuccess.count == 2 {
self.dataImageSuccess.append(imageView.pngData()!)
}
} else if statusFace != .TOLEFT && self.timeSpace * 2 <= self.timeRecord && self.timeRecord < self.timeSpace * 3 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_LEFT_FAILD, nil, nil, nil)
}
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil, nil)
}
}
} else {
if result![0] < result![1] {
DispatchQueue.main.async {
self.checkStartRecord = true
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = " ".localized()//"Bạn đã sẵn sàng. Hãy bắt đầu!"
self.completionSuccessFaceRecordStep(.FACE_READY, nil, nil, nil)
}
} else {
DispatchQueue.main.async {
self.checkStartRecord = false
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect face, please check!".localized()
self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil, nil)
}
}
}
}
@IBAction func onRecord(_ sender: Any) {
if !self.checkStatusRecord && self.checkStartRecord {
self.startTimer()
self.checkStatusRecord = true
self.timeRecord = 0
let image = UIImage(named: "player_stop", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.btnRecord.setImage(imageStartRecord ?? image, for: .normal)
}
}
public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
if #available(iOS 11.0, *) {
self.detectFace(in: self.resizeImageFace(pixelBuffer: frame))
}
}
private func addCameraInput() {
self.captureSession.sessionPreset = .medium
if #available(iOS 11.1, *) {
guard let device = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .front).devices.first else {
print("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
return
}
let cameraInput = try! AVCaptureDeviceInput(device: device)
if captureSession.canAddInput(cameraInput) {
self.captureSession.addInput(cameraInput)
}
do {
try device.lockForConfiguration()
defer { device.unlockForConfiguration() }
device.videoZoomFactor = self.zoom
} catch {
print("\(error.localizedDescription)")
}
}
}
private func showCameraFeed() {
self.previewLayer.videoGravity = .resizeAspectFill
self.viewBackground.layer.addSublayer(self.previewLayer)
self.previewLayer.frame = self.bounds
let overlayOval = createOverlayOval(frame: self.bounds, xOffset: viewBackground.frame.midX, yOffset: viewBackground.frame.midY, radius: 50.0)
self.viewBackground.addSubview(overlayOval)
self.createOverlay(frame: self.bounds, xOffset: self.bounds.midX, yOffset: self.bounds.midY, radius: 50.0)
}
func createOverlayOval(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let originYLBCopyright = self.lbCopyright.frame.origin.y / 896 * self.bounds.height
let heightLBCopyright = self.lbCopyright.frame.height / 896 * self.bounds.height
let locationTop = originYLBCopyright + heightLBCopyright + 35
let originYLBDescription = self.lbDescription.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
path.addRoundedRect(in: CGRect(x: CGFloat( Int(self.bounds.origin.x + 40 / 896 * self.bounds.height)), y: CGFloat( Int(locationTop)), width: CGFloat( Int(self.bounds.width - 80 / 896 * self.bounds.height)), height: CGFloat( Int(heightOval - 65))), cornerWidth: 0, cornerHeight: 0)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video), connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
@available(iOS 11.0, *)
private func detectFace(in image: CVPixelBuffer) {
let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
DispatchQueue.main.async {
if (request.results as? [VNFaceObservation]) != nil {
if let results = request.results as? [VNFaceObservation], results.count > 0 {
if results.count > 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
self.completionSuccessFaceRecordStep(.MANY_FACE, nil, nil, nil)
} else {
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!)
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: statusString)
}
}
} else {
if !self.checkStatusRecord {
self.checkStartRecord = false
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()//"Đặt vị trí mặt bạn vào hình"
self.completionSuccessFaceRecordStep(.NO_FACE, nil, nil, nil)
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR)
}
}
}
}
}
})
if #available(iOS 12.0, *) {
// Force the revision to 2 (68-points) even on iOS 13 or greater
// when VNDetectFaceLandmarksRequestRevision3 is available.
faceDetectionRequest.revision = 2
}
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
try? imageRequestHandler.perform([faceDetectionRequest])
}
func startTimer() {
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(setLableRecord(timer:)), userInfo: nil, repeats: true)
}
func resetRecord() {
self.numberPass = 0
self.numberTrue = 0
self.numberFalse = 0
self.space = 0.0
self.dataImageSuccess.removeAll()
self.checkStartRecord = false
self.checkStatusRecord = false
let image = UIImage(named: "ic_record", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.btnRecord.setImage(image, for: .normal)
}
@objc func setLableRecord(timer: Timer) {
self.timeRecord += 1
self.completionSuccessFaceRecordStep(nil, nil, nil, self.timeRecord)
switch self.timeRecord {
case 1:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"vui lòng nhìn thẳng"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_STRAIGHT, nil, self.timeRecord)
case self.timeSpace:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"Quay sang phải"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_RIGHT, nil, self.timeRecord)
case self.timeSpace * 2:
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = " ".localized()//"Quay sang trái"
self.completionSuccessFaceRecordStep(nil, .STEP_FACE_LEFT, nil, self.timeRecord)
case self.timeSpace * 3:
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
]
self.completionSuccessFaceRecordStep(nil, nil, data, nil)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
}
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = " ".localized()//"Chưa đạt yêu cầu, hãy thử lại!"
self.completionSuccessFaceRecordStep(.INVALID, nil, nil, nil)
}
case self.timeSpace * 4:
self.timer.invalidate()
self.resetRecord()
self.timeRecord = 0
default:
break
}
}
func resizeImageFace(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
let imageOutput = validateView.cropImageFace(image: imageInput, rect: CGRect(x: imageInput.size.width / 6, y: imageInput.size.height / 12, width: imageInput.size.width * 4 / 6, height: imageInput.size.height * 8 / 10 ), scale: 1.0)
let ciimage = CIImage(image: imageOutput!)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
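// Estimates head yaw from 2D landmarks: compares the areas of the two
// triangles spanned by a jaw landmark (faceContour[5]), a nose landmark
// (nose[4]) and each end of the face contour; when one side's share of the
// total area exceeds 0.6 the face is classified as turned, otherwise straight.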
@available(iOS 11.0, *)
func checkFaceRightLeft(landmarks: VNFaceLandmarks2D) -> StatusFace {
let face = landmarks.faceContour?.normalizedPoints
let nose = landmarks.nose?.normalizedPoints
let faceLeftPoint = face![0]
let faceRightPoint = face![face!.count - 1]
let faceBottomPoint = face![5]
let nosePoint = nose![4]
let sRight = (faceBottomPoint.x - faceLeftPoint.x) * (nosePoint.y - faceLeftPoint.y) - (nosePoint.x - faceLeftPoint.x) * (faceBottomPoint.y - faceLeftPoint.y)
let sLeft = (faceBottomPoint.x - faceRightPoint.x) * (nosePoint.y - faceRightPoint.y) - (nosePoint.x - faceRightPoint.x) * (faceBottomPoint.y - faceRightPoint.y)
let sFaceRight = sqrt(sRight * sRight) / 2
let sFaceLeft = sqrt(sLeft * sLeft) / 2
let totalS = sFaceLeft + sFaceRight
if sFaceLeft / totalS > 0.6 {
return .TOLEFT
} else if sFaceRight / totalS > 0.6 {
return .TORIGHT
} else {
return .STRAIGHTFACE
}
}
}
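A minimal host-app sketch (assumed integration code, not part of this commit) for the face-recording view; the controller name is hypothetical:

import UIKit
import SB_KYC_SDK // module name as declared in the XIBs

class FaceRecordViewController: UIViewController {
    let faceView = SBKRecordFaceView(frame: .zero)

    override func viewDidLoad() {
        super.viewDidLoad()
        faceView.frame = view.bounds
        faceView.timeSpace = 3 // seconds allotted to each pose (straight, right, left)
        view.addSubview(faceView)

        faceView.completionSuccessFaceRecordStep = { validate, step, data, timeRecord in
            if let step = step {
                print("Prompt step: \(step.rawValue) at second \(timeRecord ?? 0)")
            }
            if let validate = validate {
                print("Validation state: \(validate.rawValue)")
            }
            if let data = data {
                // Keys emitted by the view: "imagestraight", "imageRight", "imageLeft"
                print("Recording finished with \(data.count) face images")
            }
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        faceView.stopCamera()
    }
}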
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097.2" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKRecordFaceView" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnRecord" destination="kiz-B2-kqL" id="OpP-C8-4lo"/>
<outlet property="lbCopyright" destination="fBN-yA-pbq" id="Qwz-Ye-DJs"/>
<outlet property="lbDescription" destination="3NW-5M-g0h" id="e9r-PT-NWj"/>
<outlet property="viewBackground" destination="Z9C-Eh-mo9" id="pNl-8k-48n"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="UFA-SZ-Qoz">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Z9C-Eh-mo9">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="hK9-ib-LmH">
<rect key="frame" x="0.0" y="144.5" width="414" height="611"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="fBN-yA-pbq">
<rect key="frame" x="205" y="84" width="4.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="3NW-5M-g0h">
<rect key="frame" x="10" y="795.5" width="394" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="kiz-B2-kqL">
<rect key="frame" x="182" y="826" width="50" height="50"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstAttribute="height" constant="50" id="Zge-fG-frc"/>
<constraint firstAttribute="width" constant="50" id="keQ-2a-Xpa"/>
</constraints>
<state key="normal" image="ic_record.png"/>
<connections>
<action selector="onRecord:" destination="-1" eventType="touchUpInside" id="ILY-WV-Xbt"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="3NW-5M-g0h" firstAttribute="leading" secondItem="UFA-SZ-Qoz" secondAttribute="leading" constant="10" id="4ym-MQ-aiv"/>
<constraint firstAttribute="bottom" secondItem="kiz-B2-kqL" secondAttribute="bottom" constant="20" id="7Ve-1m-MDs"/>
<constraint firstItem="Z9C-Eh-mo9" firstAttribute="leading" secondItem="UFA-SZ-Qoz" secondAttribute="leading" id="8JA-pa-TC1"/>
<constraint firstItem="3NW-5M-g0h" firstAttribute="centerX" secondItem="UFA-SZ-Qoz" secondAttribute="centerX" id="II3-ki-ySd"/>
<constraint firstAttribute="trailing" secondItem="Z9C-Eh-mo9" secondAttribute="trailing" id="REw-9J-X08"/>
<constraint firstItem="fBN-yA-pbq" firstAttribute="centerX" secondItem="UFA-SZ-Qoz" secondAttribute="centerX" id="Ruf-Rt-odr"/>
<constraint firstItem="hK9-ib-LmH" firstAttribute="top" secondItem="fBN-yA-pbq" secondAttribute="bottom" constant="40" id="TTn-Mn-7u7"/>
<constraint firstItem="hK9-ib-LmH" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="Uxb-XZ-mT4"/>
<constraint firstItem="3NW-5M-g0h" firstAttribute="top" secondItem="hK9-ib-LmH" secondAttribute="bottom" constant="40" id="Wii-y3-Z3n"/>
<constraint firstAttribute="bottom" secondItem="Z9C-Eh-mo9" secondAttribute="bottom" id="X1j-RU-oQ0"/>
<constraint firstItem="Z9C-Eh-mo9" firstAttribute="top" secondItem="UFA-SZ-Qoz" secondAttribute="top" id="ddy-Yj-eLX"/>
<constraint firstItem="hK9-ib-LmH" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" id="ejz-BC-pDd"/>
<constraint firstItem="kiz-B2-kqL" firstAttribute="centerX" secondItem="UFA-SZ-Qoz" secondAttribute="centerX" id="li9-pn-4K7"/>
<constraint firstItem="kiz-B2-kqL" firstAttribute="top" secondItem="3NW-5M-g0h" secondAttribute="bottom" constant="10" id="pYI-bW-8SR"/>
<constraint firstItem="fBN-yA-pbq" firstAttribute="top" secondItem="9lf-e8-SmZ" secondAttribute="top" constant="40" id="xcc-3V-dgz"/>
</constraints>
<viewLayoutGuide key="safeArea" id="9lf-e8-SmZ"/>
<point key="canvasLocation" x="133" y="154"/>
</view>
</objects>
<resources>
<image name="ic_record.png" width="100" height="100"/>
</resources>
</document>
......@@ -31,3 +31,49 @@ struct Global {
static var imageCard1: Data?
static var imageCard2: Data?
}
enum StatusFace: String {
case TOLEFT
case TORIGHT
case STRAIGHTFACE
case ERROR
}
public enum ValidateFace: String {
case FACE_STRAIGHT
case FACE_STRAIGHT_FAILD
case FACE_RIGHT
case FACE_RIGHT_FAILD
case FACE_LEFT
case FACE_LEFT_FAILD
case FACE_FAKE
case FACE_READY
case MANY_FACE
case NO_FACE
case ERROR
case INVALID
}
public enum StepRecord: String {
case STEP_FACE_STRAIGHT
case STEP_FACE_LEFT
case STEP_FACE_RIGHT
}
public enum ValidateCard: String {
case ERROR
case IMAGE_FAKE
case IMAGE_FRONT
case IMAGE_BACK
}
public enum TypeOverlay: String {
case OVAL
case CIRCLE
case SQUARE
}
public enum TypeCard: String {
case FRONT
case BACK
}
......@@ -29,11 +29,11 @@ class SBKValidateInput {
switch resultVaidate {
case .ERROR:
return -1
case .IMAGEFAKE:
case .IMAGE_FAKE:
return 0
case .IMAGEFRONT:
case .IMAGE_FRONT:
return 1
case .IMAGEBACK:
case .IMAGE_BACK:
return 2
}
}
......@@ -64,11 +64,11 @@ class SBKValidateInput {
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
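// The three outputs are treated as scores for [fake, front, back]; the largest decides the class.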
if result![0] > result![1] && result![0] > result![2]{
return .IMAGEFAKE
return .IMAGE_FAKE
} else if result![1] > result![0] && result![1] > result![2] {
return .IMAGEFRONT
return .IMAGE_FRONT
} else {
return .IMAGEBACK
return .IMAGE_BACK
}
}
......@@ -190,10 +190,3 @@ class SBKValidateInput {
return croppedImage
}
}
enum ValidateCard: String {
case ERROR
case IMAGEFAKE
case IMAGEFRONT
case IMAGEBACK
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
</dict>
</dict>
</plist>