Commit 529a1ac4 authored by Nguyen Huy Nhat Anh's avatar Nguyen Huy Nhat Anh

init new framework

parents
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:OCR-SDK.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1140"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "955105762477746A0053036F"
BuildableName = "SB_KYC_SDK.framework"
BlueprintName = "OCR-SDK"
ReferencedContainer = "container:OCR-SDK.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Release"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "955105762477746A0053036F"
BuildableName = "SB_KYC_SDK.framework"
BlueprintName = "OCR-SDK"
ReferencedContainer = "container:OCR-SDK.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict>
<key>955105762477746A0053036F</key>
<dict>
<key>primary</key>
<true/>
</dict>
</dict>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:OCR-SDK.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "B08B0591-1A45-4415-9C04-B83659FBFE35"
type = "0"
version = "2.0">
</Bucket>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
</dict>
</plist>
This diff is collapsed.
//
// OCR_SDK.h
// OCR-SDK
//
// Created by itsol on 5/22/20.
// Copyright © 2020 itsol. All rights reserved.
//
// Umbrella header for the OCR-SDK framework target. Only the version
// symbols are declared here; public headers should be imported below.
#import <Foundation/Foundation.h>
//! Project version number for OCR_SDK.
FOUNDATION_EXPORT double OCR_SDKVersionNumber;
//! Project version string for OCR_SDK.
FOUNDATION_EXPORT const unsigned char OCR_SDKVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <OCR_SDK/PublicHeader.h>
//
// OCRSDK.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
/// Public entry point of the KYC SDK.
///
/// Exposes configuration (`initSDK`), card validation (`eKycCardValid`),
/// face validation (`eKycFaceValid`) and the combined face-match upload
/// (`eKycFaceMatch`). All state is pushed into the shared `Global` config.
public class SB_KYC_SDK {
    /// Shared singleton instance used by host applications.
    public static let shared = SB_KYC_SDK()
    /// Completion carrying the server/result payload (nil on some paths).
    public typealias CompletionHandle = (_ data: [String: Any]?) -> Void
    /// Completion carrying the boolean outcome of a face validation.
    public typealias CompletionHandleFace = (_ data: Bool) -> Void
    // Tutorial screen pushed by initSDK; created eagerly with the default token URL.
    let viewController = SBKTutorialVC(URLToken: "http://sdk.sb.gotai.ml")

    /// Initializes the SDK: copies the host-supplied configuration into `Global`
    /// and pushes the tutorial screen onto the caller's navigation stack.
    /// - Parameters:
    ///   - viewCurrent: the presenting view controller (must be inside a navigation controller,
    ///     otherwise nothing is pushed — the optional chain below is a no-op).
    ///   - sdkConfig: dictionary of optional configuration keys; missing keys keep the
    ///     current `Global` defaults.
    ///   - completion: invoked with the final result of the tutorial flow.
    public func initSDK(viewCurrent: UIViewController, sdkConfig: [String: Any], completion: @escaping CompletionHandle) {
        DispatchQueue.main.async {
            Global.language = sdkConfig["language"] as? String ?? Global.language
            Global.url_font = sdkConfig["url_font"] as? String ?? Global.url_font
            Global.url_back = sdkConfig["url_back"] as? String ?? Global.url_back
            Global.url_face = sdkConfig["url_face"] as? String ?? Global.url_face
            Global.header = sdkConfig["header"] as? [String: Any] ?? Global.header
            Global.typeFace = sdkConfig["type_face"] as? String ?? Global.typeFace
            Global.colorConfig = sdkConfig["color"] as? String ?? Global.colorConfig
            Global.frontConfig = sdkConfig["front"] as? String ?? Global.frontConfig
            Global.copyright = sdkConfig["copyright"] as? String ?? Global.copyright
            Global.step = sdkConfig["step"] as? String ?? Global.step
            Global.colorButtonNext = sdkConfig["colorBackgroundButtonNext"] as? String ?? Global.colorButtonNext
            Global.colorButtonBack = sdkConfig["colorBackgroundButtonBack"] as? String ?? Global.colorButtonBack
            Global.colorTextButtonNext = sdkConfig["colorTextButtonNext"] as? String ?? Global.colorTextButtonNext
            Global.colorTextButtonBack = sdkConfig["colorTextButtonBack"] as? String ?? Global.colorTextButtonBack
            Global.colorTextPreview = sdkConfig["colorTextPreview"] as? String ?? Global.colorTextPreview
            // Acceptance threshold defaults to 70 when not provided.
            let ratio = sdkConfig["ratioPass"] as? Int ?? 70
            Global.ratioPass = Float(ratio)
            // BUGFIX: original code read sdkConfig[""] (empty key), so this flag could
            // never be configured by the host app.
            Global.useFileCer = sdkConfig["useFileCer"] as? Bool ?? Global.useFileCer
            self.viewController.completionSuccessTutorial = { [weak self] data in
                guard self != nil else { return }
                completion(data)
            }
            viewCurrent.navigationController?.pushViewController(self.viewController, animated: true)
            viewCurrent.navigationController?.setNavigationBarHidden(false, animated: false)
        }
    }

    /// Validates a captured card image.
    /// - Parameter image: camera frame to classify.
    /// - Returns: 0 = fake image, 1 = front side, 2 = back side, -1 = error.
    public func eKycCardValid(image: CVPixelBuffer) -> Int {
        let resultVaidate = SBKValidateInput.shared.didOutput(pixelBuffer: image)
        switch resultVaidate {
        case .ERROR:
            return -1
        case .IMAGEFAKE:
            return 0
        case .IMAGEFRONT:
            return 1
        case .IMAGEBACK:
            return 2
        }
    }

    /// Validates a face image asynchronously.
    /// - Parameters:
    ///   - image: camera frame containing the face.
    ///   - completion: true when the face passes validation.
    public func eKycFaceValid(image: CVPixelBuffer, completion: @escaping CompletionHandleFace) {
        SBKValidateInput.shared.validateFace(imageFace: image) { [weak self] data in
            guard self != nil else { return }
            completion(data)
        }
    }

    /// Validates the selfie, then uploads selfie + both card sides for matching.
    /// On any failure the completion receives `["error": ...]`.
    public func eKycFaceMatch(imageFace: CVPixelBuffer, imageCardFront: CVPixelBuffer, imageCardBack: CVPixelBuffer, completion: @escaping CompletionHandle) {
        SBKValidateInput.shared.validateFace(imageFace: imageFace) { [weak self] data in
            guard self != nil else { return }
            if data {
                Global.imageCard1 = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageCardFront).pngData()
                // BUGFIX: original passed imageCardFront here as well, so the back of
                // the card was silently replaced by a second copy of the front.
                Global.imageCard2 = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageCardBack).pngData()
                let imageFaceUI = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageFace).pngData()
                SBOCRRequest.shared.processFace(image: imageFaceUI!, pathURL: Global.url_face, idBack: "", idFront: "") { (errorCode, data) -> Void in
                    guard errorCode == nil else {
                        let error: [String: Any] = ["error": errorCode!]
                        completion(error)
                        return
                    }
                    completion(data)
                }
            } else {
                let error: [String: Any] = ["error": "image faild"]
                completion(error)
            }
        }
    }
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKCaptureFaceVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="lbCopyright" destination="sQx-YF-9mT" id="GIf-FP-b30"/>
<outlet property="lbDescription" destination="vEo-Bc-5XO" id="SRU-fS-EZI"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
<outlet property="viewOval" destination="Qyv-Ap-TFB" id="r4K-p7-GrS"/>
<outlet property="viewbaackground" destination="dHE-nU-pVB" id="MrX-Ag-s0Q"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="dHE-nU-pVB">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Qyv-Ap-TFB">
<rect key="frame" x="40" y="94.5" width="334" height="672"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="sQx-YF-9mT">
<rect key="frame" x="10" y="64" width="394" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="vEo-Bc-5XO">
<rect key="frame" x="10" y="771.5" width="394" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eoo-q0-oXp">
<rect key="frame" x="182" y="802" width="50" height="50"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstAttribute="width" constant="50" id="Ocl-5u-hSa"/>
<constraint firstAttribute="height" constant="50" id="RYC-E4-iS3"/>
</constraints>
<state key="normal" image="iconCap.png"/>
<connections>
<action selector="onCapture:" destination="-1" eventType="touchUpInside" id="Kll-et-YXX"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="dHE-nU-pVB" secondAttribute="trailing" id="C6C-oe-xWb"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="top" secondItem="Qyv-Ap-TFB" secondAttribute="bottom" constant="5" id="FIE-96-ylZ"/>
<constraint firstItem="eoo-q0-oXp" firstAttribute="top" secondItem="vEo-Bc-5XO" secondAttribute="bottom" constant="10" id="PPO-Iz-6Om"/>
<constraint firstAttribute="bottom" secondItem="dHE-nU-pVB" secondAttribute="bottom" id="SDS-kl-rae"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="eoo-q0-oXp" secondAttribute="bottom" constant="10" id="UgG-h0-6jb"/>
<constraint firstItem="dHE-nU-pVB" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" id="VGi-MV-mn2"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="10" id="Z61-q1-Mcf"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="a0C-ae-8Fa"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" constant="20" id="a72-fY-IOo"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="top" secondItem="sQx-YF-9mT" secondAttribute="bottom" constant="10" id="gUQ-xb-FQi"/>
<constraint firstItem="eoo-q0-oXp" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="jCw-e4-JLW"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="10" id="kR7-Ie-Uwj"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="nUl-ft-cgQ"/>
<constraint firstItem="dHE-nU-pVB" firstAttribute="top" secondItem="i5M-Pr-FkT" secondAttribute="top" id="rdM-v3-KGL"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="40" id="vxR-vG-Tjg"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="xRm-qx-ZZJ"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="133" y="124"/>
</view>
</objects>
<resources>
<image name="iconCap.png" width="172" height="172"/>
</resources>
</document>
This diff is collapsed.
This diff is collapsed.
//
// ResultFaceVC.swift
// SSSSS
//
// Created by itsol on 5/15/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
// Preview screen shown after a selfie is captured: displays the (optionally
// cropped) face image and submits it to the face endpoint on confirmation.
class SBKResultFaceVC: UIViewController {
@IBOutlet weak var imgFace: UIImageView!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
@IBOutlet weak var imagetest1: UIImageView!
@IBOutlet weak var imagetest2: UIImageView!
// Raw captured selfie bytes; force-unwrapped in viewDidAppear, so the caller
// MUST set this before presenting — TODO confirm all call sites do.
var imageData: Data?
var dataImageSuccess: [Data] = []
// Server-side identifiers of the previously uploaded card sides.
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var validateImage: Bool = true
var dataPush: [String:Any]?
static let shared = SBKResultFaceVC()
// Invoked with the server payload when the face request succeeds.
var completionSuccessFaceData: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
}
// Applies configurable UI styling from Global; currently almost fully disabled.
func loadConfigUI() {
//self.lbCopyright.text = Global.copyright
//self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
// self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonNext)
// self.btnClose.backgroundColor = UIColor.colorFromHexa(Global.colorButtonBack)
// self.btnNext.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonNext), for: .normal)
// self.btnClose.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonBack), for: .normal)
// NOTE(review): this compares against the empty string, so the (disabled) font
// overrides would only ever run when no custom font is configured — verify intent.
if Global.frontConfig == "" {
//self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
// self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnNext.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnClose.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
// Crashes if imageData was not set or is not a decodable image.
let imageCap = UIImage(data: imageData!)
self.imgFace.layer.cornerRadius = 8
let scale = imgFace.frame.width / imageCap!.size.width
let cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 10, y: imageCap!.size.height / 10, width: self.imgFace.frame.width, height: self.imgFace.frame.height ), scale: scale)
// Only the photo-capture flow shows the cropped variant; the record flow
// shows the full frame.
if Global.typeFace == "TAKEPHOTO" {
self.imgFace.image = cropImage
} else {
self.imgFace.image = imageCap
}
// btnNext.setTitle("Confirm my selfie".localized(), for: .normal)
// btnClose.setTitle("Take a new selfie".localized(), for: .normal)
lbDescription.text = "Make sure your selfie clearly shows your face".localized()
}
// Crops the displayed image. The context size is derived from imageData (not
// the rect argument) — presumably intentional to fix a 4:3 aspect; verify.
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = UIImage(data: imageData!)
let widthCrop = imageCap!.size.width - imageCap!.size.width / 4
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: (imageCap!.size.width - imageCap!.size.width / 3) * 4 / 3), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
init() {
super.init(nibName: "SBKResultFaceVC", bundle: Bundle(for: SBKResultFaceVC.self))
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// Confirms the selfie: uploads it together with the card ids, then forwards
// the server payload to completionSuccessFaceData.
@IBAction func onNext(_ sender: Any) {
Loading.shared.showLoading(viewMain: self)
SBOCRRequest.shared.processFace(image: self.imgFace.image!.pngData()! , pathURL: Global.url_face, idBack: self.idBack, idFront: self.idFront) {(errorCode,data) -> Void in
Loading.shared.hideLoading(viewMain: self)
guard errorCode == nil else {
// Error label updates are explicitly hopped to the main queue;
// the surrounding callback thread is not guaranteed here.
DispatchQueue.main.async {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Server error".localized()
}
return
}
self.completionSuccessFaceData(data!)
}
}
// Returns to the face-capture screen.
@IBAction func onBack(_ sender: Any) {
self.navigationController?.popViewController(animated: true)
}
// Pops all the way back to the first card-capture screen in the stack.
@IBAction func onSelectBackTop(_ sender: Any) {
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
}
}
This diff is collapsed.
//
// ResultCapture.swift
//
// Created by itsol on 5/11/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
// Preview screen shown after a card photo is captured: crops the frame to the
// card area, stores it into Global, and routes to the next step of the flow.
class SBKResultCapture: UIViewController {
@IBOutlet weak var imgPhotoCard: UIImageView!
// Raw captured frame; force-unwrapped in viewDidAppear, so the presenter MUST
// set this before pushing the controller — TODO confirm all call sites do.
public var imageData: Data?
// PNG of the cropped card region, produced in viewDidAppear.
public var dataCrop: Data?
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
// "vertical" or "horizontal" capture orientation; selects the crop path below.
var statusScreen: String = "vertical"
// 1 = front side of the card, 2 = back side.
var checkScreen: Int = 1
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
// Invoked with the final payload once the downstream flow completes.
var completionSuccessResultCard: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
}
init() {
super.init(nibName: "SBKResultCapture", bundle: Bundle(for: SBKResultCapture.self))
}
// Applies configurable UI styling from Global; most of it is disabled.
func loadConfigUI() {
//self.lbCopyright.text = Global.copyright
//self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
//self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonNext)
//self.btnClose.backgroundColor = UIColor.colorFromHexa(Global.colorButtonBack)
//self.btnNext.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonNext), for: .normal)
//self.btnClose.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonBack), for: .normal)
// NOTE(review): compares against the empty string, so the (disabled) font
// overrides would only run when no custom font is configured — verify intent.
if Global.frontConfig == "" {
// self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
// self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnNext.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnClose.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
}
//btnNext.setTitle("My license is readable".localized() , for: .normal)
// btnClose.setTitle("Take a new picture".localized(), for: .normal)
lbDescription.text = "Make sure your license details are clear to read, with no blur or glare".localized()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
// Crashes if imageData was not set or is not a decodable image.
let imageCap = UIImage(data: imageData!)
let scale = imgPhotoCard.frame.width / imageCap!.size.width
var cropImage: UIImage?
// Horizontal captures are cropped with fixed fractions and rotated 90°;
// vertical captures use a 4:3 card rect scaled to the preview width.
if self.statusScreen == "horizontal" {
cropImage = SBKValidateInput.shared.cropImageHorizontal(image: imageCap!, rect: CGRect(x: imageCap!.size.width * 1 / 10, y: imageCap!.size.height * 3 / 20, width: imageCap!.size.width * 8 / 10, height: imageCap!.size.height * 8 / 10), scale: 1.0)!.rotate(radians: .pi / 2)
} else {
cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 20, y: imageCap!.size.height / 8 + imageCap!.size.height / 50, width: imageCap!.size.width * 18 / 20, height: imageCap!.size.width * 18 / 20 * 3 / 4 ), scale: scale)
}
dataCrop = cropImage!.pngData()
self.imgPhotoCard.image = cropImage
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// Crops `image` to `rect` by drawing it offset into a rect-sized context.
// Note: `scale` is unused here — TODO confirm that is intentional.
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x , y: -rect.origin.y ))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
// Pushes the next face step: photo tutorial for TAKEPHOTO, otherwise the
// face-recording screen. Card ids and token are forwarded either way.
func navigateToFace() {
DispatchQueue.main.async {
if Global.typeFace == "TAKEPHOTO" {
let viewControllers = SBKTutorialFaceVC()
viewControllers.idFront = self.idFront
viewControllers.idBack = self.idBack
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessTutorialFace = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
} else {
let viewControllers = SBKRecordFace()
viewControllers.idFront = self.idFront
viewControllers.idBack = self.idBack
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessFaceRecord = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
}
}
}
// Accepts the cropped card: stores it into Global (slot 1 = front, slot 2 =
// back) and advances either to the back-side capture or to the face step.
@IBAction func onResquest(_ sender: Any) {
if self.checkScreen == 1 {
Global.imageCard1 = dataCrop!
// Step "ALL" requires capturing the back of the card next.
if Global.step == "ALL" {
DispatchQueue.main.async {
let viewControllers = SBKCaptureCardVC()
viewControllers.descriptionScreen = "Back of your personal card".localized()
viewControllers.checkScreen = 2
viewControllers.idFront = self.idFront
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessCard = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
}
} else {
self.navigateToFace()
}
} else {
Global.imageCard2 = dataCrop!
self.navigateToFace()
}
}
// Returns to the capture screen to retake the photo.
@IBAction func onBack(_ sender: Any) {
self.navigationController?.popViewController(animated: true)
}
// Pops all the way back to the first card-capture screen in the stack.
@IBAction func onSelectBackTop(_ sender: Any) {
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
}
}
extension UIImage {
    /// Returns a copy of this image rotated by `radians` around its center.
    /// The output canvas is sized to the rotated bounding box (floored to
    /// whole points to avoid Core Graphics rounding it up).
    func rotate(radians: Float) -> UIImage? {
        let angle = CGFloat(radians)
        var canvas = CGRect(origin: .zero, size: size)
            .applying(CGAffineTransform(rotationAngle: angle))
            .size
        // Trim off the extremely small float value to prevent core graphics
        // from rounding it up.
        canvas.width = floor(canvas.width)
        canvas.height = floor(canvas.height)
        UIGraphicsBeginImageContextWithOptions(canvas, false, scale)
        defer { UIGraphicsEndImageContext() }
        let context = UIGraphicsGetCurrentContext()!
        // Rotate about the canvas center, then draw the image centered on it.
        context.translateBy(x: canvas.width / 2, y: canvas.height / 2)
        context.rotate(by: angle)
        draw(in: CGRect(x: -size.width / 2,
                        y: -size.height / 2,
                        width: size.width,
                        height: size.height))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
This diff is collapsed.
//
// TutorialVC.swift
//
// Created by itsol on 5/11/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
/// First screen of the SDK flow: a tutorial page with a "proceed" button that
/// pushes the card-capture screen.
class SBKTutorialVC: UIViewController {
    @IBOutlet weak var btnProceed: UIButton!
    var URLToken: String = ""
    /// Forwarded upward when the whole downstream flow completes.
    var completionSuccessTutorial: ([String:Any]) -> Void = {_ in}

    override func viewDidLoad() {
        super.viewDidLoad()
        // var image = UIImage(named: "back", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
        // image = image?.withRenderingMode(.alwaysOriginal)
        //
        // let button = UIButton(type: .system)
        // button.setImage(image, for: .normal)
        // button.setTitle("Back".localized(), for: .normal)
        // button.imageEdgeInsets.left = -5
        // button.setTitleColor(UIColor.black, for: .normal)
        // button.sizeToFit()
        // button.addTarget(self, action: #selector(self.back), for: .touchUpInside)
        //
        // let newBackButton = UIBarButtonItem(customView: button)
        // self.navigationItem.leftBarButtonItem = newBackButton
        // btnProceed.imageView?.contentMode = .scaleAspectFit
        // btnProceed.imageEdgeInsets = UIEdgeInsets(top: 100, left: 200, bottom: 100, right: 200)
    }

    /// Pushes the front-card capture screen, forwarding the token and the
    /// success callback.
    @IBAction func onProceedToCaptureID(_ sender: Any) {
        let controller = SBKCaptureCardVC()
        controller.URLToken = self.URLToken
        controller.completionSuccessCard = { [weak self] data in
            guard let `self` = self else { return }
            self.completionSuccessTutorial(data)
        }
        self.navigationController?.pushViewController(controller, animated: true)
    }

    override func viewDidAppear(_ animated: Bool) {
        // BUGFIX: the UIKit contract requires calling super from viewDidAppear;
        // the original override omitted it.
        super.viewDidAppear(animated)
        // btnProceed.setTitle("Proceed to Capture ID".localized(), for: .normal)
        // btnProceed.backgroundColor = UIColor.colorFromHexa(Global.colorButtonTutorial)
    }

    @IBAction func onSelectBack(_ sender: Any) {
        self.navigationController?.popViewController(animated: true)
    }

    /// Handles the (currently disabled) custom back bar button.
    @objc func back(sender: UIBarButtonItem) {
        self.navigationController?.popViewController(animated: true)
    }

    init(URLToken: String) {
        super.init(nibName: "SBKTutorialVC", bundle: Bundle(for: SBKTutorialVC.self))
        self.URLToken = URLToken
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
This diff is collapsed.
//
// TutorialFaceVC.swift
// SSSSS
//
// Created by itsol on 5/15/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
/// Tutorial screen shown before the selfie capture step ("no hat", "no
/// glasses", "bright light"); proceeds to `SBKCaptureFaceVC`.
class SBKTutorialFaceVC: UIViewController {
    @IBOutlet weak var brightLight: UILabel!
    @IBOutlet weak var lbNoHat: UILabel!
    @IBOutlet weak var lbNoGlasses: UILabel!
    @IBOutlet weak var btnNext: UIButton!
    // Server-side identifiers of the already-uploaded card sides.
    var idFront: String = ""
    var idBack: String = ""
    var URLToken: String = ""
    /// Forwarded upward when the selfie flow completes.
    var completionSuccessTutorialFace: ([String:Any])->Void = {_ in}

    override func viewDidLoad() {
        super.viewDidLoad()
        // Install a custom back button that pops to the card-capture screen.
        // NOTE(review): the "back" asset is loaded from SBKTutorialVC's bundle —
        // same framework bundle, so behavior is identical; confirm intent.
        var image = UIImage(named: "back", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
        image = image?.withRenderingMode(.alwaysOriginal)
        let button = UIButton(type: .system)
        button.setImage(image, for: .normal)
        button.setTitle("Back".localized(), for: .normal)
        button.sizeToFit()
        button.imageEdgeInsets.left = -5
        button.setTitleColor(UIColor.black, for: .normal)
        button.addTarget(self, action: #selector(self.back), for: .touchUpInside)
        let newBackButton = UIBarButtonItem(customView: button)
        self.navigationItem.leftBarButtonItem = newBackButton
    }

    /// Back action: pops to the first card-capture screen in the stack.
    @objc func back(sender: UIBarButtonItem) {
        for controller in self.navigationController!.viewControllers as Array {
            if controller.isKind(of: SBKCaptureCardVC.self) {
                self.navigationController!.popToViewController(controller, animated: true)
                break
            }
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        // BUGFIX: the UIKit contract requires calling super from viewDidAppear;
        // the original override omitted it.
        super.viewDidAppear(animated)
        self.lbNoHat.text = "No Hat".localized()
        self.lbNoGlasses.text = "No Glasses".localized()
        self.brightLight.text = "Bright Light".localized()
        // self.btnNext.setTitle("Proceed to Take Selfie".localized(), for: .normal)
        //self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonTutorial)
    }

    init() {
        super.init(nibName: "SBKTutorialFaceVC", bundle: Bundle(for: SBKTutorialFaceVC.self))
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Pushes the selfie-capture screen, forwarding ids, token and callback.
    @IBAction func onStart(_ sender: Any) {
        DispatchQueue.main.async {
            let viewControllers = SBKCaptureFaceVC()
            viewControllers.idFront = self.idFront
            viewControllers.idBack = self.idBack
            viewControllers.URLToken = self.URLToken
            viewControllers.completionSuccessFace = { [weak self] data in
                guard let `self` = self else { return }
                self.completionSuccessTutorialFace(data)
            }
            self.navigationController?.pushViewController(viewControllers, animated: true)
        }
    }
}
This diff is collapsed.
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
import UIKit
import Accelerate
extension CVPixelBuffer {
/**
Returns thumbnail by cropping pixel buffer to biggest square and scaling the cropped image to
model dimensions.
*/
/// Returns a thumbnail by cropping the pixel buffer to its biggest centered
/// square and scaling that crop to `size` (the model's input dimensions).
///
/// - Parameter size: target dimensions of the thumbnail.
/// - Returns: a new BGRA pixel buffer, or nil on any allocation/scale failure.
/// - Note: asserts that the buffer is 32BGRA; other formats are unsupported.
func centerThumbnail(ofSize size: CGSize ) -> CVPixelBuffer? {
    let imageWidth = CVPixelBufferGetWidth(self)
    let imageHeight = CVPixelBufferGetHeight(self)
    let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
    assert(pixelBufferType == kCVPixelFormatType_32BGRA)
    let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
    let imageChannels = 4
    // Side length of the biggest square that fits in the frame.
    let thumbnailSize = min(imageWidth, imageHeight)
    CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
    var originX = 0
    var originY = 0
    if imageWidth > imageHeight {
        originX = (imageWidth - imageHeight) / 2
    }
    else {
        originY = (imageHeight - imageWidth) / 2
    }
    // Finds the biggest square in the pixel buffer and advances rows based on it.
    guard let inputBaseAddress = CVPixelBufferGetBaseAddress(self)?.advanced(
        by: originY * inputImageRowBytes + originX * imageChannels) else {
        CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
        return nil
    }
    // Gets vImage Buffer from input image
    var inputVImageBuffer = vImage_Buffer(
        data: inputBaseAddress, height: UInt(thumbnailSize), width: UInt(thumbnailSize),
        rowBytes: inputImageRowBytes)
    let thumbnailRowBytes = Int(size.width) * imageChannels
    guard let thumbnailBytes = malloc(Int(size.height) * thumbnailRowBytes) else {
        CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
        return nil
    }
    // Allocates a vImage buffer for thumbnail image.
    var thumbnailVImageBuffer = vImage_Buffer(data: thumbnailBytes, height: UInt(size.height), width: UInt(size.width), rowBytes: thumbnailRowBytes)
    // Performs the scale operation on input image buffer and stores it in thumbnail image buffer.
    let scaleError = vImageScale_ARGB8888(&inputVImageBuffer, &thumbnailVImageBuffer, nil, vImage_Flags(0))
    CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
    guard scaleError == kvImageNoError else {
        // BUGFIX: the malloc'ed thumbnail buffer was leaked on this failure
        // path in the original (the release callback only runs on success).
        free(thumbnailBytes)
        return nil
    }
    // Frees the malloc'ed bytes when the CVPixelBuffer is released.
    let releaseCallBack: CVPixelBufferReleaseBytesCallback = {mutablePointer, pointer in
        if let pointer = pointer {
            free(UnsafeMutableRawPointer(mutating: pointer))
        }
    }
    var thumbnailPixelBuffer: CVPixelBuffer?
    // Converts the thumbnail vImage buffer to CVPixelBuffer
    let conversionStatus = CVPixelBufferCreateWithBytes(
        nil, Int(size.width), Int(size.height), pixelBufferType, thumbnailBytes,
        thumbnailRowBytes, releaseCallBack, nil, nil, &thumbnailPixelBuffer)
    guard conversionStatus == kCVReturnSuccess else {
        free(thumbnailBytes)
        return nil
    }
    return thumbnailPixelBuffer
}
static func buffer(from image: UIImage) -> CVPixelBuffer? {
let attrs = [
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue
] as CFDictionary
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
Int(image.size.width),
Int(image.size.height),
kCVPixelFormatType_32BGRA,
attrs,
&pixelBuffer)
guard let buffer = pixelBuffer, status == kCVReturnSuccess else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
defer { CVPixelBufferUnlockBaseAddress(buffer, []) }
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pixelData,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.noneSkipLast.rawValue) else {
return nil
}
context.translateBy(x: 0, y: image.size.height)
context.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context)
image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
UIGraphicsPopContext()
return pixelBuffer
}
}
//
// ExtString.swift
// OCR-SDK
//
// Created by itsol on 5/26/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
extension String {
    /// Looks the receiver up as a localization key in the SDK bundle's `.lproj`
    /// directory selected by `Global.language`.
    ///
    /// Fix: the original force-unwrapped the bundle and path, crashing whenever the
    /// SDK bundle or the language resource was missing; now falls back to returning
    /// the key itself.
    /// - Returns: The localized string, or `self` when no translation can be resolved.
    func localized() -> String {
        guard let bundle = Bundle(identifier: "itsol.OCR-SDK"),
              let basePath = bundle.path(forResource: Global.language, ofType: "lproj"),
              let languageBundle = Bundle(path: basePath) else {
            return self
        }
        return languageBundle.localizedString(forKey: self, value: "", table: nil)
    }
}
//
// ExtUIColor.swift
// OCR-SDK
//
// Created by itsol on 6/10/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIColor {
    /// Builds a color from a 6-digit hex string such as `"#225F8D"` (the `#` is optional,
    /// case-insensitive, surrounding whitespace ignored).
    /// - Parameters:
    ///   - hex: The hex string; anything that is not exactly 6 hex digits yields `.gray`.
    ///   - alpha: Opacity of the resulting color (default 1.0).
    class func colorFromHexa(_ hex: String, alpha: CGFloat = 1.0) -> UIColor {
        var sanitized = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        if sanitized.hasPrefix("#") {
            sanitized.remove(at: sanitized.startIndex)
        }
        guard sanitized.count == 6 else {
            // Malformed input: fall back to a neutral color rather than guessing.
            return UIColor.gray
        }
        var rgbValue: UInt32 = 0
        Scanner(string: sanitized).scanHexInt32(&rgbValue)
        let red = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0
        let blue = CGFloat(rgbValue & 0x0000FF) / 255.0
        return UIColor(red: red, green: green, blue: blue, alpha: alpha)
    }
}
//
// ExtUiViewController.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIViewController {
    /// Shows a transient toast label near the bottom of the screen that fades out
    /// over roughly four seconds and then removes itself.
    /// - Parameter message: The text to display.
    func showToast(message : String) {
        let toastFrame = CGRect(x: self.view.frame.size.width / 2 - 75,
                                y: self.view.frame.size.height - 100,
                                width: 150,
                                height: 35)
        let toastLabel = UILabel(frame: toastFrame)
        toastLabel.text = message
        toastLabel.textColor = UIColor.white
        toastLabel.textAlignment = .center
        toastLabel.font = UIFont(name: "Montserrat-Light", size: 12.0)
        toastLabel.backgroundColor = UIColor.black.withAlphaComponent(0.6)
        toastLabel.layer.cornerRadius = 10
        toastLabel.clipsToBounds = true
        toastLabel.alpha = 1.0
        self.view.addSubview(toastLabel)
        UIView.animate(withDuration: 4.0, delay: 0.1, options: .curveEaseOut,
                       animations: { toastLabel.alpha = 0.0 },
                       completion: { _ in toastLabel.removeFromSuperview() })
    }

    /// Validates a captured photo before upload.
    /// NOTE(review): currently a stub that accepts every image — confirm whether real
    /// validation was intended here.
    /// - Parameter image: The captured image data.
    /// - Returns: Always `true` in the current implementation.
    func validateImage(image: Data) -> Bool {
        return true
    }
}
//
// Global.swift
// OCR-SDK
//
// Created by itsol on 5/27/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
// Global mutable configuration shared across the SDK; fields are set by the host app.
struct Global {
// Localization language code used to resolve the .lproj bundle ("en" or "vi").
static var language: String = "en"//"vi"
// Endpoint for processing the front side of the card ("font" is a typo for "front", kept for compatibility).
static var url_font: String = "http://sdk.sb.gotai.ml/process_card_front"
// Endpoint for processing the back side of the card.
static var url_back: String = "http://sdk.sb.gotai.ml/process_card_back"
// Endpoint for the combined front/back/face prediction call.
static var url_face: String = "http://sdk.sb.gotai.ml/call/predict_front_back_face"
// Extra HTTP headers attached to every request; values are cast to String when applied.
static var header: [String: Any] = [:]
// Face-capture mode; per the trailing note the alternative value is "TAKEPHOTO".
static var typeFace: String = "RECORD" //TAKEPHOTO
// Minimum matching percentage considered a pass.
static var ratioPass: Float = 70.0
// Hex color strings used to theme the capture UI.
static var colorConfig: String = "#FFFFFF"
static var colorButtonNext: String = "#225F8D"
static var colorButtonBack: String = "#DBDBDD"
static var colorTextButtonNext: String = "#FFFFFF"
static var colorTextButtonBack: String = "#225F8D"
static var colorTextPreview: String = "#000000"
// Free-form front-screen configuration string; consumer unclear from this file.
static var frontConfig: String = ""
// Copyright line shown in the UI.
static var copyright: String = "Copyright by ITSOL"
// Whether the front card image has been validated.
static var validateCardFront: Bool = false
// Which capture steps to run; "ALL" runs the full flow.
static var step: String = "ALL"
static var colorButtonTutorial: String = "#2C3D51"
// When true, server trust is checked against the bundled certificate (see SBOCRRequest).
static var useFileCer: Bool = true
// Cached captured card images (front/back) attached to the face-upload request.
static var imageCard1: Data?
static var imageCard2: Data?
}
//
// Loadding.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
/// Full-screen loading overlay with a spinner, shown over a host view controller.
class Loading {
    static let shared = Loading()
    var loadingView = UIView()
    var activityIndicator = UIActivityIndicatorView()

    /// Covers `viewMain`'s view with a dimmed overlay (tagged 999999) containing an
    /// animating activity indicator. Always dispatches to the main queue.
    func showLoading(viewMain: UIViewController) {
        DispatchQueue.main.async {
            let overlay = UIView(frame: viewMain.view.frame)
            overlay.tag = 999999
            overlay.backgroundColor = UIColor.gray.withAlphaComponent(0.5)
            self.activityIndicator.style = .white
            self.activityIndicator.center = overlay.center
            self.activityIndicator.startAnimating()
            overlay.addSubview(self.activityIndicator)
            viewMain.view.addSubview(overlay)
            self.loadingView = overlay
        }
    }

    /// Hides and removes the overlay after a one-second delay (the zero-duration
    /// animation is used purely as a delayed completion hook).
    func hideLoading(viewMain: UIViewController) {
        DispatchQueue.main.async {
            UIView.animate(withDuration: 0.0, delay: 1.0, options: .curveEaseOut, animations: {
                self.loadingView.alpha = 0.0
                self.activityIndicator.stopAnimating()
            }, completion: { _ in
                self.activityIndicator.removeFromSuperview()
                self.loadingView.removeFromSuperview()
                // Also remove any stale overlay left by a previous showLoading call.
                viewMain.view.viewWithTag(999999)?.removeFromSuperview()
            })
        }
    }
}
//
// Validate.swift
// OCR-SDK
//
// Created by itsol on 6/10/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
import Vision
/// Runs the card/face TensorFlow Lite classifiers and Vision face detection on input images.
class SBKValidateInput {
    static let shared = SBKValidateInput()
    // One handler per model: ID-card classifier and face classifier.
    var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
    var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
    public typealias CompletionHandle = (_ data: Bool) -> Void

    /// Classifies a card image.
    /// - Returns: -1 on conversion/inference error, 0 for a fake image, 1 for the
    ///   front of a card, 2 for the back.
    func validateCard(imageInput: UIImage) -> Int {
        // Fix: the original force-unwrapped every conversion step and crashed on
        // failure; fail soft with the existing error code instead.
        guard let ciImage = CIImage(image: imageInput),
              let cgImage = CIContext(options: nil).createCGImage(ciImage, from: ciImage.extent),
              let pixelBuffer = self.convertCGImgeToCVPixelBuffer(forImage: cgImage) else {
            return -1
        }
        switch self.didOutput(pixelBuffer: pixelBuffer) {
        case .ERROR:
            return -1
        case .IMAGEFAKE:
            return 0
        case .IMAGEFRONT:
            return 1
        case .IMAGEBACK:
            return 2
        }
    }

    /// Runs Vision face detection and, only if a face is found, the face model.
    /// `completion` receives true when both checks pass.
    func validateFace(imageFace: CVPixelBuffer, completion: @escaping CompletionHandle) {
        self.detectFace(in: imageFace) { [weak self] hasFace in
            guard let self = self else { return }
            // && short-circuits: the model only runs when a face was detected.
            completion(hasFace && self.didOutputFace(pixelBuffer: imageFace))
        }
    }

    /// Runs the card model on a frame and maps its three class scores to a ValidateCard case.
    /// Fix: the original declared a per-call throttle whose timestamp was a local reset to
    /// `distantPast` on every call, so the throttling guard could never fire — that dead
    /// code is removed. It also force-unwrapped the model output, crashing when the model
    /// handler was nil; that now reports .ERROR.
    func didOutput(pixelBuffer: CVPixelBuffer) -> ValidateCard {
        guard let scores = modelDataHandler?.runModel(onFrame: pixelBuffer), scores.count >= 3 else {
            return .ERROR
        }
        if scores[0] > scores[1] && scores[0] > scores[2] {
            return .IMAGEFAKE
        } else if scores[1] > scores[0] && scores[1] > scores[2] {
            return .IMAGEFRONT
        } else {
            return .IMAGEBACK
        }
    }

    /// Runs the face model on a frame.
    /// - Returns: true when the "face" score (index 1) beats the "no face" score (index 0).
    ///   Same dead-throttle/force-unwrap fixes as `didOutput`.
    func didOutputFace(pixelBuffer: CVPixelBuffer) -> Bool {
        guard let scores = modelDataFaceHandler?.runModel(onFrame: pixelBuffer), scores.count >= 2 else {
            return false
        }
        return scores[0] < scores[1]
    }

    /// Converts a UIImage to a 32BGRA pixel buffer.
    /// NOTE(review): the non-optional return forces traps on conversion failure; callers
    /// would need a signature change to handle failure gracefully.
    func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
        let ciImage = CIImage(image: imageInput)!
        let cgImage = CIContext(options: nil).createCGImage(ciImage, from: ciImage.extent)!
        return self.convertCGImgeToCVPixelBuffer(forImage: cgImage)!
    }

    /// Converts a pixel buffer to a UIImage via CIImage.
    func convertCVPixelToUIImage(pixelBuffer: CVPixelBuffer) -> UIImage {
        return self.convertCIToUIImage(cmage: CIImage(cvPixelBuffer: pixelBuffer))
    }

    /// Renders a CIImage into a UIImage (traps if CoreImage cannot render it).
    func convertCIToUIImage(cmage: CIImage) -> UIImage {
        let context = CIContext(options: nil)
        let cgImage = context.createCGImage(cmage, from: cmage.extent)!
        return UIImage(cgImage: cgImage)
    }

    /// Draws a CGImage into a newly created 32BGRA pixel buffer; nil on failure.
    func convertCGImgeToCVPixelBuffer (forImage image: CGImage) -> CVPixelBuffer? {
        let frameSize = CGSize(width: image.width, height: image.height)
        var created: CVPixelBuffer? = nil
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(frameSize.width), Int(frameSize.height), kCVPixelFormatType_32BGRA, nil, &created)
        guard status == kCVReturnSuccess, let pixelBuffer = created else {
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
        let context = CGContext(data: data, width: Int(frameSize.width), height: Int(frameSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue)
        context?.draw(image, in: CGRect(x: 0, y: 0, width: image.width, height: image.height))
        return pixelBuffer
    }

    /// Detects whether the frame contains at least one face (completion on the main queue).
    /// Fix: the original never invoked `completion` when Vision produced nil results or on
    /// iOS < 11, leaving callers waiting forever; it now always calls back.
    func detectFace(in image: CVPixelBuffer, completion: @escaping CompletionHandle) {
        if #available(iOS 11.0, *) {
            let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, _: Error?) in
                DispatchQueue.main.async {
                    let faces = request.results as? [VNFaceObservation] ?? []
                    completion(!faces.isEmpty)
                }
            })
            let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
            try? imageRequestHandler.perform([faceDetectionRequest])
        } else {
            // Vision unavailable: report "no face" rather than silently dropping the callback.
            completion(false)
        }
    }

    // Crops the display image to 90% of its width with a 4:3 landscape aspect ratio.
    func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
        let widthCrop = image.size.width - image.size.width / 10
        UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
        image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
        let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return croppedImage
    }

    // Crops the image to exactly the supplied rect's dimensions.
    func cropImageHorizontal(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
        UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
        image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
        let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return croppedImage
    }

    // Crops a selfie to 75% of its width with a 3:4 portrait aspect ratio.
    func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
        let widthCrop = image.size.width - image.size.width / 4
        UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 4 / 3), true, 0.0)
        image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
        let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return croppedImage
    }
}
// Classification outcome for a single card frame produced by SBKValidateInput.didOutput.
enum ValidateCard: String {
// Inference could not be run (e.g. missing model handler).
case ERROR
// Frame classified as not a genuine card.
case IMAGEFAKE
// Frame classified as the front side of a card.
case IMAGEFRONT
// Frame classified as the back side of a card.
case IMAGEBACK
}
/*
Localizable.strings
OCR-SDK
Created by itsol on 5/26/20.
Copyright © 2020 itsol. All rights reserved.
*/
"Place it within the box"="Place it within the box";
"Do not place outside"="Do not place outside";
"Avoid glare"="Avoid glare";
"Front of your personal card"="Front of your personal card";
//
// OCRRequest.swift
// SSSSS
//
// Created by itsol on 5/12/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
/// Multipart upload client for the OCR backend (card front/back and face endpoints),
/// with optional certificate pinning via `Global.useFileCer`.
class SBOCRRequest: NSObject {
    public typealias CompletionHandle = (_ errorMess : String?, _ data: [String: Any]?) -> Void
    static let shared = SBOCRRequest()

    // Pinned certificate(s) bundled with the SDK, compared against the server's leaf cert.
    // NOTE(review): force-unwraps assume the SDK bundle and "objcio.cer" exist; a missing
    // resource crashes on first use — confirm the resource ships with every build.
    let certificates: [Data] = {
        let bundle = Bundle(identifier: "itsol.OCR-SDK")
        let url = bundle!.url(forResource: "objcio", withExtension: "cer")!
        let data = try! Data(contentsOf: url)
        return [data]
    }()

    /// Uploads a card image (front or back) as multipart/form-data.
    /// - Parameters:
    ///   - image: JPEG data of the captured card.
    ///   - pathURL: Absolute endpoint URL.
    ///   - completion: Called with (errorMessage, nil) on failure or (nil, json) on success.
    func processCardFont(image: Data, pathURL: String, completion: @escaping CompletionHandle) {
        // Fix: the original force-unwrapped URL(string:); report the failure instead.
        guard let url = URL(string: pathURL) else {
            completion("Error: invalid URL", nil)
            return
        }
        var request = URLRequest(url: url)
        request.httpMethod = "POST"
        for (key, value) in Global.header {
            request.setValue(value as? String, forHTTPHeaderField: key)
        }
        let boundary = generateBoundaryString()
        request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
        request.httpBody = createBodyWithParameters(filePathKey: "image", imageDataKey: image as NSData, boundary: boundary) as Data
        let urlSession = URLSession(configuration: .default, delegate: self, delegateQueue: nil)
        let task = urlSession.dataTask(with: request) { data, _, error in
            var errorMessage = ""
            var jsonBody = [String: Any]()
            self.processJsonBody(errorMessage: &errorMessage, withJsonBody: &jsonBody, dataResponse: data, errorResponse: error)
            guard errorMessage.isEmpty else {
                completion(errorMessage, nil)
                return
            }
            completion(nil, jsonBody)
        }
        task.resume()
        // Fix: a session created with `delegate: self` retains its delegate and was never
        // invalidated, leaking one session per request. Invalidate after the task finishes.
        urlSession.finishTasksAndInvalidate()
    }

    /// Uploads the selfie plus previously captured card IDs to the face endpoint.
    /// - Parameters:
    ///   - image: JPEG data of the selfie ("image_general" part).
    ///   - pathURL: Absolute endpoint URL.
    ///   - idBack: Back-card ID; omitted from the form when empty.
    ///   - idFront: Front-card ID.
    ///   - completion: Called with (errorMessage, nil) on failure or (nil, json) on success.
    func processFace(image: Data, pathURL: String, idBack: String, idFront: String, completion: @escaping CompletionHandle) {
        var params: [String: String] = [
            "id_back": idBack,
            "id_front": idFront,
        ]
        if idBack == "" {
            params.removeValue(forKey: "id_back")
        }
        guard let urlPath = URL(string: pathURL) else {
            completion("Error: invalid URL", nil)
            return
        }
        var request = URLRequest(url: urlPath)
        request.httpMethod = "POST"
        for (key, value) in Global.header {
            request.setValue(value as? String, forHTTPHeaderField: key)
        }
        let boundary = generateBoundaryString()
        request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
        request.httpBody = createBodyWithParameters(parameters: params, filePathKey: "image_general", imageDataKey: image as NSData, boundary: boundary) as Data
        let urlSession = URLSession(configuration: .default, delegate: self, delegateQueue: nil)
        let task = urlSession.dataTask(with: request) { data, _, error in
            var errorMessage = ""
            var jsonBody = [String: Any]()
            self.processJsonBody(errorMessage: &errorMessage, withJsonBody: &jsonBody, dataResponse: data, errorResponse: error)
            guard errorMessage.isEmpty else {
                completion(errorMessage, nil)
                return
            }
            completion(nil, jsonBody)
        }
        task.resume()
        // Same session-leak fix as processCardFont (debug print of the request also removed).
        urlSession.finishTasksAndInvalidate()
    }

    /// Builds a multipart/form-data body: text parameters, the main image part named
    /// `filePathKey`, and — when cached in Global — the two card images.
    func createBodyWithParameters(parameters: [String: String] = [:], filePathKey: String?, imageDataKey: NSData, boundary: String) -> NSData {
        let body = NSMutableData()
        let mimetype = "image/jpg"
        for (key, value) in parameters {
            body.appendString(string: "--\(boundary)\r\n")
            body.appendString(string: "Content-Disposition: form-data; name=\"\(key)\"\r\n\r\n")
            body.appendString(string: "\(value)\r\n")
        }
        // Main image part (field name supplied by the caller).
        body.appendString(string: "--\(boundary)\r\n")
        body.appendString(string: "Content-Disposition: form-data; name=\"\(filePathKey!)\"; filename=\"face.jpg\"\r\n")
        body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
        body.append(imageDataKey as Data)
        body.appendString(string: "\r\n")
        // Attach previously captured card images when available.
        if let cardFront = Global.imageCard1 {
            body.appendString(string: "--\(boundary)\r\n")
            body.appendString(string: "Content-Disposition: form-data; name=\"image_card1\"; filename=\"cardFront.jpg\"\r\n")
            body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
            body.append(cardFront)
            body.appendString(string: "\r\n")
        }
        if let cardBack = Global.imageCard2 {
            body.appendString(string: "--\(boundary)\r\n")
            body.appendString(string: "Content-Disposition: form-data; name=\"image_card2\"; filename=\"cardBack.jpg\"\r\n")
            body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
            body.append(cardBack)
            body.appendString(string: "\r\n")
        }
        body.appendString(string: "--\(boundary)--\r\n")
        return body
    }

    /// Returns a unique multipart boundary string.
    func generateBoundaryString() -> String {
        return "Boundary-\(NSUUID().uuidString)"
    }

    /// Translates a URLSession response into either an error message or a JSON dictionary
    /// (exactly one of the two inout parameters is populated).
    private func processJsonBody (errorMessage errorMess: inout String,
                                  withJsonBody jsonResult: inout [String: Any],
                                  dataResponse data: Data?,
                                  errorResponse error: Error? ) {
        guard error == nil else {
            // Transport failure: surface only the NSError code, matching existing callers.
            errorMess = "\((error! as NSError).code)"
            return
        }
        guard data != nil else {
            errorMess = "Error: Did not receive data"
            return
        }
        guard let _jsonResult = try? JSONSerialization.jsonObject(with: data!, options: JSONSerialization.ReadingOptions.mutableContainers) as? [String: Any] else {
            errorMess = "Error: data can not convert to jsonObject"
            return
        }
        jsonResult = _jsonResult
    }
}
// MARK: - URLSessionDelegate (certificate pinning)
extension SBOCRRequest: URLSessionDelegate {
    /// Accepts the server only when its leaf certificate matches one of the bundled
    /// pinned certificates; otherwise the challenge is cancelled.
    func urlSession(_ session: URLSession, didReceive challenge: URLAuthenticationChallenge, completionHandler: @escaping (URLSession.AuthChallengeDisposition, URLCredential?) -> Void) {
        guard Global.useFileCer else {
            // Fix: with pinning disabled the original cancelled every challenge, which
            // failed ALL HTTPS requests. Defer to the system's default trust evaluation.
            completionHandler(.performDefaultHandling, nil)
            return
        }
        if let trust = challenge.protectionSpace.serverTrust, SecTrustGetCertificateCount(trust) > 0 {
            if let certificate = SecTrustGetCertificateAtIndex(trust, 0) {
                let data = SecCertificateCopyData(certificate) as Data
                if certificates.contains(data) {
                    completionHandler(.useCredential, URLCredential(trust: trust))
                    return
                }
            }
        }
        // No trust object or certificate mismatch: refuse the connection.
        completionHandler(.cancelAuthenticationChallenge, nil)
    }
}
extension NSMutableData {
    /// Appends the UTF-8 bytes of `string`.
    /// Fix: the original force-unwrapped `data(using:)`; `String.utf8` cannot fail
    /// (every Swift String is representable in UTF-8), so the unwrap is eliminated.
    func appendString(string: String) {
        append(Data(string.utf8))
    }
}
/*
Localizable.strings
OCR-SDK
Created by itsol on 5/26/20.
Copyright © 2020 itsol. All rights reserved.
*/
"Place it within the box"="Đặt đúng vị trí";
"Do not place outside"="Không đặt bên ngoài";
"Avoid glare"="Tránh ánh sáng chói";
"Position your face in the oval" = "Đặt vị trí mặt bạn vào hình";
"Front of your personal card" = "Mặt trước thẻ cá nhân của bạn";
"Invalid image, please check again!" = "Hình ảnh không hợp lệ, vui lòng kiểm tra lại!";
"Make sure your selfie clearly shows your face" = "Hãy chắc chắn rằng ảnh của bạn hiển thị rõ ràng khuôn mặt của bạn";
"Confirm my selfie" = "Tôi thấy ổn rồi";
"Take a new selfie" = "Chụp lại ảnh mới";
"Back of your personal card" = "Mặt sau thẻ cá nhân của bạn";
"Make sure your license details are clear to read, with no blur or glare" = "Đảm bảo chi tiết giấy phép của bạn rõ ràng để đọc, không bị mờ hoặc lóa";
"Take a new picture" = "Chụp lại ảnh mới";
"My license is readable" = "Tôi thấy ổn rồi";
"Proceed to Capture ID" = "Bắt đầu chụp thẻ";
"Good lighting on your face" = "Ánh sáng tốt trên khuôn mặt của bạn";
"Hold phone in front of you" = "Giữ điện thoại trước mặt bạn";
"Bright Light" = "ánh sáng";
"No Hat" = "Không đội mũ";
"No Glasses" = "Không đeo kính";
"Proceed to Take Selfie" = "Bắt đầu chụp ảnh";
"Incorrect face, please check!" = "Khuôn mặt không chính xác, xin vui lòng kiểm tra!";
"Are you ready. Let's start!" = "Bạn đã sẵn sàng. Hãy bắt đầu!";
"Please look straight" = "Vui lòng nhìn thẳng";
"Please turn to the left" = "Quay sang trái";
"Please turn to the right" = "Quay sang phải";
"Unsatisfactory, please try again!" = "Chưa đạt yêu cầu, hãy thử lại!";
"Back" = "Trở về";
"Incorrect card, please check!" = "Thẻ không chính xác, xin vui lòng kiểm tra!";
"Please put the back of the card in" = "Vui lòng đặt mặt sau của thẻ vào";
"Please put the front of the card in" = "Vui lòng đặt mặt trước của thẻ vào";
"Exactly" = "Chính xác";
"There are many faces in the frame" = "Có nhiều khuôn mặt trong khung hình";
# Uncomment the next line to define a global platform for your project
#platform :ios, '10.0'
# Dependencies for the OCR-SDK framework target.
target 'OCR-SDK' do
# Comment the next line if you don't want to use dynamic frameworks
#use_modular_headers!
use_frameworks!
# Pods for OCR-SDK
# TensorFlow Lite runs the on-device card/face classification models.
pod 'TensorFlowLiteSwift'
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
PODS:
- TensorFlowLiteC (2.2.0)
- TensorFlowLiteSwift (2.2.0):
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
- TensorFlowLiteSwift
SPEC REPOS:
trunk:
- TensorFlowLiteC
- TensorFlowLiteSwift
SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
PODFILE CHECKSUM: 8c4b84d229daab67aa0a162f14470a2461899c16
COCOAPODS: 1.9.1
PODS:
- TensorFlowLiteC (2.2.0)
- TensorFlowLiteSwift (2.2.0):
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
- TensorFlowLiteSwift
SPEC REPOS:
trunk:
- TensorFlowLiteC
- TensorFlowLiteSwift
SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
PODFILE CHECKSUM: 8c4b84d229daab67aa0a162f14470a2461899c16
COCOAPODS: 1.9.1
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DE1F4D51AD94C30627575AEE202FD099"
BuildableName = "Pods_OCR_SDK.framework"
BlueprintName = "Pods-OCR-SDK"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
This diff is collapsed.
#import <Foundation/Foundation.h>
// CocoaPods-generated dummy class: guarantees the Pods-OCR-SDK framework emits at
// least one object file so the linker produces a binary. Do not edit by hand.
@interface PodsDummy_Pods_OCR_SDK : NSObject
@end
@implementation PodsDummy_Pods_OCR_SDK
@end
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment