Commit 7d8d0acf authored by Nguyễn Văn An

update model

parent c17d42a5
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "FD833007-BFDE-4E90-B09E-D1112B21135E"
type = "0"
version = "2.0">
</Bucket>
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import CoreImage
import TensorFlowLite
import UIKit
import Accelerate
/// A result from invoking the `Interpreter`.
struct Result {
  let inferenceTime: Double
  let inferences: [Inference]
}
/// An inference from invoking the `Interpreter`.
struct Inference {
  let confidence: Float
  let label: String
}
/// Information about a model file or labels file.
typealias FileInfo = (name: String, extension: String)
/// Information about the MobileNet model.
enum MobileNet {
-  static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
-  static let cardModel: FileInfo = (name: "idcard15072021", extension: "tflite")
-  static let landMarkModel: FileInfo = (name: "face_detection_front", extension: "tflite")
+  static let livenessModel: FileInfo = (name: "liveness", extension: "tflite")
+  static let cardModel: FileInfo = (name: "card", extension: "tflite")
+  static let faceModel: FileInfo = (name: "face", extension: "tflite")
}
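
For reference, a minimal sketch of how one of these FileInfo tuples is consumed (it mirrors the failable initializer below; the assumption that the models ship in the same bundle as SBKModelDataHandler is illustrative):

    let info = MobileNet.cardModel
    let modelPath = Bundle(for: SBKModelDataHandler.self)
        .path(forResource: info.name, ofType: info.extension)
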
/// This class handles all data preprocessing and makes calls to run inference on a given frame
/// by invoking the `Interpreter`. It then formats the inferences obtained and returns the top N
/// results for a successful inference.
class SBKModelDataHandler {

  // MARK: - Internal Properties

  /// The current thread count used by the TensorFlow Lite Interpreter.
  let threadCount: Int
  let resultCount = 3
  let threadCountLimit = 10
+  var subtract: Float = 127.5

  // MARK: - Model Parameters
  let batchSize = 1
  let inputChannels = 3
-  let inputWidth = 224
-  let inputHeight = 224
+  var inputWidth = 256
+  var inputHeight = 256
// MARK: - Private Properties
/// List of labels from the given labels file.
  private var labels: [String] = []
/// TensorFlow Lite `Interpreter` object for performing inference on a given model.
  private var interpreter: Interpreter
/// Information about the alpha component in RGBA data.
  private let alphaComponent = (baseOffset: 4, moduloRemainder: 3)
// MARK: - Initialization
/// A failable initializer for `ModelDataHandler`. A new instance is created if the model and
/// labels files are successfully loaded from the app's main bundle. Default `threadCount` is 1.
  init?(modelFileInfo: FileInfo, threadCount: Int = 1) {
    let modelFilename = modelFileInfo.name
    // Construct the path to the model file.
-    let bundle = Bundle(for: SBKRecordFace.self)
+    let bundle = Bundle(for: type(of: self))
    guard let modelPath = bundle.path(
      forResource: modelFilename,
      ofType: modelFileInfo.extension
@@ -86,16 +45,11 @@ class SBKModelDataHandler {
print("Failed to load the model file with name: \(modelFilename).") print("Failed to load the model file with name: \(modelFilename).")
return nil return nil
} }
// Specify the options for the `Interpreter`.
    self.threadCount = threadCount
    var options = Interpreter.Options()
    options.threadCount = threadCount
    do {
// Create the `Interpreter`.
      interpreter = try Interpreter(modelPath: modelPath, options: options)
// Allocate memory for the model's input `Tensor`s.
      try interpreter.allocateTensors()
    } catch let error {
      print("Failed to create the interpreter with error: \(error.localizedDescription)")
@@ -103,45 +57,6 @@ class SBKModelDataHandler {
    }
  }
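
As a usage sketch (a hypothetical call site; the thread count is illustrative), constructing a handler for the updated face model looks like:

    // The initializer is failable, so the handler is optional.
    let faceHandler = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel,
                                          threadCount: 2)
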
-  func fromImage(image: UIImage, datas: Data, imagesss: UIImage) -> UIColor {
-    var totalR: CGFloat = 0
-    var totalG: CGFloat = 0
-    var totalB: CGFloat = 0
-    var count: CGFloat = 0
-    for x in 0..<Int(image.size.width) {
-      for y in 0..<Int(image.size.height) {
-        count += 1
-        var rF: CGFloat = 0, gF: CGFloat = 0, bF: CGFloat = 0, aF: CGFloat = 0
-        image.getPixelColor(pos: CGPoint(x: x, y: y), dataImage: datas, image: imagesss).getRed(&rF, green: &gF, blue: &bF, alpha: &aF)
-        totalR += rF
-        totalG += gF
-        totalB += bF
-      }
-    }
-    let averageR = totalR / count
-    let averageG = totalG / count
-    let averageB = totalB / count
-    return UIColor(red: averageR, green: averageG, blue: averageB, alpha: 1.0)
-  }
-
-  func convert(cmage: CIImage) -> UIImage {
-    let context = CIContext(options: nil)
-    let cgImage = context.createCGImage(cmage, from: cmage.extent)!
-    return UIImage(cgImage: cgImage)
-  }
// MARK: - Internal Methods
/// Performs image preprocessing, invokes the `Interpreter`, and processes the inference results.
  func runModel(onFrame pixelBuffer: CVPixelBuffer) -> [Float]? {
    let sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer)
@@ -149,8 +64,11 @@ class SBKModelDataHandler {
           sourcePixelFormat == kCVPixelFormatType_32BGRA ||
           sourcePixelFormat == kCVPixelFormatType_32RGBA)
    let imageChannels = 4
    assert(imageChannels >= inputChannels)
// Crops the image to the biggest square in the center and scales it down to model dimensions.
    let scaledSize = CGSize(width: inputWidth, height: inputHeight)
    guard let thumbnailPixelBuffer = pixelBuffer.resized(to: scaledSize) else {
      return nil
@@ -159,6 +77,8 @@ class SBKModelDataHandler {
    let outputTensor: Tensor
    do {
      let inputTensor = try interpreter.input(at: 0)
// Remove the alpha component from the image buffer to get the RGB data.
      guard let rgbData = rgbDataFromBuffer(
        thumbnailPixelBuffer,
        byteCount: batchSize * inputWidth * inputHeight * inputChannels,
@@ -167,10 +87,19 @@ class SBKModelDataHandler {
print("Failed to convert the image buffer to RGB data.") print("Failed to convert the image buffer to RGB data.")
return nil return nil
} }
let imageCap = UIImage(data: rgbData)
// self.fromImage(image: imageCap!, datas: rgbData, imagesss: imageCap!)
// Copy the RGB data to the input `Tensor`.
      try interpreter.copy(rgbData, toInputAt: 0)
// Run inference by invoking the `Interpreter`.
      let startDate = Date()
      try interpreter.invoke()
      interval = Date().timeIntervalSince(startDate) * 1000
// Get the output `Tensor` to process the inference results.
      outputTensor = try interpreter.output(at: 0)
    } catch let error {
      print("Failed to invoke the interpreter with error: \(error.localizedDescription)")
@@ -211,7 +140,6 @@ class SBKModelDataHandler {
    guard let sourceData = CVPixelBufferGetBaseAddress(buffer) else {
      return nil
    }
    let width = CVPixelBufferGetWidth(buffer)
    let height = CVPixelBufferGetHeight(buffer)
    let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
@@ -224,7 +152,7 @@ class SBKModelDataHandler {
                                rowBytes: sourceBytesPerRow)
    guard let destinationData = malloc(height * destinationBytesPerRow) else {
-      print("Error: out of memory")
+      print("Error: buffer overflow")
      return nil
    }
@@ -260,11 +188,15 @@ class SBKModelDataHandler {
    let bytes = Array<UInt8>(unsafeData: byteData)!
    var floats = [Float]()
    for i in 0..<bytes.count {
-      floats.append(Float(bytes[i]) / 255.0)
+      if self.subtract != 0 {
+        floats.append((Float(bytes[i]) - subtract) / subtract)
+      } else {
+        floats.append(Float(bytes[i]))
+      }
    }
    return Data(copyingBufferOf: floats)
  }
}
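
The effect of the new subtract knob, as a standalone sketch (the function name is illustrative): with subtract = 127.5 (the default) each byte in 0...255 maps to [-1, 1], while subtract = 0 passes raw byte values through, which is how the card and face call sites below configure it.

    func normalize(_ byte: UInt8, subtract: Float) -> Float {
        subtract != 0 ? (Float(byte) - subtract) / subtract : Float(byte)
    }

    let minusOne = normalize(0, subtract: 127.5)    // -1.0
    let plusOne  = normalize(255, subtract: 127.5)  //  1.0
    let raw      = normalize(200, subtract: 0)      //  200.0
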
// MARK: - Extensions
...
@@ -18,6 +18,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
  @IBOutlet public weak var lbStep: UILabel!
  @IBOutlet public weak var btnCapture: UIButton!
  @IBOutlet public weak var imgCaution: UIImageView!
+  @IBOutlet weak var viewTakePhoto: UIView!
  public var captureSession: AVCaptureSession = AVCaptureSession()
  public var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
@@ -39,7 +40,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
  private var cropZone: CGRect?
  private var cropImage: CGRect?
  var overlayView: OverLayCardView?
+  var imagePreview = UIImageView(frame: CGRect(x: 20, y: 20, width: 280, height: 210))
  public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?) -> Void = { _, _, _ in }
  public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
@@ -79,6 +80,13 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
    self.labelTypeCard.text = "Use front"
    self.labelTypeCard.textColor = UIColor.white
    self.addSubview(labelTypeCard)
+    self.addSubview(imagePreview)
  }

+  func setImage(_ image: UIImage) {
+    DispatchQueue.main.async {
+      self.imagePreview.image = image
+    }
+  }
  func loadViewFromNib() -> UIView? {
@@ -224,13 +232,14 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
    let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
    if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
      self.completionSuccessCardStep(nil, urlImage, nil)
+      self.stopCamera()
    }
  }
  // Photo capture event
  @IBAction func onCapturePhoto(_ sender: Any) {
-    if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
+    if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) || self.statusValidateImage == .PASSPORT {
      if #available(iOS 11.0, *) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
@@ -255,6 +264,8 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
    connection.videoOrientation = .portrait
  }
  public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
      debugPrint("unable to get image from sample buffer")
@@ -309,7 +320,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
      overlayView.setBorderColor(color: UIColor.red.cgColor)
    }
-    if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
+    if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) || self.statusValidateImage == .PASSPORT {
      self.lbDescription.textColor = .green
      self.lbDescription.text = "Are you ready. Let's start!".localized()
      self.imgCaution.isHidden = true
...
@@ -25,7 +25,7 @@ class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDeleg
  private let videoDataOutput = AVCaptureVideoDataOutput()
  private var modelDataHandler: SBKModelDataHandler? =
-    SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
+    SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
  private var result: [Float]?
  private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
  private let delayBetweenInferencesMs: Double = 1000
...
@@ -30,7 +30,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
  private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
  private let videoDataOutput = AVCaptureVideoDataOutput()
-  private var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
+  private var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
  private var result: [Float]?
  private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
  private let delayBetweenInferencesMs: Double = 1000
@@ -74,6 +74,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
    }
    self.checkScreen()
    self.loadCamera()
+    modelDataHandler?.subtract = 0
  }
  func loadViewFromNib() -> UIView? {
@@ -206,21 +207,33 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
  }

-  func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) {
+  func getMaxResult(result: [Float]) -> Int {
+    var max = result[0]
+    var index = 0
+    for i in 0..<result.count {
+      index = max < result[i] ? i : index
+      max = max < result[i] ? result[i] : max
+    }
+    return index
+  }
+
+  func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace, pureImage: CVPixelBuffer) {
    let currentTimeMs = Date().timeIntervalSince1970 * 1000
    guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
    previousInferenceTimeMs = currentTimeMs
    // Pass the pixel buffer to TensorFlow Lite to perform inference.
    result = modelDataHandler?.runModel(onFrame: pixelBuffer)
+    if let result = result {
+      let maxResult = getMaxResult(result: result)
      if self.checkStatusRecord {
-        (result![0] < result![1]) ? (self.numberTrue += 1) : (self.numberFalse += 1)
+        maxResult == 0 ? (self.numberTrue += 1) : (self.numberFalse += 1)
        self.numberPass += 1
        DispatchQueue.main.async {
-          let ciimage: CIImage = CIImage(cvPixelBuffer: pixelBuffer)
+          let ciimage: CIImage = CIImage(cvPixelBuffer: pureImage)
          let imageView: UIImage = SBKValidateInput.shared.convertCIToUIImage(cmage: ciimage)
-          if self.result![0] < self.result![1] {
+          if maxResult == 0 {
            if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
              if self.dataImageSuccess.count == 0 {
                self.dataImageSuccess.append(imageView)
@@ -254,10 +267,10 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.lbDescription.text = "Exactly".localized() self.lbDescription.text = "Exactly".localized()
self.checkStep = 3 self.checkStep = 3
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E") self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
if self.dataImageSuccess.count == 2 { if self.dataImageSuccess.count == 2 {
self.dataImageSuccess.append(imageView) self.dataImageSuccess.append(imageView)
} }
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
            } else if statusFace != .TOLEFT && self.checkStep == 2 {
              self.lbDescription.textColor = UIColor.red
              self.lbDescription.text = "Please turn to the left".localized()
@@ -271,7 +284,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
          }
        }
      } else {
-        if result![0] < result![1] {
+        if maxResult == 0 {
          DispatchQueue.main.async {
            self.checkStartRecord = true
            self.lbDescription.textColor = UIColor.white
@@ -296,6 +309,8 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
      }
    }
+    }
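
The new getMaxResult is a plain argmax over the model's output scores; index 0 is treated as the live/valid-face class throughout this file. A standalone sketch of the same logic (names illustrative; assumes a non-empty score array):

    func argmax(_ scores: [Float]) -> Int {
        var bestIndex = 0
        for i in 1..<scores.count where scores[i] > scores[bestIndex] {
            bestIndex = i
        }
        return bestIndex
    }

    let isLive = argmax([0.82, 0.11, 0.07]) == 0  // true
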
  @IBAction func onRecord(_ sender: Any) {
    if !self.checkStatusRecord && self.checkStartRecord {
      self.startTimer()
@@ -312,19 +327,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
debugPrint("unable to get image from sample buffer") debugPrint("unable to get image from sample buffer")
return return
} }
let pureImage = self.resizeImageFace(pixelBuffer: frame)
if self.startusCheck { if self.startusCheck {
if #available(iOS 11.0, *) { if #available(iOS 11.0, *) {
if self.screenHeight == 2436 { if self.screenHeight == 2436 {
self.detectFace(in: self.resizeImageFace(pixelBuffer: frame.resized(to: CGSize(width: 360, height: 480))!))
self.detectFace(in: self.resizeImageFace(pixelBuffer: frame.resized(to: CGSize(width: 360, height: 480))!), pureImage: pureImage)
} else { } else {
self.detectFace(in: self.resizeImageFace(pixelBuffer: frame)) self.detectFace(in: pureImage, pureImage: pureImage)
} }
} }
} }
} }
  func checkScreen() {
    if UIDevice().userInterfaceIdiom == .phone {
      self.screenHeight = UIScreen.main.nativeBounds.height
@@ -406,7 +422,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
  }

  @available(iOS 11.0, *)
-  private func detectFace(in image: CVPixelBuffer) {
+  private func detectFace(in image: CVPixelBuffer, pureImage: CVPixelBuffer) {
    let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
      DispatchQueue.main.async {
        DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
@@ -423,7 +439,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
          self.startTimer()
          DispatchQueue.global().async {
-            self.didOutput(pixelBuffer: image, statusFace: statusString)
+            self.didOutput(pixelBuffer: image, statusFace: statusString, pureImage: pureImage)
          }
        }
      } else {
@@ -437,7 +453,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.lbDescription.text = "Incorrect face, please check!".localized() self.lbDescription.text = "Incorrect face, please check!".localized()
DispatchQueue.global().async { DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR) self.didOutput(pixelBuffer: image, statusFace: .ERROR, pureImage: pureImage)
} }
} }
} }
...
@@ -14,9 +14,13 @@ class SBKValidateInput {
  static let shared = SBKValidateInput()
  var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
-  var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
+  var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
  public typealias CompletionHandle = (_ data: Bool) -> Void
+  init() {
+    modelDataHandler?.inputWidth = 280
+    modelDataHandler?.inputHeight = 210
+    modelDataHandler?.subtract = 0
+  }
  func validateCard(imageInput: UIImage) -> Int {
    let ciimage = CIImage(image: imageInput)
@@ -56,24 +60,23 @@ class SBKValidateInput {
    let currentTimeMs = Date().timeIntervalSince1970 * 1000
    guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
    previousInferenceTimeMs = currentTimeMs
    result = modelDataHandler?.runModel(onFrame: pixelBuffer)
    if result == nil {
      return .ERROR
    }
-    switch self.getResultCard(result: result!) {
+    let max = getResultCard(result: result!)
+    let total = result!.reduce(0, +)
+    switch max {
    case 0:
      return .IMAGE_FAKE
-    case 1:
+    case 1, 3, 17:
      return .IMAGE_FRONT
-    case 2:
+    case 2, 4, 18:
      return .IMAGE_BACK
-    case 3:
+    case 25:
      return .PASSPORT
-    case 4,5,6,7,8,9:
-      return .IMAGE_FAKE
    default:
-      return .ERROR
+      return .IMAGE_FAKE
    }
  }
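
For illustration, the same index-to-card-type mapping as a standalone function (the enum and function names are hypothetical; the class indices 1/3/17, 2/4/18, and 25 are taken from the updated switch above, and any other index is treated as fake):

    enum CardType { case fake, front, back, passport }

    func cardType(forClassIndex index: Int) -> CardType {
        switch index {
        case 1, 3, 17: return .front
        case 2, 4, 18: return .back
        case 25:       return .passport
        default:       return .fake
        }
    }
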
@@ -97,12 +100,17 @@ class SBKValidateInput {
    previousInferenceTimeMs = currentTimeMs
    // Pass the pixel buffer to TensorFlow Lite to perform inference.
-    result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer)
-    if result![0] < result![1] {
+    result = modelDataHandler?.runModel(onFrame: pixelBuffer)
+    if result != nil {
+      let max = getResultCard(result: result!)
+      if max == 0 {
        return true
      } else {
        return false
      }
+    } else {
+      return false
+    }
  }
  func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
@@ -172,19 +180,6 @@ class SBKValidateInput {
    }
  }
-  // Display image processing
-  //  func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
-  //    let imageCap = image
-  //
-  //    let widthCrop = imageCap.size.width - imageCap.size.width / 10
-  //
-  //    UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
-  //    image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
-  //    let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
-  //    UIGraphicsEndImageContext()
-  //    return croppedImage
-  //  }
  func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
    let imageCap = image
@@ -222,6 +217,7 @@ class SBKValidateInput {
    return croppedImage
  }

  func saveImage(imageName: String, image: UIImage) -> String? {
    guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
    let fileName = imageName
@@ -230,7 +226,6 @@ class SBKValidateInput {
    if FileManager.default.fileExists(atPath: fileURL.path) {
      do {
        try FileManager.default.removeItem(atPath: fileURL.path)
-        print("Removed old image")
      } catch let removeError {
        print("couldn't remove file at path", removeError)
      }
...
@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74

-COCOAPODS: 1.10.2
+COCOAPODS: 1.11.2
@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74

-COCOAPODS: 1.10.2
+COCOAPODS: 1.11.2
@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
-LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
...
@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
-LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
...