Commit 7d8d0acf authored by Nguyễn Văn An's avatar Nguyễn Văn An

update model

parent c17d42a5
No preview for this file type
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "FD833007-BFDE-4E90-B09E-D1112B21135E"
type = "0"
version = "2.0">
</Bucket>
No preview for this file type
......@@ -18,6 +18,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
@IBOutlet public weak var lbStep: UILabel!
@IBOutlet public weak var btnCapture: UIButton!
@IBOutlet public weak var imgCaution: UIImageView!
@IBOutlet weak var viewTakePhoto: UIView!
public var captureSession: AVCaptureSession = AVCaptureSession()
public var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
......@@ -39,7 +40,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
private var cropZone: CGRect?
private var cropImage: CGRect?
var overlayView: OverLayCardView?
var imagePreview = UIImageView(frame: CGRect(x: 20, y: 20, width: 280, height: 210))
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
......@@ -79,6 +80,13 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
self.labelTypeCard.text = "Use front"
self.labelTypeCard.textColor = UIColor.white
self.addSubview(labelTypeCard)
self.addSubview(imagePreview)
}
/// Displays `image` in the preview thumbnail (`imagePreview`).
/// Hops to the main queue because UIKit views must only be mutated
/// from the main thread; safe to call from the capture/processing queue.
func setImage(_ image: UIImage) {
DispatchQueue.main.async {
self.imagePreview.image = image
}
}
func loadViewFromNib() -> UIView? {
......@@ -224,13 +232,14 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
self.completionSuccessCardStep(nil, urlImage, nil)
self.stopCamera()
}
}
//Sự kiện chụp ảnh
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) || self.statusValidateImage == .PASSPORT {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
......@@ -255,6 +264,8 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
connection.videoOrientation = .portrait
}
public func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
......@@ -273,7 +284,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
guard let crop = self.cropZone else {return}
guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else {return}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
if let overlayView = self.overlayView {
DispatchQueue.main.async {
......@@ -309,7 +320,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) || self.statusValidateImage == .PASSPORT {
self.lbDescription.textColor = .green
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
......
......@@ -25,7 +25,7 @@ class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDeleg
private let videoDataOutput = AVCaptureVideoDataOutput()
private var modelDataHandler: SBKModelDataHandler? =
SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
private var result: [Float]?
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
private let delayBetweenInferencesMs: Double = 1000
......
No preview for this file type
......@@ -14,9 +14,13 @@ class SBKValidateInput {
static let shared = SBKValidateInput()
var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
public typealias CompletionHandle = (_ data: Bool) -> Void
/// Configures the card-detection model handler with the input geometry
/// this validator crops to (280x210 — matches `imagePreview`'s size in
/// SBKValidateCardView; presumably the TFLite card model's input size —
/// TODO confirm against the model file).
/// `subtract = 0` disables the mean-subtraction offset in preprocessing
/// — NOTE(review): assumed from the property name; verify in SBKModelDataHandler.
init(){
modelDataHandler?.inputWidth = 280
modelDataHandler?.inputHeight = 210
modelDataHandler?.subtract = 0
}
func validateCard(imageInput: UIImage) -> Int {
let ciimage = CIImage(image: imageInput)
......@@ -56,24 +60,23 @@ class SBKValidateInput {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
previousInferenceTimeMs = currentTimeMs
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
if result == nil {
return .ERROR
}
switch self.getResultCard(result: result!) {
let max = getResultCard(result: result!)
let total = result!.reduce(0, +)
switch max {
case 0:
return .IMAGE_FAKE
case 1:
case 1, 3, 17:
return .IMAGE_FRONT
case 2:
case 2, 4, 18:
return .IMAGE_BACK
case 3:
case 25:
return .PASSPORT
case 4,5,6,7,8,9:
return .IMAGE_FAKE
default:
return .ERROR
return .IMAGE_FAKE
}
}
......@@ -97,12 +100,17 @@ class SBKValidateInput {
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer)
if result![0] < result![1] {
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
if result != nil {
let max = getResultCard(result: result!)
if max == 0 {
return true
} else {
return false
}
} else {
return false
}
}
func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
......@@ -172,19 +180,6 @@ class SBKValidateInput {
}
}
//Xử lý ảnh hiển thị
// func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
// let imageCap = image
//
// let widthCrop = imageCap.size.width - imageCap.size.width / 10
//
// UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
// image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
// let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
// UIGraphicsEndImageContext()
// return croppedImage
// }
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
......@@ -222,6 +217,7 @@ class SBKValidateInput {
return croppedImage
}
func saveImage(imageName: String, image: UIImage) -> String? {
guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
let fileName = imageName
......@@ -230,7 +226,6 @@ class SBKValidateInput {
if FileManager.default.fileExists(atPath: fileURL.path) {
do {
try FileManager.default.removeItem(atPath: fileURL.path)
print("Removed old image")
} catch let removeError {
print("couldn't remove file at path", removeError)
}
......
......@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
COCOAPODS: 1.10.2
COCOAPODS: 1.11.2
......@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
COCOAPODS: 1.10.2
COCOAPODS: 1.11.2
This diff is collapsed.
......@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
......
......@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment