Commit 0277c014 authored by Vmo-AnhNguyen

save image

parent 276686b7
@@ -732,6 +732,7 @@
 isa = XCBuildConfiguration;
 baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
 buildSettings = {
+BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
 CODE_SIGN_IDENTITY = "Apple Development";
 CODE_SIGN_STYLE = Automatic;
 DEFINES_MODULE = YES;
@@ -765,6 +766,7 @@
 isa = XCBuildConfiguration;
 baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
 buildSettings = {
+BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
 CODE_SIGN_IDENTITY = "Apple Development";
 CODE_SIGN_STYLE = Automatic;
 DEFINES_MODULE = YES;
...
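For context on the pbxproj change above: setting BUILD_LIBRARY_FOR_DISTRIBUTION = YES builds the framework with library evolution enabled, so Xcode emits a .swiftinterface and the binary stays importable from apps compiled with a different Swift toolchain. A minimal consumer-side sketch (the module name OCR_SDK is an assumption):

```swift
// Sketch of the consumer side: with library evolution on, this import keeps
// working even when the app's Swift compiler is newer than the compiler that
// built the framework. The module name below is an assumption.
import OCR_SDK
```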
@@ -12,7 +12,7 @@ import Vision
 class SBKValidateInput {
     static let shared = SBKValidateInput()
     var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
     var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
     public typealias CompletionHandle = (_ data: Bool) -> Void
@@ -56,7 +56,7 @@ class SBKValidateInput {
         let currentTimeMs = Date().timeIntervalSince1970 * 1000
         guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
         previousInferenceTimeMs = currentTimeMs
         // Pass the pixel buffer to TensorFlow Lite to perform inference.
         result = modelDataHandler?.runModel(onFrame: pixelBuffer)
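The guard above is a simple rate limiter: it drops frames that arrive sooner than delayBetweenInferencesMs after the previous inference. A self-contained sketch of the same pattern (the class name and the 1000 ms interval are illustrative, not taken from the SDK):

```swift
import Foundation

// Standalone version of the throttle used in the validation path; the name
// and interval are assumptions for illustration.
final class InferenceThrottle {
    private var previousInferenceTimeMs: Double = 0
    private let delayBetweenInferencesMs: Double = 1000 // assumed interval

    /// Returns true when enough time has passed to run another inference.
    func shouldRunInference() -> Bool {
        let currentTimeMs = Date().timeIntervalSince1970 * 1000
        guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return false }
        previousInferenceTimeMs = currentTimeMs
        return true
    }
}
```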
@@ -64,18 +64,18 @@
             return .ERROR
         }
         switch self.getResultCard(result: result!) {
         case 0:
             return .IMAGE_FAKE
         case 1:
             return .IMAGE_FRONT
         case 2:
             return .IMAGE_BACK
         case 3:
             return .PASSPORT
         case 4:
             return .IMAGE_FAKE
         default:
             return .ERROR
         }
     }
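The switch above maps the classifier's output index onto five outcomes, with classes 0 and 4 both treated as a fake image. A sketch of the result type it presumably returns (the case names come from the diff; the declaration itself is an assumption and the real one lives elsewhere in the SDK):

```swift
// Assumed shape of the card-validation result; may differ from the SDK's
// actual declaration.
enum SBKValidateCardResult {
    case ERROR        // inference failed or was throttled
    case IMAGE_FAKE   // classifier indices 0 and 4
    case IMAGE_FRONT  // index 1
    case IMAGE_BACK   // index 2
    case PASSPORT     // index 3
}
```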
@@ -97,7 +97,7 @@ class SBKValidateInput {
         let currentTimeMs = Date().timeIntervalSince1970 * 1000
         guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return false }
         previousInferenceTimeMs = currentTimeMs
         // Pass the pixel buffer to TensorFlow Lite to perform inference.
         result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer)
         if result![0] < result![1] {
@@ -106,7 +106,7 @@ class SBKValidateInput {
             return false
         }
     }
     func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
         let ciimage = CIImage(image: imageInput)
         let tmpcontext = CIContext(options: nil)
@@ -122,10 +122,10 @@ class SBKValidateInput {
     }
     func convertCIToUIImage(cmage: CIImage) -> UIImage {
         let context: CIContext = CIContext.init(options: nil)
         let cgImage: CGImage = context.createCGImage(cmage, from: cmage.extent)!
         let image: UIImage = UIImage.init(cgImage: cgImage)
         return image
     }
     func convertCGImgeToCVPixelBuffer(forImage image: CGImage) -> CVPixelBuffer? {
@@ -196,14 +196,36 @@
     }
     func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
+        let imageCap = image
+        let widthCrop = imageCap.size.width - imageCap.size.width / 4
         UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
         image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
         let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
         UIGraphicsEndImageContext()
         return croppedImage
     }
+    func saveImage(imageName: String, image: UIImage) -> String? {
+        guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
+        let fileName = imageName
+        let fileURL = documentsDirectory.appendingPathComponent(fileName)
+        guard let data = image.jpegData(compressionQuality: 1) else { return nil }
+        if FileManager.default.fileExists(atPath: fileURL.path) {
+            do {
+                try FileManager.default.removeItem(atPath: fileURL.path)
+                print("Removed old image")
+            } catch let removeError {
+                print("couldn't remove file at path", removeError)
+            }
+        }
+        do {
+            try data.write(to: fileURL)
+            return fileURL.path
+        } catch let error {
+            print("error saving file with error", error)
+        }
+        return nil
+    }
 }
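The new saveImage(imageName:image:) helper encodes the image as a maximum-quality JPEG, deletes any existing file of the same name in the app's Documents directory, writes the new data, and returns the file path on success (nil on failure). A usage sketch; the call site and file name below are illustrative:

```swift
import UIKit

// Hypothetical caller: persist a captured card photo and read it back.
func persistCapturedCard(_ captured: UIImage) {
    if let path = SBKValidateInput.shared.saveImage(imageName: "card_front.jpg", image: captured) {
        print("Saved to \(path)")
        let reloaded = UIImage(contentsOfFile: path) // round-trip sanity check
        print("Reload succeeded: \(reloaded != nil)")
    }
}
```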
@@ -10,3 +10,9 @@ target 'OCR-SDK' do
   #pod 'GoogleMobileVision/FaceDetector'
   #pod 'GTMSessionFetcher'
 end
+post_install do |installer|
+  installer.pods_project.build_configurations.each do |config|
+    config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
+  end
+end
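The post_install hook above excludes arm64 from simulator builds for every pod, a common workaround when a dependency (here, presumably the TensorFlowLite binary pods) ships without an arm64 simulator slice; on Apple Silicon Macs the simulator build then runs the x86_64 slice under Rosetta. An illustrative Swift-side check of that consequence:

```swift
// Illustrative only: report which slice a simulator build is running.
func logSimulatorArch() {
    #if targetEnvironment(simulator) && arch(x86_64)
    print("Simulator build is x86_64, consistent with excluding arm64 above")
    #endif
}
```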
@@ -12,12 +12,12 @@
 	<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
 	<dict>
 		<key>orderHint</key>
-		<integer>2</integer>
+		<integer>1</integer>
 	</dict>
 	<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
 	<dict>
 		<key>orderHint</key>
-		<integer>1</integer>
+		<integer>2</integer>
 	</dict>
 </dict>
 </dict>
...