Commit 0277c014 authored by Vmo-AnhNguyen

save image

parent 276686b7
@@ -732,6 +732,7 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
 			buildSettings = {
+				BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
 				CODE_SIGN_IDENTITY = "Apple Development";
 				CODE_SIGN_STYLE = Automatic;
 				DEFINES_MODULE = YES;
@@ -765,6 +766,7 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
 			buildSettings = {
+				BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
 				CODE_SIGN_IDENTITY = "Apple Development";
 				CODE_SIGN_STYLE = Automatic;
 				DEFINES_MODULE = YES;
...
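Note: setting BUILD_LIBRARY_FOR_DISTRIBUTION = YES in both the debug and release configurations turns on module stability for the framework, so Xcode emits a .swiftinterface and the compiled OCR-SDK can be imported by apps built with a different Swift compiler version. This is the usual prerequisite for shipping the SDK as a prebuilt binary, presumably the motivation here.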
@@ -35,7 +35,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
     public var typeCamera: TypeCard = TypeCard.FRONT
     public let labelTypeCard = UILabel()
-    public var completionSuccessCardStep: (_ validate: ValidateCard?, _ data: Data?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
+    public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
     public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
@@ -166,7 +166,6 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
         }
     }
     //set frame preview
     func setupLivePreview() {
         videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
@@ -223,15 +222,11 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
     // Handle the photo data after capture
     @available(iOS 11.0, *)
     public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
-        guard let imageData = photo.fileDataRepresentation()
-            else { return }
-        // self.captureSession.stopRunning()
-        // self.captureSession.removeOutput(self.videoDataOutput)
-        // self.captureSession.removeOutput(self.stillImageOutput)
+        guard let imageData = photo.fileDataRepresentation() else { return }
         let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height * 4.5 / 20, width: self.viewCamera.frame.size.width * 18 / 20, height: self.viewCamera.frame.size.width * 18 * 3 / 20 / 4), scale: 1.0)
-        self.completionSuccessCardStep(nil, cropImage!.pngData()!, nil)
+        if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
+            self.completionSuccessCardStep(nil, urlImage, nil)
+        }
     }
     // Handle the image for display
...
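With this change the card-capture callback hands back a file path instead of raw PNG data. A minimal sketch of how a host app might consume the new signature; the view setup is assumed, only the callback shape comes from this diff:

    import UIKit

    let cardView = SBKValidateCardView()
    cardView.completionSuccessCardStep = { validate, pathImage, permissionCamera in
        // pathImage is now a path into the app's Documents directory.
        guard let path = pathImage,
              let cardImage = UIImage(contentsOfFile: path) else { return }
        print("Cropped card image at \(path), size \(cardImage.size)")
    }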
@@ -13,7 +13,7 @@ import CoreML
 open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     let nibName = "SBKRecordFaceView"
-    var contentView:UIView?
+    var contentView: UIView?
     @IBOutlet weak var viewBackground: UIView!
     @IBOutlet weak var lbDescription: UILabel!
@@ -23,6 +23,8 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     @IBOutlet weak var viewCheckStep3: UIView!
     @IBOutlet weak var imageDescription: UIImageView!
     @IBOutlet weak var stackView: UIStackView!
+    @IBOutlet weak var stepView: UIView!
+    @IBOutlet weak var viewSubIcon: UIView!
     private let captureSession = AVCaptureSession()
     private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
@@ -36,7 +38,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     var numberTrue = 0
     var numberFalse = 0
     var space: Float = 0.0
-    var dataImageSuccess: [Data] = []
+    var dataImageSuccess: [UIImage] = []
     var checkStartRecord: Bool = false
     var timer = Timer()
     var timeRecord: Int = 0
@@ -46,7 +48,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     var idBack: String = ""
     var URLToken: String = ""
     var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
-    public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?)->Void = {_,_,_ in}
+    public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ urlImages: [String: Any]?)->Void = {_,_,_ in}
     public var timeSpace: Int = 3
     public var zoom: CGFloat = 1.0
     public var imageStartRecord: UIImage?
@@ -89,6 +91,10 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
         }
     }
+    public func statusSubView(statusHide: Bool) {
+        viewSubIcon.isHidden = statusHide
+    }
     public func startCamera() {
         self.captureSession.startRunning()
     }
@@ -113,7 +119,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
         let locationTop = self.bounds.origin.y + 35
-        let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+        let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
         let heightOval = originYLBDescription - locationTop
@@ -156,7 +162,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
         let locationTop = self.bounds.origin.y + 35
-        let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+        let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
         let heightOval = originYLBDescription - locationTop
@@ -212,7 +218,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
         if self.result![0] < self.result![1] {
             if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
                 if self.dataImageSuccess.count == 0 {
-                    self.dataImageSuccess.append(imageView.pngData()!)
+                    self.dataImageSuccess.append(imageView)
                 }
                 self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
                 self.lbDescription.textColor = UIColor.green
@@ -231,7 +237,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
                 self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
                 self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
                 if self.dataImageSuccess.count == 1 {
-                    self.dataImageSuccess.append(imageView.pngData()!)
+                    self.dataImageSuccess.append(imageView)
                 }
             } else if statusFace != .TORIGHT && self.checkStep == 1 {
                 self.lbDescription.textColor = UIColor.red
@@ -245,7 +251,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
                 self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
                 self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
                 if self.dataImageSuccess.count == 2 {
-                    self.dataImageSuccess.append(imageView.pngData()!)
+                    self.dataImageSuccess.append(imageView)
                 }
             } else if statusFace != .TOLEFT && self.checkStep == 2 {
                 self.lbDescription.textColor = UIColor.red
@@ -351,7 +357,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
         let locationTop = self.bounds.origin.y + 35
-        let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+        let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
         let heightOval = originYLBDescription - locationTop
@@ -461,17 +467,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
         let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
         if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
             DispatchQueue.main.async {
+                guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
+                    let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
+                    let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
+                    return
+                }
                 let data: [String: Any] = [
-                    "imagestraight": self.dataImageSuccess[0],
-                    "imageRight": self.dataImageSuccess[1],
-                    "imageLeft": self.dataImageSuccess[2],
+                    "imageStraightFile": urlImageStraight,
+                    "imageRightFile": urlImageRight,
+                    "imageLeftFile": urlImageLeft,
                 ]
                 self.completionSuccessFaceRecordStep(nil, nil, data)
                 self.timeRecord = 0
                 self.resetRecord()
                 self.timer.invalidate()
-                // self.captureSession.stopRunning()
-                // self.captureSession.removeOutput(self.videoDataOutput)
             }
         } else {
             self.lbDescription.textColor = UIColor.red
@@ -509,10 +518,15 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
             case 3:
                 if self.dataImageSuccess.count > 2 {
                     DispatchQueue.main.async {
+                        guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
+                            let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
+                            let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
+                            return
+                        }
                         let data: [String: Any] = [
-                            "imagestraight": self.dataImageSuccess[0],
-                            "imageRight": self.dataImageSuccess[1],
-                            "imageLeft": self.dataImageSuccess[2],
+                            "imageStraightFile": urlImageStraight,
+                            "imageRightFile": urlImageRight,
+                            "imageLeftFile": urlImageLeft,
                         ]
                         self.completionSuccessFaceRecordStep(nil, nil, data)
                         self.timeRecord = 0
...
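On the face-recording side the step callback now delivers file paths keyed by pose rather than raw image Data. A consumer sketch using only the dictionary keys visible in this diff (the recordView instance is assumed to be an SBKRecordFaceView already on screen):

    recordView.completionSuccessFaceRecordStep = { validate, step, urlImages in
        guard let urls = urlImages,
              let straight = urls["imageStraightFile"] as? String,
              let right = urls["imageRightFile"] as? String,
              let left = urls["imageLeftFile"] as? String else { return }
        // Reload the three pose captures from disk when needed.
        let images = [straight, right, left].compactMap { UIImage(contentsOfFile: $0) }
        print("Recovered \(images.count) face captures from disk")
    }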
@@ -196,14 +196,36 @@ class SBKValidateInput {
     }
     func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
-        let imageCap = image
-        let widthCrop = imageCap.size.width - imageCap.size.width / 4
         UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
         image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
         let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
         UIGraphicsEndImageContext()
         return croppedImage
     }
+    func saveImage(imageName: String, image: UIImage) -> String? {
+        guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
+        let fileName = imageName
+        let fileURL = documentsDirectory.appendingPathComponent(fileName)
+        guard let data = image.jpegData(compressionQuality: 1) else { return nil }
+        if FileManager.default.fileExists(atPath: fileURL.path) {
+            do {
+                try FileManager.default.removeItem(atPath: fileURL.path)
+                print("Removed old image")
+            } catch let removeError {
+                print("couldn't remove file at path", removeError)
+            }
+        }
+        do {
+            try data.write(to: fileURL)
+            return fileURL.path
+        } catch let error {
+            print("error saving file with error", error)
+        }
+        return nil
+    }
 }
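The new saveImage helper writes into the app's Documents directory, deletes any previous file with the same name, and returns the plain file path (not a file:// URL). Note that it encodes the image as JPEG even though every caller passes a ".png" name; UIImage(contentsOfFile:) sniffs the actual bytes, so the round trip still works. A small usage sketch (capturedImage and the file name are for illustration):

    let input = SBKValidateInput.shared
    if let path = input.saveImage(imageName: "imagecardFRONT.png", image: capturedImage) {
        // Repeated saves with the same name overwrite the old file.
        let restored = UIImage(contentsOfFile: path)
    }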
@@ -10,3 +10,9 @@ target 'OCR-SDK' do
     #pod 'GoogleMobileVision/FaceDetector'
     #pod 'GTMSessionFetcher'
 end
+post_install do |installer|
+  installer.pods_project.build_configurations.each do |config|
+    config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
+  end
+end
@@ -12,12 +12,12 @@
         <key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
         <dict>
             <key>orderHint</key>
-            <integer>2</integer>
+            <integer>1</integer>
         </dict>
         <key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
         <dict>
             <key>orderHint</key>
-            <integer>1</integer>
+            <integer>2</integer>
         </dict>
     </dict>
 </dict>
...
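The xcschememanagement.plist change only swaps the display order of the TensorFlowLiteC and TensorFlowLiteSwift schemes in Xcode's scheme picker; it has no effect on the build.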