Commit 0277c014 authored by Vmo-AnhNguyen's avatar Vmo-AnhNguyen

save image

parent 276686b7
......@@ -732,6 +732,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
......@@ -765,6 +766,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
......
......@@ -35,7 +35,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
public var typeCamera: TypeCard = TypeCard.FRONT
public let labelTypeCard = UILabel()
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ data: Data?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
......@@ -57,7 +57,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
AVCaptureDevice.requestAccess(for: .video, completionHandler: { (granted: Bool) in
if !granted {
self.completionSuccessCardStep(nil, nil, false)
self.completionSuccessCardStep(nil, nil, false)
return
}
})
......@@ -83,7 +83,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
public func setButtonImage(iconTakeCard: UIImage? = nil, iconFrameTopLeft: UIImage? = nil, iconFrameTopRight: UIImage? = nil, iconFrameBottomLeft: UIImage? = nil, iconFrameBottomRight: UIImage? = nil) {
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
......@@ -112,7 +112,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
if iconFrameBottomRight != nil {
let ivRightBottom = UIImageView(image: iconFrameBottomRight)
let ivRightBottom = UIImageView(image: iconFrameBottomRight)
ivRightBottom.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth/20 + fWidth * 18 / 20 - 50, y: topLeft.y - fHeight/20 + fWidth * 18 * 3 / 20 / 4 - 50, width: 50, height: 50)
self.viewCamera.addSubview(ivRightBottom)
}
......@@ -125,82 +125,81 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
public func stopCamera() {
self.captureSession.stopRunning()
}
//Cài đặt máy ảnh
func loadCamera() {
captureSession.sessionPreset = .hd1920x1080
if #available(iOS 11.1, *) {
guard let backCamera = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .back).devices.first else {
print("Unable to access back camera!")
return
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .back).devices.first else {
print("Unable to access back camera!")
return
}
do {
self.inputCamera = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(self.inputCamera) {
captureSession.addInput(self.inputCamera)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
do {
self.inputCamera = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(self.inputCamera) {
captureSession.addInput(self.inputCamera)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
}
//set frame preview
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.connection?.videoOrientation = .portrait
viewCamera.layer.addSublayer(videoPreviewLayer)
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
}
//set frame preview
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.connection?.videoOrientation = .portrait
viewCamera.layer.addSublayer(videoPreviewLayer)
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
let overlay = createOverlay(frame: self.bounds)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
//set vùng đặt thẻ
func createOverlay(frame: CGRect, colorBorder: CGColor = UIColor.clear.cgColor) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
let topLeft = CGPoint(x: fWidth/2-squareWidth/3, y: fHeight/4)
path.addRoundedRect(in: CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 0, cornerHeight: 0)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
//set vùng đặt thẻ
func createOverlay(frame: CGRect, colorBorder: CGColor = UIColor.clear.cgColor) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
let topLeft = CGPoint(x: fWidth/2-squareWidth/3, y: fHeight/4)
path.addRoundedRect(in: CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 0, cornerHeight: 0)
let borderLayer = CAShapeLayer()
borderLayer.path = path // Reuse the Bezier path
borderLayer.fillColor = UIColor.clear.cgColor
......@@ -209,74 +208,70 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
borderLayer.frame = overlayView.bounds
overlayView.layer.addSublayer(borderLayer)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
//Xử lý data sâu khi chụp
@available(iOS 11.0, *)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
//Xử lý data sâu khi chụp
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
// self.captureSession.removeOutput(self.stillImageOutput)
guard let imageData = photo.fileDataRepresentation() else { return }
let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height * 4.5 / 20 , width: self.viewCamera.frame.size.width * 18 / 20, height: self.viewCamera.frame.size.width * 18 * 3 / 20 / 4), scale: 1.0)
self.completionSuccessCardStep(nil, cropImage!.pngData()!, nil)
}
//Xử lý ảnh hiển thị
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 10
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
//Sự kiện chụp ảnh
@IBAction func onCapturePhoto(_ sender: Any) {
if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
self.completionSuccessCardStep(nil, urlImage, nil)
}
}
//Xử lý ảnh hiển thị
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 10
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
//Sự kiện chụp ảnh
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
} else {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
}
}
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if UIDevice.current.userInterfaceIdiom == .pad {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
} else {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
}
}
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if UIDevice.current.userInterfaceIdiom == .pad {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 2 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4), scale: 1.0)
} else {
} else {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 1.5 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4 + 7), scale: 1.0)
}
let ciimage = CIImage(image: imageOutput!)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
}
let ciimage = CIImage(image: imageOutput!)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
}
extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
......@@ -289,7 +284,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
}
//self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
......
......@@ -13,7 +13,7 @@ import CoreML
open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let nibName = "SBKRecordFaceView"
var contentView:UIView?
var contentView: UIView?
@IBOutlet weak var viewBackground: UIView!
@IBOutlet weak var lbDescription: UILabel!
......@@ -23,6 +23,8 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
@IBOutlet weak var viewCheckStep3: UIView!
@IBOutlet weak var imageDescription: UIImageView!
@IBOutlet weak var stackView: UIStackView!
@IBOutlet weak var stepView: UIView!
@IBOutlet weak var viewSubIcon: UIView!
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
......@@ -36,7 +38,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
var numberTrue = 0
var numberFalse = 0
var space: Float = 0.0
var dataImageSuccess: [Data] = []
var dataImageSuccess: [UIImage] = []
var checkStartRecord: Bool = false
var timer = Timer()
var timeRecord: Int = 0
......@@ -46,7 +48,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
var idBack: String = ""
var URLToken: String = ""
var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?)->Void = {_,_,_ in}
public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ urlImages: [String: Any]?)->Void = {_,_,_ in}
public var timeSpace: Int = 3
public var zoom: CGFloat = 1.0
public var imageStartRecord: UIImage?
......@@ -89,6 +91,10 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
}
}
public func statusSubView(statusHide: Bool) {
viewSubIcon.isHidden = statusHide
}
public func startCamera() {
self.captureSession.startRunning()
}
......@@ -113,7 +119,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -156,7 +162,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -212,7 +218,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
if self.result![0] < self.result![1] {
if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
if self.dataImageSuccess.count == 0 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
self.lbDescription.textColor = UIColor.green
......@@ -231,7 +237,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
if self.dataImageSuccess.count == 1 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TORIGHT && self.checkStep == 1 {
self.lbDescription.textColor = UIColor.red
......@@ -245,7 +251,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
if self.dataImageSuccess.count == 2 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TOLEFT && self.checkStep == 2 {
self.lbDescription.textColor = UIColor.red
......@@ -351,7 +357,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -382,39 +388,39 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
DispatchQueue.main.async {
DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
if (request.results as? [VNFaceObservation]) != nil {
if let results = request.results as? [VNFaceObservation], results.count > 0 {
if results.count > 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "There are many faces in the frame".localized()
self.completionSuccessFaceRecordStep(.MANY_FACE, nil, nil)
} else {
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!)
if (request.results as? [VNFaceObservation]) != nil {
if let results = request.results as? [VNFaceObservation], results.count > 0 {
self.startTimer()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: statusString)
if results.count > 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "There are many faces in the frame".localized()
self.completionSuccessFaceRecordStep(.MANY_FACE, nil, nil)
} else {
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!)
self.startTimer()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: statusString)
}
}
}
} else {
if !self.checkStatusRecord {
self.checkStartRecord = false
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Position your face in the oval".localized()//"Đặt vị trí mặt bạn vào hình"
self.completionSuccessFaceRecordStep(.NO_FACE, nil, nil)
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect face, please check!".localized()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR)
if !self.checkStatusRecord {
self.checkStartRecord = false
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Position your face in the oval".localized()//"Đặt vị trí mặt bạn vào hình"
self.completionSuccessFaceRecordStep(.NO_FACE, nil, nil)
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect face, please check!".localized()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR)
}
}
}
}
}
}
}
})
......@@ -461,17 +467,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
return
}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
}
} else {
self.lbDescription.textColor = UIColor.red
......@@ -509,10 +518,15 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
case 3:
if self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
return
}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
......
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17156" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17701" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17125"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17703"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
......@@ -14,11 +14,13 @@
<outlet property="imageDescription" destination="qAP-z1-ph6" id="4WK-9l-Hhw"/>
<outlet property="lbDescription" destination="l4S-dU-FVT" id="7qE-CY-e4c"/>
<outlet property="stackView" destination="lfu-gT-avH" id="UPQ-PC-13s"/>
<outlet property="stepView" destination="wyh-9h-sMx" id="OKm-zE-5ba"/>
<outlet property="viewBackground" destination="vtt-nP-K7e" id="p2H-QE-FHN"/>
<outlet property="viewCheckStep1" destination="Ikb-Rh-oGt" id="gg1-L1-h5k"/>
<outlet property="viewCheckStep2" destination="q10-fa-3fY" id="YaT-X4-UMq"/>
<outlet property="viewCheckStep3" destination="KKw-Ea-56U" id="91G-Kf-nGW"/>
<outlet property="viewOval" destination="o70-jf-1ia" id="W81-Ef-mxN"/>
<outlet property="viewSubIcon" destination="vJK-3l-9HU" id="g2w-rY-foz"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
......@@ -30,102 +32,113 @@
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="wyh-9h-sMx">
<rect key="frame" x="173" y="771" width="68.5" height="20.5"/>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="o70-jf-1ia">
<rect key="frame" x="60" y="69" width="294" height="581"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="vJK-3l-9HU">
<rect key="frame" x="0.0" y="670" width="414" height="226"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 3/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Maa-bq-dEa">
<rect key="frame" x="10" y="3" width="48.5" height="14.5"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="l4S-dU-FVT" userLabel="position your face in the oval">
<rect key="frame" x="96.5" y="165.5" width="221.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.39954239130000002" green="0.39960256220000001" blue="0.39952337739999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="Maa-bq-dEa" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="top" constant="3" id="1h4-xu-9Jn"/>
<constraint firstItem="Maa-bq-dEa" firstAttribute="leading" secondItem="wyh-9h-sMx" secondAttribute="leading" constant="10" id="7Sy-eo-0q9"/>
<constraint firstAttribute="bottom" secondItem="Maa-bq-dEa" secondAttribute="bottom" constant="3" id="c2B-Vb-FHV"/>
<constraint firstAttribute="trailing" secondItem="Maa-bq-dEa" secondAttribute="trailing" constant="10" id="i4F-Gz-xe8"/>
</constraints>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="5" translatesAutoresizingMaskIntoConstraints="NO" id="lfu-gT-avH">
<rect key="frame" x="100" y="670" width="214" height="6"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ikb-Rh-oGt">
<rect key="frame" x="0.0" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20000000000000001" green="0.20000000000000001" blue="0.20000000000000001" alpha="1" colorSpace="calibratedRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="q10-fa-3fY">
<rect key="frame" x="73" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="KKw-Ea-56U">
<rect key="frame" x="146" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="wyh-9h-sMx">
<rect key="frame" x="173" y="125" width="68.5" height="20.5"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 3/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Maa-bq-dEa">
<rect key="frame" x="10" y="3" width="48.5" height="14.5"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.39954239130000002" green="0.39960256220000001" blue="0.39952337739999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="Maa-bq-dEa" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="top" constant="3" id="1h4-xu-9Jn"/>
<constraint firstItem="Maa-bq-dEa" firstAttribute="leading" secondItem="wyh-9h-sMx" secondAttribute="leading" constant="10" id="7Sy-eo-0q9"/>
<constraint firstAttribute="bottom" secondItem="Maa-bq-dEa" secondAttribute="bottom" constant="3" id="c2B-Vb-FHV"/>
<constraint firstAttribute="trailing" secondItem="Maa-bq-dEa" secondAttribute="trailing" constant="10" id="i4F-Gz-xe8"/>
</constraints>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="qAP-z1-ph6">
<rect key="frame" x="183.5" y="46" width="47" height="60"/>
<constraints>
<constraint firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="lbZ-b0-JQ6"/>
<constraint firstAttribute="height" constant="60" id="lpk-mw-Ydj"/>
</constraints>
</imageView>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="5" translatesAutoresizingMaskIntoConstraints="NO" id="lfu-gT-avH">
<rect key="frame" x="100" y="20" width="214" height="6"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ikb-Rh-oGt">
<rect key="frame" x="0.0" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20000000000000001" green="0.20000000000000001" blue="0.20000000000000001" alpha="1" colorSpace="calibratedRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="q10-fa-3fY">
<rect key="frame" x="73" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="KKw-Ea-56U">
<rect key="frame" x="146" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
</subviews>
<constraints>
<constraint firstAttribute="height" constant="6" id="zRt-Eh-zot"/>
</constraints>
</stackView>
</subviews>
<constraints>
<constraint firstAttribute="height" constant="6" id="zRt-Eh-zot"/>
</constraints>
</stackView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="l4S-dU-FVT">
<rect key="frame" x="96.5" y="801.5" width="221.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="o70-jf-1ia">
<rect key="frame" x="60" y="69" width="294" height="581"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="qAP-z1-ph6">
<rect key="frame" x="183.5" y="696" width="47" height="60"/>
<constraints>
<constraint firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="lbZ-b0-JQ6"/>
<constraint firstAttribute="height" constant="60" id="lpk-mw-Ydj"/>
<constraint firstAttribute="bottom" secondItem="l4S-dU-FVT" secondAttribute="bottom" constant="40" id="7Yx-6Q-gLs"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="top" secondItem="vJK-3l-9HU" secondAttribute="top" constant="20" id="IJS-1d-cdc"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="leading" secondItem="vJK-3l-9HU" secondAttribute="leading" constant="100" id="K2Y-OR-tdS"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="top" secondItem="qAP-z1-ph6" secondAttribute="bottom" constant="19" id="KoA-Uc-RVP"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="centerX" secondItem="qAP-z1-ph6" secondAttribute="centerX" id="NQh-Qp-Edn"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="centerX" secondItem="wyh-9h-sMx" secondAttribute="centerX" id="Oku-d6-8xF"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="top" secondItem="lfu-gT-avH" secondAttribute="bottom" constant="20" id="T8S-a8-xgG"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="centerX" secondItem="l4S-dU-FVT" secondAttribute="centerX" id="V3j-1h-qaX"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="centerX" secondItem="vJK-3l-9HU" secondAttribute="centerX" id="ae7-T7-2n4"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="bottom" constant="20" id="nRu-XH-t9b"/>
</constraints>
</imageView>
</view>
</subviews>
<viewLayoutGuide key="safeArea" id="9lf-e8-SmZ"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
<constraints>
<constraint firstItem="qAP-z1-ph6" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="0MA-kq-exD"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="4yn-TJ-yqm"/>
<constraint firstAttribute="trailing" secondItem="vtt-nP-K7e" secondAttribute="trailing" id="6aW-K2-jlG"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="top" secondItem="lfu-gT-avH" secondAttribute="bottom" constant="20" id="Fh8-GZ-8yL"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="I6p-U2-cTt"/>
<constraint firstAttribute="bottom" secondItem="vJK-3l-9HU" secondAttribute="bottom" id="GFF-jv-UtV"/>
<constraint firstItem="vtt-nP-K7e" firstAttribute="top" secondItem="UFA-SZ-Qoz" secondAttribute="top" id="N3v-KG-OEA"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="top" secondItem="9lf-e8-SmZ" secondAttribute="top" constant="25" id="NPG-HN-ut1"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="100" id="Uqs-Lb-Gn3"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="top" secondItem="qAP-z1-ph6" secondAttribute="bottom" constant="15" id="Ur0-Sk-VRP"/>
<constraint firstItem="vJK-3l-9HU" firstAttribute="top" secondItem="o70-jf-1ia" secondAttribute="bottom" constant="20" id="QCr-Od-Hn9"/>
<constraint firstItem="vJK-3l-9HU" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" id="WBE-5x-ey3"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="XTc-Jw-HgS"/>
<constraint firstItem="9lf-e8-SmZ" firstAttribute="bottom" secondItem="l4S-dU-FVT" secondAttribute="bottom" constant="40" id="aMm-fz-lIb"/>
<constraint firstItem="vtt-nP-K7e" firstAttribute="leading" secondItem="UFA-SZ-Qoz" secondAttribute="leading" id="ct7-fr-gJL"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="top" secondItem="o70-jf-1ia" secondAttribute="bottom" constant="20" id="e65-sJ-G8O"/>
<constraint firstItem="9lf-e8-SmZ" firstAttribute="trailing" secondItem="vJK-3l-9HU" secondAttribute="trailing" id="eg7-gG-37U"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="60" id="g9o-8i-MtB"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="hd0-Yp-38K"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="bottom" constant="10" id="lo9-cy-Wk0"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="nV7-Zn-tQz"/>
<constraint firstAttribute="bottom" secondItem="vtt-nP-K7e" secondAttribute="bottom" id="okN-H6-tDN"/>
</constraints>
<point key="canvasLocation" x="133" y="154"/>
......
......@@ -12,7 +12,7 @@ import Vision
class SBKValidateInput {
static let shared = SBKValidateInput()
var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
public typealias CompletionHandle = (_ data: Bool) -> Void
......@@ -56,7 +56,7 @@ class SBKValidateInput {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
......@@ -64,18 +64,18 @@ class SBKValidateInput {
return .ERROR
}
switch self.getResultCard(result: result!) {
case 0:
return .IMAGE_FAKE
case 1:
return .IMAGE_FRONT
case 2:
return .IMAGE_BACK
case 3:
return .PASSPORT
case 4:
return .IMAGE_FAKE
default:
return .ERROR
case 0:
return .IMAGE_FAKE
case 1:
return .IMAGE_FRONT
case 2:
return .IMAGE_BACK
case 3:
return .PASSPORT
case 4:
return .IMAGE_FAKE
default:
return .ERROR
}
}
......@@ -97,7 +97,7 @@ class SBKValidateInput {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return false }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer)
if result![0] < result![1] {
......@@ -106,7 +106,7 @@ class SBKValidateInput {
return false
}
}
func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
let ciimage = CIImage(image: imageInput)
let tmpcontext = CIContext(options: nil)
......@@ -122,10 +122,10 @@ class SBKValidateInput {
}
/// Renders a `CIImage` into a bitmap-backed `UIImage`.
///
/// The diff residue in this hunk duplicated the whole body (the removed and
/// added sides of the diff were concatenated), redeclaring `context`,
/// `cgImage` and `image`; this is the single intended implementation.
///
/// - Parameter cmage: The Core Image image to rasterize.
/// - Returns: A `UIImage` wrapping a freshly created `CGImage`.
/// - Note: `createCGImage` is force-unwrapped, matching the original
///   contract (a nil render is treated as a programmer error).
func convertCIToUIImage(cmage: CIImage) -> UIImage {
    let context: CIContext = CIContext.init(options: nil)
    // Rasterize over the image's full extent.
    let cgImage: CGImage = context.createCGImage(cmage, from: cmage.extent)!
    let image: UIImage = UIImage.init(cgImage: cgImage)
    return image
}
func convertCGImgeToCVPixelBuffer (forImage image: CGImage) -> CVPixelBuffer? {
......@@ -196,14 +196,36 @@ class SBKValidateInput {
}
/// Crops `rect` (given in scaled/display coordinates) out of `image` by
/// redrawing the image offset into a `rect`-sized bitmap context.
///
/// - Parameters:
///   - image: Source image to crop.
///   - rect: Crop rectangle; its origin is divided by `scale` to map back
///     into the image's own coordinate space.
///   - scale: Scale factor between `rect`'s coordinate space and the image.
/// - Returns: The cropped image, or `nil` if the context yields no image.
func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
    // Dead locals removed: the original computed `imageCap` and `widthCrop`
    // (width - width/4) but never used either value.
    // Opaque (true) context at scale 0.0 = device screen scale.
    UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
    // Drawing at a negative origin slides the desired region into view.
    image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
    let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return croppedImage
}
/// Encodes `image` as maximum-quality JPEG and writes it into the app's
/// Documents directory under `imageName`, replacing any existing file.
///
/// - Parameters:
///   - imageName: File name (with extension) to write inside Documents.
///   - image: Image to encode.
/// - Returns: The absolute path of the written file, or `nil` if the
///   Documents directory is unavailable, JPEG encoding fails, or the
///   write throws.
func saveImage(imageName: String, image: UIImage) -> String? {
    guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
    let fileURL = documentsDirectory.appendingPathComponent(imageName)
    guard let data = image.jpegData(compressionQuality: 1) else { return nil }
    do {
        // .atomic writes to a temporary file and renames it over any
        // existing file in one step — this replaces the original
        // remove-then-write sequence, which raced between the existence
        // check and the write and ignored removal failures.
        try data.write(to: fileURL, options: .atomic)
        return fileURL.path
    } catch let error {
        print("error saving file with error", error)
        return nil
    }
}
}
......@@ -10,3 +10,9 @@ target 'OCR-SDK' do
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
# CocoaPods hook run after pod project generation.
# Excludes the arm64 slice from simulator builds of every Pods build
# configuration — NOTE(review): presumably a workaround for pod binaries
# (e.g. the TensorFlowLite pods referenced elsewhere in this commit) that
# ship without an arm64 simulator slice; confirm it is still needed once
# the pods provide xcframeworks.
post_install do |installer|
  installer.pods_project.build_configurations.each do |config|
    config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
  end
end
......@@ -12,12 +12,12 @@
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
<integer>2</integer>
</dict>
</dict>
</dict>
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment