Commit 0277c014 authored by Vmo-AnhNguyen

save image

parent 276686b7
@@ -732,6 +732,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
+BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
@@ -765,6 +766,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
+BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
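Both the debug and release configurations now set BUILD_LIBRARY_FOR_DISTRIBUTION = YES, enabling Swift module stability so the compiled framework can be consumed by apps built with a newer Swift compiler than the one that built the SDK. A minimal sketch of the consuming side, assuming the SDK ships as a binary framework and that its module is named OCR_SDK (both are assumptions, neither is shown in this commit):

```swift
// Hypothetical host-app usage: with library evolution enabled, the prebuilt
// framework keeps importing even when the app uses a newer Swift toolchain.
import OCR_SDK  // assumed module name for the OCR-SDK target

let cardView = SBKValidateCardView()  // public SDK type changed later in this commit
```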
@@ -35,7 +35,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
public var typeCamera: TypeCard = TypeCard.FRONT
public let labelTypeCard = UILabel()
-public var completionSuccessCardStep: (_ validate: ValidateCard?, _ data: Data?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
+public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
@@ -166,7 +166,6 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
}
-//set frame preview
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
@@ -223,15 +222,11 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
//Handle the photo data after capture
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
-guard let imageData = photo.fileDataRepresentation()
-    else { return }
-// self.captureSession.stopRunning()
-// self.captureSession.removeOutput(self.videoDataOutput)
-// self.captureSession.removeOutput(self.stillImageOutput)
+guard let imageData = photo.fileDataRepresentation() else { return }
let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height * 4.5 / 20 , width: self.viewCamera.frame.size.width * 18 / 20, height: self.viewCamera.frame.size.width * 18 * 3 / 20 / 4), scale: 1.0)
-self.completionSuccessCardStep(nil, cropImage!.pngData()!, nil)
+if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
+self.completionSuccessCardStep(nil, urlImage, nil)
+}
}
//Process the image for display
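With this change the card-capture callback delivers a file path instead of raw PNG data, so the host app reads the cropped image back from the Documents directory when it needs it. A minimal consumer sketch; the helper name `bindCardCapture` is illustrative and not part of the SDK:

```swift
import UIKit

// Hypothetical consumer of the updated callback.
func bindCardCapture(_ cardView: SBKValidateCardView) {
    cardView.completionSuccessCardStep = { validate, pathImage, permissionCamera in
        // The second parameter is now a String? file path instead of Data?.
        guard let path = pathImage, let image = UIImage(contentsOfFile: path) else { return }
        print("Cropped card image saved at \(path), size \(image.size)")
    }
}
```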
@@ -13,7 +13,7 @@ import CoreML
open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let nibName = "SBKRecordFaceView"
-var contentView:UIView?
+var contentView: UIView?
@IBOutlet weak var viewBackground: UIView!
@IBOutlet weak var lbDescription: UILabel!
@@ -23,6 +23,8 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
@IBOutlet weak var viewCheckStep3: UIView!
@IBOutlet weak var imageDescription: UIImageView!
@IBOutlet weak var stackView: UIStackView!
+@IBOutlet weak var stepView: UIView!
+@IBOutlet weak var viewSubIcon: UIView!
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
@@ -36,7 +38,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
var numberTrue = 0
var numberFalse = 0
var space: Float = 0.0
-var dataImageSuccess: [Data] = []
+var dataImageSuccess: [UIImage] = []
var checkStartRecord: Bool = false
var timer = Timer()
var timeRecord: Int = 0
@@ -46,7 +48,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
var idBack: String = ""
var URLToken: String = ""
var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
-public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?)->Void = {_,_,_ in}
+public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ urlImages: [String: Any]?)->Void = {_,_,_ in}
public var timeSpace: Int = 3
public var zoom: CGFloat = 1.0
public var imageStartRecord: UIImage?
@@ -89,6 +91,10 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
}
}
+public func statusSubView(statusHide: Bool) {
+viewSubIcon.isHidden = statusHide
+}
public func startCamera() {
self.captureSession.startRunning()
}
@@ -113,7 +119,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let locationTop = self.bounds.origin.y + 35
-let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
@@ -156,7 +162,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
let locationTop = self.bounds.origin.y + 35
-let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
@@ -212,7 +218,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
if self.result![0] < self.result![1] {
if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
if self.dataImageSuccess.count == 0 {
-self.dataImageSuccess.append(imageView.pngData()!)
+self.dataImageSuccess.append(imageView)
}
self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
self.lbDescription.textColor = UIColor.green
@@ -231,7 +237,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
if self.dataImageSuccess.count == 1 {
-self.dataImageSuccess.append(imageView.pngData()!)
+self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TORIGHT && self.checkStep == 1 {
self.lbDescription.textColor = UIColor.red
@@ -245,7 +251,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
if self.dataImageSuccess.count == 2 {
-self.dataImageSuccess.append(imageView.pngData()!)
+self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TOLEFT && self.checkStep == 2 {
self.lbDescription.textColor = UIColor.red
@@ -351,7 +357,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let locationTop = self.bounds.origin.y + 35
-let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
+let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
@@ -461,17 +467,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
+guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
+    let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
+    let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
+    return
+}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
-// self.captureSession.stopRunning()
-// self.captureSession.removeOutput(self.videoDataOutput)
}
} else {
self.lbDescription.textColor = UIColor.red
@@ -509,10 +518,15 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
case 3:
if self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
+guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
+    let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
+    let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
+    return
+}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
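On a successful recording, the dictionary handed to completionSuccessFaceRecordStep now carries file paths under the keys imageStraightFile, imageRightFile, and imageLeftFile rather than in-memory image data. A minimal consumer sketch under that assumption; the helper name `bindFaceRecord` is illustrative:

```swift
import UIKit

// Hypothetical consumer: read each saved face image back from disk using
// the keys built in the diff above.
func bindFaceRecord(_ faceView: SBKRecordFaceView) {
    faceView.completionSuccessFaceRecordStep = { validate, step, urlImages in
        guard let urls = urlImages else { return }
        for key in ["imageStraightFile", "imageRightFile", "imageLeftFile"] {
            if let path = urls[key] as? String, let image = UIImage(contentsOfFile: path) {
                print("\(key): image \(image.size) at \(path)")
            }
        }
    }
}
```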
@@ -196,14 +196,36 @@ class SBKValidateInput {
}
func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 4
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
+func saveImage(imageName: String, image: UIImage) -> String? {
+guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
+let fileName = imageName
+let fileURL = documentsDirectory.appendingPathComponent(fileName)
+guard let data = image.jpegData(compressionQuality: 1) else { return nil }
+if FileManager.default.fileExists(atPath: fileURL.path) {
+do {
+try FileManager.default.removeItem(atPath: fileURL.path)
+print("Removed old image")
+} catch let removeError {
+print("couldn't remove file at path", removeError)
+}
+}
+do {
+try data.write(to: fileURL)
+return fileURL.path
+} catch let error {
+print("error saving file with error", error)
+}
+return nil
+}
}
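Two details of the new saveImage(imageName:image:) helper are worth noting: it always encodes JPEG via jpegData(compressionQuality: 1) even though every call site passes a .png file name, and it silently overwrites an existing file of the same name. A minimal usage sketch, callable from inside the SDK module since the helper is internal; the "sample" asset name is illustrative:

```swift
import UIKit

// Round-trip an image through the new helper.
if let image = UIImage(named: "sample"),
   let path = SBKValidateInput.shared.saveImage(imageName: "imagecardFRONT.png", image: image) {
    // The file lands in the Documents directory and is JPEG-encoded
    // despite the .png extension.
    let reloaded = UIImage(contentsOfFile: path)
    print("saved to \(path), reloaded: \(reloaded != nil)")
}
```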
@@ -10,3 +10,9 @@ target 'OCR-SDK' do
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
+post_install do |installer|
+installer.pods_project.build_configurations.each do |config|
+config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
+end
+end
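The new post_install hook excludes arm64 from simulator builds, a common workaround when a binary dependency (here most likely the TensorFlowLiteC pod) ships no arm64 simulator slice; on Apple Silicon Macs the simulator then runs the x86_64 build under Rosetta. The host app generally needs the same EXCLUDED_ARCHS setting in its own build settings so that its simulator architecture matches the pods'.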
@@ -12,12 +12,12 @@
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
-<integer>2</integer>
+<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
-<integer>1</integer>
+<integer>2</integer>
</dict>
</dict>
</dict>