//
//  SBKValidateCardView.swift
//  OCR-SDK
//
//  Created by itsol on 8/25/20.
//  Copyright © 2020 itsol. All rights reserved.
//

import UIKit
import AVFoundation

open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {

    let nibName = "SBKValidateCardView"
    var contentView: UIView?

    @IBOutlet public weak var viewCamera: UIView!
    @IBOutlet public weak var lbDescription: UILabel!
    @IBOutlet public weak var lbCopyright: UILabel!
    @IBOutlet public weak var lbStep: UILabel!
    @IBOutlet public weak var btnCapture: UIButton!
    @IBOutlet public weak var imgCaution: UIImageView!

    public var captureSession: AVCaptureSession = AVCaptureSession()
    public var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
    public var videoPreviewLayer: AVCaptureVideoPreviewLayer!
    public let videoDataOutput = AVCaptureVideoDataOutput()
    public var inputCamera: AVCaptureDeviceInput!

    var descriptionScreen: String = "Front of your personal card".localized()
    var checkScreen: Int = 1 // checkScreen = 1: front-of-card screen, checkScreen = 2: back-of-card screen
    var idFront: String = ""
    var URLToken: String = ""
    var statusTakePhoto: Bool = true
    var statusValidateImage: ValidateCard = .ERROR
    var statusScreen: String = "vertical" // "vertical" or "horizontal"

    public var typeCamera: TypeCard = TypeCard.FRONT
    public let labelTypeCard = UILabel()

    private var previewWidth: CGFloat = 128.0
    private var previewHeight: CGFloat = 128.0
    private var cropZone: CGRect?
    private var cropImage: CGRect?
    var overlayView: OverLayCardView?

    public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?) -> Void = { _, _, _ in }
    public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!

    required public init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }

    override public init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    func commonInit() {
        guard let view = loadViewFromNib() else { return }
        view.frame = self.bounds
        self.addSubview(view)
        contentView = view

        AVCaptureDevice.requestAccess(for: .video, completionHandler: { (granted: Bool) in
            if !granted {
                self.completionSuccessCardStep(nil, nil, false)
                return
            }
        })

        if AVCaptureDevice.authorizationStatus(for: .video) == .denied {
            self.completionSuccessCardStep(nil, nil, false)
            return
        }

        self.labelTypeCard.frame = CGRect(x: self.center.x - 50, y: self.frame.size.height / 8 + self.viewCamera.frame.origin.y - 40, width: 100, height: 20)
        self.labelTypeCard.textAlignment = .center
        self.labelTypeCard.text = "Use front"
        self.labelTypeCard.textColor = UIColor.white
        self.addSubview(labelTypeCard)
    }

    func loadViewFromNib() -> UIView? {
        let bundle = Bundle(for: type(of: self))
        let nib = UINib(nibName: nibName, bundle: bundle)
        return nib.instantiate(withOwner: self, options: nil).first as? UIView
    }
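    /// Replaces the default capture-button icon and/or adds corner images around the card frame.
    /// All parameters are optional; only the images that are passed in are applied.
    ///
    /// Usage sketch (the asset names below are placeholders, not assets shipped with the SDK):
    ///
    ///     cardView.setButtonImage(iconTakeCard: UIImage(named: "my_capture_icon"),
    ///                             iconFrameTopLeft: UIImage(named: "my_corner_top_left"),
    ///                             iconFrameTopRight: UIImage(named: "my_corner_top_right"),
    ///                             iconFrameBottomLeft: UIImage(named: "my_corner_bottom_left"),
    ///                             iconFrameBottomRight: UIImage(named: "my_corner_bottom_right"))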
    public func setButtonImage(iconTakeCard: UIImage? = nil, iconFrameTopLeft: UIImage? = nil, iconFrameTopRight: UIImage? = nil, iconFrameBottomLeft: UIImage? = nil, iconFrameBottomRight: UIImage? = nil) {
        let fWidth = self.bounds.size.width
        let fHeight = self.bounds.size.height
        let squareWidth = fWidth / 2
        let topLeft = CGPoint(x: fWidth / 2 - squareWidth / 3, y: fHeight / 4)

        if iconTakeCard != nil {
            self.btnCapture.setImage(iconTakeCard, for: .normal)
        }

        if iconFrameTopLeft != nil {
            let ivLeftTop = UIImageView(image: iconFrameTopLeft)
            ivLeftTop.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth / 20, y: topLeft.y - fHeight / 20, width: 50, height: 50)
            self.viewCamera.addSubview(ivLeftTop)
        }

        if iconFrameTopRight != nil {
            let ivRightTop = UIImageView(image: iconFrameTopRight)
            ivRightTop.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth / 20 + fWidth * 18 / 20 - 50, y: topLeft.y - fHeight / 20, width: 50, height: 50)
            self.viewCamera.addSubview(ivRightTop)
        }

        if iconFrameBottomLeft != nil {
            let ivLeftBottom = UIImageView(image: iconFrameBottomLeft)
            ivLeftBottom.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth / 20, y: topLeft.y - fHeight / 20 + fWidth * 18 * 3 / 20 / 4 - 50, width: 50, height: 50)
            self.viewCamera.addSubview(ivLeftBottom)
        }

        if iconFrameBottomRight != nil {
            let ivRightBottom = UIImageView(image: iconFrameBottomRight)
            ivRightBottom.frame = CGRect(x: self.viewCamera.frame.origin.x + fWidth / 20 + fWidth * 18 / 20 - 50, y: topLeft.y - fHeight / 20 + fWidth * 18 * 3 / 20 / 4 - 50, width: 50, height: 50)
            self.viewCamera.addSubview(ivRightBottom)
        }
    }

    public func startCamera() {
        self.captureSession.startRunning()
    }

    public func stopCamera() {
        self.captureSession.stopRunning()
    }
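    /*
     Lifecycle sketch (illustrative, assuming the view is embedded in a host view
     controller as `cardView`): the capture session is created and started the first
     time the view draws (see initOverLay() below), so a host typically only needs
     startCamera()/stopCamera() to resume and pause scanning:

         override func viewWillAppear(_ animated: Bool) {
             super.viewWillAppear(animated)
             cardView.startCamera()
         }

         override func viewWillDisappear(_ animated: Bool) {
             super.viewWillDisappear(animated)
             cardView.stopCamera()
         }
     */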
    // Camera setup
    func loadCamera() {
        captureSession.sessionPreset = .high
        var device: AVCaptureDevice?

        if #available(iOS 11.1, *) {
            guard let devicetype = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
                mediaType: .video,
                position: .back).devices.first else {
                    fatalError("No back camera device found; run the SDK on a physical iOS device, not the simulator")
            }
            device = devicetype
        } else if #available(iOS 11.0, *) {
            guard let devicetype = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera],
                mediaType: .video,
                position: .back).devices.first else {
                    fatalError("No back camera device found; run the SDK on a physical iOS device, not the simulator")
            }
            device = devicetype
        }

        do {
            self.inputCamera = try AVCaptureDeviceInput(device: device!)
            if captureSession.canAddInput(self.inputCamera) {
                captureSession.addInput(self.inputCamera)
            }
            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        } catch let error {
            print("Error: unable to initialize back camera: \(error.localizedDescription)")
        }

        self.getCameraFrames()

        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
        }
        DispatchQueue.main.async {
            self.videoPreviewLayer.frame = self.viewCamera.bounds
        }
    }

    // Configure and attach the preview layer
    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspectFill
        videoPreviewLayer.connection?.videoOrientation = .portrait
        viewCamera.layer.addSublayer(videoPreviewLayer)

        if let viewWithTag = self.viewCamera.viewWithTag(11) {
            viewWithTag.removeFromSuperview()
        }
        if let viewWithTag = self.viewCamera.viewWithTag(22) {
            viewWithTag.removeFromSuperview()
        }
    }

    public override func draw(_ rect: CGRect) {
        initOverLay()
    }

    func initOverLay() {
        if self.overlayView == nil {
            self.loadCamera()
            let fWidth = self.bounds.size.width
            self.overlayView = OverLayCardView(frame: self.viewCamera.bounds)
            self.overlayView!.marginTop = self.viewCamera.frame.origin.x + fWidth / 20
            previewWidth = self.viewCamera.frame.width * 18 / 20
            previewHeight = self.viewCamera.frame.width * 18 * 3 / 20 / 4
            self.overlayView!.previewWidth = self.previewWidth
            self.overlayView!.previewHeight = self.previewHeight
            self.overlayView!.borderLength = 50
            self.overlayView!.borderWidth = 2
            self.overlayView!.connerRadius = 0
            viewCamera.addSubview(self.overlayView!)
            self.overlayView!.setLayer()
        }
    }
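    /*
     The crop frame computed in initOverLay() above spans 18/20 of the camera view's
     width at a 4:3 (width:height) aspect ratio, inset by a 1/20 margin. Worked example
     with illustrative numbers, assuming viewCamera spans the full 400 pt width and
     viewCamera.frame.origin.x == 0:

         marginTop     = 400 / 20           = 20 pt
         previewWidth  = 400 * 18 / 20      = 360 pt
         previewHeight = 360 * 3 / 4        = 270 pt
     */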
    // Process the photo data after capture
    @available(iOS 11.0, *)
    public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation() else { return }
        let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
        if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
            self.completionSuccessCardStep(nil, urlImage, nil)
        }
    }

    // Capture-button action
    @IBAction func onCapturePhoto(_ sender: Any) {
        if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT)
            || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
            if #available(iOS 11.0, *) {
                let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
                stillImageOutput.capturePhoto(with: settings, delegate: self)
            } else {
                let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
                stillImageOutput.capturePhoto(with: settings, delegate: self)
            }
        }
    }
}

extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {

    private func getCameraFrames() {
        self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)] as [String: Any]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
        if captureSession.canAddOutput(self.videoDataOutput) {
            self.captureSession.addOutput(self.videoDataOutput)
        }
        guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
              connection.isVideoOrientationSupported else { return }
        connection.videoOrientation = .portrait
    }
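    /*
     captureOutput(_:didOutput:from:) below maps the on-screen overlay rectangle into
     pixel-buffer coordinates by dividing by the view-to-buffer scale factors. Worked
     example with illustrative numbers (a 375 pt wide camera view and a 1080 px wide
     portrait buffer; real buffer sizes depend on the session preset):

         scaleSizeWidth = 375 / 1080        ≈ 0.347
         previewWidth   = 375 * 18 / 20     = 337.5 pt
         crop width     = 337.5 / 0.347     ≈ 972 px

     cropZone feeds the per-frame validation; cropImage is the slightly adjusted
     rectangle used when the final photo is cropped in photoOutput.
     */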
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            debugPrint("unable to get image from sample buffer")
            return
        }

        if self.cropZone == nil && self.overlayView != nil {
            DispatchQueue.main.async {
                let scaleSizeWidth = self.viewCamera.frame.width / CGFloat(CVPixelBufferGetWidth(imageFrameInput))
                let scaleSizeHeight = self.viewCamera.frame.height / CGFloat(CVPixelBufferGetHeight(imageFrameInput))
                let x = self.overlayView!.marginTop
                let y = (self.viewCamera.frame.height - self.previewHeight) / 2

                self.cropZone = CGRect(x: CGFloat(x) / scaleSizeWidth,
                                       y: CGFloat(y) / scaleSizeHeight,
                                       width: CGFloat(self.previewWidth) / scaleSizeWidth,
                                       height: CGFloat(self.previewHeight) / scaleSizeHeight)
                self.cropImage = CGRect(x: CGFloat(x - 10) / scaleSizeWidth,
                                        y: CGFloat(y + 10) / scaleSizeHeight,
                                        width: CGFloat(self.previewWidth + 20) / scaleSizeWidth,
                                        height: CGFloat(self.previewHeight - 20) / scaleSizeHeight)
            }
        }

        guard let crop = self.cropZone else { return }
        guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else { return }
        let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)

        if let overlayView = self.overlayView {
            DispatchQueue.main.async {
                if let viewWithTag = self.viewCamera.viewWithTag(11) {
                    viewWithTag.removeFromSuperview()
                }
                self.completionSuccessCardStep(validateImageCard, nil, nil)
                self.statusValidateImage = validateImageCard
                self.lbDescription.text = validateImageCard.rawValue

                if validateImageCard == ValidateCard.IMAGE_FAKE || validateImageCard == .ERROR {
                    self.lbDescription.textColor = UIColor.red
                    self.lbDescription.text = "Incorrect card, please check!".localized()
                    self.imgCaution.isHidden = false
                    self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
                    overlayView.setBorderColor(color: UIColor.red.cgColor)
                }

                if validateImageCard == .IMAGE_FRONT && self.typeCamera == .BACK {
                    self.lbDescription.textColor = UIColor.red
                    self.lbDescription.text = "Please put the back of the card in".localized()
                    self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
                    self.imgCaution.isHidden = false
                    overlayView.setBorderColor(color: UIColor.red.cgColor)
                }

                if validateImageCard == .IMAGE_BACK && self.typeCamera == .FRONT {
                    self.lbDescription.textColor = UIColor.red
                    self.lbDescription.text = "Please put the front of the card in".localized()
                    self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
                    self.imgCaution.isHidden = false
                    overlayView.setBorderColor(color: UIColor.red.cgColor)
                }

                if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT)
                    || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
                    self.lbDescription.textColor = .green
                    self.lbDescription.text = "Are you ready. Let's start!".localized()
                    self.imgCaution.isHidden = true
                    self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
                    overlayView.setBorderColor(color: UIColor.blue.cgColor)
                }
            }
        }
    }
}
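/*
 Usage sketch for the completion callback (illustrative only; `cardView` is assumed to
 be an SBKValidateCardView owned by the host). The callback is invoked in three ways:
 with `permissionCamera == false` when camera access is denied, with a per-frame
 `validate` state from captureOutput(_:didOutput:from:), and with the saved image
 path from photoOutput(_:didFinishProcessingPhoto:error:):

     cardView.completionSuccessCardStep = { validate, pathImage, permissionCamera in
         if permissionCamera == false {
             // Camera access denied; ask the user to enable it in Settings.
             return
         }
         if let state = validate {
             // Live validation result, e.g. .IMAGE_FRONT, .IMAGE_BACK, .IMAGE_FAKE or .ERROR.
             _ = state
         }
         if let path = pathImage {
             // Cropped card photo was saved; hand `path` to the next step of the flow.
             _ = path
         }
     }
 */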