Commit 0277c014 authored by Vmo-AnhNguyen's avatar Vmo-AnhNguyen

save image

parent 276686b7
......@@ -732,6 +732,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
......@@ -765,6 +766,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
BUILD_LIBRARY_FOR_DISTRIBUTION = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
......
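Note: setting BUILD_LIBRARY_FOR_DISTRIBUTION = YES in both the debug and release configurations enables library evolution and module stability, which is generally what you want when the SDK is distributed as a prebuilt framework so that apps built with a different Swift compiler version can still link against it.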
......@@ -35,7 +35,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
public var typeCamera: TypeCard = TypeCard.FRONT
public let labelTypeCard = UILabel()
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ data: Data?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
......@@ -166,7 +166,6 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
}
//Set up the preview frame
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
......@@ -223,15 +222,11 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
//Handle the photo data after capture
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
// self.captureSession.removeOutput(self.stillImageOutput)
guard let imageData = photo.fileDataRepresentation() else { return }
let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height * 4.5 / 20 , width: self.viewCamera.frame.size.width * 18 / 20, height: self.viewCamera.frame.size.width * 18 * 3 / 20 / 4), scale: 1.0)
self.completionSuccessCardStep(nil, cropImage!.pngData()!, nil)
if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
self.completionSuccessCardStep(nil, urlImage, nil)
}
}
//Handle the image for display
......
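With this change, completionSuccessCardStep no longer receives raw PNG Data: photoOutput writes the cropped card image to disk via SBKValidateInput.shared.saveImage and passes back the resulting file path. A minimal sketch of how a host app might consume the new callback follows; the `cardView` instance and what is done with the loaded image are assumptions for illustration only.

```swift
import UIKit

// Sketch only: `cardView` is an SBKValidateCardView instance configured elsewhere.
cardView.completionSuccessCardStep = { validate, pathImage, permissionCamera in
    guard let path = pathImage else { return }
    // The callback now delivers a file path in the Documents directory,
    // so the image is loaded from disk instead of being handed over as Data.
    if let image = UIImage(contentsOfFile: path) {
        // Use the captured card image (display it, upload it, etc.).
        print("Captured card image \(image.size) saved at \(path)")
    }
}
```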
......@@ -13,7 +13,7 @@ import CoreML
open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
let nibName = "SBKRecordFaceView"
var contentView:UIView?
var contentView: UIView?
@IBOutlet weak var viewBackground: UIView!
@IBOutlet weak var lbDescription: UILabel!
......@@ -23,6 +23,8 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
@IBOutlet weak var viewCheckStep3: UIView!
@IBOutlet weak var imageDescription: UIImageView!
@IBOutlet weak var stackView: UIStackView!
@IBOutlet weak var stepView: UIView!
@IBOutlet weak var viewSubIcon: UIView!
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
......@@ -36,7 +38,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
var numberTrue = 0
var numberFalse = 0
var space: Float = 0.0
var dataImageSuccess: [Data] = []
var dataImageSuccess: [UIImage] = []
var checkStartRecord: Bool = false
var timer = Timer()
var timeRecord: Int = 0
......@@ -46,7 +48,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
var idBack: String = ""
var URLToken: String = ""
var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ data: [String: Any]?)->Void = {_,_,_ in}
public var completionSuccessFaceRecordStep: (_ validate: ValidateFace?, _ step: StepRecord?, _ urlImages: [String: Any]?)->Void = {_,_,_ in}
public var timeSpace: Int = 3
public var zoom: CGFloat = 1.0
public var imageStartRecord: UIImage?
......@@ -89,6 +91,10 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
}
}
public func statusSubView(statusHide: Bool) {
viewSubIcon.isHidden = statusHide
}
public func startCamera() {
self.captureSession.startRunning()
}
......@@ -113,7 +119,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -156,7 +162,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -212,7 +218,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
if self.result![0] < self.result![1] {
if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
if self.dataImageSuccess.count == 0 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
self.lbDescription.textColor = UIColor.green
......@@ -231,7 +237,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
if self.dataImageSuccess.count == 1 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TORIGHT && self.checkStep == 1 {
self.lbDescription.textColor = UIColor.red
......@@ -245,7 +251,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
if self.dataImageSuccess.count == 2 {
self.dataImageSuccess.append(imageView.pngData()!)
self.dataImageSuccess.append(imageView)
}
} else if statusFace != .TOLEFT && self.checkStep == 2 {
self.lbDescription.textColor = UIColor.red
......@@ -351,7 +357,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let locationTop = self.bounds.origin.y + 35
let originYLBDescription = self.stackView.frame.origin.y / 896 * self.bounds.height
let originYLBDescription = self.viewSubIcon.frame.origin.y / 896 * self.bounds.height
let heightOval = originYLBDescription - locationTop
......@@ -461,17 +467,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
let pass: Float = Float(self.numberTrue)/Float(self.numberPass)
if Global.ratioPass < pass * 100 && self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
return
}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
self.resetRecord()
self.timer.invalidate()
// self.captureSession.stopRunning()
// self.captureSession.removeOutput(self.videoDataOutput)
}
} else {
self.lbDescription.textColor = UIColor.red
......@@ -509,10 +518,15 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
case 3:
if self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
guard let urlImageStraight = SBKValidateInput.shared.saveImage(imageName: "imageStraightFile.png", image: self.dataImageSuccess[0]),
let urlImageRight = SBKValidateInput.shared.saveImage(imageName: "imageRightFile.png", image: self.dataImageSuccess[1]),
let urlImageLeft = SBKValidateInput.shared.saveImage(imageName: "imageLeftFile.png", image: self.dataImageSuccess[2]) else {
return
}
let data: [String: Any] = [
"imagestraight": self.dataImageSuccess[0],
"imageRight": self.dataImageSuccess[1],
"imageLeft": self.dataImageSuccess[2],
"imageStraightFile": urlImageStraight,
"imageRightFile": urlImageRight,
"imageLeftFile": urlImageLeft,
]
self.completionSuccessFaceRecordStep(nil, nil, data)
self.timeRecord = 0
......
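On the face-recording side, successful frames are now kept as UIImage values and written to disk before the final callback fires, so completionSuccessFaceRecordStep receives a dictionary of file paths keyed by imageStraightFile, imageRightFile, and imageLeftFile instead of raw image data. A hedged sketch of consuming that dictionary; the `recordView` instance and the loop over keys are assumptions based on the keys visible in this diff.

```swift
import UIKit

// Sketch only: `recordView` is an SBKRecordFaceView instance configured elsewhere.
recordView.completionSuccessFaceRecordStep = { validate, step, urlImages in
    guard let urlImages = urlImages else { return }
    // Keys written in this commit: "imageStraightFile", "imageRightFile", "imageLeftFile",
    // each mapping to a path returned by SBKValidateInput.saveImage(imageName:image:).
    for key in ["imageStraightFile", "imageRightFile", "imageLeftFile"] {
        if let path = urlImages[key] as? String,
           let image = UIImage(contentsOfFile: path) {
            print("\(key): \(image.size) at \(path)")
        }
    }
}
```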
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17156" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17701" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17125"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17703"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
......@@ -14,11 +14,13 @@
<outlet property="imageDescription" destination="qAP-z1-ph6" id="4WK-9l-Hhw"/>
<outlet property="lbDescription" destination="l4S-dU-FVT" id="7qE-CY-e4c"/>
<outlet property="stackView" destination="lfu-gT-avH" id="UPQ-PC-13s"/>
<outlet property="stepView" destination="wyh-9h-sMx" id="OKm-zE-5ba"/>
<outlet property="viewBackground" destination="vtt-nP-K7e" id="p2H-QE-FHN"/>
<outlet property="viewCheckStep1" destination="Ikb-Rh-oGt" id="gg1-L1-h5k"/>
<outlet property="viewCheckStep2" destination="q10-fa-3fY" id="YaT-X4-UMq"/>
<outlet property="viewCheckStep3" destination="KKw-Ea-56U" id="91G-Kf-nGW"/>
<outlet property="viewOval" destination="o70-jf-1ia" id="W81-Ef-mxN"/>
<outlet property="viewSubIcon" destination="vJK-3l-9HU" id="g2w-rY-foz"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
......@@ -30,8 +32,21 @@
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="o70-jf-1ia">
<rect key="frame" x="60" y="69" width="294" height="581"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="vJK-3l-9HU">
<rect key="frame" x="0.0" y="670" width="414" height="226"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="l4S-dU-FVT" userLabel="position your face in the oval">
<rect key="frame" x="96.5" y="165.5" width="221.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="wyh-9h-sMx">
<rect key="frame" x="173" y="771" width="68.5" height="20.5"/>
<rect key="frame" x="173" y="125" width="68.5" height="20.5"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 3/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Maa-bq-dEa">
<rect key="frame" x="10" y="3" width="48.5" height="14.5"/>
......@@ -53,8 +68,15 @@
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="qAP-z1-ph6">
<rect key="frame" x="183.5" y="46" width="47" height="60"/>
<constraints>
<constraint firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="lbZ-b0-JQ6"/>
<constraint firstAttribute="height" constant="60" id="lpk-mw-Ydj"/>
</constraints>
</imageView>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="5" translatesAutoresizingMaskIntoConstraints="NO" id="lfu-gT-avH">
<rect key="frame" x="100" y="670" width="214" height="6"/>
<rect key="frame" x="100" y="20" width="214" height="6"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ikb-Rh-oGt">
<rect key="frame" x="0.0" y="0.0" width="68" height="6"/>
......@@ -88,44 +110,35 @@
<constraint firstAttribute="height" constant="6" id="zRt-Eh-zot"/>
</constraints>
</stackView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="l4S-dU-FVT">
<rect key="frame" x="96.5" y="801.5" width="221.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="o70-jf-1ia">
<rect key="frame" x="60" y="69" width="294" height="581"/>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="qAP-z1-ph6">
<rect key="frame" x="183.5" y="696" width="47" height="60"/>
<constraints>
<constraint firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="lbZ-b0-JQ6"/>
<constraint firstAttribute="height" constant="60" id="lpk-mw-Ydj"/>
<constraint firstAttribute="bottom" secondItem="l4S-dU-FVT" secondAttribute="bottom" constant="40" id="7Yx-6Q-gLs"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="top" secondItem="vJK-3l-9HU" secondAttribute="top" constant="20" id="IJS-1d-cdc"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="leading" secondItem="vJK-3l-9HU" secondAttribute="leading" constant="100" id="K2Y-OR-tdS"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="top" secondItem="qAP-z1-ph6" secondAttribute="bottom" constant="19" id="KoA-Uc-RVP"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="centerX" secondItem="qAP-z1-ph6" secondAttribute="centerX" id="NQh-Qp-Edn"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="centerX" secondItem="wyh-9h-sMx" secondAttribute="centerX" id="Oku-d6-8xF"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="top" secondItem="lfu-gT-avH" secondAttribute="bottom" constant="20" id="T8S-a8-xgG"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="centerX" secondItem="l4S-dU-FVT" secondAttribute="centerX" id="V3j-1h-qaX"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="centerX" secondItem="vJK-3l-9HU" secondAttribute="centerX" id="ae7-T7-2n4"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="bottom" constant="20" id="nRu-XH-t9b"/>
</constraints>
</imageView>
</view>
</subviews>
<viewLayoutGuide key="safeArea" id="9lf-e8-SmZ"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
<constraints>
<constraint firstItem="qAP-z1-ph6" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="0MA-kq-exD"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="width" secondItem="qAP-z1-ph6" secondAttribute="height" multiplier="341:437" id="4yn-TJ-yqm"/>
<constraint firstAttribute="trailing" secondItem="vtt-nP-K7e" secondAttribute="trailing" id="6aW-K2-jlG"/>
<constraint firstItem="qAP-z1-ph6" firstAttribute="top" secondItem="lfu-gT-avH" secondAttribute="bottom" constant="20" id="Fh8-GZ-8yL"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="I6p-U2-cTt"/>
<constraint firstAttribute="bottom" secondItem="vJK-3l-9HU" secondAttribute="bottom" id="GFF-jv-UtV"/>
<constraint firstItem="vtt-nP-K7e" firstAttribute="top" secondItem="UFA-SZ-Qoz" secondAttribute="top" id="N3v-KG-OEA"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="top" secondItem="9lf-e8-SmZ" secondAttribute="top" constant="25" id="NPG-HN-ut1"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="100" id="Uqs-Lb-Gn3"/>
<constraint firstItem="wyh-9h-sMx" firstAttribute="top" secondItem="qAP-z1-ph6" secondAttribute="bottom" constant="15" id="Ur0-Sk-VRP"/>
<constraint firstItem="vJK-3l-9HU" firstAttribute="top" secondItem="o70-jf-1ia" secondAttribute="bottom" constant="20" id="QCr-Od-Hn9"/>
<constraint firstItem="vJK-3l-9HU" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" id="WBE-5x-ey3"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="XTc-Jw-HgS"/>
<constraint firstItem="9lf-e8-SmZ" firstAttribute="bottom" secondItem="l4S-dU-FVT" secondAttribute="bottom" constant="40" id="aMm-fz-lIb"/>
<constraint firstItem="vtt-nP-K7e" firstAttribute="leading" secondItem="UFA-SZ-Qoz" secondAttribute="leading" id="ct7-fr-gJL"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="top" secondItem="o70-jf-1ia" secondAttribute="bottom" constant="20" id="e65-sJ-G8O"/>
<constraint firstItem="9lf-e8-SmZ" firstAttribute="trailing" secondItem="vJK-3l-9HU" secondAttribute="trailing" id="eg7-gG-37U"/>
<constraint firstItem="o70-jf-1ia" firstAttribute="leading" secondItem="9lf-e8-SmZ" secondAttribute="leading" constant="60" id="g9o-8i-MtB"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="hd0-Yp-38K"/>
<constraint firstItem="l4S-dU-FVT" firstAttribute="top" secondItem="wyh-9h-sMx" secondAttribute="bottom" constant="10" id="lo9-cy-Wk0"/>
<constraint firstItem="lfu-gT-avH" firstAttribute="centerX" secondItem="9lf-e8-SmZ" secondAttribute="centerX" id="nV7-Zn-tQz"/>
<constraint firstAttribute="bottom" secondItem="vtt-nP-K7e" secondAttribute="bottom" id="okN-H6-tDN"/>
</constraints>
<point key="canvasLocation" x="133" y="154"/>
......
......@@ -196,14 +196,36 @@ class SBKValidateInput {
}
func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 4
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func saveImage(imageName: String, image: UIImage) -> String? {
guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
let fileName = imageName
let fileURL = documentsDirectory.appendingPathComponent(fileName)
guard let data = image.jpegData(compressionQuality: 1) else { return nil }
if FileManager.default.fileExists(atPath: fileURL.path) {
do {
try FileManager.default.removeItem(atPath: fileURL.path)
print("Removed old image")
} catch let removeError {
print("couldn't remove file at path", removeError)
}
}
do {
try data.write(to: fileURL)
return fileURL.path
} catch let error {
print("error saving file with error", error)
}
return nil
}
}
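saveImage(imageName:image:) overwrites any existing file of the same name in the Documents directory and returns the absolute path on success, or nil on failure. Since the captures reuse fixed file names (imagecard\(typeCamera).png, imageStraightFile.png, and so on), a caller that needs the image back or wants to clean up after uploading can work from the returned path. The helpers below are hypothetical companions, not part of the SDK.

```swift
import UIKit

// Hypothetical companion helpers; the SDK itself only exposes saveImage(imageName:image:).
enum SavedImageStore {
    /// Loads an image previously written by SBKValidateInput.saveImage.
    static func loadImage(atPath path: String) -> UIImage? {
        return UIImage(contentsOfFile: path)
    }

    /// Deletes the saved file once it has been consumed (e.g. after upload),
    /// since saveImage reuses fixed file names and leaves files in Documents.
    static func removeImage(atPath path: String) {
        if FileManager.default.fileExists(atPath: path) {
            try? FileManager.default.removeItem(atPath: path)
        }
    }
}
```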
......@@ -10,3 +10,9 @@ target 'OCR-SDK' do
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
post_install do |installer|
installer.pods_project.build_configurations.each do |config|
config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
end
end
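The post_install hook excludes arm64 from simulator builds of the Pods project. This is typically a workaround for binary dependencies that do not ship an arm64 simulator slice (here, most likely the TensorFlow Lite pods referenced elsewhere in the project); it keeps simulator builds working on Apple Silicon Macs at the cost of running them as x86_64 under Rosetta.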
......@@ -12,12 +12,12 @@
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
<integer>2</integer>
</dict>
</dict>
</dict>
......