Commit 7d8d0acf authored by Nguyễn Văn An

update model

parent c17d42a5
@@ -7,10 +7,12 @@
 	objects = {
 /* Begin PBXBuildFile section */
-		60B3E4342745FC5C00D58AD2 /* idcard15072021.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */; };
+		600A87B727C685FB00A0A270 /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 600A87B627C685FB00A0A270 /* Car-2 copy@2x.png */; };
+		600A87BA27C6862300A0A270 /* ic_camera.png in Resources */ = {isa = PBXBuildFile; fileRef = 600A87B927C6862300A0A270 /* ic_camera.png */; };
+		606B7B06282D50CF001E20DB /* card.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 606B7B04282D50CF001E20DB /* card.tflite */; };
+		606B7B07282D50CF001E20DB /* face.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 606B7B05282D50CF001E20DB /* face.tflite */; };
 		9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9509925E25355E0300C570D8 /* SBKValidateCardView.swift */; };
 		95182D0624B3343E00405EA9 /* liveness.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95182D0524B3343D00405EA9 /* liveness.tflite */; };
-		954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 954230E425344601006F13F9 /* valid_card_10102020.tflite */; };
 		9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDC2247D1FA200AF50DE /* Localizable.strings */; };
 		9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */; };
 		9551057C2477746A0053036F /* OCR_SDK.h in Headers */ = {isa = PBXBuildFile; fileRef = 9551057A2477746A0053036F /* OCR_SDK.h */; settings = {ATTRIBUTES = (Public, ); }; };
@@ -31,7 +33,6 @@
 		9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9580130D2489F1EA00846F8A /* SBKRecordFace.swift */; };
 		958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9580130E2489F1EA00846F8A /* SBKRecordFace.xib */; };
 		95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801346248A237000846F8A /* SBKModelDataHandler.swift */; };
-		958D36C224C18BB1004B27EB /* Pods_OCR_SDK.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
 		A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6EF25299E160058D675 /* SBKValidateCardView.xib */; };
 		A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */; };
 		A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */; };
@@ -91,11 +92,13 @@
 /* Begin PBXFileReference section */
 		2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.debug.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
 		3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
-		60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = idcard15072021.tflite; sourceTree = "<group>"; };
+		600A87B627C685FB00A0A270 /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
+		600A87B927C6862300A0A270 /* ic_camera.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_camera.png; sourceTree = "<group>"; };
+		606B7B04282D50CF001E20DB /* card.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = card.tflite; sourceTree = "<group>"; };
+		606B7B05282D50CF001E20DB /* face.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = face.tflite; sourceTree = "<group>"; };
 		8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.release.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
 		9509925E25355E0300C570D8 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKValidateCardView.swift; sourceTree = "<group>"; };
 		95182D0524B3343D00405EA9 /* liveness.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = liveness.tflite; sourceTree = "<group>"; };
-		954230E425344601006F13F9 /* valid_card_10102020.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = valid_card_10102020.tflite; sourceTree = "<group>"; };
 		9546DDC1247D1FA200AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = "<group>"; };
 		9546DDC3247D1FAA00AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/Localizable.strings; sourceTree = "<group>"; };
 		9546DDD1247D2C0C00AF50DE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/SBKCaptureCardVC.xib; sourceTree = "<group>"; };
@@ -142,7 +145,6 @@
 		B7622AAB2647E5730077D3CF /* NormalizeOp.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NormalizeOp.swift; sourceTree = "<group>"; };
 		B7622AAC2647E5730077D3CF /* Detection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Detection.swift; sourceTree = "<group>"; };
 		B7622AAD2647E5730077D3CF /* LandMark.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LandMark.swift; sourceTree = "<group>"; };
-		B7622AB52647E6230077D3CF /* face_detection_front.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = face_detection_front.tflite; sourceTree = "<group>"; };
 		B7622AC32647EB230077D3CF /* OverLayCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OverLayCardView.swift; sourceTree = "<group>"; };
 		B7622ACB2647EE420077D3CF /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
 		B7622ACC2647EE420077D3CF /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
@@ -353,9 +355,8 @@
 		9580134A248A25E700846F8A /* Model */ = {
 			isa = PBXGroup;
 			children = (
-				60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */,
-				B7622AB52647E6230077D3CF /* face_detection_front.tflite */,
-				954230E425344601006F13F9 /* valid_card_10102020.tflite */,
+				606B7B04282D50CF001E20DB /* card.tflite */,
+				606B7B05282D50CF001E20DB /* face.tflite */,
 				95182D0524B3343D00405EA9 /* liveness.tflite */,
 			);
 			path = Model;
@@ -412,12 +413,14 @@
 		B7622ACA2647EE420077D3CF /* image */ = {
 			isa = PBXGroup;
 			children = (
+				600A87B627C685FB00A0A270 /* Car-2 copy@2x.png */,
 				B7622ACB2647EE420077D3CF /* TutorialFace1.png */,
 				B7622ACC2647EE420077D3CF /* TutorialFaceP.png */,
 				B7622ACD2647EE420077D3CF /* Scan-4@2x.png */,
 				B7622ACE2647EE420077D3CF /* Back@2x.png */,
 				B7622ACF2647EE420077D3CF /* TutorialFace3.png */,
 				B7622AD02647EE420077D3CF /* TutorialFace2.png */,
+				600A87B927C6862300A0A270 /* ic_camera.png */,
 				B7622AD12647EE420077D3CF /* Hat@2x.png */,
 				B7622AD22647EE420077D3CF /* Glasses@2x.png */,
 				B7622AD32647EE420077D3CF /* Caution@2x.png */,
@@ -530,7 +533,6 @@
 				B7622AF12647EE420077D3CF /* Hat@2x.png in Resources */,
 				B7622AFB2647EE420077D3CF /* background.png in Resources */,
 				B7622AED2647EE420077D3CF /* Scan-4@2x.png in Resources */,
-				954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */,
 				957DF5F324C035C700FE6A67 /* objcio.cer in Resources */,
 				95182D0624B3343E00405EA9 /* liveness.tflite in Resources */,
 				B7622B072647EE420077D3CF /* cmndF1.png in Resources */,
@@ -551,15 +553,19 @@
 				955105BF247774CC0053036F /* SBKTutorialVC.xib in Resources */,
 				B7622AFF2647EE420077D3CF /* Avoid glare.png in Resources */,
 				B7622B012647EE420077D3CF /* back_light.png in Resources */,
+				600A87BA27C6862300A0A270 /* ic_camera.png in Resources */,
 				9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */,
 				B7622AEE2647EE420077D3CF /* Back@2x.png in Resources */,
 				B7622B022647EE420077D3CF /* Holdphone@2x.png in Resources */,
 				B7622B002647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
+				606B7B06282D50CF001E20DB /* card.tflite in Resources */,
 				958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */,
 				B7622B062647EE420077D3CF /* Scan-1@2x.png in Resources */,
 				955105C1247774CC0053036F /* SBKResultCapture.xib in Resources */,
+				606B7B07282D50CF001E20DB /* face.tflite in Resources */,
 				B7622AF22647EE420077D3CF /* Glasses@2x.png in Resources */,
 				B7622AF52647EE420077D3CF /* rotate.png in Resources */,
+				600A87B727C685FB00A0A270 /* Car-2 copy@2x.png in Resources */,
 				B7622B0A2647EE420077D3CF /* Passport-2 copy@2x.png in Resources */,
 				B7622B032647EE420077D3CF /* ic_record.png in Resources */,
 				B7622B052647EE420077D3CF /* Scan-3@2x.png in Resources */,
@@ -567,7 +573,6 @@
 				B7622AFA2647EE420077D3CF /* TutorialFaceCheckBox.png in Resources */,
 				B7622AFD2647EE420077D3CF /* iconCap.png in Resources */,
 				B7622B042647EE420077D3CF /* Next@2x.png in Resources */,
-				60B3E4342745FC5C00D58AD2 /* idcard15072021.tflite in Resources */,
 				B7622B092647EE420077D3CF /* cmndF2.png in Resources */,
 				B7622AF02647EE420077D3CF /* TutorialFace2.png in Resources */,
 				B7622AEF2647EE420077D3CF /* TutorialFace3.png in Resources */,
...
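Taken together, the project changes above drop the old `idcard15072021.tflite`, `valid_card_10102020.tflite`, and `face_detection_front.tflite` references in favor of the new `card.tflite` and `face.tflite` models (leaving `liveness.tflite` in place), and bundle two new images, `Car-2 copy@2x.png` and `ic_camera.png`. A minimal sketch of how a framework-bundled model like these is typically resolved at runtime; the `ModelLocator` type is hypothetical, and only the resource names come from the diff:

```swift
import Foundation

// Hypothetical helper, for illustration only: resolve a .tflite resource
// from the framework's own bundle rather than Bundle.main.
final class ModelLocator {
    /// Returns the on-disk path of a bundled model, or nil if it is missing.
    static func path(forModel name: String) -> String? {
        // Bundle(for:) locates the bundle that contains this class, i.e. the
        // SDK framework bundle when the SDK is linked as a framework.
        Bundle(for: ModelLocator.self).path(forResource: name, ofType: "tflite")
    }
}

// Usage sketch: the two model names added by this commit.
let cardModelPath = ModelLocator.path(forModel: "card")
let faceModelPath = ModelLocator.path(forModel: "face")
```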
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "FD833007-BFDE-4E90-B09E-D1112B21135E"
type = "0"
version = "2.0">
</Bucket>
-// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
 import CoreImage
 import TensorFlowLite
 import UIKit
 import Accelerate
-/// A result from invoking the `Interpreter`.
 struct Result {
     let inferenceTime: Double
     let inferences: [Inference]
 }
-/// An inference from invoking the `Interpreter`.
 struct Inference {
     let confidence: Float
     let label: String
 }
-/// Information about a model file or labels file.
 typealias FileInfo = (name: String, extension: String)
-/// Information about the MobileNet model.
 enum MobileNet {
-    static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
-    static let cardModel: FileInfo = (name: "idcard15072021", extension: "tflite")
-    static let landMarkModel: FileInfo = (name: "face_detection_front", extension: "tflite")
+    static let livenessModel: FileInfo = (name: "liveness", extension: "tflite")
+    static let cardModel: FileInfo = (name: "card", extension: "tflite")
+    static let faceModel: FileInfo = (name: "face", extension: "tflite")
 }
-/// This class handles all data preprocessing and makes calls to run inference on a given frame
-/// by invoking the `Interpreter`. It then formats the inferences obtained and returns the top N
-/// results for a successful inference.
 class SBKModelDataHandler {
-    // MARK: - Internal Properties
-    /// The current thread count used by the TensorFlow Lite Interpreter.
-    let threadCount: Int
-    let resultCount = 3
-    let threadCountLimit = 10
-    // MARK: - Model Parameters
-    let batchSize = 1
-    let inputChannels = 3
-    let inputWidth = 224
-    let inputHeight = 224
-    // MARK: - Private Properties
-    /// List of labels from the given labels file.
-    private var labels: [String] = []
-    /// TensorFlow Lite `Interpreter` object for performing inference on a given model.
-    private var interpreter: Interpreter
-    /// Information about the alpha component in RGBA data.
-    private let alphaComponent = (baseOffset: 4, moduloRemainder: 3)
-    // MARK: - Initialization
-    /// A failable initializer for `ModelDataHandler`. A new instance is created if the model and
-    /// labels files are successfully loaded from the app's main bundle. Default `threadCount` is 1.
-    init?(modelFileInfo: FileInfo, threadCount: Int = 1) {
-        let modelFilename = modelFileInfo.name
-        // Construct the path to the model file.
-        let bundle = Bundle(for: SBKRecordFace.self)
-        guard let modelPath = bundle.path(
-            forResource: modelFilename,
-            ofType: modelFileInfo.extension
-        ) else {
-            print("Failed to load the model file with name: \(modelFilename).")
-            return nil
-        }
-        // Specify the options for the `Interpreter`.
-        self.threadCount = threadCount
-        var options = Interpreter.Options()
-        options.threadCount = threadCount
-        do {
-            // Create the `Interpreter`.
-            interpreter = try Interpreter(modelPath: modelPath, options: options)
-            // Allocate memory for the model's input `Tensor`s.
-            try interpreter.allocateTensors()
-        } catch let error {
-            print("Failed to create the interpreter with error: \(error.localizedDescription)")
-            return nil
-        }
-    }
-    func fromImage(image: UIImage, datas: Data, imagesss: UIImage) -> UIColor {
-        var totalR: CGFloat = 0
-        var totalG: CGFloat = 0
-        var totalB: CGFloat = 0
-        var count: CGFloat = 0
-        for x in 0..<Int(image.size.width) {
-            for y in 0..<Int(image.size.height) {
-                count += 1
-                var rF: CGFloat = 0,
-                    gF: CGFloat = 0,
-                    bF: CGFloat = 0,
-                    aF: CGFloat = 0
-                image.getPixelColor(pos: CGPoint(x: x, y: y), dataImage: datas, image: imagesss).getRed(&rF, green: &gF, blue: &bF, alpha: &aF)
-                totalR += rF
-                totalG += gF
-                totalB += bF
-            }
-        }
-        let averageR = totalR / count
-        let averageG = totalG / count
-        let averageB = totalB / count
-        return UIColor(red: averageR, green: averageG, blue: averageB, alpha: 1.0)
-    }
-    func convert(cmage: CIImage) -> UIImage {
-        let context: CIContext = CIContext.init(options: nil)
-        let cgImage: CGImage = context.createCGImage(cmage, from: cmage.extent)!
-        let image: UIImage = UIImage.init(cgImage: cgImage)
-        return image
-    }
+    let threadCount: Int
+    let resultCount = 3
+    let threadCountLimit = 10
+    var subtract: Float = 127.5
+    // MARK: - Model Parameters
+    let batchSize = 1
+    let inputChannels = 3
+    var inputWidth = 256
+    var inputHeight = 256
+    private var labels: [String] = []
+    private var interpreter: Interpreter
+    private let alphaComponent = (baseOffset: 4, moduloRemainder: 3)
+    init?(modelFileInfo: FileInfo, threadCount: Int = 1) {
+        let modelFilename = modelFileInfo.name
+        let bundle = Bundle(for: type(of: self))
+        guard let modelPath = bundle.path(
+            forResource: modelFilename,
+            ofType: modelFileInfo.extension
+        ) else {
+            print("Failed to load the model file with name: \(modelFilename).")
+            return nil
+        }
+        self.threadCount = threadCount
+        var options = Interpreter.Options()
+        options.threadCount = threadCount
+        do {
+            interpreter = try Interpreter(modelPath: modelPath, options: options)
+            try interpreter.allocateTensors()
+        } catch let error {
+            print("Failed to create the interpreter with error: \(error.localizedDescription)")
+            return nil
+        }
+    }
-    // MARK: - Internal Methods
-    /// Performs image preprocessing, invokes the `Interpreter`, and processes the inference results.
     func runModel(onFrame pixelBuffer: CVPixelBuffer) -> [Float]? {
         let sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer)
@@ -149,8 +64,11 @@ class SBKModelDataHandler {
             sourcePixelFormat == kCVPixelFormatType_32BGRA ||
             sourcePixelFormat == kCVPixelFormatType_32RGBA)
         let imageChannels = 4
         assert(imageChannels >= inputChannels)
-        // Crops the image to the biggest square in the center and scales it down to model dimensions.
         let scaledSize = CGSize(width: inputWidth, height: inputHeight)
         guard let thumbnailPixelBuffer = pixelBuffer.resized(to: scaledSize) else {
             return nil
@@ -159,6 +77,8 @@ class SBKModelDataHandler {
         let outputTensor: Tensor
         do {
             let inputTensor = try interpreter.input(at: 0)
-            // Remove the alpha component from the image buffer to get the RGB data.
             guard let rgbData = rgbDataFromBuffer(
                 thumbnailPixelBuffer,
                 byteCount: batchSize * inputWidth * inputHeight * inputChannels,
@@ -167,10 +87,19 @@ class SBKModelDataHandler {
                 print("Failed to convert the image buffer to RGB data.")
                 return nil
             }
+            let imageCap = UIImage(data: rgbData)
+            // self.fromImage(image: imageCap!, datas: rgbData, imagesss: imageCap!)
-            // Copy the RGB data to the input `Tensor`.
             try interpreter.copy(rgbData, toInputAt: 0)
-            // Run inference by invoking the `Interpreter`.
             let startDate = Date()
             try interpreter.invoke()
             interval = Date().timeIntervalSince(startDate) * 1000
-            // Get the output `Tensor` to process the inference results.
             outputTensor = try interpreter.output(at: 0)
         } catch let error {
             print("Failed to invoke the interpreter with error: \(error.localizedDescription)")
@@ -197,95 +126,98 @@ class SBKModelDataHandler {
         return results
     }
-    private func rgbDataFromBuffer(
-        _ buffer: CVPixelBuffer,
-        byteCount: Int,
-        isModelQuantized: Bool
-    ) -> Data? {
-        CVPixelBufferLockBaseAddress(buffer, .readOnly)
-        defer {
-            CVPixelBufferUnlockBaseAddress(buffer, .readOnly)
-        }
-        guard let sourceData = CVPixelBufferGetBaseAddress(buffer) else {
-            return nil
-        }
-        let width = CVPixelBufferGetWidth(buffer)
-        let height = CVPixelBufferGetHeight(buffer)
-        let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
-        let destinationChannelCount = 3
-        let destinationBytesPerRow = destinationChannelCount * width
-        var sourceBuffer = vImage_Buffer(data: sourceData,
-                                         height: vImagePixelCount(height),
-                                         width: vImagePixelCount(width),
-                                         rowBytes: sourceBytesPerRow)
-        guard let destinationData = malloc(height * destinationBytesPerRow) else {
-            print("Error: out of memory")
-            return nil
-        }
-        defer {
-            free(destinationData)
-        }
-        var destinationBuffer = vImage_Buffer(data: destinationData,
-                                              height: vImagePixelCount(height),
-                                              width: vImagePixelCount(width),
-                                              rowBytes: destinationBytesPerRow)
-        let pixelBufferFormat = CVPixelBufferGetPixelFormatType(buffer)
-        switch (pixelBufferFormat) {
-        case kCVPixelFormatType_32BGRA:
-            vImageConvert_BGRA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
-        case kCVPixelFormatType_32ARGB:
-            vImageConvert_ARGB8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
-        case kCVPixelFormatType_32RGBA:
-            vImageConvert_RGBA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
-        default:
-            // Unknown pixel format.
-            return nil
-        }
-        let byteData = Data(bytes: destinationBuffer.data, count: destinationBuffer.rowBytes * height)
-        if isModelQuantized {
-            return byteData
-        }
-        // Not quantized, convert to floats
-        let bytes = Array<UInt8>(unsafeData: byteData)!
-        var floats = [Float]()
-        for i in 0..<bytes.count {
-            floats.append(Float(bytes[i]) / 255.0)
-        }
-        return Data(copyingBufferOf: floats)
-    }
-}
+    private func rgbDataFromBuffer(
+        _ buffer: CVPixelBuffer,
+        byteCount: Int,
+        isModelQuantized: Bool
+    ) -> Data? {
+        CVPixelBufferLockBaseAddress(buffer, .readOnly)
+        defer {
+            CVPixelBufferUnlockBaseAddress(buffer, .readOnly)
+        }
+        guard let sourceData = CVPixelBufferGetBaseAddress(buffer) else {
+            return nil
+        }
+        let width = CVPixelBufferGetWidth(buffer)
+        let height = CVPixelBufferGetHeight(buffer)
+        let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
+        let destinationChannelCount = 3
+        let destinationBytesPerRow = destinationChannelCount * width
+        var sourceBuffer = vImage_Buffer(data: sourceData,
+                                         height: vImagePixelCount(height),
+                                         width: vImagePixelCount(width),
+                                         rowBytes: sourceBytesPerRow)
+        guard let destinationData = malloc(height * destinationBytesPerRow) else {
+            print("Error: tran bo dem")
+            return nil
+        }
+        defer {
+            free(destinationData)
+        }
+        var destinationBuffer = vImage_Buffer(data: destinationData,
+                                              height: vImagePixelCount(height),
+                                              width: vImagePixelCount(width),
+                                              rowBytes: destinationBytesPerRow)
+        let pixelBufferFormat = CVPixelBufferGetPixelFormatType(buffer)
+        switch (pixelBufferFormat) {
+        case kCVPixelFormatType_32BGRA:
+            vImageConvert_BGRA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
+        case kCVPixelFormatType_32ARGB:
+            vImageConvert_ARGB8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
+        case kCVPixelFormatType_32RGBA:
+            vImageConvert_RGBA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
+        default:
+            // Unknown pixel format.
+            return nil
+        }
+        let byteData = Data(bytes: destinationBuffer.data, count: destinationBuffer.rowBytes * height)
+        if isModelQuantized {
+            return byteData
+        }
+        // Not quantized, convert to floats
+        let bytes = Array<UInt8>(unsafeData: byteData)!
+        var floats = [Float]()
+        for i in 0..<bytes.count {
+            if self.subtract != 0 {
+                floats.append((Float(bytes[i]) - subtract) / subtract)
+            } else {
+                floats.append((Float(bytes[i])))
+            }
+        }
+        return Data(copyingBufferOf: floats)
+    }
+}
 // MARK: - Extensions
 extension Data {
     init<T>(copyingBufferOf array: [T]) {
         self = array.withUnsafeBufferPointer(Data.init)
     }
 }
 extension Array {
     init?(unsafeData: Data) {
         guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
         #if swift(>=5.0)
         self = unsafeData.withUnsafeBytes { .init($0.bindMemory(to: Element.self)) }
         #else
         self = unsafeData.withUnsafeBytes {
             .init(UnsafeBufferPointer<Element>(
                 start: $0,
                 count: unsafeData.count / MemoryLayout<Element>.stride
             ))
         }
         #endif  // swift(>=5.0)
     }
 }
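The substantive change in `SBKModelDataHandler` is the new `subtract` property: instead of always scaling each byte by `1/255` into `[0, 1]`, `rgbDataFromBuffer` now computes `(byte - subtract) / subtract`, which maps `[0, 255]` to approximately `[-1, 1]` at the default `subtract = 127.5`, and passes raw byte values through when `subtract == 0` (the face-recording view opts into the latter further down). A standalone sketch of that preprocessing math, with illustrative names:

```swift
// Sketch of the normalization introduced above (names are illustrative).
// subtract = 127.5 maps bytes into roughly [-1, 1]; subtract = 0 passes
// raw byte values through unchanged.
func normalize(_ bytes: [UInt8], subtract: Float) -> [Float] {
    bytes.map { subtract != 0 ? (Float($0) - subtract) / subtract : Float($0) }
}

let pixels: [UInt8] = [0, 128, 255]
print(normalize(pixels, subtract: 127.5)) // [-1.0, 0.0039..., 1.0]
print(normalize(pixels, subtract: 0))     // [0.0, 128.0, 255.0]
```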
@@ -18,6 +18,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
     @IBOutlet public weak var lbStep: UILabel!
     @IBOutlet public weak var btnCapture: UIButton!
     @IBOutlet public weak var imgCaution: UIImageView!
+    @IBOutlet weak var viewTakePhoto: UIView!
     public var captureSession: AVCaptureSession = AVCaptureSession()
     public var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
@@ -39,7 +40,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
     private var cropZone: CGRect?
     private var cropImage: CGRect?
     var overlayView: OverLayCardView?
+    var imagePreview = UIImageView(frame: CGRect(x: 20, y: 20, width: 280, height: 210))
     public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?) -> Void = { _, _, _ in }
     public var iconTakeCard: Data = UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)!.pngData()!
@@ -79,6 +80,13 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
         self.labelTypeCard.text = "Use front"
         self.labelTypeCard.textColor = UIColor.white
         self.addSubview(labelTypeCard)
+        self.addSubview(imagePreview)
+    }
+
+    func setImage(_ image: UIImage) {
+        DispatchQueue.main.async {
+            self.imagePreview.image = image
+        }
     }
     func loadViewFromNib() -> UIView? {
@@ -224,13 +232,14 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
         let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
         if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
             self.completionSuccessCardStep(nil, urlImage, nil)
+            self.stopCamera()
         }
     }
     //Sự kiện chụp ảnh (photo capture event)
     @IBAction func onCapturePhoto(_ sender: Any) {
-        if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
+        if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) || self.statusValidateImage == .PASSPORT {
             if #available(iOS 11.0, *) {
                 let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
                 stillImageOutput.capturePhoto(with: settings, delegate: self)
@@ -255,6 +264,8 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
         connection.videoOrientation = .portrait
     }
     public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
         guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             debugPrint("unable to get image from sample buffer")
@@ -273,7 +284,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
         guard let crop = self.cropZone else { return }
         guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else { return }
         let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
         if let overlayView = self.overlayView {
             DispatchQueue.main.async {
@@ -309,7 +320,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
                 overlayView.setBorderColor(color: UIColor.red.cgColor)
             }
-            if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
+            if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) || self.statusValidateImage == .PASSPORT {
                 self.lbDescription.textColor = .green
                 self.lbDescription.text = "Are you ready. Let's start!".localized()
                 self.imgCaution.isHidden = true
...
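Both the capture gate in `onCapturePhoto` and the ready-state branch in the sample-buffer delegate gain an `|| self.statusValidateImage == .PASSPORT` clause, so a passport (which has no front/back pairing) can now be captured directly. The shared predicate, factored out as an illustrative helper with hypothetical local stand-ins for the SDK's enums:

```swift
// Hypothetical stand-ins for the SDK's enums; the real types may have more cases.
enum ValidateCard { case IMAGE_FRONT, IMAGE_BACK, PASSPORT }
enum TypeCard { case FRONT, BACK }

// The condition now duplicated in onCapturePhoto and the overlay branch.
func canCapture(status: ValidateCard, camera: TypeCard) -> Bool {
    (status == .IMAGE_FRONT && camera == .FRONT)
        || (status == .IMAGE_BACK && camera == .BACK)
        || status == .PASSPORT
}
```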
 <?xml version="1.0" encoding="UTF-8"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="17156" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="19529" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
     <device id="retina6_1" orientation="portrait" appearance="light"/>
     <dependencies>
         <deployment identifier="iOS"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17125"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="19519"/>
         <capability name="Safe area layout guides" minToolsVersion="9.0"/>
+        <capability name="System colors in document resources" minToolsVersion="11.0"/>
         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
     </dependencies>
     <objects>
@@ -16,6 +17,7 @@
             <outlet property="lbDescription" destination="wnG-jG-HwA" id="dai-gb-2uU"/>
             <outlet property="lbStep" destination="miZ-LC-N58" id="nFu-j2-7oT"/>
             <outlet property="viewCamera" destination="lKp-BC-igm" id="SQO-Z9-Rni"/>
+            <outlet property="viewTakePhoto" destination="Bk2-Lj-tpb" id="JLc-5I-ege"/>
         </connections>
     </placeholder>
     <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
@@ -33,18 +35,18 @@
             <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
             <nil key="highlightedColor"/>
         </label>
-        <imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Caution.png" translatesAutoresizingMaskIntoConstraints="NO" id="Uu0-ZS-WyV">
-            <rect key="frame" x="147" y="649" width="120" height="32"/>
+        <imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" ambiguous="YES" image="Caution.png" translatesAutoresizingMaskIntoConstraints="NO" id="Uu0-ZS-WyV">
+            <rect key="frame" x="147" y="651" width="120" height="32"/>
             <constraints>
                 <constraint firstAttribute="width" constant="120" id="NyR-vg-E0a"/>
                 <constraint firstAttribute="height" constant="32" id="gHe-IN-Yuj"/>
             </constraints>
         </imageView>
-        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="bMq-BQ-SN5">
-            <rect key="frame" x="173.5" y="741.5" width="67" height="20.5"/>
+        <view contentMode="scaleToFill" ambiguous="YES" translatesAutoresizingMaskIntoConstraints="NO" id="bMq-BQ-SN5">
+            <rect key="frame" x="174" y="743.5" width="66.5" height="20.5"/>
             <subviews>
                 <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 1/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="miZ-LC-N58">
-                    <rect key="frame" x="10" y="3" width="47" height="14.5"/>
+                    <rect key="frame" x="10" y="3" width="46.5" height="14.5"/>
                     <color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                     <fontDescription key="fontDescription" type="system" pointSize="12"/>
                     <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
@@ -64,19 +66,62 @@
                 </userDefinedRuntimeAttribute>
             </userDefinedRuntimeAttributes>
         </view>
-        <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="0tu-Uu-aZJ">
-            <rect key="frame" x="182" y="782" width="50" height="50"/>
-            <constraints>
-                <constraint firstAttribute="height" constant="50" id="MGa-aI-9ef"/>
-                <constraint firstAttribute="width" constant="50" id="hiM-7d-T1r"/>
-            </constraints>
-            <state key="normal" image="iconCap.png"/>
-            <connections>
-                <action selector="onCapturePhoto:" destination="-1" eventType="touchUpInside" id="Vg0-NJ-EOP"/>
-            </connections>
-        </button>
-        <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="wnG-jG-HwA">
-            <rect key="frame" x="205" y="701" width="4.5" height="20.5"/>
+        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Bk2-Lj-tpb">
+            <rect key="frame" x="16" y="772" width="382" height="60"/>
+            <subviews>
+                <button opaque="NO" contentMode="scaleAspectFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="0tu-Uu-aZJ">
+                    <rect key="frame" x="0.0" y="0.0" width="382" height="60"/>
+                    <color key="backgroundColor" red="0.55669790509999995" green="0.57545536760000005" blue="0.93220157223147948" alpha="1" colorSpace="custom" customColorSpace="displayP3"/>
+                    <constraints>
+                        <constraint firstAttribute="height" constant="60" id="MGa-aI-9ef"/>
+                    </constraints>
+                    <inset key="titleEdgeInsets" minX="20" minY="0.0" maxX="0.0" maxY="0.0"/>
+                    <connections>
+                        <action selector="onCapturePhoto:" destination="-1" eventType="touchUpInside" id="Vg0-NJ-EOP"/>
+                    </connections>
+                </button>
+                <stackView hidden="YES" opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="10" translatesAutoresizingMaskIntoConstraints="NO" id="jhl-n8-gXe">
+                    <rect key="frame" x="0.0" y="0.0" width="382" height="60"/>
+                    <subviews>
+                        <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="qlD-QQ-347">
+                            <rect key="frame" x="0.0" y="0.0" width="186" height="60"/>
+                            <subviews>
+                                <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="ic_camera.png" translatesAutoresizingMaskIntoConstraints="NO" id="PNe-oO-0lf">
+                                    <rect key="frame" x="136" y="8" width="50" height="44"/>
+                                    <constraints>
+                                        <constraint firstAttribute="width" constant="50" id="Cq6-8N-XfV"/>
+                                    </constraints>
+                                </imageView>
+                            </subviews>
+                            <constraints>
+                                <constraint firstAttribute="trailing" secondItem="PNe-oO-0lf" secondAttribute="trailing" id="1Da-Ou-rv3"/>
+                                <constraint firstItem="PNe-oO-0lf" firstAttribute="top" secondItem="qlD-QQ-347" secondAttribute="top" constant="8" id="FpC-uN-NXG"/>
+                                <constraint firstAttribute="bottom" secondItem="PNe-oO-0lf" secondAttribute="bottom" constant="8" id="bOJ-cI-mwf"/>
+                            </constraints>
+                        </view>
+                        <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Chụp" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="qXK-WY-fpY">
+                            <rect key="frame" x="196" y="0.0" width="186" height="60"/>
+                            <fontDescription key="fontDescription" type="system" pointSize="24"/>
+                            <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
+                            <nil key="highlightedColor"/>
+                        </label>
+                    </subviews>
+                </stackView>
+            </subviews>
+            <color key="backgroundColor" systemColor="systemBackgroundColor"/>
+            <constraints>
+                <constraint firstItem="jhl-n8-gXe" firstAttribute="top" secondItem="Bk2-Lj-tpb" secondAttribute="top" id="8KH-J3-kNq"/>
+                <constraint firstAttribute="trailing" secondItem="jhl-n8-gXe" secondAttribute="trailing" id="KCy-KA-KEj"/>
+                <constraint firstAttribute="trailing" secondItem="0tu-Uu-aZJ" secondAttribute="trailing" id="f2R-d2-elD"/>
+                <constraint firstAttribute="bottom" secondItem="jhl-n8-gXe" secondAttribute="bottom" id="gS6-2w-qs3"/>
+                <constraint firstAttribute="bottom" secondItem="0tu-Uu-aZJ" secondAttribute="bottom" id="jB7-yA-4Or"/>
+                <constraint firstItem="0tu-Uu-aZJ" firstAttribute="leading" secondItem="Bk2-Lj-tpb" secondAttribute="leading" id="lxV-7o-6li"/>
+                <constraint firstItem="jhl-n8-gXe" firstAttribute="leading" secondItem="Bk2-Lj-tpb" secondAttribute="leading" id="rqp-Ca-IdB"/>
+                <constraint firstItem="0tu-Uu-aZJ" firstAttribute="top" secondItem="Bk2-Lj-tpb" secondAttribute="top" id="v98-oo-7BM"/>
+            </constraints>
+        </view>
+        <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" ambiguous="YES" text=" " textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="wnG-jG-HwA">
+            <rect key="frame" x="205" y="703" width="4.5" height="20.5"/>
             <fontDescription key="fontDescription" type="system" pointSize="17"/>
             <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
             <nil key="highlightedColor"/>
@@ -86,10 +131,11 @@
         <color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
         <constraints>
             <constraint firstItem="bMq-BQ-SN5" firstAttribute="top" secondItem="wnG-jG-HwA" secondAttribute="bottom" constant="20" id="3RZ-t3-RxF"/>
-            <constraint firstItem="0tu-Uu-aZJ" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="446-x7-rap"/>
-            <constraint firstItem="0tu-Uu-aZJ" firstAttribute="top" secondItem="bMq-BQ-SN5" secondAttribute="bottom" constant="20" id="9ne-hi-Dep"/>
+            <constraint firstItem="Bk2-Lj-tpb" firstAttribute="leading" secondItem="hB9-pv-7hU" secondAttribute="leading" constant="16" id="722-Sw-Jce"/>
             <constraint firstItem="bMq-BQ-SN5" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="Cej-FU-Dtq"/>
+            <constraint firstItem="hB9-pv-7hU" firstAttribute="bottom" secondItem="Bk2-Lj-tpb" secondAttribute="bottom" constant="30" id="DZc-wk-Z60"/>
             <constraint firstItem="lKp-BC-igm" firstAttribute="leading" secondItem="hB9-pv-7hU" secondAttribute="leading" id="Feg-QV-jKA"/>
+            <constraint firstItem="hB9-pv-7hU" firstAttribute="trailing" secondItem="Bk2-Lj-tpb" secondAttribute="trailing" constant="16" id="KSV-Q0-yAz"/>
             <constraint firstItem="lKp-BC-igm" firstAttribute="top" secondItem="hB9-pv-7hU" secondAttribute="top" id="M7T-cU-AYs"/>
             <constraint firstItem="Uu0-ZS-WyV" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="RJl-D9-e7j"/>
             <constraint firstItem="hB9-pv-7hU" firstAttribute="trailing" secondItem="lKp-BC-igm" secondAttribute="trailing" id="V4W-de-wre"/>
@@ -97,14 +143,22 @@
             <constraint firstItem="hB9-pv-7hU" firstAttribute="bottom" secondItem="lKp-BC-igm" secondAttribute="bottom" id="bsH-Lw-Emb"/>
             <constraint firstItem="wnG-jG-HwA" firstAttribute="top" secondItem="Uu0-ZS-WyV" secondAttribute="bottom" constant="20" id="eIh-l6-wou"/>
             <constraint firstItem="wnG-jG-HwA" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="mg9-DV-NWp"/>
+            <constraint firstItem="Bk2-Lj-tpb" firstAttribute="top" secondItem="bMq-BQ-SN5" secondAttribute="bottom" constant="8" symbolic="YES" id="nPt-SA-gMF"/>
             <constraint firstItem="Yoc-45-cmQ" firstAttribute="centerX" secondItem="hB9-pv-7hU" secondAttribute="centerX" id="qpt-JY-I2K"/>
-            <constraint firstItem="hB9-pv-7hU" firstAttribute="bottom" secondItem="0tu-Uu-aZJ" secondAttribute="bottom" constant="30" id="zb1-xI-WE2"/>
         </constraints>
-        <point key="canvasLocation" x="133" y="154"/>
+        <variation key="default">
+            <mask key="subviews">
+                <exclude reference="Bk2-Lj-tpb"/>
+            </mask>
+        </variation>
+        <point key="canvasLocation" x="131.8840579710145" y="153.34821428571428"/>
     </view>
 </objects>
 <resources>
     <image name="Caution.png" width="84.5" height="84.5"/>
-    <image name="iconCap.png" width="172" height="172"/>
+    <image name="ic_camera.png" width="512" height="512"/>
+    <systemColor name="systemBackgroundColor">
+        <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
+    </systemColor>
 </resources>
 </document>
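In the XIB, the standalone 50x50 `iconCap` button is replaced by a full-width `viewTakePhoto` container (`Bk2-Lj-tpb`): 60pt tall, inset 16pt from each edge and 30pt above the bottom guide, holding the capture button plus a hidden stack view with an `ic_camera` icon and a "Chụp" ("Capture") label. The same outer placement expressed as programmatic Auto Layout, purely for illustration (constants read from the constraints above; that `hB9-pv-7hU` is the safe-area guide is an assumption):

```swift
import UIKit

// Illustrative only: the viewTakePhoto placement from the XIB, in code.
func pinTakePhotoView(_ takePhoto: UIView, in root: UIView) {
    takePhoto.translatesAutoresizingMaskIntoConstraints = false
    root.addSubview(takePhoto)
    let safe = root.safeAreaLayoutGuide
    NSLayoutConstraint.activate([
        takePhoto.leadingAnchor.constraint(equalTo: safe.leadingAnchor, constant: 16),   // 722-Sw-Jce
        safe.trailingAnchor.constraint(equalTo: takePhoto.trailingAnchor, constant: 16), // KSV-Q0-yAz
        safe.bottomAnchor.constraint(equalTo: takePhoto.bottomAnchor, constant: 30),     // DZc-wk-Z60
        takePhoto.heightAnchor.constraint(equalToConstant: 60),                          // MGa-aI-9ef
    ])
}
```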
@@ -25,7 +25,7 @@ class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDeleg
     private let videoDataOutput = AVCaptureVideoDataOutput()
     private var modelDataHandler: SBKModelDataHandler? =
-        SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
+        SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
     private var result: [Float]?
     private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
     private let delayBetweenInferencesMs: Double = 1000
...
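Call sites now pick from the renamed `FileInfo` constants instead of the old `modelInfo`; a usage sketch grounded in the `MobileNet` enum above:

```swift
// Each handler is failable: nil if the named model is missing from the bundle.
let faceHandler = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
let cardHandler = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
let livenessHandler = SBKModelDataHandler(modelFileInfo: MobileNet.livenessModel)
```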
...@@ -30,7 +30,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega ...@@ -30,7 +30,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession) private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
private let videoDataOutput = AVCaptureVideoDataOutput() private let videoDataOutput = AVCaptureVideoDataOutput()
private var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo) private var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
private var result: [Float]? private var result: [Float]?
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000 private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
private let delayBetweenInferencesMs: Double = 1000 private let delayBetweenInferencesMs: Double = 1000
...@@ -74,6 +74,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega ...@@ -74,6 +74,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
} }
self.checkScreen() self.checkScreen()
self.loadCamera() self.loadCamera()
modelDataHandler?.subtract = 0
} }
func loadViewFromNib() -> UIView? { func loadViewFromNib() -> UIView? {
...@@ -206,94 +207,108 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega ...@@ -206,94 +207,108 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelega
} }
func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) { func getMaxResult(result: [Float]) -> Int {
var max = result[0]
var index = 0
for i in 0..<result.count {
index = max < result[i] ? i : index
max = max < result[i] ? result[i] : max
}
return index
}
-func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) {
+func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace, pureImage: CVPixelBuffer) {
     let currentTimeMs = Date().timeIntervalSince1970 * 1000
     guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
     previousInferenceTimeMs = currentTimeMs
     // Pass the pixel buffer to TensorFlow Lite to perform inference.
     result = modelDataHandler?.runModel(onFrame: pixelBuffer)
+    if let result = result {
+        let maxResult = getMaxResult(result: result)
     if self.checkStatusRecord {
-        (result![0] < result![1]) ? (self.numberTrue += 1) : (self.numberFalse += 1)
+        maxResult == 0 ? (self.numberTrue += 1) : (self.numberFalse += 1)
         self.numberPass += 1
         DispatchQueue.main.async {
-            let ciimage : CIImage = CIImage(cvPixelBuffer: pixelBuffer)
+            let ciimage : CIImage = CIImage(cvPixelBuffer: pureImage)
             let imageView : UIImage = SBKValidateInput.shared.convertCIToUIImage(cmage: ciimage)
-            if self.result![0] < self.result![1] {
+            if maxResult == 0 {
                 if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
                     if self.dataImageSuccess.count == 0 {
                         self.dataImageSuccess.append(imageView)
                     }
                     self.completionSuccessFaceRecordStep(.FACE_STRAIGHT, nil, nil)
                     self.lbDescription.textColor = UIColor.green
                     self.lbDescription.text = "Exactly".localized()
                     self.checkStep = 1
                     self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#FBA02E")
                 } else if statusFace != .STRAIGHTFACE && self.checkStep == 0 {
                     self.lbDescription.textColor = UIColor.red
                     self.lbDescription.text = "Please look straight".localized()
                     self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#FE3500")
                     self.completionSuccessFaceRecordStep(.FACE_STRAIGHT_FAILED, nil, nil)
                 } else if statusFace == .TORIGHT && self.checkStep == 1 {
                     self.lbDescription.textColor = UIColor.green
                     self.lbDescription.text = "Exactly".localized()
                     self.checkStep = 2
                     self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
                     self.completionSuccessFaceRecordStep(.FACE_RIGHT, nil, nil)
                     if self.dataImageSuccess.count == 1 {
                         self.dataImageSuccess.append(imageView)
                     }
                 } else if statusFace != .TORIGHT && self.checkStep == 1 {
                     self.lbDescription.textColor = UIColor.red
                     self.lbDescription.text = "Please turn to the right".localized()
                     self.completionSuccessFaceRecordStep(.FACE_RIGHT_FAILED, nil, nil)
                     self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FE3500")
                 } else if statusFace == .TOLEFT && self.checkStep == 2 {
                     self.lbDescription.textColor = UIColor.green
                     self.lbDescription.text = "Exactly".localized()
                     self.checkStep = 3
                     self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
                     if self.dataImageSuccess.count == 2 {
                         self.dataImageSuccess.append(imageView)
                     }
                     self.completionSuccessFaceRecordStep(.FACE_LEFT, nil, nil)
                 } else if statusFace != .TOLEFT && self.checkStep == 2 {
                     self.lbDescription.textColor = UIColor.red
                     self.lbDescription.text = "Please turn to the left".localized()
                     self.completionSuccessFaceRecordStep(.FACE_LEFT_FAILED, nil, nil)
                     self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FE3500")
                 }
             } else {
                 self.lbDescription.textColor = UIColor.red
                 self.lbDescription.text = "Incorrect face, please check!".localized()
                 self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil)
             }
         }
     } else {
-        if result![0] < result![1] {
+        if maxResult == 0 {
             DispatchQueue.main.async {
                 self.checkStartRecord = true
                 self.lbDescription.textColor = UIColor.white
                 if !self.checkStatusRecord {
                     self.checkStatusRecord = true
                     self.timeRecord = 0
                     self.checkStep = 0
                     self.viewCheckStep1.backgroundColor = .red
                     self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#333333")
                     self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#333333")
                 }
                 self.completionSuccessFaceRecordStep(.FACE_READY, nil, nil)
             }
         } else {
             DispatchQueue.main.async {
                 self.checkStartRecord = false
                 self.lbDescription.textColor = UIColor.red
                 self.lbDescription.text = "Incorrect face, please check!".localized()
                 self.completionSuccessFaceRecordStep(.FACE_FAKE, nil, nil)
             }
         }
     }
+    }
 }
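The `checkStep` ladder in didOutput encodes a fixed pose sequence: straight, then right, then left, advancing only on a live (`maxResult == 0`) frame whose pose matches the current step. The same control flow isolated as a small state machine; every name here is an assumption for illustration, not SDK API:

    enum FacePose { case straight, toRight, toLeft }

    struct LivenessFlow {
        private let order: [FacePose] = [.straight, .toRight, .toLeft]
        private(set) var stepIndex = 0          // mirrors checkStep
        var isComplete: Bool { stepIndex == order.count }

        // Advance only when the detected pose is the one the current step
        // expects; otherwise stay on the same step and report failure.
        mutating func advance(with pose: FacePose) -> Bool {
            guard !isComplete, pose == order[stepIndex] else { return false }
            stepIndex += 1
            return true
        }
    }

Driving this from the delegate would replace the nested else-if chains with a single `advance(with:)` call per frame.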
 @IBAction func onRecord(_ sender: Any) {
@@ -312,19 +327,20 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate
     debugPrint("unable to get image from sample buffer")
     return
 }
+let pureImage = self.resizeImageFace(pixelBuffer: frame)
 if self.startusCheck {
     if #available(iOS 11.0, *) {
         if self.screenHeight == 2436 {
-            self.detectFace(in: self.resizeImageFace(pixelBuffer: frame.resized(to: CGSize(width: 360, height: 480))!))
+            self.detectFace(in: self.resizeImageFace(pixelBuffer: frame.resized(to: CGSize(width: 360, height: 480))!), pureImage: pureImage)
         } else {
-            self.detectFace(in: self.resizeImageFace(pixelBuffer: frame))
+            self.detectFace(in: pureImage, pureImage: pureImage)
         }
     }
 }
 }
 func checkScreen(){
     if UIDevice().userInterfaceIdiom == .phone {
         self.screenHeight = UIScreen.main.nativeBounds.height
@@ -406,7 +422,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate
 }
 @available(iOS 11.0, *)
-private func detectFace(in image: CVPixelBuffer) {
+private func detectFace(in image: CVPixelBuffer, pureImage: CVPixelBuffer) {
     let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
         DispatchQueue.main.async {
             DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
@@ -423,7 +439,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate
     self.startTimer()
     DispatchQueue.global().async {
-        self.didOutput(pixelBuffer: image, statusFace: statusString)
+        self.didOutput(pixelBuffer: image, statusFace: statusString, pureImage: pureImage)
     }
 }
 } else {
@@ -437,7 +453,7 @@ open class SBKRecordFaceView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate
     self.lbDescription.text = "Incorrect face, please check!".localized()
     DispatchQueue.global().async {
-        self.didOutput(pixelBuffer: image, statusFace: .ERROR)
+        self.didOutput(pixelBuffer: image, statusFace: .ERROR, pureImage: pureImage)
     }
 }
 }
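detectFace builds on Vision's VNDetectFaceLandmarksRequest. For reference, the bare call pattern looks like this; a self-contained sketch in which the SDK's pose classification, timers, and UI updates are omitted:

    import Vision
    import CoreVideo

    @available(iOS 11.0, *)
    func detectFaces(in pixelBuffer: CVPixelBuffer,
                     completion: @escaping ([VNFaceObservation]) -> Void) {
        let request = VNDetectFaceLandmarksRequest { request, _ in
            completion((request.results as? [VNFaceObservation]) ?? [])
        }
        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
        do {
            try handler.perform([request])   // synchronous; call off the main thread
        } catch {
            completion([])
        }
    }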
...
No preview for this file type
@@ -14,9 +14,13 @@ class SBKValidateInput {
 static let shared = SBKValidateInput()
 var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
-var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
+var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.faceModel)
 public typealias CompletionHandle = (_ data: Bool) -> Void
+init(){
+    modelDataHandler?.inputWidth = 280
+    modelDataHandler?.inputHeight = 210
+    modelDataHandler?.subtract = 0
+}
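The new init pins the card model to a 280x210 input, so frames must be scaled to that size before inference. A sketch of one way to do the resize with UIKit; the helper name and default size are assumptions, not part of the SDK:

    import UIKit

    func resizeForCardModel(_ image: UIImage,
                            to size: CGSize = CGSize(width: 280, height: 210)) -> UIImage {
        // Redraw the image into a 280x210 canvas; aspect ratio is not
        // preserved here, matching a fixed-size model input.
        let renderer = UIGraphicsImageRenderer(size: size)
        return renderer.image { _ in
            image.draw(in: CGRect(origin: .zero, size: size))
        }
    }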
 func validateCard(imageInput: UIImage) -> Int {
     let ciimage = CIImage(image: imageInput)
@@ -56,24 +60,23 @@ class SBKValidateInput {
 let currentTimeMs = Date().timeIntervalSince1970 * 1000
 guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
 previousInferenceTimeMs = currentTimeMs
 result = modelDataHandler?.runModel(onFrame: pixelBuffer)
 if result == nil {
     return .ERROR
 }
-switch self.getResultCard(result: result!) {
+let max = getResultCard(result: result!)
+let total = result!.reduce(0, +)
+switch max {
 case 0:
     return .IMAGE_FAKE
-case 1:
+case 1, 3, 17:
     return .IMAGE_FRONT
-case 2:
+case 2, 4, 18:
     return .IMAGE_BACK
-case 3:
+case 25:
     return .PASSPORT
-case 4,5,6,7,8,9:
-    return .IMAGE_FAKE
 default:
-    return .ERROR
+    return .IMAGE_FAKE
 }
 }
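The widened cases suggest the retrained card model now emits several class indices per card side (1/3/17 front, 2/4/18 back, 25 passport), with everything else treated as fake rather than an error. The same mapping as a standalone pure function; the enum is illustrative, since the SDK's real result type is not shown in this diff:

    enum CardCheck { case imageFake, imageFront, imageBack, passport }

    func cardCheck(forClassIndex index: Int) -> CardCheck {
        switch index {
        case 1, 3, 17: return .imageFront
        case 2, 4, 18: return .imageBack
        case 25:       return .passport
        default:       return .imageFake   // class 0 and any unknown index
        }
    }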
@@ -97,12 +100,17 @@ class SBKValidateInput {
 previousInferenceTimeMs = currentTimeMs
 // Pass the pixel buffer to TensorFlow Lite to perform inference.
-result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer)
+result = modelDataHandler?.runModel(onFrame: pixelBuffer)
-if result![0] < result![1] {
+if result != nil {
+    let max = getResultCard(result: result!)
+    if max == 0 {
         return true
     } else {
         return false
     }
+} else {
+    return false
+}
 }
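The nil check plus `max == 0` test collapses to a few lines; a compact equivalent sketch (reading class 0 as the live-face class is an assumption based on the branch above):

    func isRealFace(_ scores: [Float]?) -> Bool {
        guard let scores = scores,
              let best = scores.indices.max(by: { scores[$0] < scores[$1] })
        else { return false }
        return best == 0   // assumption: index 0 = real face
    }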
 func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
@@ -172,19 +180,6 @@ class SBKValidateInput {
 }
 }
-//Process the image for display
-// func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
-//     let imageCap = image
-//
-//     let widthCrop = imageCap.size.width - imageCap.size.width / 10
-//
-//     UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
-//     image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
-//     let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
-//     UIGraphicsEndImageContext()
-//     return croppedImage
-// }
 func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
     let imageCap = image
@@ -222,6 +217,7 @@ class SBKValidateInput {
     return croppedImage
 }
 func saveImage(imageName: String, image: UIImage) -> String? {
     guard let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return nil }
     let fileName = imageName
@@ -230,7 +226,6 @@ class SBKValidateInput {
 if FileManager.default.fileExists(atPath: fileURL.path) {
     do {
         try FileManager.default.removeItem(atPath: fileURL.path)
-        print("Removed old image")
     } catch let removeError {
         print("couldn't remove file at path", removeError)
     }
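saveImage follows a delete-then-write pattern in the documents directory. A self-contained sketch of the same idea; the JPEG quality and error handling are choices made here, not the SDK's:

    import UIKit

    func saveJPEG(_ image: UIImage, named fileName: String) -> String? {
        guard let documents = FileManager.default.urls(for: .documentDirectory,
                                                       in: .userDomainMask).first,
              let data = image.jpegData(compressionQuality: 0.9) else { return nil }
        let fileURL = documents.appendingPathComponent(fileName)
        try? FileManager.default.removeItem(at: fileURL)   // drop any stale copy
        do {
            try data.write(to: fileURL)
            return fileURL.path
        } catch {
            print("couldn't write image:", error)
            return nil
        }
    }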
...
@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
 PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
-COCOAPODS: 1.10.2
+COCOAPODS: 1.11.2
@@ -21,4 +21,4 @@ SPEC CHECKSUMS:
 PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
-COCOAPODS: 1.10.2
+COCOAPODS: 1.11.2
@@ -19,42 +19,42 @@
 /* End PBXAggregateTarget section */
 /* Begin PBXBuildFile section */
-0E50E955DF4C94AC6E7C2697FA5B9C7A /* Pods-OCR-SDK-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
-1EF42BA7A3728956E0819AA1586A19E0 /* InterpreterError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0913DC467F75C7CDF2AE04C72B69A1FB /* InterpreterError.swift */; };
-35CC16F28EFE959BA1EDD59D0A1A579D /* QuantizationParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = F09E3157B8F619A01871F4BF3F3D5F4A /* QuantizationParameters.swift */; };
-89DFE11EB6B97622CBE570E11A422B2E /* Interpreter.swift in Sources */ = {isa = PBXBuildFile; fileRef = F9C3BA0790EFEE218C4A5B8BAF9E2A88 /* Interpreter.swift */; };
-A4419DB0A7CDB0A886E77F5FCF873CF8 /* Pods-OCR-SDK-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */; };
-A6791FA73099A3B48BA52F23ECCB68A5 /* TensorFlowLiteSwift-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = 7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
-B55CE53BD5779359C4A8B8043853867F /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
-BF7ACB61FB65A0CDE562908EA112C028 /* Tensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */; };
-BF92AADA2ACACF7675F24D2858B4E641 /* TensorFlowLite.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */; };
-C253298E121F79C0613AADC72D225217 /* TensorFlowLiteSwift-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 02877DCEF805FE230F0F6C1718AF2E3E /* TensorFlowLiteSwift-dummy.m */; };
-E3387D3D7DF235DD921900AECE7177DF /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */; };
-E43D8122B3F44AC0C1C4063E9FE12FCE /* Delegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */; };
-F17F59AED183359775B920759288A367 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
+14EF6CE57AF79DF3DF9DDEF552DBCE07 /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */; };
+1DBA00810896C68345BAAB9EC3D4BC43 /* Delegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */; };
+56D34D29C62CA9929729D78E80151233 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
+5A7115688121EFEFF729EC91F439837C /* TensorFlowLite.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */; };
+7EB5CB3C010CEDFEB0E7D9FACDF15269 /* Interpreter.swift in Sources */ = {isa = PBXBuildFile; fileRef = F9C3BA0790EFEE218C4A5B8BAF9E2A88 /* Interpreter.swift */; };
+9084531A7C0A8F488024857611234ED0 /* Pods-OCR-SDK-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
+9A09B475A9B8DAD6A15D02BF26256202 /* InterpreterError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0913DC467F75C7CDF2AE04C72B69A1FB /* InterpreterError.swift */; };
+A06B2E2BEB8E190AA96F6C8D60C06DF3 /* Tensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */; };
+BE006785C37447198AB55DA91D33F44B /* TensorFlowLiteSwift-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = 7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
+C3BC23FAAF367C5D4FA7A0CF969898A5 /* TensorFlowLiteSwift-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 02877DCEF805FE230F0F6C1718AF2E3E /* TensorFlowLiteSwift-dummy.m */; };
+E9C14D76F9B901587F16C0521F9D1655 /* Pods-OCR-SDK-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */; };
+EF6660C4AC5A8492404E85A54A2BB6C8 /* QuantizationParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = F09E3157B8F619A01871F4BF3F3D5F4A /* QuantizationParameters.swift */; };
+F3159A1F917BB740CA13A639C68B0AF3 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
 /* End PBXBuildFile section */
 /* Begin PBXContainerItemProxy section */
-7186AA350E2F51E290EB66113727A4BB /* PBXContainerItemProxy */ = {
+46F536B326C88C9C82A6C50AFD12998B /* PBXContainerItemProxy */ = {
     isa = PBXContainerItemProxy;
     containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
     proxyType = 1;
-    remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
-    remoteInfo = TensorFlowLiteC;
+    remoteGlobalIDString = 10418167F619D6DA72BADAD10F9EC02B;
+    remoteInfo = TensorFlowLiteSwift;
 };
-74A713F5EF5E6AC9BBF4B0F890C83299 /* PBXContainerItemProxy */ = {
+59EA02C48415F2680BE281A5B06A3743 /* PBXContainerItemProxy */ = {
     isa = PBXContainerItemProxy;
     containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
     proxyType = 1;
     remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
     remoteInfo = TensorFlowLiteC;
 };
-95F66FC5DE60D6FA2B1A8FB0A7BA488B /* PBXContainerItemProxy */ = {
+64214CA206765D231785F6D5C183A7FF /* PBXContainerItemProxy */ = {
     isa = PBXContainerItemProxy;
     containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
     proxyType = 1;
-    remoteGlobalIDString = 10418167F619D6DA72BADAD10F9EC02B;
-    remoteInfo = TensorFlowLiteSwift;
+    remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
+    remoteInfo = TensorFlowLiteC;
 };
 /* End PBXContainerItemProxy section */
@@ -65,7 +65,7 @@
 2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = TensorFlowLite.swift; path = tensorflow/lite/swift/Sources/TensorFlowLite.swift; sourceTree = "<group>"; };
 28C1063AEAB9221745A5095D0A5CB58B /* TensorFlowLiteC.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.release.xcconfig; sourceTree = "<group>"; };
 319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "Pods-OCR-SDK-dummy.m"; sourceTree = "<group>"; };
-5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = Pods_OCR_SDK.framework; path = "Pods-OCR-SDK.framework"; sourceTree = BUILT_PRODUCTS_DIR; };
+5131EA118CFE71670689AFC2E40810BB /* Pods-OCR-SDK */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = "Pods-OCR-SDK"; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Model.swift; path = tensorflow/lite/swift/Sources/Model.swift; sourceTree = "<group>"; };
 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS14.0.sdk/System/Library/Frameworks/Foundation.framework; sourceTree = DEVELOPER_DIR; };
 7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-umbrella.h"; sourceTree = "<group>"; };
@@ -78,7 +78,7 @@
 CD35CA67D5609CA37E90AB1EB079DA0E /* Pods-OCR-SDK-acknowledgements.markdown */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = "Pods-OCR-SDK-acknowledgements.markdown"; sourceTree = "<group>"; };
 D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Tensor.swift; path = tensorflow/lite/swift/Sources/Tensor.swift; sourceTree = "<group>"; };
 DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.release.xcconfig; sourceTree = "<group>"; };
-E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = TensorFlowLite.framework; path = TensorFlowLiteSwift.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLiteSwift */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = TensorFlowLiteSwift; path = TensorFlowLite.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 E5BE6F6A63BB5D807303C4C35FBA7AC5 /* TensorFlowLiteC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = TensorFlowLiteC.framework; path = Frameworks/TensorFlowLiteC.framework; sourceTree = "<group>"; };
 E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
 E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Delegate.swift; path = tensorflow/lite/swift/Sources/Delegate.swift; sourceTree = "<group>"; };
@@ -91,19 +91,19 @@
 /* End PBXFileReference section */
 /* Begin PBXFrameworksBuildPhase section */
-546F04BEEFDD30F503EE46336EDF4B53 /* Frameworks */ = {
+484F964E332219587DDF8C0B3DAE5303 /* Frameworks */ = {
     isa = PBXFrameworksBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        F17F59AED183359775B920759288A367 /* Foundation.framework in Frameworks */,
+        56D34D29C62CA9929729D78E80151233 /* Foundation.framework in Frameworks */,
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
-8729DA754D88D85B8A8D6C8102FB3D61 /* Frameworks */ = {
+6AA503E2E0AB6CC051E919AD6E9BF561 /* Frameworks */ = {
     isa = PBXFrameworksBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        B55CE53BD5779359C4A8B8043853867F /* Foundation.framework in Frameworks */,
+        F3159A1F917BB740CA13A639C68B0AF3 /* Foundation.framework in Frameworks */,
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
@@ -143,15 +143,6 @@
     name = Core;
     sourceTree = "<group>";
 };
-5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */ = {
-    isa = PBXGroup;
-    children = (
-        5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */,
-        E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */,
-    );
-    name = Products;
-    sourceTree = "<group>";
-};
 529BB94F807F2327E94EE6E69FFB0527 /* Frameworks */ = {
     isa = PBXGroup;
     children = (
@@ -217,6 +208,15 @@
     path = "../Target Support Files/TensorFlowLiteC";
     sourceTree = "<group>";
 };
+8DF39E865FD888AFD4DA2E688A22093F /* Products */ = {
+    isa = PBXGroup;
+    children = (
+        5131EA118CFE71670689AFC2E40810BB /* Pods-OCR-SDK */,
+        E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLiteSwift */,
+    );
+    name = Products;
+    sourceTree = "<group>";
+};
 C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */ = {
     isa = PBXGroup;
     children = (
@@ -241,7 +241,7 @@
     9D940727FF8FB9C785EB98E56350EF41 /* Podfile */,
     D210D550F4EA176C3123ED886F8F87F5 /* Frameworks */,
     2DAE4706825B937CABA2FBBBA1A265AE /* Pods */,
-    5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */,
+    8DF39E865FD888AFD4DA2E688A22093F /* Products */,
     C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */,
 );
 sourceTree = "<group>";
@@ -257,19 +257,19 @@
 /* End PBXGroup section */
 /* Begin PBXHeadersBuildPhase section */
-B81C9F64D8DFE71EB24113D3EA3667D3 /* Headers */ = {
+1D341DF083D35BC15F6DED8BD506397D /* Headers */ = {
     isa = PBXHeadersBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        A6791FA73099A3B48BA52F23ECCB68A5 /* TensorFlowLiteSwift-umbrella.h in Headers */,
+        BE006785C37447198AB55DA91D33F44B /* TensorFlowLiteSwift-umbrella.h in Headers */,
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
-C665BC548D0669B97E728330258F25C8 /* Headers */ = {
+AC8042D0CBFEBA4649AC69109A222E94 /* Headers */ = {
     isa = PBXHeadersBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        0E50E955DF4C94AC6E7C2697FA5B9C7A /* Pods-OCR-SDK-umbrella.h in Headers */,
+        9084531A7C0A8F488024857611234ED0 /* Pods-OCR-SDK-umbrella.h in Headers */,
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
@@ -278,41 +278,41 @@
 /* Begin PBXNativeTarget section */
 10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */ = {
     isa = PBXNativeTarget;
-    buildConfigurationList = 44F0B3745F9D5A6855C1205F3C0401AE /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */;
+    buildConfigurationList = 117645AFADBD3CD45056500CD6D8C9F0 /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */;
     buildPhases = (
-        B81C9F64D8DFE71EB24113D3EA3667D3 /* Headers */,
-        0815E5E7EEAD07CAC94FB6DF593B3868 /* Sources */,
-        546F04BEEFDD30F503EE46336EDF4B53 /* Frameworks */,
-        42DA36B2352679AE6D81CC3D497062B4 /* Resources */,
+        1D341DF083D35BC15F6DED8BD506397D /* Headers */,
+        C1AAE5F3DAC70A83F0B8D3A404BDBCFF /* Sources */,
+        484F964E332219587DDF8C0B3DAE5303 /* Frameworks */,
+        F0AC653F3FCB200441797E130403F04E /* Resources */,
     );
     buildRules = (
     );
     dependencies = (
-        EF69265E72B2B6190ECEC50A2F624C11 /* PBXTargetDependency */,
+        C231AC1CD65BBE93E9E63242DE6F3243 /* PBXTargetDependency */,
     );
     name = TensorFlowLiteSwift;
-    productName = TensorFlowLiteSwift;
-    productReference = E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */;
+    productName = TensorFlowLite;
+    productReference = E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLiteSwift */;
     productType = "com.apple.product-type.framework";
 };
 DE1F4D51AD94C30627575AEE202FD099 /* Pods-OCR-SDK */ = {
     isa = PBXNativeTarget;
-    buildConfigurationList = B70B252D508AC00514E4751918F71C7E /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */;
+    buildConfigurationList = 7BB9016B5737013F5C0419F94194071E /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */;
     buildPhases = (
-        C665BC548D0669B97E728330258F25C8 /* Headers */,
-        F89B8B2A999618D7E7BEBB9741F4CE8B /* Sources */,
-        8729DA754D88D85B8A8D6C8102FB3D61 /* Frameworks */,
-        AFA0F55BCEBEA58C3418C1675448D70A /* Resources */,
+        AC8042D0CBFEBA4649AC69109A222E94 /* Headers */,
+        410D7E44F39A1EB09D574FDDF4843D29 /* Sources */,
+        6AA503E2E0AB6CC051E919AD6E9BF561 /* Frameworks */,
+        F96110A889A27DA212E3BE9C35BDB88A /* Resources */,
    );
     buildRules = (
     );
     dependencies = (
-        B9B3EA213DF319228DCB503D0F59B5A2 /* PBXTargetDependency */,
-        82F118637C8A2F46BA62DF3BCE921D75 /* PBXTargetDependency */,
+        AABA15FF1710E27E7176CEA5FA540E47 /* PBXTargetDependency */,
+        E0234EED1FBA99973B6AA98CBC87E6AB /* PBXTargetDependency */,
     );
     name = "Pods-OCR-SDK";
-    productName = "Pods-OCR-SDK";
-    productReference = 5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */;
+    productName = Pods_OCR_SDK;
+    productReference = 5131EA118CFE71670689AFC2E40810BB /* Pods-OCR-SDK */;
     productType = "com.apple.product-type.framework";
 };
 /* End PBXNativeTarget section */
@@ -329,11 +329,11 @@
 developmentRegion = en;
 hasScannedForEncodings = 0;
 knownRegions = (
-    en,
     Base,
+    en,
 );
 mainGroup = CF1408CF629C7361332E53B88F7BD30C;
-productRefGroup = 5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */;
+productRefGroup = 8DF39E865FD888AFD4DA2E688A22093F /* Products */;
 projectDirPath = "";
 projectRoot = "";
 targets = (
@@ -345,14 +345,14 @@
 /* End PBXProject section */
 /* Begin PBXResourcesBuildPhase section */
-42DA36B2352679AE6D81CC3D497062B4 /* Resources */ = {
+F0AC653F3FCB200441797E130403F04E /* Resources */ = {
     isa = PBXResourcesBuildPhase;
     buildActionMask = 2147483647;
     files = (
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
-AFA0F55BCEBEA58C3418C1675448D70A /* Resources */ = {
+F96110A889A27DA212E3BE9C35BDB88A /* Resources */ = {
     isa = PBXResourcesBuildPhase;
     buildActionMask = 2147483647;
     files = (
@@ -362,54 +362,98 @@
 /* End PBXResourcesBuildPhase section */
 /* Begin PBXSourcesBuildPhase section */
-0815E5E7EEAD07CAC94FB6DF593B3868 /* Sources */ = {
+410D7E44F39A1EB09D574FDDF4843D29 /* Sources */ = {
     isa = PBXSourcesBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        E43D8122B3F44AC0C1C4063E9FE12FCE /* Delegate.swift in Sources */,
-        89DFE11EB6B97622CBE570E11A422B2E /* Interpreter.swift in Sources */,
-        1EF42BA7A3728956E0819AA1586A19E0 /* InterpreterError.swift in Sources */,
-        E3387D3D7DF235DD921900AECE7177DF /* Model.swift in Sources */,
-        35CC16F28EFE959BA1EDD59D0A1A579D /* QuantizationParameters.swift in Sources */,
-        BF7ACB61FB65A0CDE562908EA112C028 /* Tensor.swift in Sources */,
-        BF92AADA2ACACF7675F24D2858B4E641 /* TensorFlowLite.swift in Sources */,
-        C253298E121F79C0613AADC72D225217 /* TensorFlowLiteSwift-dummy.m in Sources */,
+        E9C14D76F9B901587F16C0521F9D1655 /* Pods-OCR-SDK-dummy.m in Sources */,
     );
     runOnlyForDeploymentPostprocessing = 0;
 };
-F89B8B2A999618D7E7BEBB9741F4CE8B /* Sources */ = {
+C1AAE5F3DAC70A83F0B8D3A404BDBCFF /* Sources */ = {
     isa = PBXSourcesBuildPhase;
     buildActionMask = 2147483647;
     files = (
-        A4419DB0A7CDB0A886E77F5FCF873CF8 /* Pods-OCR-SDK-dummy.m in Sources */,
+        1DBA00810896C68345BAAB9EC3D4BC43 /* Delegate.swift in Sources */,
+        7EB5CB3C010CEDFEB0E7D9FACDF15269 /* Interpreter.swift in Sources */,
+        9A09B475A9B8DAD6A15D02BF26256202 /* InterpreterError.swift in Sources */,
+        14EF6CE57AF79DF3DF9DDEF552DBCE07 /* Model.swift in Sources */,
+        EF6660C4AC5A8492404E85A54A2BB6C8 /* QuantizationParameters.swift in Sources */,
+        A06B2E2BEB8E190AA96F6C8D60C06DF3 /* Tensor.swift in Sources */,
+        5A7115688121EFEFF729EC91F439837C /* TensorFlowLite.swift in Sources */,
+        C3BC23FAAF367C5D4FA7A0CF969898A5 /* TensorFlowLiteSwift-dummy.m in Sources */,
    );
     runOnlyForDeploymentPostprocessing = 0;
 };
 /* End PBXSourcesBuildPhase section */
 /* Begin PBXTargetDependency section */
-82F118637C8A2F46BA62DF3BCE921D75 /* PBXTargetDependency */ = {
-    isa = PBXTargetDependency;
-    name = TensorFlowLiteSwift;
-    target = 10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */;
-    targetProxy = 95F66FC5DE60D6FA2B1A8FB0A7BA488B /* PBXContainerItemProxy */;
-};
-B9B3EA213DF319228DCB503D0F59B5A2 /* PBXTargetDependency */ = {
-    isa = PBXTargetDependency;
-    name = TensorFlowLiteC;
-    target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
-    targetProxy = 74A713F5EF5E6AC9BBF4B0F890C83299 /* PBXContainerItemProxy */;
-};
-EF69265E72B2B6190ECEC50A2F624C11 /* PBXTargetDependency */ = {
-    isa = PBXTargetDependency;
-    name = TensorFlowLiteC;
-    target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
-    targetProxy = 7186AA350E2F51E290EB66113727A4BB /* PBXContainerItemProxy */;
-};
+AABA15FF1710E27E7176CEA5FA540E47 /* PBXTargetDependency */ = {
+    isa = PBXTargetDependency;
+    name = TensorFlowLiteC;
+    target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
+    targetProxy = 64214CA206765D231785F6D5C183A7FF /* PBXContainerItemProxy */;
+};
+C231AC1CD65BBE93E9E63242DE6F3243 /* PBXTargetDependency */ = {
+    isa = PBXTargetDependency;
+    name = TensorFlowLiteC;
+    target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
+    targetProxy = 59EA02C48415F2680BE281A5B06A3743 /* PBXContainerItemProxy */;
+};
+E0234EED1FBA99973B6AA98CBC87E6AB /* PBXTargetDependency */ = {
+    isa = PBXTargetDependency;
+    name = TensorFlowLiteSwift;
+    target = 10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */;
+    targetProxy = 46F536B326C88C9C82A6C50AFD12998B /* PBXContainerItemProxy */;
+};
 /* End PBXTargetDependency section */
 /* Begin XCBuildConfiguration section */
-28E0C0106DD4345E71D76C7FA44C18FE /* Debug */ = {
+4B1553A3517450622C3156A1897246D4 /* Release */ = {
+    isa = XCBuildConfiguration;
+    baseConfigurationReference = DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */;
+    buildSettings = {
+        BITCODE_GENERATION_MODE = bitcode;
+        "CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
+        "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
+        "CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
+        CURRENT_PROJECT_VERSION = 1;
+        DEFINES_MODULE = YES;
+        DYLIB_COMPATIBILITY_VERSION = 1;
+        DYLIB_CURRENT_VERSION = 1;
+        DYLIB_INSTALL_NAME_BASE = "@rpath";
+        ENABLE_BITCODE = YES;
+        GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
+        INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
+        INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
+        IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+        LD_RUNPATH_SEARCH_PATHS = (
+            "$(inherited)",
+            "@executable_path/Frameworks",
+            "@loader_path/Frameworks",
+        );
+        MACH_O_TYPE = staticlib;
+        MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
+        ONLY_ACTIVE_ARCH = NO;
+        OTHER_CFLAGS = (
+            "$(inherited)",
+            "-fembed-bitcode",
+        );
+        PRODUCT_MODULE_NAME = TensorFlowLite;
+        PRODUCT_NAME = TensorFlowLite;
+        SDKROOT = iphoneos;
+        SKIP_INSTALL = YES;
+        SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
+        SWIFT_VERSION = 5.0;
+        TARGETED_DEVICE_FAMILY = "1,2";
+        VALIDATE_PRODUCT = YES;
+        VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
+        VERSIONING_SYSTEM = "apple-generic";
+        VERSION_INFO_PREFIX = "";
+    };
+    name = Release;
+};
+4DCDF5BB20B68FE74968B4A053AB4295 /* Debug */ = {
     isa = XCBuildConfiguration;
     baseConfigurationReference = AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */;
     buildSettings = {
@@ -454,6 +498,52 @@
     };
     name = Debug;
 };
+56343F68504E99AAC07564215415855F /* Release */ = {
+    isa = XCBuildConfiguration;
+    baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
+    buildSettings = {
+        ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
+        BITCODE_GENERATION_MODE = bitcode;
+        CLANG_ENABLE_OBJC_WEAK = NO;
+        "CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
+        "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
+        "CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
+        CURRENT_PROJECT_VERSION = 1;
+        DEFINES_MODULE = YES;
+        DYLIB_COMPATIBILITY_VERSION = 1;
+        DYLIB_CURRENT_VERSION = 1;
+        DYLIB_INSTALL_NAME_BASE = "@rpath";
+        ENABLE_BITCODE = YES;
+        INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
+        INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
+        IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+        LD_RUNPATH_SEARCH_PATHS = (
+            "$(inherited)",
+            "@executable_path/Frameworks",
+            "@loader_path/Frameworks",
+        );
+        MACH_O_TYPE = staticlib;
+        MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
+        ONLY_ACTIVE_ARCH = NO;
+        OTHER_CFLAGS = (
+            "$(inherited)",
+            "-fembed-bitcode",
+        );
+        OTHER_LDFLAGS = "";
+        OTHER_LIBTOOLFLAGS = "";
+        PODS_ROOT = "$(SRCROOT)";
+        PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}";
+        PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
+        SDKROOT = iphoneos;
+        SKIP_INSTALL = YES;
+        TARGETED_DEVICE_FAMILY = "1,2";
+        VALIDATE_PRODUCT = YES;
+        VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
+        VERSIONING_SYSTEM = "apple-generic";
+        VERSION_INFO_PREFIX = "";
+    };
+    name = Release;
+};
 5B27AD80C84B2688EBB2932F044E0363 /* Release */ = {
     isa = XCBuildConfiguration;
     baseConfigurationReference = 28C1063AEAB9221745A5095D0A5CB58B /* TensorFlowLiteC.release.xcconfig */;
@@ -479,11 +569,11 @@
     };
     name = Release;
 };
-6B7B33209571F28B85CE49B1C78BD7E2 /* Release */ = {
+60B52B04B18AF4ADD9B736ED471FBDD4 /* Debug */ = {
     isa = XCBuildConfiguration;
-    baseConfigurationReference = DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */;
+    baseConfigurationReference = 80B58F1E1746E350BEB337CA9F756D65 /* TensorFlowLiteSwift.debug.xcconfig */;
     buildSettings = {
-        BITCODE_GENERATION_MODE = bitcode;
+        BITCODE_GENERATION_MODE = marker;
         "CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
         "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
         "CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
@@ -507,7 +597,7 @@
         ONLY_ACTIVE_ARCH = NO;
         OTHER_CFLAGS = (
             "$(inherited)",
-            "-fembed-bitcode",
+            "-fembed-bitcode-marker",
         );
         PRODUCT_MODULE_NAME = TensorFlowLite;
         PRODUCT_NAME = TensorFlowLite;
@@ -516,12 +606,11 @@
         SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
         SWIFT_VERSION = 5.0;
         TARGETED_DEVICE_FAMILY = "1,2";
-        VALIDATE_PRODUCT = YES;
         VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
         VERSIONING_SYSTEM = "apple-generic";
         VERSION_INFO_PREFIX = "";
     };
-    name = Release;
+    name = Debug;
 };
 903A0004D3E6651EFD5D2E16214D101B /* Release */ = {
     isa = XCBuildConfiguration;
@@ -675,112 +764,23 @@
     };
     name = Debug;
 };
-B976E9C0D4ED126FD693AEF88E605112 /* Debug */ = {
-    isa = XCBuildConfiguration;
-    baseConfigurationReference = 80B58F1E1746E350BEB337CA9F756D65 /* TensorFlowLiteSwift.debug.xcconfig */;
-    buildSettings = {
-        BITCODE_GENERATION_MODE = marker;
-        "CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
-        "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
-        "CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
-        CURRENT_PROJECT_VERSION = 1;
-        DEFINES_MODULE = YES;
-        DYLIB_COMPATIBILITY_VERSION = 1;
-        DYLIB_CURRENT_VERSION = 1;
-        DYLIB_INSTALL_NAME_BASE = "@rpath";
-        ENABLE_BITCODE = YES;
-        GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
-        INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
-        INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
-        IPHONEOS_DEPLOYMENT_TARGET = 9.0;
-        LD_RUNPATH_SEARCH_PATHS = (
-            "$(inherited)",
-            "@executable_path/Frameworks",
-            "@loader_path/Frameworks",
-        );
-        MACH_O_TYPE = staticlib;
-        MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
-        ONLY_ACTIVE_ARCH = NO;
-        OTHER_CFLAGS = (
-            "$(inherited)",
-            "-fembed-bitcode-marker",
-        );
-        PRODUCT_MODULE_NAME = TensorFlowLite;
-        PRODUCT_NAME = TensorFlowLite;
-        SDKROOT = iphoneos;
-        SKIP_INSTALL = YES;
-        SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
-        SWIFT_VERSION = 5.0;
-        TARGETED_DEVICE_FAMILY = "1,2";
-        VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
-        VERSIONING_SYSTEM = "apple-generic";
-        VERSION_INFO_PREFIX = "";
-    };
-    name = Debug;
-};
-EFC6D33E08C2FE49F5DE5DEA51A66DE5 /* Release */ = {
-    isa = XCBuildConfiguration;
-    baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
-    buildSettings = {
-        ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
-        BITCODE_GENERATION_MODE = bitcode;
-        CLANG_ENABLE_OBJC_WEAK = NO;
-        "CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
-        "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
-        "CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
-        CURRENT_PROJECT_VERSION = 1;
-        DEFINES_MODULE = YES;
-        DYLIB_COMPATIBILITY_VERSION = 1;
-        DYLIB_CURRENT_VERSION = 1;
-        DYLIB_INSTALL_NAME_BASE = "@rpath";
-        ENABLE_BITCODE = YES;
-        INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
-        INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
-        IPHONEOS_DEPLOYMENT_TARGET = 11.0;
-        LD_RUNPATH_SEARCH_PATHS = (
-            "$(inherited)",
-            "@executable_path/Frameworks",
-            "@loader_path/Frameworks",
-        );
-        MACH_O_TYPE = staticlib;
-        MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
-        ONLY_ACTIVE_ARCH = NO;
-        OTHER_CFLAGS = (
-            "$(inherited)",
-            "-fembed-bitcode",
-        );
-        OTHER_LDFLAGS = "";
-        OTHER_LIBTOOLFLAGS = "";
-        PODS_ROOT = "$(SRCROOT)";
-        PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}";
-        PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
-        SDKROOT = iphoneos;
-        SKIP_INSTALL = YES;
-        TARGETED_DEVICE_FAMILY = "1,2";
-        VALIDATE_PRODUCT = YES;
-        VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
-        VERSIONING_SYSTEM = "apple-generic";
-        VERSION_INFO_PREFIX = "";
-    };
-    name = Release;
-};
 /* End XCBuildConfiguration section */
 /* Begin XCConfigurationList section */
+117645AFADBD3CD45056500CD6D8C9F0 /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */ = {
+    isa = XCConfigurationList;
+    buildConfigurations = (
+        60B52B04B18AF4ADD9B736ED471FBDD4 /* Debug */,
+        4B1553A3517450622C3156A1897246D4 /* Release */,
+    );
+    defaultConfigurationIsVisible = 0;
+    defaultConfigurationName = Release;
+};
 243169D7A56C0468A9C10463619D018A /* Build configuration list for PBXAggregateTarget "TensorFlowLiteC" */ = {
     isa = XCConfigurationList;
     buildConfigurations = (
         B577B5487933DC5B570EF057336156E5 /* Debug */,
         5B27AD80C84B2688EBB2932F044E0363 /* Release */,
     );
     defaultConfigurationIsVisible = 0;
     defaultConfigurationName = Release;
-};
-44F0B3745F9D5A6855C1205F3C0401AE /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */ = {
-    isa = XCConfigurationList;
-    buildConfigurations = (
-        B976E9C0D4ED126FD693AEF88E605112 /* Debug */,
-        6B7B33209571F28B85CE49B1C78BD7E2 /* Release */,
-    );
-    defaultConfigurationIsVisible = 0;
-    defaultConfigurationName = Release;
 };
@@ -794,11 +794,11 @@
 defaultConfigurationIsVisible = 0;
 defaultConfigurationName = Release;
 };
-B70B252D508AC00514E4751918F71C7E /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */ = {
+7BB9016B5737013F5C0419F94194071E /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */ = {
     isa = XCConfigurationList;
     buildConfigurations = (
-        28E0C0106DD4345E71D76C7FA44C18FE /* Debug */,
-        EFC6D33E08C2FE49F5DE5DEA51A66DE5 /* Release */,
+        4DCDF5BB20B68FE74968B4A053AB4295 /* Debug */,
+        56343F68504E99AAC07564215415855F /* Release */,
    );
     defaultConfigurationIsVisible = 0;
     defaultConfigurationName = Release;
...
@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
 FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
 GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
 HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
-LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
 OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
 OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
 PODS_BUILD_DIR = ${BUILD_DIR}
...
@@ -2,7 +2,8 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
 FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
 GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
 HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
-LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LD_RUNPATH_SEARCH_PATHS = $(inherited) /usr/lib/swift '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
+LIBRARY_SEARCH_PATHS = $(inherited) "${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" /usr/lib/swift
 OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
 OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
 PODS_BUILD_DIR = ${BUILD_DIR}
...