Commit 244eb7f9 authored by apkadmin

update build full bitcode

parent 7088d89b
@@ -10,24 +10,10 @@
9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9509925E25355E0300C570D8 /* SBKValidateCardView.swift */; };
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95182D0524B3343D00405EA9 /* liveness.tflite */; };
954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 954230E425344601006F13F9 /* valid_card_10102020.tflite */; };
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDB4247D171500AF50DE /* ExtString.swift */; };
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDC2247D1FA200AF50DE /* Localizable.strings */; };
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */; };
9546DDDC247E197800AF50DE /* Global.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDDB247E197800AF50DE /* Global.swift */; };
9551057C2477746A0053036F /* OCR_SDK.h in Headers */ = {isa = PBXBuildFile; fileRef = 9551057A2477746A0053036F /* OCR_SDK.h */; settings = {ATTRIBUTES = (Public, ); }; };
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510583247774CC0053036F /* SBOCRRequest.swift */; };
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510585247774CC0053036F /* ExtUiViewController.swift */; };
955105AD247774CC0053036F /* Loadding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510586247774CC0053036F /* Loadding.swift */; };
955105AE247774CC0053036F /* TutorialFace1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510588247774CC0053036F /* TutorialFace1.png */; };
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510589247774CC0053036F /* TutorialFaceP.png */; };
955105B0247774CC0053036F /* TutorialFace3.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058A247774CC0053036F /* TutorialFace3.png */; };
955105B1247774CC0053036F /* TutorialFace2.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058B247774CC0053036F /* TutorialFace2.png */; };
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058D247774CC0053036F /* TutorialFaceCheckBox.png */; };
955105B4247774CC0053036F /* background.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058E247774CC0053036F /* background.png */; };
955105B5247774CC0053036F /* iconCap.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058F247774CC0053036F /* iconCap.png */; };
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */; };
955105B9247774CC0053036F /* cmndF1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510593247774CC0053036F /* cmndF1.png */; };
955105BB247774CC0053036F /* cmndF2.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510595247774CC0053036F /* cmndF2.png */; };
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 95510598247774CC0053036F /* SBKTutorialFaceVC.xib */; };
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510599247774CC0053036F /* SBKTutorialFaceVC.swift */; };
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9551059B247774CC0053036F /* SBKTutorialVC.swift */; };
@@ -40,39 +26,64 @@
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */; };
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105AA247774CC0053036F /* SBKCaptureCardVC.swift */; };
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105C9247775290053036F /* SB_KYC_SDK.swift */; };
955105FA2477B52C0053036F /* back.png in Resources */ = {isa = PBXBuildFile; fileRef = 955105F92477B52C0053036F /* back.png */; };
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4B249083A1001FB052 /* SBValidateInput.swift */; };
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4D249098C2001FB052 /* ExtUIColor.swift */; };
955BECE624935A14001FB052 /* ic_record.png in Resources */ = {isa = PBXBuildFile; fileRef = 955BECE524935A14001FB052 /* ic_record.png */; };
955E7AC924D957140048FC06 /* Next@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AC824D957140048FC06 /* Next@2x.png */; };
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7ADE24D967B10048FC06 /* Card-2@2x.png */; };
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */; };
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */; };
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB56D24DBB9B7000C88D2 /* Back@2x.png */; };
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */; };
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */; };
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */; };
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */; };
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */; };
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */; };
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */; };
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */; };
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */ = {isa = PBXBuildFile; fileRef = 957DF5F224C035C700FE6A67 /* objcio.cer */; };
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9580130D2489F1EA00846F8A /* SBKRecordFace.swift */; };
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9580130E2489F1EA00846F8A /* SBKRecordFace.xib */; };
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801346248A237000846F8A /* SBKModelDataHandler.swift */; };
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */; };
958D36C224C18BB1004B27EB /* Pods_OCR_SDK.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
95A6BA6224E1627800A34ADD /* back_light.png in Resources */ = {isa = PBXBuildFile; fileRef = 95A6BA6124E1627800A34ADD /* back_light.png */; };
95FAB2672499C89400CE7913 /* rotate.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAB2662499C89400CE7913 /* rotate.png */; };
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51D24EA3FE300C161F2 /* Caution@2x.png */; };
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */; };
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56B24EA83C800C161F2 /* Place within the box.png */; };
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56C24EA83C900C161F2 /* Avoid glare.png */; };
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56D24EA83C900C161F2 /* Do not place outside.png */; };
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6EF25299E160058D675 /* SBKValidateCardView.xib */; };
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */; };
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */; };
B7622A902646FB690077D3CF /* Global.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A782646FB690077D3CF /* Global.swift */; };
B7622A912646FB690077D3CF /* ExtUiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */; };
B7622A922646FB690077D3CF /* ExtCGImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7B2646FB690077D3CF /* ExtCGImage.swift */; };
B7622A932646FB690077D3CF /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7C2646FB690077D3CF /* ExtString.swift */; };
B7622A942646FB690077D3CF /* ExtUIColor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7D2646FB690077D3CF /* ExtUIColor.swift */; };
B7622A952646FB690077D3CF /* ExtCVPixelBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */; };
B7622A962646FB690077D3CF /* ExtUIImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7F2646FB690077D3CF /* ExtUIImage.swift */; };
B7622A972646FB690077D3CF /* ExtCIImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A802646FB690077D3CF /* ExtCIImage.swift */; };
B7622A982646FB690077D3CF /* Loadding.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A812646FB690077D3CF /* Loadding.swift */; };
B7622AA52646FB690077D3CF /* SBValidateInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A8F2646FB690077D3CF /* SBValidateInput.swift */; };
B7622AAE2647E5730077D3CF /* FaceDetection.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA72647E5730077D3CF /* FaceDetection.swift */; };
B7622AAF2647E5730077D3CF /* EMSimilarity.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA82647E5730077D3CF /* EMSimilarity.swift */; };
B7622AB02647E5730077D3CF /* OptionsFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA92647E5730077D3CF /* OptionsFace.swift */; };
B7622AB12647E5730077D3CF /* AnchorOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAA2647E5730077D3CF /* AnchorOption.swift */; };
B7622AB22647E5730077D3CF /* NormalizeOp.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAB2647E5730077D3CF /* NormalizeOp.swift */; };
B7622AB32647E5730077D3CF /* Detection.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAC2647E5730077D3CF /* Detection.swift */; };
B7622AB42647E5730077D3CF /* LandMark.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAD2647E5730077D3CF /* LandMark.swift */; };
B7622AC42647EB230077D3CF /* OverLayCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AC32647EB230077D3CF /* OverLayCardView.swift */; };
B7622AEB2647EE420077D3CF /* TutorialFace1.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACB2647EE420077D3CF /* TutorialFace1.png */; };
B7622AEC2647EE420077D3CF /* TutorialFaceP.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACC2647EE420077D3CF /* TutorialFaceP.png */; };
B7622AED2647EE420077D3CF /* Scan-4@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACD2647EE420077D3CF /* Scan-4@2x.png */; };
B7622AEE2647EE420077D3CF /* Back@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACE2647EE420077D3CF /* Back@2x.png */; };
B7622AEF2647EE420077D3CF /* TutorialFace3.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACF2647EE420077D3CF /* TutorialFace3.png */; };
B7622AF02647EE420077D3CF /* TutorialFace2.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD02647EE420077D3CF /* TutorialFace2.png */; };
B7622AF12647EE420077D3CF /* Hat@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD12647EE420077D3CF /* Hat@2x.png */; };
B7622AF22647EE420077D3CF /* Glasses@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD22647EE420077D3CF /* Glasses@2x.png */; };
B7622AF32647EE420077D3CF /* Caution@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD32647EE420077D3CF /* Caution@2x.png */; };
B7622AF42647EE420077D3CF /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD42647EE420077D3CF /* Do not place outside.png */; };
B7622AF52647EE420077D3CF /* rotate.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD52647EE420077D3CF /* rotate.png */; };
B7622AF62647EE420077D3CF /* Button_Do@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD62647EE420077D3CF /* Button_Do@2x.png */; };
B7622AF72647EE420077D3CF /* Brighness@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD72647EE420077D3CF /* Brighness@2x.png */; };
B7622AF82647EE420077D3CF /* Card-2@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD82647EE420077D3CF /* Card-2@2x.png */; };
B7622AF92647EE420077D3CF /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */; };
B7622AFA2647EE420077D3CF /* TutorialFaceCheckBox.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */; };
B7622AFB2647EE420077D3CF /* background.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADB2647EE420077D3CF /* background.png */; };
B7622AFC2647EE420077D3CF /* Scan-5@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADC2647EE420077D3CF /* Scan-5@2x.png */; };
B7622AFD2647EE420077D3CF /* iconCap.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADD2647EE420077D3CF /* iconCap.png */; };
B7622AFE2647EE420077D3CF /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADE2647EE420077D3CF /* Place within the box.png */; };
B7622AFF2647EE420077D3CF /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADF2647EE420077D3CF /* Avoid glare.png */; };
B7622B002647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */; };
B7622B012647EE420077D3CF /* back_light.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE12647EE420077D3CF /* back_light.png */; };
B7622B022647EE420077D3CF /* Holdphone@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE22647EE420077D3CF /* Holdphone@2x.png */; };
B7622B032647EE420077D3CF /* ic_record.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE32647EE420077D3CF /* ic_record.png */; };
B7622B042647EE420077D3CF /* Next@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE42647EE420077D3CF /* Next@2x.png */; };
B7622B052647EE420077D3CF /* Scan-3@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE52647EE420077D3CF /* Scan-3@2x.png */; };
B7622B062647EE420077D3CF /* Scan-1@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE62647EE420077D3CF /* Scan-1@2x.png */; };
B7622B072647EE420077D3CF /* cmndF1.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE72647EE420077D3CF /* cmndF1.png */; };
B7622B082647EE420077D3CF /* back.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE82647EE420077D3CF /* back.png */; };
B7622B092647EE420077D3CF /* cmndF2.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE92647EE420077D3CF /* cmndF2.png */; };
B7622B0A2647EE420077D3CF /* Passport-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */; };
CCCF85EB83511B97EF23244B /* Pods_OCR_SDK.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */; };
/* End PBXBuildFile section */
@@ -97,29 +108,15 @@
9509925E25355E0300C570D8 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKValidateCardView.swift; sourceTree = "<group>"; };
95182D0524B3343D00405EA9 /* liveness.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = liveness.tflite; sourceTree = "<group>"; };
954230E425344601006F13F9 /* valid_card_10102020.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = valid_card_10102020.tflite; sourceTree = "<group>"; };
9546DDB4247D171500AF50DE /* ExtString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
9546DDC1247D1FA200AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDC3247D1FAA00AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDD1247D2C0C00AF50DE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/SBKCaptureCardVC.xib; sourceTree = "<group>"; };
9546DDD4247D2C1700AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDD6247D2C1A00AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDDB247E197800AF50DE /* Global.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Global.swift; sourceTree = "<group>"; };
955105772477746A0053036F /* SB_KYC_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SB_KYC_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
9551057A2477746A0053036F /* OCR_SDK.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = OCR_SDK.h; sourceTree = "<group>"; };
9551057B2477746A0053036F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
95510583247774CC0053036F /* SBOCRRequest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBOCRRequest.swift; sourceTree = "<group>"; };
95510585247774CC0053036F /* ExtUiViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUiViewController.swift; sourceTree = "<group>"; };
95510586247774CC0053036F /* Loadding.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Loadding.swift; sourceTree = "<group>"; };
95510588247774CC0053036F /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
95510589247774CC0053036F /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
9551058A247774CC0053036F /* TutorialFace3.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace3.png; sourceTree = "<group>"; };
9551058B247774CC0053036F /* TutorialFace2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace2.png; sourceTree = "<group>"; };
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceCheckBox.png; sourceTree = "<group>"; };
9551058E247774CC0053036F /* background.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = background.png; sourceTree = "<group>"; };
9551058F247774CC0053036F /* iconCap.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = iconCap.png; sourceTree = "<group>"; };
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Screen Shot 2020-05-12 at 15.14.44.png"; sourceTree = "<group>"; };
95510593247774CC0053036F /* cmndF1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF1.png; sourceTree = "<group>"; };
95510595247774CC0053036F /* cmndF2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF2.png; sourceTree = "<group>"; };
95510598247774CC0053036F /* SBKTutorialFaceVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKTutorialFaceVC.xib; sourceTree = "<group>"; };
95510599247774CC0053036F /* SBKTutorialFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialFaceVC.swift; sourceTree = "<group>"; };
9551059B247774CC0053036F /* SBKTutorialVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialVC.swift; sourceTree = "<group>"; };
@@ -132,39 +129,65 @@
955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureFaceVC.swift; sourceTree = "<group>"; };
955105AA247774CC0053036F /* SBKCaptureCardVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureCardVC.swift; sourceTree = "<group>"; };
955105C9247775290053036F /* SB_KYC_SDK.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SB_KYC_SDK.swift; sourceTree = "<group>"; };
955105F92477B52C0053036F /* back.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back.png; sourceTree = "<group>"; };
955BEC4B249083A1001FB052 /* SBValidateInput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBValidateInput.swift; sourceTree = "<group>"; };
955BEC4D249098C2001FB052 /* ExtUIColor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtUIColor.swift; sourceTree = "<group>"; };
955BECE524935A14001FB052 /* ic_record.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_record.png; sourceTree = "<group>"; };
955E7AC824D957140048FC06 /* Next@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Next@2x.png"; sourceTree = "<group>"; };
955E7ADE24D967B10048FC06 /* Card-2@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Card-2@2x.png"; sourceTree = "<group>"; };
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Passport-2 copy@2x.png"; sourceTree = "<group>"; };
956BB56D24DBB9B7000C88D2 /* Back@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Back@2x.png"; sourceTree = "<group>"; };
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Hat@2x.png"; sourceTree = "<group>"; };
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Glasses@2x.png"; sourceTree = "<group>"; };
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Brighness@2x.png"; sourceTree = "<group>"; };
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Holdphone@2x.png"; sourceTree = "<group>"; };
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-1@2x.png"; sourceTree = "<group>"; };
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-3@2x.png"; sourceTree = "<group>"; };
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-4@2x.png"; sourceTree = "<group>"; };
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-5@2x.png"; sourceTree = "<group>"; };
957DF5F224C035C700FE6A67 /* objcio.cer */ = {isa = PBXFileReference; lastKnownFileType = file; path = objcio.cer; sourceTree = "<group>"; };
9580130D2489F1EA00846F8A /* SBKRecordFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBKRecordFace.swift; sourceTree = "<group>"; };
9580130E2489F1EA00846F8A /* SBKRecordFace.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = SBKRecordFace.xib; sourceTree = "<group>"; };
95801346248A237000846F8A /* SBKModelDataHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKModelDataHandler.swift; sourceTree = "<group>"; };
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CVPixelBufferExtension.swift; sourceTree = "<group>"; };
95A6BA6124E1627800A34ADD /* back_light.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back_light.png; sourceTree = "<group>"; };
95FAB2662499C89400CE7913 /* rotate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = rotate.png; sourceTree = "<group>"; };
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Caution@2x.png"; sourceTree = "<group>"; };
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Button_Do@2x.png"; sourceTree = "<group>"; };
95FAF56B24EA83C800C161F2 /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
95FAF56C24EA83C900C161F2 /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
95FAF56D24EA83C900C161F2 /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
A442B6EF25299E160058D675 /* SBKValidateCardView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKValidateCardView.xib; sourceTree = "<group>"; };
A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKRecordFaceView.swift; sourceTree = "<group>"; };
A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKRecordFaceView.xib; sourceTree = "<group>"; };
B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
B7622A782646FB690077D3CF /* Global.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Global.swift; sourceTree = "<group>"; };
B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUiViewController.swift; sourceTree = "<group>"; };
B7622A7B2646FB690077D3CF /* ExtCGImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCGImage.swift; sourceTree = "<group>"; };
B7622A7C2646FB690077D3CF /* ExtString.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
B7622A7D2646FB690077D3CF /* ExtUIColor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUIColor.swift; sourceTree = "<group>"; };
B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCVPixelBuffer.swift; sourceTree = "<group>"; };
B7622A7F2646FB690077D3CF /* ExtUIImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUIImage.swift; sourceTree = "<group>"; };
B7622A802646FB690077D3CF /* ExtCIImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCIImage.swift; sourceTree = "<group>"; };
B7622A812646FB690077D3CF /* Loadding.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Loadding.swift; sourceTree = "<group>"; };
B7622A8F2646FB690077D3CF /* SBValidateInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBValidateInput.swift; sourceTree = "<group>"; };
B7622AA72647E5730077D3CF /* FaceDetection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceDetection.swift; sourceTree = "<group>"; };
B7622AA82647E5730077D3CF /* EMSimilarity.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EMSimilarity.swift; sourceTree = "<group>"; };
B7622AA92647E5730077D3CF /* OptionsFace.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OptionsFace.swift; sourceTree = "<group>"; };
B7622AAA2647E5730077D3CF /* AnchorOption.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AnchorOption.swift; sourceTree = "<group>"; };
B7622AAB2647E5730077D3CF /* NormalizeOp.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NormalizeOp.swift; sourceTree = "<group>"; };
B7622AAC2647E5730077D3CF /* Detection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Detection.swift; sourceTree = "<group>"; };
B7622AAD2647E5730077D3CF /* LandMark.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LandMark.swift; sourceTree = "<group>"; };
B7622AB52647E6230077D3CF /* face_detection_front.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = face_detection_front.tflite; sourceTree = "<group>"; };
B7622AC32647EB230077D3CF /* OverLayCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OverLayCardView.swift; sourceTree = "<group>"; };
B7622ACB2647EE420077D3CF /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
B7622ACC2647EE420077D3CF /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
B7622ACD2647EE420077D3CF /* Scan-4@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-4@2x.png"; sourceTree = "<group>"; };
B7622ACE2647EE420077D3CF /* Back@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Back@2x.png"; sourceTree = "<group>"; };
B7622ACF2647EE420077D3CF /* TutorialFace3.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace3.png; sourceTree = "<group>"; };
B7622AD02647EE420077D3CF /* TutorialFace2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace2.png; sourceTree = "<group>"; };
B7622AD12647EE420077D3CF /* Hat@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Hat@2x.png"; sourceTree = "<group>"; };
B7622AD22647EE420077D3CF /* Glasses@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Glasses@2x.png"; sourceTree = "<group>"; };
B7622AD32647EE420077D3CF /* Caution@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Caution@2x.png"; sourceTree = "<group>"; };
B7622AD42647EE420077D3CF /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
B7622AD52647EE420077D3CF /* rotate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = rotate.png; sourceTree = "<group>"; };
B7622AD62647EE420077D3CF /* Button_Do@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Button_Do@2x.png"; sourceTree = "<group>"; };
B7622AD72647EE420077D3CF /* Brighness@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Brighness@2x.png"; sourceTree = "<group>"; };
B7622AD82647EE420077D3CF /* Card-2@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Card-2@2x.png"; sourceTree = "<group>"; };
B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceCheckBox.png; sourceTree = "<group>"; };
B7622ADB2647EE420077D3CF /* background.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = background.png; sourceTree = "<group>"; };
B7622ADC2647EE420077D3CF /* Scan-5@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-5@2x.png"; sourceTree = "<group>"; };
B7622ADD2647EE420077D3CF /* iconCap.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = iconCap.png; sourceTree = "<group>"; };
B7622ADE2647EE420077D3CF /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
B7622ADF2647EE420077D3CF /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Screen Shot 2020-05-12 at 15.14.44.png"; sourceTree = "<group>"; };
B7622AE12647EE420077D3CF /* back_light.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back_light.png; sourceTree = "<group>"; };
B7622AE22647EE420077D3CF /* Holdphone@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Holdphone@2x.png"; sourceTree = "<group>"; };
B7622AE32647EE420077D3CF /* ic_record.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_record.png; sourceTree = "<group>"; };
B7622AE42647EE420077D3CF /* Next@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Next@2x.png"; sourceTree = "<group>"; };
B7622AE52647EE420077D3CF /* Scan-3@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-3@2x.png"; sourceTree = "<group>"; };
B7622AE62647EE420077D3CF /* Scan-1@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-1@2x.png"; sourceTree = "<group>"; };
B7622AE72647EE420077D3CF /* cmndF1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF1.png; sourceTree = "<group>"; };
B7622AE82647EE420077D3CF /* back.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back.png; sourceTree = "<group>"; };
B7622AE92647EE420077D3CF /* cmndF2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF2.png; sourceTree = "<group>"; };
B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Passport-2 copy@2x.png"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -219,11 +242,12 @@
isa = PBXGroup;
children = (
957DF5F824C036E100FE6A67 /* FileSSL */,
B7622AA62647E5730077D3CF /* Models */,
9580134A248A25E700846F8A /* Model */,
95801345248A237000846F8A /* ModelDataHandler */,
95510582247774CC0053036F /* service */,
95510596247774CC0053036F /* UI */,
95510584247774CC0053036F /* Utils */,
B7622A772646FB690077D3CF /* Utils */,
9551057A2477746A0053036F /* OCR_SDK.h */,
9551057B2477746A0053036F /* Info.plist */,
955105C9247775290053036F /* SB_KYC_SDK.swift */,
@@ -240,63 +264,10 @@
path = service;
sourceTree = "<group>";
};
95510584247774CC0053036F /* Utils */ = {
isa = PBXGroup;
children = (
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */,
95510585247774CC0053036F /* ExtUiViewController.swift */,
95510586247774CC0053036F /* Loadding.swift */,
95510587247774CC0053036F /* image */,
9546DDB4247D171500AF50DE /* ExtString.swift */,
9546DDDB247E197800AF50DE /* Global.swift */,
955BEC4B249083A1001FB052 /* SBValidateInput.swift */,
955BEC4D249098C2001FB052 /* ExtUIColor.swift */,
);
path = Utils;
sourceTree = "<group>";
};
95510587247774CC0053036F /* image */ = {
isa = PBXGroup;
children = (
95FAF56C24EA83C900C161F2 /* Avoid glare.png */,
95FAF56D24EA83C900C161F2 /* Do not place outside.png */,
95FAF56B24EA83C800C161F2 /* Place within the box.png */,
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */,
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */,
95A6BA6124E1627800A34ADD /* back_light.png */,
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */,
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */,
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */,
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */,
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */,
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */,
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */,
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */,
956BB56D24DBB9B7000C88D2 /* Back@2x.png */,
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */,
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */,
955E7ADE24D967B10048FC06 /* Card-2@2x.png */,
955E7AC824D957140048FC06 /* Next@2x.png */,
95FAB2662499C89400CE7913 /* rotate.png */,
955BECE524935A14001FB052 /* ic_record.png */,
955105F92477B52C0053036F /* back.png */,
95510588247774CC0053036F /* TutorialFace1.png */,
95510589247774CC0053036F /* TutorialFaceP.png */,
9551058A247774CC0053036F /* TutorialFace3.png */,
9551058B247774CC0053036F /* TutorialFace2.png */,
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */,
9551058E247774CC0053036F /* background.png */,
9551058F247774CC0053036F /* iconCap.png */,
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */,
95510593247774CC0053036F /* cmndF1.png */,
95510595247774CC0053036F /* cmndF2.png */,
);
path = image;
sourceTree = "<group>";
};
95510596247774CC0053036F /* UI */ = {
isa = PBXGroup;
children = (
B7622AC22647EB230077D3CF /* View */,
9580130C2489F1C100846F8A /* SBKRecordFace */,
95510597247774CC0053036F /* SBKTutorialFace */,
9551059A247774CC0053036F /* SBKTutorial */,
@@ -394,12 +365,100 @@
9580134A248A25E700846F8A /* Model */ = {
isa = PBXGroup;
children = (
B7622AB52647E6230077D3CF /* face_detection_front.tflite */,
954230E425344601006F13F9 /* valid_card_10102020.tflite */,
95182D0524B3343D00405EA9 /* liveness.tflite */,
);
path = Model;
sourceTree = "<group>";
};
B7622A772646FB690077D3CF /* Utils */ = {
isa = PBXGroup;
children = (
B7622A792646FB690077D3CF /* Extension */,
B7622A782646FB690077D3CF /* Global.swift */,
B7622ACA2647EE420077D3CF /* image */,
B7622A812646FB690077D3CF /* Loadding.swift */,
B7622A8F2646FB690077D3CF /* SBValidateInput.swift */,
);
path = Utils;
sourceTree = "<group>";
};
B7622A792646FB690077D3CF /* Extension */ = {
isa = PBXGroup;
children = (
B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */,
B7622A7B2646FB690077D3CF /* ExtCGImage.swift */,
B7622A7C2646FB690077D3CF /* ExtString.swift */,
B7622A7D2646FB690077D3CF /* ExtUIColor.swift */,
B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */,
B7622A7F2646FB690077D3CF /* ExtUIImage.swift */,
B7622A802646FB690077D3CF /* ExtCIImage.swift */,
);
path = Extension;
sourceTree = "<group>";
};
B7622AA62647E5730077D3CF /* Models */ = {
isa = PBXGroup;
children = (
B7622AA72647E5730077D3CF /* FaceDetection.swift */,
B7622AA82647E5730077D3CF /* EMSimilarity.swift */,
B7622AA92647E5730077D3CF /* OptionsFace.swift */,
B7622AAA2647E5730077D3CF /* AnchorOption.swift */,
B7622AAB2647E5730077D3CF /* NormalizeOp.swift */,
B7622AAC2647E5730077D3CF /* Detection.swift */,
B7622AAD2647E5730077D3CF /* LandMark.swift */,
);
path = Models;
sourceTree = "<group>";
};
B7622AC22647EB230077D3CF /* View */ = {
isa = PBXGroup;
children = (
B7622AC32647EB230077D3CF /* OverLayCardView.swift */,
);
path = View;
sourceTree = "<group>";
};
B7622ACA2647EE420077D3CF /* image */ = {
isa = PBXGroup;
children = (
B7622ACB2647EE420077D3CF /* TutorialFace1.png */,
B7622ACC2647EE420077D3CF /* TutorialFaceP.png */,
B7622ACD2647EE420077D3CF /* Scan-4@2x.png */,
B7622ACE2647EE420077D3CF /* Back@2x.png */,
B7622ACF2647EE420077D3CF /* TutorialFace3.png */,
B7622AD02647EE420077D3CF /* TutorialFace2.png */,
B7622AD12647EE420077D3CF /* Hat@2x.png */,
B7622AD22647EE420077D3CF /* Glasses@2x.png */,
B7622AD32647EE420077D3CF /* Caution@2x.png */,
B7622AD42647EE420077D3CF /* Do not place outside.png */,
B7622AD52647EE420077D3CF /* rotate.png */,
B7622AD62647EE420077D3CF /* Button_Do@2x.png */,
B7622AD72647EE420077D3CF /* Brighness@2x.png */,
B7622AD82647EE420077D3CF /* Card-2@2x.png */,
B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */,
B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */,
B7622ADB2647EE420077D3CF /* background.png */,
B7622ADC2647EE420077D3CF /* Scan-5@2x.png */,
B7622ADD2647EE420077D3CF /* iconCap.png */,
B7622ADE2647EE420077D3CF /* Place within the box.png */,
B7622ADF2647EE420077D3CF /* Avoid glare.png */,
B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */,
B7622AE12647EE420077D3CF /* back_light.png */,
B7622AE22647EE420077D3CF /* Holdphone@2x.png */,
B7622AE32647EE420077D3CF /* ic_record.png */,
B7622AE42647EE420077D3CF /* Next@2x.png */,
B7622AE52647EE420077D3CF /* Scan-3@2x.png */,
B7622AE62647EE420077D3CF /* Scan-1@2x.png */,
B7622AE72647EE420077D3CF /* cmndF1.png */,
B7622AE82647EE420077D3CF /* back.png */,
B7622AE92647EE420077D3CF /* cmndF2.png */,
B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */,
);
path = image;
sourceTree = "<group>";
};
BF236FF2605D4B46583CACB8 /* Frameworks */ = {
isa = PBXGroup;
children = (
@@ -480,50 +539,50 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
B7622AF12647EE420077D3CF /* Hat@2x.png in Resources */,
B7622AFB2647EE420077D3CF /* background.png in Resources */,
B7622AED2647EE420077D3CF /* Scan-4@2x.png in Resources */,
954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */,
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */,
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */,
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */,
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */,
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */,
95A6BA6224E1627800A34ADD /* back_light.png in Resources */,
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */,
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */,
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */,
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */,
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */,
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */,
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */,
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */,
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */,
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */,
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */,
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */,
955E7AC924D957140048FC06 /* Next@2x.png in Resources */,
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */,
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */,
95FAB2672499C89400CE7913 /* rotate.png in Resources */,
B7622B072647EE420077D3CF /* cmndF1.png in Resources */,
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */,
B7622AF82647EE420077D3CF /* Card-2@2x.png in Resources */,
B7622AF32647EE420077D3CF /* Caution@2x.png in Resources */,
B7622AEB2647EE420077D3CF /* TutorialFace1.png in Resources */,
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */,
955BECE624935A14001FB052 /* ic_record.png in Resources */,
955105FA2477B52C0053036F /* back.png in Resources */,
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
955105AE247774CC0053036F /* TutorialFace1.png in Resources */,
955105B4247774CC0053036F /* background.png in Resources */,
955105BB247774CC0053036F /* cmndF2.png in Resources */,
955105C4247774CC0053036F /* SBKResultFaceVC.xib in Resources */,
955105B5247774CC0053036F /* iconCap.png in Resources */,
955105B9247774CC0053036F /* cmndF1.png in Resources */,
955105B1247774CC0053036F /* TutorialFace2.png in Resources */,
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */,
B7622B082647EE420077D3CF /* back.png in Resources */,
B7622AF72647EE420077D3CF /* Brighness@2x.png in Resources */,
B7622AFC2647EE420077D3CF /* Scan-5@2x.png in Resources */,
B7622AEC2647EE420077D3CF /* TutorialFaceP.png in Resources */,
B7622AF62647EE420077D3CF /* Button_Do@2x.png in Resources */,
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */,
B7622AF42647EE420077D3CF /* Do not place outside.png in Resources */,
955105C5247774CC0053036F /* SBKCaptureFaceVC.xib in Resources */,
955105BF247774CC0053036F /* SBKTutorialVC.xib in Resources */,
B7622AFF2647EE420077D3CF /* Avoid glare.png in Resources */,
B7622B012647EE420077D3CF /* back_light.png in Resources */,
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */,
B7622AEE2647EE420077D3CF /* Back@2x.png in Resources */,
B7622B022647EE420077D3CF /* Holdphone@2x.png in Resources */,
B7622B002647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */,
B7622B062647EE420077D3CF /* Scan-1@2x.png in Resources */,
955105C1247774CC0053036F /* SBKResultCapture.xib in Resources */,
955105B0247774CC0053036F /* TutorialFace3.png in Resources */,
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */,
B7622AF22647EE420077D3CF /* Glasses@2x.png in Resources */,
B7622AF52647EE420077D3CF /* rotate.png in Resources */,
B7622B0A2647EE420077D3CF /* Passport-2 copy@2x.png in Resources */,
B7622B032647EE420077D3CF /* ic_record.png in Resources */,
B7622B052647EE420077D3CF /* Scan-3@2x.png in Resources */,
B7622AFE2647EE420077D3CF /* Place within the box.png in Resources */,
B7622AFA2647EE420077D3CF /* TutorialFaceCheckBox.png in Resources */,
B7622AFD2647EE420077D3CF /* iconCap.png in Resources */,
B7622B042647EE420077D3CF /* Next@2x.png in Resources */,
B7622B092647EE420077D3CF /* cmndF2.png in Resources */,
B7622AF02647EE420077D3CF /* TutorialFace2.png in Resources */,
B7622AEF2647EE420077D3CF /* TutorialFace3.png in Resources */,
B7622AF92647EE420077D3CF /* Car-2 copy@2x.png in Resources */,
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -560,25 +619,36 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
955105AD247774CC0053036F /* Loadding.swift in Sources */,
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */,
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */,
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */,
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */,
B7622A932646FB690077D3CF /* ExtString.swift in Sources */,
B7622AB42647E5730077D3CF /* LandMark.swift in Sources */,
B7622A972646FB690077D3CF /* ExtCIImage.swift in Sources */,
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */,
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */,
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */,
B7622A962646FB690077D3CF /* ExtUIImage.swift in Sources */,
B7622AAE2647E5730077D3CF /* FaceDetection.swift in Sources */,
B7622A912646FB690077D3CF /* ExtUiViewController.swift in Sources */,
955105C3247774CC0053036F /* SBKResultFaceVC.swift in Sources */,
B7622A902646FB690077D3CF /* Global.swift in Sources */,
B7622AB22647E5730077D3CF /* NormalizeOp.swift in Sources */,
B7622A922646FB690077D3CF /* ExtCGImage.swift in Sources */,
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */,
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */,
9546DDDC247E197800AF50DE /* Global.swift in Sources */,
B7622A952646FB690077D3CF /* ExtCVPixelBuffer.swift in Sources */,
B7622AC42647EB230077D3CF /* OverLayCardView.swift in Sources */,
B7622AB02647E5730077D3CF /* OptionsFace.swift in Sources */,
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */,
9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */,
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */,
B7622AA52646FB690077D3CF /* SBValidateInput.swift in Sources */,
B7622AB32647E5730077D3CF /* Detection.swift in Sources */,
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */,
B7622A982646FB690077D3CF /* Loadding.swift in Sources */,
955105C2247774CC0053036F /* SBKResultCapture.swift in Sources */,
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */,
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */,
B7622AB12647E5730077D3CF /* AnchorOption.swift in Sources */,
B7622AAF2647E5730077D3CF /* EMSimilarity.swift in Sources */,
B7622A942646FB690077D3CF /* ExtUIColor.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -611,6 +681,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
@@ -642,6 +713,7 @@
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_BITCODE = YES;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
@@ -662,7 +734,9 @@
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-fembed-bitcode";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
VERSIONING_SYSTEM = "apple-generic";
@@ -674,6 +748,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
@@ -705,6 +780,7 @@
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_BITCODE = YES;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
@@ -719,7 +795,9 @@
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-fembed-bitcode";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
VALIDATE_PRODUCT = YES;
@@ -740,6 +818,10 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/OCR-SDK",
);
HEADER_SEARCH_PATHS = (
"$(inherited)",
"${SRCROOT}/TensorFlowLite",
@@ -774,6 +856,10 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/OCR-SDK",
);
HEADER_SEARCH_PATHS = (
"$(inherited)",
"${SRCROOT}/TensorFlowLite",
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "B08B0591-1A45-4415-9C04-B83659FBFE35"
type = "0"
version = "2.0">
</Bucket>
@@ -36,6 +36,7 @@ typealias FileInfo = (name: String, extension: String)
enum MobileNet {
static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
static let cardModel: FileInfo = (name: "valid_card_10102020", extension: "tflite")
static let landMarkModel: FileInfo = (name: "face_detection_front", extension: "tflite")
}
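// Sketch: how a FileInfo is typically resolved and loaded. Assumes
// `import TensorFlowLite` (TensorFlowLiteSwift) at the top of the file;
// `loadModel(_:)` is a hypothetical helper, not a symbol this SDK defines.
func loadModel(_ fileInfo: FileInfo) throws -> Interpreter {
    let bundle = Bundle(for: SBKModelDataHandler.self)
    guard let path = bundle.path(forResource: fileInfo.name, ofType: fileInfo.extension) else {
        throw NSError(domain: "OCR-SDK", code: -1, userInfo: nil)  // model missing from bundle
    }
    let interpreter = try Interpreter(modelPath: path)
    try interpreter.allocateTensors()  // reserve memory for input/output tensors
    return interpreter
}
// Usage: let landmarkInterpreter = try loadModel(MobileNet.landMarkModel)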
/// This class handles all data preprocessing and makes calls to run inference on a given frame
@@ -213,16 +214,7 @@ class SBKModelDataHandler {
return results
}
/// Returns the RGB data representation of the given image buffer with the specified `byteCount`.
///
/// - Parameters
/// - buffer: The pixel buffer to convert to RGB data.
/// - byteCount: The expected byte count for the RGB data calculated using the values that the
/// model was trained on: `batchSize * imageWidth * imageHeight * componentsCount`.
/// - isModelQuantized: Whether the model is quantized (i.e. fixed point values rather than
/// floating point values).
/// - Returns: The RGB data representation of the image buffer or `nil` if the buffer could not be
/// converted.
private func rgbDataFromBuffer(
_ buffer: CVPixelBuffer,
byteCount: Int,
@@ -293,26 +285,12 @@ class SBKModelDataHandler {
// MARK: - Extensions
extension Data {
/// Creates a new buffer by copying the buffer pointer of the given array.
///
/// - Warning: The given array's element type `T` must be trivial in that it can be copied bit
/// for bit with no indirection or reference-counting operations; otherwise, reinterpreting
/// data from the resulting buffer has undefined behavior.
/// - Parameter array: An array with elements of type `T`.
init<T>(copyingBufferOf array: [T]) {
self = array.withUnsafeBufferPointer(Data.init)
}
}
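// Sketch: this initializer is how a Float32 input tensor becomes raw bytes for
// the interpreter. The `interpreter` below is assumed context, not defined here.
//
//   let input: [Float32] = [0.1, 0.2, 0.3]
//   let bytes = Data(copyingBufferOf: input)   // 12 bytes: 3 floats x 4 bytes each
//   try interpreter.copy(bytes, toInputAt: 0)  // TensorFlowLiteSwift API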
extension Array {
/// Creates a new array from the bytes of the given unsafe data.
///
/// - Warning: The array's `Element` type must be trivial in that it can be copied bit for bit
/// with no indirection or reference-counting operations; otherwise, copying the raw bytes in
/// the `unsafeData`'s buffer to a new array returns an unsafe copy.
/// - Note: Returns `nil` if `unsafeData.count` is not a multiple of
/// `MemoryLayout<Element>.stride`.
/// - Parameter unsafeData: The data containing the bytes to turn into an array.
init?(unsafeData: Data) {
guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
#if swift(>=5.0)
@@ -327,20 +305,3 @@ extension Array {
#endif // swift(>=5.0)
}
}
extension UIImage {
    /// Returns the color of the pixel at `pos`, reading from `dataImage`, which is
    /// assumed to hold 4-byte RGBA pixels with a row width of `image.size.width`.
    func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
        let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
        let r = CGFloat(dataImage[pixelInfo]) / 255.0
        let g = CGFloat(dataImage[pixelInfo + 1]) / 255.0
        let b = CGFloat(dataImage[pixelInfo + 2]) / 255.0
        let a = CGFloat(dataImage[pixelInfo + 3]) / 255.0
        return UIColor(red: r, green: g, blue: b, alpha: a)
    }
}
//
// AnchorOption.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class AnchorOption {
    init(inputSizeWidth: Int, inputSizeHeight: Int,
         minScale: Double, maxScale: Double,
         anchorOffsetX: Double, anchorOffsetY: Double,
         numLayers: Int,
         featureMapWidth: [Int], featureMapHeight: [Int],
         strides: [Int], aspectRatios: [Double],
         reduceBoxesInLowestLayer: Bool,
         interpolatedScaleAspectRatio: Double,
         fixedAnchorSize: Bool) {
self.inputSizeWidth = inputSizeWidth
self.inputSizeHeight = inputSizeHeight
self.minScale = minScale
self.maxScale = maxScale
self.anchorOffsetX = anchorOffsetX
self.anchorOffsetY = anchorOffsetY
self.numLayers = numLayers
self.featureMapWidth = featureMapWidth
self.featureMapHeight = featureMapHeight
self.strides = strides
self.aspectRatios = aspectRatios
self.reduceBoxesInLowestLayer = reduceBoxesInLowestLayer
self.interpolatedScaleAspectRatio = interpolatedScaleAspectRatio
self.fixedAnchorSize = fixedAnchorSize
}
var inputSizeWidth: Int
var inputSizeHeight: Int
var minScale: Double
var maxScale: Double
var anchorOffsetX: Double
var anchorOffsetY: Double
var numLayers: Int
var featureMapWidth: [Int]
var featureMapHeight: [Int]
var strides: [Int]
var aspectRatios: [Double]
var reduceBoxesInLowestLayer: Bool
var interpolatedScaleAspectRatio: Double
var fixedAnchorSize: Bool
func stridesSize() -> Int {
return strides.count
}
func featureMapHeightSize() -> Int {
return featureMapHeight.count
}
func featureMapWidthSize() -> Int {
return featureMapWidth.count
}
}
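// Illustrative sketch (not the SDK's shipped configuration): an AnchorOption
// wired up with values resembling MediaPipe's short-range BlazeFace model.
// Every number below is an assumption for demonstration purposes.
private func exampleAnchorOption() -> AnchorOption {
    return AnchorOption(
        inputSizeWidth: 128,
        inputSizeHeight: 128,
        minScale: 0.1484375,
        maxScale: 0.75,
        anchorOffsetX: 0.5,
        anchorOffsetY: 0.5,
        numLayers: 4,
        featureMapWidth: [],   // empty: computed from strides instead
        featureMapHeight: [],
        strides: [8, 16, 16, 16],
        aspectRatios: [1.0],
        reduceBoxesInLowestLayer: false,
        interpolatedScaleAspectRatio: 1.0,
        fixedAnchorSize: true)
}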
//
// Detection.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
struct Detection {
init(score: Double, xMin: Double, yMin: Double, width: Double, height: Double, classID: Int, landMark: [Landmark]) {
self.score = score
self.xMin = xMin
self.yMin = yMin
self.width = width
self.height = height
self.classID = classID
self.landMark = landMark
}
var score: Double
var xMin: Double
var yMin: Double
var width: Double
var height: Double
var classID: Int
var landMark: [Landmark]
}
//
// EMSimilarity.swift
// SwiftSim
//
// Created by Evan Moss on 8/1/16.
// Copyright © 2016 Enterprising Technologies LLC. All rights reserved.
//
// The MIT License (MIT)
//
// Copyright (c) 2016 Evan Moss
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import Foundation
enum EMSimilarityMode {
case Cosine
case Tanimoto
case Ochiai
case JaccardIndex
case JaccardDistance
case Dice
case Hamming
}
enum EMVectorSizeMismatchMode {
case Bail
case Truncate
}
class EMSimilarity {
/** Similarity metric mode **/
private var currentSimMode = [EMSimilarityMode.Cosine]
/** Set the currentSimMode via push **/
func pushSimMode(mode: EMSimilarityMode) {
self.currentSimMode.append(mode)
}
/** Pop the currentSimMode via pop if it won't make the stack empty **/
func popSimMode() {
if self.currentSimMode.count > 1 {
let _ = self.currentSimMode.popLast()
}
}
/** Get the currently set similarity mode **/
func getCurrentSimMode() -> EMSimilarityMode? {
return self.currentSimMode.last
}
/** Mismatch Mode **/
private var currentMismatchMode = [EMVectorSizeMismatchMode.Bail]
/** Set the currentMismatchMode via push **/
func pushMismatchMode(mode: EMVectorSizeMismatchMode) {
self.currentMismatchMode.append(mode)
}
/** Pop the currentMismatchMode via pop if it won't make the stack empty **/
func popMismatchMode() {
if self.currentMismatchMode.count > 1 {
let _ = self.currentMismatchMode.popLast()
}
}
/** Get the currently set mismatch mode **/
func getCurrentMismatchMode() -> EMVectorSizeMismatchMode? {
return self.currentMismatchMode.last
}
/** Dot Product **/
private func dot(A: [Double], B: [Double]) -> Double {
var x: Double = 0
for i in 0..<A.count {
x += A[i] * B[i]
}
return x
}
/** Vector Magnitude **/
private func magnitude(A: [Double]) -> Double {
var x: Double = 0
for elt in A {
x += elt * elt
}
return sqrt(x)
}
/** Cosine similarity **/
private func cosineSim(A: [Double], B: [Double]) -> Double {
return dot(A: A, B: B) / (magnitude(A: A) * magnitude(A: B))
}
/** Tanimoto similarity **/
private func tanimotoSim(A: [Double], B: [Double]) -> Double {
let Amag = magnitude(A: A)
let Bmag = magnitude(A: B)
let AdotB = dot(A: A, B: B)
return AdotB / (Amag * Amag + Bmag * Bmag - AdotB)
}
/** Ochiai similarity **/
private func ochiaiSim(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / sqrt(Double(a.count) * Double(b.count))
}
/** Jaccard index **/
private func jaccardIndex(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / Double(a.union(b).count)
}
/** Jaccard distance **/
private func jaccardDist(A: [Double], B: [Double]) -> Double {
return 1.0 - jaccardIndex(A: A, B: B)
}
/** Dice coefficient **/
private func diceCoef(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return 2.0 * Double(a.intersection(b).count) / (Double(a.count) + Double(b.count))
}
/** Hamming distance **/
private func hammingDist(A: [Double], B: [Double]) -> Double {
var x: Double = 0
if A.isEmpty {
return x
}
for i in 0..<A.count {
if A[i] != B[i] {
x += 1
}
}
return x
}
private let enforceEqualVectorSizes: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Hamming]
private let bailOnEmptyInput: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Ochiai]
private let allowEmptyInputs: Set<EMSimilarityMode> = [.Hamming]
/**
* Main compute mode
* Double types
* Returns the similarity results or -1.0 on caught error
*/
func compute(A: [Double], B: [Double]) -> Double {
// get the mode
var mode = EMSimilarityMode.Cosine
if let _mode = self.getCurrentSimMode() {
mode = _mode
}
else {
return -1
}
// are both vectors empty?
if A.isEmpty && B.isEmpty && !allowEmptyInputs.contains(mode) {
// divide by zero -> D.N.E.
return -1
}
// is one of the vectors empty, and would this cause a divide-by-zero error?
if bailOnEmptyInput.contains(mode) && (A.isEmpty || B.isEmpty) {
return -1
}
// look for vector size mismatch for modes in enforceEqualVectorSizes
if enforceEqualVectorSizes.contains(mode) && A.count != B.count {
if let mismatchMode = self.getCurrentMismatchMode() {
switch mismatchMode {
case .Bail:
return -1
case .Truncate:
let a = A.count < B.count ? A : B
let _b = A.count < B.count ? B : A
var b = [Double]()
if a.count > 0 {
for i in 0...a.count-1 {
b.append(_b[i])
}
}
return compute(A: a, B: b)
}
}
else {
return -1
}
}
switch mode {
case .Cosine:
return cosineSim(A: A, B: B)
case .Tanimoto:
return tanimotoSim(A: A, B: B)
case .Ochiai:
return ochiaiSim(A: A, B: B)
case .JaccardIndex:
return jaccardIndex(A: A, B: B)
case .JaccardDistance:
return jaccardDist(A: A, B: B)
case .Dice:
return diceCoef(A: A, B: B)
case .Hamming:
return hammingDist(A: A, B: B)
}
}
}
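// Usage sketch for EMSimilarity: compute cosine similarity, then switch to the
// Jaccard index via the mode stack. compute(A:B:) returns -1.0 on a caught
// error (empty input, or a size mismatch in Bail mode). Values are illustrative.
private func exampleSimilarity() {
    let sim = EMSimilarity()
    let a: [Double] = [1, 0, 2]
    let b: [Double] = [1, 1, 2]
    let cosine = sim.compute(A: a, B: b)    // Cosine is the default mode
    sim.pushSimMode(mode: .JaccardIndex)
    let jaccard = sim.compute(A: a, B: b)   // set-based: {1,0,2} vs {1,2} -> 2/3
    sim.popSimMode()                        // back to Cosine
    print(cosine, jaccard)
}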
//
// FaceDetection.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
class FaceDetection{
public func getAnchors(options: AnchorOption) -> [Anchor] {
var _anchors: [Anchor] = []
if (options.stridesSize() != options.numLayers) {
print("strides_size and num_layers must be equal.")
return []
}
var layerID: Int = 0
while (layerID < options.stridesSize()) {
var anchorHeight: [Double] = []
var anchorWidth: [Double] = []
var aspectRatios: [Double] = []
var scales: [Double] = []
var lastSameStrideLayer: Int = layerID
while (lastSameStrideLayer < options.stridesSize() &&
options.strides[lastSameStrideLayer] == options.strides[layerID]) {
let scale: Double = options.minScale + (options.maxScale - options.minScale) * Double(lastSameStrideLayer) / (Double(options.stridesSize()) - 1.0)
if (lastSameStrideLayer == 0 && options.reduceBoxesInLowestLayer) {
aspectRatios.append(1.0)
aspectRatios.append(2.0)
aspectRatios.append(0.5)
scales.append(0.1)
scales.append(scale)
scales.append(scale)
} else {
for i in 0..<options.aspectRatios.count {
aspectRatios.append(options.aspectRatios[i])
scales.append(scale)
}
if options.interpolatedScaleAspectRatio > 0 {
var scaleNext: Double = 0.0
if lastSameStrideLayer == (options.stridesSize() - 1) {
scaleNext = 1.0
} else {
scaleNext = options.minScale + (options.maxScale - options.minScale) * Double(lastSameStrideLayer + 1) / (Double(options.stridesSize()) - 1.0)
}
scales.append(sqrt(scale * scaleNext))
aspectRatios.append(options.interpolatedScaleAspectRatio)
}
}
lastSameStrideLayer += 1
}
for i in 0..<aspectRatios.count {
let ratioSQRT: Double = sqrt(aspectRatios[i])
anchorHeight.append(scales[i] / ratioSQRT)
anchorWidth.append(scales[i] * ratioSQRT)
}
var featureMapHeight: Int = 0
var featureMapWidth: Int = 0
if (options.featureMapHeightSize() > 0) {
featureMapHeight = options.featureMapHeight[layerID]
featureMapWidth = options.featureMapWidth[layerID]
} else {
let stride: Int = options.strides[layerID]
featureMapHeight = Int(options.inputSizeHeight / stride)
featureMapWidth = Int(options.inputSizeWidth / stride)
}
for y in 0..<featureMapHeight {
for x: Int in 0..<featureMapWidth {
for anchorID in 0..<anchorHeight.count {
let xCenter: Double = Double(Double(x) + options.anchorOffsetX) / Double(featureMapWidth)
let yCenter: Double = Double(Double(y) + options.anchorOffsetY) / Double(featureMapHeight)
var w: Double = 0
var h: Double = 0
if (options.fixedAnchorSize) {
w = 1.0
h = 1.0
} else {
w = anchorWidth[anchorID]
h = anchorHeight[anchorID]
}
_anchors.append(Anchor(xCenter: xCenter, yCenter: yCenter, h: h, w: w))
}
}
}
layerID = lastSameStrideLayer
}
return _anchors
}
// Model outputs: 1 = classification scores, 2 = box regression
func process(options: OptionsFace, rawScores: [Double] , rawBoxes: [Double] , anchors: [Anchor]) -> [Detection] {
var detectionScores: [Double] = []
var detectionClasses: [Int] = []
let boxes = options.numBoxes
for i in 0..<boxes {
var classId = -1
var maxScore: Double = 5e-324 // smallest positive Double
for scoreIdx in 0..<options.numClasses {
    var score = rawScores[i * options.numClasses + scoreIdx]
    if options.sigmoidScore {
        // Clip the raw logit before the sigmoid when a threshold is configured.
        if options.scoreClippingThresh > 0 {
            score = min(max(score, -options.scoreClippingThresh), options.scoreClippingThresh)
        }
        score = 1.0 / (1.0 + exp(-score))
    }
    // Track the best class score whether or not the sigmoid was applied.
    if maxScore < score {
        maxScore = score
        classId = scoreIdx
    }
}
detectionClasses.append(classId)
detectionScores.append(maxScore)
}
let detections: [Detection] = convertToDetections(rawBoxes: rawBoxes, anchors: anchors, detectionScores: detectionScores, detectionClasses: detectionClasses, options: options)
return detections
}
func convertToDetections(
rawBoxes: [Double],
anchors: [Anchor],
detectionScores: [Double],
detectionClasses: [Int],
options: OptionsFace) -> [Detection]{
var _outputDetections : [Detection] = []
for i in 0..<options.numBoxes {
if detectionScores[i] < options.minScoreThresh {
continue
}
let boxOffset: Int = 0
let boxData = decodeBox(rawBoxes: rawBoxes, i:i, anchors: anchors, options: options)
var landmark: [Landmark] = []
for k in 0..<options.numKeypoints {
let x: Double = boxData[boxOffset + 4 + k * 2]
var y: Double = 0.0
if (options.flipVertically) {
y = 1 - boxData[boxOffset + 4 + k * 2 + 1]
} else {
y = boxData[boxOffset + 4 + k * 2 + 1]
}
let tmpLand: Landmark = Landmark(x: x, y: y)
landmark.append(tmpLand)
}
let detection: Detection = convertToDetection(
boxYMin: boxData[boxOffset + 0],
boxXMin: boxData[boxOffset + 1],
boxYMax: boxData[boxOffset + 2],
boxXMax: boxData[boxOffset + 3],
landmark: landmark,
score: detectionScores[i],
classID: detectionClasses[i],
flipVertically: options.flipVertically)
_outputDetections.append(detection)
}
return _outputDetections
}
func origNms(detections: [Detection],threshold: Double, img_width: Int, img_height: Int) -> [Detection] {
if detections.count <= 0 {
return []
}
var x1: [Double] = []
var x2: [Double] = []
var y1: [Double] = []
var y2: [Double] = []
var s : [Double] = []
detections.forEach{detection in
x1.append(detection.xMin * Double(img_width))
x2.append((detection.xMin + detection.width) * Double(img_width))
y1.append(detection.yMin * Double(img_height))
y2.append((detection.yMin + detection.height) * Double(img_height))
s.append(detection.score)
}
let _x1 = x1
let _x2 = x2
let _y1 = y1
let _y2 = y2
let area: [Double] = multiplyArray(x: subArray(x: _x2, y: _x1) , y: subArray(x: _y2 , y: _y1))
let I: [Double] = _quickSort(a: s)
var positions: [Int] = []
I.forEach{element in
positions.append(s.firstIndex(of: element)!)
}
var pick: [Int] = []
while positions.count > 0 {
let ind0: [Int] = [positions.last!]
let ind1: [Int] = Array(positions.dropLast())
let xx1 = _maximum(value: _itemIndex(item: _x1, positions: ind0)[0], itemIndex: _itemIndex(item: _x1, positions: ind1))
let yy1 = _maximum(value: _itemIndex(item: _y1, positions: ind0)[0], itemIndex: _itemIndex(item: _y1, positions: ind1))
let xx2 = _minimum(value: _itemIndex(item: _x2, positions: ind0)[0], itemIndex: _itemIndex(item: _x2, positions: ind1))
let yy2 = _minimum(value: _itemIndex(item: _y2, positions: ind0)[0], itemIndex: _itemIndex(item: _y2, positions: ind1))
let w = _maximum(value: 0.0, itemIndex: subArray(x: xx2 ,y: xx1))
let h = _maximum(value: 0.0, itemIndex: subArray(x: yy2, y: yy1))
let inter = multiplyArray(x: w, y: h)
let o = divideArray(x: inter,
y: subArray(x: _sum(a: _itemIndex(item: area, positions: ind0)[0], b: _itemIndex(item: area, positions: ind1)), y: inter))
pick.append(ind0[0])
let _inCorrectIndex: [Int] = inCorrectIndex(positions: positions, o: o, threshold: threshold)
positions = removeInCorrectIndex(positions: positions, inCorrectIndex: _inCorrectIndex)
}
var _detections: [Detection] = []
pick.forEach{element in _detections.append(detections[element])}
return _detections
}
}
func subArray(x: [Double], y:[Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] - y[b])
}
return a
}
func multiplyArray(x: [Double], y:[Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] * y[b])
}
return a
}
func divideArray(x: [Double], y: [Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] / y[b])
}
return a
}
func decodeBox(rawBoxes: [Double], i: Int, anchors: [Anchor], options: OptionsFace) -> [Double] {
var boxData: [Double] = [Double](repeating: 0.0, count: options.numCoords)
let boxOffset: Int = i * options.numCoords + options.boxCoordOffset
var yCenter: Double = rawBoxes[boxOffset]
var xCenter: Double = rawBoxes[boxOffset + 1]
var h: Double = rawBoxes[boxOffset + 2]
var w: Double = rawBoxes[boxOffset + 3]
if (options.reverseOutputOrder) {
xCenter = rawBoxes[boxOffset]
yCenter = rawBoxes[boxOffset + 1]
w = rawBoxes[boxOffset + 2]
h = rawBoxes[boxOffset + 3]
}
xCenter = xCenter / options.xScale * anchors[i].w + anchors[i].xCenter
yCenter = yCenter / options.yScale * anchors[i].h + anchors[i].yCenter
if (options.applyExponentialOnBoxSize) {
h = exp(h / options.hScale) * anchors[i].h
w = exp(w / options.wScale) * anchors[i].w
} else {
h = h / options.hScale * anchors[i].h
w = w / options.wScale * anchors[i].w
}
let yMin: Double = yCenter - h / 2.0
let xMin: Double = xCenter - w / 2.0
let yMax: Double = yCenter + h / 2.0
let xMax: Double = xCenter + w / 2.0
boxData[0] = yMin
boxData[1] = xMin
boxData[2] = yMax
boxData[3] = xMax
if (options.numKeypoints > 0) {
for k in 0..<options.numKeypoints {
let offset: Int = i * options.numCoords +
options.keypointCoordOffset +
k * options.numValuesPerKeypoint
var keyPointY: Double = rawBoxes[offset]
var keyPointX: Double = rawBoxes[offset + 1]
if (options.reverseOutputOrder) {
keyPointX = rawBoxes[offset]
keyPointY = rawBoxes[offset + 1]
}
boxData[4 + k * options.numValuesPerKeypoint] =
keyPointX / options.xScale * anchors[i].w + anchors[i].xCenter
boxData[4 + k * options.numValuesPerKeypoint + 1] =
keyPointY / options.yScale * anchors[i].h + anchors[i].yCenter
}
}
return boxData
}
func convertToDetection(
boxYMin: Double,
boxXMin: Double,
boxYMax: Double,
boxXMax: Double,
landmark: [Landmark],
score: Double,
classID: Int,
flipVertically: Bool) -> Detection {
var _yMin: Double = 0.0
if flipVertically {
_yMin = 1.0 - boxYMax
}
else {
_yMin = boxYMin
}
return Detection(score: score, xMin: boxXMin, yMin: _yMin, width: (boxXMax - boxXMin), height: (boxYMax - boxYMin), classID: classID, landMark: landmark)
}
func clamp(lower: Int, higher: Int, val: Int) -> Int {
    if val < lower {
        return lower
    } else if val > higher {
        return higher
    } else {
        return val
    }
}
func getRotatedImageByteIndex(x: Int, y: Int, rotatedImageWidth: Int) -> Int {
return rotatedImageWidth * (y + 1) - (x + 1)
}
func _quickSort(a: [Double]) -> [Double] {
if a.count <= 1{
return a
}
let pivot = a[0]
var less: [Double] = []
var more: [Double] = []
var pivotList: [Double] = []
a.forEach{i in
if (i < pivot) {
less.append(i)
} else if (i > pivot) {
more.append(i)
} else {
pivotList.append(i)
}
}
less = _quickSort(a: less)
more = _quickSort(a: more)
less += pivotList
less += more
return less
}
func _itemIndex(item: [Double], positions:[Int]) -> [Double] {
var _temp: [Double] = []
positions.forEach {element in _temp.append(item[element])}
return _temp
}
func _minimum(value: Double, itemIndex: [Double]) -> [Double] {
var _temp: [Double] = []
itemIndex.forEach{element in
if value < element {
_temp.append(value)
}
else {
_temp.append(element)
}
}
return _temp
}
func _maximum(value: Double, itemIndex: [Double]) -> [Double] {
var _temp: [Double] = []
itemIndex.forEach{element in
if value > element {
_temp.append(value)
}
else {
_temp.append(element)
}
}
return _temp
}
func _sum(a: Double, b: [Double]) -> [Double] {
var _temp: [Double] = []
b.forEach{element in
_temp.append(a + element)
}
return _temp
}
func inCorrectIndex(positions: [Int], o: [Double], threshold: Double) -> [Int] {
var _index: [Int] = []
for i in 0..<o.count {
if o[i] > threshold {
_index.append(positions[i])
}
}
return _index
}
func removeInCorrectIndex(positions: [Int], inCorrectIndex: [Int]) -> [Int] {
var temp = positions
temp.remove(at: positions.count - 1)
inCorrectIndex.forEach{ element in temp = temp.filter(){$0 != element}}
return temp
}
//Uint32List convertImage(Uint8List plane0, Uint8List plane1, Uint8List plane2,
// int bytesPerRow, int bytesPerPixel, int width, int height) {
// int hexFF = 255
// int x, y, uvIndex, index
// int yp, up, vp
// int r, g, b
// int rt, gt, bt
//
// Uint32List image = new Uint32List(width * height)
//
// for (x = 0 x < width x++) {
// for (y = 0 y < height y++) {
// uvIndex =
// bytesPerPixel * ((x / 2).round() + bytesPerRow * ((y / 2).round()))
// index = y * width + x
//
// yp = plane0[index]
// up = plane1[uvIndex]
// vp = plane2[uvIndex]
//
// rt = (yp + vp * 1436 / 1024 - 179).round()
// gt = (yp - up * 46549 / 131072 + 44 - vp * 93604 / 131072 + 91).round()
// bt = (yp + up * 1814 / 1024 - 227).round()
// r = clamp(0, 255, rt)
// g = clamp(0, 255, gt)
// b = clamp(0, 255, bt)
//
// image[getRotatedImageByteIndex(y, x, height)] =
// (hexFF << 24) | (b << 16) | (g << 8) | r
// }
// }
// return image
//}
//func FaceAlign(
// rawImage: CVPixelBuffer, detections: [Detection], width: Int) -> [Any] {
// var desiredLeftEye: Landmark = Landmark(x: 0.35, y: 0.35)
// var desiredFaceWidth: Int = width
// var desiredFaceHeight: Int = width
//
// imglib.PngEncoder pngEncoder = new imglib.PngEncoder(level: 0, filter: 0)
// List<int> byteData = pngEncoder.encodeImage(rawImage)
//
// Detection detection
// List<dynamic> newFaces = new List()
//
// for (detection in detections) {
// Landmark leftEyeCenter = detection.landmark[0]
// Landmark rightEyeCenter = detection.landmark[1]
//
// double dY = (rightEyeCenter.y - leftEyeCenter.y) * rawImage.height
// double dX = (rightEyeCenter.x - leftEyeCenter.x) * rawImage.width
//
// double angle = atan2(dY, dX)
// angle = (angle > 0 ? angle : (2 * pi + angle)) * 360 / (2 * pi)
//
// double desiredRightEyeX = 1.0 - desiredLeftEye.x
// double dist = sqrt((dX * dX) + (dY * dY))
// double desiredDist = (desiredRightEyeX - desiredLeftEye.x)
// desiredDist *= desiredFaceWidth
//
// double scale = desiredDist / dist
//
// double eyeCenterX =
// ((leftEyeCenter.x + rightEyeCenter.x) / 2) * rawImage.width
// double eyeCenterY =
// ((leftEyeCenter.y + rightEyeCenter.y) / 2) * rawImage.height
//
// List<int> eyeCenter = new List()
// eyeCenter.add(eyeCenterX.round())
// eyeCenter.add(eyeCenterY.round())
//
// List<double> desiredLeftEye_push = new List()
// desiredLeftEye_push.add(desiredLeftEye.x)
// desiredLeftEye_push.add(desiredLeftEye.y)
//
// List<int> dstSize = new List()
// dstSize.add(desiredFaceWidth)
// dstSize.add(desiredFaceHeight)
// dynamic byteFace = await ImgProc.faceAlign(
// byteData, eyeCenter, desiredLeftEye_push, angle, scale, dstSize)
// newFaces.add(byteFace)
// }
// return newFaces
//}
//
// LandMark.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class Landmark{
init(x: Double, y: Double) {
self.x = x
self.y = y
}
var x: Double
var y: Double
}
//
// NormalizeOp.swift
// movanai
//
// Created by Nguyen Van An on 4/4/21.
//
import Foundation
class NormalizeOp {
var x: Float = 0
var y: Float = 0
init(_ x: Float, _ y: Float ) {
self.x = x
self.y = y
}
}
//
// OptionsFace.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class OptionsFace {
init(numClasses: Int, numBoxes: Int, numCoords: Int, keypointCoordOffset: Int, ignoreClasses: [Int], scoreClippingThresh: Double, minScoreThresh: Double, numKeypoints: Int, numValuesPerKeypoint: Int, boxCoordOffset: Int, xScale: Double, yScale: Double, wScale: Double, hScale: Double, applyExponentialOnBoxSize: Bool, reverseOutputOrder: Bool, sigmoidScore: Bool, flipVertically: Bool) {
self.numClasses = numClasses
self.numBoxes = numBoxes
self.numCoords = numCoords
self.keypointCoordOffset = keypointCoordOffset
self.ignoreClasses = ignoreClasses
self.scoreClippingThresh = scoreClippingThresh
self.minScoreThresh = minScoreThresh
self.numKeypoints = numKeypoints
self.numValuesPerKeypoint = numValuesPerKeypoint
self.boxCoordOffset = boxCoordOffset
self.xScale = xScale
self.yScale = yScale
self.wScale = wScale
self.hScale = hScale
self.applyExponentialOnBoxSize = applyExponentialOnBoxSize
self.reverseOutputOrder = reverseOutputOrder
self.sigmoidScore = sigmoidScore
self.flipVertically = flipVertically
}
var numClasses: Int
var numBoxes: Int
var numCoords: Int
var keypointCoordOffset : Int
var ignoreClasses: [Int]
var scoreClippingThresh: Double
var minScoreThresh: Double
var numKeypoints: Int
var numValuesPerKeypoint: Int
var boxCoordOffset: Int
var xScale: Double
var yScale: Double
var wScale : Double
var hScale: Double
var applyExponentialOnBoxSize: Bool
var reverseOutputOrder: Bool
var sigmoidScore: Bool
var flipVertically: Bool
}
class Anchor {
init(xCenter: Double, yCenter: Double, h: Double, w: Double) {
self.xCenter = xCenter
self.yCenter = yCenter
self.h = h
self.w = w
}
var xCenter: Double
var yCenter: Double
var h: Double
var w: Double
}
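// End-to-end post-processing sketch tying the pieces in this file together:
// generate SSD anchors, decode raw model outputs into detections, then apply
// non-maximum suppression. All concrete values are illustrative assumptions
// (roughly MediaPipe-style face detection), not the SDK's shipped settings.
private func examplePostProcess(rawScores: [Double], rawBoxes: [Double]) -> [Detection] {
    let options = OptionsFace(
        numClasses: 1, numBoxes: 896, numCoords: 16,
        keypointCoordOffset: 4, ignoreClasses: [],
        scoreClippingThresh: 100.0, minScoreThresh: 0.75,
        numKeypoints: 6, numValuesPerKeypoint: 2, boxCoordOffset: 0,
        xScale: 128, yScale: 128, wScale: 128, hScale: 128,
        applyExponentialOnBoxSize: false, reverseOutputOrder: true,
        sigmoidScore: true, flipVertically: false)
    let anchorOptions = AnchorOption(
        inputSizeWidth: 128, inputSizeHeight: 128,
        minScale: 0.1484375, maxScale: 0.75,
        anchorOffsetX: 0.5, anchorOffsetY: 0.5, numLayers: 4,
        featureMapWidth: [], featureMapHeight: [],
        strides: [8, 16, 16, 16], aspectRatios: [1.0],
        reduceBoxesInLowestLayer: false,
        interpolatedScaleAspectRatio: 1.0, fixedAnchorSize: true)
    let faceDetection = FaceDetection()
    let anchors = faceDetection.getAnchors(options: anchorOptions)
    let detections = faceDetection.process(options: options, rawScores: rawScores,
                                           rawBoxes: rawBoxes, anchors: anchors)
    // Suppress overlapping boxes above 0.3 IoU in a 128x128 input space.
    return faceDetection.origNms(detections: detections, threshold: 0.3,
                                 img_width: 128, img_height: 128)
}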
......@@ -33,6 +33,11 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
var completionSuccessCard: (_ data: [String: Any]?, _ cardImage: Data?, _ faceImage: Data?) -> Void = {_,_,_ in}
private var previewWidth: CGFloat = 128.0
private var previewHeight: CGFloat = 128.0
private var cropZone: CGRect?
private var cropImage: CGRect?
var overlayView : OverLayCardView?
override func viewDidLoad() {
super.viewDidLoad()
self.btnRotateScreen.layer.cornerRadius = 20
......@@ -41,7 +46,7 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
init() {
super.init(nibName: "SBKCaptureCardVC", bundle: Bundle(for: SBKCaptureCardVC.self))
}
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
......@@ -70,20 +75,16 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
self.loadConfigUI()
self.lbDescription.text = self.descriptionScreen
captureSession.sessionPreset = .high
guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
else {
print("Unable to access back camera!")
return
else {
print("Unable to access back camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
......@@ -92,16 +93,14 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
......@@ -120,59 +119,35 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
initOverLay()
let overlay = createOverlay(frame: view.frame, xOffset: view.frame.midX, yOffset: view.frame.midY, radius: 50.0)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
// Set the card placement region
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat, colorBorder: CGColor = UIColor.clear.cgColor) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.viewCamera.frame.size.width
let fHeight = self.viewCamera.frame.size.height
let leftSpace = fWidth/2 - (fHeight * 8 / 10 / 2 * 3 / 4)
if self.statusScreen == "horizontal" {
path.addRoundedRect(in: CGRect(x: leftSpace, y: self.viewCamera.frame.origin.y,
width: fHeight * 8 / 10 * 3 / 4, height: fHeight * 8 / 10 ),
cornerWidth: 20, cornerHeight: 20)
} else {
path.addRoundedRect(in: CGRect(x: fWidth/20, y: fHeight / 8,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 20, cornerHeight: 20)
func initOverLay(){
if self.overlayView == nil {
let fWidth = self.view.bounds.size.width
let fHeight = self.view.bounds.size.height
let squareWidth = fWidth/2
self.overlayView = OverLayCardView(frame: self.viewCamera.bounds)
self.overlayView!.marginTop = self.viewCamera.frame.origin.x + fWidth/20
previewWidth = self.viewCamera.frame.width * 18 / 20
previewHeight = self.viewCamera.frame.width * 18 * 3 / 20 / 4
self.overlayView!.previewWidth = self.previewWidth
self.overlayView!.previewHeight = self.previewHeight
self.overlayView!.borderLength = 50
self.overlayView!.borderWidth = 5
self.overlayView!.connerRadius = 0
viewCamera.addSubview(self.overlayView!)
self.overlayView!.setLayer()
}
let borderLayer = CAShapeLayer()
borderLayer.path = path // Reuse the Bezier path
borderLayer.fillColor = UIColor.clear.cgColor
borderLayer.strokeColor = colorBorder
borderLayer.lineWidth = 5
borderLayer.frame = overlayView.bounds
overlayView.layer.addSublayer(borderLayer)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
// Process the photo data after capture
@available(iOS 11.0, *)
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
else { return }
self.captureSession.stopRunning()
self.captureSession.removeOutput(self.videoDataOutput)
......@@ -181,6 +156,7 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
let viewController = SBKResultCapture()
viewController.imageData = imageData
viewController.checkScreen = self.checkScreen
viewController.cropZone = self.cropImage!
viewController.idFront = self.idFront
viewController.URLToken = self.URLToken
viewController.statusScreen = self.statusScreen
......@@ -205,39 +181,6 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
}
}
// func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
// let validateView = SBKValidateInput.shared
// let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
// var imageOutput: UIImage?
// if UIDevice.current.userInterfaceIdiom == .pad {
// imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 / 20 * 3 / 4), scale: 1.0)
// } else {
// imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 / 20 * 3 / 4), scale: 1.0)
// }
//
// let ciimage = CIImage(image: imageOutput!)
// let tmpcontext = CIContext(options: nil)
// let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
//
// return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
// }
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if UIDevice.current.userInterfaceIdiom == .pad {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 / 20), scale: 1.0)
} else {
// Crop the image for iPhone
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width/20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.height * 18 * 3 / 20 / 4 ), scale: 1.0)
}
imageOutput = validateView.formatImageModal(image: imageOutput!, size: CGSize(width: 224, height: 224))
let ciimage = CIImage(image: imageOutput!)
let tmpcontext = CIContext(options: nil)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
@IBAction func onSelectRotateScreen(_ sender: Any) {
}
......@@ -263,7 +206,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
......@@ -273,52 +216,57 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
debugPrint("unable to get image from sample buffer")
return
}
let validateImageCard = validateInput.didOutput(pixelBuffer: self.resizeImageCard(pixelBuffer: imageFrameInput))
DispatchQueue.main.async {
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
self.statusValidateImage = validateImageCard
if validateImageCard == ValidateCard.IMAGE_FAKE || validateImageCard == .ERROR {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
if self.cropZone == nil {
DispatchQueue.main.async {
let scaleSizeWidth = self.viewCamera.frame.width / CGFloat( CVPixelBufferGetWidth(imageFrameInput))
let scaleSizeHeight = self.viewCamera.frame.height / CGFloat( CVPixelBufferGetHeight(imageFrameInput))
let x = self.overlayView!.marginTop
let y = (self.viewCamera.frame.height - self.previewHeight) / 2
self.cropZone = CGRect(x: CGFloat(x) / scaleSizeWidth, y: CGFloat(y + 30) / scaleSizeHeight, width: CGFloat(self.previewWidth) / scaleSizeWidth , height: CGFloat(self.previewHeight - 60) / scaleSizeHeight )
self.cropImage = CGRect(x: CGFloat(x - 10) / scaleSizeWidth, y: CGFloat(y + 10) / scaleSizeHeight, width: CGFloat(self.previewWidth + 20) / scaleSizeWidth , height: CGFloat(self.previewHeight - 20) / scaleSizeHeight )
}
if validateImageCard == .IMAGE_FRONT && self.checkScreen == 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
if validateImageCard == .IMAGE_BACK && self.checkScreen == 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGE_BACK && self.checkScreen == 2) {
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.blue.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
guard let crop = self.cropZone else {return}
guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else {return}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
if self.overlayView != nil {
DispatchQueue.main.async {
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
self.statusValidateImage = validateImageCard
if validateImageCard == ValidateCard.IMAGE_FAKE || validateImageCard == .ERROR {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_FRONT && self.checkScreen == 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_BACK && self.checkScreen == 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
}
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGE_BACK && self.checkScreen == 2) {
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.overlayView!.setBorderColor(color: UIColor.blue.cgColor)
}
}
}
}
......
......@@ -34,6 +34,11 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
var statusScreen: String = "vertical" //horizontal
public var typeCamera: TypeCard = TypeCard.FRONT
public let labelTypeCard = UILabel()
private var previewWidth: CGFloat = 128.0
private var previewHeight: CGFloat = 128.0
private var cropZone: CGRect?
private var cropImage: CGRect?
var overlayView: OverLayCardView?
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
......@@ -66,7 +71,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
self.completionSuccessCardStep(nil, nil, false)
return
}
self.loadCamera()
self.labelTypeCard.frame = CGRect(x: self.center.x - 50, y: self.frame.size.height / 8 + self.viewCamera.frame.origin.y - 40, width: 100, height: 20)
......@@ -128,19 +133,27 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
// Configure the camera
func loadCamera() {
captureSession.sessionPreset = .hd1920x1080
captureSession.sessionPreset = .high
var device : AVCaptureDevice?
if #available(iOS 11.1, *) {
guard let backCamera = AVCaptureDevice.DiscoverySession(
guard let devicetype = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .back).devices.first else {
print("Unable to access back camera!")
return
fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
}
device = devicetype
} else if #available(iOS 11.0, *) {
guard let devicetype = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera],
mediaType: .video,
position: .back).devices.first else {
fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
}
device = devicetype
}
do {
self.inputCamera = try AVCaptureDeviceInput(device: backCamera)
self.inputCamera = try AVCaptureDeviceInput(device: device!)
if captureSession.canAddInput(self.inputCamera) {
captureSession.addInput(self.inputCamera)
......@@ -163,7 +176,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
}
//set frame preview
......@@ -181,62 +194,40 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
let overlay = createOverlay(frame: self.bounds)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
// Set the card placement region
func createOverlay(frame: CGRect, colorBorder: CGColor = UIColor.clear.cgColor) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.bounds.size.width
let fHeight = self.bounds.size.height
let squareWidth = fWidth/2
let topLeft = CGPoint(x: fWidth/2-squareWidth/3, y: fHeight/4)
path.addRoundedRect(in: CGRect(x: self.viewCamera.frame.origin.x + fWidth/20, y: topLeft.y - fHeight/20,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 0, cornerHeight: 0)
let borderLayer = CAShapeLayer()
borderLayer.path = path // Reuse the Bezier path
borderLayer.fillColor = UIColor.clear.cgColor
borderLayer.strokeColor = colorBorder
borderLayer.lineWidth = 5
borderLayer.frame = overlayView.bounds
overlayView.layer.addSublayer(borderLayer)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
public override func draw(_ rect: CGRect) {
initOverLay()
}
func initOverLay(){
if self.overlayView == nil {
self.loadCamera()
let fWidth = self.bounds.size.width
self.overlayView = OverLayCardView(frame: self.viewCamera.bounds)
self.overlayView!.marginTop = self.viewCamera.frame.origin.x + fWidth/20
previewWidth = self.viewCamera.frame.width * 18 / 20
previewHeight = self.viewCamera.frame.width * 18 * 3 / 20 / 4
self.overlayView!.previewWidth = self.previewWidth
self.overlayView!.previewHeight = self.previewHeight
self.overlayView!.borderLength = 50
self.overlayView!.borderWidth = 2
self.overlayView!.connerRadius = 0
viewCamera.addSubview(self.overlayView!)
self.overlayView!.setLayer()
}
}
// Process the photo data after capture
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else { return }
let cropImage = self.cropImage(image: UIImage(data: imageData)!, rect: CGRect(x: UIImage(data: imageData)!.size.width / 20, y: UIImage(data: imageData)!.size.height / 8 + UIImage(data: imageData)!.size.height / 50, width: UIImage(data: imageData)!.size.width * 18 / 20, height: UIImage(data: imageData)!.size.width * 18 / 20 * 3 / 4 ), scale: 1.0)
let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
self.completionSuccessCardStep(nil, urlImage, nil)
}
}
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x , y: -rect.origin.y ))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
// Photo capture event
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
......@@ -249,41 +240,6 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
}
}
// func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
// let validateView = SBKValidateInput.shared
// let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
// var imageOutput: UIImage?
// if UIDevice.current.userInterfaceIdiom == .pad {
// imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 2 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4), scale: 1.0)
// } else {
// imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height * 1.5 / 7, width: imageInput.size.width - imageInput.size.width/10, height: (imageInput.size.width - imageInput.size.width/10) * 3/4 + 7), scale: 1.0)
// }
//
// let ciimage = CIImage(image: imageOutput!)
// let eaglContext = EAGLContext(api: .openGLES2)
// let tmpcontext = CIContext(eaglContext: eaglContext!)
// let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
//
// return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
// }
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if UIDevice.current.userInterfaceIdiom == .pad {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 / 20), scale: 1.0)
} else {
// Crop the image for iPhone
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width/20, y: imageInput.size.height / 6, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 * 3 / 20 / 4), scale: 1.0)
}
imageOutput = validateView.formatImageModal(image: imageOutput!, size: CGSize(width: 224, height: 224))
let ciimage = CIImage(image: imageOutput!)
let tmpcontext = CIContext(options: nil)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
}
extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
......@@ -294,21 +250,33 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
if captureSession.canAddOutput(self.videoDataOutput) {
self.captureSession.addOutput(self.videoDataOutput)
}
//self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
public func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: self.resizeImageCard(pixelBuffer: imageFrameInput))
if self.cropZone == nil && self.overlayView != nil {
DispatchQueue.main.async {
let scaleSizeWidth = self.viewCamera.frame.width / CGFloat( CVPixelBufferGetWidth(imageFrameInput))
let scaleSizeHeight = self.viewCamera.frame.height / CGFloat( CVPixelBufferGetHeight(imageFrameInput))
let x = self.overlayView!.marginTop
let y = (self.viewCamera.frame.height - self.previewHeight) / 2
self.cropZone = CGRect(x: CGFloat(x) / scaleSizeWidth, y: CGFloat(y) / scaleSizeHeight, width: CGFloat(self.previewWidth) / scaleSizeWidth , height: CGFloat(self.previewHeight) / scaleSizeHeight )
self.cropImage = CGRect(x: CGFloat(x - 10) / scaleSizeWidth, y: CGFloat(y + 10) / scaleSizeHeight, width: CGFloat(self.previewWidth + 20) / scaleSizeWidth , height: CGFloat(self.previewHeight - 20) / scaleSizeHeight )
}
}
guard let crop = self.cropZone else {return}
guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else {return}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
if let overlayView = self.overlayView {
DispatchQueue.main.async {
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
......@@ -322,9 +290,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.frame, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_FRONT && self.typeCamera == .BACK {
......@@ -332,9 +298,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.frame, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_BACK && self.typeCamera == .FRONT {
......@@ -342,9 +306,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.frame, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
......@@ -352,10 +314,9 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.frame, colorBorder: UIColor.blue.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
overlayView.setBorderColor(color: UIColor.blue.cgColor)
}
}
}
}
}
......@@ -11,6 +11,7 @@ class SBKResultCapture: UIViewController {
@IBOutlet weak var imgPhotoCard: UIImageView!
public var imageData: Data?
public var dataCrop: Data?
var cropZone: CGRect?
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var lbDescription: UILabel!
......@@ -61,7 +62,9 @@ class SBKResultCapture: UIViewController {
if UIDevice.current.userInterfaceIdiom == .pad {
cropImage = SBKValidateInput.shared.cropImageHorizontal(image: imageCap!, rect: CGRect(x: imageCap!.size.width * 1 / 10, y: imageCap!.size.height * 3 / 20, width: imageCap!.size.width * 8 / 10, height: imageCap!.size.height * 8 / 10), scale: 1.0)!.rotate(radians: .pi / 2)
} else {
cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 20, y: imageCap!.size.height / 8 + imageCap!.size.height / 50, width: imageCap!.size.width * 18 / 20, height: imageCap!.size.width * 18 / 20 * 3 / 4 ), scale: scale)
if self.cropZone != nil {
cropImage = imageCap!.crop(rect: self.cropZone!, scale: 1.0)
}
}
dataCrop = cropImage!.pngData()
......@@ -72,14 +75,7 @@ class SBKResultCapture: UIViewController {
fatalError("init(coder:) has not been implemented")
}
// Process the image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x , y: -rect.origin.y ))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func navigateToFace() {
DispatchQueue.main.async {
......@@ -153,27 +149,3 @@ class SBKResultCapture: UIViewController {
}
}
extension UIImage {
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
// Trim off the extremely small float value to prevent core graphics from rounding it up
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
// Move origin to middle
context.translateBy(x: newSize.width/2, y: newSize.height/2)
// Rotate around middle
context.rotate(by: CGFloat(radians))
// Draw the image at its center
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
}
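// Usage sketch for the rotate helper above: iPad captures are cropped
// horizontally and then rotated 90° (pi/2 radians) back to portrait, as in
// SBKResultCapture. `captured` is a placeholder name for illustration.
private func exampleRotate(_ captured: UIImage) -> UIImage? {
    return captured.rotate(radians: .pi / 2)
}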
//
// OverLayView.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import AVFoundation
import UIKit
class OverLayCardView: UIView {
@IBInspectable var previewWidth: CGFloat = 100
@IBInspectable var previewHeight: CGFloat = 100
@IBInspectable var borderLength : CGFloat = 10
@IBInspectable var borderPadding : CGFloat = 0
@IBInspectable var borderWidth : CGFloat = 2
@IBInspectable var borderColor : CGColor = UIColor.white.cgColor
@IBInspectable var marginTop : CGFloat = 0
@IBInspectable var marginLeft : CGFloat = 0
@IBInspectable var connerRadius : CGFloat = 8
let vContainer: UIView = {
let view = UIView()
view.backgroundColor = UIColor.black.withAlphaComponent(0.6)
return view
}()
let vContainer2: UIView = {
let view = UIView()
return view
}()
override init(frame: CGRect) {
super.init(frame: frame)
vContainer.frame = self.bounds
self.addSubview(vContainer)
vContainer2.frame = self.bounds
self.addSubview(vContainer2)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setLayer(){
var x = marginTop
var y = marginLeft
if marginTop == 0 {
x = self.center.x - previewWidth/2
}
if marginLeft == 0 {
y = self.center.y - previewHeight/2
}
// Add the cutout mask to the dimmed container layer
let maskLayer = CALayer()
maskLayer.frame = self.bounds
let circleLayer = CAShapeLayer()
circleLayer.frame = CGRect(x:0 , y:0,width: self.frame.size.width,height: self.frame.size.height)
let finalPath = UIBezierPath(roundedRect: CGRect(x:0 , y:0,width: self.frame.size.width,height: self.frame.size.height), cornerRadius: 0)
let rectPath = UIBezierPath(roundedRect: CGRect(x: x, y: y,width: previewWidth, height: previewHeight), cornerRadius: connerRadius)
finalPath.append(rectPath.reversing())
circleLayer.path = finalPath.cgPath
maskLayer.addSublayer(circleLayer)
vContainer.layer.mask = maskLayer
let clearLayer = CALayer()
clearLayer.frame = vContainer2.bounds
//draw border radius
let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding*2, height: previewHeight - borderPadding*2), cornerRadius: connerRadius)
let shape = CAShapeLayer()
shape.path = path.cgPath
shape.strokeColor = borderColor
shape.lineWidth = borderWidth
shape.fillColor = UIColor.clear.cgColor
vContainer2.layer.addSublayer(shape)
}
func setBorderColor(color: CGColor){
if borderColor != color {
vContainer2.layer.sublayers = nil
borderColor = color
var x = marginTop
var y = marginLeft
if marginTop == 0 {
x = self.center.x - previewWidth/2
}
if marginLeft == 0 {
y = self.center.y - previewHeight/2
}
//draw border radius
let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding*2, height: previewHeight - borderPadding*2), cornerRadius: connerRadius)
let shape = CAShapeLayer()
shape.path = path.cgPath
shape.strokeColor = borderColor
shape.lineWidth = borderWidth
shape.fillColor = UIColor.clear.cgColor
vContainer2.layer.addSublayer(shape)
}
}
}
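// Usage sketch: dim everything outside a card-shaped cutout and frame it.
// `hostView` is a placeholder for whatever view hosts the camera preview;
// the geometry below is illustrative, mirroring how the capture VCs size it.
private func exampleOverlay(on hostView: UIView) -> OverLayCardView {
    let overlay = OverLayCardView(frame: hostView.bounds)
    overlay.previewWidth = hostView.bounds.width * 18 / 20
    overlay.previewHeight = overlay.previewWidth * 3 / 4   // ID-card aspect ratio
    overlay.borderLength = 50
    overlay.borderWidth = 2
    overlay.connerRadius = 8
    hostView.addSubview(overlay)
    overlay.setLayer()                                     // builds mask + border
    overlay.setBorderColor(color: UIColor.blue.cgColor)    // e.g. card validated
    return overlay
}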
//
// ExtCGImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
import CoreGraphics
import CoreImage
import VideoToolbox
extension CGImage{
func toCVPixelBuffer() -> CVPixelBuffer? {
let frameSize = CGSize(width: self.width, height: self.height)
var pixelBuffer:CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(frameSize.width), Int(frameSize.height), kCVPixelFormatType_32BGRA , nil, &pixelBuffer)
if status != kCVReturnSuccess {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags.init(rawValue: 0))
let data = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
let context = CGContext(data: data, width: Int(frameSize.width), height: Int(frameSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue)
context?.draw(self, in: CGRect(x: 0, y: 0, width: self.width, height: self.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
public static func create(pixelBuffer: CVPixelBuffer) -> CGImage? {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
return cgImage
}
}
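// Round-trip sketch for the helpers above: CVPixelBuffer -> CGImage -> crop ->
// CVPixelBuffer. This mirrors how the SDK crops camera frames before feeding
// them to the model; the crop rectangle is an illustrative value.
private func exampleRoundTrip(frame: CVPixelBuffer) -> CVPixelBuffer? {
    guard let cgImage = CGImage.create(pixelBuffer: frame) else { return nil }
    let rect = CGRect(x: 0, y: 0, width: cgImage.width / 2, height: cgImage.height / 2)
    return cgImage.cropping(to: rect)?.toCVPixelBuffer()
}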
//
// ExtCIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension CIImage{
func toUIImage() -> UIImage {
let eaglContext = EAGLContext(api: .openGLES2)
let context:CIContext = CIContext(eaglContext: eaglContext!)
let cgImage:CGImage = context.createCGImage(self, from: self.extent)!
let image:UIImage = UIImage.init(cgImage: cgImage)
return image
}
}
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
import UIKit
import Accelerate
extension CVPixelBuffer {
/**
Returns a thumbnail made by cropping the pixel buffer to its biggest square and
scaling the crop to the model's input dimensions.
*/
func centerThumbnail(ofSize size: CGSize) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
let thumbnailSize = min(imageWidth, imageHeight)
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
......@@ -139,5 +115,76 @@ extension CVPixelBuffer {
return pixelBuffer
}
func resized(to size: CGSize) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA ||
pixelBufferType == kCVPixelFormatType_32ARGB)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
// Finds the biggest square in the pixel buffer and advances rows based on it.
guard let inputBaseAddress = CVPixelBufferGetBaseAddress(self) else {
return nil
}
// Gets vImage Buffer from input image
var inputVImageBuffer = vImage_Buffer(data: inputBaseAddress, height: UInt(imageHeight), width: UInt(imageWidth), rowBytes: inputImageRowBytes)
let scaledImageRowBytes = Int(size.width) * imageChannels
guard let scaledImageBytes = malloc(Int(size.height) * scaledImageRowBytes) else {
return nil
}
// Allocates a vImage buffer for scaled image.
var scaledVImageBuffer = vImage_Buffer(data: scaledImageBytes, height: UInt(size.height), width: UInt(size.width), rowBytes: scaledImageRowBytes)
// Performs the scale operation on input image buffer and stores it in scaled image buffer.
let scaleError = vImageScale_ARGB8888(&inputVImageBuffer, &scaledVImageBuffer, nil, vImage_Flags(0))
CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
guard scaleError == kvImageNoError else {
return nil
}
let releaseCallBack: CVPixelBufferReleaseBytesCallback = {mutablePointer, pointer in
if let pointer = pointer {
free(UnsafeMutableRawPointer(mutating: pointer))
}
}
var scaledPixelBuffer: CVPixelBuffer?
// Converts the scaled vImage buffer to CVPixelBuffer
let conversionStatus = CVPixelBufferCreateWithBytes(nil, Int(size.width), Int(size.height), pixelBufferType, scaledImageBytes, scaledImageRowBytes, releaseCallBack, nil, nil, &scaledPixelBuffer)
guard conversionStatus == kCVReturnSuccess else {
free(scaledImageBytes)
return nil
}
return scaledPixelBuffer
}
func toUIImage() -> UIImage {
let ciimage : CIImage = CIImage(cvPixelBuffer: self)
let imageView : UIImage = ciimage.toUIImage()
return imageView
}
func crop(rect: CGRect, scale: CGFloat) -> CVPixelBuffer? {
// Note: `scale` is currently unused; `rect` is taken in pixel coordinates.
guard let imageCgi = CGImage.create(pixelBuffer: self),
let cropped = imageCgi.cropping(to: rect) else {
return nil
}
return cropped.toCVPixelBuffer()
}
}
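// Usage sketch (illustrative): prepare a camera frame for a fixed-size model input.
// The 224x224 size is an example value, not an SDK constant.
//
// let modelSize = CGSize(width: 224, height: 224)
// if let input = cameraFrame.centerThumbnail(ofSize: modelSize) {
//     // feed `input` to the TensorFlow Lite interpreter
// }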
//
// ExtUIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIImage {
// Process the image for display.
func crop(rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
self.draw(at: CGPoint(x: -rect.minX / scale, y: -rect.minY / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
// Assumes 4 bytes per pixel (RGBA) and a row stride of width * 4 in `dataImage`.
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
let r = CGFloat(dataImage[pixelInfo]) / CGFloat(255.0)
let g = CGFloat(dataImage[pixelInfo+1]) / CGFloat(255.0)
let b = CGFloat(dataImage[pixelInfo+2]) / CGFloat(255.0)
let a = CGFloat(dataImage[pixelInfo+3]) / CGFloat(255.0)
return UIColor(red: r, green: g, blue: b, alpha: a)
}
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
context.translateBy(x: newSize.width/2, y: newSize.height/2)
context.rotate(by: CGFloat(radians))
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
func resize(targetSize: CGSize) -> UIImage {
// Aspect-fit: scale by the smaller ratio so the whole image fits inside targetSize.
let size = self.size
let widthRatio = targetSize.width / self.size.width
let heightRatio = targetSize.height / self.size.height
var newSize: CGSize
if(widthRatio > heightRatio) {
newSize = CGSize(width: size.width * heightRatio, height: size.height * heightRatio)
} else {
newSize = CGSize(width: size.width * widthRatio, height: size.height * widthRatio)
}
let rect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, 1.0)
self.draw(in: rect)
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage!
}
func toCVPixel() -> CVPixelBuffer {
// Force-unwraps assume the image has a valid CIImage/CGImage backing.
// Note: EAGLContext is deprecated since iOS 12 (see ExtCIImage above).
let ciimage = CIImage(image: self)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return cgimage!.toCVPixelBuffer()!
}
}
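// Usage sketch (illustrative): normalize a captured photo before further processing.
// The rotation angle and target size are example values only.
//
// let upright = photo.rotate(radians: .pi / 2) ?? photo
// let scaled = upright.resize(targetSize: CGSize(width: 640, height: 480))
// let buffer = scaled.toCVPixel()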
......@@ -37,4 +37,24 @@ extension UIViewController {
func validateImage(image: Data) -> Bool {
// Placeholder: currently accepts every image without validation.
return true
}
func popupBackToCaptureCardFront() {
let alert = UIAlertController(title: "Confirm".localized(), message: "You will go back to front card photography.".localized(), preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "No".localized(), style: .cancel, handler: nil))
let actionOk = UIAlertAction(title: "Yes".localized(), style: .default, handler: { _ in
// Pop back to the card-capture screen if it is on the navigation stack.
guard let navigationController = self.navigationController else { return }
for controller in navigationController.viewControllers {
if controller.isKind(of: SBKCaptureCardVC.self) {
navigationController.popToViewController(controller, animated: true)
break
}
}
})
alert.addAction(actionOk)
present(alert, animated: true, completion: nil)
}
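// Usage sketch (illustrative): any view controller in the capture flow can offer
// a way back to the front-of-card step.
//
// self.popupBackToCaptureCardFront()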
}
......@@ -6,13 +6,45 @@ target 'OCR-SDK' do
use_frameworks!
# Pods for OCR-SDK
pod 'TensorFlowLiteSwift'
pod 'TensorFlowLiteSwift', '~>2.2.0'
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
#post_install do |installer|
# installer.pods_project.build_configurations.each do |config|
# config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
# config.build_settings["BITCODE_GENERATION_MODE"] = "bitcode"
# end
#end
#bitcode enable
post_install do |installer|
installer.pods_project.build_configurations.each do |config|
config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
end
installer.pods_project.targets.each do |target|
target.build_configurations.each do |config|
# set valid architectures
config.build_settings['VALID_ARCHS'] = 'arm64 armv7 armv7s i386 x86_64'
# build every architecture, not only the active one
config.build_settings['ONLY_ACTIVE_ARCH'] = 'NO'
config.build_settings['ENABLE_BITCODE'] = 'YES'
if config.name == 'Release' || config.name == 'Pro'
config.build_settings['BITCODE_GENERATION_MODE'] = 'bitcode'
else # Debug
config.build_settings['BITCODE_GENERATION_MODE'] = 'marker'
end
cflags = config.build_settings['OTHER_CFLAGS'] || ['$(inherited)']
if config.name == 'Release' || config.name == 'Pro'
cflags << '-fembed-bitcode'
else # Debug
cflags << '-fembed-bitcode-marker'
end
config.build_settings['OTHER_CFLAGS'] = cflags
end
end
end
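# Note: after editing this Podfile, re-run `pod install` so the settings above are
# regenerated into the Pods project; the pbxproj fragments below reflect that output
# (ENABLE_BITCODE, BITCODE_GENERATION_MODE, OTHER_CFLAGS, VALID_ARCHS).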
......@@ -4,7 +4,7 @@ PODS:
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
- TensorFlowLiteSwift
- TensorFlowLiteSwift (~> 2.2.0)
SPEC REPOS:
trunk:
......@@ -15,6 +15,6 @@ SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
PODFILE CHECKSUM: 61617ddc17c979c1fe27a952bf716e6bd14ac52c
COCOAPODS: 1.10.1
......@@ -4,7 +4,7 @@ PODS:
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
- TensorFlowLiteSwift
- TensorFlowLiteSwift (~> 2.2.0)
SPEC REPOS:
trunk:
......@@ -15,6 +15,6 @@ SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
PODFILE CHECKSUM: 61617ddc17c979c1fe27a952bf716e6bd14ac52c
COCOAPODS: 1.10.1
......@@ -402,6 +402,7 @@
baseConfigurationReference = AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = marker;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
......@@ -411,6 +412,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
......@@ -421,6 +423,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
......@@ -429,6 +436,7 @@
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -440,14 +448,22 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = bitcode;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Release;
};
......@@ -489,7 +505,6 @@
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
......@@ -552,7 +567,6 @@
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
......@@ -587,13 +601,21 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = marker;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Debug;
};
......@@ -601,6 +623,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = bitcode;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
......@@ -609,6 +632,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
......@@ -620,6 +644,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
......@@ -628,6 +657,7 @@
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -637,6 +667,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = marker;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
......@@ -645,6 +676,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
......@@ -656,6 +688,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
......@@ -663,6 +700,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -673,6 +711,7 @@
baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
......@@ -682,6 +721,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
......@@ -692,6 +732,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
......@@ -701,6 +746,7 @@
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
</dict>
</dict>
</plist>
......@@ -4,21 +4,23 @@
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DE1F4D51AD94C30627575AEE202FD099"
BuildableName = "Pods_OCR_SDK.framework"
BlueprintName = "Pods-OCR-SDK"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "10418167F619D6DA72BADAD10F9EC02B"
BuildableName = "TensorFlowLite.framework"
BlueprintName = "TensorFlowLiteSwift"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
#!/bin/sh
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
# If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
# resources to, so exit 0 (signalling the script phase was successful).
exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
case "${TARGETED_DEVICE_FAMILY:-}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
install_resource()
{
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
echo "$RESOURCE_PATH" || true
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
else
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
fi
fi
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1