Commit a67016f7 authored by Nguyễn Văn An

up model card id

parent 7088d89b
@@ -7,27 +7,14 @@
objects = {
/* Begin PBXBuildFile section */
60B3E4342745FC5C00D58AD2 /* idcard15072021.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */; };
9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9509925E25355E0300C570D8 /* SBKValidateCardView.swift */; };
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95182D0524B3343D00405EA9 /* liveness.tflite */; };
954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 954230E425344601006F13F9 /* valid_card_10102020.tflite */; };
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDB4247D171500AF50DE /* ExtString.swift */; };
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDC2247D1FA200AF50DE /* Localizable.strings */; };
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */; };
9546DDDC247E197800AF50DE /* Global.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDDB247E197800AF50DE /* Global.swift */; };
9551057C2477746A0053036F /* OCR_SDK.h in Headers */ = {isa = PBXBuildFile; fileRef = 9551057A2477746A0053036F /* OCR_SDK.h */; settings = {ATTRIBUTES = (Public, ); }; };
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510583247774CC0053036F /* SBOCRRequest.swift */; };
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510585247774CC0053036F /* ExtUiViewController.swift */; };
955105AD247774CC0053036F /* Loadding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510586247774CC0053036F /* Loadding.swift */; };
955105AE247774CC0053036F /* TutorialFace1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510588247774CC0053036F /* TutorialFace1.png */; };
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510589247774CC0053036F /* TutorialFaceP.png */; };
955105B0247774CC0053036F /* TutorialFace3.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058A247774CC0053036F /* TutorialFace3.png */; };
955105B1247774CC0053036F /* TutorialFace2.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058B247774CC0053036F /* TutorialFace2.png */; };
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058D247774CC0053036F /* TutorialFaceCheckBox.png */; };
955105B4247774CC0053036F /* background.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058E247774CC0053036F /* background.png */; };
955105B5247774CC0053036F /* iconCap.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058F247774CC0053036F /* iconCap.png */; };
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */; };
955105B9247774CC0053036F /* cmndF1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510593247774CC0053036F /* cmndF1.png */; };
955105BB247774CC0053036F /* cmndF2.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510595247774CC0053036F /* cmndF2.png */; };
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 95510598247774CC0053036F /* SBKTutorialFaceVC.xib */; };
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510599247774CC0053036F /* SBKTutorialFaceVC.swift */; };
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9551059B247774CC0053036F /* SBKTutorialVC.swift */; };
@@ -40,39 +27,64 @@
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */; };
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105AA247774CC0053036F /* SBKCaptureCardVC.swift */; };
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105C9247775290053036F /* SB_KYC_SDK.swift */; };
955105FA2477B52C0053036F /* back.png in Resources */ = {isa = PBXBuildFile; fileRef = 955105F92477B52C0053036F /* back.png */; };
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4B249083A1001FB052 /* SBValidateInput.swift */; };
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4D249098C2001FB052 /* ExtUIColor.swift */; };
955BECE624935A14001FB052 /* ic_record.png in Resources */ = {isa = PBXBuildFile; fileRef = 955BECE524935A14001FB052 /* ic_record.png */; };
955E7AC924D957140048FC06 /* Next@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AC824D957140048FC06 /* Next@2x.png */; };
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7ADE24D967B10048FC06 /* Card-2@2x.png */; };
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */; };
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */; };
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB56D24DBB9B7000C88D2 /* Back@2x.png */; };
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */; };
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */; };
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */; };
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */; };
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */; };
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */; };
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */; };
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */; };
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */ = {isa = PBXBuildFile; fileRef = 957DF5F224C035C700FE6A67 /* objcio.cer */; };
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9580130D2489F1EA00846F8A /* SBKRecordFace.swift */; };
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9580130E2489F1EA00846F8A /* SBKRecordFace.xib */; };
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801346248A237000846F8A /* SBKModelDataHandler.swift */; };
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */; };
958D36C224C18BB1004B27EB /* Pods_OCR_SDK.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
95A6BA6224E1627800A34ADD /* back_light.png in Resources */ = {isa = PBXBuildFile; fileRef = 95A6BA6124E1627800A34ADD /* back_light.png */; };
95FAB2672499C89400CE7913 /* rotate.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAB2662499C89400CE7913 /* rotate.png */; };
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51D24EA3FE300C161F2 /* Caution@2x.png */; };
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */; };
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56B24EA83C800C161F2 /* Place within the box.png */; };
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56C24EA83C900C161F2 /* Avoid glare.png */; };
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56D24EA83C900C161F2 /* Do not place outside.png */; };
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6EF25299E160058D675 /* SBKValidateCardView.xib */; };
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */; };
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */ = {isa = PBXBuildFile; fileRef = A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */; };
B7622A902646FB690077D3CF /* Global.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A782646FB690077D3CF /* Global.swift */; };
B7622A912646FB690077D3CF /* ExtUiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */; };
B7622A922646FB690077D3CF /* ExtCGImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7B2646FB690077D3CF /* ExtCGImage.swift */; };
B7622A932646FB690077D3CF /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7C2646FB690077D3CF /* ExtString.swift */; };
B7622A942646FB690077D3CF /* ExtUIColor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7D2646FB690077D3CF /* ExtUIColor.swift */; };
B7622A952646FB690077D3CF /* ExtCVPixelBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */; };
B7622A962646FB690077D3CF /* ExtUIImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A7F2646FB690077D3CF /* ExtUIImage.swift */; };
B7622A972646FB690077D3CF /* ExtCIImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A802646FB690077D3CF /* ExtCIImage.swift */; };
B7622A982646FB690077D3CF /* Loadding.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A812646FB690077D3CF /* Loadding.swift */; };
B7622AA52646FB690077D3CF /* SBValidateInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622A8F2646FB690077D3CF /* SBValidateInput.swift */; };
B7622AAE2647E5730077D3CF /* FaceDetection.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA72647E5730077D3CF /* FaceDetection.swift */; };
B7622AAF2647E5730077D3CF /* EMSimilarity.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA82647E5730077D3CF /* EMSimilarity.swift */; };
B7622AB02647E5730077D3CF /* OptionsFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AA92647E5730077D3CF /* OptionsFace.swift */; };
B7622AB12647E5730077D3CF /* AnchorOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAA2647E5730077D3CF /* AnchorOption.swift */; };
B7622AB22647E5730077D3CF /* NormalizeOp.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAB2647E5730077D3CF /* NormalizeOp.swift */; };
B7622AB32647E5730077D3CF /* Detection.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAC2647E5730077D3CF /* Detection.swift */; };
B7622AB42647E5730077D3CF /* LandMark.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AAD2647E5730077D3CF /* LandMark.swift */; };
B7622AC42647EB230077D3CF /* OverLayCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7622AC32647EB230077D3CF /* OverLayCardView.swift */; };
B7622AEB2647EE420077D3CF /* TutorialFace1.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACB2647EE420077D3CF /* TutorialFace1.png */; };
B7622AEC2647EE420077D3CF /* TutorialFaceP.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACC2647EE420077D3CF /* TutorialFaceP.png */; };
B7622AED2647EE420077D3CF /* Scan-4@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACD2647EE420077D3CF /* Scan-4@2x.png */; };
B7622AEE2647EE420077D3CF /* Back@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACE2647EE420077D3CF /* Back@2x.png */; };
B7622AEF2647EE420077D3CF /* TutorialFace3.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ACF2647EE420077D3CF /* TutorialFace3.png */; };
B7622AF02647EE420077D3CF /* TutorialFace2.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD02647EE420077D3CF /* TutorialFace2.png */; };
B7622AF12647EE420077D3CF /* Hat@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD12647EE420077D3CF /* Hat@2x.png */; };
B7622AF22647EE420077D3CF /* Glasses@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD22647EE420077D3CF /* Glasses@2x.png */; };
B7622AF32647EE420077D3CF /* Caution@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD32647EE420077D3CF /* Caution@2x.png */; };
B7622AF42647EE420077D3CF /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD42647EE420077D3CF /* Do not place outside.png */; };
B7622AF52647EE420077D3CF /* rotate.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD52647EE420077D3CF /* rotate.png */; };
B7622AF62647EE420077D3CF /* Button_Do@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD62647EE420077D3CF /* Button_Do@2x.png */; };
B7622AF72647EE420077D3CF /* Brighness@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD72647EE420077D3CF /* Brighness@2x.png */; };
B7622AF82647EE420077D3CF /* Card-2@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD82647EE420077D3CF /* Card-2@2x.png */; };
B7622AF92647EE420077D3CF /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */; };
B7622AFA2647EE420077D3CF /* TutorialFaceCheckBox.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */; };
B7622AFB2647EE420077D3CF /* background.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADB2647EE420077D3CF /* background.png */; };
B7622AFC2647EE420077D3CF /* Scan-5@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADC2647EE420077D3CF /* Scan-5@2x.png */; };
B7622AFD2647EE420077D3CF /* iconCap.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADD2647EE420077D3CF /* iconCap.png */; };
B7622AFE2647EE420077D3CF /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADE2647EE420077D3CF /* Place within the box.png */; };
B7622AFF2647EE420077D3CF /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622ADF2647EE420077D3CF /* Avoid glare.png */; };
B7622B002647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */; };
B7622B012647EE420077D3CF /* back_light.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE12647EE420077D3CF /* back_light.png */; };
B7622B022647EE420077D3CF /* Holdphone@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE22647EE420077D3CF /* Holdphone@2x.png */; };
B7622B032647EE420077D3CF /* ic_record.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE32647EE420077D3CF /* ic_record.png */; };
B7622B042647EE420077D3CF /* Next@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE42647EE420077D3CF /* Next@2x.png */; };
B7622B052647EE420077D3CF /* Scan-3@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE52647EE420077D3CF /* Scan-3@2x.png */; };
B7622B062647EE420077D3CF /* Scan-1@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE62647EE420077D3CF /* Scan-1@2x.png */; };
B7622B072647EE420077D3CF /* cmndF1.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE72647EE420077D3CF /* cmndF1.png */; };
B7622B082647EE420077D3CF /* back.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE82647EE420077D3CF /* back.png */; };
B7622B092647EE420077D3CF /* cmndF2.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AE92647EE420077D3CF /* cmndF2.png */; };
B7622B0A2647EE420077D3CF /* Passport-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */; };
CCCF85EB83511B97EF23244B /* Pods_OCR_SDK.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */; };
/* End PBXBuildFile section */
@@ -93,33 +105,20 @@
/* Begin PBXFileReference section */
2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.debug.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = idcard15072021.tflite; sourceTree = "<group>"; };
8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.release.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
9509925E25355E0300C570D8 /* SBKValidateCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKValidateCardView.swift; sourceTree = "<group>"; };
95182D0524B3343D00405EA9 /* liveness.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = liveness.tflite; sourceTree = "<group>"; };
954230E425344601006F13F9 /* valid_card_10102020.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = valid_card_10102020.tflite; sourceTree = "<group>"; };
9546DDB4247D171500AF50DE /* ExtString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
9546DDC1247D1FA200AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDC3247D1FAA00AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDD1247D2C0C00AF50DE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/SBKCaptureCardVC.xib; sourceTree = "<group>"; };
9546DDD4247D2C1700AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDD6247D2C1A00AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDDB247E197800AF50DE /* Global.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Global.swift; sourceTree = "<group>"; };
955105772477746A0053036F /* SB_KYC_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SB_KYC_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
9551057A2477746A0053036F /* OCR_SDK.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = OCR_SDK.h; sourceTree = "<group>"; };
9551057B2477746A0053036F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
95510583247774CC0053036F /* SBOCRRequest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBOCRRequest.swift; sourceTree = "<group>"; };
95510585247774CC0053036F /* ExtUiViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUiViewController.swift; sourceTree = "<group>"; };
95510586247774CC0053036F /* Loadding.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Loadding.swift; sourceTree = "<group>"; };
95510588247774CC0053036F /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
95510589247774CC0053036F /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
9551058A247774CC0053036F /* TutorialFace3.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace3.png; sourceTree = "<group>"; };
9551058B247774CC0053036F /* TutorialFace2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace2.png; sourceTree = "<group>"; };
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceCheckBox.png; sourceTree = "<group>"; };
9551058E247774CC0053036F /* background.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = background.png; sourceTree = "<group>"; };
9551058F247774CC0053036F /* iconCap.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = iconCap.png; sourceTree = "<group>"; };
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Screen Shot 2020-05-12 at 15.14.44.png"; sourceTree = "<group>"; };
95510593247774CC0053036F /* cmndF1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF1.png; sourceTree = "<group>"; };
95510595247774CC0053036F /* cmndF2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF2.png; sourceTree = "<group>"; };
95510598247774CC0053036F /* SBKTutorialFaceVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKTutorialFaceVC.xib; sourceTree = "<group>"; };
95510599247774CC0053036F /* SBKTutorialFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialFaceVC.swift; sourceTree = "<group>"; };
9551059B247774CC0053036F /* SBKTutorialVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialVC.swift; sourceTree = "<group>"; };
@@ -132,39 +131,65 @@
955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureFaceVC.swift; sourceTree = "<group>"; };
955105AA247774CC0053036F /* SBKCaptureCardVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureCardVC.swift; sourceTree = "<group>"; };
955105C9247775290053036F /* SB_KYC_SDK.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SB_KYC_SDK.swift; sourceTree = "<group>"; };
955105F92477B52C0053036F /* back.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back.png; sourceTree = "<group>"; };
955BEC4B249083A1001FB052 /* SBValidateInput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBValidateInput.swift; sourceTree = "<group>"; };
955BEC4D249098C2001FB052 /* ExtUIColor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtUIColor.swift; sourceTree = "<group>"; };
955BECE524935A14001FB052 /* ic_record.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_record.png; sourceTree = "<group>"; };
955E7AC824D957140048FC06 /* Next@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Next@2x.png"; sourceTree = "<group>"; };
955E7ADE24D967B10048FC06 /* Card-2@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Card-2@2x.png"; sourceTree = "<group>"; };
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Passport-2 copy@2x.png"; sourceTree = "<group>"; };
956BB56D24DBB9B7000C88D2 /* Back@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Back@2x.png"; sourceTree = "<group>"; };
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Hat@2x.png"; sourceTree = "<group>"; };
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Glasses@2x.png"; sourceTree = "<group>"; };
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Brighness@2x.png"; sourceTree = "<group>"; };
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Holdphone@2x.png"; sourceTree = "<group>"; };
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-1@2x.png"; sourceTree = "<group>"; };
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-3@2x.png"; sourceTree = "<group>"; };
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-4@2x.png"; sourceTree = "<group>"; };
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-5@2x.png"; sourceTree = "<group>"; };
957DF5F224C035C700FE6A67 /* objcio.cer */ = {isa = PBXFileReference; lastKnownFileType = file; path = objcio.cer; sourceTree = "<group>"; };
9580130D2489F1EA00846F8A /* SBKRecordFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBKRecordFace.swift; sourceTree = "<group>"; };
9580130E2489F1EA00846F8A /* SBKRecordFace.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = SBKRecordFace.xib; sourceTree = "<group>"; };
95801346248A237000846F8A /* SBKModelDataHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKModelDataHandler.swift; sourceTree = "<group>"; };
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CVPixelBufferExtension.swift; sourceTree = "<group>"; };
95A6BA6124E1627800A34ADD /* back_light.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back_light.png; sourceTree = "<group>"; };
95FAB2662499C89400CE7913 /* rotate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = rotate.png; sourceTree = "<group>"; };
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Caution@2x.png"; sourceTree = "<group>"; };
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Button_Do@2x.png"; sourceTree = "<group>"; };
95FAF56B24EA83C800C161F2 /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
95FAF56C24EA83C900C161F2 /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
95FAF56D24EA83C900C161F2 /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
A442B6EF25299E160058D675 /* SBKValidateCardView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKValidateCardView.xib; sourceTree = "<group>"; };
A442B6F12529A13A0058D675 /* SBKRecordFaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKRecordFaceView.swift; sourceTree = "<group>"; };
A442B6F32529A1440058D675 /* SBKRecordFaceView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKRecordFaceView.xib; sourceTree = "<group>"; };
B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
B7622A782646FB690077D3CF /* Global.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Global.swift; sourceTree = "<group>"; };
B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUiViewController.swift; sourceTree = "<group>"; };
B7622A7B2646FB690077D3CF /* ExtCGImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCGImage.swift; sourceTree = "<group>"; };
B7622A7C2646FB690077D3CF /* ExtString.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
B7622A7D2646FB690077D3CF /* ExtUIColor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUIColor.swift; sourceTree = "<group>"; };
B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCVPixelBuffer.swift; sourceTree = "<group>"; };
B7622A7F2646FB690077D3CF /* ExtUIImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUIImage.swift; sourceTree = "<group>"; };
B7622A802646FB690077D3CF /* ExtCIImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtCIImage.swift; sourceTree = "<group>"; };
B7622A812646FB690077D3CF /* Loadding.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Loadding.swift; sourceTree = "<group>"; };
B7622A8F2646FB690077D3CF /* SBValidateInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBValidateInput.swift; sourceTree = "<group>"; };
B7622AA72647E5730077D3CF /* FaceDetection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceDetection.swift; sourceTree = "<group>"; };
B7622AA82647E5730077D3CF /* EMSimilarity.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EMSimilarity.swift; sourceTree = "<group>"; };
B7622AA92647E5730077D3CF /* OptionsFace.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OptionsFace.swift; sourceTree = "<group>"; };
B7622AAA2647E5730077D3CF /* AnchorOption.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AnchorOption.swift; sourceTree = "<group>"; };
B7622AAB2647E5730077D3CF /* NormalizeOp.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NormalizeOp.swift; sourceTree = "<group>"; };
B7622AAC2647E5730077D3CF /* Detection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Detection.swift; sourceTree = "<group>"; };
B7622AAD2647E5730077D3CF /* LandMark.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LandMark.swift; sourceTree = "<group>"; };
B7622AB52647E6230077D3CF /* face_detection_front.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = face_detection_front.tflite; sourceTree = "<group>"; };
B7622AC32647EB230077D3CF /* OverLayCardView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OverLayCardView.swift; sourceTree = "<group>"; };
B7622ACB2647EE420077D3CF /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
B7622ACC2647EE420077D3CF /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
B7622ACD2647EE420077D3CF /* Scan-4@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-4@2x.png"; sourceTree = "<group>"; };
B7622ACE2647EE420077D3CF /* Back@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Back@2x.png"; sourceTree = "<group>"; };
B7622ACF2647EE420077D3CF /* TutorialFace3.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace3.png; sourceTree = "<group>"; };
B7622AD02647EE420077D3CF /* TutorialFace2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace2.png; sourceTree = "<group>"; };
B7622AD12647EE420077D3CF /* Hat@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Hat@2x.png"; sourceTree = "<group>"; };
B7622AD22647EE420077D3CF /* Glasses@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Glasses@2x.png"; sourceTree = "<group>"; };
B7622AD32647EE420077D3CF /* Caution@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Caution@2x.png"; sourceTree = "<group>"; };
B7622AD42647EE420077D3CF /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
B7622AD52647EE420077D3CF /* rotate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = rotate.png; sourceTree = "<group>"; };
B7622AD62647EE420077D3CF /* Button_Do@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Button_Do@2x.png"; sourceTree = "<group>"; };
B7622AD72647EE420077D3CF /* Brighness@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Brighness@2x.png"; sourceTree = "<group>"; };
B7622AD82647EE420077D3CF /* Card-2@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Card-2@2x.png"; sourceTree = "<group>"; };
B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceCheckBox.png; sourceTree = "<group>"; };
B7622ADB2647EE420077D3CF /* background.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = background.png; sourceTree = "<group>"; };
B7622ADC2647EE420077D3CF /* Scan-5@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-5@2x.png"; sourceTree = "<group>"; };
B7622ADD2647EE420077D3CF /* iconCap.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = iconCap.png; sourceTree = "<group>"; };
B7622ADE2647EE420077D3CF /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
B7622ADF2647EE420077D3CF /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Screen Shot 2020-05-12 at 15.14.44.png"; sourceTree = "<group>"; };
B7622AE12647EE420077D3CF /* back_light.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back_light.png; sourceTree = "<group>"; };
B7622AE22647EE420077D3CF /* Holdphone@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Holdphone@2x.png"; sourceTree = "<group>"; };
B7622AE32647EE420077D3CF /* ic_record.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_record.png; sourceTree = "<group>"; };
B7622AE42647EE420077D3CF /* Next@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Next@2x.png"; sourceTree = "<group>"; };
B7622AE52647EE420077D3CF /* Scan-3@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-3@2x.png"; sourceTree = "<group>"; };
B7622AE62647EE420077D3CF /* Scan-1@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-1@2x.png"; sourceTree = "<group>"; };
B7622AE72647EE420077D3CF /* cmndF1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF1.png; sourceTree = "<group>"; };
B7622AE82647EE420077D3CF /* back.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back.png; sourceTree = "<group>"; };
B7622AE92647EE420077D3CF /* cmndF2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF2.png; sourceTree = "<group>"; };
B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Passport-2 copy@2x.png"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -219,11 +244,12 @@
isa = PBXGroup;
children = (
957DF5F824C036E100FE6A67 /* FileSSL */,
B7622AA62647E5730077D3CF /* Models */,
9580134A248A25E700846F8A /* Model */,
95801345248A237000846F8A /* ModelDataHandler */,
95510582247774CC0053036F /* service */,
95510596247774CC0053036F /* UI */,
95510584247774CC0053036F /* Utils */,
B7622A772646FB690077D3CF /* Utils */,
9551057A2477746A0053036F /* OCR_SDK.h */,
9551057B2477746A0053036F /* Info.plist */,
955105C9247775290053036F /* SB_KYC_SDK.swift */,
@@ -240,63 +266,10 @@
path = service;
sourceTree = "<group>";
};
95510584247774CC0053036F /* Utils */ = {
isa = PBXGroup;
children = (
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */,
95510585247774CC0053036F /* ExtUiViewController.swift */,
95510586247774CC0053036F /* Loadding.swift */,
95510587247774CC0053036F /* image */,
9546DDB4247D171500AF50DE /* ExtString.swift */,
9546DDDB247E197800AF50DE /* Global.swift */,
955BEC4B249083A1001FB052 /* SBValidateInput.swift */,
955BEC4D249098C2001FB052 /* ExtUIColor.swift */,
);
path = Utils;
sourceTree = "<group>";
};
95510587247774CC0053036F /* image */ = {
isa = PBXGroup;
children = (
95FAF56C24EA83C900C161F2 /* Avoid glare.png */,
95FAF56D24EA83C900C161F2 /* Do not place outside.png */,
95FAF56B24EA83C800C161F2 /* Place within the box.png */,
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */,
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */,
95A6BA6124E1627800A34ADD /* back_light.png */,
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */,
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */,
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */,
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */,
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */,
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */,
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */,
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */,
956BB56D24DBB9B7000C88D2 /* Back@2x.png */,
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */,
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */,
955E7ADE24D967B10048FC06 /* Card-2@2x.png */,
955E7AC824D957140048FC06 /* Next@2x.png */,
95FAB2662499C89400CE7913 /* rotate.png */,
955BECE524935A14001FB052 /* ic_record.png */,
955105F92477B52C0053036F /* back.png */,
95510588247774CC0053036F /* TutorialFace1.png */,
95510589247774CC0053036F /* TutorialFaceP.png */,
9551058A247774CC0053036F /* TutorialFace3.png */,
9551058B247774CC0053036F /* TutorialFace2.png */,
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */,
9551058E247774CC0053036F /* background.png */,
9551058F247774CC0053036F /* iconCap.png */,
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */,
95510593247774CC0053036F /* cmndF1.png */,
95510595247774CC0053036F /* cmndF2.png */,
);
path = image;
sourceTree = "<group>";
};
95510596247774CC0053036F /* UI */ = {
isa = PBXGroup;
children = (
B7622AC22647EB230077D3CF /* View */,
9580130C2489F1C100846F8A /* SBKRecordFace */,
95510597247774CC0053036F /* SBKTutorialFace */,
9551059A247774CC0053036F /* SBKTutorial */,
@@ -394,12 +367,101 @@
9580134A248A25E700846F8A /* Model */ = {
isa = PBXGroup;
children = (
60B3E4332745FC5C00D58AD2 /* idcard15072021.tflite */,
B7622AB52647E6230077D3CF /* face_detection_front.tflite */,
954230E425344601006F13F9 /* valid_card_10102020.tflite */,
95182D0524B3343D00405EA9 /* liveness.tflite */,
);
path = Model;
sourceTree = "<group>";
};
B7622A772646FB690077D3CF /* Utils */ = {
isa = PBXGroup;
children = (
B7622A792646FB690077D3CF /* Extension */,
B7622A782646FB690077D3CF /* Global.swift */,
B7622ACA2647EE420077D3CF /* image */,
B7622A812646FB690077D3CF /* Loadding.swift */,
B7622A8F2646FB690077D3CF /* SBValidateInput.swift */,
);
path = Utils;
sourceTree = "<group>";
};
B7622A792646FB690077D3CF /* Extension */ = {
isa = PBXGroup;
children = (
B7622A7A2646FB690077D3CF /* ExtUiViewController.swift */,
B7622A7B2646FB690077D3CF /* ExtCGImage.swift */,
B7622A7C2646FB690077D3CF /* ExtString.swift */,
B7622A7D2646FB690077D3CF /* ExtUIColor.swift */,
B7622A7E2646FB690077D3CF /* ExtCVPixelBuffer.swift */,
B7622A7F2646FB690077D3CF /* ExtUIImage.swift */,
B7622A802646FB690077D3CF /* ExtCIImage.swift */,
);
path = Extension;
sourceTree = "<group>";
};
B7622AA62647E5730077D3CF /* Models */ = {
isa = PBXGroup;
children = (
B7622AA72647E5730077D3CF /* FaceDetection.swift */,
B7622AA82647E5730077D3CF /* EMSimilarity.swift */,
B7622AA92647E5730077D3CF /* OptionsFace.swift */,
B7622AAA2647E5730077D3CF /* AnchorOption.swift */,
B7622AAB2647E5730077D3CF /* NormalizeOp.swift */,
B7622AAC2647E5730077D3CF /* Detection.swift */,
B7622AAD2647E5730077D3CF /* LandMark.swift */,
);
path = Models;
sourceTree = "<group>";
};
B7622AC22647EB230077D3CF /* View */ = {
isa = PBXGroup;
children = (
B7622AC32647EB230077D3CF /* OverLayCardView.swift */,
);
path = View;
sourceTree = "<group>";
};
B7622ACA2647EE420077D3CF /* image */ = {
isa = PBXGroup;
children = (
B7622ACB2647EE420077D3CF /* TutorialFace1.png */,
B7622ACC2647EE420077D3CF /* TutorialFaceP.png */,
B7622ACD2647EE420077D3CF /* Scan-4@2x.png */,
B7622ACE2647EE420077D3CF /* Back@2x.png */,
B7622ACF2647EE420077D3CF /* TutorialFace3.png */,
B7622AD02647EE420077D3CF /* TutorialFace2.png */,
B7622AD12647EE420077D3CF /* Hat@2x.png */,
B7622AD22647EE420077D3CF /* Glasses@2x.png */,
B7622AD32647EE420077D3CF /* Caution@2x.png */,
B7622AD42647EE420077D3CF /* Do not place outside.png */,
B7622AD52647EE420077D3CF /* rotate.png */,
B7622AD62647EE420077D3CF /* Button_Do@2x.png */,
B7622AD72647EE420077D3CF /* Brighness@2x.png */,
B7622AD82647EE420077D3CF /* Card-2@2x.png */,
B7622AD92647EE420077D3CF /* Car-2 copy@2x.png */,
B7622ADA2647EE420077D3CF /* TutorialFaceCheckBox.png */,
B7622ADB2647EE420077D3CF /* background.png */,
B7622ADC2647EE420077D3CF /* Scan-5@2x.png */,
B7622ADD2647EE420077D3CF /* iconCap.png */,
B7622ADE2647EE420077D3CF /* Place within the box.png */,
B7622ADF2647EE420077D3CF /* Avoid glare.png */,
B7622AE02647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png */,
B7622AE12647EE420077D3CF /* back_light.png */,
B7622AE22647EE420077D3CF /* Holdphone@2x.png */,
B7622AE32647EE420077D3CF /* ic_record.png */,
B7622AE42647EE420077D3CF /* Next@2x.png */,
B7622AE52647EE420077D3CF /* Scan-3@2x.png */,
B7622AE62647EE420077D3CF /* Scan-1@2x.png */,
B7622AE72647EE420077D3CF /* cmndF1.png */,
B7622AE82647EE420077D3CF /* back.png */,
B7622AE92647EE420077D3CF /* cmndF2.png */,
B7622AEA2647EE420077D3CF /* Passport-2 copy@2x.png */,
);
path = image;
sourceTree = "<group>";
};
BF236FF2605D4B46583CACB8 /* Frameworks */ = {
isa = PBXGroup;
children = (
@@ -480,50 +542,51 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
B7622AF12647EE420077D3CF /* Hat@2x.png in Resources */,
B7622AFB2647EE420077D3CF /* background.png in Resources */,
B7622AED2647EE420077D3CF /* Scan-4@2x.png in Resources */,
954230E525344620006F13F9 /* valid_card_10102020.tflite in Resources */,
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */,
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */,
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */,
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */,
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */,
95A6BA6224E1627800A34ADD /* back_light.png in Resources */,
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */,
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */,
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */,
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */,
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */,
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */,
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */,
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */,
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */,
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */,
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */,
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */,
955E7AC924D957140048FC06 /* Next@2x.png in Resources */,
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */,
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */,
95FAB2672499C89400CE7913 /* rotate.png in Resources */,
B7622B072647EE420077D3CF /* cmndF1.png in Resources */,
A442B6F025299E160058D675 /* SBKValidateCardView.xib in Resources */,
B7622AF82647EE420077D3CF /* Card-2@2x.png in Resources */,
B7622AF32647EE420077D3CF /* Caution@2x.png in Resources */,
B7622AEB2647EE420077D3CF /* TutorialFace1.png in Resources */,
A442B6F42529A1440058D675 /* SBKRecordFaceView.xib in Resources */,
955BECE624935A14001FB052 /* ic_record.png in Resources */,
955105FA2477B52C0053036F /* back.png in Resources */,
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
955105AE247774CC0053036F /* TutorialFace1.png in Resources */,
955105B4247774CC0053036F /* background.png in Resources */,
955105BB247774CC0053036F /* cmndF2.png in Resources */,
955105C4247774CC0053036F /* SBKResultFaceVC.xib in Resources */,
955105B5247774CC0053036F /* iconCap.png in Resources */,
955105B9247774CC0053036F /* cmndF1.png in Resources */,
955105B1247774CC0053036F /* TutorialFace2.png in Resources */,
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */,
B7622B082647EE420077D3CF /* back.png in Resources */,
B7622AF72647EE420077D3CF /* Brighness@2x.png in Resources */,
B7622AFC2647EE420077D3CF /* Scan-5@2x.png in Resources */,
B7622AEC2647EE420077D3CF /* TutorialFaceP.png in Resources */,
B7622AF62647EE420077D3CF /* Button_Do@2x.png in Resources */,
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */,
B7622AF42647EE420077D3CF /* Do not place outside.png in Resources */,
955105C5247774CC0053036F /* SBKCaptureFaceVC.xib in Resources */,
955105BF247774CC0053036F /* SBKTutorialVC.xib in Resources */,
B7622AFF2647EE420077D3CF /* Avoid glare.png in Resources */,
B7622B012647EE420077D3CF /* back_light.png in Resources */,
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */,
B7622AEE2647EE420077D3CF /* Back@2x.png in Resources */,
B7622B022647EE420077D3CF /* Holdphone@2x.png in Resources */,
B7622B002647EE420077D3CF /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */,
B7622B062647EE420077D3CF /* Scan-1@2x.png in Resources */,
955105C1247774CC0053036F /* SBKResultCapture.xib in Resources */,
955105B0247774CC0053036F /* TutorialFace3.png in Resources */,
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */,
B7622AF22647EE420077D3CF /* Glasses@2x.png in Resources */,
B7622AF52647EE420077D3CF /* rotate.png in Resources */,
B7622B0A2647EE420077D3CF /* Passport-2 copy@2x.png in Resources */,
B7622B032647EE420077D3CF /* ic_record.png in Resources */,
B7622B052647EE420077D3CF /* Scan-3@2x.png in Resources */,
B7622AFE2647EE420077D3CF /* Place within the box.png in Resources */,
B7622AFA2647EE420077D3CF /* TutorialFaceCheckBox.png in Resources */,
B7622AFD2647EE420077D3CF /* iconCap.png in Resources */,
B7622B042647EE420077D3CF /* Next@2x.png in Resources */,
60B3E4342745FC5C00D58AD2 /* idcard15072021.tflite in Resources */,
B7622B092647EE420077D3CF /* cmndF2.png in Resources */,
B7622AF02647EE420077D3CF /* TutorialFace2.png in Resources */,
B7622AEF2647EE420077D3CF /* TutorialFace3.png in Resources */,
B7622AF92647EE420077D3CF /* Car-2 copy@2x.png in Resources */,
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -560,25 +623,36 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
955105AD247774CC0053036F /* Loadding.swift in Sources */,
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */,
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */,
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */,
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */,
B7622A932646FB690077D3CF /* ExtString.swift in Sources */,
B7622AB42647E5730077D3CF /* LandMark.swift in Sources */,
B7622A972646FB690077D3CF /* ExtCIImage.swift in Sources */,
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */,
A442B6F22529A13A0058D675 /* SBKRecordFaceView.swift in Sources */,
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */,
B7622A962646FB690077D3CF /* ExtUIImage.swift in Sources */,
B7622AAE2647E5730077D3CF /* FaceDetection.swift in Sources */,
B7622A912646FB690077D3CF /* ExtUiViewController.swift in Sources */,
955105C3247774CC0053036F /* SBKResultFaceVC.swift in Sources */,
B7622A902646FB690077D3CF /* Global.swift in Sources */,
B7622AB22647E5730077D3CF /* NormalizeOp.swift in Sources */,
B7622A922646FB690077D3CF /* ExtCGImage.swift in Sources */,
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */,
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */,
9546DDDC247E197800AF50DE /* Global.swift in Sources */,
B7622A952646FB690077D3CF /* ExtCVPixelBuffer.swift in Sources */,
B7622AC42647EB230077D3CF /* OverLayCardView.swift in Sources */,
B7622AB02647E5730077D3CF /* OptionsFace.swift in Sources */,
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */,
9509925F25355E0300C570D8 /* SBKValidateCardView.swift in Sources */,
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */,
B7622AA52646FB690077D3CF /* SBValidateInput.swift in Sources */,
B7622AB32647E5730077D3CF /* Detection.swift in Sources */,
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */,
B7622A982646FB690077D3CF /* Loadding.swift in Sources */,
955105C2247774CC0053036F /* SBKResultCapture.swift in Sources */,
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */,
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */,
B7622AB12647E5730077D3CF /* AnchorOption.swift in Sources */,
B7622AAF2647E5730077D3CF /* EMSimilarity.swift in Sources */,
B7622A942646FB690077D3CF /* ExtUIColor.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -611,6 +685,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
@@ -642,6 +717,7 @@
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_BITCODE = YES;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
@@ -662,7 +738,9 @@
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-fembed-bitcode";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
VERSIONING_SYSTEM = "apple-generic";
@@ -674,6 +752,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
@@ -705,6 +784,7 @@
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_BITCODE = YES;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
@@ -719,7 +799,9 @@
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-fembed-bitcode";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
VALIDATE_PRODUCT = YES;
@@ -740,6 +822,10 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/OCR-SDK",
);
HEADER_SEARCH_PATHS = (
"$(inherited)",
"${SRCROOT}/TensorFlowLite",
@@ -774,6 +860,10 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/OCR-SDK",
);
HEADER_SEARCH_PATHS = (
"$(inherited)",
"${SRCROOT}/TensorFlowLite",
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "B08B0591-1A45-4415-9C04-B83659FBFE35"
type = "0"
version = "2.0">
</Bucket>
@@ -35,7 +35,8 @@ typealias FileInfo = (name: String, extension: String)
/// Information about the MobileNet model.
enum MobileNet {
  static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
  static let cardModel: FileInfo = (name: "valid_card_10102020", extension: "tflite")
  static let cardModel: FileInfo = (name: "idcard15072021", extension: "tflite")
  static let landMarkModel: FileInfo = (name: "face_detection_front", extension: "tflite")
}
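
For orientation, here is a minimal usage sketch of the updated cardModel entry, assuming the failable SBKModelDataHandler initializer shown further down in this file (the variable name and error message are illustrative, not part of this commit):

// Hypothetical caller: the handler's init returns nil when the .tflite file
// cannot be found in the framework bundle.
guard let cardModelHandler = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel) else {
  fatalError("idcard15072021.tflite is missing from the framework bundle")
}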
/// This class handles all data preprocessing and makes calls to run inference on a given frame
@@ -75,7 +76,7 @@ class SBKModelDataHandler {
  /// labels files are successfully loaded from the app's main bundle. Default `threadCount` is 1.
  init?(modelFileInfo: FileInfo, threadCount: Int = 1) {
    let modelFilename = modelFileInfo.name
    // Construct the path to the model file.
    let bundle = Bundle(for: SBKRecordFace.self)
    guard let modelPath = bundle.path(
@@ -86,7 +87,6 @@ class SBKModelDataHandler {
      return nil
    }
    let delegate = MetalDelegate()
    // Specify the options for the `Interpreter`.
    self.threadCount = threadCount
@@ -94,7 +94,7 @@ class SBKModelDataHandler {
    options.threadCount = threadCount
    do {
      // Create the `Interpreter`.
      interpreter = try Interpreter(modelPath: modelPath, options: options, delegates: [delegate])
      interpreter = try Interpreter(modelPath: modelPath, options: options)
      // Allocate memory for the model's input `Tensor`s.
      try interpreter.allocateTensors()
    } catch let error {
......@@ -142,87 +142,63 @@ class SBKModelDataHandler {
// MARK: - Internal Methods
/// Performs image preprocessing, invokes the `Interpreter`, and processes the inference results.
func runModel(onFrame pixelBuffer: CVPixelBuffer) -> [Float]? {
    let sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer)
    assert(sourcePixelFormat == kCVPixelFormatType_32ARGB ||
           sourcePixelFormat == kCVPixelFormatType_32BGRA ||
           sourcePixelFormat == kCVPixelFormatType_32RGBA)
    let imageChannels = 4
    assert(imageChannels >= inputChannels)
    // Scale the image down to the model's input dimensions.
    let scaledSize = CGSize(width: inputWidth, height: inputHeight)
    guard let thumbnailPixelBuffer = pixelBuffer.resized(to: scaledSize) else {
        return nil
    }
    let interval: TimeInterval
    let outputTensor: Tensor
    do {
        let inputTensor = try interpreter.input(at: 0)
        // Remove the alpha component from the image buffer to get the RGB data.
        guard let rgbData = rgbDataFromBuffer(
            thumbnailPixelBuffer,
            byteCount: batchSize * inputWidth * inputHeight * inputChannels,
            isModelQuantized: inputTensor.dataType == .uInt8
        ) else {
            print("Failed to convert the image buffer to RGB data.")
            return nil
        }
        // Copy the RGB data to the input `Tensor`.
        try interpreter.copy(rgbData, toInputAt: 0)
        // Run inference by invoking the `Interpreter` and time the call.
        let startDate = Date()
        try interpreter.invoke()
        interval = Date().timeIntervalSince(startDate) * 1000
        // Get the output `Tensor` to process the inference results.
        outputTensor = try interpreter.output(at: 0)
    } catch let error {
        print("Failed to invoke the interpreter with error: \(error.localizedDescription)")
        return nil
    }
    let results: [Float]
    switch outputTensor.dataType {
    case .uInt8:
        guard let quantization = outputTensor.quantizationParameters else {
            print("No results returned because the quantization values for the output tensor are nil.")
            return nil
        }
        // Dequantize the UInt8 output using the tensor's quantization parameters.
        let quantizedResults = [UInt8](outputTensor.data)
        results = quantizedResults.map {
            quantization.scale * Float(Int($0) - quantization.zeroPoint)
        }
    case .float32:
        results = [Float32](unsafeData: outputTensor.data) ?? []
    default:
        print("Output tensor data type \(outputTensor.dataType) is unsupported for this example app.")
        return nil
    }
    return results
}
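// Usage sketch (an assumption, not in this diff): classifying a single camera
// frame with the handler above. `MobileNet.cardModel` and `SBKModelDataHandler`
// come from this file; the pixel buffer would arrive from an AVCapture callback.
func classifyCardFrame(_ pixelBuffer: CVPixelBuffer) {
    guard let handler = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel),
          let scores = handler.runModel(onFrame: pixelBuffer) else { return }
    // Index of the highest-scoring class; the class meanings depend on the model.
    if let best = scores.indices.max(by: { scores[$0] < scores[$1] }) {
        print("best class: \(best), score: \(scores[best])")
    }
}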
/// Returns the RGB data representation of the given image buffer with the specified `byteCount`.
///
/// - Parameters:
/// - buffer: The pixel buffer to convert to RGB data.
/// - byteCount: The expected byte count for the RGB data calculated using the values that the
/// model was trained on: `batchSize * imageWidth * imageHeight * componentsCount`.
/// - isModelQuantized: Whether the model is quantized (i.e. fixed point values rather than
/// floating point values).
/// - Returns: The RGB data representation of the image buffer or `nil` if the buffer could not be
/// converted.
private func rgbDataFromBuffer(
_ buffer: CVPixelBuffer,
byteCount: Int,
......@@ -293,26 +269,12 @@ class SBKModelDataHandler {
// MARK: - Extensions
extension Data {
/// Creates a new buffer by copying the buffer pointer of the given array.
///
/// - Warning: The given array's element type `T` must be trivial in that it can be copied bit
/// for bit with no indirection or reference-counting operations; otherwise, reinterpreting
/// data from the resulting buffer has undefined behavior.
/// - Parameter array: An array with elements of type `T`.
init<T>(copyingBufferOf array: [T]) {
self = array.withUnsafeBufferPointer(Data.init)
}
}
extension Array {
/// Creates a new array from the bytes of the given unsafe data.
///
/// - Warning: The array's `Element` type must be trivial in that it can be copied bit for bit
/// with no indirection or reference-counting operations; otherwise, copying the raw bytes in
/// the `unsafeData`'s buffer to a new array returns an unsafe copy.
/// - Note: Returns `nil` if `unsafeData.count` is not a multiple of
/// `MemoryLayout<Element>.stride`.
/// - Parameter unsafeData: The data containing the bytes to turn into an array.
init?(unsafeData: Data) {
guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
#if swift(>=5.0)
......@@ -327,20 +289,3 @@ extension Array {
#endif // swift(>=5.0)
}
}
extension UIImage {
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
let r = CGFloat(dataImage[pixelInfo]) / CGFloat(255.0)
let g = CGFloat(dataImage[pixelInfo+1]) / CGFloat(255.0)
let b = CGFloat(dataImage[pixelInfo+2]) / CGFloat(255.0)
let a = CGFloat(dataImage[pixelInfo+3]) / CGFloat(255.0)
return UIColor(red: r, green: g, blue: b, alpha: a)
}
}
//
// AnchorOption.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class AnchorOption {
init(inputSizeWidth: Int, inputSizeHeight: Int, minScale: Double, maxScale: Double, anchorOffsetX: Double, anchorOffsetY: Double, numLayers: Int, featureMapWidth: [Int], featureMapHeight: [Int], strides: [Int], aspectRatios: [Double], reduceBoxesInLowestLayer: Bool, interpolatedScaleAspectRatio: Double, fixedAnchorSize: Bool) {
self.inputSizeWidth = inputSizeWidth
self.inputSizeHeight = inputSizeHeight
self.minScale = minScale
self.maxScale = maxScale
self.anchorOffsetX = anchorOffsetX
self.anchorOffsetY = anchorOffsetY
self.numLayers = numLayers
self.featureMapWidth = featureMapWidth
self.featureMapHeight = featureMapHeight
self.strides = strides
self.aspectRatios = aspectRatios
self.reduceBoxesInLowestLayer = reduceBoxesInLowestLayer
self.interpolatedScaleAspectRatio = interpolatedScaleAspectRatio
self.fixedAnchorSize = fixedAnchorSize
}
var inputSizeWidth: Int
var inputSizeHeight: Int
var minScale: Double
var maxScale: Double
var anchorOffsetX: Double
var anchorOffsetY: Double
var numLayers: Int
var featureMapWidth: [Int]
var featureMapHeight: [Int]
var strides: [Int]
var aspectRatios: [Double]
var reduceBoxesInLowestLayer: Bool
var interpolatedScaleAspectRatio: Double
var fixedAnchorSize: Bool
func stridesSize() -> Int {
return strides.count
}
func featureMapHeightSize() -> Int {
return featureMapHeight.count
}
func featureMapWidthSize() -> Int {
return featureMapWidth.count
}
}
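// Example (an assumption mirroring MediaPipe's BlazeFace front-camera anchor
// config, not values pinned down by this commit): four layers over a 128x128
// input with strides [8, 16, 16, 16] and fixed anchor sizes.
let faceAnchorOptions = AnchorOption(
    inputSizeWidth: 128, inputSizeHeight: 128,
    minScale: 0.1484375, maxScale: 0.75,
    anchorOffsetX: 0.5, anchorOffsetY: 0.5,
    numLayers: 4,
    featureMapWidth: [], featureMapHeight: [],
    strides: [8, 16, 16, 16],
    aspectRatios: [1.0],
    reduceBoxesInLowestLayer: false,
    interpolatedScaleAspectRatio: 1.0,
    fixedAnchorSize: true)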
//
// Detection.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
struct Detection {
init(score: Double, xMin: Double, yMin: Double, width: Double, height: Double, classID: Int, landMark: [Landmark]) {
self.score = score
self.xMin = xMin
self.yMin = yMin
self.width = width
self.height = height
self.classID = classID
self.landMark = landMark
}
var score: Double
var xMin: Double
var yMin: Double
var width: Double
var height: Double
var classID: Int
var landMark: [Landmark]
}
//
// EMSimilarity.swift
// SwiftSim
//
// Created by Evan Moss on 8/1/16.
// Copyright © 2016 Enterprising Technologies LLC. All rights reserved.
//
// The MIT License (MIT)
//
// Copyright (c) 2016 Evan Moss
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import Foundation
enum EMSimilarityMode {
case Cosine
case Tanimoto
case Ochiai
case JaccardIndex
case JaccardDistance
case Dice
case Hamming
}
enum EMVectorSizeMismatchMode {
case Bail
case Truncate
}
class EMSimilarity {
/** Similarity metric mode **/
private var currentSimMode = [EMSimilarityMode.Cosine]
/** Set the currentSimMode via push **/
func pushSimMode(mode: EMSimilarityMode) {
self.currentSimMode.append(mode)
}
/** Pop the currentSimMode if doing so won't empty the stack **/
func popSimMode() {
if self.currentSimMode.count > 1 {
let _ = self.currentSimMode.popLast()
}
}
/** Get the currently set similarity mode **/
func getCurrentSimMode() -> EMSimilarityMode? {
return self.currentSimMode.last
}
/** Mismatch Mode **/
private var currentMismatchMode = [EMVectorSizeMismatchMode.Bail]
/** Set the currentMismatchMode via push **/
func pushMismatchMode(mode: EMVectorSizeMismatchMode) {
self.currentMismatchMode.append(mode)
}
/** Pop the currentMismatchMode if doing so won't empty the stack **/
func popMismatchMode() {
if self.currentMismatchMode.count > 1 {
let _ = self.currentMismatchMode.popLast()
}
}
/** Get the currently set mismatch mode **/
func getCurrentMismatchMode() -> EMVectorSizeMismatchMode? {
return self.currentMismatchMode.last
}
/** Dot Product **/
private func dot(A: [Double], B: [Double]) -> Double {
var x: Double = 0
for i in 0..<A.count {
x += A[i] * B[i]
}
return x
}
/** Vector Magnitude **/
private func magnitude(A: [Double]) -> Double {
var x: Double = 0
for elt in A {
x += elt * elt
}
return sqrt(x)
}
/** Cosine similarity **/
private func cosineSim(A: [Double], B: [Double]) -> Double {
return dot(A: A, B: B) / (magnitude(A: A) * magnitude(A: B))
}
/** Tanimoto similarity **/
private func tanimotoSim(A: [Double], B: [Double]) -> Double {
let Amag = magnitude(A: A)
let Bmag = magnitude(A: B)
let AdotB = dot(A: A, B: B)
return AdotB / (Amag * Amag + Bmag * Bmag - AdotB)
}
/** Ochiai similarity **/
private func ochiaiSim(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / sqrt(Double(a.count) * Double(b.count))
}
/** Jaccard index **/
private func jaccardIndex(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / Double(a.union(b).count)
}
/** Jaccard distance **/
private func jaccardDist(A: [Double], B: [Double]) -> Double {
return 1.0 - jaccardIndex(A: A, B: B)
}
/** Dice coefficient **/
private func diceCoef(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return 2.0 * Double(a.intersection(b).count) / (Double(a.count) + Double(b.count))
}
/** Hamming distance **/
private func hammingDist(A: [Double], B: [Double]) -> Double {
var x: Double = 0
if A.isEmpty {
return x
}
for i in 0..<A.count {
if A[i] != B[i] {
x += 1
}
}
return x
}
private let enforceEqualVectorSizes: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Hamming]
private let bailOnEmptyInput: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Ochiai]
private let allowEmptyInputs: Set<EMSimilarityMode> = [.Hamming]
/**
* Main compute mode
* Double types
* Returns the similarity results or -1.0 on caught error
*/
func compute(A: [Double], B: [Double]) -> Double {
// get the mode
var mode = EMSimilarityMode.Cosine
if let _mode = self.getCurrentSimMode() {
mode = _mode
}
else {
return -1
}
// are both vectors empty?
if A.isEmpty && B.isEmpty && !allowEmptyInputs.contains(mode) {
// divide by zero -> D.N.E.
return -1
}
// is one of the vectors empty, and would this cause a divide-by-zero error?
if bailOnEmptyInput.contains(mode) && (A.isEmpty || B.isEmpty) {
return -1
}
// look for vector size mismatch for modes in enforceEqualVectorSizes
if enforceEqualVectorSizes.contains(mode) && A.count != B.count {
if let mismatchMode = self.getCurrentMismatchMode() {
switch mismatchMode {
case .Bail:
return -1
case .Truncate:
let a = A.count < B.count ? A : B
let _b = A.count < B.count ? B : A
var b = [Double]()
if a.count > 0 {
for i in 0...a.count-1 {
b.append(_b[i])
}
}
return compute(A: a, B: b)
}
}
else {
return -1
}
}
switch mode {
case .Cosine:
return cosineSim(A: A, B: B)
case .Tanimoto:
return tanimotoSim(A: A, B: B)
case .Ochiai:
return ochiaiSim(A: A, B: B)
case .JaccardIndex:
return jaccardIndex(A: A, B: B)
case .JaccardDistance:
return jaccardDist(A: A, B: B)
case .Dice:
return diceCoef(A: A, B: B)
case .Hamming:
return hammingDist(A: A, B: B)
}
}
}
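// Usage sketch: cosine similarity is the default mode; other metrics are
// selected by pushing a mode onto the stack. A result of -1.0 signals a
// caught error (empty input, or a size mismatch under .Bail).
let similarity = EMSimilarity()
let embeddingA: [Double] = [1, 0, 2]
let embeddingB: [Double] = [1, 1, 2]
let cosine = similarity.compute(A: embeddingA, B: embeddingB)
similarity.pushSimMode(mode: .JaccardIndex)
let jaccard = similarity.compute(A: embeddingA, B: embeddingB)
similarity.popSimMode()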
//
// FaceDetection.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
class FaceDetection{
public func getAnchors(options: AnchorOption) -> [Anchor] {
var _anchors: [Anchor] = []
if (options.stridesSize() != options.numLayers) {
print("strides_size and num_layers must be equal.")
return []
}
var layerID: Int = 0
while (layerID < options.stridesSize()) {
var anchorHeight: [Double] = []
var anchorWidth: [Double] = []
var aspectRatios: [Double] = []
var scales: [Double] = []
var lastSameStrideLayer: Int = layerID
while (lastSameStrideLayer < options.stridesSize() &&
options.strides[lastSameStrideLayer] == options.strides[layerID]) {
let scale: Double = options.minScale + (options.maxScale - options.minScale) * Double(lastSameStrideLayer) / (Double(options.stridesSize()) - 1.0)
if (lastSameStrideLayer == 0 && options.reduceBoxesInLowestLayer) {
aspectRatios.append(1.0)
aspectRatios.append(2.0)
aspectRatios.append(0.5)
scales.append(0.1)
scales.append(scale)
scales.append(scale)
} else {
for i in 0..<options.aspectRatios.count {
aspectRatios.append(options.aspectRatios[i])
scales.append(scale)
}
if options.interpolatedScaleAspectRatio > 0 {
var scaleNext: Double = 0.0
if lastSameStrideLayer == (options.stridesSize() - 1) {
scaleNext = 1.0
} else {
scaleNext = options.minScale + (options.maxScale - options.minScale) * Double(lastSameStrideLayer + 1) / Double(options.stridesSize() - 1)
}
scales.append(sqrt(scale * scaleNext))
aspectRatios.append(options.interpolatedScaleAspectRatio)
}
}
lastSameStrideLayer += 1
}
for i in 0..<aspectRatios.count {
let ratioSQRT: Double = sqrt(aspectRatios[i])
anchorHeight.append(scales[i] / ratioSQRT)
anchorWidth.append(scales[i] * ratioSQRT)
}
var featureMapHeight: Int = 0
var featureMapWidth: Int = 0
if (options.featureMapHeightSize() > 0) {
featureMapHeight = options.featureMapHeight[layerID]
featureMapWidth = options.featureMapWidth[layerID]
} else {
let stride: Int = options.strides[layerID]
featureMapHeight = Int(options.inputSizeHeight / stride)
featureMapWidth = Int(options.inputSizeWidth / stride)
}
for y in 0..<featureMapHeight {
for x: Int in 0..<featureMapWidth {
for anchorID in 0..<anchorHeight.count {
let xCenter: Double = Double(Double(x) + options.anchorOffsetX) / Double(featureMapWidth)
let yCenter: Double = Double(Double(y) + options.anchorOffsetY) / Double(featureMapHeight)
var w: Double = 0
var h: Double = 0
if (options.fixedAnchorSize) {
w = 1.0
h = 1.0
} else {
w = anchorWidth[anchorID]
h = anchorHeight[anchorID]
}
_anchors.append(Anchor(xCenter: xCenter, yCenter: yCenter, h: h, w: w))
}
}
}
layerID = lastSameStrideLayer
}
return _anchors
}
// Output tensors: 1 = classification scores, 2 = box regression
func process(options: OptionsFace, rawScores: [Double] , rawBoxes: [Double] , anchors: [Anchor]) -> [Detection] {
var detectionScores: [Double] = []
var detectionClasses: [Int] = []
let boxes = options.numBoxes
for i in 0..<boxes {
var classId = -1
var maxScore: Double = 5e-324
for scoreIdx in 0..<options.numClasses {
var score = rawScores[i * options.numClasses + scoreIdx]
if options.sigmoidScore {
if options.scoreClippingThresh > 0 {
// Clip the raw score to the configured range before applying the sigmoid.
score = min(max(score, -options.scoreClippingThresh), options.scoreClippingThresh)
}
score = 1.0 / (1.0 + exp(-score))
}
// Track the best-scoring class for this box.
if maxScore < score {
maxScore = score
classId = scoreIdx
}
}
detectionClasses.append(classId)
detectionScores.append(maxScore)
}
let detections: [Detection] = convertToDetections(rawBoxes: rawBoxes, anchors: anchors, detectionScores: detectionScores, detectionClasses: detectionClasses, options: options)
return detections
}
func convertToDetections(
rawBoxes: [Double],
anchors: [Anchor],
detectionScores: [Double],
detectionClasses: [Int],
options: OptionsFace) -> [Detection]{
var _outputDetections : [Detection] = []
for i in 0..<options.numBoxes {
if detectionScores[i] < options.minScoreThresh {
continue
}
let boxOffset: Int = 0
let boxData = decodeBox(rawBoxes: rawBoxes, i:i, anchors: anchors, options: options)
var landmark: [Landmark] = []
for k in 0..<options.numKeypoints {
let x: Double = boxData[boxOffset + 4 + k * 2]
var y: Double = 0.0
if (options.flipVertically) {
y = 1 - boxData[boxOffset + 4 + k * 2 + 1]
} else {
y = boxData[boxOffset + 4 + k * 2 + 1]
}
let tmpLand: Landmark = Landmark(x: x, y: y)
landmark.append(tmpLand)
}
let detection: Detection = convertToDetection(
boxYMin: boxData[boxOffset + 0],
boxXMin: boxData[boxOffset + 1],
boxYMax: boxData[boxOffset + 2],
boxXMax: boxData[boxOffset + 3],
landmark: landmark,
score: detectionScores[i],
classID: detectionClasses[i],
flipVertically: options.flipVertically)
_outputDetections.append(detection)
}
return _outputDetections
}
func origNms(detections: [Detection],threshold: Double, img_width: Int, img_height: Int) -> [Detection] {
if detections.isEmpty {
return []
}
var x1: [Double] = []
var x2: [Double] = []
var y1: [Double] = []
var y2: [Double] = []
var s : [Double] = []
detections.forEach{detection in
x1.append(detection.xMin * Double(img_width))
x2.append((detection.xMin + detection.width) * Double(img_width))
y1.append(detection.yMin * Double(img_height))
y2.append((detection.yMin + detection.height) * Double(img_height))
s.append(detection.score)
}
let _x1 = x1
let _x2 = x2
let _y1 = y1
let _y2 = y2
let area: [Double] = multiplyArray(x: subArray(x: _x2, y: _x1) , y: subArray(x: _y2 , y: _y1))
let I: [Double] = _quickSort(a: s)
var positions: [Int] = []
I.forEach{element in
positions.append(s.firstIndex(of: element)!)
}
var pick: [Int] = []
while positions.count > 0 {
let ind0: [Int] = [positions.last!]
let ind1: [Int] = Array(positions.dropLast())
let xx1 = _maximum(value: _itemIndex(item: _x1, positions: ind0)[0], itemIndex: _itemIndex(item: _x1, positions: ind1))
let yy1 = _maximum(value: _itemIndex(item: _y1, positions: ind0)[0], itemIndex: _itemIndex(item: _y1, positions: ind1))
let xx2 = _minimum(value: _itemIndex(item: _x2, positions: ind0)[0], itemIndex: _itemIndex(item: _x2, positions: ind1))
let yy2 = _minimum(value: _itemIndex(item: _y2, positions: ind0)[0], itemIndex: _itemIndex(item: _y2, positions: ind1))
let w = _maximum(value: 0.0, itemIndex: subArray(x: xx2 ,y: xx1))
let h = _maximum(value: 0.0, itemIndex: subArray(x: yy2, y: yy1))
let inter = multiplyArray(x: w, y: h)
let o = divideArray(x: inter,
y: subArray(x: _sum(a: _itemIndex(item: area, positions: ind0)[0], b: _itemIndex(item: area, positions: ind1)), y: inter))
pick.append(ind0[0])
let _inCorrectIndex: [Int] = inCorrectIndex(positions: positions, o: o, threshold: threshold)
positions = removeInCorrectIndex(positions: positions, inCorrectIndex: _inCorrectIndex)
}
var _detections: [Detection] = []
pick.forEach{element in _detections.append(detections[element])}
return _detections
}
}
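// End-to-end sketch (assumed wiring, not spelled out in this commit): decoding
// one inference from the face-detection model. `rawScores` and `rawBoxes` are
// the model's two flattened output tensors; the option objects are the ones
// defined in AnchorOption.swift and OptionsFace.swift.
func decodeFaces(rawScores: [Double], rawBoxes: [Double],
                 anchorOptions: AnchorOption, options: OptionsFace) -> [Detection] {
    let faceDetection = FaceDetection()
    let anchors = faceDetection.getAnchors(options: anchorOptions)
    let detections = faceDetection.process(options: options, rawScores: rawScores,
                                           rawBoxes: rawBoxes, anchors: anchors)
    // Non-maximum suppression keeps the best-scoring box among overlapping ones;
    // 128x128 matches the model input size assumed above.
    return faceDetection.origNms(detections: detections, threshold: 0.3,
                                 img_width: 128, img_height: 128)
}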
func subArray(x: [Double], y:[Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] - y[b])
}
return a
}
func multiplyArray(x: [Double], y:[Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] * y[b])
}
return a
}
func divideArray(x: [Double], y: [Double]) -> [Double] {
var a: [Double] = []
for b in 0..<x.count {
a.append(x[b] / y[b])
}
return a
}
func decodeBox(rawBoxes: [Double], i: Int, anchors: [Anchor], options: OptionsFace) -> [Double] {
var boxData: [Double] = [Double](repeating: 0.0, count: options.numCoords)
let boxOffset: Int = i * options.numCoords + options.boxCoordOffset
var yCenter: Double = rawBoxes[boxOffset]
var xCenter: Double = rawBoxes[boxOffset + 1]
var h: Double = rawBoxes[boxOffset + 2]
var w: Double = rawBoxes[boxOffset + 3]
if (options.reverseOutputOrder) {
xCenter = rawBoxes[boxOffset]
yCenter = rawBoxes[boxOffset + 1]
w = rawBoxes[boxOffset + 2]
h = rawBoxes[boxOffset + 3]
}
xCenter = xCenter / options.xScale * anchors[i].w + anchors[i].xCenter
yCenter = yCenter / options.yScale * anchors[i].h + anchors[i].yCenter
if (options.applyExponentialOnBoxSize) {
h = exp(h / options.hScale) * anchors[i].h
w = exp(w / options.wScale) * anchors[i].w
} else {
h = h / options.hScale * anchors[i].h
w = w / options.wScale * anchors[i].w
}
let yMin: Double = yCenter - h / 2.0
let xMin: Double = xCenter - w / 2.0
let yMax: Double = yCenter + h / 2.0
let xMax: Double = xCenter + w / 2.0
boxData[0] = yMin
boxData[1] = xMin
boxData[2] = yMax
boxData[3] = xMax
if (options.numKeypoints > 0) {
for k in 0..<options.numKeypoints {
let offset: Int = i * options.numCoords +
options.keypointCoordOffset +
k * options.numValuesPerKeypoint
var keyPointY: Double = rawBoxes[offset]
var keyPointX: Double = rawBoxes[offset + 1]
if (options.reverseOutputOrder) {
keyPointX = rawBoxes[offset]
keyPointY = rawBoxes[offset + 1]
}
boxData[4 + k * options.numValuesPerKeypoint] =
keyPointX / options.xScale * anchors[i].w + anchors[i].xCenter
boxData[4 + k * options.numValuesPerKeypoint + 1] =
keyPointY / options.yScale * anchors[i].h + anchors[i].yCenter
}
}
return boxData
}
func convertToDetection(
boxYMin: Double,
boxXMin: Double,
boxYMax: Double,
boxXMax: Double,
landmark: [Landmark],
score: Double,
classID: Int,
flipVertically: Bool) -> Detection {
var _yMin: Double = 0.0
if flipVertically {
_yMin = 1.0 - boxYMax
}
else {
_yMin = boxYMin
}
return Detection(score: score, xMin: boxXMin, yMin: _yMin, width: (boxXMax - boxXMin), height: (boxYMax - boxYMin), classID: classID, landMark: landmark)
}
func clamp(lower: Int, higher: Int, val: Int) -> Int {
    if val < lower {
        return lower
    } else if val > higher {
        return higher
    } else {
        return val
    }
}
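// Worked example of the bounds: out-of-range values snap to the nearer bound.
assert(clamp(lower: 0, higher: 255, val: 300) == 255)
assert(clamp(lower: 0, higher: 255, val: -4) == 0)
assert(clamp(lower: 0, higher: 255, val: 128) == 128)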
func getRotatedImageByteIndex(x: Int, y: Int, rotatedImageWidth: Int) -> Int {
return rotatedImageWidth * (y + 1) - (x + 1)
}
func _quickSort(a: [Double]) -> [Double] {
if a.count <= 1{
return a
}
let pivot = a[0]
var less: [Double] = []
var more: [Double] = []
var pivotList: [Double] = []
a.forEach{i in
if (i < pivot) {
less.append(i)
} else if (i > pivot) {
more.append(i)
} else {
pivotList.append(i)
}
}
less = _quickSort(a: less)
more = _quickSort(a: more)
less += pivotList
less += more
return less
}
func _itemIndex(item: [Double], positions:[Int]) -> [Double] {
var _temp: [Double] = []
positions.forEach {element in _temp.append(item[element])}
return _temp
}
func _minimum(value: Double, itemIndex: [Double]) -> [Double] {
var _temp: [Double] = []
itemIndex.forEach{element in
if value < element {
_temp.append(value)
}
else {
_temp.append(element)
}
}
return _temp
}
func _maximum(value: Double, itemIndex: [Double]) -> [Double] {
var _temp: [Double] = []
itemIndex.forEach{element in
if value > element {
_temp.append(value)
}
else {
_temp.append(element)
}
}
return _temp
}
func _sum(a: Double, b: [Double]) -> [Double] {
var _temp: [Double] = []
b.forEach{element in
_temp.append(a + element)
}
return _temp
}
func inCorrectIndex(positions: [Int], o: [Double], threshold: Double) -> [Int] {
var _index: [Int] = []
for i in 0..<o.count {
if o[i] > threshold {
_index.append(positions[i])
}
}
return _index
}
func removeInCorrectIndex(positions: [Int], inCorrectIndex: [Int]) -> [Int] {
var temp = positions
temp.remove(at: positions.count - 1)
inCorrectIndex.forEach{ element in temp = temp.filter(){$0 != element}}
return temp
}
//Uint32List convertImage(Uint8List plane0, Uint8List plane1, Uint8List plane2,
// int bytesPerRow, int bytesPerPixel, int width, int height) {
// int hexFF = 255
// int x, y, uvIndex, index
// int yp, up, vp
// int r, g, b
// int rt, gt, bt
//
// Uint32List image = new Uint32List(width * height)
//
// for (x = 0 x < width x++) {
// for (y = 0 y < height y++) {
// uvIndex =
// bytesPerPixel * ((x / 2).round() + bytesPerRow * ((y / 2).round()))
// index = y * width + x
//
// yp = plane0[index]
// up = plane1[uvIndex]
// vp = plane2[uvIndex]
//
// rt = (yp + vp * 1436 / 1024 - 179).round()
// gt = (yp - up * 46549 / 131072 + 44 - vp * 93604 / 131072 + 91).round()
// bt = (yp + up * 1814 / 1024 - 227).round()
// r = clamp(0, 255, rt)
// g = clamp(0, 255, gt)
// b = clamp(0, 255, bt)
//
// image[getRotatedImageByteIndex(y, x, height)] =
// (hexFF << 24) | (b << 16) | (g << 8) | r
// }
// }
// return image
//}
//func FaceAlign(
// rawImage: CVPixelBuffer, detections: [Detection], width: Int) -> [Any] {
// var desiredLeftEye: Landmark = Landmark(x: 0.35, y: 0.35)
// var desiredFaceWidth: Int = width
// var desiredFaceHeight: Int = width
//
// imglib.PngEncoder pngEncoder = new imglib.PngEncoder(level: 0, filter: 0)
// List<int> byteData = pngEncoder.encodeImage(rawImage)
//
// Detection detection
// List<dynamic> newFaces = new List()
//
// for (detection in detections) {
// Landmark leftEyeCenter = detection.landmark[0]
// Landmark rightEyeCenter = detection.landmark[1]
//
// double dY = (rightEyeCenter.y - leftEyeCenter.y) * rawImage.height
// double dX = (rightEyeCenter.x - leftEyeCenter.x) * rawImage.width
//
// double angle = atan2(dY, dX)
// angle = (angle > 0 ? angle : (2 * pi + angle)) * 360 / (2 * pi)
//
// double desiredRightEyeX = 1.0 - desiredLeftEye.x
// double dist = sqrt((dX * dX) + (dY * dY))
// double desiredDist = (desiredRightEyeX - desiredLeftEye.x)
// desiredDist *= desiredFaceWidth
//
// double scale = desiredDist / dist
//
// double eyeCenterX =
// ((leftEyeCenter.x + rightEyeCenter.x) / 2) * rawImage.width
// double eyeCenterY =
// ((leftEyeCenter.y + rightEyeCenter.y) / 2) * rawImage.height
//
// List<int> eyeCenter = new List()
// eyeCenter.add(eyeCenterX.round())
// eyeCenter.add(eyeCenterY.round())
//
// List<double> desiredLeftEye_push = new List()
// desiredLeftEye_push.add(desiredLeftEye.x)
// desiredLeftEye_push.add(desiredLeftEye.y)
//
// List<int> dstSize = new List()
// dstSize.add(desiredFaceWidth)
// dstSize.add(desiredFaceHeight)
// dynamic byteFace = await ImgProc.faceAlign(
// byteData, eyeCenter, desiredLeftEye_push, angle, scale, dstSize)
// newFaces.add(byteFace)
// }
// return newFaces
//}
//
// LandMark.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class Landmark{
init(x: Double, y: Double) {
self.x = x
self.y = y
}
var x: Double
var y: Double
}
//
// NormalizeOp.swift
// movanai
//
// Created by Nguyen Van An on 4/4/21.
//
import Foundation
class NormalizeOp {
var x: Float = 0
var y: Float = 0
init(_ x: Float, _ y: Float ) {
self.x = x
self.y = y
}
}
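// Sketch (semantics assumed from the name; this commit does not define them):
// a NormalizeOp pair reads naturally as (mean, std) scaling for pixel values,
// e.g. mapping 0...255 bytes onto the -1...1 range many float TFLite models expect.
let inputNormalization = NormalizeOp(127.5, 127.5)  // value' = (value - 127.5) / 127.5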
//
// OptionsFace.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class OptionsFace {
init(numClasses: Int, numBoxes: Int, numCoords: Int, keypointCoordOffset: Int, ignoreClasses: [Int], scoreClippingThresh: Double, minScoreThresh: Double, numKeypoints: Int, numValuesPerKeypoint: Int, boxCoordOffset: Int, xScale: Double, yScale: Double, wScale: Double, hScale: Double, applyExponentialOnBoxSize: Bool, reverseOutputOrder: Bool, sigmoidScore: Bool, flipVertically: Bool) {
self.numClasses = numClasses
self.numBoxes = numBoxes
self.numCoords = numCoords
self.keypointCoordOffset = keypointCoordOffset
self.ignoreClasses = ignoreClasses
self.scoreClippingThresh = scoreClippingThresh
self.minScoreThresh = minScoreThresh
self.numKeypoints = numKeypoints
self.numValuesPerKeypoint = numValuesPerKeypoint
self.boxCoordOffset = boxCoordOffset
self.xScale = xScale
self.yScale = yScale
self.wScale = wScale
self.hScale = hScale
self.applyExponentialOnBoxSize = applyExponentialOnBoxSize
self.reverseOutputOrder = reverseOutputOrder
self.sigmoidScore = sigmoidScore
self.flipVertically = flipVertically
}
var numClasses: Int
var numBoxes: Int
var numCoords: Int
var keypointCoordOffset : Int
var ignoreClasses: [Int]
var scoreClippingThresh: Double
var minScoreThresh: Double
var numKeypoints: Int
var numValuesPerKeypoint: Int
var boxCoordOffset: Int
var xScale: Double
var yScale: Double
var wScale : Double
var hScale: Double
var applyExponentialOnBoxSize: Bool
var reverseOutputOrder: Bool
var sigmoidScore: Bool
var flipVertically: Bool
}
class Anchor {
init(xCenter: Double, yCenter: Double, h: Double, w: Double) {
self.xCenter = xCenter
self.yCenter = yCenter
self.h = h
self.w = w
}
var xCenter: Double
var yCenter: Double
var h: Double
var w: Double
}
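// Example configuration (an assumption based on MediaPipe's 128x128
// front-camera face model, not values fixed by this commit): 896 boxes,
// 16 coordinates per box, 6 keypoints, sigmoid-activated scores.
let faceOptions = OptionsFace(
    numClasses: 1, numBoxes: 896, numCoords: 16,
    keypointCoordOffset: 4, ignoreClasses: [],
    scoreClippingThresh: 100.0, minScoreThresh: 0.75,
    numKeypoints: 6, numValuesPerKeypoint: 2, boxCoordOffset: 0,
    xScale: 128, yScale: 128, wScale: 128, hScale: 128,
    applyExponentialOnBoxSize: false, reverseOutputOrder: true,
    sigmoidScore: true, flipVertically: false)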
......@@ -33,6 +33,11 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
var completionSuccessCard: (_ data: [String: Any]?, _ cardImage: Data?, _ faceImage: Data?) -> Void = {_,_,_ in}
private var previewWidth: CGFloat = 128.0
private var previewHeight: CGFloat = 128.0
private var cropZone: CGRect?
private var cropImage: CGRect?
var overlayView : OverLayCardView?
override func viewDidLoad() {
super.viewDidLoad()
self.btnRotateScreen.layer.cornerRadius = 20
......@@ -41,7 +46,7 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
init() {
super.init(nibName: "SBKCaptureCardVC", bundle: Bundle(for: SBKCaptureCardVC.self))
}
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
......@@ -67,41 +72,36 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
// Camera setup
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
setupLivePreview()
self.loadConfigUI()
self.lbDescription.text = self.descriptionScreen
captureSession.sessionPreset = .high
guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
else {
print("Unable to access back camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
......@@ -120,59 +120,35 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
initOverLay()
}
// Set the card placement zone
func initOverLay() {
    if self.overlayView == nil {
        let fWidth = self.view.bounds.size.width
        self.overlayView = OverLayCardView(frame: self.viewCamera.bounds)
        self.overlayView!.marginTop = self.viewCamera.frame.origin.x + fWidth/20
        previewWidth = self.viewCamera.frame.width * 18 / 20
        previewHeight = self.viewCamera.frame.width * 18 * 3 / 20 / 4
        self.overlayView!.previewWidth = self.previewWidth
        self.overlayView!.previewHeight = self.previewHeight
        self.overlayView!.borderLength = 50
        self.overlayView!.borderWidth = 5
        self.overlayView!.connerRadius = 0
        viewCamera.addSubview(self.overlayView!)
        self.overlayView!.setLayer()
    }
}
// Process the photo data after capture
@available(iOS 11.0, *)
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
self.captureSession.stopRunning()
self.captureSession.removeOutput(self.videoDataOutput)
......@@ -181,6 +157,7 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
let viewController = SBKResultCapture()
viewController.imageData = imageData
viewController.checkScreen = self.checkScreen
viewController.cropZone = self.cropImage!
viewController.idFront = self.idFront
viewController.URLToken = self.URLToken
viewController.statusScreen = self.statusScreen
......@@ -205,39 +182,6 @@ class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
}
}
@IBAction func onSelectRotateScreen(_ sender: Any) {
}
......@@ -263,7 +207,7 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
......@@ -273,52 +217,57 @@ extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
debugPrint("unable to get image from sample buffer")
return
}
if self.cropZone == nil {
    DispatchQueue.main.async {
        // Map the overlay's on-screen cutout into pixel-buffer coordinates.
        let scaleSizeWidth = self.viewCamera.frame.width / CGFloat(CVPixelBufferGetWidth(imageFrameInput))
        let scaleSizeHeight = self.viewCamera.frame.height / CGFloat(CVPixelBufferGetHeight(imageFrameInput))
        let x = self.overlayView!.marginTop
        let y = (self.viewCamera.frame.height - self.previewHeight) / 2
        self.cropZone = CGRect(x: CGFloat(x) / scaleSizeWidth,
                               y: CGFloat(y + 30) / scaleSizeHeight,
                               width: CGFloat(self.previewWidth) / scaleSizeWidth,
                               height: CGFloat(self.previewHeight - 60) / scaleSizeHeight)
        self.cropImage = CGRect(x: CGFloat(x - 10) / scaleSizeWidth,
                                y: CGFloat(y + 10) / scaleSizeHeight,
                                width: CGFloat(self.previewWidth + 20) / scaleSizeWidth,
                                height: CGFloat(self.previewHeight - 20) / scaleSizeHeight)
    }
}
guard let crop = self.cropZone else { return }
guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else { return }
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
if self.overlayView != nil {
    DispatchQueue.main.async {
        if let viewWithTag = self.viewCamera.viewWithTag(11) {
            viewWithTag.removeFromSuperview()
        }
        self.statusValidateImage = validateImageCard
        if validateImageCard == ValidateCard.IMAGE_FAKE || validateImageCard == .ERROR {
            self.lbDescription.textColor = UIColor.red
            self.lbDescription.text = "Incorrect card, please check!".localized()
            self.imgCaution.isHidden = false
            self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
            self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
        }
        if validateImageCard == .IMAGE_FRONT && self.checkScreen == 2 {
            self.lbDescription.textColor = UIColor.red
            self.lbDescription.text = "Please put the back of the card in".localized()
            self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
            self.imgCaution.isHidden = false
            self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
        }
        if validateImageCard == .IMAGE_BACK && self.checkScreen == 1 {
            self.lbDescription.textColor = UIColor.red
            self.lbDescription.text = "Please put the front of the card in".localized()
            self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
            self.imgCaution.isHidden = false
            self.overlayView!.setBorderColor(color: UIColor.red.cgColor)
        }
        if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGE_BACK && self.checkScreen == 2) {
            self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
            self.lbDescription.text = "Are you ready. Let's start!".localized()
            self.imgCaution.isHidden = true
            self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
            self.overlayView!.setBorderColor(color: UIColor.blue.cgColor)
        }
    }
}
}
......
......@@ -34,6 +34,11 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
var statusScreen: String = "vertical" //horizontal
public var typeCamera: TypeCard = TypeCard.FRONT
public let labelTypeCard = UILabel()
private var previewWidth: CGFloat = 128.0
private var previewHeight: CGFloat = 128.0
private var cropZone: CGRect?
private var cropImage: CGRect?
var overlayView: OverLayCardView?
public var completionSuccessCardStep: (_ validate: ValidateCard?, _ pathImage: String?, _ permissionCamera: Bool?)->Void = {_,_,_ in}
......@@ -66,7 +71,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
self.completionSuccessCardStep(nil, nil, false)
return
}
self.loadCamera()
self.labelTypeCard.frame = CGRect(x: self.center.x - 50, y: self.frame.size.height / 8 + self.viewCamera.frame.origin.y - 40, width: 100, height: 20)
......@@ -128,19 +133,27 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
// Camera setup
func loadCamera() {
captureSession.sessionPreset = .high
var device : AVCaptureDevice?
if #available(iOS 11.1, *) {
guard let devicetype = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .back).devices.first else {
fatalError("No back camera device found; run this SDK on a physical iOS device, not the simulator")
}
device = devicetype
} else if #available(iOS 11.0, *) {
guard let devicetype = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera],
mediaType: .video,
position: .back).devices.first else {
fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
}
device = devicetype
}
do {
self.inputCamera = try AVCaptureDeviceInput(device: device!)
if captureSession.canAddInput(self.inputCamera) {
captureSession.addInput(self.inputCamera)
......@@ -163,7 +176,7 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
}
//set frame preview
......@@ -181,62 +194,40 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
}
// Set the card placement zone
public override func draw(_ rect: CGRect) {
initOverLay()
}
func initOverLay(){
if self.overlayView == nil {
self.loadCamera()
let fWidth = self.bounds.size.width
self.overlayView = OverLayCardView(frame: self.viewCamera.bounds)
self.overlayView!.marginTop = self.viewCamera.frame.origin.x + fWidth/20
previewWidth = self.viewCamera.frame.width * 18 / 20
previewHeight = self.viewCamera.frame.width * 18 * 3 / 20 / 4
self.overlayView!.previewWidth = self.previewWidth
self.overlayView!.previewHeight = self.previewHeight
self.overlayView!.borderLength = 50
self.overlayView!.borderWidth = 2
self.overlayView!.connerRadius = 0
viewCamera.addSubview(self.overlayView!)
self.overlayView!.setLayer()
}
}
// Process the photo data after capture
@available(iOS 11.0, *)
public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else { return }
let cropImage = UIImage(data: imageData)!.crop(rect: self.cropImage!, scale: 1.0)
if let image = cropImage, let urlImage = SBKValidateInput.shared.saveImage(imageName: "imagecard\(typeCamera).png", image: image) {
self.completionSuccessCardStep(nil, urlImage, nil)
}
}
// Capture button action
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == TypeCard.FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == TypeCard.BACK) {
......@@ -249,41 +240,6 @@ open class SBKValidateCardView: UIView, AVCapturePhotoCaptureDelegate {
}
}
}
}
extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
......@@ -294,21 +250,33 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
if captureSession.canAddOutput(self.videoDataOutput) {
self.captureSession.addOutput(self.videoDataOutput)
}
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
public func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
if self.cropZone == nil && self.overlayView != nil {
DispatchQueue.main.async {
let scaleSizeWidth = self.viewCamera.frame.width / CGFloat( CVPixelBufferGetWidth(imageFrameInput))
let scaleSizeHeight = self.viewCamera.frame.height / CGFloat( CVPixelBufferGetHeight(imageFrameInput))
let x = self.overlayView!.marginTop
let y = (self.viewCamera.frame.height - self.previewHeight) / 2
self.cropZone = CGRect(x: CGFloat(x) / scaleSizeWidth, y: CGFloat(y) / scaleSizeHeight, width: CGFloat(self.previewWidth) / scaleSizeWidth , height: CGFloat(self.previewHeight) / scaleSizeHeight )
self.cropImage = CGRect(x: CGFloat(x - 10) / scaleSizeWidth, y: CGFloat(y + 10) / scaleSizeHeight, width: CGFloat(self.previewWidth + 20) / scaleSizeWidth , height: CGFloat(self.previewHeight - 20) / scaleSizeHeight )
}
}
guard let crop = self.cropZone else {return}
guard let imageOutput = imageFrameInput.crop(rect: crop, scale: 1.0) else {return}
let validateImageCard = SBKValidateInput.shared.didOutput(pixelBuffer: imageOutput)
if let overlayView = self.overlayView {
DispatchQueue.main.async {
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
......@@ -322,9 +290,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_FRONT && self.typeCamera == .BACK {
......@@ -332,9 +298,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if validateImageCard == .IMAGE_BACK && self.typeCamera == .FRONT {
......@@ -342,9 +306,7 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
overlayView.setBorderColor(color: UIColor.red.cgColor)
}
if (self.statusValidateImage == ValidateCard.IMAGE_FRONT && self.typeCamera == .FRONT) || (self.statusValidateImage == .IMAGE_BACK && self.typeCamera == .BACK) {
......@@ -352,10 +314,9 @@ extension SBKValidateCardView: AVCaptureVideoDataOutputSampleBufferDelegate {
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
overlayView.setBorderColor(color: UIColor.blue.cgColor)
}
}
}
}
}
......@@ -11,6 +11,7 @@ class SBKResultCapture: UIViewController {
@IBOutlet weak var imgPhotoCard: UIImageView!
public var imageData: Data?
public var dataCrop: Data?
var cropZone: CGRect?
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var lbDescription: UILabel!
......@@ -61,7 +62,9 @@ class SBKResultCapture: UIViewController {
if UIDevice.current.userInterfaceIdiom == .pad {
cropImage = SBKValidateInput.shared.cropImageHorizontal(image: imageCap!, rect: CGRect(x: imageCap!.size.width * 1 / 10, y: imageCap!.size.height * 3 / 20, width: imageCap!.size.width * 8 / 10, height: imageCap!.size.height * 8 / 10), scale: 1.0)!.rotate(radians: .pi / 2)
} else {
if self.cropZone != nil {
cropImage = imageCap!.crop(rect: self.cropZone!, scale: 1.0)
}
}
dataCrop = cropImage!.pngData()
......@@ -72,14 +75,7 @@ class SBKResultCapture: UIViewController {
fatalError("init(coder:) has not been implemented")
}
func navigateToFace() {
DispatchQueue.main.async {
......@@ -153,27 +149,3 @@ class SBKResultCapture: UIViewController {
}
}
extension UIImage {
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
// Trim off the extremely small float value to prevent core graphics from rounding it up
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
// Move origin to middle
context.translateBy(x: newSize.width/2, y: newSize.height/2)
// Rotate around middle
context.rotate(by: CGFloat(radians))
// Draw the image at its center
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
}
//
// OverLayView.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import AVFoundation
import UIKit
class OverLayCardView: UIView {
@IBInspectable var previewWidth: CGFloat = 100
@IBInspectable var previewHeight: CGFloat = 100
@IBInspectable var borderLength : CGFloat = 10
@IBInspectable var borderPadding : CGFloat = 0
@IBInspectable var borderWidth : CGFloat = 2
@IBInspectable var borderColor : CGColor = UIColor.white.cgColor
@IBInspectable var marginTop : CGFloat = 0
@IBInspectable var marginLeft : CGFloat = 0
@IBInspectable var connerRadius : CGFloat = 8
let vContainer: UIView = {
let view = UIView()
view.backgroundColor = UIColor.black.withAlphaComponent(0.6)
return view
}()
let vContainer2: UIView = {
let view = UIView()
return view
}()
override init(frame: CGRect) {
super.init(frame: frame)
vContainer.frame = self.bounds
self.addSubview(vContainer)
vContainer2.frame = self.bounds
self.addSubview(vContainer2)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setLayer() {
    // marginLeft offsets the cut-out horizontally, marginTop vertically;
    // a value of 0 centers it on that axis.
    var x = marginLeft
    var y = marginTop
    if marginLeft == 0 {
        x = self.center.x - previewWidth / 2
    }
    if marginTop == 0 {
        y = self.center.y - previewHeight / 2
    }
    // Punch the card-shaped hole out of the dimmed container.
    let maskLayer = CALayer()
    maskLayer.frame = self.bounds
    let circleLayer = CAShapeLayer()
    circleLayer.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
    let finalPath = UIBezierPath(roundedRect: CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height), cornerRadius: 0)
    let rectPath = UIBezierPath(roundedRect: CGRect(x: x, y: y, width: previewWidth, height: previewHeight), cornerRadius: connerRadius)
    finalPath.append(rectPath.reversing())
    circleLayer.path = finalPath.cgPath
    maskLayer.addSublayer(circleLayer)
    vContainer.layer.mask = maskLayer
    // Draw the rounded border around the cut-out.
    let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding * 2, height: previewHeight - borderPadding * 2), cornerRadius: connerRadius)
    let shape = CAShapeLayer()
    shape.path = path.cgPath
    shape.strokeColor = borderColor
    shape.lineWidth = borderWidth
    shape.fillColor = UIColor.clear.cgColor
    vContainer2.layer.addSublayer(shape)
}
func setBorderColor(color: CGColor) {
    guard borderColor != color else { return }
    borderColor = color
    vContainer2.layer.sublayers = nil
    var x = marginLeft
    var y = marginTop
    if marginLeft == 0 {
        x = self.center.x - previewWidth / 2
    }
    if marginTop == 0 {
        y = self.center.y - previewHeight / 2
    }
    // Redraw the rounded border in the new color.
    let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding * 2, height: previewHeight - borderPadding * 2), cornerRadius: connerRadius)
    let shape = CAShapeLayer()
    shape.path = path.cgPath
    shape.strokeColor = borderColor
    shape.lineWidth = borderWidth
    shape.fillColor = UIColor.clear.cgColor
    vContainer2.layer.addSublayer(shape)
}
}
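A minimal usage sketch for OverLayCardView; the cameraPreview host view and the concrete sizes are illustrative assumptions, not SDK code:

import UIKit

// Hypothetical host view; only OverLayCardView itself comes from this SDK.
let cameraPreview = UIView(frame: CGRect(x: 0, y: 0, width: 375, height: 667))
let overlay = OverLayCardView(frame: cameraPreview.bounds)
overlay.previewWidth = 320
overlay.previewHeight = 200
overlay.setLayer()                                  // builds the dimmed mask and the initial border
cameraPreview.addSubview(overlay)
overlay.setBorderColor(color: UIColor.blue.cgColor) // recolors the border without rebuilding the mask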
//
// ExtCGImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
import CoreGraphics
import CoreImage
import VideoToolbox
extension CGImage{
func toCVPixelBuffer() -> CVPixelBuffer? {
let frameSize = CGSize(width: self.width, height: self.height)
var pixelBuffer:CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(frameSize.width), Int(frameSize.height), kCVPixelFormatType_32BGRA , nil, &pixelBuffer)
if status != kCVReturnSuccess {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags.init(rawValue: 0))
let data = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
let context = CGContext(data: data, width: Int(frameSize.width), height: Int(frameSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue)
context?.draw(self, in: CGRect(x: 0, y: 0, width: self.width, height: self.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
public static func create(pixelBuffer: CVPixelBuffer) -> CGImage? {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
return cgImage
}
}
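A short round-trip sketch for the two helpers above; the input is assumed to be an already-filled 32BGRA buffer:

// CVPixelBuffer -> CGImage -> CVPixelBuffer (assumes a 32BGRA source buffer).
func roundTrip(_ pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
    guard let cgImage = CGImage.create(pixelBuffer: pixelBuffer) else { return nil }
    return cgImage.toCVPixelBuffer()
}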
//
// ExtCIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension CIImage{
func toUIImage() -> UIImage {
let eaglContext = EAGLContext(api: .openGLES2)
let context:CIContext = CIContext(eaglContext: eaglContext!)
let cgImage:CGImage = context.createCGImage(self, from: self.extent)!
let image:UIImage = UIImage.init(cgImage: cgImage)
return image
}
}
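EAGLContext/OpenGLES has been deprecated since iOS 12; a Metal-backed variant (a suggested alternative, not existing SDK code) could look like this:

extension CIImage {
    // CIContext() is Metal-backed by default on modern devices.
    func toUIImageViaMetal() -> UIImage? {
        let context = CIContext()
        guard let cgImage = context.createCGImage(self, from: self.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
}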
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
import UIKit
import Accelerate
extension CVPixelBuffer {
/**
 Returns a thumbnail made by cropping the pixel buffer to its biggest centered square
 and scaling the crop to the model's input dimensions.
 */
func centerThumbnail(ofSize size: CGSize ) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
let thumbnailSize = min(imageWidth, imageHeight)
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
......@@ -139,5 +115,76 @@ extension CVPixelBuffer {
return pixelBuffer
}
func resized(to size: CGSize ) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA ||
pixelBufferType == kCVPixelFormatType_32ARGB)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
// Gets the base address of the locked input pixel buffer.
guard let inputBaseAddress = CVPixelBufferGetBaseAddress(self) else {
return nil
}
// Gets vImage Buffer from input image
var inputVImageBuffer = vImage_Buffer(data: inputBaseAddress, height: UInt(imageHeight), width: UInt(imageWidth), rowBytes: inputImageRowBytes)
let scaledImageRowBytes = Int(size.width) * imageChannels
guard let scaledImageBytes = malloc(Int(size.height) * scaledImageRowBytes) else {
return nil
}
// Allocates a vImage buffer for scaled image.
var scaledVImageBuffer = vImage_Buffer(data: scaledImageBytes, height: UInt(size.height), width: UInt(size.width), rowBytes: scaledImageRowBytes)
// Performs the scale operation on input image buffer and stores it in scaled image buffer.
let scaleError = vImageScale_ARGB8888(&inputVImageBuffer, &scaledVImageBuffer, nil, vImage_Flags(0))
CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
guard scaleError == kvImageNoError else {
return nil
}
let releaseCallBack: CVPixelBufferReleaseBytesCallback = {mutablePointer, pointer in
if let pointer = pointer {
free(UnsafeMutableRawPointer(mutating: pointer))
}
}
var scaledPixelBuffer: CVPixelBuffer?
// Converts the scaled vImage buffer to CVPixelBuffer
let conversionStatus = CVPixelBufferCreateWithBytes(nil, Int(size.width), Int(size.height), pixelBufferType, scaledImageBytes, scaledImageRowBytes, releaseCallBack, nil, nil, &scaledPixelBuffer)
guard conversionStatus == kCVReturnSuccess else {
free(scaledImageBytes)
return nil
}
return scaledPixelBuffer
}
func toUIImage() -> UIImage {
let ciimage : CIImage = CIImage(cvPixelBuffer: self)
let imageView : UIImage = ciimage.toUIImage()
return imageView
}
func crop(rect: CGRect, scale: CGFloat) -> CVPixelBuffer? {
    // Note: `scale` is currently unused; `rect` is taken in pixel coordinates.
    guard let cgImage = CGImage.create(pixelBuffer: self),
          let cropped = cgImage.cropping(to: rect) else { return nil }
    return cropped.toCVPixelBuffer()
}
}
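A sketch of preparing a camera frame for inference with the helpers above; the 224×224 input size is an assumed value, not taken from the SDK's bundled models:

import AVFoundation

// Assumed model input size; the bundled .tflite models may expect something else.
let modelInputSize = CGSize(width: 224, height: 224)

func preprocess(_ frame: CVPixelBuffer) -> CVPixelBuffer? {
    // Center-crop to a square, then scale to the model dimensions.
    return frame.centerThumbnail(ofSize: modelInputSize)
}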
//
// ExtUIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIImage {
//Crops the image to the given rect for display
func crop(rect: CGRect, scale: CGFloat) -> UIImage? {
    UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
    // Offset the draw origin so only the requested rect (scaled) lands in the context.
    self.draw(at: CGPoint(x: -rect.minX / scale, y: -rect.minY / scale))
    let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return croppedImage
}
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
    // Assumes dataImage holds tightly packed RGBA pixels (4 bytes per pixel,
    // no row padding) whose dimensions match image.size.
    let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
    let r = CGFloat(dataImage[pixelInfo]) / 255.0
    let g = CGFloat(dataImage[pixelInfo+1]) / 255.0
    let b = CGFloat(dataImage[pixelInfo+2]) / 255.0
    let a = CGFloat(dataImage[pixelInfo+3]) / 255.0
    return UIColor(red: r, green: g, blue: b, alpha: a)
}
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
context.translateBy(x: newSize.width/2, y: newSize.height/2)
context.rotate(by: CGFloat(radians))
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
func resize(targetSize: CGSize) -> UIImage {
let size = self.size
let widthRatio = targetSize.width / self.size.width
let heightRatio = targetSize.height / self.size.height
var newSize: CGSize
if(widthRatio > heightRatio) {
newSize = CGSize(width: size.width * heightRatio, height: size.height * heightRatio)
} else {
newSize = CGSize(width: size.width * widthRatio, height: size.height * widthRatio)
}
let rect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, 1.0)
self.draw(in: rect)
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage!
}
func toCVPixel() -> CVPixelBuffer {
    // NOTE: EAGLContext/OpenGLES has been deprecated since iOS 12; CIContext()
    // (Metal-backed by default) is the modern replacement. The force-unwraps
    // assume a valid backing image.
    let ciimage = CIImage(image: self)
    let eaglContext = EAGLContext(api: .openGLES2)
    let tmpcontext = CIContext(eaglContext: eaglContext!)
    let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
    return cgimage!.toCVPixelBuffer()!
}
}
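Note that resize(targetSize:) scales by the smaller of the two ratios, i.e. aspect-fit, so one dimension can undershoot the target. A quick sketch with illustrative sizes:

func fitForDisplay(_ image: UIImage) -> UIImage {
    // A 1000x500 image resized toward 300x300 scales by min(0.3, 0.6) = 0.3 -> 300x150.
    return image.resize(targetSize: CGSize(width: 300, height: 300))
}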
......@@ -37,4 +37,24 @@ extension UIViewController {
// Stub: currently accepts every image.
func validateImage(image: Data) -> Bool {
    return true
}
func popupBackToCaptureCardFront() {
    let alert = UIAlertController(title: "Confirm".localized(), message: "You will go back to front card photography.".localized(), preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "No".localized(), style: .cancel, handler: nil))
    let actionOk = UIAlertAction(title: "Yes".localized(), style: .default) { _ in
        guard let navigationController = self.navigationController else { return }
        // Pop back to the front-card capture screen if it is on the stack.
        for controller in navigationController.viewControllers {
            if controller.isKind(of: SBKCaptureCardVC.self) {
                navigationController.popToViewController(controller, animated: true)
                break
            }
        }
    }
    alert.addAction(actionOk)
    present(alert, animated: true, completion: nil)
}
}
......@@ -57,9 +57,7 @@ class SBKValidateInput {
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
if result == nil {
return .ERROR
}
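The guard above throttles inference by timestamp; the same pattern as a standalone sketch (the class name and the 200 ms interval are illustrative, not SDK API):

import Foundation

final class InferenceThrottle {
    private var previousInferenceTimeMs: Double = 0
    private let delayBetweenInferencesMs: Double = 200 // assumed interval

    // Returns true when enough time has elapsed to run the model again.
    func shouldRun() -> Bool {
        let currentTimeMs = Date().timeIntervalSince1970 * 1000
        guard currentTimeMs - previousInferenceTimeMs >= delayBetweenInferencesMs else { return false }
        previousInferenceTimeMs = currentTimeMs
        return true
    }
}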
......@@ -72,7 +70,7 @@ class SBKValidateInput {
return .IMAGE_BACK
case 3:
return .PASSPORT
case 4:
case 4, 5, 6, 7, 8, 9:
return .IMAGE_FAKE
default:
return .ERROR
......
......@@ -6,13 +6,45 @@ target 'OCR-SDK' do
use_frameworks!
# Pods for OCR-SDK
pod 'TensorFlowLiteSwift'
pod 'TensorFlowLiteSwift', '~> 0.0.1-nightly'
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
#post_install do |installer|
# installer.pods_project.build_configurations.each do |config|
# config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
# config.build_settings["BITCODE_GENERATION_MODE"] = "bitcode"
# end
#end
# Enable bitcode for all pods
post_install do |installer|
  installer.pods_project.build_configurations.each do |config|
    config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
  end
  installer.pods_project.targets.each do |target|
    target.build_configurations.each do |config|
      # restrict to the architectures the SDK ships for
      config.build_settings['VALID_ARCHS'] = 'arm64 armv7 armv7s i386 x86_64'
      # build every architecture, not only the active one (including Debug)
      config.build_settings['ONLY_ACTIVE_ARCH'] = 'NO'
      config.build_settings['ENABLE_BITCODE'] = 'YES'
      if config.name == 'Release' || config.name == 'Pro'
        config.build_settings['BITCODE_GENERATION_MODE'] = 'bitcode'
      else # Debug
        config.build_settings['BITCODE_GENERATION_MODE'] = 'marker'
      end
      cflags = config.build_settings['OTHER_CFLAGS'] || ['$(inherited)']
      if config.name == 'Release' || config.name == 'Pro'
        cflags << '-fembed-bitcode'
      else # Debug
        cflags << '-fembed-bitcode-marker'
      end
      config.build_settings['OTHER_CFLAGS'] = cflags
    end
  end
end
PODS:
- TensorFlowLiteC (2.2.0)
- TensorFlowLiteSwift (2.2.0):
- TensorFlowLiteC (= 2.2.0)
- TensorFlowLiteC (0.0.1-nightly.20211102):
- TensorFlowLiteC/Core (= 0.0.1-nightly.20211102)
- TensorFlowLiteC/Core (0.0.1-nightly.20211102)
- TensorFlowLiteSwift (0.0.1-nightly.20211102):
- TensorFlowLiteSwift/Core (= 0.0.1-nightly.20211102)
- TensorFlowLiteSwift/Core (0.0.1-nightly.20211102):
- TensorFlowLiteC (= 0.0.1-nightly.20211102)
DEPENDENCIES:
- TensorFlowLiteSwift
- TensorFlowLiteSwift (~> 0.0.1-nightly)
SPEC REPOS:
trunk:
......@@ -12,9 +16,9 @@ SPEC REPOS:
- TensorFlowLiteSwift
SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
TensorFlowLiteC: 671507d2d839e633f3dc7ab464677d28857a37b6
TensorFlowLiteSwift: 039777297cdea077fcdd2f8a0f000c1d651ac45f
PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
COCOAPODS: 1.10.1
COCOAPODS: 1.10.2
PODS:
- TensorFlowLiteC (2.2.0)
- TensorFlowLiteSwift (2.2.0):
- TensorFlowLiteC (= 2.2.0)
- TensorFlowLiteC (0.0.1-nightly.20211102):
- TensorFlowLiteC/Core (= 0.0.1-nightly.20211102)
- TensorFlowLiteC/Core (0.0.1-nightly.20211102)
- TensorFlowLiteSwift (0.0.1-nightly.20211102):
- TensorFlowLiteSwift/Core (= 0.0.1-nightly.20211102)
- TensorFlowLiteSwift/Core (0.0.1-nightly.20211102):
- TensorFlowLiteC (= 0.0.1-nightly.20211102)
DEPENDENCIES:
- TensorFlowLiteSwift
- TensorFlowLiteSwift (~> 0.0.1-nightly)
SPEC REPOS:
trunk:
......@@ -12,9 +16,9 @@ SPEC REPOS:
- TensorFlowLiteSwift
SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
TensorFlowLiteC: 671507d2d839e633f3dc7ab464677d28857a37b6
TensorFlowLiteSwift: 039777297cdea077fcdd2f8a0f000c1d651ac45f
PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
PODFILE CHECKSUM: 5d1bc9d5125d5fec48a2110d5d6596947a9bac74
COCOAPODS: 1.10.1
COCOAPODS: 1.10.2
......@@ -20,37 +20,36 @@
/* Begin PBXBuildFile section */
0E50E955DF4C94AC6E7C2697FA5B9C7A /* Pods-OCR-SDK-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
159E5ED488040D65D259274291C04076 /* TensorFlowLiteSwift-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */; };
3007E028EAC848C7FE9EB82C555DEEB1 /* TensorFlowLiteSwift-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
47BF9B1F51D39FF8329114D77A2AF59B /* TensorFlowLite.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */; };
4B299DCBDED6FB976CD401970CCD2B09 /* Tensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */; };
5C436F69B5E1626E44091BFE1094AA29 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
5CE750D966ADCFB96953D9059A7B2EC0 /* Interpreter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */; };
8C5A6C7DD5512D6B750AB6526168E12E /* Delegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */; };
9A479BCCD778CC0BD44D8B0D93971E75 /* MetalDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */; };
A2CEC558C0CD29098EE844CC6BF8744A /* InterpreterError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */; };
1EF42BA7A3728956E0819AA1586A19E0 /* InterpreterError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0913DC467F75C7CDF2AE04C72B69A1FB /* InterpreterError.swift */; };
35CC16F28EFE959BA1EDD59D0A1A579D /* QuantizationParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = F09E3157B8F619A01871F4BF3F3D5F4A /* QuantizationParameters.swift */; };
89DFE11EB6B97622CBE570E11A422B2E /* Interpreter.swift in Sources */ = {isa = PBXBuildFile; fileRef = F9C3BA0790EFEE218C4A5B8BAF9E2A88 /* Interpreter.swift */; };
A4419DB0A7CDB0A886E77F5FCF873CF8 /* Pods-OCR-SDK-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */; };
A8115D88A11116E6295A00D03464DEA3 /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 89DEEE5839289BAF4A77F053163A252B /* Model.swift */; };
A6791FA73099A3B48BA52F23ECCB68A5 /* TensorFlowLiteSwift-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = 7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
B55CE53BD5779359C4A8B8043853867F /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
FE1B44864AF81A13ACF3A198F81D3EA0 /* QuantizationParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = 85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */; };
BF7ACB61FB65A0CDE562908EA112C028 /* Tensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */; };
BF92AADA2ACACF7675F24D2858B4E641 /* TensorFlowLite.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */; };
C253298E121F79C0613AADC72D225217 /* TensorFlowLiteSwift-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 02877DCEF805FE230F0F6C1718AF2E3E /* TensorFlowLiteSwift-dummy.m */; };
E3387D3D7DF235DD921900AECE7177DF /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */; };
E43D8122B3F44AC0C1C4063E9FE12FCE /* Delegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */; };
F17F59AED183359775B920759288A367 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
4AFBB2D1F12F0A7C0221B72930320356 /* PBXContainerItemProxy */ = {
7186AA350E2F51E290EB66113727A4BB /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
remoteInfo = TensorFlowLiteC;
};
9870C87BB4FE7718B31A636346B46856 /* PBXContainerItemProxy */ = {
74A713F5EF5E6AC9BBF4B0F890C83299 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
remoteInfo = TensorFlowLiteC;
};
AE7AC2AACDE9489CCFFB0B316945A9B3 /* PBXContainerItemProxy */ = {
95F66FC5DE60D6FA2B1A8FB0A7BA488B /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
......@@ -60,52 +59,51 @@
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.debug.xcconfig; sourceTree = "<group>"; };
1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = MetalDelegate.swift; path = tensorflow/lite/experimental/swift/Sources/MetalDelegate.swift; sourceTree = "<group>"; };
26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "TensorFlowLiteSwift-dummy.m"; sourceTree = "<group>"; };
02877DCEF805FE230F0F6C1718AF2E3E /* TensorFlowLiteSwift-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "TensorFlowLiteSwift-dummy.m"; sourceTree = "<group>"; };
067155ADE547B880C324E22691E663E7 /* TensorFlowLiteSwift-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-prefix.pch"; sourceTree = "<group>"; };
0913DC467F75C7CDF2AE04C72B69A1FB /* InterpreterError.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = InterpreterError.swift; path = tensorflow/lite/swift/Sources/InterpreterError.swift; sourceTree = "<group>"; };
2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = TensorFlowLite.swift; path = tensorflow/lite/swift/Sources/TensorFlowLite.swift; sourceTree = "<group>"; };
28C1063AEAB9221745A5095D0A5CB58B /* TensorFlowLiteC.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.release.xcconfig; sourceTree = "<group>"; };
319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "Pods-OCR-SDK-dummy.m"; sourceTree = "<group>"; };
4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.release.xcconfig; sourceTree = "<group>"; };
5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = Pods_OCR_SDK.framework; path = "Pods-OCR-SDK.framework"; sourceTree = BUILT_PRODUCTS_DIR; };
62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Tensor.swift; path = tensorflow/lite/experimental/swift/Sources/Tensor.swift; sourceTree = "<group>"; };
68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Delegate.swift; path = tensorflow/lite/experimental/swift/Sources/Delegate.swift; sourceTree = "<group>"; };
6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = InterpreterError.swift; path = tensorflow/lite/experimental/swift/Sources/InterpreterError.swift; sourceTree = "<group>"; };
561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Model.swift; path = tensorflow/lite/swift/Sources/Model.swift; sourceTree = "<group>"; };
73010CC983E3809BECEE5348DA1BB8C6 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS14.0.sdk/System/Library/Frameworks/Foundation.framework; sourceTree = DEVELOPER_DIR; };
7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.release.xcconfig; sourceTree = "<group>"; };
85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = QuantizationParameters.swift; path = tensorflow/lite/experimental/swift/Sources/QuantizationParameters.swift; sourceTree = "<group>"; };
89DEEE5839289BAF4A77F053163A252B /* Model.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Model.swift; path = tensorflow/lite/experimental/swift/Sources/Model.swift; sourceTree = "<group>"; };
7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-umbrella.h"; sourceTree = "<group>"; };
80B58F1E1746E350BEB337CA9F756D65 /* TensorFlowLiteSwift.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.debug.xcconfig; sourceTree = "<group>"; };
95AF8B70832AC786D25547BE83582636 /* TensorFlowLiteC.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.debug.xcconfig; sourceTree = "<group>"; };
9D940727FF8FB9C785EB98E56350EF41 /* Podfile */ = {isa = PBXFileReference; explicitFileType = text.script.ruby; includeInIndex = 1; indentWidth = 2; lastKnownFileType = text; name = Podfile; path = ../Podfile; sourceTree = SOURCE_ROOT; tabWidth = 2; xcLanguageSpecificationIdentifier = xcode.lang.ruby; };
A5060FDF49BFDF183010E92441345498 /* TensorFlowLiteC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = TensorFlowLiteC.framework; path = Frameworks/TensorFlowLiteC.framework; sourceTree = "<group>"; };
A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = TensorFlowLite.swift; path = tensorflow/lite/experimental/swift/Sources/TensorFlowLite.swift; sourceTree = "<group>"; };
AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
B303EFFF1FC874FA6CED46B45A292FB8 /* Pods-OCR-SDK-acknowledgements.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-OCR-SDK-acknowledgements.plist"; sourceTree = "<group>"; };
B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Interpreter.swift; path = tensorflow/lite/experimental/swift/Sources/Interpreter.swift; sourceTree = "<group>"; };
BE006A1273F8AB1A760841C70B8ECC17 /* TensorFlowLiteSwift-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-prefix.pch"; sourceTree = "<group>"; };
C01D0D72AF295736AFA18A2133847DC0 /* TensorFlowLiteSwift-Info.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "TensorFlowLiteSwift-Info.plist"; sourceTree = "<group>"; };
CD35CA67D5609CA37E90AB1EB079DA0E /* Pods-OCR-SDK-acknowledgements.markdown */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = "Pods-OCR-SDK-acknowledgements.markdown"; sourceTree = "<group>"; };
CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.debug.xcconfig; sourceTree = "<group>"; };
DADD9C83FCF7E2F0C4969D06D0E88B94 /* TensorFlowLiteSwift.modulemap */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.module; path = TensorFlowLiteSwift.modulemap; sourceTree = "<group>"; };
D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Tensor.swift; path = tensorflow/lite/swift/Sources/Tensor.swift; sourceTree = "<group>"; };
DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.release.xcconfig; sourceTree = "<group>"; };
E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = TensorFlowLite.framework; path = TensorFlowLiteSwift.framework; sourceTree = BUILT_PRODUCTS_DIR; };
E4EEE4038E7A669620B485A8C87E940B /* TensorFlowLiteSwift-Info.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "TensorFlowLiteSwift-Info.plist"; sourceTree = "<group>"; };
E5BE6F6A63BB5D807303C4C35FBA7AC5 /* TensorFlowLiteC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = TensorFlowLiteC.framework; path = Frameworks/TensorFlowLiteC.framework; sourceTree = "<group>"; };
E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Delegate.swift; path = tensorflow/lite/swift/Sources/Delegate.swift; sourceTree = "<group>"; };
F09E3157B8F619A01871F4BF3F3D5F4A /* QuantizationParameters.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = QuantizationParameters.swift; path = tensorflow/lite/swift/Sources/QuantizationParameters.swift; sourceTree = "<group>"; };
F3B636B674AD0C3233D0DBB52FB64E70 /* Pods-OCR-SDK.modulemap */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.module; path = "Pods-OCR-SDK.modulemap"; sourceTree = "<group>"; };
F8FF339CAAA73550A8B6730FB164563D /* Pods-OCR-SDK-Info.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-OCR-SDK-Info.plist"; sourceTree = "<group>"; };
FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-umbrella.h"; sourceTree = "<group>"; };
F9C3BA0790EFEE218C4A5B8BAF9E2A88 /* Interpreter.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Interpreter.swift; path = tensorflow/lite/swift/Sources/Interpreter.swift; sourceTree = "<group>"; };
FB49E79B09F04392F9E81293E19F0BB3 /* TensorFlowLiteSwift.modulemap */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.module; path = TensorFlowLiteSwift.modulemap; sourceTree = "<group>"; };
FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "Pods-OCR-SDK-umbrella.h"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
8729DA754D88D85B8A8D6C8102FB3D61 /* Frameworks */ = {
546F04BEEFDD30F503EE46336EDF4B53 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
B55CE53BD5779359C4A8B8043853867F /* Foundation.framework in Frameworks */,
F17F59AED183359775B920759288A367 /* Foundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
8B6DD0D4147F5310BBD57700589CE1DC /* Frameworks */ = {
8729DA754D88D85B8A8D6C8102FB3D61 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
5C436F69B5E1626E44091BFE1094AA29 /* Foundation.framework in Frameworks */,
B55CE53BD5779359C4A8B8043853867F /* Foundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
......@@ -128,15 +126,23 @@
path = "Target Support Files/Pods-OCR-SDK";
sourceTree = "<group>";
};
1CAE4DFA30976CB39FF45A59917E2812 /* Pods */ = {
2DAE4706825B937CABA2FBBBA1A265AE /* Pods */ = {
isa = PBXGroup;
children = (
6661CAE509B528212186DCE3A5F21F03 /* TensorFlowLiteC */,
A60D71B0C504F0801CA7AF415D647168 /* TensorFlowLiteSwift */,
5BAAAD17FF87DC625BFA6C1E0F1B067D /* TensorFlowLiteC */,
C269C3FC95242DBA88414DF90B33BDB3 /* TensorFlowLiteSwift */,
);
name = Pods;
sourceTree = "<group>";
};
4585CF3ECE5F665A5620382136CB39B4 /* Core */ = {
isa = PBXGroup;
children = (
529BB94F807F2327E94EE6E69FFB0527 /* Frameworks */,
);
name = Core;
sourceTree = "<group>";
};
5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */ = {
isa = PBXGroup;
children = (
......@@ -146,14 +152,12 @@
name = Products;
sourceTree = "<group>";
};
56FDD2F0EC6001AEBBCE1A2B224A3117 /* Support Files */ = {
529BB94F807F2327E94EE6E69FFB0527 /* Frameworks */ = {
isa = PBXGroup;
children = (
195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */,
7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */,
E5BE6F6A63BB5D807303C4C35FBA7AC5 /* TensorFlowLiteC.framework */,
);
name = "Support Files";
path = "../Target Support Files/TensorFlowLiteC";
name = Frameworks;
sourceTree = "<group>";
};
578452D2E740E91742655AC8F1636D1F /* iOS */ = {
......@@ -164,46 +168,53 @@
name = iOS;
sourceTree = "<group>";
};
6661CAE509B528212186DCE3A5F21F03 /* TensorFlowLiteC */ = {
5BAAAD17FF87DC625BFA6C1E0F1B067D /* TensorFlowLiteC */ = {
isa = PBXGroup;
children = (
FE52148DC6DE55C0D03EB75D362940B6 /* Frameworks */,
56FDD2F0EC6001AEBBCE1A2B224A3117 /* Support Files */,
4585CF3ECE5F665A5620382136CB39B4 /* Core */,
7883D61179A120721C8404E382EF620A /* Support Files */,
);
name = TensorFlowLiteC;
path = TensorFlowLiteC;
sourceTree = "<group>";
};
91D362ED26DBFCB0F7D03D165E0B9F45 /* Support Files */ = {
7170AAEA90A9F81D73AF9FEC2F47F645 /* Support Files */ = {
isa = PBXGroup;
children = (
DADD9C83FCF7E2F0C4969D06D0E88B94 /* TensorFlowLiteSwift.modulemap */,
26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */,
E4EEE4038E7A669620B485A8C87E940B /* TensorFlowLiteSwift-Info.plist */,
BE006A1273F8AB1A760841C70B8ECC17 /* TensorFlowLiteSwift-prefix.pch */,
FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */,
CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */,
4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */,
FB49E79B09F04392F9E81293E19F0BB3 /* TensorFlowLiteSwift.modulemap */,
02877DCEF805FE230F0F6C1718AF2E3E /* TensorFlowLiteSwift-dummy.m */,
C01D0D72AF295736AFA18A2133847DC0 /* TensorFlowLiteSwift-Info.plist */,
067155ADE547B880C324E22691E663E7 /* TensorFlowLiteSwift-prefix.pch */,
7BB78CF0D5141ED7C4167574B56FDEEF /* TensorFlowLiteSwift-umbrella.h */,
80B58F1E1746E350BEB337CA9F756D65 /* TensorFlowLiteSwift.debug.xcconfig */,
DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */,
);
name = "Support Files";
path = "../Target Support Files/TensorFlowLiteSwift";
sourceTree = "<group>";
};
A60D71B0C504F0801CA7AF415D647168 /* TensorFlowLiteSwift */ = {
75B402B1BE50C8CEB29BAC109041FE6A /* Core */ = {
isa = PBXGroup;
children = (
E8C5BB813B8D21775F145C7D96C9A1F8 /* Delegate.swift */,
F9C3BA0790EFEE218C4A5B8BAF9E2A88 /* Interpreter.swift */,
0913DC467F75C7CDF2AE04C72B69A1FB /* InterpreterError.swift */,
561F721947FF13763E2BFBD88E1D17D4 /* Model.swift */,
F09E3157B8F619A01871F4BF3F3D5F4A /* QuantizationParameters.swift */,
D1B54317E5FC5FFAA397A37D6DCF30C7 /* Tensor.swift */,
2522449B7C98F47092B5FA08DE960223 /* TensorFlowLite.swift */,
);
name = Core;
sourceTree = "<group>";
};
7883D61179A120721C8404E382EF620A /* Support Files */ = {
isa = PBXGroup;
children = (
68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */,
B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */,
6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */,
1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */,
89DEEE5839289BAF4A77F053163A252B /* Model.swift */,
85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */,
62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */,
A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */,
91D362ED26DBFCB0F7D03D165E0B9F45 /* Support Files */,
95AF8B70832AC786D25547BE83582636 /* TensorFlowLiteC.debug.xcconfig */,
28C1063AEAB9221745A5095D0A5CB58B /* TensorFlowLiteC.release.xcconfig */,
);
name = TensorFlowLiteSwift;
path = TensorFlowLiteSwift;
name = "Support Files";
path = "../Target Support Files/TensorFlowLiteC";
sourceTree = "<group>";
};
C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */ = {
......@@ -214,12 +225,22 @@
name = "Targets Support Files";
sourceTree = "<group>";
};
C269C3FC95242DBA88414DF90B33BDB3 /* TensorFlowLiteSwift */ = {
isa = PBXGroup;
children = (
75B402B1BE50C8CEB29BAC109041FE6A /* Core */,
7170AAEA90A9F81D73AF9FEC2F47F645 /* Support Files */,
);
name = TensorFlowLiteSwift;
path = TensorFlowLiteSwift;
sourceTree = "<group>";
};
CF1408CF629C7361332E53B88F7BD30C = {
isa = PBXGroup;
children = (
9D940727FF8FB9C785EB98E56350EF41 /* Podfile */,
D210D550F4EA176C3123ED886F8F87F5 /* Frameworks */,
1CAE4DFA30976CB39FF45A59917E2812 /* Pods */,
2DAE4706825B937CABA2FBBBA1A265AE /* Pods */,
5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */,
C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */,
);
......@@ -233,22 +254,14 @@
name = Frameworks;
sourceTree = "<group>";
};
FE52148DC6DE55C0D03EB75D362940B6 /* Frameworks */ = {
isa = PBXGroup;
children = (
A5060FDF49BFDF183010E92441345498 /* TensorFlowLiteC.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
0F9B2386AF71C96911F88C28B73F4C75 /* Headers */ = {
B81C9F64D8DFE71EB24113D3EA3667D3 /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
3007E028EAC848C7FE9EB82C555DEEB1 /* TensorFlowLiteSwift-umbrella.h in Headers */,
A6791FA73099A3B48BA52F23ECCB68A5 /* TensorFlowLiteSwift-umbrella.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
......@@ -265,17 +278,17 @@
/* Begin PBXNativeTarget section */
10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */ = {
isa = PBXNativeTarget;
buildConfigurationList = 1B801DC8F9DF101559E64AF88DCA3C38 /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */;
buildConfigurationList = 44F0B3745F9D5A6855C1205F3C0401AE /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */;
buildPhases = (
0F9B2386AF71C96911F88C28B73F4C75 /* Headers */,
0BAE3604C85CF619AAD6CF0F71B70A87 /* Sources */,
8B6DD0D4147F5310BBD57700589CE1DC /* Frameworks */,
7FEB097FEB7BE4080C0F6A64E1BA754B /* Resources */,
B81C9F64D8DFE71EB24113D3EA3667D3 /* Headers */,
0815E5E7EEAD07CAC94FB6DF593B3868 /* Sources */,
546F04BEEFDD30F503EE46336EDF4B53 /* Frameworks */,
42DA36B2352679AE6D81CC3D497062B4 /* Resources */,
);
buildRules = (
);
dependencies = (
9E74450114542DC132D78B2E754EB799 /* PBXTargetDependency */,
EF69265E72B2B6190ECEC50A2F624C11 /* PBXTargetDependency */,
);
name = TensorFlowLiteSwift;
productName = TensorFlowLiteSwift;
......@@ -294,8 +307,8 @@
buildRules = (
);
dependencies = (
F23723F1BA35C6E95E1869D00B39A6D1 /* PBXTargetDependency */,
BB492F9DF4CA3F4EC2ABF5405274BEFE /* PBXTargetDependency */,
B9B3EA213DF319228DCB503D0F59B5A2 /* PBXTargetDependency */,
82F118637C8A2F46BA62DF3BCE921D75 /* PBXTargetDependency */,
);
name = "Pods-OCR-SDK";
productName = "Pods-OCR-SDK";
......@@ -308,8 +321,8 @@
BFDFE7DC352907FC980B868725387E98 /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 1100;
LastUpgradeCheck = 1100;
LastSwiftUpdateCheck = 1240;
LastUpgradeCheck = 1240;
};
buildConfigurationList = 4821239608C13582E20E6DA73FD5F1F9 /* Build configuration list for PBXProject "Pods" */;
compatibilityVersion = "Xcode 9.3";
......@@ -332,7 +345,7 @@
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7FEB097FEB7BE4080C0F6A64E1BA754B /* Resources */ = {
42DA36B2352679AE6D81CC3D497062B4 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
......@@ -349,19 +362,18 @@
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
0BAE3604C85CF619AAD6CF0F71B70A87 /* Sources */ = {
0815E5E7EEAD07CAC94FB6DF593B3868 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
8C5A6C7DD5512D6B750AB6526168E12E /* Delegate.swift in Sources */,
5CE750D966ADCFB96953D9059A7B2EC0 /* Interpreter.swift in Sources */,
A2CEC558C0CD29098EE844CC6BF8744A /* InterpreterError.swift in Sources */,
9A479BCCD778CC0BD44D8B0D93971E75 /* MetalDelegate.swift in Sources */,
A8115D88A11116E6295A00D03464DEA3 /* Model.swift in Sources */,
FE1B44864AF81A13ACF3A198F81D3EA0 /* QuantizationParameters.swift in Sources */,
4B299DCBDED6FB976CD401970CCD2B09 /* Tensor.swift in Sources */,
47BF9B1F51D39FF8329114D77A2AF59B /* TensorFlowLite.swift in Sources */,
159E5ED488040D65D259274291C04076 /* TensorFlowLiteSwift-dummy.m in Sources */,
E43D8122B3F44AC0C1C4063E9FE12FCE /* Delegate.swift in Sources */,
89DFE11EB6B97622CBE570E11A422B2E /* Interpreter.swift in Sources */,
1EF42BA7A3728956E0819AA1586A19E0 /* InterpreterError.swift in Sources */,
E3387D3D7DF235DD921900AECE7177DF /* Model.swift in Sources */,
35CC16F28EFE959BA1EDD59D0A1A579D /* QuantizationParameters.swift in Sources */,
BF7ACB61FB65A0CDE562908EA112C028 /* Tensor.swift in Sources */,
BF92AADA2ACACF7675F24D2858B4E641 /* TensorFlowLite.swift in Sources */,
C253298E121F79C0613AADC72D225217 /* TensorFlowLiteSwift-dummy.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
......@@ -376,23 +388,23 @@
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
9E74450114542DC132D78B2E754EB799 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteC;
target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
targetProxy = 9870C87BB4FE7718B31A636346B46856 /* PBXContainerItemProxy */;
};
BB492F9DF4CA3F4EC2ABF5405274BEFE /* PBXTargetDependency */ = {
82F118637C8A2F46BA62DF3BCE921D75 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteSwift;
target = 10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */;
targetProxy = AE7AC2AACDE9489CCFFB0B316945A9B3 /* PBXContainerItemProxy */;
targetProxy = 95F66FC5DE60D6FA2B1A8FB0A7BA488B /* PBXContainerItemProxy */;
};
B9B3EA213DF319228DCB503D0F59B5A2 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteC;
target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
targetProxy = 74A713F5EF5E6AC9BBF4B0F890C83299 /* PBXContainerItemProxy */;
};
F23723F1BA35C6E95E1869D00B39A6D1 /* PBXTargetDependency */ = {
EF69265E72B2B6190ECEC50A2F624C11 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteC;
target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
targetProxy = 4AFBB2D1F12F0A7C0221B72930320356 /* PBXContainerItemProxy */;
targetProxy = 7186AA350E2F51E290EB66113727A4BB /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
......@@ -402,6 +414,7 @@
baseConfigurationReference = AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = marker;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
......@@ -411,6 +424,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
......@@ -421,6 +435,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
......@@ -429,6 +448,7 @@
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -436,18 +456,70 @@
};
5B27AD80C84B2688EBB2932F044E0363 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */;
baseConfigurationReference = 28C1063AEAB9221745A5095D0A5CB58B /* TensorFlowLiteC.release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = bitcode;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Release;
};
6B7B33209571F28B85CE49B1C78BD7E2 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = DB7493E289C58AD80ED1C5CA0650F203 /* TensorFlowLiteSwift.release.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = bitcode;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
......@@ -489,7 +561,6 @@
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
......@@ -552,7 +623,6 @@
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
......@@ -583,24 +653,33 @@
};
B577B5487933DC5B570EF057336156E5 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */;
baseConfigurationReference = 95AF8B70832AC786D25547BE83582636 /* TensorFlowLiteC.debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = marker;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Debug;
};
C5821B1A36E5FA22C0539BCDFF79EEEE /* Release */ = {
B976E9C0D4ED126FD693AEF88E605112 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */;
baseConfigurationReference = 80B58F1E1746E350BEB337CA9F756D65 /* TensorFlowLiteSwift.debug.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = marker;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
......@@ -609,6 +688,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
......@@ -620,42 +700,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
E5639158B25FD66B96B6044043FC8E95 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */;
buildSettings = {
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
"-fembed-bitcode-marker",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
......@@ -663,6 +712,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -673,6 +723,7 @@
baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
......@@ -682,6 +733,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
......@@ -692,6 +744,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
......@@ -701,6 +758,7 @@
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
......@@ -709,20 +767,20 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
1B801DC8F9DF101559E64AF88DCA3C38 /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */ = {
243169D7A56C0468A9C10463619D018A /* Build configuration list for PBXAggregateTarget "TensorFlowLiteC" */ = {
isa = XCConfigurationList;
buildConfigurations = (
E5639158B25FD66B96B6044043FC8E95 /* Debug */,
C5821B1A36E5FA22C0539BCDFF79EEEE /* Release */,
B577B5487933DC5B570EF057336156E5 /* Debug */,
5B27AD80C84B2688EBB2932F044E0363 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
243169D7A56C0468A9C10463619D018A /* Build configuration list for PBXAggregateTarget "TensorFlowLiteC" */ = {
44F0B3745F9D5A6855C1205F3C0401AE /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */ = {
isa = XCConfigurationList;
buildConfigurations = (
B577B5487933DC5B570EF057336156E5 /* Debug */,
5B27AD80C84B2688EBB2932F044E0363 /* Release */,
B976E9C0D4ED126FD693AEF88E605112 /* Debug */,
6B7B33209571F28B85CE49B1C78BD7E2 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
......
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
LastUpgradeVersion = "1240"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
......
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
LastUpgradeVersion = "1240"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
......@@ -14,9 +14,9 @@
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DE1F4D51AD94C30627575AEE202FD099"
BuildableName = "Pods_OCR_SDK.framework"
BlueprintName = "Pods-OCR-SDK"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
......
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
LastUpgradeVersion = "1240"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
......
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "10418167F619D6DA72BADAD10F9EC02B"
BuildableName = "TensorFlowLite.framework"
BlueprintName = "TensorFlowLiteSwift"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
</dict>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
</dict>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
......@@ -3,8 +3,6 @@ This application makes use of the following third party libraries:
## TensorFlowLiteSwift
Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
......@@ -207,4 +205,87 @@ Copyright 2019 The TensorFlow Authors. All rights reserved.
See the License for the specific language governing permissions and
limitations under the License.
------------------
Files: third_party/compute_library/...
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------
Files: ACKNOWLEDGEMENTS
LICENSE
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
------------------
Files: third_party/hexagon
Copyright (c) 2016-2019, The Linux Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the
disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The Linux Foundation nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Generated by CocoaPods - https://cocoapods.org
......@@ -14,9 +14,7 @@
</dict>
<dict>
<key>FooterText</key>
<string>Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
<string> Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
......@@ -217,6 +215,89 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------
Files: third_party/compute_library/...
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------
Files: ACKNOWLEDGEMENTS
LICENSE
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
------------------
Files: third_party/hexagon
Copyright (c) 2016-2019, The Linux Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the
disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The Linux Foundation nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
</string>
<key>License</key>
<string>Apache</string>
......
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
#!/bin/sh
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
# If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
# resources to, so exit 0 (signalling the script phase was successful).
exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
case "${TARGETED_DEVICE_FAMILY:-}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
install_resource()
{
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
echo "$RESOURCE_PATH" || true
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
else
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
fi
fi
......@@ -15,7 +15,7 @@
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>2.2.0</string>
<string>0.0.1</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
......
#import "builtin_ops.h"
#import "c_api.h"
#import "c_api_experimental.h"
#import "common.h"
#import "metal_delegate.h"
#import "xnnpack_delegate.h"
#import "c_api_types.h"
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_BUILTIN_OPS_H_
#define TENSORFLOW_LITE_BUILTIN_OPS_H_
// DO NOT EDIT MANUALLY: This file is automatically generated by
// `schema/builtin_ops_header/generator.cc`.
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
// The enum for builtin operators.
// Note: CUSTOM, DELEGATE, and PLACEHOLDER_FOR_GREATER_OP_CODES are 3 special
// ops which are not real built-in ops.
typedef enum {
kTfLiteBuiltinAdd = 0,
kTfLiteBuiltinAveragePool2d = 1,
kTfLiteBuiltinConcatenation = 2,
kTfLiteBuiltinConv2d = 3,
kTfLiteBuiltinDepthwiseConv2d = 4,
kTfLiteBuiltinDepthToSpace = 5,
kTfLiteBuiltinDequantize = 6,
kTfLiteBuiltinEmbeddingLookup = 7,
kTfLiteBuiltinFloor = 8,
kTfLiteBuiltinFullyConnected = 9,
kTfLiteBuiltinHashtableLookup = 10,
kTfLiteBuiltinL2Normalization = 11,
kTfLiteBuiltinL2Pool2d = 12,
kTfLiteBuiltinLocalResponseNormalization = 13,
kTfLiteBuiltinLogistic = 14,
kTfLiteBuiltinLshProjection = 15,
kTfLiteBuiltinLstm = 16,
kTfLiteBuiltinMaxPool2d = 17,
kTfLiteBuiltinMul = 18,
kTfLiteBuiltinRelu = 19,
kTfLiteBuiltinReluN1To1 = 20,
kTfLiteBuiltinRelu6 = 21,
kTfLiteBuiltinReshape = 22,
kTfLiteBuiltinResizeBilinear = 23,
kTfLiteBuiltinRnn = 24,
kTfLiteBuiltinSoftmax = 25,
kTfLiteBuiltinSpaceToDepth = 26,
kTfLiteBuiltinSvdf = 27,
kTfLiteBuiltinTanh = 28,
kTfLiteBuiltinConcatEmbeddings = 29,
kTfLiteBuiltinSkipGram = 30,
kTfLiteBuiltinCall = 31,
kTfLiteBuiltinCustom = 32,
kTfLiteBuiltinEmbeddingLookupSparse = 33,
kTfLiteBuiltinPad = 34,
kTfLiteBuiltinUnidirectionalSequenceRnn = 35,
kTfLiteBuiltinGather = 36,
kTfLiteBuiltinBatchToSpaceNd = 37,
kTfLiteBuiltinSpaceToBatchNd = 38,
kTfLiteBuiltinTranspose = 39,
kTfLiteBuiltinMean = 40,
kTfLiteBuiltinSub = 41,
kTfLiteBuiltinDiv = 42,
kTfLiteBuiltinSqueeze = 43,
kTfLiteBuiltinUnidirectionalSequenceLstm = 44,
kTfLiteBuiltinStridedSlice = 45,
kTfLiteBuiltinBidirectionalSequenceRnn = 46,
kTfLiteBuiltinExp = 47,
kTfLiteBuiltinTopkV2 = 48,
kTfLiteBuiltinSplit = 49,
kTfLiteBuiltinLogSoftmax = 50,
kTfLiteBuiltinDelegate = 51,
kTfLiteBuiltinBidirectionalSequenceLstm = 52,
kTfLiteBuiltinCast = 53,
kTfLiteBuiltinPrelu = 54,
kTfLiteBuiltinMaximum = 55,
kTfLiteBuiltinArgMax = 56,
kTfLiteBuiltinMinimum = 57,
kTfLiteBuiltinLess = 58,
kTfLiteBuiltinNeg = 59,
kTfLiteBuiltinPadv2 = 60,
kTfLiteBuiltinGreater = 61,
kTfLiteBuiltinGreaterEqual = 62,
kTfLiteBuiltinLessEqual = 63,
kTfLiteBuiltinSelect = 64,
kTfLiteBuiltinSlice = 65,
kTfLiteBuiltinSin = 66,
kTfLiteBuiltinTransposeConv = 67,
kTfLiteBuiltinSparseToDense = 68,
kTfLiteBuiltinTile = 69,
kTfLiteBuiltinExpandDims = 70,
kTfLiteBuiltinEqual = 71,
kTfLiteBuiltinNotEqual = 72,
kTfLiteBuiltinLog = 73,
kTfLiteBuiltinSum = 74,
kTfLiteBuiltinSqrt = 75,
kTfLiteBuiltinRsqrt = 76,
kTfLiteBuiltinShape = 77,
kTfLiteBuiltinPow = 78,
kTfLiteBuiltinArgMin = 79,
kTfLiteBuiltinFakeQuant = 80,
kTfLiteBuiltinReduceProd = 81,
kTfLiteBuiltinReduceMax = 82,
kTfLiteBuiltinPack = 83,
kTfLiteBuiltinLogicalOr = 84,
kTfLiteBuiltinOneHot = 85,
kTfLiteBuiltinLogicalAnd = 86,
kTfLiteBuiltinLogicalNot = 87,
kTfLiteBuiltinUnpack = 88,
kTfLiteBuiltinReduceMin = 89,
kTfLiteBuiltinFloorDiv = 90,
kTfLiteBuiltinReduceAny = 91,
kTfLiteBuiltinSquare = 92,
kTfLiteBuiltinZerosLike = 93,
kTfLiteBuiltinFill = 94,
kTfLiteBuiltinFloorMod = 95,
kTfLiteBuiltinRange = 96,
kTfLiteBuiltinResizeNearestNeighbor = 97,
kTfLiteBuiltinLeakyRelu = 98,
kTfLiteBuiltinSquaredDifference = 99,
kTfLiteBuiltinMirrorPad = 100,
kTfLiteBuiltinAbs = 101,
kTfLiteBuiltinSplitV = 102,
kTfLiteBuiltinUnique = 103,
kTfLiteBuiltinCeil = 104,
kTfLiteBuiltinReverseV2 = 105,
kTfLiteBuiltinAddN = 106,
kTfLiteBuiltinGatherNd = 107,
kTfLiteBuiltinCos = 108,
kTfLiteBuiltinWhere = 109,
kTfLiteBuiltinRank = 110,
kTfLiteBuiltinElu = 111,
kTfLiteBuiltinReverseSequence = 112,
kTfLiteBuiltinMatrixDiag = 113,
kTfLiteBuiltinQuantize = 114,
kTfLiteBuiltinMatrixSetDiag = 115,
kTfLiteBuiltinRound = 116,
kTfLiteBuiltinHardSwish = 117,
kTfLiteBuiltinIf = 118,
kTfLiteBuiltinWhile = 119,
kTfLiteBuiltinNonMaxSuppressionV4 = 120,
kTfLiteBuiltinNonMaxSuppressionV5 = 121,
kTfLiteBuiltinScatterNd = 122,
kTfLiteBuiltinSelectV2 = 123,
kTfLiteBuiltinDensify = 124,
kTfLiteBuiltinSegmentSum = 125,
kTfLiteBuiltinBatchMatmul = 126,
kTfLiteBuiltinPlaceholderForGreaterOpCodes = 127,
kTfLiteBuiltinCumsum = 128,
kTfLiteBuiltinCallOnce = 129,
kTfLiteBuiltinBroadcastTo = 130,
kTfLiteBuiltinRfft2d = 131,
kTfLiteBuiltinConv3d = 132,
kTfLiteBuiltinImag = 133,
kTfLiteBuiltinReal = 134,
kTfLiteBuiltinComplexAbs = 135,
kTfLiteBuiltinHashtable = 136,
kTfLiteBuiltinHashtableFind = 137,
kTfLiteBuiltinHashtableImport = 138,
kTfLiteBuiltinHashtableSize = 139,
kTfLiteBuiltinReduceAll = 140,
kTfLiteBuiltinConv3dTranspose = 141,
kTfLiteBuiltinVarHandle = 142,
kTfLiteBuiltinReadVariable = 143,
kTfLiteBuiltinAssignVariable = 144,
kTfLiteBuiltinBroadcastArgs = 145,
kTfLiteBuiltinRandomStandardNormal = 146,
kTfLiteBuiltinBucketize = 147,
kTfLiteBuiltinRandomUniform = 148,
} TfLiteBuiltinOperator;
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // TENSORFLOW_LITE_BUILTIN_OPS_H_
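// Editor's sketch (not part of the generated header): the note above singles
// out CUSTOM, DELEGATE, and PLACEHOLDER_FOR_GREATER_OP_CODES as pseudo-ops,
// so code walking a model's op codes may want to filter them out. The helper
// name is hypothetical.
static inline int ExampleIsRealBuiltinOp(TfLiteBuiltinOperator op) {
  return op != kTfLiteBuiltinCustom && op != kTfLiteBuiltinDelegate &&
         op != kTfLiteBuiltinPlaceholderForGreaterOpCodes;
}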
......@@ -17,8 +17,9 @@ limitations under the License.
#include <stdarg.h>
#include <stdint.h>
#include <stdlib.h>
#include "common.h"
#include "c_api_types.h" // IWYU pragma: export
// --------------------------------------------------------------------------
/// C API for TensorFlow Lite.
......@@ -66,34 +67,39 @@ limitations under the License.
/// TfLiteInterpreterOptionsDelete(options);
/// TfLiteModelDelete(model);
#ifdef SWIG
#define TFL_CAPI_EXPORT
#else
#if defined(_WIN32)
#ifdef TFL_COMPILE_LIBRARY
#define TFL_CAPI_EXPORT __declspec(dllexport)
#else
#define TFL_CAPI_EXPORT __declspec(dllimport)
#endif // TFL_COMPILE_LIBRARY
#else
#define TFL_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // SWIG
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
// --------------------------------------------------------------------------
// TfLiteVersion returns a string describing version information of the
// TensorFlow Lite library. TensorFlow Lite uses semantic versioning.
TFL_CAPI_EXPORT extern const char* TfLiteVersion(void);
// Opaque types used by the C API.
// --------------------------------------------------------------------------
// TfLiteModel wraps a loaded TensorFlow Lite model.
typedef struct TfLiteModel TfLiteModel;
// TfLiteInterpreterOptions allows customized interpreter configuration.
typedef struct TfLiteInterpreterOptions TfLiteInterpreterOptions;
// Allows delegation of nodes to alternative backends.
typedef struct TfLiteDelegate TfLiteDelegate;
// TfLiteInterpreter provides inference from a provided model.
typedef struct TfLiteInterpreter TfLiteInterpreter;
// A tensor in the interpreter system which is a wrapper around a buffer of
// data including a dimensionality (or NULL if not currently defined).
typedef struct TfLiteTensor TfLiteTensor;
// --------------------------------------------------------------------------
// TfLiteVersion returns a string describing version information of the
// TensorFlow Lite library. TensorFlow Lite uses semantic versioning.
TFL_CAPI_EXPORT extern const char* TfLiteVersion(void);
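// Editor's sketch (not part of the upstream header): logging the version
// string at startup. Assumes the caller has included <stdio.h>; the function
// name is hypothetical.
static inline void ExampleLogTfLiteVersion(void) {
  printf("TensorFlow Lite runtime: %s\n", TfLiteVersion());
}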
// Returns a model from the provided buffer, or null on failure.
//
// NOTE: The caller retains ownership of `model_data` and should ensure that
// its lifetime is at least as long as the lifetime of the `TfLiteModel`.
TFL_CAPI_EXPORT extern TfLiteModel* TfLiteModelCreate(const void* model_data,
size_t model_size);
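// Editor's sketch (not part of the upstream header): a thin wrapper that
// documents the ownership contract above. The function name is hypothetical.
static inline TfLiteModel* ExampleWrapModelBuffer(const void* model_data,
                                                  size_t model_size) {
  // No copy is made, so `model_data` may only be freed after the returned
  // model has been destroyed with TfLiteModelDelete().
  return TfLiteModelCreate(model_data, model_size);
}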
......@@ -104,10 +110,6 @@ TFL_CAPI_EXPORT extern TfLiteModel* TfLiteModelCreateFromFile(
// Destroys the model instance.
TFL_CAPI_EXPORT extern void TfLiteModelDelete(TfLiteModel* model);
// --------------------------------------------------------------------------
// TfLiteInterpreterOptions allows customized interpreter configuration.
typedef struct TfLiteInterpreterOptions TfLiteInterpreterOptions;
// Returns a new interpreter options instance.
TFL_CAPI_EXPORT extern TfLiteInterpreterOptions*
TfLiteInterpreterOptionsCreate();
......@@ -134,17 +136,13 @@ TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsAddDelegate(
//
// * `reporter` takes the provided `user_data` object, as well as a C-style
// format string and arg list (see also vprintf).
// * `user_data` is optional. If provided, it is owned by the client and must
// * `user_data` is optional. If non-null, it is owned by the client and must
// remain valid for the duration of the interpreter lifetime.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsSetErrorReporter(
TfLiteInterpreterOptions* options,
void (*reporter)(void* user_data, const char* format, va_list args),
void* user_data);
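// Editor's sketch (not part of the upstream header): a reporter that forwards
// interpreter errors to stderr (assumes <stdio.h>). It would be installed via
// TfLiteInterpreterOptionsSetErrorReporter(options, ExampleStderrReporter,
// /*user_data=*/NULL); the names are hypothetical.
static inline void ExampleStderrReporter(void* user_data, const char* format,
                                         va_list args) {
  (void)user_data;  // unused in this sketch
  vfprintf(stderr, format, args);
  fputc('\n', stderr);
}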
// --------------------------------------------------------------------------
// TfLiteInterpreter provides inference from a provided model.
typedef struct TfLiteInterpreter TfLiteInterpreter;
// Returns a new interpreter using the provided model and options, or null on
// failure.
//
......@@ -176,7 +174,11 @@ TFL_CAPI_EXPORT extern TfLiteTensor* TfLiteInterpreterGetInputTensor(
//
// NOTE: After a resize, the client *must* explicitly allocate tensors before
// attempting to access the resized tensor data or invoke the interpreter.
//
// REQUIRES: 0 <= input_index < TfLiteInterpreterGetInputTensorCount(interpreter)
//
// This function makes a copy of the input dimensions, so the client can safely
// deallocate `input_dims` immediately after this function returns.
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterResizeInputTensor(
TfLiteInterpreter* interpreter, int32_t input_index, const int* input_dims,
int32_t input_dims_size);
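// Editor's sketch (not part of the upstream header): resizing input 0 to a
// hypothetical batch-of-4 NHWC shape. Per the notes above, the caller must
// then call TfLiteInterpreterAllocateTensors() (declared below) before
// touching the resized tensor's data.
static inline TfLiteStatus ExampleResizeInputToBatch4(
    TfLiteInterpreter* interpreter) {
  const int dims[4] = {4, 224, 224, 3};  // hypothetical shape
  // The dimensions are copied, so this stack array need not outlive the call.
  return TfLiteInterpreterResizeInputTensor(interpreter, 0, dims, 4);
}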
......@@ -191,9 +193,34 @@ TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterAllocateTensors(
// Runs inference for the loaded graph.
//
// Before calling this function, the caller should first invoke
// TfLiteInterpreterAllocateTensors() and should also set the values for the
// input tensors. After successfully calling this function, the values for the
// output tensors will be set.
//
// NOTE: It is possible that the interpreter is not in a ready state to
// evaluate (e.g., if a ResizeInputTensor() has been performed without a call to
// evaluate (e.g., if AllocateTensors() hasn't been called, or if a
// ResizeInputTensor() has been performed without a subsequent call to
// AllocateTensors()).
//
// If the (experimental!) delegate fallback option was enabled in the
// interpreter options, then the interpreter will automatically fall back to
// not using any delegates if execution with delegates fails. For details, see
// TfLiteInterpreterOptionsSetEnableDelegateFallback in c_api_experimental.h.
//
// Returns one of the following status codes:
// - kTfLiteOk: Success. Output is valid.
// - kTfLiteDelegateError: Execution with delegates failed, due to a problem
// with the delegate(s). If fallback was not enabled, output is invalid.
// If fallback was enabled, this return value indicates that fallback
// succeeded, the output is valid, and all delegates previously applied to
// the interpreter have been undone.
// - kTfLiteApplicationError: Same as for kTfLiteDelegateError, except that
// the problem was not with the delegate itself, but rather was
// due to an incompatibility between the delegate(s) and the
// interpreter or model.
// - kTfLiteError: Unexpected/runtime failure. Output is invalid.
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterInvoke(
TfLiteInterpreter* interpreter);
......@@ -202,7 +229,7 @@ TFL_CAPI_EXPORT extern int32_t TfLiteInterpreterGetOutputTensorCount(
const TfLiteInterpreter* interpreter);
// Returns the tensor associated with the output index.
// REQUIRES: 0 <= input_index < TfLiteInterpreterGetOutputTensorCount(tensor)
// REQUIRES: 0 <= output_index < TfLiteInterpreterGetOutputTensorCount(interpreter)
//
// NOTE: The shape and underlying data buffer for output tensors may not
// be available until after the output tensor has been both sized and allocated.
......
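// Editor's sketch (not part of the upstream sources): the end-to-end call
// sequence implied by the c_api.h notes above, for a hypothetical model with
// one float32 input and one float32 output. All functions used here are
// declared in c_api.h; error handling is collapsed for brevity.
static inline TfLiteStatus ExampleRunOnce(TfLiteModel* model,
                                          const float* in, size_t in_len,
                                          float* out, size_t out_len) {
  TfLiteInterpreterOptions* options = TfLiteInterpreterOptionsCreate();
  TfLiteInterpreter* interpreter = TfLiteInterpreterCreate(model, options);
  // The options may be destroyed as soon as the interpreter exists.
  TfLiteInterpreterOptionsDelete(options);
  if (!interpreter) return kTfLiteError;

  TfLiteStatus status = TfLiteInterpreterAllocateTensors(interpreter);
  if (status == kTfLiteOk) {
    TfLiteTensor* input = TfLiteInterpreterGetInputTensor(interpreter, 0);
    status = TfLiteTensorCopyFromBuffer(input, in, in_len * sizeof(float));
  }
  if (status == kTfLiteOk) status = TfLiteInterpreterInvoke(interpreter);
  if (status == kTfLiteOk) {
    const TfLiteTensor* output =
        TfLiteInterpreterGetOutputTensor(interpreter, 0);
    status = TfLiteTensorCopyToBuffer(output, out, out_len * sizeof(float));
  }
  TfLiteInterpreterDelete(interpreter);
  return status;
}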
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_C_C_API_EXPERIMENTAL_H_
#define TENSORFLOW_LITE_C_C_API_EXPERIMENTAL_H_
#include "builtin_ops.h"
#include "c_api.h"
#include "common.h"
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
/// Resets all variable tensors to zero.
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterResetVariableTensors(
TfLiteInterpreter* interpreter);
/// Adds an op registration for a builtin operator.
///
/// Op registrations are used to map ops referenced in the flatbuffer model
/// to executable function pointers (`TfLiteRegistration`s).
///
/// NOTE: The interpreter will make a shallow copy of `registration` internally,
/// so the caller should ensure that its contents (function pointers, etc...)
/// remain valid for the duration of the interpreter's lifetime. A common
/// practice is making the provided `TfLiteRegistration` instance static.
///
/// Code that uses this function should NOT call
/// `TfLiteInterpreterOptionsSetOpResolver` on the same options object.
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT void TfLiteInterpreterOptionsAddBuiltinOp(
TfLiteInterpreterOptions* options, TfLiteBuiltinOperator op,
const TfLiteRegistration* registration, int32_t min_version,
int32_t max_version);
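/// Editor's sketch (not part of the upstream header): a statically allocated
/// registration, satisfying the lifetime note above. The stub callbacks and
/// names are hypothetical; a real kernel would implement prepare/invoke.
static TfLiteStatus ExampleAddPrepare(TfLiteContext* context,
                                      TfLiteNode* node) {
  (void)context; (void)node;
  return kTfLiteOk;
}
static TfLiteStatus ExampleAddEval(TfLiteContext* context, TfLiteNode* node) {
  (void)context; (void)node;
  return kTfLiteOk;  // a real kernel computes the op here
}
static const TfLiteRegistration kExampleAddRegistration = {
    .prepare = ExampleAddPrepare,
    .invoke = ExampleAddEval,
};
/// Registered with, e.g.:
///   TfLiteInterpreterOptionsAddBuiltinOp(options, kTfLiteBuiltinAdd,
///                                        &kExampleAddRegistration, 1, 1);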
/// Adds an op registration for a custom operator.
///
/// Op registrations are used to map ops referenced in the flatbuffer model
/// to executable function pointers (`TfLiteRegistration`s).
///
/// NOTE: The interpreter will make a shallow copy of `registration` internally,
/// so the caller should ensure that its contents (function pointers, etc...)
/// remain valid for the duration of any created interpreter's lifetime. A
/// common practice is making the provided `TfLiteRegistration` instance static.
///
/// The lifetime of the string pointed to by `name` must be at least as long
/// as the lifetime of the `TfLiteInterpreterOptions`.
///
/// Code that uses this function should NOT call
/// `TfLiteInterpreterOptionsSetOpResolver` on the same options object.
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT void TfLiteInterpreterOptionsAddCustomOp(
TfLiteInterpreterOptions* options, const char* name,
const TfLiteRegistration* registration, int32_t min_version,
int32_t max_version);
/// Registers callbacks for resolving builtin or custom operators.
///
/// The `TfLiteInterpreterOptionsSetOpResolver` function provides an alternative
/// method for registering builtin ops and/or custom ops, by providing operator
/// resolver callbacks. Unlike using `TfLiteInterpreterOptionsAddBuiltinOp`
/// and/or `TfLiteInterpreterOptionsAddCustomOp`, these let you register all
/// the operators in a single call.
///
/// Code that uses this function should NOT call
/// `TfLiteInterpreterOptionsAddBuiltinOp` or
/// `TfLiteInterpreterOptionsAddCustomOp` on the same options object.
///
/// If `op_resolver_user_data` is non-null, its lifetime must be at least as
/// long as the lifetime of the `TfLiteInterpreterOptions`.
///
/// WARNING: This is an experimental API and subject to change.
void TfLiteInterpreterOptionsSetOpResolver(
TfLiteInterpreterOptions* options,
const TfLiteRegistration* (*find_builtin_op)(void* user_data,
TfLiteBuiltinOperator op,
int version),
const TfLiteRegistration* (*find_custom_op)(void* user_data,
const char* custom_op,
int version),
void* op_resolver_user_data);
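/// Editor's sketch (not part of the upstream header): a resolver pair that
/// exposes no operators; real code would return `TfLiteRegistration` pointers
/// for the ops its models actually use. Names are hypothetical, and the
/// null-means-unsupported reading is the editor's understanding.
static const TfLiteRegistration* ExampleFindBuiltinOp(
    void* user_data, TfLiteBuiltinOperator op, int version) {
  (void)user_data; (void)op; (void)version;
  return NULL;  // this resolver provides no builtin ops
}
static const TfLiteRegistration* ExampleFindCustomOp(
    void* user_data, const char* custom_op, int version) {
  (void)user_data; (void)custom_op; (void)version;
  return NULL;  // this resolver provides no custom ops
}
/// Installed with:
///   TfLiteInterpreterOptionsSetOpResolver(options, ExampleFindBuiltinOp,
///                                         ExampleFindCustomOp, NULL);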
/// Returns a new interpreter using the provided model and options, or null on
/// failure, where the model uses only the operators explicitly added to the
/// options. This is the same as `TfLiteInterpreterCreate` from `c_api.h`,
/// except that the only operators that are supported are the ones registered
/// in `options` via calls to `TfLiteInterpreterOptionsSetOpResolver`,
/// `TfLiteInterpreterOptionsAddBuiltinOp`, and/or
/// `TfLiteInterpreterOptionsAddCustomOp`.
///
/// * `model` must be a valid model instance. The caller retains ownership of
/// the object, and can destroy it immediately after creating the interpreter;
/// the interpreter will maintain its own reference to the underlying model
/// data.
/// * `options` should not be null. The caller retains ownership of the object,
/// and can safely destroy it immediately after creating the interpreter.
///
/// NOTE: The client *must* explicitly allocate tensors before attempting to
/// access input tensor data or invoke the interpreter.
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern TfLiteInterpreter*
TfLiteInterpreterCreateWithSelectedOps(const TfLiteModel* model,
const TfLiteInterpreterOptions* options);
/// Enable or disable the NN API delegate for the interpreter (true to enable).
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsSetUseNNAPI(
TfLiteInterpreterOptions* options, bool enable);
/// Enable or disable CPU fallback for the interpreter (true to enable).
/// If enabled, TfLiteInterpreterInvoke will do automatic fallback from
/// executing with delegate(s) to regular execution without delegates
/// (i.e. on CPU).
///
/// Allowing the fallback is suitable only if both of the following hold:
/// - The caller is known not to cache pointers to tensor data across
/// TfLiteInterpreterInvoke calls.
/// - The model is not stateful (no variables, no LSTMs) or the state isn't
/// needed between batches.
///
/// When delegate fallback is enabled, TfLiteInterpreterInvoke will
/// behave as follows:
/// If one or more delegates were set in the interpreter options
/// (see TfLiteInterpreterOptionsAddDelegate),
/// AND inference fails,
/// then the interpreter will fall back to not using any delegates.
/// In that case, the previously applied delegate(s) will be automatically
/// undone, and an attempt will be made to return the interpreter to an
/// invokable state, which may invalidate previous tensor addresses,
/// and the inference will be attempted again, using input tensors with
/// the same value as previously set.
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsSetEnableDelegateFallback(
TfLiteInterpreterOptions* options, bool enable);
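/// Editor's sketch (not part of the upstream header): interpreting Invoke()'s
/// result when fallback is enabled, following the contract described above.
static inline TfLiteStatus ExampleInvokeWithFallback(
    TfLiteInterpreter* interpreter) {
  TfLiteStatus status = TfLiteInterpreterInvoke(interpreter);
  if (status == kTfLiteDelegateError) {
    // With fallback enabled, this code means the delegate failed but the
    // CPU fallback succeeded: outputs are valid, delegates were undone, and
    // any previously cached tensor data pointers may now be stale.
  }
  return status;
}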
/// Set if buffer handle output is allowed.
///
/// When using hardware delegation, Interpreter will make the data of output
/// tensors available in `tensor->data` by default. If the application can
/// consume the buffer handle directly (e.g. reading output from OpenGL
/// texture), it can set this flag to false, so Interpreter won't copy the
/// data from buffer handle to CPU memory. WARNING: This is an experimental
/// API and subject to change.
TFL_CAPI_EXPORT extern void TfLiteSetAllowBufferHandleOutput(
const TfLiteInterpreter* interpreter, bool allow_buffer_handle_output);
/// Allow a delegate to look at the graph and modify the graph to handle
/// parts of the graph themselves. After this is called, the graph may
/// contain new nodes that replace one or more nodes.
/// 'delegate' must outlive the interpreter.
/// Use `TfLiteInterpreterOptionsAddDelegate` instead of this unless
/// absolutely required.
/// Returns one of the following three status codes:
/// 1. kTfLiteOk: Success.
/// 2. kTfLiteDelegateError: Delegation failed due to an error in the
/// delegate. The Interpreter has been restored to its pre-delegation state.
/// NOTE: This undoes all delegates previously applied to the Interpreter.
/// 3. kTfLiteError: Unexpected/runtime failure.
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterModifyGraphWithDelegate(
const TfLiteInterpreter* interpreter, TfLiteDelegate* delegate);
/// Returns the tensor index corresponding to the input tensor
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern int32_t TfLiteInterpreterGetInputTensorIndex(
const TfLiteInterpreter* interpreter, int32_t input_index);
/// Returns the tensor index corresponding to the output tensor
///
/// WARNING: This is an experimental API and subject to change.
TFL_CAPI_EXPORT extern int32_t TfLiteInterpreterGetOutputTensorIndex(
const TfLiteInterpreter* interpreter, int32_t output_index);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // TENSORFLOW_LITE_C_C_API_EXPERIMENTAL_H_
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// This file declares types used by the pure C inference API defined in c_api.h,
// some of which are also used in the C++ and C kernel and interpreter APIs.
#ifndef TENSORFLOW_LITE_C_C_API_TYPES_H_
#define TENSORFLOW_LITE_C_C_API_TYPES_H_
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
// Define TFL_CAPI_EXPORT macro to export a function properly with a shared
// library.
#ifdef SWIG
#define TFL_CAPI_EXPORT
#elif defined(TFL_STATIC_LIBRARY_BUILD)
#define TFL_CAPI_EXPORT
#else // not defined TFL_STATIC_LIBRARY_BUILD
#if defined(_WIN32)
#ifdef TFL_COMPILE_LIBRARY
#define TFL_CAPI_EXPORT __declspec(dllexport)
#else
#define TFL_CAPI_EXPORT __declspec(dllimport)
#endif // TFL_COMPILE_LIBRARY
#else
#define TFL_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // SWIG
// Note that new error status values may be added in the future in order to
// indicate more fine-grained internal states, therefore, applications should
// not rely on status values being members of the enum.
typedef enum TfLiteStatus {
kTfLiteOk = 0,
// Generally referring to an error in the runtime (i.e. interpreter)
kTfLiteError = 1,
// Generally referring to an error from a TfLiteDelegate itself.
kTfLiteDelegateError = 2,
// Generally referring to an error in applying a delegate due to
// incompatibility between runtime and delegate, e.g., this error is returned
// when trying to apply a TF Lite delegate onto a model graph that's already
// immutable.
kTfLiteApplicationError = 3,
// Generally referring to serialized delegate data not being found.
// See tflite::delegates::Serialization.
kTfLiteDelegateDataNotFound = 4,
// Generally referring to data-writing issues in delegate serialization.
// See tflite::delegates::Serialization.
kTfLiteDelegateDataWriteError = 5,
// Generally referring to data-reading issues in delegate serialization.
// See tflite::delegates::Serialization.
kTfLiteDelegateDataReadError = 6,
// Generally referring to issues when the TF Lite model has ops that cannot be
// resolved at runtime. This could happen when the specific op is not
// registered or built with the TF Lite framework.
kTfLiteUnresolvedOps = 7,
} TfLiteStatus;
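// Editor's sketch (not part of the upstream header): mapping statuses to
// printable names. The default case matters because, per the note above,
// applications must tolerate values outside the current enum.
static inline const char* ExampleStatusName(TfLiteStatus status) {
  switch (status) {
    case kTfLiteOk: return "kTfLiteOk";
    case kTfLiteError: return "kTfLiteError";
    case kTfLiteDelegateError: return "kTfLiteDelegateError";
    case kTfLiteApplicationError: return "kTfLiteApplicationError";
    case kTfLiteDelegateDataNotFound: return "kTfLiteDelegateDataNotFound";
    case kTfLiteDelegateDataWriteError: return "kTfLiteDelegateDataWriteError";
    case kTfLiteDelegateDataReadError: return "kTfLiteDelegateDataReadError";
    case kTfLiteUnresolvedOps: return "kTfLiteUnresolvedOps";
    default: return "(unrecognized TfLiteStatus)";
  }
}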
// Types supported by tensor
typedef enum {
kTfLiteNoType = 0,
kTfLiteFloat32 = 1,
kTfLiteInt32 = 2,
kTfLiteUInt8 = 3,
kTfLiteInt64 = 4,
kTfLiteString = 5,
kTfLiteBool = 6,
kTfLiteInt16 = 7,
kTfLiteComplex64 = 8,
kTfLiteInt8 = 9,
kTfLiteFloat16 = 10,
kTfLiteFloat64 = 11,
kTfLiteComplex128 = 12,
kTfLiteUInt64 = 13,
kTfLiteResource = 14,
kTfLiteVariant = 15,
kTfLiteUInt32 = 16,
} TfLiteType;
// Legacy. Will be deprecated in favor of TfLiteAffineQuantization.
// If per-layer quantization is specified this field will still be populated in
// addition to TfLiteAffineQuantization.
// Parameters for asymmetric quantization. Quantized values can be converted
// back to float using:
// real_value = scale * (quantized_value - zero_point)
typedef struct TfLiteQuantizationParams {
float scale;
int32_t zero_point;
} TfLiteQuantizationParams;
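// Editor's sketch (not part of the upstream header): applying the formula
// above. With scale = 0.5 and zero_point = 10, a quantized value of 14
// dequantizes to 0.5 * (14 - 10) = 2.0. The function name is hypothetical.
static inline float ExampleDequantize(TfLiteQuantizationParams params,
                                      int32_t quantized_value) {
  return params.scale * (float)(quantized_value - params.zero_point);
}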
#ifdef __cplusplus
} // extern C
#endif
#endif // TENSORFLOW_LITE_C_C_API_TYPES_H_
......@@ -15,7 +15,7 @@ limitations under the License.
// This file defines common C types and APIs for implementing operations,
// delegates and other constructs in TensorFlow Lite. The actual operations and
// delegtes can be defined using C++, but the interface between the interpreter
// delegates can be defined using C++, but the interface between the interpreter
// and the operations are C.
//
// Summary of abstractions
......@@ -29,6 +29,9 @@ limitations under the License.
// TfLiteDelegate - allows delegation of nodes to alternative backends.
//
// Some abstractions in this file are created and managed by Interpreter.
//
// NOTE: The order of values in these structs is "semi-ABI stable". New values
// should be added only to the end of structs and never reordered.
#ifndef TENSORFLOW_LITE_C_COMMON_H_
#define TENSORFLOW_LITE_C_COMMON_H_
......@@ -37,12 +40,12 @@ limitations under the License.
#include <stddef.h>
#include <stdint.h>
#include "c_api_types.h" // IWYU pragma: export
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
typedef enum TfLiteStatus { kTfLiteOk = 0, kTfLiteError = 1 } TfLiteStatus;
// The list of external context types known to TF Lite. This list exists solely
// to avoid conflicts and to ensure ops can share the external contexts they
// need. Access to the external contexts is controlled by one of the
......@@ -51,7 +54,7 @@ typedef enum TfLiteExternalContextType {
kTfLiteEigenContext = 0, // include eigen_support.h to use.
kTfLiteGemmLowpContext = 1, // include gemm_support.h to use.
kTfLiteEdgeTpuContext = 2, // Placeholder for Edge TPU support.
kTfLiteCpuBackendContext = 3, // include cpu_backend_support.h to use.
kTfLiteCpuBackendContext = 3, // include cpu_backend_context.h to use.
kTfLiteMaxExternalContexts = 4
} TfLiteExternalContextType;
......@@ -63,7 +66,7 @@ struct TfLiteRegistration;
// An external context is a collection of information unrelated to the TF Lite
// framework, but useful to a subset of the ops. TF Lite knows very little
// about about the actual contexts, but it keeps a list of them, and is able to
// about the actual contexts, but it keeps a list of them, and is able to
// refresh them if configurations like the number of recommended threads
// change.
typedef struct TfLiteExternalContext {
......@@ -77,10 +80,16 @@ typedef struct TfLiteExternalContext {
// indices
typedef struct TfLiteIntArray {
int size;
// gcc 6.1+ have a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1
#if defined(_MSC_VER)
// Context for why this is needed is in http://b/189926408#comment21
int data[1];
#elif (!defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1) || \
defined(HEXAGON) || \
(defined(__clang__) && __clang_major__ == 7 && __clang_minor__ == 1)
// gcc 6.1+ have a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
int data[0];
#else
int data[];
......@@ -116,10 +125,15 @@ void TfLiteIntArrayFree(TfLiteIntArray* a);
// Fixed size list of floats. Used for per-channel quantization.
typedef struct TfLiteFloatArray {
int size;
// gcc 6.1+ have a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1
#if defined(_MSC_VER)
// Context for why this is needed is in http://b/189926408#comment21
float data[1];
#elif (!defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1) || \
defined(HEXAGON) || \
(defined(__clang__) && __clang_major__ == 7 && __clang_minor__ == 1)
// gcc 6.1+ have a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
float data[0];
#else
float data[];
......@@ -151,8 +165,16 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a);
do { \
(context)->ReportError((context), __VA_ARGS__); \
} while (false)
#define TF_LITE_MAYBE_KERNEL_LOG(context, ...) \
do { \
if ((context) != nullptr) { \
(context)->ReportError((context), __VA_ARGS__); \
} \
} while (false)
#else // TF_LITE_STRIP_ERROR_STRINGS
#define TF_LITE_KERNEL_LOG(context, ...)
#define TF_LITE_MAYBE_KERNEL_LOG(context, ...)
#endif // TF_LITE_STRIP_ERROR_STRINGS
// Check whether value is true, and if not return kTfLiteError from
......@@ -178,8 +200,9 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a);
#define TF_LITE_ENSURE_STATUS(a) \
do { \
if ((a) != kTfLiteOk) { \
return kTfLiteError; \
const TfLiteStatus s = (a); \
if (s != kTfLiteOk) { \
return s; \
} \
} while (0)
......@@ -187,6 +210,7 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a);
// the current function, while also reporting the location of the error.
// `a` and `b` may be evaluated more than once, so no side effects or
// extremely expensive computations should be done.
// NOTE: Use TF_LITE_ENSURE_TYPES_EQ if comparing TfLiteTypes.
#define TF_LITE_ENSURE_EQ(context, a, b) \
do { \
if ((a) != (b)) { \
......@@ -206,10 +230,22 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a);
} \
} while (0)
#define TF_LITE_ENSURE_NEAR(context, a, b, epsilon) \
do { \
auto delta = ((a) > (b)) ? ((a) - (b)) : ((b) - (a)); \
if (delta > epsilon) { \
TF_LITE_KERNEL_LOG((context), "%s:%d %s not near %s (%f != %f)", \
__FILE__, __LINE__, #a, #b, static_cast<double>(a), \
static_cast<double>(b)); \
return kTfLiteError; \
} \
} while (0)
#define TF_LITE_ENSURE_OK(context, status) \
do { \
if ((status) != kTfLiteOk) { \
return kTfLiteError; \
const TfLiteStatus s = (status); \
if ((s) != kTfLiteOk) { \
return s; \
} \
} while (0)
......@@ -218,26 +254,16 @@ typedef struct TfLiteComplex64 {
float re, im; // real and imaginary parts, respectively.
} TfLiteComplex64;
// Double-precision complex data type compatible with the C99 definition.
typedef struct TfLiteComplex128 {
double re, im; // real and imaginary parts, respectively.
} TfLiteComplex128;
// Half precision data type compatible with the C99 definition.
typedef struct TfLiteFloat16 {
uint16_t data;
} TfLiteFloat16;
// Types supported by tensor
typedef enum {
kTfLiteNoType = 0,
kTfLiteFloat32 = 1,
kTfLiteInt32 = 2,
kTfLiteUInt8 = 3,
kTfLiteInt64 = 4,
kTfLiteString = 5,
kTfLiteBool = 6,
kTfLiteInt16 = 7,
kTfLiteComplex64 = 8,
kTfLiteInt8 = 9,
kTfLiteFloat16 = 10,
} TfLiteType;
// Return the name of a given type, for error reporting purposes.
const char* TfLiteTypeGetName(TfLiteType type);
......@@ -254,22 +280,12 @@ typedef enum TfLiteQuantizationType {
typedef struct TfLiteQuantization {
// The type of quantization held by params.
TfLiteQuantizationType type;
// Holds a reference to one of the quantization param structures specified
// below.
// Holds an optional reference to a quantization param structure. The actual
// type depends on the value of the `type` field (see the comment there for
// the values and corresponding types).
void* params;
} TfLiteQuantization;
// Legacy. Will be deprecated in favor of TfLiteAffineQuantization.
// If per-layer quantization is specified this field will still be populated in
// addition to TfLiteAffineQuantization.
// Parameters for asymmetric quantization. Quantized values can be converted
// back to float using:
// real_value = scale * (quantized_value - zero_point)
typedef struct TfLiteQuantizationParams {
float scale;
int32_t zero_point;
} TfLiteQuantizationParams;
// Parameters for asymmetric quantization across a dimension (i.e per output
// channel quantization).
// quantized_dimension specifies which dimension the scales and zero_points
......@@ -289,29 +305,44 @@ typedef union TfLitePtrUnion {
* GetTensorData<TYPE>(tensor) instead, otherwise only access .data, as other
* members are deprecated. */
int32_t* i32;
uint32_t* u32;
int64_t* i64;
uint64_t* u64;
float* f;
TfLiteFloat16* f16;
double* f64;
char* raw;
const char* raw_const;
uint8_t* uint8;
bool* b;
int16_t* i16;
TfLiteComplex64* c64;
TfLiteComplex128* c128;
int8_t* int8;
/* Only use this member. */
void* data;
} TfLitePtrUnion;
// Memory allocation strategies. kTfLiteMmapRo is for read-only memory-mapped
// data (or data externally allocated). kTfLiteArenaRw is arena allocated
// data. kTfLiteDynamic is for tensors that are allocated during evaluation.
// Memory allocation strategies.
// * kTfLiteMmapRo: Read-only memory-mapped data, or data externally allocated.
// * kTfLiteArenaRw: Arena allocated with no guarantees about persistence,
// and available during eval.
// * kTfLiteArenaRwPersistent: Arena allocated but persistent across eval, and
// only available during eval.
// * kTfLiteDynamic: Allocated during eval, or for string tensors.
// * kTfLitePersistentRo: Allocated and populated during prepare. This is
// useful for tensors that can be computed during prepare and treated
// as constant inputs for downstream ops (also in prepare).
// * kTfLiteCustom: Custom memory allocation provided by the user. See
// TfLiteCustomAllocation below.
typedef enum TfLiteAllocationType {
kTfLiteMemNone = 0,
kTfLiteMmapRo,
kTfLiteArenaRw,
kTfLiteArenaRwPersistent,
kTfLiteDynamic,
kTfLitePersistentRo,
kTfLiteCustom,
} TfLiteAllocationType;
// The delegates should use zero or positive integers to represent handles.
......@@ -344,8 +375,29 @@ typedef struct TfLiteSparsity {
int dim_metadata_size;
} TfLiteSparsity;
// An tensor in the interpreter system which is a wrapper around a buffer of
// Defines a custom memory allocation not owned by the runtime.
// `data` should be aligned to kDefaultTensorAlignment defined in
// lite/util.h. (Currently 64 bytes)
// NOTE: See Interpreter.SetCustomAllocationForTensor for details on usage.
typedef struct TfLiteCustomAllocation {
void* data;
size_t bytes;
} TfLiteCustomAllocation;
// The flags used in `Interpreter::SetCustomAllocationForTensor`.
// Note that this is a bitmask, so the values should be 1, 2, 4, 8, etc.
typedef enum TfLiteCustomAllocationFlags {
kTfLiteCustomAllocationFlagsNone = 0,
// Skips checking whether allocation.data points to an aligned buffer as
// expected by the TFLite runtime.
// NOTE: Setting this flag can cause crashes when calling Invoke().
// Use with caution.
kTfLiteCustomAllocationFlagsSkipAlignCheck = 1,
} TfLiteCustomAllocationFlags;
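// A hedged sketch of supplying such an allocation from application code. The
// C++ Interpreter::SetCustomAllocationForTensor call and `interpreter` are
// assumed from the TFLite C++ API; the 64-byte alignment mirrors the comment
// above. The buffer must stay valid (and owned by the caller) across Invoke().
const size_t kAlignment = 64;  // kDefaultTensorAlignment, per the comment.
size_t bytes = interpreter->input_tensor(0)->bytes;
size_t rounded = ((bytes + kAlignment - 1) / kAlignment) * kAlignment;
void* data = aligned_alloc(kAlignment, rounded);  // <stdlib.h>, C11/C++17
TfLiteCustomAllocation allocation = {data, bytes};
TfLiteStatus status = interpreter->SetCustomAllocationForTensor(
    interpreter->inputs()[0], allocation);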
// A tensor in the interpreter system which is a wrapper around a buffer of
// data including a dimensionality (or NULL if not currently defined).
#ifndef TF_LITE_STATIC_MEMORY
typedef struct TfLiteTensor {
// The data type specification for data stored in `data`. This affects
// what member of `data` union should be used.
......@@ -411,34 +463,9 @@ typedef struct TfLiteTensor {
const TfLiteIntArray* dims_signature;
} TfLiteTensor;
#ifndef TF_LITE_STATIC_MEMORY
// Free data memory of tensor `t`.
void TfLiteTensorDataFree(TfLiteTensor* t);
// Free quantization data.
void TfLiteQuantizationFree(TfLiteQuantization* quantization);
// Free sparsity parameters.
void TfLiteSparsityFree(TfLiteSparsity* sparsity);
// Free memory of tensor `t`.
void TfLiteTensorFree(TfLiteTensor* t);
// Set all of a tensor's fields (and free any previously allocated data).
void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims,
TfLiteQuantizationParams quantization, char* buffer,
size_t size, TfLiteAllocationType allocation_type,
const void* allocation, bool is_variable,
TfLiteTensor* tensor);
// Resize the allocated data of a (dynamic) tensor. Tensors with allocation
// types other than kTfLiteDynamic will be ignored.
void TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor);
#endif // TF_LITE_STATIC_MEMORY
// A structure representing an instance of a node.
// This structure only exhibits the inputs, outputs and user defined data, not
// other features like the type.
// This structure only exhibits the inputs, outputs, user defined data and some
// node properties (like statefulness), not other features like the type.
typedef struct TfLiteNode {
// Inputs to this node expressed as indices into the simulator's tensors.
TfLiteIntArray* inputs;
......@@ -471,7 +498,138 @@ typedef struct TfLiteNode {
// created by calling `interpreter.ModifyGraphWithDelegate`.
// WARNING: This is an experimental interface that is subject to change.
struct TfLiteDelegate* delegate;
// Whether this op might have side effect (e.g. stateful op).
bool might_have_side_effect;
} TfLiteNode;
#else // defined(TF_LITE_STATIC_MEMORY)?
// NOTE: This flag is opt-in only at compile time.
//
// Specific reduced TfLiteTensor struct for TF Micro runtime. This struct
// contains only the minimum fields required to initialize and prepare a micro
// inference graph. The fields in this struct have been ordered from
// largest-to-smallest for optimal struct sizeof.
//
// This struct does not use:
// - allocation
// - buffer_handle
// - data_is_stale
// - delegate
// - dims_signature
// - name
// - sparsity
typedef struct TfLiteTensor {
// TODO(b/155784997): Consider consolidating these quantization fields:
// Quantization information. Replaces params field above.
TfLiteQuantization quantization;
// Quantization information.
TfLiteQuantizationParams params;
// A union of data pointers. The appropriate type should be used for a typed
// tensor based on `type`.
TfLitePtrUnion data;
// A pointer to a structure representing the dimensionality interpretation
// that the buffer should have. NOTE: the product of elements of `dims`
// and the element datatype size should be equal to `bytes` below.
TfLiteIntArray* dims;
// The number of bytes required to store the data of this Tensor. I.e.
// (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if
// type is kTfLiteFloat32 and dims = {3, 2} then
// bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24.
size_t bytes;
// The data type specification for data stored in `data`. This affects
// what member of `data` union should be used.
TfLiteType type;
// How memory is mapped
// kTfLiteMmapRo: Memory mapped read only.
// i.e. weights
// kTfLiteArenaRw: Arena allocated read write memory
// (i.e. temporaries, outputs).
TfLiteAllocationType allocation_type;
// True if the tensor is a variable.
bool is_variable;
} TfLiteTensor;
// Specific reduced TfLiteNode struct for TF Micro runtime. This struct contains
// only the minimum fields required to represent a node.
//
// This struct does not use:
// - delegate
// - intermediates
// - temporaries
typedef struct TfLiteNode {
// Inputs to this node expressed as indices into the simulator's tensors.
TfLiteIntArray* inputs;
// Outputs to this node expressed as indices into the simulator's tensors.
TfLiteIntArray* outputs;
// Intermediate tensors to this node expressed as indices into the simulator's
// tensors.
TfLiteIntArray* intermediates;
// Opaque data provided by the node implementer through `Registration.init`.
void* user_data;
// Opaque data provided to the node if the node is a builtin. This is usually
// a structure defined in builtin_op_data.h
void* builtin_data;
// Custom initial data. This is the opaque data provided in the flatbuffer.
// WARNING: This is an experimental interface that is subject to change.
const void* custom_initial_data;
int custom_initial_data_size;
} TfLiteNode;
#endif // TF_LITE_STATIC_MEMORY
// Light-weight tensor struct for TF Micro runtime. Provides the minimal amount
// of information required for a kernel to run during TfLiteRegistration::Eval.
// TODO(b/160955687): Move this field into TF_LITE_STATIC_MEMORY when TFLM
// builds with this flag by default internally.
typedef struct TfLiteEvalTensor {
// A union of data pointers. The appropriate type should be used for a typed
// tensor based on `type`.
TfLitePtrUnion data;
// A pointer to a structure representing the dimensionality interpretation
// that the buffer should have.
TfLiteIntArray* dims;
// The data type specification for data stored in `data`. This affects
// what member of `data` union should be used.
TfLiteType type;
} TfLiteEvalTensor;
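// For illustration, a TF Micro kernel's Eval might consume this struct as in
// the sketch below. GetEvalInput/GetEvalOutput are assumed helpers from
// TFLM's kernels/kernel_util.h, and a float identity op is assumed.
const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0);
TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0);
// TfLiteEvalTensor has no `bytes` field, so derive the element count from dims.
int num_elements = 1;
for (int i = 0; i < input->dims->size; ++i) num_elements *= input->dims->data[i];
for (int i = 0; i < num_elements; ++i) output->data.f[i] = input->data.f[i];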
#ifndef TF_LITE_STATIC_MEMORY
// Free data memory of tensor `t`.
void TfLiteTensorDataFree(TfLiteTensor* t);
// Free quantization data.
void TfLiteQuantizationFree(TfLiteQuantization* quantization);
// Free sparsity parameters.
void TfLiteSparsityFree(TfLiteSparsity* sparsity);
// Free memory of tensor `t`.
void TfLiteTensorFree(TfLiteTensor* t);
// Set all of a tensor's fields (and free any previously allocated data).
void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims,
TfLiteQuantizationParams quantization, char* buffer,
size_t size, TfLiteAllocationType allocation_type,
const void* allocation, bool is_variable,
TfLiteTensor* tensor);
// Resize the allocated data of a (dynamic) tensor. Tensors with allocation
// types other than kTfLiteDynamic will be ignored.
void TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor);
#endif // TF_LITE_STATIC_MEMORY
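// A hedged sketch of the low-level dynamic-resize path; kernels usually go
// through context->ResizeTensor instead, and `new_num_bytes` is assumed.
tensor->allocation_type = kTfLiteDynamic;
TfLiteTensorRealloc(new_num_bytes, tensor);  // no-op for other allocation types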
// WARNING: This is an experimental interface that is subject to change.
//
......@@ -497,6 +655,7 @@ typedef struct TfLiteContext {
// TfLiteDelegates can traverse the current execution plan by iterating
// through each member of this array and using GetNodeAndRegistration() to
// access details about a node. i.e.
//
// TfLiteIntArray* execution_plan;
// TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &execution_plan));
// for (int exec_index = 0; exec_index < execution_plan->size; exec_index++) {
......@@ -505,6 +664,28 @@ typedef struct TfLiteContext {
// TfLiteRegistration* reg;
// context->GetNodeAndRegistration(context, node_index, &node, &reg);
// }
// Note: the memory pointed to by `*execution_plan` is OWNED by the TfLite runtime.
// Future calls to GetExecutionPlan invalidate earlier outputs. The following
// code snippet shows the issue of such an invocation pattern. After calling
// CheckNode, subsequent access to `plan_1st` is undefined.
//
// void CheckNode(const TfLiteNode* node) {
// ...
// TfLiteIntArray* plan_2nd;
// TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_2nd));
// ...
// }
//
// TfLiteIntArray* plan_1st;
// TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_1st));
// for (int exec_index = 0; exec_index < plan_1st->size; exec_index++) {
// int node_index = plan_1st->data[exec_index];
// TfLiteNode* node;
// TfLiteRegistration* reg;
// context->GetNodeAndRegistration(context, node_index, &node, &reg);
// CheckNode(node);
// }
//
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*GetExecutionPlan)(struct TfLiteContext* context,
TfLiteIntArray** execution_plan);
......@@ -563,12 +744,11 @@ typedef struct TfLiteContext {
void* profiler;
// Allocate persistent buffer which has the same life time as the interpreter.
// Returns nullptr on failure.
// The memory is allocated from heap for TFL, and from tail in TFLM.
// If *ptr is not nullptr, the pointer will be reallocated.
// This method is only available in Prepare stage.
// This method is only available in Init or Prepare stage.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*AllocatePersistentBuffer)(struct TfLiteContext* ctx,
size_t bytes, void** ptr);
void* (*AllocatePersistentBuffer)(struct TfLiteContext* ctx, size_t bytes);
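// With the new signature above, per-op state is typically carved out once in
// the op's Init stage. OpData and MyOpInit below are hypothetical; the
// signature of `init` matches TfLiteRegistration.
typedef struct {
  int32_t output_multiplier;
  int output_shift;
} OpData;
static void* MyOpInit(TfLiteContext* context, const char* buffer,
                      size_t length) {
  // Same lifetime as the interpreter: heap on TFL, arena tail on TFLM.
  return context->AllocatePersistentBuffer(context, sizeof(OpData));
}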
// Allocate a buffer which will be deallocated right after invoke phase.
// The memory is allocated from heap in TFL, and from volatile arena in TFLM.
......@@ -623,6 +803,30 @@ typedef struct TfLiteContext {
TfLiteStatus (*PreviewDelegatePartitioning)(
struct TfLiteContext* context, const TfLiteIntArray* nodes_to_replace,
TfLiteDelegateParams** partition_params_array, int* num_partitions);
// Returns a TfLiteTensor struct for a given index.
// WARNING: This is an experimental interface that is subject to change.
// WARNING: This method may not be available on all platforms.
TfLiteTensor* (*GetTensor)(const struct TfLiteContext* context,
int tensor_idx);
// Returns a TfLiteEvalTensor struct for a given index.
// WARNING: This is an experimental interface that is subject to change.
// WARNING: This method may not be available on all platforms.
TfLiteEvalTensor* (*GetEvalTensor)(const struct TfLiteContext* context,
int tensor_idx);
// Retrieves named metadata buffer from the TFLite model.
// Returns kTfLiteOk if metadata is successfully obtained from the flatbuffer
// Model: that is, there exists a `metadata` entry with given `name` string.
// (see TFLite's schema.fbs).
// The corresponding `buffer` information is populated in `ptr` & `bytes`.
// The data from `ptr` is valid for the lifetime of the Interpreter.
//
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*GetModelMetadata)(const struct TfLiteContext* context,
const char* name, const char** ptr,
size_t* bytes);
} TfLiteContext;
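// A hedged example of the metadata lookup above, usable from any stage that
// has a TfLiteContext; the "min_runtime_version" key is illustrative only.
const char* metadata_ptr = NULL;
size_t metadata_bytes = 0;
if (context->GetModelMetadata(context, "min_runtime_version", &metadata_ptr,
                              &metadata_bytes) == kTfLiteOk) {
  // `metadata_ptr` remains valid for the lifetime of the interpreter.
  TF_LITE_KERNEL_LOG(context, "metadata: %.*s", (int)metadata_bytes,
                     metadata_ptr);
}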
typedef struct TfLiteRegistration {
......@@ -697,7 +901,26 @@ typedef enum TfLiteDelegateFlags {
//
// If the delegate isn't capable of handling dynamic tensors, this flag needs
// to be set to false.
kTfLiteDelegateFlagsAllowDynamicTensors = 1
kTfLiteDelegateFlagsAllowDynamicTensors = 1,
// This flag can be used by delegates (that allow dynamic tensors) to ensure
// applicable tensor shapes are automatically propagated in the case of tensor
// resizing.
// This means that non-dynamic (allocation_type != kTfLiteDynamic) I/O tensors
// of a delegate kernel will have correct shapes before its Prepare() method
// is called. The runtime leverages TFLite builtin ops in the original
// execution plan to propagate shapes.
//
// A few points to note:
// 1. This requires kTfLiteDelegateFlagsAllowDynamicTensors. If that flag is
// false, this one is redundant since the delegate kernels are re-initialized
// every time tensors are resized.
// 2. Enabling this flag adds some overhead to AllocateTensors(), since extra
// work is required to prepare the original execution plan.
// 3. This flag requires that the original execution plan only have ops with
// valid registrations (and not 'dummy' custom ops like with Flex).
// WARNING: This feature is experimental and subject to change.
kTfLiteDelegateFlagsRequirePropagatedShapes = 2
} TfLiteDelegateFlags;
// WARNING: This is an experimental interface that is subject to change.
......@@ -716,8 +939,9 @@ typedef struct TfLiteDelegate {
struct TfLiteDelegate* delegate);
// Copy the data from delegate buffer handle into raw memory of the given
// 'tensor'. This cannot be null. The delegate is allowed to allocate the raw
// bytes as long as it follows the rules for kTfLiteDynamic tensors.
// 'tensor'. Note that the delegate is allowed to allocate the raw bytes as
// long as it follows the rules for kTfLiteDynamic tensors, in which case this
// cannot be null.
TfLiteStatus (*CopyFromBufferHandle)(TfLiteContext* context,
struct TfLiteDelegate* delegate,
TfLiteBufferHandle buffer_handle,
......@@ -744,7 +968,7 @@ typedef struct TfLiteDelegate {
// Build a 'null' delegate, with all the fields properly set to their default
// values.
TfLiteDelegate TfLiteDelegateCreate();
TfLiteDelegate TfLiteDelegateCreate(void);
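// A minimal sketch of wiring a custom delegate together; MyDelegatePrepare is
// hypothetical, and a real implementation would claim nodes via
// context->ReplaceNodeSubsetsWithDelegateKernels(...).
static TfLiteStatus MyDelegatePrepare(TfLiteContext* context,
                                      struct TfLiteDelegate* delegate) {
  // Inspect the execution plan here and hand supported nodes to the delegate.
  return kTfLiteOk;
}
TfLiteDelegate delegate = TfLiteDelegateCreate();  // all fields defaulted
delegate.flags = kTfLiteDelegateFlagsAllowDynamicTensors |
                 kTfLiteDelegateFlagsRequirePropagatedShapes;
delegate.Prepare = MyDelegatePrepare;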
#ifdef __cplusplus
} // extern "C"
......
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
......@@ -13,66 +13,43 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
#define TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
#ifndef TENSORFLOW_LITE_DELEGATES_XNNPACK_XNNPACK_DELEGATE_H_
#define TENSORFLOW_LITE_DELEGATES_XNNPACK_XNNPACK_DELEGATE_H_
#ifdef SWIG
#define TFL_CAPI_EXPORT
#else
#if defined(_WIN32)
#ifdef TFL_COMPILE_LIBRARY
#define TFL_CAPI_EXPORT __declspec(dllexport)
#else
#define TFL_CAPI_EXPORT __declspec(dllimport)
#endif // TFL_COMPILE_LIBRARY
#else
#define TFL_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // SWIG
#include "common.h"
#ifdef __cplusplus
extern "C" {
#else
// For "C" 'bool' is not built-in type.
#include <stdbool.h>
#endif // __cplusplus
typedef struct TfLiteDelegate TfLiteDelegate;
typedef enum {
// waitUntilCompleted
TFLGpuDelegateWaitTypePassive,
// Minimize latency. It uses active spinning instead of mutex and consumes
// additional CPU resources.
TFLGpuDelegateWaitTypeActive,
// Useful when the output is then used with the GPU pipeline, or if an
// external command encoder is set.
TFLGpuDelegateWaitTypeDoNotWait,
// Tries to avoid GPU sleep mode.
TFLGpuDelegateWaitTypeAggressive,
} TFLGpuDelegateWaitType;
// Creates a new delegate instance that needs to be destroyed with
// DeleteFlowDelegate when the delegate is no longer used by tflite.
typedef struct {
// Allows quantizing tensors, downcasting values, processing in float16, etc.
bool allow_precision_loss;
TFLGpuDelegateWaitType wait_type;
} TFLGpuDelegateOptions;
// Number of threads to use in the thread pool.
// 0 or negative value means no thread pool used.
int32_t num_threads;
} TfLiteXNNPackDelegateOptions;
// Returns a structure with the default XNNPack delegate options.
TFL_CAPI_EXPORT TfLiteXNNPackDelegateOptions
TfLiteXNNPackDelegateOptionsDefault();
// Creates a new delegate instance that needs to be destroyed with
// `TFLDeleteTfLiteGpuDelegate` when delegate is no longer used by TFLite.
// `TfLiteXNNPackDelegateDelete` when delegate is no longer used by TFLite.
// When `options` is set to `nullptr`, the following default values are used:
// .precision_loss_allowed = false,
// .wait_type = kPassive,
TFL_CAPI_EXPORT extern TfLiteDelegate* TFLGpuDelegateCreate(
const TFLGpuDelegateOptions* options);
TFL_CAPI_EXPORT TfLiteDelegate* TfLiteXNNPackDelegateCreate(
const TfLiteXNNPackDelegateOptions* options);
// Returns the pthreadpool_t object used for parallelization in XNNPACK.
// Can return NULL if the XNNPack delegate is single-threaded.
//
// WARNING: This API is experimental and subject to change.
TFL_CAPI_EXPORT void* TfLiteXNNPackDelegateGetThreadPool(
TfLiteDelegate* delegate);
// Destroys a delegate created with `TFLGpuDelegateCreate` call.
TFL_CAPI_EXPORT extern void TFLGpuDelegateDelete(TfLiteDelegate* delegate);
// Destroys a delegate created with `TfLiteXNNPackDelegateCreate` call.
TFL_CAPI_EXPORT void TfLiteXNNPackDelegateDelete(TfLiteDelegate* delegate);
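// For reference, a hedged end-to-end use of this delegate through the TFLite
// C API; TfLiteInterpreterOptionsCreate/AddDelegate/Delete come from c_api.h
// and are assumed available alongside this header.
TfLiteXNNPackDelegateOptions xnnpack_options =
    TfLiteXNNPackDelegateOptionsDefault();
xnnpack_options.num_threads = 2;  // thread pool of two threads
TfLiteDelegate* xnnpack_delegate =
    TfLiteXNNPackDelegateCreate(&xnnpack_options);
TfLiteInterpreterOptions* options = TfLiteInterpreterOptionsCreate();
TfLiteInterpreterOptionsAddDelegate(options, xnnpack_delegate);
// ... create the interpreter, allocate tensors, invoke ...
TfLiteInterpreterOptionsDelete(options);
// The delegate must outlive the interpreter; delete it last.
TfLiteXNNPackDelegateDelete(xnnpack_delegate);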
#ifdef __cplusplus
} // extern "C"
}
#endif // __cplusplus
#endif // TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
#endif // TENSORFLOW_LITE_DELEGATES_XNNPACK_XNNPACK_DELEGATE_H_
......@@ -2,5 +2,4 @@ framework module TensorFlowLiteC {
umbrella header "TensorFlowLiteC.h"
export *
module * { export * }
link framework "Metal"
}
Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
......@@ -201,3 +199,86 @@ Copyright 2019 The TensorFlow Authors. All rights reserved.
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------
Files: third_party/compute_library/...
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------
Files: ACKNOWLEDGEMENTS
LICENSE
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
------------------
Files: third_party/hexagon
Copyright (c) 2016-2019, The Linux Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the
disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The Linux Foundation nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<div align="center">
<img src="https://www.tensorflow.org/images/tf_logo_social.png">
<img src="https://www.tensorflow.org/images/tf_logo_horizontal.png">
</div>
[![Python](https://img.shields.io/pypi/pyversions/tensorflow.svg?style=plastic)](https://badge.fury.io/py/tensorflow)
[![PyPI](https://badge.fury.io/py/tensorflow.svg)](https://badge.fury.io/py/tensorflow)
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.4724125.svg)](https://doi.org/10.5281/zenodo.4724125)
**`Documentation`** |
------------------- |
[![Documentation](https://img.shields.io/badge/api-reference-blue.svg)](https://www.tensorflow.org/api_docs/) |
......@@ -92,40 +96,33 @@ for general questions and discussion, and please direct specific questions to
The TensorFlow project strives to abide by generally accepted best practices in
open-source software development:
[![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/tensorflow.svg)](https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:tensorflow)
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/1486/badge)](https://bestpractices.coreinfrastructure.org/projects/1486)
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v1.4%20adopted-ff69b4.svg)](CODE_OF_CONDUCT.md)
## Continuous build status
You can find more community-supported platforms and configurations in the
[TensorFlow SIG Build community builds table](https://github.com/tensorflow/build#community-supported-tensorflow-builds).
### Official Builds
Build Type | Status | Artifacts
------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------
**Linux CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Linux GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Linux XLA** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.html) | TBA
**macOS** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Android** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.html) | [![Download](https://api.bintray.com/packages/google/tensorflow/tensorflow/images/download.svg)](https://bintray.com/google/tensorflow/tensorflow/_latestVersion)
**Raspberry Pi 0 and 1** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py2.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py2.html) [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.html) | [Py2](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp27-none-linux_armv6l.whl) [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv6l.whl)
**Raspberry Pi 2 and 3** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py2.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py2.html) [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.html) | [Py2](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp27-none-linux_armv7l.whl) [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv7l.whl)
### Community Supported Builds
Build Type | Status | Artifacts
----------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------
**Linux AMD ROCm GPU** Nightly | [![Build Status](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly/badge/icon)](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly) | [Nightly](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly/lastSuccessfulBuild/)
**Linux AMD ROCm GPU** Stable Release | [![Build Status](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/badge/icon)](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/) | Release [1.15](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/lastSuccessfulBuild/) / [2.x](http://ml-ci.amd.com:21096/job/tensorflow-rocm-v2-release/lastSuccessfulBuild/)
**Linux s390x** Nightly | [![Build Status](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/badge/icon)](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/) | [Nightly](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/)
**Linux s390x CPU** Stable Release | [![Build Status](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/badge/icon)](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/) | [Release](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/)
**Linux ppc64le CPU** Nightly | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Nightly_Artifact/)
**Linux ppc64le CPU** Stable Release | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_CPU_Release_Build/)
**Linux ppc64le GPU** Nightly | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Nightly_Artifact/)
**Linux ppc64le GPU** Stable Release | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_GPU_Release_Build/)
**Linux CPU with Intel® MKL-DNN** Nightly | [![Build Status](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/badge/icon)](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/) | [Nightly](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/)
**Linux CPU with Intel® MKL-DNN** Stable Release | ![Build Status](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-release-whl/badge/icon) | Release [1.15](https://pypi.org/project/intel-tensorflow/1.15.0/) / [2.x](https://pypi.org/project/intel-tensorflow/)
**Red Hat® Enterprise Linux® 7.6 CPU & GPU** <br> Python 2.7, 3.6 | [![Build Status](https://jenkins-tensorflow.apps.ci.centos.org/buildStatus/icon?job=tensorflow-rhel7-3.6&build=2)](https://jenkins-tensorflow.apps.ci.centos.org/job/tensorflow-rhel7-3.6/2/) | [1.13.1 PyPI](https://tensorflow.pypi.thoth-station.ninja/index/)
Build Type | Status | Artifacts
----------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------
**Linux CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Linux GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Linux XLA** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.html) | TBA
**macOS** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Android** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.html) | [Download](https://bintray.com/google/tensorflow/tensorflow/_latestVersion)
**Raspberry Pi 0 and 1** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.html) | [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv6l.whl)
**Raspberry Pi 2 and 3** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.html) | [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv7l.whl)
**Libtensorflow MacOS CPU** | Status Temporarily Unavailable | [Nightly Binary](https://storage.googleapis.com/libtensorflow-nightly/prod/tensorflow/release/macos/latest/macos_cpu_libtensorflow_binaries.tar.gz) [Official GCS](https://storage.googleapis.com/tensorflow/)
**Libtensorflow Linux CPU** | Status Temporarily Unavailable | [Nightly Binary](https://storage.googleapis.com/libtensorflow-nightly/prod/tensorflow/release/ubuntu_16/latest/cpu/ubuntu_cpu_libtensorflow_binaries.tar.gz) [Official GCS](https://storage.googleapis.com/tensorflow/)
**Libtensorflow Linux GPU** | Status Temporarily Unavailable | [Nightly Binary](https://storage.googleapis.com/libtensorflow-nightly/prod/tensorflow/release/ubuntu_16/latest/gpu/ubuntu_gpu_libtensorflow_binaries.tar.gz) [Official GCS](https://storage.googleapis.com/tensorflow/)
**Libtensorflow Windows CPU** | Status Temporarily Unavailable | [Nightly Binary](https://storage.googleapis.com/libtensorflow-nightly/prod/tensorflow/release/windows/latest/cpu/windows_cpu_libtensorflow_binaries.tar.gz) [Official GCS](https://storage.googleapis.com/tensorflow/)
**Libtensorflow Windows GPU** | Status Temporarily Unavailable | [Nightly Binary](https://storage.googleapis.com/libtensorflow-nightly/prod/tensorflow/release/windows/latest/gpu/windows_gpu_libtensorflow_binaries.tar.gz) [Official GCS](https://storage.googleapis.com/tensorflow/)
## Resources
......@@ -133,15 +130,21 @@ Build Type | Status
* [TensorFlow Tutorials](https://www.tensorflow.org/tutorials/)
* [TensorFlow Official Models](https://github.com/tensorflow/models/tree/master/official)
* [TensorFlow Examples](https://github.com/tensorflow/examples)
* [TensorFlow in Practice from Coursera](https://www.coursera.org/specializations/tensorflow-in-practice)
* [DeepLearning.AI TensorFlow Developer Professional Certificate](https://www.coursera.org/specializations/tensorflow-in-practice)
* [TensorFlow: Data and Deployment from Coursera](https://www.coursera.org/specializations/tensorflow-data-and-deployment)
* [Getting Started with TensorFlow 2 from Coursera](https://www.coursera.org/learn/getting-started-with-tensor-flow2)
* [TensorFlow: Advanced Techniques from Coursera](https://www.coursera.org/specializations/tensorflow-advanced-techniques)
* [TensorFlow 2 for Deep Learning Specialization from Coursera](https://www.coursera.org/specializations/tensorflow2-deeplearning)
* [Intro to TensorFlow for A.I, M.L, and D.L from Coursera](https://www.coursera.org/learn/introduction-tensorflow)
* [Intro to TensorFlow for Deep Learning from Udacity](https://www.udacity.com/course/intro-to-tensorflow-for-deep-learning--ud187)
* [Introduction to TensorFlow Lite from Udacity](https://www.udacity.com/course/intro-to-tensorflow-lite--ud190)
* [Machine Learning with TensorFlow on GCP](https://www.coursera.org/specializations/machine-learning-tensorflow-gcp)
* [TensorFlow Codelabs](https://codelabs.developers.google.com/?cat=TensorFlow)
* [TensorFlow Blog](https://blog.tensorflow.org)
* [Learn ML with TensorFlow](https://www.tensorflow.org/resources/learn-ml)
* [TensorFlow Twitter](https://twitter.com/tensorflow)
* [TensorFlow YouTube](https://www.youtube.com/channel/UC0rqucBdTuFTjJiefW5t-IQ)
* [TensorFlow Roadmap](https://www.tensorflow.org/community/roadmap)
* [TensorFlow model optimization roadmap](https://www.tensorflow.org/model_optimization/guide/roadmap)
* [TensorFlow White Papers](https://www.tensorflow.org/about/bib)
* [TensorBoard Visualization Toolkit](https://github.com/tensorflow/tensorboard)
......
// Copyright 2019 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlowLiteC
/// A delegate that uses the `Metal` framework for performing TensorFlow Lite graph operations with
/// GPU acceleration.
///
/// - Important: This is an experimental interface that is subject to change.
public final class MetalDelegate: Delegate {
/// The configuration options for the `MetalDelegate`.
public let options: Options
// Conformance to the `Delegate` protocol.
public private(set) var cDelegate: CDelegate
/// Creates a new instance configured with the given `options`.
///
/// - Parameters:
/// - options: Configurations for the delegate. The default is a new instance of
/// `MetalDelegate.Options` with the default configuration values.
public init(options: Options = Options()) {
self.options = options
var delegateOptions = TFLGpuDelegateOptions()
delegateOptions.allow_precision_loss = options.allowsPrecisionLoss
delegateOptions.wait_type = options.waitType.cWaitType
cDelegate = TFLGpuDelegateCreate(&delegateOptions)
}
deinit {
TFLGpuDelegateDelete(cDelegate)
}
}
extension MetalDelegate {
/// Options for configuring the `MetalDelegate`.
public struct Options: Equatable, Hashable {
/// Indicates whether the GPU delegate allows precision loss, such as allowing `Float16`
/// precision for a `Float32` computation. The default is `false`.
public var allowsPrecisionLoss = false
/// A type indicating how the current thread should wait for work on the GPU to complete. The
/// default is `passive`.
public var waitType: ThreadWaitType = .passive
/// Creates a new instance with the default values.
public init() {}
}
}
/// A type indicating how the current thread should wait for work scheduled on the GPU to complete.
public enum ThreadWaitType: Equatable, Hashable {
/// The thread does not wait for the work to complete. Useful when the output of the work is used
/// with the GPU pipeline.
case none
/// The thread waits until the work is complete.
case passive
/// The thread waits for the work to complete with minimal latency, which may require additional
/// CPU resources.
case active
/// The thread waits for the work while trying to prevent the GPU from going into sleep mode.
case aggressive
/// The C `TFLGpuDelegateWaitType` for the current `ThreadWaitType`.
var cWaitType: TFLGpuDelegateWaitType {
switch self {
case .none:
return TFLGpuDelegateWaitTypeDoNotWait
case .passive:
return TFLGpuDelegateWaitTypePassive
case .active:
return TFLGpuDelegateWaitTypeActive
case .aggressive:
return TFLGpuDelegateWaitTypeAggressive
}
}
}
......@@ -15,7 +15,15 @@
import Foundation
import TensorFlowLiteC
#if os(Linux)
import SwiftGlibc
#else
import Darwin
#endif
/// A TensorFlow Lite interpreter that performs inference from a given model.
///
/// - Note: Interpreter instances are *not* thread-safe.
public final class Interpreter {
/// The configuration options for the `Interpreter`.
public let options: Options?
......@@ -39,6 +47,9 @@ public final class Interpreter {
/// The underlying `TfLiteInterpreter` C pointer.
private var cInterpreter: CInterpreter?
/// The underlying `TfLiteDelegate` C pointer for XNNPACK delegate.
private var cXNNPackDelegate: Delegate.CDelegate?
/// Creates a new instance with the given values.
///
/// - Parameters:
......@@ -78,6 +89,14 @@ public final class Interpreter {
)
}
delegates?.forEach { TfLiteInterpreterOptionsAddDelegate(cInterpreterOptions, $0.cDelegate) }
// Configure the XNNPack delegate after the other delegates explicitly added by the user.
options.map {
if $0.isXNNPackEnabled {
configureXNNPack(options: $0, cInterpreterOptions: cInterpreterOptions)
}
}
guard let cInterpreter = TfLiteInterpreterCreate(model.cModel, cInterpreterOptions) else {
throw InterpreterError.failedToCreateInterpreter
}
......@@ -86,6 +105,7 @@ public final class Interpreter {
deinit {
TfLiteInterpreterDelete(cInterpreter)
TfLiteXNNPackDelegateDelete(cXNNPackDelegate)
}
/// Invokes the interpreter to perform inference from the loaded graph.
......@@ -201,12 +221,13 @@ public final class Interpreter {
guard case 0...maxIndex = index else {
throw InterpreterError.invalidTensorIndex(index: index, maxIndex: maxIndex)
}
guard TfLiteInterpreterResizeInputTensor(
cInterpreter,
Int32(index),
shape.int32Dimensions,
Int32(shape.rank)
) == kTfLiteOk
guard
TfLiteInterpreterResizeInputTensor(
cInterpreter,
Int32(index),
shape.int32Dimensions,
Int32(shape.rank)
) == kTfLiteOk
else {
throw InterpreterError.failedToResizeInputTensor(index: index)
}
......@@ -236,11 +257,11 @@ public final class Interpreter {
}
#if swift(>=5.0)
let status = data.withUnsafeBytes {
TfLiteTensorCopyFromBuffer(cTensor, $0.baseAddress, data.count)
}
let status = data.withUnsafeBytes {
TfLiteTensorCopyFromBuffer(cTensor, $0.baseAddress, data.count)
}
#else
let status = data.withUnsafeBytes { TfLiteTensorCopyFromBuffer(cTensor, $0, data.count) }
let status = data.withUnsafeBytes { TfLiteTensorCopyFromBuffer(cTensor, $0, data.count) }
#endif // swift(>=5.0)
guard status == kTfLiteOk else { throw InterpreterError.failedToCopyDataToInputTensor }
return try input(at: index)
......@@ -256,6 +277,18 @@ public final class Interpreter {
throw InterpreterError.failedToAllocateTensors
}
}
// MARK: - Private
private func configureXNNPack(options: Options, cInterpreterOptions: OpaquePointer) {
var cXNNPackOptions = TfLiteXNNPackDelegateOptionsDefault()
if let threadCount = options.threadCount, threadCount > 0 {
cXNNPackOptions.num_threads = Int32(threadCount)
}
cXNNPackDelegate = TfLiteXNNPackDelegateCreate(&cXNNPackOptions)
TfLiteInterpreterOptionsAddDelegate(cInterpreterOptions, cXNNPackDelegate)
}
}
extension Interpreter {
......@@ -265,12 +298,34 @@ extension Interpreter {
/// indicating that the `Interpreter` will decide the number of threads to use.
public var threadCount: Int? = nil
/// Indicates whether an optimized set of floating point CPU kernels, provided by XNNPACK, is
/// enabled.
///
/// - Experiment:
/// Enabling this flag will enable use of a new, highly optimized set of CPU kernels provided
/// via the XNNPACK delegate. Currently, this is restricted to a subset of floating point
/// operations. Eventually, we plan to enable this by default, as it can provide significant
/// performance benefits for many classes of floating point models. See
/// https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/delegates/xnnpack/README.md
/// for more details.
///
/// - Important:
/// Things to keep in mind when enabling this flag:
///
/// * Startup time and resize time may increase.
/// * Baseline memory consumption may increase.
/// * Compatibility with other delegates (e.g., GPU) has not been fully validated.
/// * Quantized models will not see any benefit.
///
/// - Warning: This is an experimental interface that is subject to change.
public var isXNNPackEnabled: Bool = false
/// Creates a new instance with the default values.
public init() {}
}
}
/// A type alias for `Interpreter.Options` to support backwards compatiblity with the deprecated
/// A type alias for `Interpreter.Options` to support backwards compatibility with the deprecated
/// `InterpreterOptions` struct.
@available(*, deprecated, renamed: "Interpreter.Options")
public typealias InterpreterOptions = Interpreter.Options
......@@ -284,8 +339,19 @@ extension String {
/// - cFormat: The format C array as a template for substituting values.
/// - arguments: A C pointer to a `va_list` of arguments to substitute into `cFormat`.
init?(cFormat: UnsafePointer<CChar>, arguments: CVaListPointer) {
var buffer: UnsafeMutablePointer<CChar>?
guard vasprintf(&buffer, cFormat, arguments) != 0, let cString = buffer else { return nil }
self.init(validatingUTF8: cString)
#if os(Linux)
let length = Int(vsnprintf(nil, 0, cFormat, arguments) + 1) // null terminator
guard length > 0 else { return nil }
let buffer = UnsafeMutablePointer<CChar>.allocate(capacity: length)
defer {
buffer.deallocate()
}
guard vsnprintf(buffer, length, cFormat, arguments) == length - 1 else { return nil }
self.init(validatingUTF8: buffer)
#else
var buffer: UnsafeMutablePointer<CChar>?
guard vasprintf(&buffer, cFormat, arguments) != 0, let cString = buffer else { return nil }
self.init(validatingUTF8: cString)
#endif
}
}
......@@ -44,7 +44,7 @@ extension InterpreterError: LocalizedError {
case .failedToCreateInterpreter:
return "Failed to create the interpreter."
case .failedToResizeInputTensor(let index):
return "Failed to resize input tesnor at index \(index)."
return "Failed to resize input tensor at index \(index)."
case .failedToCopyDataToInputTensor:
return "Failed to copy data to input tensor."
case .failedToAllocateTensors:
......
......@@ -73,6 +73,8 @@ extension Tensor {
case float16
/// A 32-bit single precision floating point.
case float32
/// A 64-bit double precision floating point.
case float64
/// Creates a new instance from the given `TfLiteType` or `nil` if the data type is unsupported
/// or could not be determined because there was an error.
......@@ -94,6 +96,8 @@ extension Tensor {
self = .float16
case kTfLiteFloat32:
self = .float32
case kTfLiteFloat64:
self = .float64
case kTfLiteNoType:
fallthrough
default:
......
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1